mirror of https://github.com/9001/copyparty.git (synced 2025-10-24 16:43:55 +00:00)

Compare commits (96 Commits)
83fec3cca7, 3cefc99b7d, 3a38dcbc05, 7ff08bce57, fd490af434, 1195b8f17e, 28dce13776, 431f20177a,
87aff54d9d, f50462de82, 9bda8c7eb6, e83c63d239, b38533b0cc, 5ccca3fbd5, 9e850fc3ab, ffbfcd7e00,
5ea7590748, 290c3bc2bb, b12131e91c, 3b354447b0, d09ec6feaa, 21405c3fda, 13e5c96cab, 426687b75e,
c8f59fb978, 871dde79a9, e14d81bc6f, 514d046d1f, 4ed9528d36, 625560e642, 73ebd917d1, cd3e0afad2,
d8d1f94a86, 00dfd8cfd1, 273de6db31, c6c0eeb0ff, e70c74a3b5, f7d939eeab, e815c091b9, 963529b7cf,
638a52374d, d9d42b7aa2, ec7e5f36a2, 56110883ea, 7f8d7d6006, 49e4fb7e12, 8dbbea473f, 3d375d5114,
f3eae67d97, 40c1b19235, ccaf0ab159, d07f147423, f5cb9f92b9, f991f74983, 6b3295059e, b18a07ae6b,
8ab03dabda, 5e760e35dc, afbfa04514, 7aace470c5, b4acb24f6a, bcee8a4934, 36b0718542, 9a92bca45d,
b07445a363, a62ec0c27e, 57e3a2d382, b61022b374, a3e2b2ec87, a83d3f8801, 90c5f2b9d2, 4885653c07,
21e1cd87ca, 81f82e8e9f, c0e31851da, 6599c3eced, 5d6c61a861, 1a5c66edd3, deae9fe95a, abd65c6334,
8137a99904, 6f6f9c1f74, 7b575f716f, 6ba6ea3572, 9a22ad5ea3, beaab9778e, f327bdb6b4, ae180e0f5f,
e3f1d19756, 93c2bd6ef6, 4d0e5ff6db, 0893f06919, 46b6abde3f, 0696610dee, edf0d3684c, 7af159f5f6
.vscode/tasks.json (vendored): 6 lines changed

@@ -8,8 +8,10 @@
    },
    {
        "label": "no_dbg",
        "command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -e2ts -a ed:wark -v srv::r:aed:cnodupe -v dist:dist:r ;exit 1",
        "type": "shell"
        "type": "shell",
        "command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -e2ts -a ed:wark -v srv::r:aed:cnodupe -v dist:dist:r ;exit 1"
        // -v ~/Music/mt:mt:r:cmtp=.bpm=~/dev/copyparty/bin/mtag/audio-bpm.py:cmtp=key=~/dev/copyparty/bin/mtag/audio-key.py:ce2tsr
        // -v ~/Music/mt:mt:r:cmtp=.bpm=~/dev/copyparty/bin/mtag/audio-bpm.py:ce2tsr
    }
]
}
README.md: 92 lines changed

@@ -13,6 +13,31 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* code standard: `black`


## readme toc

* top
* [quickstart](#quickstart)
* [notes](#notes)
* [status](#status)
* [bugs](#bugs)
* [usage](#usage)
* [zip downloads](#zip-downloads)
* [searching](#searching)
* [search configuration](#search-configuration)
* [metadata from audio files](#metadata-from-audio-files)
* [file parser plugins](#file-parser-plugins)
* [complete examples](#complete-examples)
* [client examples](#client-examples)
* [dependencies](#dependencies)
* [optional gpl stuff](#optional-gpl-stuff)
* [sfx](#sfx)
* [sfx repack](#sfx-repack)
* [install on android](#install-on-android)
* [dev env setup](#dev-env-setup)
* [how to release](#how-to-release)
* [todo](#todo)


## quickstart

download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!
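to try it out, a minimal sketch (assuming python3 is on your PATH; going by the defaults this shares the folder you launch it from, so add `-a` accounts and `-v` volumes to restrict access):

```
# serve the current folder with default settings
# (add -a user:pass and -v src:dst:perms to lock it down)
python3 copyparty-sfx.py
```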
@@ -48,7 +73,7 @@ you may also want these, especially on servers:
* ☑ symlink/discard existing files (content-matching)
* download
  * ☑ single files in browser
  * ✖ folders as zip files
  * ☑ folders as zip / tar files
  * ☑ FUSE client (read-only)
* browser
  * ☑ tree-view
@@ -71,9 +96,38 @@ summary: it works! you can use it! (but technically not even close to beta)

* Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
* Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
* Windows: python 2.7 cannot handle filenames with mojibake
* hiding the contents at url `/d1/d2/d3` using `-v :d1/d2/d3:cd2d` has the side-effect of creating databases (for files/tags) inside folders d1 and d2, and those databases take precedence over the main db at the top of the vfs - this means all files in d2 and below will be reindexed unless you already had a vfs entry at or below d2
* probably more, pls let me know


# usage

the browser has the following hotkeys
* `0..9` jump to 10%..90%
* `U/O` skip 10sec back/forward
* `J/L` prev/next song
* `I/K` prev/next folder
* `P` parent folder


## zip downloads

the `zip` link next to folders can produce various types of zip/tar files using these alternatives in the browser settings tab:

| name | url-suffix | description |
|--|--|--|
| `tar` | `?tar` | plain gnutar, works great with `curl \| tar -xv` |
| `zip` | `?zip=utf8` | works everywhere, glitchy filenames on win7 and older |
| `zip_dos` | `?zip` | traditional cp437 (no unicode) to fix glitchy filenames |
| `zip_crc` | `?zip=crc` | cp437 with crc32 computed early for truly ancient software |

* hidden files (dotfiles) are excluded unless `-ed`
* the up2k.db is always excluded
* `zip_crc` will take longer to download since the server has to read each file twice
  * please let me know if you find a program old enough to actually need this
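as a concrete sketch, the `?tar` suffix pairs nicely with curl for grabbing a whole folder in one go (hostname and folder below are placeholders):

```
# stream a folder as plain gnutar and unpack it on the fly
curl http://yourserver/music/?tar | tar -xv
```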
# searching

when started with `-e2dsa` copyparty will scan/index all your files. This avoids duplicates on upload, and also makes the volumes searchable through the web-ui:

@@ -100,19 +154,24 @@ through arguments:
* `-e2tsr` deletes all existing tags, so a full reindex

the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
* `-v ~/music::ce2dsa:ce2tsr` does a full reindex of everything on startup
* `-v ~/music::cd2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`
* `-v ~/music::r:ce2dsa:ce2tsr` does a full reindex of everything on startup
* `-v ~/music::r:cd2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::r:cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`

`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those


## metadata from audio files

`-mte` decides which tags to index and display in the browser (and also the display order), this can be changed per-volume:
* `-v ~/music::cmte=title,artist` indexes and displays *title* followed by *artist*
* `-v ~/music::r:cmte=title,artist` indexes and displays *title* followed by *artist*

if you add/remove a tag from `mte` you will need to run with `-e2tsr` once to rebuild the database, otherwise only new files will be affected

`-mtm` can be used to add or redefine a metadata mapping, say you have media files with `foo` and `bar` tags and you want them to display as `qux` in the browser (preferring `foo` if both are present), then do `-mtm qux=foo,bar` and now you can `-mte artist,title,qux`

tags that start with a `.` such as `.bpm` and `.dur`(ation) indicate numeric value
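putting that together, a sketch of the mapping in action (the `foo`/`bar`/`qux` names are just the placeholders from the paragraph above):

```
# index ~/music and show the mapped qux tag after artist and title
python copyparty-sfx.py -v ~/music::r -e2dsa -e2ts -mtm qux=foo,bar -mte artist,title,qux
```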
see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)

`--no-mutagen` disables mutagen and uses ffprobe instead, which...

@@ -122,6 +181,21 @@ see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copy
* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve
## file parser plugins

copyparty can invoke external programs to collect additional metadata for files using `mtp` (as argument or volume flag); there is a default timeout of 30sec

* `-mtp .bpm=~/bin/audio-bpm.py` will execute `~/bin/audio-bpm.py` with the audio file as argument 1 to provide the `.bpm` tag, if that does not exist in the audio metadata
* `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
* `-v ~/music::r:cmtp=.bpm=~/bin/audio-bpm.py:cmtp=key=f,t5,~/bin/audio-key.py` both as a per-volume config wow this is getting ugly


## complete examples

* read-only music server with bpm and key scanning
  `python copyparty-sfx.py -v /mnt/nas/music:/music:r -e2dsa -e2ts -mtp .bpm=f,audio-bpm.py -mtp key=f,audio-key.py`


# client examples

* javascript: dump some state into a file (two separate examples)
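for a quick test without any javascript, a plain HTTP PUT should get you the same kind of result (a hedged sketch; host and folder are placeholders and the folder must be writable for you):

```
# upload a file with a single PUT;
# the response is a truncated base64 sha512 of the uploaded data
curl -T state.json http://yourserver/dump/
```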
@@ -158,6 +232,13 @@ copyparty returns a truncated sha512sum of your PUT/POST as base64; you can gene
* `Pillow` (requires py2.7 or py3.5+)


## optional gpl stuff

some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag)

these are standalone and will never be imported / evaluated by copyparty


# sfx

currently there are two self-contained binaries:

@@ -212,6 +293,7 @@ pip install black bandit pylint flake8 # vscode tooling
in the `scripts` folder:

* run `make -C deps-docker` to build all dependencies
* `git tag v1.2.3 && git push origin --tags`
* create github release with `make-tgz-release.sh`
* upload to pypi with `make-pypi-release.(sh|bat)`
* create sfx with `make-sfx.sh`
@@ -1,4 +1,4 @@
# copyparty-fuse.py
# [`copyparty-fuse.py`](copyparty-fuse.py)
* mount a copyparty server as a local filesystem (read-only)
* **supports Windows!** -- expect `194 MiB/s` sequential read
* **supports Linux** -- expect `117 MiB/s` sequential read

@@ -29,7 +29,7 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas


# copyparty-fuse🅱️.py
# [`copyparty-fuse🅱️.py`](copyparty-fuseb.py)
* mount a copyparty server as a local filesystem (read-only)
* does the same thing except more correct, `samba` approves
* **supports Linux** -- expect `18 MiB/s` (wait what)

@@ -37,5 +37,11 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas


# copyparty-fuse-streaming.py
# [`copyparty-fuse-streaming.py`](copyparty-fuse-streaming.py)
* pretend this doesn't exist


# [`mtag/`](mtag/)
* standalone programs which perform misc. file analysis
* copyparty can Popen programs like these during file indexing to collect additional metadata
@@ -1008,6 +1008,12 @@ def main():
        log = null_log
        dbg = null_log

    if ar.a and ar.a.startswith("$"):
        fn = ar.a[1:]
        log("reading password from file [{}]".format(fn))
        with open(fn, "rb") as f:
            ar.a = f.read().decode("utf-8").strip()

    if WINDOWS:
        os.system("rem")
bin/mtag/README.md (new file): 34 lines

@@ -0,0 +1,34 @@
standalone programs which take an audio file as argument

some of these rely on libraries which are not MIT-compatible

* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3


# dependencies

run [`install-deps.sh`](install-deps.sh) to build/install most dependencies required by these programs (supports windows/linux/macos)

*alternatively* (or preferably) use packages from your distro instead, then you'll need at least these:

* from distro: `numpy vamp-plugin-sdk beatroot-vamp mixxx-keyfinder ffmpeg`
* from pypi: `keyfinder vamp`


# usage from copyparty

`copyparty -e2dsa -e2ts -mtp key=f,audio-key.py -mtp .bpm=f,audio-bpm.py`

* `f,` makes the detected value replace any existing values
* the `.` in `.bpm` indicates numeric value
* assumes the python files are in the folder you're launching copyparty from, replace the filename with a relative/absolute path if that's not the case
* `mtp` modules will not run if a file has existing tags in the db, so clear out the tags with `-e2tsr` the first time you launch with new `mtp` options


## usage with volume-flags

instead of affecting all volumes, you can set the options for just one volume like so:
```
copyparty -v /mnt/nas/music:/music:r:cmtp=key=f,audio-key.py:cmtp=.bpm=f,audio-bpm.py:ce2dsa:ce2ts
```
bin/mtag/audio-bpm.py (new executable file): 69 lines

@@ -0,0 +1,69 @@
#!/usr/bin/env python

import os
import sys
import vamp
import tempfile
import numpy as np
import subprocess as sp

from copyparty.util import fsenc


"""
dep: vamp
dep: beatroot-vamp
dep: ffmpeg
"""


def det(tf):
    # fmt: off
    sp.check_call([
        "ffmpeg",
        "-nostdin",
        "-hide_banner",
        "-v", "fatal",
        "-ss", "13",
        "-y", "-i", fsenc(sys.argv[1]),
        "-ac", "1",
        "-ar", "22050",
        "-t", "300",
        "-f", "f32le",
        tf
    ])
    # fmt: on

    with open(tf, "rb") as f:
        d = np.fromfile(f, dtype=np.float32)

    try:
        # 98% accuracy on jcore
        c = vamp.collect(d, 22050, "beatroot-vamp:beatroot")
        cl = c["list"]
    except:
        # fallback; 73% accuracy
        plug = "vamp-example-plugins:fixedtempo"
        c = vamp.collect(d, 22050, plug, parameters={"maxdflen": 40})
        print(c["list"][0]["label"].split(" ")[0])
        return

    # throws if detection failed:
    bpm = float(cl[-1]["timestamp"] - cl[1]["timestamp"])
    bpm = round(60 * ((len(cl) - 1) / bpm), 2)
    print(f"{bpm:.2f}")


def main():
    with tempfile.NamedTemporaryFile(suffix=".pcm", delete=False) as f:
        f.write(b"h")
        tf = f.name

    try:
        det(tf)
    except:
        pass
    finally:
        os.unlink(tf)


if __name__ == "__main__":
    main()
bin/mtag/audio-key.py (new executable file): 18 lines

@@ -0,0 +1,18 @@
#!/usr/bin/env python

import sys
import keyfinder

"""
dep: github/mixxxdj/libkeyfinder
dep: pypi/keyfinder
dep: ffmpeg

note: cannot fsenc
"""


try:
    print(keyfinder.key(sys.argv[1]).camelot())
except:
    pass
bin/mtag/install-deps.sh (new executable file): 265 lines

@@ -0,0 +1,265 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
|
||||
# install dependencies for audio-*.py
|
||||
#
|
||||
# linux: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf
|
||||
# win64: requires msys2-mingw64 environment
|
||||
# macos: requires macports
|
||||
#
|
||||
# has the following manual dependencies, especially on mac:
|
||||
# https://www.vamp-plugins.org/pack.html
|
||||
#
|
||||
# installs stuff to the following locations:
|
||||
# ~/pe/
|
||||
# whatever your python uses for --user packages
|
||||
#
|
||||
# does the following terrible things:
|
||||
# modifies the keyfinder python lib to load the .so in ~/pe
|
||||
|
||||
|
||||
linux=1
|
||||
|
||||
win=
|
||||
[ ! -z "$MSYSTEM" ] || [ -e /msys2.exe ] && {
|
||||
[ "$MSYSTEM" = MINGW64 ] || {
|
||||
echo windows detected, msys2-mingw64 required
|
||||
exit 1
|
||||
}
|
||||
pacman -S --needed mingw-w64-x86_64-{ffmpeg,python,python-pip,vamp-plugin-sdk}
|
||||
win=1
|
||||
linux=
|
||||
}
|
||||
|
||||
mac=
|
||||
[ $(uname -s) = Darwin ] && {
|
||||
#pybin="$(printf '%s\n' /opt/local/bin/python* | (sed -E 's/(.*\/[^/0-9]+)([0-9]?[^/]*)$/\2 \1/' || cat) | (sort -nr || cat) | (sed -E 's/([^ ]*) (.*)/\2\1/' || cat) | grep -E '/(python|pypy)[0-9\.-]*$' | head -n 1)"
|
||||
pybin=/opt/local/bin/python3.9
|
||||
[ -e "$pybin" ] || {
|
||||
echo mac detected, python3 from macports required
|
||||
exit 1
|
||||
}
|
||||
pkgs='ffmpeg python39 py39-wheel'
|
||||
ninst=$(port installed | awk '/^ /{print$1}' | sort | uniq | grep -E '^('"$(echo "$pkgs" | tr ' ' '|')"')$' | wc -l)
|
||||
[ $ninst -eq 3 ] || {
|
||||
sudo port install $pkgs
|
||||
}
|
||||
mac=1
|
||||
linux=
|
||||
}
|
||||
|
||||
hash -r
|
||||
|
||||
[ $mac ] || {
|
||||
command -v python3 && pybin=python3 || pybin=python
|
||||
}
|
||||
|
||||
$pybin -m pip install --user numpy
|
||||
|
||||
|
||||
command -v gnutar && tar() { gnutar "$@"; }
|
||||
command -v gtar && tar() { gtar "$@"; }
|
||||
command -v gsed && sed() { gsed "$@"; }
|
||||
|
||||
|
||||
need() {
|
||||
command -v $1 >/dev/null || {
|
||||
echo need $1
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
need cmake
|
||||
need ffmpeg
|
||||
need $pybin
|
||||
#need patchelf
|
||||
|
||||
|
||||
td="$(mktemp -d)"
|
||||
cln() {
|
||||
rm -rf "$td"
|
||||
}
|
||||
trap cln EXIT
|
||||
cd "$td"
|
||||
pwd
|
||||
|
||||
|
||||
dl_text() {
|
||||
command -v curl >/dev/null && exec curl "$@"
|
||||
exec wget -O- "$@"
|
||||
}
|
||||
dl_files() {
|
||||
local yolo= ex=
|
||||
[ $1 = "yolo" ] && yolo=1 && ex=k && shift
|
||||
command -v curl >/dev/null && exec curl -${ex}JOL "$@"
|
||||
|
||||
[ $yolo ] && ex=--no-check-certificate
|
||||
exec wget --trust-server-names $ex "$@"
|
||||
}
|
||||
export -f dl_files
|
||||
|
||||
|
||||
github_tarball() {
|
||||
dl_text "$1" |
|
||||
tee json |
|
||||
(
|
||||
# prefer jq if available
|
||||
jq -r '.tarball_url' ||
|
||||
|
||||
# fallback to awk (sorry)
|
||||
awk -F\" '/"tarball_url": "/ {print$4}'
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
}
|
||||
|
||||
|
||||
gitlab_tarball() {
|
||||
dl_text "$1" |
|
||||
tee json |
|
||||
(
|
||||
# prefer jq if available
|
||||
jq -r '.[0].assets.sources[]|select(.format|test("tar.gz")).url' ||
|
||||
|
||||
# fallback to abomination
|
||||
tr \" '\n' | grep -E '\.tar\.gz$' | head -n 1
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
tee links |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
}
|
||||
|
||||
|
||||
install_keyfinder() {
|
||||
# windows support:
|
||||
# use msys2 in mingw-w64 mode
|
||||
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python}
|
||||
|
||||
github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest
|
||||
|
||||
tar -xf mixxxdj-libkeyfinder-*
|
||||
rm -- *.tar.gz
|
||||
cd mixxxdj-libkeyfinder*
|
||||
|
||||
h="$HOME"
|
||||
so="lib/libkeyfinder.so"
|
||||
memes=()
|
||||
|
||||
[ $win ] &&
|
||||
so="bin/libkeyfinder.dll" &&
|
||||
h="$(printf '%s\n' "$USERPROFILE" | tr '\\' '/')" &&
|
||||
memes+=(-G "MinGW Makefiles" -DBUILD_TESTING=OFF)
|
||||
|
||||
[ $mac ] &&
|
||||
so="lib/libkeyfinder.dylib"
|
||||
|
||||
cmake -DCMAKE_INSTALL_PREFIX="$h/pe/keyfinder" "${memes[@]}" -S . -B build
|
||||
cmake --build build --parallel $(nproc || echo 4)
|
||||
cmake --install build
|
||||
|
||||
libpath="$h/pe/keyfinder/$so"
|
||||
[ $linux ] && [ ! -e "$libpath" ] &&
|
||||
so=lib64/libkeyfinder.so
|
||||
|
||||
libpath="$h/pe/keyfinder/$so"
|
||||
[ -e "$libpath" ] || {
|
||||
echo "so not found at $sop"
|
||||
exit 1
|
||||
}
|
||||
|
||||
# rm -rf /Users/ed/Library/Python/3.9/lib/python/site-packages/*keyfinder*
|
||||
CFLAGS="-I$h/pe/keyfinder/include -I/opt/local/include" \
|
||||
LDFLAGS="-L$h/pe/keyfinder/lib -L$h/pe/keyfinder/lib64 -L/opt/local/lib" \
|
||||
PKG_CONFIG_PATH=/c/msys64/mingw64/lib/pkgconfig \
|
||||
$pybin -m pip install --user keyfinder
|
||||
|
||||
pypath="$($pybin -c 'import keyfinder; print(keyfinder.__file__)')"
|
||||
for pyso in "${pypath%/*}"/*.so; do
|
||||
[ -e "$pyso" ] || break
|
||||
patchelf --set-rpath "${libpath%/*}" "$pyso" ||
|
||||
echo "WARNING: patchelf failed (only fatal on musl-based distros)"
|
||||
done
|
||||
|
||||
mv "$pypath"{,.bak}
|
||||
(
|
||||
printf 'import ctypes\nctypes.cdll.LoadLibrary("%s")\n' "$libpath"
|
||||
cat "$pypath.bak"
|
||||
) >"$pypath"
|
||||
|
||||
echo
|
||||
echo libkeyfinder successfully installed to the following locations:
|
||||
echo " $libpath"
|
||||
echo " $pypath"
|
||||
}
|
||||
|
||||
|
||||
have_beatroot() {
|
||||
$pybin -c 'import vampyhost, sys; plugs = vampyhost.list_plugins(); sys.exit(0 if "beatroot-vamp:beatroot" in plugs else 1)'
|
||||
}
|
||||
|
||||
|
||||
install_vamp() {
|
||||
# windows support:
|
||||
# use msys2 in mingw-w64 mode
|
||||
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python,python-pip,vamp-plugin-sdk}
|
||||
|
||||
$pybin -m pip install --user vamp
|
||||
|
||||
have_beatroot || {
|
||||
printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n'
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/885/beatroot-vamp-v1.0.tar.gz)
|
||||
sha512sum -c <(
|
||||
echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -"
|
||||
) <beatroot-vamp-v1.0.tar.gz
|
||||
tar -xf beatroot-vamp-v1.0.tar.gz
|
||||
cd beatroot-vamp-v1.0
|
||||
make -f Makefile.linux -j4
|
||||
# /home/ed/vamp /home/ed/.vamp /usr/local/lib/vamp
|
||||
mkdir ~/vamp
|
||||
cp -pv beatroot-vamp.* ~/vamp/
|
||||
}
|
||||
|
||||
have_beatroot &&
|
||||
printf '\033[32mfound the vamp beatroot plugin, nice\033[0m\n' ||
|
||||
printf '\033[31mWARNING: could not find the vamp beatroot plugin, please install it for optimal results\033[0m\n'
|
||||
}
|
||||
|
||||
|
||||
# not in use because it kinda segfaults, also no windows support
|
||||
install_soundtouch() {
|
||||
gitlab_tarball https://gitlab.com/api/v4/projects/soundtouch%2Fsoundtouch/releases
|
||||
|
||||
tar -xvf soundtouch-*
|
||||
rm -- *.tar.gz
|
||||
cd soundtouch-*
|
||||
|
||||
# https://github.com/jrising/pysoundtouch
|
||||
./bootstrap
|
||||
./configure --enable-integer-samples CXXFLAGS="-fPIC" --prefix="$HOME/pe/soundtouch"
|
||||
make -j$(nproc || echo 4)
|
||||
make install
|
||||
|
||||
CFLAGS=-I$HOME/pe/soundtouch/include/ \
|
||||
LDFLAGS=-L$HOME/pe/soundtouch/lib \
|
||||
$pybin -m pip install --user git+https://github.com/snowxmas/pysoundtouch.git
|
||||
|
||||
pypath="$($pybin -c 'import importlib; print(importlib.util.find_spec("soundtouch").origin)')"
|
||||
libpath="$(echo "$HOME/pe/soundtouch/lib/")"
|
||||
patchelf --set-rpath "$libpath" "$pypath"
|
||||
|
||||
echo
|
||||
echo soundtouch successfully installed to the following locations:
|
||||
echo " $libpath"
|
||||
echo " $pypath"
|
||||
}
|
||||
|
||||
|
||||
[ "$1" = keyfinder ] && { install_keyfinder; exit $?; }
|
||||
[ "$1" = soundtouch ] && { install_soundtouch; exit $?; }
|
||||
[ "$1" = vamp ] && { install_vamp; exit $?; }
|
||||
|
||||
echo no args provided, installing keyfinder and vamp
|
||||
install_keyfinder
|
||||
install_vamp
|
||||
bin/mtag/sleep.py (new file): 8 lines

@@ -0,0 +1,8 @@
#!/usr/bin/env python

import time
import random

v = random.random() * 6
time.sleep(v)
print(f"{v:.2f}")
@@ -12,16 +12,19 @@ import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import signal
|
||||
import shutil
|
||||
import filecmp
|
||||
import locale
|
||||
import argparse
|
||||
import threading
|
||||
import traceback
|
||||
from textwrap import dedent
|
||||
|
||||
from .__init__ import E, WINDOWS, VT100, PY2
|
||||
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
|
||||
from .svchub import SvcHub
|
||||
from .util import py_desc, align_tab
|
||||
from .util import py_desc, align_tab, IMPLICATIONS
|
||||
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
@@ -164,11 +167,24 @@ def configure_ssl_ciphers(al):
|
||||
sys.exit(0)
|
||||
|
||||
|
||||
def main():
|
||||
def sighandler(signal=None, frame=None):
|
||||
msg = [""] * 5
|
||||
for th in threading.enumerate():
|
||||
msg.append(str(th))
|
||||
msg.extend(traceback.format_stack(sys._current_frames()[th.ident]))
|
||||
|
||||
msg.append("\n")
|
||||
print("\n".join(msg))
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
if WINDOWS:
|
||||
os.system("rem") # enables colors
|
||||
|
||||
if argv is None:
|
||||
argv = sys.argv
|
||||
|
||||
desc = py_desc().replace("[", "\033[1;30m[")
|
||||
|
||||
f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0m\n'
|
||||
@@ -181,13 +197,13 @@ def main():
|
||||
deprecated = [["-e2s", "-e2ds"]]
|
||||
for dk, nk in deprecated:
|
||||
try:
|
||||
idx = sys.argv.index(dk)
|
||||
idx = argv.index(dk)
|
||||
except:
|
||||
continue
|
||||
|
||||
msg = "\033[1;31mWARNING:\033[0;1m\n {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
|
||||
print(msg.format(dk, nk))
|
||||
sys.argv[idx] = nk
|
||||
argv[idx] = nk
|
||||
time.sleep(2)
|
||||
|
||||
ap = argparse.ArgumentParser(
|
||||
@@ -241,15 +257,17 @@ def main():
|
||||
ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
|
||||
ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
|
||||
ap.add_argument("-q", action="store_true", help="quiet")
|
||||
ap.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
|
||||
ap.add_argument("-ed", action="store_true", help="enable ?dots")
|
||||
ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
|
||||
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
|
||||
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
|
||||
ap.add_argument("-nih", action="store_true", help="no info hostname")
|
||||
ap.add_argument("-nid", action="store_true", help="no info disk-usage")
|
||||
ap.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
|
||||
ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile (for debugging)")
|
||||
ap.add_argument("--no-scandir", action="store_true", help="disable scandir (for debugging)")
|
||||
ap.add_argument("--urlform", type=str, default="print,get", help="how to handle url-forms")
|
||||
ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms")
|
||||
ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
|
||||
|
||||
ap2 = ap.add_argument_group('database options')
|
||||
@@ -264,26 +282,22 @@ def main():
|
||||
ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q")
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]bin", action="append", type=str, help="read tag M using bin")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
|
||||
|
||||
ap2 = ap.add_argument_group('SSL/TLS options')
|
||||
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
|
||||
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
|
||||
ap2.add_argument("--ssl-ver", type=str, help="ssl/tls versions to allow")
|
||||
ap2.add_argument("--ssl-ver", metavar="LIST", type=str, help="ssl/tls versions to allow")
|
||||
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
|
||||
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
|
||||
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
|
||||
|
||||
al = ap.parse_args()
|
||||
al = ap.parse_args(args=argv[1:])
|
||||
# fmt: on
|
||||
|
||||
# propagate implications
|
||||
for k1, k2 in [
|
||||
["e2dsa", "e2ds"],
|
||||
["e2ds", "e2d"],
|
||||
["e2tsr", "e2ts"],
|
||||
["e2ts", "e2t"],
|
||||
["e2t", "e2d"],
|
||||
]:
|
||||
for k1, k2 in IMPLICATIONS:
|
||||
if getattr(al, k1):
|
||||
setattr(al, k2, True)
|
||||
|
||||
@@ -312,6 +326,8 @@ def main():
|
||||
+ " (if you crash with codec errors then that is why)"
|
||||
)
|
||||
|
||||
# signal.signal(signal.SIGINT, sighandler)
|
||||
|
||||
SvcHub(al).run()
|
||||
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
# coding: utf-8

VERSION = (0, 9, 7)
CODENAME = "the strongest music server"
BUILD_DT = (2021, 3, 8)
VERSION = (0, 10, 5)
CODENAME = "zip it"
BUILD_DT = (2021, 3, 31)

S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
@@ -1,12 +1,14 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
import threading
|
||||
|
||||
from .__init__ import PY2, WINDOWS
|
||||
from .util import undot, Pebkac, fsdec, fsenc, statdir, nuprint
|
||||
from .util import IMPLICATIONS, undot, Pebkac, fsdec, fsenc, statdir, nuprint
|
||||
|
||||
|
||||
class VFS(object):
|
||||
@@ -21,6 +23,14 @@ class VFS(object):
|
||||
self.nodes = {} # child nodes
|
||||
self.all_vols = {vpath: self} # flattened recursive
|
||||
|
||||
def __repr__(self):
|
||||
return "VFS({})".format(
|
||||
", ".join(
|
||||
"{}={!r}".format(k, self.__dict__[k])
|
||||
for k in "realpath vpath uread uwrite flags".split()
|
||||
)
|
||||
)
|
||||
|
||||
def _trk(self, vol):
|
||||
self.all_vols[vol.vpath] = vol
|
||||
return vol
|
||||
@@ -44,6 +54,7 @@ class VFS(object):
|
||||
self.uwrite,
|
||||
self.flags,
|
||||
)
|
||||
self._trk(vn)
|
||||
self.nodes[name] = vn
|
||||
return self._trk(vn.add(src, dst))
|
||||
|
||||
@@ -118,6 +129,73 @@ class VFS(object):
|
||||
|
||||
return [abspath, real, virt_vis]
|
||||
|
||||
def walk(self, rel, rem, uname, dots, scandir, lstat=False):
|
||||
"""
|
||||
recursively yields from ./rem;
|
||||
rel is a unix-style user-defined vpath (not vfs-related)
|
||||
"""
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, lstat)
|
||||
rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
|
||||
rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
||||
|
||||
rfiles.sort()
|
||||
rdirs.sort()
|
||||
|
||||
yield rel, fsroot, rfiles, rdirs, vfs_virt
|
||||
|
||||
for rdir, _ in rdirs:
|
||||
if not dots and rdir.startswith("."):
|
||||
continue
|
||||
|
||||
wrel = (rel + "/" + rdir).lstrip("/")
|
||||
wrem = (rem + "/" + rdir).lstrip("/")
|
||||
for x in self.walk(wrel, wrem, uname, scandir, lstat):
|
||||
yield x
|
||||
|
||||
for n, vfs in sorted(vfs_virt.items()):
|
||||
if not dots and n.startswith("."):
|
||||
continue
|
||||
|
||||
wrel = (rel + "/" + n).lstrip("/")
|
||||
for x in vfs.walk(wrel, "", uname, scandir, lstat):
|
||||
yield x
|
||||
|
||||
def zipgen(self, vrem, flt, uname, dots, scandir):
|
||||
if flt:
|
||||
flt = {k: True for k in flt}
|
||||
|
||||
for vpath, apath, files, rd, vd in self.walk("", vrem, uname, dots, scandir):
|
||||
if flt:
|
||||
files = [x for x in files if x[0] in flt]
|
||||
rd = [x for x in rd if x[0] in flt]
|
||||
vd = {x: y for x, y in vd.items() if x in flt}
|
||||
flt = None
|
||||
|
||||
# print(repr([vpath, apath, [x[0] for x in files]]))
|
||||
fnames = [n[0] for n in files]
|
||||
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
|
||||
apaths = [os.path.join(apath, n) for n in fnames]
|
||||
files = list(zip(vpaths, apaths, files))
|
||||
|
||||
if not dots:
|
||||
# dotfile filtering based on vpath (intended visibility)
|
||||
files = [x for x in files if "/." not in "/" + x[0]]
|
||||
|
||||
rm = [x for x in rd if x[0].startswith(".")]
|
||||
for x in rm:
|
||||
rd.remove(x)
|
||||
|
||||
rm = [k for k in vd.keys() if k.startswith(".")]
|
||||
for x in rm:
|
||||
del vd[x]
|
||||
|
||||
# up2k filetring based on actual abspath
|
||||
files = [x for x in files if "{0}.hist{0}up2k.".format(os.sep) not in x[1]]
|
||||
|
||||
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
|
||||
yield f
|
||||
|
||||
def user_tree(self, uname, readable=False, writable=False):
|
||||
ret = []
|
||||
opt1 = readable and (uname in self.uread or "*" in self.uread)
|
||||
@@ -200,16 +278,39 @@ class AuthSrv(object):
|
||||
continue
|
||||
|
||||
lvl, uname = ln.split(" ")
|
||||
if lvl in "ra":
|
||||
mread[vol_dst].append(uname)
|
||||
if lvl in "wa":
|
||||
mwrite[vol_dst].append(uname)
|
||||
if lvl == "c":
|
||||
cval = True
|
||||
if "=" in uname:
|
||||
uname, cval = uname.split("=", 1)
|
||||
self._read_vol_str(
|
||||
lvl, uname, mread[vol_dst], mwrite[vol_dst], mflags[vol_dst]
|
||||
)
|
||||
|
||||
mflags[vol_dst][uname] = cval
|
||||
def _read_vol_str(self, lvl, uname, mr, mw, mf):
|
||||
if lvl == "c":
|
||||
cval = True
|
||||
if "=" in uname:
|
||||
uname, cval = uname.split("=", 1)
|
||||
|
||||
self._read_volflag(mf, uname, cval, False)
|
||||
return
|
||||
|
||||
if uname == "":
|
||||
uname = "*"
|
||||
|
||||
if lvl in "ra":
|
||||
mr.append(uname)
|
||||
|
||||
if lvl in "wa":
|
||||
mw.append(uname)
|
||||
|
||||
def _read_volflag(self, flags, name, value, is_list):
|
||||
if name not in ["mtp"]:
|
||||
flags[name] = value
|
||||
return
|
||||
|
||||
if not is_list:
|
||||
value = [value]
|
||||
elif not value:
|
||||
return
|
||||
|
||||
flags[name] = flags.get(name, []) + value
|
||||
|
||||
def reload(self):
|
||||
"""
|
||||
@@ -232,7 +333,7 @@ class AuthSrv(object):
|
||||
|
||||
if self.args.v:
|
||||
# list of src:dst:permset:permset:...
|
||||
# permset is [rwa]username
|
||||
# permset is [rwa]username or [c]flag
|
||||
for v_str in self.args.v:
|
||||
m = self.re_vol.match(v_str)
|
||||
if not m:
|
||||
@@ -249,22 +350,7 @@ class AuthSrv(object):
|
||||
|
||||
perms = perms.split(":")
|
||||
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
|
||||
if lvl == "c":
|
||||
cval = True
|
||||
if "=" in uname:
|
||||
uname, cval = uname.split("=", 1)
|
||||
|
||||
mflags[dst][uname] = cval
|
||||
continue
|
||||
|
||||
if uname == "":
|
||||
uname = "*"
|
||||
|
||||
if lvl in "ra":
|
||||
mread[dst].append(uname)
|
||||
|
||||
if lvl in "wa":
|
||||
mwrite[dst].append(uname)
|
||||
self._read_vol_str(lvl, uname, mread[dst], mwrite[dst], mflags[dst])
|
||||
|
||||
if self.args.c:
|
||||
for cfg_fn in self.args.c:
|
||||
@@ -310,6 +396,8 @@ class AuthSrv(object):
|
||||
)
|
||||
raise Exception("invalid config")
|
||||
|
||||
all_mte = {}
|
||||
errors = False
|
||||
for vol in vfs.all_vols.values():
|
||||
if (self.args.e2ds and vol.uwrite) or self.args.e2dsa:
|
||||
vol.flags["e2ds"] = True
|
||||
@@ -321,10 +409,75 @@ class AuthSrv(object):
|
||||
if getattr(self.args, k):
|
||||
vol.flags[k] = True
|
||||
|
||||
for k1, k2 in IMPLICATIONS:
|
||||
if k1 in vol.flags:
|
||||
vol.flags[k2] = True
|
||||
|
||||
# default tag-list if unset
|
||||
if "mte" not in vol.flags:
|
||||
vol.flags["mte"] = self.args.mte
|
||||
|
||||
# append parsers from argv to volume-flags
|
||||
self._read_volflag(vol.flags, "mtp", self.args.mtp, True)
|
||||
|
||||
# d2d drops all database features for a volume
|
||||
for grp, rm in [["d2d", "e2d"], ["d2t", "e2t"]]:
|
||||
if not vol.flags.get(grp, False):
|
||||
continue
|
||||
|
||||
vol.flags["d2t"] = True
|
||||
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
|
||||
|
||||
# mt* needs e2t so drop those too
|
||||
for grp, rm in [["e2t", "mt"]]:
|
||||
if vol.flags.get(grp, False):
|
||||
continue
|
||||
|
||||
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
|
||||
|
||||
# verify tags mentioned by -mt[mp] are used by -mte
|
||||
local_mtp = {}
|
||||
local_only_mtp = {}
|
||||
for a in vol.flags.get("mtp", []) + vol.flags.get("mtm", []):
|
||||
a = a.split("=")[0]
|
||||
local_mtp[a] = True
|
||||
local = True
|
||||
for b in self.args.mtp or []:
|
||||
b = b.split("=")[0]
|
||||
if a == b:
|
||||
local = False
|
||||
|
||||
if local:
|
||||
local_only_mtp[a] = True
|
||||
|
||||
local_mte = {}
|
||||
for a in vol.flags.get("mte", "").split(","):
|
||||
local = True
|
||||
all_mte[a] = True
|
||||
local_mte[a] = True
|
||||
for b in self.args.mte.split(","):
|
||||
if not a or not b:
|
||||
continue
|
||||
|
||||
if a == b:
|
||||
local = False
|
||||
|
||||
for mtp in local_only_mtp.keys():
|
||||
if mtp not in local_mte:
|
||||
m = 'volume "/{}" defines metadata tag "{}", but doesnt use it in "-mte" (or with "cmte" in its volume-flags)'
|
||||
self.log(m.format(vol.vpath, mtp), 1)
|
||||
errors = True
|
||||
|
||||
for mtp in self.args.mtp or []:
|
||||
mtp = mtp.split("=")[0]
|
||||
if mtp not in all_mte:
|
||||
m = 'metadata tag "{}" is defined by "-mtm" or "-mtp", but is not used by "-mte" (or by any "cmte" volume-flag)'
|
||||
self.log(m.format(mtp), 1)
|
||||
errors = True
|
||||
|
||||
if errors:
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
v, _ = vfs.get("/", "*", False, True)
|
||||
if self.warn_anonwrite and os.getcwd() == v.realpath:
|
||||
|
||||
@@ -51,7 +51,7 @@ class BrokerMp(object):
|
||||
self.procs.append(proc)
|
||||
proc.start()
|
||||
|
||||
if True:
|
||||
if not self.args.q:
|
||||
thr = threading.Thread(target=self.debug_load_balancer)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
@@ -73,7 +73,9 @@ class MpWorker(object):
|
||||
if PY2:
|
||||
sck = pickle.loads(sck) # nosec
|
||||
|
||||
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
|
||||
if self.args.log_conn:
|
||||
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
|
||||
|
||||
self.httpsrv.accept(sck, addr)
|
||||
|
||||
with self.mutex:
|
||||
|
||||
@@ -28,7 +28,9 @@ class BrokerThr(object):
|
||||
def put(self, want_retval, dest, *args):
|
||||
if dest == "httpconn":
|
||||
sck, addr = args
|
||||
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
|
||||
if self.args.log_conn:
|
||||
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
|
||||
|
||||
self.httpsrv.accept(sck, addr)
|
||||
|
||||
else:
|
||||
|
||||
@@ -7,6 +7,7 @@ import gzip
|
||||
import time
|
||||
import copy
|
||||
import json
|
||||
import string
|
||||
import socket
|
||||
import ctypes
|
||||
from datetime import datetime
|
||||
@@ -14,6 +15,8 @@ import calendar
|
||||
|
||||
from .__init__ import E, PY2, WINDOWS
|
||||
from .util import * # noqa # pylint: disable=unused-wildcard-import
|
||||
from .szip import StreamZip
|
||||
from .star import StreamTar
|
||||
|
||||
if not PY2:
|
||||
unicode = str
|
||||
@@ -45,13 +48,17 @@ class HttpCli(object):
|
||||
self.log_func(self.log_src, msg, c)
|
||||
|
||||
def _check_nonfatal(self, ex):
|
||||
return ex.code < 400 or ex.code == 404
|
||||
return ex.code < 400 or ex.code in [404, 429]
|
||||
|
||||
def _assert_safe_rem(self, rem):
|
||||
# sanity check to prevent any disasters
|
||||
if rem.startswith("/") or rem.startswith("../") or "/../" in rem:
|
||||
raise Exception("that was close")
|
||||
|
||||
def j2(self, name, **kwargs):
|
||||
tpl = self.conn.hsrv.j2[name]
|
||||
return tpl.render(**kwargs) if kwargs else tpl
|
||||
|
||||
def run(self):
|
||||
"""returns true if connection can be reused"""
|
||||
self.keepalive = False
|
||||
@@ -154,7 +161,9 @@ class HttpCli(object):
|
||||
try:
|
||||
# self.log("pebkac at httpcli.run #2: " + repr(ex))
|
||||
self.keepalive = self._check_nonfatal(ex)
|
||||
self.loud_reply("{}: {}".format(str(ex), self.vpath), status=ex.code)
|
||||
self.log("{}\033[0m, {}".format(str(ex), self.vpath), 3)
|
||||
msg = "<pre>{}\r\nURL: {}\r\n".format(str(ex), self.vpath)
|
||||
self.reply(msg.encode("utf-8", "replace"), status=ex.code)
|
||||
return self.keepalive
|
||||
except Pebkac:
|
||||
return False
|
||||
@@ -388,8 +397,30 @@ class HttpCli(object):
|
||||
if act == "tput":
|
||||
return self.handle_text_upload()
|
||||
|
||||
if act == "zip":
|
||||
return self.handle_zip_post()
|
||||
|
||||
raise Pebkac(422, 'invalid action "{}"'.format(act))
|
||||
|
||||
def handle_zip_post(self):
|
||||
for k in ["zip", "tar"]:
|
||||
v = self.uparam.get(k)
|
||||
if v is not None:
|
||||
break
|
||||
|
||||
if v is None:
|
||||
raise Pebkac(422, "need zip or tar keyword")
|
||||
|
||||
vn, rem = self.auth.vfs.get(self.vpath, self.uname, True, False)
|
||||
items = self.parser.require("files", 1024 * 1024)
|
||||
if not items:
|
||||
raise Pebkac(422, "need files list")
|
||||
|
||||
items = items.replace("\r", "").split("\n")
|
||||
items = [unquotep(x) for x in items if items]
|
||||
|
||||
return self.tx_zip(k, v, vn, rem, items, self.args.ed)
|
||||
|
||||
def handle_post_json(self):
|
||||
try:
|
||||
remains = int(self.headers["content-length"])
|
||||
@@ -417,15 +448,18 @@ class HttpCli(object):
|
||||
if "srch" in self.uparam or "srch" in body:
|
||||
return self.handle_search(body)
|
||||
|
||||
# prefer this over undot; no reason to allow traversion
|
||||
if "/" in body["name"]:
|
||||
raise Pebkac(400, "folders verboten")
|
||||
|
||||
# up2k-php compat
|
||||
for k in "chunkpit.php", "handshake.php":
|
||||
if self.vpath.endswith(k):
|
||||
self.vpath = self.vpath[: -len(k)]
|
||||
|
||||
sub = None
|
||||
name = undot(body["name"])
|
||||
if "/" in name:
|
||||
sub, name = name.rsplit("/", 1)
|
||||
self.vpath = "/".join([self.vpath, sub]).strip("/")
|
||||
body["name"] = name
|
||||
|
||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
||||
|
||||
body["vtop"] = vfs.vpath
|
||||
@@ -434,12 +468,22 @@ class HttpCli(object):
|
||||
body["addr"] = self.ip
|
||||
body["vcfg"] = vfs.flags
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
|
||||
response = x.get()
|
||||
response = json.dumps(response)
|
||||
if sub:
|
||||
try:
|
||||
dst = os.path.join(vfs.realpath, rem)
|
||||
os.makedirs(dst)
|
||||
except:
|
||||
if not os.path.isdir(dst):
|
||||
raise Pebkac(400, "some file got your folder name")
|
||||
|
||||
self.log(response)
|
||||
self.reply(response.encode("utf-8"), mime="application/json")
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
|
||||
ret = x.get()
|
||||
if sub:
|
||||
ret["name"] = "/".join([sub, ret["name"]])
|
||||
|
||||
ret = json.dumps(ret)
|
||||
self.log(ret)
|
||||
self.reply(ret.encode("utf-8"), mime="application/json")
|
||||
return True
|
||||
|
||||
def handle_search(self, body):
|
||||
@@ -450,19 +494,30 @@ class HttpCli(object):
|
||||
|
||||
idx = self.conn.get_u2idx()
|
||||
t0 = time.time()
|
||||
if idx.p_end:
|
||||
penalty = 0.7
|
||||
t_idle = t0 - idx.p_end
|
||||
if idx.p_dur > 0.7 and t_idle < penalty:
|
||||
m = "rate-limit ({:.1f} sec), cost {:.2f}, idle {:.2f}"
|
||||
raise Pebkac(429, m.format(penalty, idx.p_dur, t_idle))
|
||||
|
||||
if "srch" in body:
|
||||
# search by up2k hashlist
|
||||
vbody = copy.deepcopy(body)
|
||||
vbody["hash"] = len(vbody["hash"])
|
||||
self.log("qj: " + repr(vbody))
|
||||
hits = idx.fsearch(vols, body)
|
||||
self.log("q#: {} ({:.2f}s)".format(repr(hits), time.time() - t0))
|
||||
msg = repr(hits)
|
||||
taglist = []
|
||||
else:
|
||||
# search by query params
|
||||
self.log("qj: " + repr(body))
|
||||
hits, taglist = idx.search(vols, body)
|
||||
self.log("q#: {} ({:.2f}s)".format(len(hits), time.time() - t0))
|
||||
msg = len(hits)
|
||||
|
||||
idx.p_end = time.time()
|
||||
idx.p_dur = idx.p_end - t0
|
||||
self.log("q#: {} ({:.2f}s)".format(msg, idx.p_dur))
|
||||
|
||||
order = []
|
||||
cfg = self.args.mte.split(",")
|
||||
@@ -569,7 +624,7 @@ class HttpCli(object):
|
||||
pwd = "x" # nosec
|
||||
|
||||
h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)}
|
||||
html = self.conn.tpl_msg.render(h1=msg, h2='<a href="/">ack</a>', redir="/")
|
||||
html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
|
||||
self.reply(html.encode("utf-8"), headers=h)
|
||||
return True
|
||||
|
||||
@@ -600,7 +655,8 @@ class HttpCli(object):
|
||||
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
esc_paths = [quotep(vpath), html_escape(vpath)]
|
||||
html = self.conn.tpl_msg.render(
|
||||
html = self.j2(
|
||||
"msg",
|
||||
h2='<a href="/{}">go to /{}</a>'.format(*esc_paths),
|
||||
pre="aight",
|
||||
click=True,
|
||||
@@ -632,7 +688,8 @@ class HttpCli(object):
|
||||
f.write(b"`GRUNNUR`\n")
|
||||
|
||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||
html = self.conn.tpl_msg.render(
|
||||
html = self.j2(
|
||||
"msg",
|
||||
h2='<a href="/{}?edit">go to /{}?edit</a>'.format(
|
||||
quotep(vpath), html_escape(vpath)
|
||||
),
|
||||
@@ -738,7 +795,8 @@ class HttpCli(object):
|
||||
).encode("utf-8")
|
||||
)
|
||||
|
||||
html = self.conn.tpl_msg.render(
|
||||
html = self.j2(
|
||||
"msg",
|
||||
h2='<a href="/{}">return to /{}</a>'.format(
|
||||
quotep(self.vpath), html_escape(self.vpath)
|
||||
),
|
||||
@@ -1026,16 +1084,75 @@ class HttpCli(object):
|
||||
self.log("{}, {}".format(logmsg, spd))
|
||||
return ret
|
||||
|
||||
def tx_zip(self, fmt, uarg, vn, rem, items, dots):
|
||||
if self.args.no_zip:
|
||||
raise Pebkac(400, "not enabled")
|
||||
|
||||
logmsg = "{:4} {} ".format("", self.req)
|
||||
self.keepalive = False
|
||||
|
||||
if not uarg:
|
||||
uarg = ""
|
||||
|
||||
if fmt == "tar":
|
||||
mime = "application/x-tar"
|
||||
packer = StreamTar
|
||||
else:
|
||||
mime = "application/zip"
|
||||
packer = StreamZip
|
||||
|
||||
fn = items[0] if items and items[0] else self.vpath
|
||||
if fn:
|
||||
fn = fn.rstrip("/").split("/")[-1]
|
||||
else:
|
||||
fn = self.headers.get("host", "hey")
|
||||
|
||||
afn = "".join(
|
||||
[x if x in (string.ascii_letters + string.digits) else "_" for x in fn]
|
||||
)
|
||||
|
||||
bascii = unicode(string.ascii_letters + string.digits).encode("utf-8")
|
||||
ufn = fn.encode("utf-8", "xmlcharrefreplace")
|
||||
if PY2:
|
||||
ufn = [unicode(x) if x in bascii else "%{:02x}".format(ord(x)) for x in ufn]
|
||||
else:
|
||||
ufn = [
|
||||
chr(x).encode("utf-8")
|
||||
if x in bascii
|
||||
else "%{:02x}".format(x).encode("ascii")
|
||||
for x in ufn
|
||||
]
|
||||
ufn = b"".join(ufn).decode("ascii")
|
||||
|
||||
cdis = "attachment; filename=\"{}.{}\"; filename*=UTF-8''{}.{}"
|
||||
cdis = cdis.format(afn, fmt, ufn, fmt)
|
||||
self.send_headers(None, mime=mime, headers={"Content-Disposition": cdis})
|
||||
|
||||
fgen = vn.zipgen(rem, items, self.uname, dots, not self.args.no_scandir)
|
||||
# for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]}))
|
||||
bgen = packer(fgen, utf8="utf" in uarg, pre_crc="crc" in uarg)
|
||||
bsent = 0
|
||||
for buf in bgen.gen():
|
||||
if not buf:
|
||||
break
|
||||
|
||||
try:
|
||||
self.s.sendall(buf)
|
||||
bsent += len(buf)
|
||||
except:
|
||||
logmsg += " \033[31m" + unicode(bsent) + "\033[0m"
|
||||
break
|
||||
|
||||
spd = self._spd(bsent)
|
||||
self.log("{}, {}".format(logmsg, spd))
|
||||
return True
|
||||
|
||||
def tx_md(self, fs_path):
|
||||
logmsg = "{:4} {} ".format("", self.req)
|
||||
if "edit2" in self.uparam:
|
||||
html_path = "web/mde.html"
|
||||
template = self.conn.tpl_mde
|
||||
else:
|
||||
html_path = "web/md.html"
|
||||
template = self.conn.tpl_md
|
||||
|
||||
html_path = os.path.join(E.mod, html_path)
|
||||
tpl = "mde" if "edit2" in self.uparam else "md"
|
||||
html_path = os.path.join(E.mod, "web", "{}.html".format(tpl))
|
||||
template = self.j2(tpl)
|
||||
|
||||
st = os.stat(fsenc(fs_path))
|
||||
# sz_md = st.st_size
|
||||
@@ -1087,7 +1204,7 @@ class HttpCli(object):
|
||||
def tx_mounts(self):
|
||||
rvol = [x + "/" if x else x for x in self.rvol]
|
||||
wvol = [x + "/" if x else x for x in self.wvol]
|
||||
html = self.conn.tpl_mounts.render(this=self, rvol=rvol, wvol=wvol)
|
||||
html = self.j2("splash", this=self, rvol=rvol, wvol=wvol)
|
||||
self.reply(html.encode("utf-8"))
|
||||
return True
|
||||
|
||||
@@ -1176,6 +1293,11 @@ class HttpCli(object):
|
||||
|
||||
return self.tx_file(abspath)
|
||||
|
||||
for k in ["zip", "tar"]:
|
||||
v = self.uparam.get(k)
|
||||
if v is not None:
|
||||
return self.tx_zip(k, v, vn, rem, [], self.args.ed)
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
|
||||
stats = {k: v for k, v in vfs_ls}
|
||||
vfs_ls = [x[0] for x in vfs_ls]
|
||||
@@ -1236,8 +1358,11 @@ class HttpCli(object):
|
||||
|
||||
is_dir = stat.S_ISDIR(inf.st_mode)
|
||||
if is_dir:
|
||||
margin = "DIR"
|
||||
href += "/"
|
||||
if self.args.no_zip:
|
||||
margin = "DIR"
|
||||
else:
|
||||
margin = '<a href="{}?zip">zip</a>'.format(quotep(href))
|
||||
elif fn in hist:
|
||||
margin = '<a href="{}.hist/{}">#{}</a>'.format(
|
||||
base, html_escape(hist[fn][2], quote=True), hist[fn][0]
|
||||
@@ -1284,7 +1409,7 @@ class HttpCli(object):
|
||||
|
||||
tags = {}
|
||||
f["tags"] = tags
|
||||
|
||||
|
||||
if not r:
|
||||
continue
|
||||
|
||||
@@ -1295,7 +1420,7 @@ class HttpCli(object):
|
||||
tags[k] = v
|
||||
|
||||
if icur:
|
||||
taglist = [k for k in self.args.mte.split(",") if k in taglist]
|
||||
taglist = [k for k in vn.flags.get("mte", "").split(",") if k in taglist]
|
||||
for f in dirs:
|
||||
f["tags"] = {}
|
||||
|
||||
@@ -1361,16 +1486,20 @@ class HttpCli(object):
|
||||
|
||||
dirs.extend(files)
|
||||
|
||||
html = self.conn.tpl_browser.render(
|
||||
html = self.j2(
|
||||
"browser",
|
||||
vdir=quotep(self.vpath),
|
||||
vpnodes=vpnodes,
|
||||
files=dirs,
|
||||
ts=ts,
|
||||
perms=json.dumps(perms),
|
||||
taglist=taglist,
|
||||
tag_order=json.dumps(self.args.mte.split(",")),
|
||||
tag_order=json.dumps(
|
||||
vn.flags["mte"].split(",") if "mte" in vn.flags else []
|
||||
),
|
||||
have_up2k_idx=("e2d" in vn.flags),
|
||||
have_tags_idx=("e2t" in vn.flags),
|
||||
have_zip=(not self.args.no_zip),
|
||||
logues=logues,
|
||||
title=html_escape(self.vpath),
|
||||
srv_info=srv_info,
|
||||
|
||||
@@ -12,23 +12,6 @@ try:
|
||||
except:
|
||||
HAVE_SSL = False
|
||||
|
||||
try:
|
||||
import jinja2
|
||||
except ImportError:
|
||||
print(
|
||||
"""\033[1;31m
|
||||
you do not have jinja2 installed,\033[33m
|
||||
choose one of these:\033[0m
|
||||
* apt install python-jinja2
|
||||
* {} -m pip install --user jinja2
|
||||
* (try another python version, if you have one)
|
||||
* (try copyparty.sfx instead)
|
||||
""".format(
|
||||
os.path.basename(sys.executable)
|
||||
)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
from .__init__ import E
|
||||
from .util import Unrecv
|
||||
from .httpcli import HttpCli
|
||||
@@ -57,14 +40,6 @@ class HttpConn(object):
|
||||
self.log_func = hsrv.log
|
||||
self.set_rproxy()
|
||||
|
||||
env = jinja2.Environment()
|
||||
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
|
||||
self.tpl_mounts = env.get_template("splash.html")
|
||||
self.tpl_browser = env.get_template("browser.html")
|
||||
self.tpl_msg = env.get_template("msg.html")
|
||||
self.tpl_md = env.get_template("md.html")
|
||||
self.tpl_mde = env.get_template("mde.html")
|
||||
|
||||
def set_rproxy(self, ip=None):
|
||||
if ip is None:
|
||||
color = 36
|
||||
@@ -112,7 +87,9 @@ class HttpConn(object):
|
||||
err = "need at least 4 bytes in the first packet; got {}".format(
|
||||
len(method)
|
||||
)
|
||||
self.log(err)
|
||||
if method:
|
||||
self.log(err)
|
||||
|
||||
self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
|
||||
return
|
||||
|
||||
|
||||
@@ -2,10 +2,28 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import socket
|
||||
import threading
|
||||
|
||||
try:
|
||||
import jinja2
|
||||
except ImportError:
|
||||
print(
|
||||
"""\033[1;31m
|
||||
you do not have jinja2 installed,\033[33m
|
||||
choose one of these:\033[0m
|
||||
* apt install python-jinja2
|
||||
* {} -m pip install --user jinja2
|
||||
* (try another python version, if you have one)
|
||||
* (try copyparty.sfx instead)
|
||||
""".format(
|
||||
os.path.basename(sys.executable)
|
||||
)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
from .__init__ import E, MACOS
|
||||
from .httpconn import HttpConn
|
||||
from .authsrv import AuthSrv
|
||||
@@ -30,6 +48,13 @@ class HttpSrv(object):
|
||||
self.workload_thr_alive = False
|
||||
self.auth = AuthSrv(self.args, self.log)
|
||||
|
||||
env = jinja2.Environment()
|
||||
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
|
||||
self.j2 = {
|
||||
x: env.get_template(x + ".html")
|
||||
for x in ["splash", "browser", "msg", "md", "mde"]
|
||||
}
|
||||
|
||||
cert_path = os.path.join(E.cfg, "cert.pem")
|
||||
if os.path.exists(cert_path):
|
||||
self.cert_path = cert_path
|
||||
@@ -38,7 +63,9 @@ class HttpSrv(object):
|
||||
|
||||
def accept(self, sck, addr):
|
||||
"""takes an incoming tcp connection and creates a thread to handle it"""
|
||||
self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30")
|
||||
if self.args.log_conn:
|
||||
self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30")
|
||||
|
||||
thr = threading.Thread(target=self.thr_client, args=(sck, addr))
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
@@ -66,11 +93,15 @@ class HttpSrv(object):
|
||||
thr.start()
|
||||
|
||||
try:
|
||||
self.log("%s %s" % addr, "|%sC-crun" % ("-" * 6,), c="1;30")
|
||||
if self.args.log_conn:
|
||||
self.log("%s %s" % addr, "|%sC-crun" % ("-" * 6,), c="1;30")
|
||||
|
||||
cli.run()
|
||||
|
||||
finally:
|
||||
self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30")
|
||||
if self.args.log_conn:
|
||||
self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30")
|
||||
|
||||
try:
|
||||
sck.shutdown(socket.SHUT_RDWR)
|
||||
sck.close()
|
||||
|
||||
@@ -8,7 +8,7 @@ import shutil
|
||||
import subprocess as sp
|
||||
|
||||
from .__init__ import PY2, WINDOWS
|
||||
from .util import fsenc, fsdec
|
||||
from .util import fsenc, fsdec, REKOBO_LKEY
|
||||
|
||||
if not PY2:
|
||||
unicode = str
|
||||
@@ -151,6 +151,12 @@ class MTag(object):
|
||||
v = v.split("/")[0].strip().lstrip("0")
|
||||
ret[k] = v or 0
|
||||
|
||||
# normalize key notation to rkeobo
|
||||
okey = ret.get("key")
|
||||
if okey:
|
||||
key = okey.replace(" ", "").replace("maj", "").replace("min", "m")
|
||||
ret["key"] = REKOBO_LKEY.get(key.lower(), okey)
|
||||
|
||||
return ret
|
||||
|
||||
def compare(self, abspath):
|
||||
@@ -225,7 +231,7 @@ class MTag(object):
|
||||
"""
|
||||
note:
|
||||
tags which contain newline will be truncated on first \n,
|
||||
ffmpeg emits \n and spacepads the : to align visually
|
||||
ffprobe emits \n and spacepads the : to align visually
|
||||
note:
|
||||
the Stream ln always mentions Audio: if audio
|
||||
the Stream ln usually has kb/s, is more accurate
|
||||
@@ -295,7 +301,7 @@ class MTag(object):
|
||||
sec *= 60
|
||||
sec += int(f)
|
||||
except:
|
||||
self.log("invalid timestr from ffmpeg: [{}]".format(tstr), c=3)
|
||||
self.log("invalid timestr from ffprobe: [{}]".format(tstr), c=3)
|
||||
|
||||
ret[".dur"] = sec
|
||||
m = ptn_br1.search(ln)
|
||||
@@ -312,3 +318,30 @@ class MTag(object):
|
||||
ret = {k: [0, v] for k, v in ret.items()}
|
||||
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_bin(self, parsers, abspath):
|
||||
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||
pypath = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||
pypath = str(os.pathsep.join(pypath))
|
||||
env = os.environ.copy()
|
||||
env["PYTHONPATH"] = pypath
|
||||
|
||||
ret = {}
|
||||
for tagname, (binpath, timeout) in parsers.items():
|
||||
try:
|
||||
cmd = [sys.executable, binpath, abspath]
|
||||
args = {"env": env, "timeout": timeout}
|
||||
|
||||
if WINDOWS:
|
||||
args["creationflags"] = 0x4000
|
||||
else:
|
||||
cmd = ["nice"] + cmd
|
||||
|
||||
cmd = [fsenc(x) for x in cmd]
|
||||
v = sp.check_output(cmd, **args).strip()
|
||||
if v:
|
||||
ret[tagname] = v.decode("utf-8")
|
||||
except:
|
||||
pass
|
||||
|
||||
return ret
|
||||
|
||||
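get_bin() above defines the plugin contract for external tag parsers: the script is launched as `<python> <script> <abspath>` with the copyparty source tree on PYTHONPATH, and whatever it prints to stdout (stripped) is stored as the tag value. A hypothetical parser can therefore be this small; the file name and the tag it feeds are made up:

#!/usr/bin/env python3
# hypothetical mtp parser; argv[1] is the absolute path of the media file
# and the single line printed to stdout becomes the value of the tag
import os
import sys

print(os.path.getsize(sys.argv[1]) // 1024)  # e.g. a ".kib" tag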
copyparty/star.py (new file, 95 lines)
@@ -0,0 +1,95 @@
|
||||
import os
|
||||
import tarfile
|
||||
import threading
|
||||
|
||||
from .sutil import errdesc
|
||||
from .util import Queue, fsenc
|
||||
|
||||
|
||||
class QFile(object):
|
||||
"""file-like object which buffers writes into a queue"""
|
||||
|
||||
def __init__(self):
|
||||
self.q = Queue(64)
|
||||
self.bq = []
|
||||
self.nq = 0
|
||||
|
||||
def write(self, buf):
|
||||
if buf is None or self.nq >= 240 * 1024:
|
||||
self.q.put(b"".join(self.bq))
|
||||
self.bq = []
|
||||
self.nq = 0
|
||||
|
||||
if buf is None:
|
||||
self.q.put(None)
|
||||
else:
|
||||
self.bq.append(buf)
|
||||
self.nq += len(buf)
|
||||
|
||||
|
||||
class StreamTar(object):
|
||||
"""construct in-memory tar file from the given path"""
|
||||
|
||||
def __init__(self, fgen, **kwargs):
|
||||
self.ci = 0
|
||||
self.co = 0
|
||||
self.qfile = QFile()
|
||||
self.fgen = fgen
|
||||
self.errf = None
|
||||
|
||||
# python 3.8 changed the default to PAX_FORMAT,
# which wastes space and adds features we don't need
|
||||
fmt = tarfile.GNU_FORMAT
|
||||
self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt)
|
||||
|
||||
w = threading.Thread(target=self._gen)
|
||||
w.daemon = True
|
||||
w.start()
|
||||
|
||||
def gen(self):
|
||||
while True:
|
||||
buf = self.qfile.q.get()
|
||||
if not buf:
|
||||
break
|
||||
|
||||
self.co += len(buf)
|
||||
yield buf
|
||||
|
||||
yield None
|
||||
if self.errf:
|
||||
os.unlink(self.errf["ap"])
|
||||
|
||||
def ser(self, f):
|
||||
name = f["vp"]
|
||||
src = f["ap"]
|
||||
fsi = f["st"]
|
||||
|
||||
inf = tarfile.TarInfo(name=name)
|
||||
inf.mode = fsi.st_mode
|
||||
inf.size = fsi.st_size
|
||||
inf.mtime = fsi.st_mtime
|
||||
inf.uid = 0
|
||||
inf.gid = 0
|
||||
|
||||
self.ci += inf.size
|
||||
with open(fsenc(src), "rb", 512 * 1024) as f:
|
||||
self.tar.addfile(inf, f)
|
||||
|
||||
def _gen(self):
|
||||
errors = []
|
||||
for f in self.fgen:
|
||||
if "err" in f:
|
||||
errors.append([f["vp"], f["err"]])
|
||||
continue
|
||||
|
||||
try:
|
||||
self.ser(f)
|
||||
except Exception as ex:
|
||||
errors.append([f["vp"], repr(ex)])
|
||||
|
||||
if errors:
|
||||
self.errf = errdesc(errors)
|
||||
self.ser(self.errf)
|
||||
|
||||
self.tar.close()
|
||||
self.qfile.write(None)
|
||||
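The fgen items consumed by StreamTar above (and by StreamZip in szip.py below) are plain dicts: "vp" is the path inside the archive, "ap" the absolute path on disk, "st" an os.stat result. A rough usage sketch, assuming two hypothetical input files; in copyparty itself the generator is supplied by the http layer:

import os
from copyparty.star import StreamTar  # the module added above

def fgen(paths):
    # one dict per file, in the shape StreamTar / StreamZip expect
    for ap in paths:
        yield {"vp": os.path.basename(ap), "ap": ap, "st": os.stat(ap)}

tar = StreamTar(fgen(["/tmp/a.txt", "/tmp/b.txt"]))  # hypothetical files
with open("/tmp/out.tar", "wb") as f:
    for buf in tar.gen():
        if buf:  # gen() terminates with a final None sentinel
            f.write(buf)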
copyparty/sutil.py (new file, 25 lines)
@@ -0,0 +1,25 @@
import os
import time
import tempfile
from datetime import datetime


def errdesc(errors):
    report = ["copyparty failed to add the following files to the archive:", ""]

    for fn, err in errors:
        report.extend([" file: {}".format(fn), "error: {}".format(err), ""])

    with tempfile.NamedTemporaryFile(prefix="copyparty-", delete=False) as tf:
        tf_path = tf.name
        tf.write("\r\n".join(report).encode("utf-8", "replace"))

    dt = datetime.utcfromtimestamp(time.time())
    dt = dt.strftime("%Y-%m%d-%H%M%S")

    os.chmod(tf_path, 0o444)
    return {
        "vp": "archive-errors-{}.txt".format(dt),
        "ap": tf_path,
        "st": os.stat(tf_path),
    }
copyparty/szip.py (new file, 271 lines)
@@ -0,0 +1,271 @@
|
||||
import os
|
||||
import time
|
||||
import zlib
|
||||
import struct
|
||||
from datetime import datetime
|
||||
|
||||
from .sutil import errdesc
|
||||
from .util import yieldfile, sanitize_fn
|
||||
|
||||
|
||||
def dostime2unix(buf):
|
||||
t, d = struct.unpack("<HH", buf)
|
||||
|
||||
ts = (t & 0x1F) * 2
|
||||
tm = (t >> 5) & 0x3F
|
||||
th = t >> 11
|
||||
|
||||
dd = d & 0x1F
|
||||
dm = (d >> 5) & 0xF
|
||||
dy = (d >> 9) + 1980
|
||||
|
||||
tt = (dy, dm, dd, th, tm, ts)
|
||||
tf = "{:04d}-{:02d}-{:02d} {:02d}:{:02d}:{:02d}"
|
||||
iso = tf.format(*tt)
|
||||
|
||||
dt = datetime.strptime(iso, "%Y-%m-%d %H:%M:%S")
|
||||
return int(dt.timestamp())
|
||||
|
||||
|
||||
def unixtime2dos(ts):
|
||||
tt = time.gmtime(ts)
|
||||
dy, dm, dd, th, tm, ts = list(tt)[:6]
|
||||
|
||||
bd = ((dy - 1980) << 9) + (dm << 5) + dd
|
||||
bt = (th << 11) + (tm << 5) + ts // 2
|
||||
return struct.pack("<HH", bt, bd)
|
||||
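The two helpers above pack timestamps into the 2+2 byte DOS format: seconds/2, minutes and hours in one 16-bit word; day, month and years-since-1980 in the other. A small decoding sketch with a made-up packed value, showing where the bits land:

import struct

t, d = struct.unpack("<HH", b"\x83\x18\x22\x52")  # hypothetical 4-byte field
print("{:02d}:{:02d}:{:02d}".format(t >> 11, (t >> 5) & 0x3F, (t & 0x1F) * 2))
# -> 03:04:06  (2-second resolution)
print("{:04d}-{:02d}-{:02d}".format((d >> 9) + 1980, (d >> 5) & 0xF, d & 0x1F))
# -> 2021-01-02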
|
||||
|
||||
def gen_fdesc(sz, crc32, z64):
|
||||
ret = b"\x50\x4b\x07\x08"
|
||||
fmt = "<LQQ" if z64 else "<LLL"
|
||||
ret += struct.pack(fmt, crc32, sz, sz)
|
||||
return ret
|
||||
|
||||
|
||||
def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
|
||||
"""
|
||||
does regular file headers
|
||||
and the central directory meme if h_pos is set
|
||||
(h_pos = absolute position of the regular header)
|
||||
"""
|
||||
|
||||
# appnote 4.5 / zip 3.0 (2008) / unzip 6.0 (2009) says to add z64
|
||||
# extinfo for values which exceed H, but that becomes an off-by-one
|
||||
# (can't tell if it was clamped or exactly maxval), make it obvious
|
||||
z64 = sz >= 0xFFFFFFFF
|
||||
z64v = [sz, sz] if z64 else []
|
||||
if h_pos and h_pos >= 0xFFFFFFFF:
|
||||
# central, also consider ptr to original header
|
||||
z64v.append(h_pos)
|
||||
|
||||
# confusingly this doesn't bump if h_pos
|
||||
req_ver = b"\x2d\x00" if z64 else b"\x0a\x00"
|
||||
|
||||
if crc32:
|
||||
crc32 = struct.pack("<L", crc32)
|
||||
else:
|
||||
crc32 = b"\x00" * 4
|
||||
|
||||
if h_pos is None:
|
||||
# 4b magic, 2b min-ver
|
||||
ret = b"\x50\x4b\x03\x04" + req_ver
|
||||
else:
|
||||
# 4b magic, 2b spec-ver, 2b min-ver
|
||||
ret = b"\x50\x4b\x01\x02\x1e\x03" + req_ver
|
||||
|
||||
ret += b"\x00" if pre_crc else b"\x08" # streaming
|
||||
ret += b"\x08" if utf8 else b"\x00" # appnote 6.3.2 (2007)
|
||||
|
||||
# 2b compression, 4b time, 4b crc
|
||||
ret += b"\x00\x00" + unixtime2dos(lastmod) + crc32
|
||||
|
||||
# spec says to put zeros when !crc if bit3 (streaming)
|
||||
# however infozip does actual sz and it even works on winxp
|
||||
# (same reasoning for z64 extradata later)
|
||||
vsz = 0xFFFFFFFF if z64 else sz
|
||||
ret += struct.pack("<LL", vsz, vsz)
|
||||
|
||||
# windows support (the "?" replace below too)
|
||||
fn = sanitize_fn(fn, "/")
|
||||
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
|
||||
|
||||
z64_len = len(z64v) * 8 + 4 if z64v else 0
|
||||
ret += struct.pack("<HH", len(bfn), z64_len)
|
||||
|
||||
if h_pos is not None:
|
||||
# 2b comment, 2b diskno
|
||||
ret += b"\x00" * 4
|
||||
|
||||
# 2b internal.attr, 4b external.attr
|
||||
# infozip-macos: 0100 0000 a481 file:644
|
||||
# infozip-macos: 0100 0100 0080 file:000
|
||||
ret += b"\x01\x00\x00\x00\xa4\x81"
|
||||
|
||||
# 4b local-header-ofs
|
||||
ret += struct.pack("<L", min(h_pos, 0xFFFFFFFF))
|
||||
|
||||
ret += bfn
|
||||
|
||||
if z64v:
|
||||
ret += struct.pack("<HH" + "Q" * len(z64v), 1, len(z64v) * 8, *z64v)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def gen_ecdr(items, cdir_pos, cdir_end):
|
||||
"""
|
||||
summary of all file headers,
|
||||
usually the zipfile footer unless something clamps
|
||||
"""
|
||||
|
||||
ret = b"\x50\x4b\x05\x06"
|
||||
|
||||
# 2b ndisk, 2b disk0
|
||||
ret += b"\x00" * 4
|
||||
|
||||
cdir_sz = cdir_end - cdir_pos
|
||||
|
||||
nitems = min(0xFFFF, len(items))
|
||||
csz = min(0xFFFFFFFF, cdir_sz)
|
||||
cpos = min(0xFFFFFFFF, cdir_pos)
|
||||
|
||||
need_64 = nitems == 0xFFFF or 0xFFFFFFFF in [csz, cpos]
|
||||
|
||||
# 2b tnfiles, 2b dnfiles, 4b dir sz, 4b dir pos
|
||||
ret += struct.pack("<HHLL", nitems, nitems, csz, cpos)
|
||||
|
||||
# 2b comment length
|
||||
ret += b"\x00\x00"
|
||||
|
||||
return [ret, need_64]
|
||||
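Since the classic end-of-central-directory record only has 16 bits for the file count and 32 bits for the directory size/offset, gen_ecdr reports back whether anything clamped; for example an archive of 70000 tiny files needs the zip64 records even though no size is anywhere near 4 GiB. A minimal check of that decision, mirroring the code above; the numbers are made up:

nitems = min(0xFFFF, 70000)        # -> 65535, clamped
csz = min(0xFFFFFFFF, 3500000)     # central directory size, fits
cpos = min(0xFFFFFFFF, 9000000)    # central directory offset, fits
need_64 = nitems == 0xFFFF or 0xFFFFFFFF in [csz, cpos]
print(need_64)                     # -> True, so ecdr64 + locator get emitted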
|
||||
|
||||
def gen_ecdr64(items, cdir_pos, cdir_end):
|
||||
"""
|
||||
z64 end of central directory
|
||||
added when numfiles or a headerptr clamps
|
||||
"""
|
||||
|
||||
ret = b"\x50\x4b\x06\x06"
|
||||
|
||||
# 8b own length from hereon
|
||||
ret += b"\x2c" + b"\x00" * 7
|
||||
|
||||
# 2b spec-ver, 2b min-ver
|
||||
ret += b"\x1e\x03\x2d\x00"
|
||||
|
||||
# 4b ndisk, 4b disk0
|
||||
ret += b"\x00" * 8
|
||||
|
||||
# 8b tnfiles, 8b dnfiles, 8b dir sz, 8b dir pos
|
||||
cdir_sz = cdir_end - cdir_pos
|
||||
ret += struct.pack("<QQQQ", len(items), len(items), cdir_sz, cdir_pos)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def gen_ecdr64_loc(ecdr64_pos):
|
||||
"""
|
||||
z64 end of central directory locator
|
||||
points to ecdr64
|
||||
why
|
||||
"""
|
||||
|
||||
ret = b"\x50\x4b\x06\x07"
|
||||
|
||||
# 4b cdisk, 8b start of ecdr64, 4b ndisks
|
||||
ret += struct.pack("<LQL", 0, ecdr64_pos, 1)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
class StreamZip(object):
|
||||
def __init__(self, fgen, utf8=False, pre_crc=False):
|
||||
self.fgen = fgen
|
||||
self.utf8 = utf8
|
||||
self.pre_crc = pre_crc
|
||||
|
||||
self.pos = 0
|
||||
self.items = []
|
||||
|
||||
def _ct(self, buf):
|
||||
self.pos += len(buf)
|
||||
return buf
|
||||
|
||||
def ser(self, f):
|
||||
name = f["vp"]
|
||||
src = f["ap"]
|
||||
st = f["st"]
|
||||
|
||||
sz = st.st_size
|
||||
ts = st.st_mtime + 1
|
||||
|
||||
crc = None
|
||||
if self.pre_crc:
|
||||
crc = 0
|
||||
for buf in yieldfile(src):
|
||||
crc = zlib.crc32(buf, crc)
|
||||
|
||||
crc &= 0xFFFFFFFF
|
||||
|
||||
h_pos = self.pos
|
||||
buf = gen_hdr(None, name, sz, ts, self.utf8, crc, self.pre_crc)
|
||||
yield self._ct(buf)
|
||||
|
||||
crc = crc or 0
|
||||
for buf in yieldfile(src):
|
||||
if not self.pre_crc:
|
||||
crc = zlib.crc32(buf, crc)
|
||||
|
||||
yield self._ct(buf)
|
||||
|
||||
crc &= 0xFFFFFFFF
|
||||
|
||||
self.items.append([name, sz, ts, crc, h_pos])
|
||||
|
||||
z64 = sz >= 4 * 1024 * 1024 * 1024
|
||||
|
||||
if z64 or not self.pre_crc:
|
||||
buf = gen_fdesc(sz, crc, z64)
|
||||
yield self._ct(buf)
|
||||
|
||||
def gen(self):
|
||||
errors = []
|
||||
for f in self.fgen:
|
||||
if "err" in f:
|
||||
errors.append([f["vp"], f["err"]])
|
||||
continue
|
||||
|
||||
try:
|
||||
for x in self.ser(f):
|
||||
yield x
|
||||
except Exception as ex:
|
||||
errors.append([f["vp"], repr(ex)])
|
||||
|
||||
if errors:
|
||||
errf = errdesc(errors)
|
||||
print(repr(errf))
|
||||
for x in self.ser(errf):
|
||||
yield x
|
||||
|
||||
cdir_pos = self.pos
|
||||
for name, sz, ts, crc, h_pos in self.items:
|
||||
buf = gen_hdr(h_pos, name, sz, ts, self.utf8, crc, self.pre_crc)
|
||||
yield self._ct(buf)
|
||||
cdir_end = self.pos
|
||||
|
||||
_, need_64 = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||
if need_64:
|
||||
ecdir64_pos = self.pos
|
||||
buf = gen_ecdr64(self.items, cdir_pos, cdir_end)
|
||||
yield self._ct(buf)
|
||||
|
||||
buf = gen_ecdr64_loc(ecdir64_pos)
|
||||
yield self._ct(buf)
|
||||
|
||||
ecdr, _ = gen_ecdr(self.items, cdir_pos, cdir_end)
|
||||
yield self._ct(ecdr)
|
||||
|
||||
if errors:
|
||||
os.unlink(errf["ap"])
|
||||
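Taken together, StreamZip.gen() emits its records in the usual streaming-zip order; a rough map (the zip64 pieces only appear when gen_ecdr reports clamping):

# per file:  local header -> file data -> data descriptor (PK\x07\x08;
#            skipped only when pre_crc is set and the file is under 4 GiB)
# then once: one central-directory entry per file (gen_hdr with h_pos set)
#            [ecdr64 + ecdr64 locator, only if something clamped]
#            ecdr (classic end-of-central-directory record)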
@@ -68,23 +68,29 @@ class TcpSrv(object):
|
||||
self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port))
|
||||
|
||||
while True:
|
||||
self.log("tcpsrv", "|%sC-ncli" % ("-" * 1,), c="1;30")
|
||||
if self.args.log_conn:
|
||||
self.log("tcpsrv", "|%sC-ncli" % ("-" * 1,), c="1;30")
|
||||
|
||||
if self.num_clients.v >= self.args.nc:
|
||||
time.sleep(0.1)
|
||||
continue
|
||||
|
||||
self.log("tcpsrv", "|%sC-acc1" % ("-" * 2,), c="1;30")
|
||||
if self.args.log_conn:
|
||||
self.log("tcpsrv", "|%sC-acc1" % ("-" * 2,), c="1;30")
|
||||
|
||||
ready, _, _ = select.select(self.srv, [], [])
|
||||
for srv in ready:
|
||||
sck, addr = srv.accept()
|
||||
sip, sport = srv.getsockname()
|
||||
self.log(
|
||||
"%s %s" % addr,
|
||||
"|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
|
||||
"-" * 3, sip, sport % 8, sport
|
||||
),
|
||||
c="1;30",
|
||||
)
|
||||
if self.args.log_conn:
|
||||
self.log(
|
||||
"%s %s" % addr,
|
||||
"|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
|
||||
"-" * 3, sip, sport % 8, sport
|
||||
),
|
||||
c="1;30",
|
||||
)
|
||||
|
||||
self.num_clients.add()
|
||||
self.hub.broker.put(False, "httpconn", sck, addr)
|
||||
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import os
|
||||
import time
|
||||
import threading
|
||||
from datetime import datetime
|
||||
|
||||
from .util import u8safe
|
||||
from .util import u8safe, s3dec, html_escape, Pebkac
|
||||
from .up2k import up2k_wark_from_hashlist
|
||||
|
||||
|
||||
@@ -19,6 +22,7 @@ class U2idx(object):
|
||||
def __init__(self, args, log_func):
|
||||
self.args = args
|
||||
self.log_func = log_func
|
||||
self.timeout = args.srch_time
|
||||
|
||||
if not HAVE_SQLITE3:
|
||||
self.log("could not load sqlite3; searching will be disabled")
|
||||
@@ -28,6 +32,9 @@ class U2idx(object):
|
||||
self.mem_cur = sqlite3.connect(":memory:")
|
||||
self.mem_cur.execute(r"create table a (b text)")
|
||||
|
||||
self.p_end = None
|
||||
self.p_dur = 0
|
||||
|
||||
def log(self, msg, c=0):
|
||||
self.log_func("u2idx", msg, c)
|
||||
|
||||
@@ -43,7 +50,10 @@ class U2idx(object):
|
||||
uq = "substr(w,1,16) = ? and w = ?"
|
||||
uv = [wark[:16], wark]
|
||||
|
||||
return self.run_query(vols, uq, uv, "", [])[0]
|
||||
try:
|
||||
return self.run_query(vols, uq, uv, {})[0]
|
||||
except Exception as ex:
|
||||
raise Pebkac(500, repr(ex))
|
||||
|
||||
def get_cur(self, ptop):
|
||||
cur = self.cur.get(ptop)
|
||||
@@ -73,17 +83,64 @@ class U2idx(object):
|
||||
|
||||
uq, uv = _sqlize(qobj)
|
||||
|
||||
tq = ""
|
||||
tv = []
|
||||
qobj = {}
|
||||
if "tags" in body:
|
||||
_conv_txt(qobj, body, "tags", "mt.v")
|
||||
tq, tv = _sqlize(qobj)
|
||||
|
||||
return self.run_query(vols, uq, uv, tq, tv)
|
||||
if "adv" in body:
|
||||
_conv_adv(qobj, body, "adv")
|
||||
|
||||
def run_query(self, vols, uq, uv, tq, tv):
|
||||
self.log("qs: {} {} , {} {}".format(uq, repr(uv), tq, repr(tv)))
|
||||
try:
|
||||
return self.run_query(vols, uq, uv, qobj)
|
||||
except Exception as ex:
|
||||
raise Pebkac(500, repr(ex))
|
||||
|
||||
def run_query(self, vols, uq, uv, targs):
|
||||
self.log("qs: {} {} , {}".format(uq, repr(uv), repr(targs)))
|
||||
|
||||
done_flag = []
|
||||
self.active_id = "{:.6f}_{}".format(
|
||||
time.time(), threading.current_thread().ident
|
||||
)
|
||||
thr = threading.Thread(
|
||||
target=self.terminator,
|
||||
args=(
|
||||
self.active_id,
|
||||
done_flag,
|
||||
),
|
||||
)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
if not targs:
|
||||
if not uq:
|
||||
q = "select * from up"
|
||||
v = ()
|
||||
else:
|
||||
q = "select * from up where " + uq
|
||||
v = tuple(uv)
|
||||
else:
|
||||
q = "select up.* from up"
|
||||
keycmp = "substr(up.w,1,16)"
|
||||
where = []
|
||||
v = []
|
||||
ctr = 0
|
||||
for tq, tv in sorted(targs.items()):
|
||||
ctr += 1
|
||||
tq = tq.split("\n")[0]
|
||||
keycmp2 = "mt{}.w".format(ctr)
|
||||
q += " inner join mt mt{} on {} = {}".format(ctr, keycmp, keycmp2)
|
||||
keycmp = keycmp2
|
||||
where.append(tq.replace("mt.", keycmp[:-1]))
|
||||
v.append(tv)
|
||||
|
||||
if uq:
|
||||
where.append(uq)
|
||||
v.extend(uv)
|
||||
|
||||
q += " where " + (" and ".join(where))
|
||||
|
||||
# self.log("q2: {} {}".format(q, repr(v)))
|
||||
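# roughly what the loop above builds for two tag criteria
# (key == "8A" and .bpm >= 165); any file-property filter from uq
# is appended to the same where-clause at the end:
#
#   select up.* from up
#     inner join mt mt1 on substr(up.w,1,16) = mt1.w
#     inner join mt mt2 on mt1.w = mt2.w
#   where mt1.k = '.bpm' and mt1.v >= ?
#     and mt2.k = 'key' and mt2.v = ?
#
# with v = ["165", "8A"]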
|
||||
ret = []
|
||||
lim = 1000
|
||||
@@ -93,18 +150,7 @@ class U2idx(object):
|
||||
if not cur:
|
||||
continue
|
||||
|
||||
if not tq:
|
||||
if not uq:
|
||||
q = "select * from up"
|
||||
v = ()
|
||||
else:
|
||||
q = "select * from up where " + uq
|
||||
v = tuple(uv)
|
||||
else:
|
||||
# naive assumption: tags first
|
||||
q = "select up.* from up inner join mt on substr(up.w,1,16) = mt.w where {}"
|
||||
q = q.format(" and ".join([tq, uq]) if uq else tq)
|
||||
v = tuple(tv + uv)
|
||||
self.active_cur = cur
|
||||
|
||||
sret = []
|
||||
c = cur.execute(q, v)
|
||||
@@ -124,17 +170,35 @@ class U2idx(object):
|
||||
w = hit["w"]
|
||||
del hit["w"]
|
||||
tags = {}
|
||||
q = "select k, v from mt where w = ? and k != 'x'"
|
||||
for k, v in cur.execute(q, (w,)):
|
||||
q2 = "select k, v from mt where w = ? and k != 'x'"
|
||||
for k, v2 in cur.execute(q2, (w,)):
|
||||
taglist[k] = True
|
||||
tags[k] = v
|
||||
tags[k] = v2
|
||||
|
||||
hit["tags"] = tags
|
||||
|
||||
ret.extend(sret)
|
||||
|
||||
done_flag.append(True)
|
||||
self.active_id = None
|
||||
|
||||
# undupe hits from multiple metadata keys
|
||||
if len(ret) > 1:
|
||||
ret = [ret[0]] + [
|
||||
y for x, y in zip(ret[:-1], ret[1:]) if x["rp"] != y["rp"]
|
||||
]
|
||||
|
||||
return ret, list(taglist.keys())
|
||||
|
||||
def terminator(self, identifier, done_flag):
|
||||
for _ in range(self.timeout):
|
||||
time.sleep(1)
|
||||
if done_flag:
|
||||
return
|
||||
|
||||
if identifier == self.active_id:
|
||||
self.active_cur.connection.interrupt()
|
||||
|
||||
|
||||
def _open(ptop):
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
@@ -190,6 +254,23 @@ def _conv_txt(q, body, k, sql):
|
||||
q[qk + "\n" + v] = u8safe(v)
|
||||
|
||||
|
||||
def _conv_adv(q, body, k):
|
||||
ptn = re.compile(r"^(\.?[a-z]+) *(==?|!=|<=?|>=?) *(.*)$")
|
||||
|
||||
parts = body[k].split(" ")
|
||||
parts = [x.strip() for x in parts if x.strip()]
|
||||
|
||||
for part in parts:
|
||||
m = ptn.match(part)
|
||||
if not m:
|
||||
p = html_escape(part)
|
||||
raise Pebkac(400, "invalid argument [" + p + "]")
|
||||
|
||||
k, op, v = m.groups()
|
||||
qk = "mt.k = '{}' and mt.v {} ?".format(k, op)
|
||||
q[qk + "\n" + v] = u8safe(v)
|
||||
|
||||
|
||||
def _sqlize(qobj):
|
||||
keys = []
|
||||
values = []
|
||||
|
||||
@@ -13,6 +13,7 @@ import base64
|
||||
import hashlib
|
||||
import threading
|
||||
import traceback
|
||||
import subprocess as sp
|
||||
from copy import deepcopy
|
||||
|
||||
from .__init__ import WINDOWS
|
||||
@@ -28,6 +29,7 @@ from .util import (
|
||||
s3enc,
|
||||
s3dec,
|
||||
statdir,
|
||||
s2hms,
|
||||
)
|
||||
from .mtag import MTag
|
||||
from .authsrv import AuthSrv
|
||||
@@ -64,7 +66,7 @@ class Up2k(object):
|
||||
self.flags = {}
|
||||
self.cur = {}
|
||||
self.mtag = None
|
||||
self.n_mtag_tags_added = -1
|
||||
self.pending_tags = None
|
||||
|
||||
self.mem_cur = None
|
||||
self.sqlite_ver = None
|
||||
@@ -107,6 +109,10 @@ class Up2k(object):
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
thr = threading.Thread(target=self._run_all_mtp)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def log(self, msg, c=0):
|
||||
self.log_func("up2k", msg + "\033[K", c)
|
||||
|
||||
@@ -219,6 +225,16 @@ class Up2k(object):
|
||||
|
||||
_, flags = self._expr_idx_filter(flags)
|
||||
|
||||
ft = "\033[0;32m{}{:.0}"
|
||||
ff = "\033[0;35m{}{:.0}"
|
||||
fv = "\033[0;36m{}:\033[1;30m{}"
|
||||
a = [
|
||||
(ft if v is True else ff if v is False else fv).format(k, str(v))
|
||||
for k, v in flags.items()
|
||||
]
|
||||
if a:
|
||||
self.log(" ".join(sorted(a)) + "\033[0m")
|
||||
|
||||
reg = {}
|
||||
path = os.path.join(ptop, ".hist", "up2k.snap")
|
||||
if "e2d" in flags and os.path.exists(path):
|
||||
@@ -272,9 +288,12 @@ class Up2k(object):
|
||||
dbw = [reg[0], 0, time.time()]
|
||||
self.pp.n = next(dbw[0].execute("select count(w) from up"))[0]
|
||||
|
||||
# can be symlink so don't `and d.startswith(top)``
|
||||
excl = set([d.realpath for d in all_vols if d != vol])
|
||||
n_add = self._build_dir(dbw, top, excl, top)
|
||||
excl = [
|
||||
vol.realpath + "/" + d.vpath[len(vol.vpath) :].lstrip("/")
|
||||
for d in all_vols
|
||||
if d != vol and (d.vpath.startswith(vol.vpath + "/") or not vol.vpath)
|
||||
]
|
||||
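# e.g. with this volume mounted at "/" from /srv and another volume "dist"
# mounted from somewhere else, excl becomes ["/srv/dist"] so the scan of
# /srv skips that subtree; derived from vpaths since, as noted above, the
# nested volume may be reached through a symlink (paths are hypothetical)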
n_add = self._build_dir(dbw, top, set(excl), top)
|
||||
n_rm = self._drop_lost(dbw[0], top)
|
||||
if dbw[1]:
|
||||
self.log("commit {} new files".format(dbw[1]))
|
||||
@@ -435,18 +454,7 @@ class Up2k(object):
|
||||
|
||||
mpool = False
|
||||
if self.mtag.prefer_mt and not self.args.no_mtag_mt:
|
||||
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
|
||||
# both do crazy runahead so lets reinvent another wheel
|
||||
nw = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
if self.n_mtag_tags_added == -1:
|
||||
self.log("using {}x {}".format(nw, self.mtag.backend))
|
||||
self.n_mtag_tags_added = 0
|
||||
|
||||
mpool = Queue(nw)
|
||||
for _ in range(nw):
|
||||
thr = threading.Thread(target=self._tag_thr, args=(mpool,))
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
mpool = self._start_mpool()
|
||||
|
||||
c2 = cur.connection.cursor()
|
||||
c3 = cur.connection.cursor()
|
||||
@@ -457,19 +465,21 @@ class Up2k(object):
|
||||
if c2.execute(q, (w[:16],)).fetchone():
|
||||
continue
|
||||
|
||||
if "mtp" in flags:
|
||||
q = "insert into mt values (?,'t:mtp','a')"
|
||||
c2.execute(q, (w[:16],))
|
||||
|
||||
if rd.startswith("//") or fn.startswith("//"):
|
||||
rd, fn = s3dec(rd, fn)
|
||||
|
||||
abspath = os.path.join(ptop, rd, fn)
|
||||
self.pp.msg = "c{} {}".format(n_left, abspath)
|
||||
args = c3, entags, w, abspath
|
||||
args = [entags, w, abspath]
|
||||
if not mpool:
|
||||
n_tags = self._tag_file(*args)
|
||||
n_tags = self._tag_file(c3, *args)
|
||||
else:
|
||||
mpool.put(args)
|
||||
with self.mutex:
|
||||
n_tags = self.n_mtag_tags_added
|
||||
self.n_mtag_tags_added = 0
|
||||
mpool.put(["mtag"] + args)
|
||||
n_tags = len(self._flush_mpool(c3))
|
||||
|
||||
n_add += n_tags
|
||||
n_buf += n_tags
|
||||
@@ -481,17 +491,220 @@ class Up2k(object):
|
||||
last_write = time.time()
|
||||
n_buf = 0
|
||||
|
||||
if mpool:
|
||||
for _ in range(mpool.maxsize):
|
||||
mpool.put(None)
|
||||
|
||||
mpool.join()
|
||||
self._stop_mpool(mpool, c3)
|
||||
|
||||
c3.close()
|
||||
c2.close()
|
||||
|
||||
return n_add, n_rm, True
|
||||
|
||||
def _flush_mpool(self, wcur):
|
||||
with self.mutex:
|
||||
ret = []
|
||||
for x in self.pending_tags:
|
||||
self._tag_file(wcur, *x)
|
||||
ret.append(x[1])
|
||||
|
||||
self.pending_tags = []
|
||||
return ret
|
||||
|
||||
def _run_all_mtp(self):
|
||||
t0 = time.time()
|
||||
self.mtp_force = {}
|
||||
self.mtp_parsers = {}
|
||||
for ptop, flags in self.flags.items():
|
||||
if "mtp" in flags:
|
||||
self._run_one_mtp(ptop)
|
||||
|
||||
td = time.time() - t0
|
||||
msg = "mtp finished in {:.2f} sec ({})"
|
||||
self.log(msg.format(td, s2hms(td, True)))
|
||||
|
||||
def _run_one_mtp(self, ptop):
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
sz0 = os.path.getsize(db_path) // 1024
|
||||
|
||||
entags = self.entags[ptop]
|
||||
|
||||
force = {}
|
||||
timeout = {}
|
||||
parsers = {}
|
||||
for parser in self.flags[ptop]["mtp"]:
|
||||
orig = parser
|
||||
tag, parser = parser.split("=", 1)
|
||||
if tag not in entags:
|
||||
continue
|
||||
|
||||
while True:
|
||||
try:
|
||||
bp = os.path.expanduser(parser)
|
||||
if os.path.exists(bp):
|
||||
parsers[tag] = [bp, timeout.get(tag, 30)]
|
||||
break
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
arg, parser = parser.split(",", 1)
|
||||
arg = arg.lower()
|
||||
|
||||
if arg == "f":
|
||||
force[tag] = True
|
||||
continue
|
||||
|
||||
if arg.startswith("t"):
|
||||
timeout[tag] = int(arg[1:])
|
||||
continue
|
||||
|
||||
raise Exception()
|
||||
|
||||
except:
|
||||
self.log("invalid argument: " + orig, 1)
|
||||
return
|
||||
|
||||
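# i.e. each "mtp" volume-flag entry parsed above has the form
#   TAG=[f,][tN,]SCRIPTPATH
# for example (hypothetical)  .bpm=f,t60,~/bin/audio-bpm.py  means:
#   tag ".bpm", "f" = force (rerun even if the tag already exists),
#   "t60" = 60 sec timeout (default 30), rest = parser script (~ expanded);
# entries whose tag is not enabled for the volume are skipped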
self.mtp_force[ptop] = force
|
||||
self.mtp_parsers[ptop] = parsers
|
||||
|
||||
q = "select count(w) from mt where k = 't:mtp'"
|
||||
with self.mutex:
|
||||
cur = self.cur[ptop]
|
||||
cur = cur.connection.cursor()
|
||||
wcur = cur.connection.cursor()
|
||||
n_left = cur.execute(q).fetchone()[0]
|
||||
|
||||
mpool = self._start_mpool()
|
||||
batch_sz = mpool.maxsize * 3
|
||||
t_prev = time.time()
|
||||
n_prev = n_left
|
||||
n_done = 0
|
||||
to_delete = {}
|
||||
in_progress = {}
|
||||
while True:
|
||||
with self.mutex:
|
||||
q = "select w from mt where k = 't:mtp' limit ?"
|
||||
warks = cur.execute(q, (batch_sz,)).fetchall()
|
||||
warks = [x[0] for x in warks]
|
||||
jobs = []
|
||||
for w in warks:
|
||||
q = "select rd, fn from up where substr(w,1,16)=? limit 1"
|
||||
rd, fn = cur.execute(q, (w,)).fetchone()
|
||||
rd, fn = s3dec(rd, fn)
|
||||
abspath = os.path.join(ptop, rd, fn)
|
||||
|
||||
q = "select k from mt where w = ?"
|
||||
have = cur.execute(q, (w,)).fetchall()
|
||||
have = [x[0] for x in have]
|
||||
|
||||
if ".dur" not in have and ".dur" in entags:
|
||||
# skip non-audio
|
||||
to_delete[w] = True
|
||||
n_left -= 1
|
||||
continue
|
||||
|
||||
if w in in_progress:
|
||||
continue
|
||||
|
||||
task_parsers = {
|
||||
k: v for k, v in parsers.items() if k in force or k not in have
|
||||
}
|
||||
jobs.append([task_parsers, None, w, abspath])
|
||||
in_progress[w] = True
|
||||
|
||||
done = self._flush_mpool(wcur)
|
||||
|
||||
with self.mutex:
|
||||
for w in done:
|
||||
to_delete[w] = True
|
||||
in_progress.pop(w)
|
||||
n_done += 1
|
||||
|
||||
for w in to_delete.keys():
|
||||
q = "delete from mt where w = ? and k = 't:mtp'"
|
||||
cur.execute(q, (w,))
|
||||
|
||||
to_delete = {}
|
||||
|
||||
if not warks:
|
||||
break
|
||||
|
||||
if not jobs:
|
||||
continue
|
||||
|
||||
try:
|
||||
now = time.time()
|
||||
s = ((now - t_prev) / (n_prev - n_left)) * n_left
|
||||
h, s = divmod(s, 3600)
|
||||
m, s = divmod(s, 60)
|
||||
n_prev = n_left
|
||||
t_prev = now
|
||||
except:
|
||||
h = 1
|
||||
m = 1
|
||||
|
||||
msg = "mtp: {} done, {} left, eta {}h {:02d}m"
|
||||
with self.mutex:
|
||||
msg = msg.format(n_done, n_left, int(h), int(m))
|
||||
self.log(msg, c=6)
|
||||
|
||||
for j in jobs:
|
||||
n_left -= 1
|
||||
mpool.put(j)
|
||||
|
||||
with self.mutex:
|
||||
cur.connection.commit()
|
||||
|
||||
done = self._stop_mpool(mpool, wcur)
|
||||
with self.mutex:
|
||||
for w in done:
|
||||
q = "delete from mt where w = ? and k = 't:mtp'"
|
||||
cur.execute(q, (w,))
|
||||
|
||||
cur.connection.commit()
|
||||
if n_done:
|
||||
self.vac(cur, db_path, n_done, 0, sz0)
|
||||
|
||||
wcur.close()
|
||||
cur.close()
|
||||
|
||||
def _start_mpool(self):
|
||||
if WINDOWS and False:
|
||||
nah = open(os.devnull, "wb")
|
||||
wmic = "processid={}".format(os.getpid())
|
||||
wmic = ["wmic", "process", "where", wmic, "call", "setpriority"]
|
||||
sp.call(wmic + ["below normal"], stdout=nah, stderr=nah)
|
||||
|
||||
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
|
||||
# both do crazy runahead so lets reinvent another wheel
|
||||
nw = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
if self.pending_tags is None:
|
||||
self.log("using {}x {}".format(nw, self.mtag.backend))
|
||||
self.pending_tags = []
|
||||
|
||||
mpool = Queue(nw)
|
||||
for _ in range(nw):
|
||||
thr = threading.Thread(target=self._tag_thr, args=(mpool,))
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
return mpool
|
||||
|
||||
def _stop_mpool(self, mpool, wcur):
|
||||
if not mpool:
|
||||
return
|
||||
|
||||
for _ in range(mpool.maxsize):
|
||||
mpool.put(None)
|
||||
|
||||
mpool.join()
|
||||
done = self._flush_mpool(wcur)
|
||||
if WINDOWS and False:
|
||||
nah = open(os.devnull, "wb")
|
||||
wmic = "processid={}".format(os.getpid())
|
||||
wmic = ["wmic", "process", "where", wmic, "call", "setpriority"]
|
||||
sp.call(wmic + ["below normal"], stdout=nah, stderr=nah)
|
||||
|
||||
return done
|
||||
|
||||
def _tag_thr(self, q):
|
||||
while True:
|
||||
task = q.get()
|
||||
@@ -500,24 +713,47 @@ class Up2k(object):
|
||||
return
|
||||
|
||||
try:
|
||||
write_cur, entags, wark, abspath = task
|
||||
tags = self.mtag.get(abspath)
|
||||
parser, entags, wark, abspath = task
|
||||
if parser == "mtag":
|
||||
tags = self.mtag.get(abspath)
|
||||
else:
|
||||
tags = self.mtag.get_bin(parser, abspath)
|
||||
vtags = [
|
||||
"\033[36m{} \033[33m{}".format(k, v) for k, v in tags.items()
|
||||
]
|
||||
self.log("{}\033[0m [{}]".format(" ".join(vtags), abspath))
|
||||
|
||||
with self.mutex:
|
||||
n = self._tag_file(write_cur, entags, wark, abspath, tags)
|
||||
self.n_mtag_tags_added += n
|
||||
self.pending_tags.append([entags, wark, abspath, tags])
|
||||
except:
|
||||
ex = traceback.format_exc()
|
||||
if parser == "mtag":
|
||||
parser = self.mtag.backend
|
||||
|
||||
msg = "{} failed to read tags from {}:\n{}"
|
||||
self.log(msg.format(self.mtag.backend, abspath, ex), c=3)
|
||||
self.log(msg.format(parser, abspath, ex), c=3)
|
||||
|
||||
q.task_done()
|
||||
|
||||
def _tag_file(self, write_cur, entags, wark, abspath, tags=None):
|
||||
tags = tags or self.mtag.get(abspath)
|
||||
tags = {k: v for k, v in tags.items() if k in entags}
|
||||
if tags is None:
|
||||
tags = self.mtag.get(abspath)
|
||||
|
||||
if entags:
|
||||
tags = {k: v for k, v in tags.items() if k in entags}
|
||||
if not tags:
|
||||
# indicate scanned without tags
|
||||
tags = {"x": 0}
|
||||
|
||||
if not tags:
|
||||
# indicate scanned without tags
|
||||
tags = {"x": 0}
|
||||
return 0
|
||||
|
||||
for k in tags.keys():
|
||||
q = "delete from mt where w = ? and ({})".format(
|
||||
" or ".join(["k = ?"] * len(tags))
|
||||
)
|
||||
args = [wark[:16]] + list(tags.keys())
|
||||
write_cur.execute(q, tuple(args))
|
||||
|
||||
ret = 0
|
||||
for k, v in tags.items():
|
||||
@@ -529,6 +765,7 @@ class Up2k(object):
|
||||
|
||||
def _orz(self, db_path):
|
||||
return sqlite3.connect(db_path, check_same_thread=False).cursor()
|
||||
# x.set_trace_callback(trace)
|
||||
|
||||
def _open_db(self, db_path):
|
||||
existed = os.path.exists(db_path)
|
||||
@@ -1053,24 +1290,37 @@ class Up2k(object):
|
||||
def _tagger(self):
|
||||
while True:
|
||||
ptop, wark, rd, fn = self.tagq.get()
|
||||
if "e2t" not in self.flags[ptop]:
|
||||
continue
|
||||
|
||||
abspath = os.path.join(ptop, rd, fn)
|
||||
self.log("tagging " + abspath)
|
||||
tags = self.mtag.get(abspath)
|
||||
ntags1 = len(tags)
|
||||
if self.mtp_parsers.get(ptop, {}):
|
||||
parser = {
|
||||
k: v
|
||||
for k, v in self.mtp_parsers[ptop].items()
|
||||
if k in self.mtp_force[ptop] or k not in tags
|
||||
}
|
||||
tags.update(self.mtag.get_bin(parser, abspath))
|
||||
|
||||
with self.mutex:
|
||||
cur = self.cur[ptop]
|
||||
if not cur:
|
||||
self.log("no cursor to write tags with??", c=1)
|
||||
continue
|
||||
|
||||
# TODO is undef if vol 404 on startup
|
||||
entags = self.entags[ptop]
|
||||
if not entags:
|
||||
self.log("no entags okay.jpg", c=3)
|
||||
continue
|
||||
|
||||
if "e2t" in self.flags[ptop]:
|
||||
self._tag_file(cur, entags, wark, abspath)
|
||||
|
||||
self._tag_file(cur, entags, wark, abspath, tags)
|
||||
cur.connection.commit()
|
||||
|
||||
self.log("tagged {} ({}+{})".format(abspath, ntags1, len(tags) - ntags1))
|
||||
|
||||
def _hasher(self):
|
||||
while True:
|
||||
ptop, rd, fn = self.hashq.get()
|
||||
|
||||
@@ -10,6 +10,7 @@ import select
|
||||
import struct
|
||||
import hashlib
|
||||
import platform
|
||||
import traceback
|
||||
import threading
|
||||
import mimetypes
|
||||
import contextlib
|
||||
@@ -56,11 +57,58 @@ HTTPCODE = {
|
||||
413: "Payload Too Large",
|
||||
416: "Requested Range Not Satisfiable",
|
||||
422: "Unprocessable Entity",
|
||||
429: "Too Many Requests",
|
||||
500: "Internal Server Error",
|
||||
501: "Not Implemented",
|
||||
}
|
||||
|
||||
|
||||
IMPLICATIONS = [
|
||||
["e2dsa", "e2ds"],
|
||||
["e2ds", "e2d"],
|
||||
["e2tsr", "e2ts"],
|
||||
["e2ts", "e2t"],
|
||||
["e2t", "e2d"],
|
||||
]
|
||||
|
||||
|
||||
REKOBO_KEY = {
|
||||
v: ln.split(" ", 1)[0]
|
||||
for ln in """
|
||||
1B 6d B
|
||||
2B 7d Gb F#
|
||||
3B 8d Db C#
|
||||
4B 9d Ab G#
|
||||
5B 10d Eb D#
|
||||
6B 11d Bb A#
|
||||
7B 12d F
|
||||
8B 1d C
|
||||
9B 2d G
|
||||
10B 3d D
|
||||
11B 4d A
|
||||
12B 5d E
|
||||
1A 6m Abm G#m
|
||||
2A 7m Ebm D#m
|
||||
3A 8m Bbm A#m
|
||||
4A 9m Fm
|
||||
5A 10m Cm
|
||||
6A 11m Gm
|
||||
7A 12m Dm
|
||||
8A 1m Am
|
||||
9A 2m Em
|
||||
10A 3m Bm
|
||||
11A 4m Gbm F#m
|
||||
12A 5m Dbm C#m
|
||||
""".strip().split(
|
||||
"\n"
|
||||
)
|
||||
for v in ln.strip().split(" ")[1:]
|
||||
if v
|
||||
}
|
||||
|
||||
REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}
|
||||
|
||||
|
||||
class Counter(object):
|
||||
def __init__(self, v=0):
|
||||
self.v = v
|
||||
@@ -139,6 +187,31 @@ def nuprint(msg):
|
||||
uprint("{}\n".format(msg))
|
||||
|
||||
|
||||
def rice_tid():
|
||||
tid = threading.current_thread().ident
|
||||
c = struct.unpack(b"B" * 5, struct.pack(b">Q", tid)[-5:])
|
||||
return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m"
|
||||
|
||||
|
||||
def trace(*args, **kwargs):
|
||||
t = time.time()
|
||||
stack = "".join(
|
||||
"\033[36m{}\033[33m{}".format(x[0].split(os.sep)[-1][:-3], x[1])
|
||||
for x in traceback.extract_stack()[3:-1]
|
||||
)
|
||||
parts = ["{:.6f}".format(t), rice_tid(), stack]
|
||||
|
||||
if args:
|
||||
parts.append(repr(args))
|
||||
|
||||
if kwargs:
|
||||
parts.append(repr(kwargs))
|
||||
|
||||
msg = "\033[0m ".join(parts)
|
||||
# _tracebuf.append(msg)
|
||||
nuprint(msg)
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def ren_open(fname, *args, **kwargs):
|
||||
fdir = kwargs.pop("fdir", None)
|
||||
@@ -477,6 +550,16 @@ def get_spd(nbyte, t0, t=None):
|
||||
return "{} \033[0m{}/s\033[0m".format(s1, s2)
|
||||
|
||||
|
||||
def s2hms(s, optional_h=False):
|
||||
s = int(s)
|
||||
h, s = divmod(s, 3600)
|
||||
m, s = divmod(s, 60)
|
||||
if not h and optional_h:
|
||||
return "{}:{:02}".format(m, s)
|
||||
|
||||
return "{}:{:02}:{:02}".format(h, m, s)
|
||||
|
||||
|
||||
def undot(path):
|
||||
ret = []
|
||||
for node in path.split("/"):
|
||||
@@ -493,11 +576,12 @@ def undot(path):
|
||||
return "/".join(ret)
|
||||
|
||||
|
||||
def sanitize_fn(fn):
|
||||
fn = fn.replace("\\", "/").split("/")[-1]
|
||||
def sanitize_fn(fn, ok=""):
|
||||
if "/" not in ok:
|
||||
fn = fn.replace("\\", "/").split("/")[-1]
|
||||
|
||||
if WINDOWS:
|
||||
for bad, good in [
|
||||
for bad, good in [x for x in [
|
||||
["<", "＜"],
[">", "＞"],
[":", "："],
@@ -507,7 +591,7 @@ def sanitize_fn(fn):
["|", "｜"],
["?", "？"],
["*", "＊"],
|
||||
]:
|
||||
] if x[0] not in ok]:
|
||||
fn = fn.replace(bad, good)
|
||||
|
||||
bad = ["con", "prn", "aux", "nul"]
|
||||
@@ -697,6 +781,16 @@ def read_socket_chunked(sr, log=None):
|
||||
sr.recv(2) # \r\n after each chunk too
|
||||
|
||||
|
||||
def yieldfile(fn):
|
||||
with open(fsenc(fn), "rb", 512 * 1024) as f:
|
||||
while True:
|
||||
buf = f.read(64 * 1024)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
yield buf
|
||||
|
||||
|
||||
def hashcopy(actor, fin, fout):
|
||||
u32_lim = int((2 ** 31) * 0.9)
|
||||
hashobj = hashlib.sha512()
|
||||
@@ -837,7 +931,11 @@ def chkcmd(*argv):
|
||||
def gzip_orig_sz(fn):
|
||||
with open(fsenc(fn), "rb") as f:
|
||||
f.seek(-4, 2)
|
||||
return struct.unpack(b"I", f.read(4))[0]
|
||||
rv = f.read(4)
|
||||
try:
|
||||
return struct.unpack(b"I", rv)[0]
|
||||
except:
|
||||
return struct.unpack("I", rv)[0]
|
||||
|
||||
|
||||
def py_desc():
|
||||
@@ -847,7 +945,11 @@ def py_desc():
|
||||
if ofs > 0:
|
||||
py_ver = py_ver[:ofs]
|
||||
|
||||
bitness = struct.calcsize(b"P") * 8
|
||||
try:
|
||||
bitness = struct.calcsize(b"P") * 8
|
||||
except:
|
||||
bitness = struct.calcsize("P") * 8
|
||||
|
||||
host_os = platform.system()
|
||||
compiler = platform.python_compiler()
|
||||
|
||||
|
||||
@@ -182,6 +182,11 @@ a, #files tbody div a:last-child {
|
||||
color: #840;
|
||||
text-shadow: 0 0 .3em #b80;
|
||||
}
|
||||
#files tbody tr.sel td {
|
||||
background: #80b;
|
||||
color: #fff;
|
||||
border-color: #a3d;
|
||||
}
|
||||
#blocked {
|
||||
position: fixed;
|
||||
top: 0;
|
||||
@@ -268,6 +273,25 @@ a, #files tbody div a:last-child {
|
||||
padding: .2em 0 0 .07em;
|
||||
color: #fff;
|
||||
}
|
||||
#wtoggle>span {
|
||||
display: none;
|
||||
}
|
||||
#wtoggle.sel {
|
||||
width: 4.27em;
|
||||
}
|
||||
#wtoggle.sel>span {
|
||||
display: inline-block;
|
||||
line-height: 0;
|
||||
}
|
||||
#wtoggle.sel>span a {
|
||||
font-size: .4em;
|
||||
margin: -.3em 0;
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
}
|
||||
#wtoggle.sel>span #selzip {
|
||||
top: -.6em;
|
||||
}
|
||||
#barpos,
|
||||
#barbuf {
|
||||
position: absolute;
|
||||
@@ -405,7 +429,7 @@ input[type="checkbox"]:checked+label {
|
||||
|
||||
|
||||
|
||||
#op_search table {
|
||||
#srch_form {
|
||||
border: 1px solid #3a3a3a;
|
||||
box-shadow: 0 0 1em #222 inset;
|
||||
background: #2d2d2d;
|
||||
@@ -414,14 +438,25 @@ input[type="checkbox"]:checked+label {
|
||||
margin-bottom: 0;
|
||||
padding: 0 .5em .5em 0;
|
||||
}
|
||||
#srch_form table {
|
||||
display: inline-block;
|
||||
}
|
||||
#srch_form td {
|
||||
padding: .6em .6em;
|
||||
}
|
||||
#srch_form td:first-child {
|
||||
width: 3em;
|
||||
padding-right: .2em;
|
||||
text-align: right;
|
||||
}
|
||||
#op_search input {
|
||||
margin: 0;
|
||||
}
|
||||
#srch_q {
|
||||
white-space: pre;
|
||||
color: #f80;
|
||||
height: 1em;
|
||||
margin: .2em 0 -1em 1.6em;
|
||||
}
|
||||
#files td div span {
|
||||
color: #fff;
|
||||
@@ -482,6 +517,7 @@ input[type="checkbox"]:checked+label {
|
||||
left: -1.7em;
|
||||
width: calc(100% + 1.3em);
|
||||
}
|
||||
.tglbtn,
|
||||
#tree>a+a {
|
||||
padding: .2em .4em;
|
||||
font-size: 1.2em;
|
||||
@@ -492,9 +528,11 @@ input[type="checkbox"]:checked+label {
|
||||
position: relative;
|
||||
top: -.2em;
|
||||
}
|
||||
.tglbtn:hover,
|
||||
#tree>a+a:hover {
|
||||
background: #805;
|
||||
}
|
||||
.tglbtn.on,
|
||||
#tree>a+a.on {
|
||||
background: #fc4;
|
||||
color: #400;
|
||||
@@ -601,7 +639,10 @@ input[type="checkbox"]:checked+label {
|
||||
max-width: none;
|
||||
margin-right: 1.5em;
|
||||
}
|
||||
#key_notation>span {
|
||||
#op_cfg>div>a {
|
||||
line-height: 2em;
|
||||
}
|
||||
#op_cfg>div>span {
|
||||
display: inline-block;
|
||||
padding: .2em .4em;
|
||||
}
|
||||
@@ -624,6 +665,9 @@ input[type="checkbox"]:checked+label {
|
||||
top: 6em;
|
||||
right: 1.5em;
|
||||
}
|
||||
#ops:hover #opdesc.off {
|
||||
display: none;
|
||||
}
|
||||
#opdesc code {
|
||||
background: #3c3c3c;
|
||||
padding: .2em .3em;
|
||||
@@ -631,4 +675,4 @@ input[type="checkbox"]:checked+label {
|
||||
border-radius: .3em;
|
||||
font-family: monospace, monospace;
|
||||
line-height: 2em;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,9 +29,9 @@
|
||||
|
||||
<div id="op_search" class="opview">
|
||||
{%- if have_tags_idx %}
|
||||
<table id="srch_form" class="tags"></table>
|
||||
<div id="srch_form" class="tags"></div>
|
||||
{%- else %}
|
||||
<table id="srch_form"></table>
|
||||
<div id="srch_form"></div>
|
||||
{%- endif %}
|
||||
<div id="srch_q"></div>
|
||||
</div>
|
||||
@@ -41,6 +41,12 @@
|
||||
<div id="op_cfg" class="opview opbox">
|
||||
<h3>key notation</h3>
|
||||
<div id="key_notation"></div>
|
||||
{%- if have_zip %}
|
||||
<h3>folder download</h3>
|
||||
<div id="arc_fmt"></div>
|
||||
{%- endif %}
|
||||
<h3>tooltips</h3>
|
||||
<div><a id="tooltips" class="tglbtn" href="#">enable</a></div>
|
||||
</div>
|
||||
|
||||
<h1 id="path">
|
||||
@@ -54,7 +60,7 @@
|
||||
<a href="#" id="detree">🍞...</a>
|
||||
<a href="#" step="2" id="twobytwo">+</a>
|
||||
<a href="#" step="-2" id="twig">–</a>
|
||||
<a href="#" id="dyntree">a</a>
|
||||
<a href="#" class="tglbtn" id="dyntree">a</a>
|
||||
<ul id="treeul"></ul>
|
||||
<div id="thx_ff"> </div>
|
||||
</div>
|
||||
@@ -66,18 +72,18 @@
|
||||
<table id="files">
|
||||
<thead>
|
||||
<tr>
|
||||
<th></th>
|
||||
<th><span>File Name</span></th>
|
||||
<th sort="int"><span>Size</span></th>
|
||||
<th name="lead"><span>c</span></th>
|
||||
<th name="href"><span>File Name</span></th>
|
||||
<th name="sz" sort="int"><span>Size</span></th>
|
||||
{%- for k in taglist %}
|
||||
{%- if k.startswith('.') %}
|
||||
<th sort="int"><span>{{ k[1:] }}</span></th>
|
||||
<th name="tags/{{ k }}" sort="int"><span>{{ k[1:] }}</span></th>
|
||||
{%- else %}
|
||||
<th><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
|
||||
<th name="tags/{{ k }}"><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
<th><span>T</span></th>
|
||||
<th><span>Date</span></th>
|
||||
<th name="ext"><span>T</span></th>
|
||||
<th name="ts"><span>Date</span></th>
|
||||
</tr>
|
||||
</thead>
|
||||
<tbody>
|
||||
@@ -106,7 +112,14 @@
|
||||
{%- endif %}
|
||||
|
||||
<div id="widget">
|
||||
<div id="wtoggle">♫</div>
|
||||
<div id="wtoggle">
|
||||
<span>
|
||||
<a href="#" id="selall">sel.<br />all</a>
|
||||
<a href="#" id="selinv">sel.<br />inv.</a>
|
||||
<a href="#" id="selzip">zip</a>
|
||||
</span>
|
||||
♫
|
||||
</div>
|
||||
<div id="widgeti">
|
||||
<div id="pctl"><a href="#" id="bprev">⏮</a><a href="#" id="bplay">▶</a><a href="#" id="bnext">⏭</a></div>
|
||||
<canvas id="pvol" width="288" height="38"></canvas>
|
||||
|
||||
@@ -6,22 +6,19 @@ function dbg(msg) {
|
||||
ebi('path').innerHTML = msg;
|
||||
}
|
||||
|
||||
makeSortable(ebi('files'));
|
||||
|
||||
|
||||
// extract songs + add play column
|
||||
function init_mp() {
|
||||
var tracks = [];
|
||||
var ret = {
|
||||
'au': null,
|
||||
'au_native': null,
|
||||
'au_ogvjs': null,
|
||||
'tracks': tracks,
|
||||
'cover_url': ''
|
||||
};
|
||||
var re_audio = /\.(opus|ogg|m4a|aac|mp3|wav|flac)$/i;
|
||||
function MPlayer() {
|
||||
this.id = new Date().getTime();
|
||||
this.au = null;
|
||||
this.au_native = null;
|
||||
this.au_ogvjs = null;
|
||||
this.cover_url = '';
|
||||
this.tracks = {};
|
||||
this.order = [];
|
||||
|
||||
var trs = ebi('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
|
||||
var re_audio = /\.(opus|ogg|m4a|aac|mp3|wav|flac)$/i;
|
||||
var trs = document.querySelectorAll('#files tbody tr');
|
||||
for (var a = 0, aa = trs.length; a < aa; a++) {
|
||||
var tds = trs[a].getElementsByTagName('td');
|
||||
var link = tds[1].getElementsByTagName('a');
|
||||
@@ -30,37 +27,48 @@ function init_mp() {
|
||||
|
||||
var m = re_audio.exec(url);
|
||||
if (m) {
|
||||
var ntrack = tracks.length;
|
||||
tracks.push(url);
|
||||
|
||||
tds[0].innerHTML = '<a id="trk' + ntrack + '" href="#trk' + ntrack + '" class="play">play</a></td>';
|
||||
var tid = link.getAttribute('id');
|
||||
this.order.push(tid);
|
||||
this.tracks[tid] = url;
|
||||
tds[0].innerHTML = '<a id="a' + tid + '" href="#a' + tid + '" class="play">play</a></td>';
|
||||
ebi('a' + tid).onclick = ev_play;
|
||||
}
|
||||
}
|
||||
|
||||
for (var a = 0, aa = tracks.length; a < aa; a++)
|
||||
ebi('trk' + a).onclick = ev_play;
|
||||
|
||||
ret.vol = sread('vol');
|
||||
if (ret.vol !== null)
|
||||
ret.vol = parseFloat(ret.vol);
|
||||
this.vol = sread('vol');
|
||||
if (this.vol !== null)
|
||||
this.vol = parseFloat(this.vol);
|
||||
else
|
||||
ret.vol = 0.5;
|
||||
this.vol = 0.5;
|
||||
|
||||
ret.expvol = function () {
|
||||
return 0.5 * ret.vol + 0.5 * ret.vol * ret.vol;
|
||||
this.expvol = function () {
|
||||
return 0.5 * this.vol + 0.5 * this.vol * this.vol;
|
||||
};
|
||||
|
||||
ret.setvol = function (vol) {
|
||||
ret.vol = Math.max(Math.min(vol, 1), 0);
|
||||
this.setvol = function (vol) {
|
||||
this.vol = Math.max(Math.min(vol, 1), 0);
|
||||
swrite('vol', vol);
|
||||
|
||||
if (ret.au)
|
||||
ret.au.volume = ret.expvol();
|
||||
if (this.au)
|
||||
this.au.volume = this.expvol();
|
||||
};
|
||||
|
||||
return ret;
|
||||
this.read_order = function () {
|
||||
var order = [];
|
||||
var links = document.querySelectorAll('#files>tbody>tr>td:nth-child(1)>a');
|
||||
for (var a = 0, aa = links.length; a < aa; a++) {
|
||||
var tid = links[a].getAttribute('id');
|
||||
if (!tid || tid.indexOf('af-') !== 0)
|
||||
continue;
|
||||
|
||||
order.push(tid.slice(1));
|
||||
}
|
||||
this.order = order;
|
||||
};
|
||||
}
|
||||
var mp = init_mp();
|
||||
addcrc();
|
||||
var mp = new MPlayer();
|
||||
makeSortable(ebi('files'), mp.read_order.bind(mp));
|
||||
|
||||
|
||||
// toggle player widget
|
||||
@@ -180,10 +188,31 @@ var pbar = (function () {
|
||||
pctx.setTransform(scale, 0, 0, scale, 0, 0);
|
||||
pctx.clearRect(0, 0, sw, sh);
|
||||
|
||||
pctx.fillStyle = 'rgba(204,255,128,0.15)';
|
||||
for (var p = 1, mins = mp.au.duration / 10; p <= mins; p++)
|
||||
pctx.fillRect(Math.floor(sm * p * 10), 0, 2, sh);
|
||||
|
||||
pctx.fillStyle = '#9b7';
|
||||
pctx.fillStyle = 'rgba(192,255,96,0.5)';
|
||||
for (var p = 1, mins = mp.au.duration / 60; p <= mins; p++)
|
||||
pctx.fillRect(Math.floor(sm * p * 60), 0, 2, sh);
|
||||
|
||||
var w = 8;
|
||||
var x = sm * mp.au.currentTime;
|
||||
pctx.fillStyle = '#573'; pctx.fillRect((x - w / 2) - 1, 0, w + 2, sh);
|
||||
pctx.fillStyle = '#dfc'; pctx.fillRect((x - w / 2), 0, 8, sh);
|
||||
|
||||
pctx.fillStyle = '#fff';
|
||||
pctx.font = '1em sans-serif';
|
||||
var txt = s2ms(mp.au.duration);
|
||||
var tw = pctx.measureText(txt).width;
|
||||
pctx.fillText(txt, sw - (tw + 8), sh / 3 * 2);
|
||||
|
||||
txt = s2ms(mp.au.currentTime);
|
||||
tw = pctx.measureText(txt).width;
|
||||
var gw = pctx.measureText("88:88::").width;
|
||||
var xt = x < sw / 2 ? (x + 8) : (Math.min(sw - gw, x - 8) - tw);
|
||||
pctx.fillText(txt, xt, sh / 3 * 2);
|
||||
};
|
||||
return r;
|
||||
})();
|
||||
@@ -277,18 +306,41 @@ var vbar = (function () {
|
||||
})();
|
||||
|
||||
|
||||
function seek_au_mul(mul) {
|
||||
if (mp.au)
|
||||
seek_au_sec(mp.au.duration * mul);
|
||||
}
|
||||
|
||||
function seek_au_sec(seek) {
|
||||
if (!mp.au)
|
||||
return;
|
||||
|
||||
console.log('seek: ' + seek);
|
||||
if (!isFinite(seek))
|
||||
return;
|
||||
|
||||
mp.au.currentTime = seek;
|
||||
|
||||
if (mp.au === mp.au_native)
|
||||
// hack: ogv.js breaks on .play() during playback
|
||||
mp.au.play();
|
||||
};
|
||||
|
||||
|
||||
function song_skip(n) {
|
||||
var tid = null;
|
||||
if (mp.au)
|
||||
tid = mp.au.tid;
|
||||
|
||||
if (tid !== null)
|
||||
play(mp.order.indexOf(tid) + n);
|
||||
else
|
||||
play(mp.order[0]);
|
||||
};
|
||||
|
||||
|
||||
// hook up the widget buttons
|
||||
(function () {
|
||||
var bskip = function (n) {
|
||||
var tid = null;
|
||||
if (mp.au)
|
||||
tid = mp.au.tid;
|
||||
|
||||
if (tid !== null)
|
||||
play(tid + n);
|
||||
else
|
||||
play(0);
|
||||
};
|
||||
ebi('bplay').onclick = function (e) {
|
||||
ev(e);
|
||||
if (mp.au) {
|
||||
@@ -302,31 +354,20 @@ var vbar = (function () {
|
||||
};
|
||||
ebi('bprev').onclick = function (e) {
|
||||
ev(e);
|
||||
bskip(-1);
|
||||
song_skip(-1);
|
||||
};
|
||||
ebi('bnext').onclick = function (e) {
|
||||
ev(e);
|
||||
bskip(1);
|
||||
song_skip(1);
|
||||
};
|
||||
ebi('barpos').onclick = function (e) {
|
||||
if (!mp.au) {
|
||||
//dbg((new Date()).getTime());
|
||||
return play(0);
|
||||
}
|
||||
|
||||
var rect = pbar.pcan.getBoundingClientRect();
|
||||
var x = e.clientX - rect.left;
|
||||
var mul = x * 1.0 / rect.width;
|
||||
var seek = mp.au.duration * mul;
|
||||
console.log('seek: ' + seek);
|
||||
if (!isFinite(seek))
|
||||
return;
|
||||
|
||||
mp.au.currentTime = seek;
|
||||
|
||||
if (mp.au === mp.au_native)
|
||||
// hack: ogv.js breaks on .play() during playback
|
||||
mp.au.play();
|
||||
seek_au_mul(x * 1.0 / rect.width);
|
||||
};
|
||||
})();
|
||||
|
||||
@@ -359,7 +400,7 @@ var vbar = (function () {
|
||||
var len = mp.au.duration;
|
||||
if (pos > 0 && pos > len - 0.1) {
|
||||
last_skip_url = mp.au.src;
|
||||
play(mp.au.tid + 1);
|
||||
song_skip(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -372,7 +413,7 @@ var vbar = (function () {
|
||||
// event from play button next to a file in the list
|
||||
function ev_play(e) {
|
||||
ev(e);
|
||||
play(parseInt(this.getAttribute('id').substr(3)));
|
||||
play(this.getAttribute('id').slice(1));
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -394,18 +435,24 @@ catch (ex) { }
|
||||
|
||||
// plays the tid'th audio file on the page
|
||||
function play(tid, call_depth) {
|
||||
if (mp.tracks.length == 0)
|
||||
if (mp.order.length == 0)
|
||||
return alert('no audio found wait what');
|
||||
|
||||
while (tid >= mp.tracks.length)
|
||||
tid -= mp.tracks.length;
|
||||
var tn = tid;
|
||||
if ((tn + '').indexOf('f-') === 0)
|
||||
tn = mp.order.indexOf(tn);
|
||||
|
||||
while (tid < 0)
|
||||
tid += mp.tracks.length;
|
||||
while (tn >= mp.order.length)
|
||||
tn -= mp.order.length;
|
||||
|
||||
while (tn < 0)
|
||||
tn += mp.order.length;
|
||||
|
||||
tid = mp.order[tn];
|
||||
|
||||
if (mp.au) {
|
||||
mp.au.pause();
|
||||
setclass('trk' + mp.au.tid, 'play');
|
||||
setclass('a' + mp.au.tid, 'play');
|
||||
}
|
||||
|
||||
// ogv.js breaks on .play() unless directly user-triggered
|
||||
@@ -449,7 +496,7 @@ function play(tid, call_depth) {
|
||||
mp.au.tid = tid;
|
||||
mp.au.src = url;
|
||||
mp.au.volume = mp.expvol();
|
||||
var oid = 'trk' + tid;
|
||||
var oid = 'a' + tid;
|
||||
setclass(oid, 'play act');
|
||||
var trs = ebi('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr');
|
||||
for (var a = 0, aa = trs.length; a < aa; a++) {
|
||||
@@ -480,8 +527,8 @@ function play(tid, call_depth) {
|
||||
catch (ex) {
|
||||
alert('playback failed: ' + ex);
|
||||
}
|
||||
setclass('trk' + mp.au.tid, 'play');
|
||||
setTimeout('play(' + (mp.au.tid + 1) + ');', 500);
|
||||
setclass(oid, 'play');
|
||||
setTimeout('song_skip(1);', 500);
|
||||
}
|
||||
|
||||
|
||||
@@ -560,12 +607,73 @@ function autoplay_blocked() {
|
||||
// autoplay linked track
|
||||
(function () {
|
||||
var v = location.hash;
|
||||
if (v && v.length > 4 && v.indexOf('#trk') === 0)
|
||||
play(parseInt(v.substr(4)));
|
||||
if (v && v.length == 12 && v.indexOf('#af-') === 0)
|
||||
play(v.slice(2));
|
||||
})();
|
||||
|
||||
|
||||
//widget.open();
|
||||
function tree_neigh(n) {
|
||||
var links = document.querySelectorAll('#treeul li>a+a');
|
||||
if (!links.length) {
|
||||
alert('switch to the tree for that');
|
||||
return;
|
||||
}
|
||||
var act = -1;
|
||||
for (var a = 0, aa = links.length; a < aa; a++) {
|
||||
if (links[a].getAttribute('class') == 'hl') {
|
||||
act = a;
|
||||
break;
|
||||
}
|
||||
}
|
||||
a += n;
|
||||
if (a < 0)
|
||||
a = links.length - 1;
|
||||
if (a >= links.length)
|
||||
a = 0;
|
||||
|
||||
links[a].click();
|
||||
}
|
||||
|
||||
|
||||
function tree_up() {
|
||||
var act = document.querySelector('#treeul a.hl');
|
||||
if (!act) {
|
||||
alert('switch to the tree for that');
|
||||
return;
|
||||
}
|
||||
if (act.previousSibling.textContent == '-')
|
||||
return act.previousSibling.click();
|
||||
|
||||
act.parentNode.parentNode.parentNode.getElementsByTagName('a')[1].click();
|
||||
}
|
||||
|
||||
|
||||
document.onkeydown = function (e) {
|
||||
if (document.activeElement != document.body && document.activeElement.nodeName.toLowerCase() != 'a')
|
||||
return;
|
||||
|
||||
var k = e.code, pos = -1;
|
||||
if (k.indexOf('Digit') === 0)
|
||||
pos = parseInt(k.slice(-1)) * 0.1;
|
||||
|
||||
if (pos !== -1)
|
||||
return seek_au_mul(pos);
|
||||
|
||||
var n = k == 'KeyJ' ? -1 : k == 'KeyL' ? 1 : 0;
|
||||
if (n !== 0)
|
||||
return song_skip(n);
|
||||
|
||||
n = k == 'KeyU' ? -10 : k == 'KeyO' ? 10 : 0;
|
||||
if (n !== 0)
|
||||
return mp.au ? seek_au_sec(mp.au.currentTime + n) : true;
|
||||
|
||||
n = k == 'KeyI' ? -1 : k == 'KeyK' ? 1 : 0;
|
||||
if (n !== 0)
|
||||
return tree_neigh(n);
|
||||
|
||||
if (k == 'KeyP')
|
||||
return tree_up();
|
||||
};
|
||||
|
||||
|
||||
// search
|
||||
@@ -588,15 +696,19 @@ function autoplay_blocked() {
|
||||
];
|
||||
var oldcfg = [];
|
||||
|
||||
if (document.querySelector('#srch_form.tags'))
|
||||
if (document.querySelector('#srch_form.tags')) {
|
||||
sconf.push(["tags",
|
||||
["tags", "tags", "tags contains (^=start, end=$)", "46"]
|
||||
]);
|
||||
sconf.push(["adv.",
|
||||
["adv", "adv", "key>=1A key<=2B .bpm>165", "46"]
|
||||
]);
|
||||
}
|
||||
|
||||
var html = [];
|
||||
var trs = [];
|
||||
var orig_html = null;
|
||||
for (var a = 0; a < sconf.length; a++) {
|
||||
html.push('<tr><td><br />' + sconf[a][0] + '</td>');
|
||||
var html = ['<tr><td><br />' + sconf[a][0] + '</td>'];
|
||||
for (var b = 1; b < 3; b++) {
|
||||
var hn = "srch_" + sconf[a][b][0];
|
||||
var csp = (sconf[a].length == 2) ? 2 : 1;
|
||||
@@ -609,6 +721,11 @@ function autoplay_blocked() {
|
||||
break;
|
||||
}
|
||||
html.push('</tr>');
|
||||
trs.push(html);
|
||||
}
|
||||
var html = [];
|
||||
for (var a = 0; a < trs.length; a += 2) {
|
||||
html.push('<table>' + (trs[a].concat(trs[a + 1])).join('\n') + '</table>');
|
||||
}
|
||||
ebi('srch_form').innerHTML = html.join('\n');
|
||||
|
||||
@@ -617,7 +734,14 @@ function autoplay_blocked() {
|
||||
o[a].oninput = ev_search_input;
|
||||
}
|
||||
|
||||
function srch_msg(err, txt) {
|
||||
var o = ebi('srch_q');
|
||||
o.textContent = txt;
|
||||
o.style.color = err ? '#f09' : '#c90';
|
||||
}
|
||||
|
||||
var search_timeout;
|
||||
var search_in_progress = 0;
|
||||
|
||||
function ev_search_input() {
|
||||
var v = this.value;
|
||||
@@ -627,10 +751,14 @@ function autoplay_blocked() {
|
||||
chk.checked = ((v + '').length > 0);
|
||||
}
|
||||
clearTimeout(search_timeout);
|
||||
search_timeout = setTimeout(do_search, 100);
|
||||
var now = new Date().getTime();
|
||||
if (now - search_in_progress > 30 * 1000)
|
||||
search_timeout = setTimeout(do_search, 200);
|
||||
}
|
||||
|
||||
function do_search() {
|
||||
search_in_progress = new Date().getTime();
|
||||
srch_msg(false, "searching...");
|
||||
clearTimeout(search_timeout);
|
||||
var params = {};
|
||||
var o = document.querySelectorAll('#op_search input[type="text"]');
|
||||
@@ -654,13 +782,22 @@ function autoplay_blocked() {
|
||||
return;
|
||||
|
||||
if (this.status !== 200) {
|
||||
alert("http " + this.status + ": " + this.responseText);
|
||||
var msg = this.responseText;
|
||||
if (msg.indexOf('<pre>') === 0)
|
||||
msg = msg.slice(5);
|
||||
|
||||
srch_msg(true, "http " + this.status + ": " + msg);
|
||||
search_in_progress = 0;
|
||||
return;
|
||||
}
|
||||
search_in_progress = 0;
|
||||
srch_msg(false, '');
|
||||
|
||||
var res = JSON.parse(this.responseText),
|
||||
tagord = res.tag_order;
|
||||
|
||||
sortfiles(res.hits);
|
||||
|
||||
var ofiles = ebi('files');
|
||||
if (ofiles.getAttribute('ts') > this.ts)
|
||||
return;
|
||||
@@ -674,11 +811,12 @@ function autoplay_blocked() {
|
||||
ebi('path').style.display = 'none';
|
||||
ebi('tree').style.display = 'none';
|
||||
ebi('wrap').style.marginLeft = '0';
|
||||
treectl.hidden = true;
|
||||
}
|
||||
|
||||
var html = mk_files_header(tagord);
|
||||
html.push('<tbody>');
|
||||
html.push('<tr><td>-</td><td colspan="42"><a href="#" id="unsearch">close search results</a></td></tr>');
|
||||
html.push('<tr><td>-</td><td colspan="42"><a href="#" id="unsearch">! close search results</a></td></tr>');
|
||||
for (var a = 0; a < res.hits.length; a++) {
|
||||
var r = res.hits[a],
|
||||
ts = parseInt(r.ts),
|
||||
@@ -696,8 +834,8 @@ function autoplay_blocked() {
|
||||
var k = tagord[b],
|
||||
v = r.tags[k] || "";
|
||||
|
||||
if (k == "dur") {
|
||||
var sv = s2ms(v);
|
||||
if (k == ".dur") {
|
||||
var sv = v ? s2ms(v) : "";
|
||||
nodes[nodes.length - 1] += '</td><td sortv="' + v + '">' + sv;
|
||||
continue;
|
||||
}
|
||||
@@ -716,6 +854,7 @@ function autoplay_blocked() {
|
||||
ofiles.innerHTML = html.join('\n');
|
||||
ofiles.setAttribute("ts", this.ts);
|
||||
filecols.set_style();
|
||||
mukey.render();
|
||||
reload_browser();
|
||||
|
||||
ebi('unsearch').onclick = unsearch;
|
||||
@@ -726,6 +865,7 @@ function autoplay_blocked() {
|
||||
ebi('path').style.display = oldcfg[0];
|
||||
ebi('tree').style.display = oldcfg[1];
|
||||
ebi('wrap').style.marginLeft = oldcfg[2];
|
||||
treectl.hidden = false;
|
||||
oldcfg = [];
|
||||
ebi('files').innerHTML = orig_html;
|
||||
orig_html = null;
|
||||
@@ -735,13 +875,18 @@ function autoplay_blocked() {
|
||||
|
||||
|
||||
var treectl = (function () {
|
||||
var treectl = {
|
||||
"hidden": false
|
||||
};
|
||||
var dyn = bcfg_get('dyntree', true);
|
||||
var treesz = icfg_get('treesz', 16);
|
||||
treesz = Math.min(Math.max(treesz, 4), 50);
|
||||
console.log('treesz [' + treesz + ']');
|
||||
var entreed = false;
|
||||
|
||||
function entree(e) {
|
||||
ev(e);
|
||||
entreed = true;
|
||||
ebi('path').style.display = 'none';
|
||||
|
||||
var tree = ebi('tree');
|
||||
@@ -749,22 +894,29 @@ var treectl = (function () {
|
||||
|
||||
swrite('entreed', 'tree');
|
||||
get_tree("", get_evpath(), true);
|
||||
window.addEventListener('scroll', onscroll);
|
||||
window.addEventListener('resize', onresize);
|
||||
onresize();
|
||||
}
|
||||
|
||||
function detree(e) {
|
||||
ev(e);
|
||||
entreed = false;
|
||||
ebi('tree').style.display = 'none';
|
||||
ebi('path').style.display = 'inline-block';
|
||||
ebi('wrap').style.marginLeft = '0';
|
||||
swrite('entreed', 'na');
|
||||
window.removeEventListener('resize', onresize);
|
||||
window.removeEventListener('scroll', onscroll);
|
||||
}
|
||||
|
||||
function onscroll() {
|
||||
if (!entreed || treectl.hidden)
|
||||
return;
|
||||
|
||||
var top = ebi('wrap').getBoundingClientRect().top;
|
||||
ebi('tree').style.top = Math.max(0, parseInt(top)) + 'px';
|
||||
}
|
||||
window.addEventListener('scroll', onscroll);
|
||||
|
||||
function periodic() {
|
||||
onscroll();
|
||||
@@ -773,6 +925,9 @@ var treectl = (function () {
|
||||
periodic();
|
||||
|
||||
function onresize(e) {
|
||||
if (!entreed || treectl.hidden)
|
||||
return;
|
||||
|
||||
var q = '#tree';
|
||||
var nq = 0;
|
||||
while (dyn) {
|
||||
@@ -786,7 +941,6 @@ var treectl = (function () {
|
||||
ebi('wrap').style.marginLeft = w + 'em';
|
||||
onscroll();
|
||||
}
|
||||
window.addEventListener('resize', onresize);
|
||||
|
||||
function get_tree(top, dst, rst) {
|
||||
var xhr = new XMLHttpRequest();
|
||||
@@ -934,6 +1088,8 @@ var treectl = (function () {
|
||||
|
||||
ebi('srv_info').innerHTML = '<span>' + res.srvinf + '</span>';
|
||||
var nodes = res.dirs.concat(res.files);
|
||||
nodes = sortfiles(nodes);
|
||||
|
||||
var top = this.top;
|
||||
var html = mk_files_header(res.taglist);
|
||||
html.push('<tbody>');
|
||||
@@ -946,11 +1102,8 @@ var treectl = (function () {
|
||||
var k = res.taglist[b],
|
||||
v = (r.tags || {})[k] || "";
|
||||
|
||||
if (k[0] == '.')
|
||||
k = k.slice(1);
|
||||
|
||||
if (k == "dur") {
|
||||
var sv = s2ms(v);
|
||||
if (k == ".dur") {
|
||||
var sv = v ? s2ms(v) : "";
|
||||
ln[ln.length - 1] += '</td><td sortv="' + v + '">' + sv;
|
||||
continue;
|
||||
}
|
||||
@@ -974,6 +1127,7 @@ var treectl = (function () {
|
||||
|
||||
filecols.set_style();
|
||||
mukey.render();
|
||||
msel.render();
|
||||
reload_tree();
|
||||
reload_browser();
|
||||
}
|
||||
@@ -1049,9 +1203,8 @@ var treectl = (function () {
|
||||
hist_replace(get_evpath() + window.location.hash);
|
||||
}
|
||||
|
||||
return {
|
||||
"onscroll": onscroll
|
||||
}
|
||||
treectl.onscroll = onscroll;
|
||||
return treectl;
|
||||
})();
|
||||
|
||||
|
||||
@@ -1107,27 +1260,47 @@ function apply_perms(perms) {
|
||||
}
|
||||
|
||||
|
||||
function find_file_col(txt) {
|
||||
var tds = ebi('files').tHead.getElementsByTagName('th');
|
||||
var i = -1;
|
||||
var min = false;
|
||||
for (var a = 0; a < tds.length; a++) {
|
||||
var spans = tds[a].getElementsByTagName('span');
|
||||
if (spans.length && spans[0].textContent == txt) {
|
||||
min = tds[a].getAttribute('class').indexOf('min') !== -1;
|
||||
i = a;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (i == -1)
|
||||
return;
|
||||
|
||||
return [i, min];
|
||||
}
|
||||
|
||||
|
||||
function mk_files_header(taglist) {
|
||||
var html = [
|
||||
'<thead>',
|
||||
'<th></th>',
|
||||
'<th><span>File Name</span></th>',
|
||||
'<th sort="int"><span>Size</span></th>'
|
||||
'<th name="lead"><span>c</span></th>',
|
||||
'<th name="href"><span>File Name</span></th>',
|
||||
'<th name="sz" sort="int"><span>Size</span></th>'
|
||||
];
|
||||
for (var a = 0; a < taglist.length; a++) {
|
||||
var tag = taglist[a];
|
||||
var c1 = tag.slice(0, 1).toUpperCase();
|
||||
tag = c1 + tag.slice(1);
|
||||
if (c1 == '.')
|
||||
tag = '<th sort="int"><span>' + tag.slice(1);
|
||||
tag = '<th name="tags/' + tag + '" sort="int"><span>' + tag.slice(1);
|
||||
else
|
||||
tag = '<th><span>' + tag;
|
||||
tag = '<th name="tags/' + tag + '"><span>' + tag;
|
||||
|
||||
html.push(tag + '</span></th>');
|
||||
}
|
||||
html = html.concat([
|
||||
'<th><span>T</span></th>',
|
||||
'<th><span>Date</span></th>',
|
||||
'<th name="ext"><span>T</span></th>',
|
||||
'<th name="ts"><span>Date</span></th>',
|
||||
'</thead>',
|
||||
]);
|
||||
return html;
|
||||
@@ -1204,6 +1377,21 @@ var filecols = (function () {
|
||||
set_style();
|
||||
};
|
||||
|
||||
try {
|
||||
var ci = find_file_col('dur'),
|
||||
i = ci[0],
|
||||
min = ci[1],
|
||||
rows = ebi('files').tBodies[0].rows;
|
||||
|
||||
if (!min)
|
||||
for (var a = 0, aa = rows.length; a < aa; a++) {
|
||||
var c = rows[a].cells[i];
|
||||
if (c && c.textContent)
|
||||
c.textContent = s2ms(c.textContent);
|
||||
}
|
||||
}
|
||||
catch (ex) { }
|
||||
|
||||
return {
|
||||
"add_btns": add_btns,
|
||||
"set_style": set_style,
|
||||
@@ -1255,7 +1443,7 @@ var mukey = (function () {
|
||||
ev(e);
|
||||
var notation = this.getAttribute('value');
|
||||
load_notation(notation);
|
||||
render();
|
||||
try_render();
|
||||
}
|
||||
|
||||
function load_notation(notation) {
|
||||
@@ -1270,22 +1458,13 @@ var mukey = (function () {
|
||||
}
|
||||
|
||||
function render() {
|
||||
var tds = ebi('files').tHead.getElementsByTagName('th');
|
||||
var i = -1;
|
||||
var min = false;
|
||||
for (var a = 0; a < tds.length; a++) {
|
||||
var spans = tds[a].getElementsByTagName('span');
|
||||
if (spans.length && spans[0].textContent == 'Key') {
|
||||
min = tds[a].getAttribute('class').indexOf('min') !== -1;
|
||||
i = a;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (i == -1)
|
||||
var ci = find_file_col('Key');
|
||||
if (!ci)
|
||||
return;
|
||||
|
||||
var rows = ebi('files').tBodies[0].rows;
|
||||
var i = ci[0],
|
||||
min = ci[1],
|
||||
rows = ebi('files').tBodies[0].rows;
|
||||
|
||||
if (min)
|
||||
for (var a = 0, aa = rows.length; a < aa; a++) {
|
||||
@@ -1331,15 +1510,172 @@ var mukey = (function () {
|
||||
})();
|
||||
|
||||
|
||||
function addcrc() {
|
||||
var links = document.querySelectorAll(
|
||||
'#files>tbody>tr>td:nth-child(2)>' + (
|
||||
ebi('unsearch') ? 'div>a:last-child' : 'a'));
|
||||
|
||||
for (var a = 0, aa = links.length; a < aa; a++)
|
||||
if (!links[a].getAttribute('id'))
|
||||
links[a].setAttribute('id', 'f-' + crc32(links[a].textContent));
|
||||
}
|
||||
|
||||
|
||||
(function () {
|
||||
var tt = bcfg_get('tooltips', true);
|
||||
|
||||
function set_tooltip(e) {
|
||||
ev(e);
|
||||
ebi('opdesc').innerHTML = this.getAttribute('data-desc');
|
||||
var o = ebi('opdesc');
|
||||
o.innerHTML = this.getAttribute('data-desc');
|
||||
o.setAttribute('class', tt ? '' : 'off');
|
||||
}
|
||||
|
||||
var btns = document.querySelectorAll('#ops, #ops>a');
|
||||
for (var a = 0; a < btns.length; a++) {
|
||||
btns[a].onmouseenter = set_tooltip;
|
||||
}
|
||||
|
||||
ebi('tooltips').onclick = function (e) {
|
||||
ev(e);
|
||||
tt = !tt;
|
||||
bcfg_set('tooltips', tt);
|
||||
};
|
||||
})();
|
||||
|
||||
|
||||
var arcfmt = (function () {
|
||||
if (!ebi('arc_fmt'))
|
||||
return { "render": function () { } };
|
||||
|
||||
var html = [],
|
||||
arcfmts = ["tar", "zip", "zip_dos", "zip_crc"],
|
||||
arcv = ["tar", "zip=utf8", "zip", "zip=crc"];
|
||||
|
||||
for (var a = 0; a < arcfmts.length; a++) {
|
||||
var k = arcfmts[a];
|
||||
html.push(
|
||||
'<span><input type="radio" name="arcfmt" value="' + k + '" id="arcfmt_' + k + '">' +
|
||||
'<label for="arcfmt_' + k + '">' + k + '</label></span>');
|
||||
}
|
||||
ebi('arc_fmt').innerHTML = html.join('\n');
|
||||
|
||||
var fmt = sread("arc_fmt") || "zip";
|
||||
ebi('arcfmt_' + fmt).checked = true;
|
||||
|
||||
function render() {
|
||||
var arg = arcv[arcfmts.indexOf(fmt)],
|
||||
tds = document.querySelectorAll('#files tbody td:first-child a');
|
||||
|
||||
for (var a = 0, aa = tds.length; a < aa; a++) {
|
||||
var o = tds[a], txt = o.textContent, href = o.getAttribute('href');
|
||||
if (txt != 'tar' && txt != 'zip')
|
||||
continue;
|
||||
|
||||
var ofs = href.lastIndexOf('?');
|
||||
if (ofs < 0)
|
||||
throw 'missing arg in url';
|
||||
|
||||
o.setAttribute("href", href.slice(0, ofs + 1) + arg);
|
||||
o.textContent = fmt.split('_')[0];
|
||||
}
|
||||
ebi('selzip').textContent = fmt.split('_')[0];
|
||||
ebi('selzip').setAttribute('fmt', arg);
|
||||
}
|
||||
|
||||
function try_render() {
|
||||
try {
|
||||
render();
|
||||
}
|
||||
catch (ex) {
|
||||
console.log("arcfmt failed: " + ex);
|
||||
}
|
||||
}
|
||||
|
||||
function change_fmt(e) {
|
||||
ev(e);
|
||||
fmt = this.getAttribute('value');
|
||||
swrite("arc_fmt", fmt);
|
||||
try_render();
|
||||
}
|
||||
|
||||
var o = document.querySelectorAll('#arc_fmt input');
|
||||
for (var a = 0; a < o.length; a++) {
|
||||
o[a].onchange = change_fmt;
|
||||
}
|
||||
|
||||
return {
|
||||
"render": try_render
|
||||
};
|
||||
})();
|
||||
|
||||
|
||||
var msel = (function () {
|
||||
function getsel() {
|
||||
var names = [];
|
||||
var links = document.querySelectorAll('#files tbody tr.sel td:nth-child(2) a');
|
||||
for (var a = 0, aa = links.length; a < aa; a++)
|
||||
names.push(links[a].getAttribute('href').replace(/\/$/, "").split('/').slice(-1));
|
||||
|
||||
return names;
|
||||
}
|
||||
function selui() {
|
||||
var fun = getsel().length ? "add" : "remove";
|
||||
ebi('wtoggle').classList[fun]('sel');
|
||||
}
|
||||
function seltgl(e) {
|
||||
ev(e);
|
||||
var tr = this.parentNode;
|
||||
tr.classList.toggle('sel');
|
||||
selui();
|
||||
}
|
||||
function evsel(e, fun) {
|
||||
ev(e);
|
||||
var trs = document.querySelectorAll('#files tbody tr');
|
||||
for (var a = 0, aa = trs.length; a < aa; a++)
|
||||
trs[a].classList[fun]('sel');
|
||||
selui();
|
||||
}
|
||||
ebi('selall').onclick = function (e) {
|
||||
evsel(e, "add");
|
||||
};
|
||||
ebi('selinv').onclick = function (e) {
|
||||
evsel(e, "toggle");
|
||||
};
|
||||
ebi('selzip').onclick = function (e) {
|
||||
ev(e);
|
||||
var names = getsel();
|
||||
var arg = ebi('selzip').getAttribute('fmt');
|
||||
var txt = names.join('\n');
|
||||
var frm = document.createElement('form');
|
||||
frm.setAttribute('action', '?' + arg);
|
||||
frm.setAttribute('method', 'post');
|
||||
frm.setAttribute('target', '_blank');
|
||||
frm.setAttribute('enctype', 'multipart/form-data');
|
||||
frm.innerHTML = '<input name="act" value="zip" />' +
|
||||
'<textarea name="files" id="ziptxt"></textarea>';
|
||||
frm.style.display = 'none';
|
||||
|
||||
var oldform = document.querySelector('#widgeti>form');
|
||||
if (oldform)
|
||||
oldform.parentNode.removeChild(oldform);
|
||||
|
||||
ebi('widgeti').appendChild(frm);
|
||||
var obj = ebi('ziptxt');
|
||||
obj.value = txt;
|
||||
console.log(txt);
|
||||
frm.submit();
|
||||
};
|
||||
function render() {
|
||||
var tds = document.querySelectorAll('#files tbody td+td+td');
|
||||
for (var a = 0, aa = tds.length; a < aa; a++) {
|
||||
tds[a].onclick = seltgl;
|
||||
}
|
||||
arcfmt.render();
|
||||
}
|
||||
return {
|
||||
"render": render
|
||||
};
|
||||
})();
|
||||
|
||||
|
||||
@@ -1349,9 +1685,18 @@ function ev_row_tgl(e) {
|
||||
}
|
||||
|
||||
|
||||
function reload_mp() {
|
||||
if (mp && mp.au) {
|
||||
mp.au.pause();
|
||||
mp.au = null;
|
||||
}
|
||||
widget.close();
|
||||
mp = new MPlayer();
|
||||
}
|
||||
|
||||
|
||||
function reload_browser(not_mp) {
|
||||
filecols.set_style();
|
||||
makeSortable(ebi('files'));
|
||||
|
||||
var parts = get_evpath().split('/');
|
||||
var rm = document.querySelectorAll('#path>a+a+a');
|
||||
@@ -1363,7 +1708,7 @@ function reload_browser(not_mp) {
|
||||
link += parts[a] + '/';
|
||||
var o = document.createElement('a');
|
||||
o.setAttribute('href', link);
|
||||
o.innerHTML = parts[a];
|
||||
o.textContent = uricom_dec(parts[a])[0];
|
||||
ebi('path').appendChild(o);
|
||||
}
|
||||
|
||||
@@ -1376,12 +1721,9 @@ function reload_browser(not_mp) {
|
||||
}
|
||||
|
||||
if (!not_mp) {
|
||||
if (mp && mp.au) {
|
||||
mp.au.pause();
|
||||
mp.au = null;
|
||||
}
|
||||
widget.close();
|
||||
mp = init_mp();
|
||||
addcrc();
|
||||
reload_mp();
|
||||
makeSortable(ebi('files'), mp.read_order.bind(mp));
|
||||
}
|
||||
|
||||
if (window['up2k'])
|
||||
@@ -1389,3 +1731,4 @@ function reload_browser(not_mp) {
|
||||
}
|
||||
reload_browser(true);
|
||||
mukey.render();
|
||||
msel.render();
|
||||
|
||||
@@ -147,7 +147,7 @@ var md_opt = {
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js"></script>
|
||||
<script src="/.cpr/deps/marked.full.js"></script>
|
||||
<script src="/.cpr/deps/marked.js"></script>
|
||||
<script src="/.cpr/md.js"></script>
|
||||
{%- if edit %}
|
||||
<script src="/.cpr/md2.js"></script>
|
||||
|
||||
@@ -278,18 +278,27 @@ function up2k_init(have_crypto) {
|
||||
}
|
||||
else files = e.target.files;
|
||||
|
||||
if (files.length == 0)
|
||||
if (!files || files.length == 0)
|
||||
return alert('no files selected??');
|
||||
|
||||
more_one_file();
|
||||
var bad_files = [];
|
||||
var good_files = [];
|
||||
var dirs = [];
|
||||
for (var a = 0; a < files.length; a++) {
|
||||
var fobj = files[a];
|
||||
if (is_itemlist) {
|
||||
if (fobj.kind !== 'file')
|
||||
continue;
|
||||
|
||||
try {
|
||||
var wi = fobj.webkitGetAsEntry();
|
||||
if (wi.isDirectory) {
|
||||
dirs.push(wi);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
catch (ex) { }
|
||||
fobj = fobj.getAsFile();
|
||||
}
|
||||
try {
|
||||
@@ -300,12 +309,69 @@ function up2k_init(have_crypto) {
|
||||
bad_files.push(fobj.name);
|
||||
continue;
|
||||
}
|
||||
good_files.push(fobj);
|
||||
good_files.push([fobj, fobj.name]);
|
||||
}
|
||||
if (dirs) {
|
||||
return read_dirs(null, [], dirs, good_files, bad_files);
|
||||
}
|
||||
}
|
||||
|
||||
function read_dirs(rd, pf, dirs, good, bad) {
|
||||
if (!dirs.length) {
|
||||
if (!pf.length)
|
||||
return gotallfiles(good, bad);
|
||||
|
||||
console.log("retry pf, " + pf.length);
|
||||
setTimeout(function () {
|
||||
read_dirs(rd, pf, dirs, good, bad);
|
||||
}, 50);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!rd)
|
||||
rd = dirs[0].createReader();
|
||||
|
||||
rd.readEntries(function (ents) {
|
||||
var ngot = 0;
|
||||
ents.forEach(function (dn) {
|
||||
if (dn.isDirectory) {
|
||||
dirs.push(dn);
|
||||
}
|
||||
else {
|
||||
var name = dn.fullPath;
|
||||
if (name.indexOf('/') === 0)
|
||||
name = name.slice(1);
|
||||
|
||||
pf.push(name);
|
||||
dn.file(function (fobj) {
|
||||
var idx = pf.indexOf(name);
|
||||
pf.splice(idx, 1);
|
||||
try {
|
||||
if (fobj.size > 0) {
|
||||
good.push([fobj, name]);
|
||||
return;
|
||||
}
|
||||
}
|
||||
catch (ex) { }
|
||||
bad.push(name);
|
||||
});
|
||||
}
|
||||
ngot += 1;
|
||||
});
|
||||
// console.log("ngot: " + ngot);
|
||||
if (!ngot) {
|
||||
dirs.shift();
|
||||
rd = null;
|
||||
}
|
||||
return read_dirs(rd, pf, dirs, good, bad);
|
||||
});
|
||||
}
|
||||
|
||||
function gotallfiles(good_files, bad_files) {
|
||||
if (bad_files.length > 0) {
|
||||
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, files.length);
|
||||
for (var a = 0; a < bad_files.length; a++)
|
||||
var ntot = bad_files.length + good_files.length;
|
||||
var msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, ntot);
|
||||
for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++)
|
||||
msg += '-- ' + bad_files[a] + '\n';
|
||||
|
||||
if (files.length - bad_files.length <= 1 && /(android)/i.test(navigator.userAgent))
|
||||
@@ -315,21 +381,21 @@ function up2k_init(have_crypto) {
|
||||
}
|
||||
|
||||
var msg = ['upload these ' + good_files.length + ' files?'];
|
||||
for (var a = 0; a < good_files.length; a++)
|
||||
msg.push(good_files[a].name);
|
||||
for (var a = 0, aa = Math.min(20, good_files.length); a < aa; a++)
|
||||
msg.push(good_files[a][1]);
|
||||
|
||||
if (ask_up && !fsearch && !confirm(msg.join('\n')))
|
||||
return;
|
||||
|
||||
for (var a = 0; a < good_files.length; a++) {
|
||||
var fobj = good_files[a];
|
||||
var fobj = good_files[a][0];
|
||||
var now = new Date().getTime();
|
||||
var lmod = fobj.lastModified || now;
|
||||
var entry = {
|
||||
"n": parseInt(st.files.length.toString()),
|
||||
"t0": now, // TODO remove probably
|
||||
"t0": now,
|
||||
"fobj": fobj,
|
||||
"name": fobj.name,
|
||||
"name": good_files[a][1],
|
||||
"size": fobj.size,
|
||||
"lmod": lmod / 1000,
|
||||
"purl": get_evpath(),
|
||||
|
||||
@@ -88,7 +88,7 @@
|
||||
width: 30em;
|
||||
}
|
||||
#u2conf.has_btn {
|
||||
width: 46em;
|
||||
width: 48em;
|
||||
}
|
||||
#u2conf * {
|
||||
text-align: center;
|
||||
|
||||
@@ -73,7 +73,8 @@
|
||||
<div id="u2btn_ct">
|
||||
<div id="u2btn">
|
||||
<span id="u2bm"></span><br />
|
||||
drop files here<br />
|
||||
drag/drop files<br />
|
||||
and folders here<br />
|
||||
(or click me)
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -91,7 +91,98 @@ function import_js(url, cb) {
|
||||
}
|
||||
|
||||
|
||||
function sortTable(table, col) {
|
||||
var crctab = (function () {
|
||||
var c, tab = [];
|
||||
for (var n = 0; n < 256; n++) {
|
||||
c = n;
|
||||
for (var k = 0; k < 8; k++) {
|
||||
c = ((c & 1) ? (0xEDB88320 ^ (c >>> 1)) : (c >>> 1));
|
||||
}
|
||||
tab[n] = c;
|
||||
}
|
||||
return tab;
|
||||
})();
|
||||
|
||||
|
||||
function crc32(str) {
|
||||
var crc = 0 ^ (-1);
|
||||
for (var i = 0; i < str.length; i++) {
|
||||
crc = (crc >>> 8) ^ crctab[(crc ^ str.charCodeAt(i)) & 0xFF];
|
||||
}
|
||||
return ((crc ^ (-1)) >>> 0).toString(16);
|
||||
};
|
||||
|
||||
|
||||
function sortfiles(nodes) {
|
||||
var sopts = jread('fsort', [["lead", -1, ""], ["href", 1, ""]]);
|
||||
|
||||
try {
|
||||
var is_srch = false;
|
||||
if (nodes[0]['rp']) {
|
||||
is_srch = true;
|
||||
for (var b = 0, bb = nodes.length; b < bb; b++)
|
||||
nodes[b].ext = nodes[b].rp.split('.').pop();
|
||||
for (var b = 0; b < sopts.length; b++)
|
||||
if (sopts[b][0] == 'href')
|
||||
sopts[b][0] = 'rp';
|
||||
}
|
||||
for (var a = sopts.length - 1; a >= 0; a--) {
|
||||
var name = sopts[a][0], rev = sopts[a][1], typ = sopts[a][2];
|
||||
if (!name)
|
||||
continue;
|
||||
|
||||
if (name.indexOf('tags/') === 0) {
|
||||
name = name.slice(5);
|
||||
for (var b = 0, bb = nodes.length; b < bb; b++)
|
||||
nodes[b]._sv = nodes[b].tags[name];
|
||||
}
|
||||
else {
|
||||
for (var b = 0, bb = nodes.length; b < bb; b++) {
|
||||
var v = nodes[b][name];
|
||||
|
||||
if ((v + '').indexOf('<a ') === 0)
|
||||
v = v.split('>')[1];
|
||||
else if (name == "href" && v)
|
||||
v = uricom_dec(v)[0]
|
||||
|
||||
nodes[b]._sv = v;
|
||||
}
|
||||
}
|
||||
|
||||
var onodes = nodes.map((x) => x);
|
||||
nodes.sort(function (n1, n2) {
|
||||
var v1 = n1._sv,
|
||||
v2 = n2._sv;
|
||||
|
||||
if (v1 === undefined) {
|
||||
if (v2 === undefined) {
|
||||
return onodes.indexOf(n1) - onodes.indexOf(n2);
|
||||
}
|
||||
return -1 * rev;
|
||||
}
|
||||
if (v2 === undefined) return 1 * rev;
|
||||
|
||||
var ret = rev * (typ == 'int' ? (v1 - v2) : (v1.localeCompare(v2)));
|
||||
if (ret === 0)
|
||||
ret = onodes.indexOf(n1) - onodes.indexOf(n2);
|
||||
|
||||
return ret;
|
||||
});
|
||||
}
|
||||
for (var b = 0, bb = nodes.length; b < bb; b++) {
|
||||
delete nodes[b]._sv;
|
||||
if (is_srch)
|
||||
delete nodes[b].ext;
|
||||
}
|
||||
}
|
||||
catch (ex) {
|
||||
console.log("failed to apply sort config: " + ex);
|
||||
}
|
||||
return nodes;
|
||||
}
|
||||
|
||||
|
||||
function sortTable(table, col, cb) {
|
||||
var tb = table.tBodies[0],
|
||||
th = table.tHead.rows[0].cells,
|
||||
tr = Array.prototype.slice.call(tb.rows, 0),
|
||||
@@ -100,6 +191,27 @@ function sortTable(table, col) {
|
||||
th[a].className = th[a].className.replace(/ *sort-?1 */, " ");
|
||||
th[col].className += ' sort' + reverse;
|
||||
var stype = th[col].getAttribute('sort');
|
||||
try {
|
||||
var nrules = [], rules = jread("fsort", []);
|
||||
rules.unshift([th[col].getAttribute('name'), reverse, stype || '']);
|
||||
for (var a = 0; a < rules.length; a++) {
|
||||
var add = true;
|
||||
for (var b = 0; b < a; b++)
|
||||
if (rules[a][0] == rules[b][0])
|
||||
add = false;
|
||||
|
||||
if (add)
|
||||
nrules.push(rules[a]);
|
||||
|
||||
if (nrules.length >= 10)
|
||||
break;
|
||||
}
|
||||
jwrite("fsort", nrules);
|
||||
}
|
||||
catch (ex) {
|
||||
console.log("failed to persist sort rules, resetting: " + ex);
|
||||
jwrite("fsort", null);
|
||||
}
|
||||
var vl = [];
|
||||
for (var a = 0; a < tr.length; a++) {
|
||||
var cell = tr[a].cells[col];
|
||||
@@ -127,8 +239,9 @@ function sortTable(table, col) {
|
||||
return reverse * (a.localeCompare(b));
|
||||
});
|
||||
for (i = 0; i < tr.length; ++i) tb.appendChild(tr[vl[i][1]]);
|
||||
if (cb) cb();
|
||||
}
|
||||
function makeSortable(table) {
|
||||
function makeSortable(table, cb) {
|
||||
var th = table.tHead, i;
|
||||
th && (th = th.rows[0]) && (th = th.cells);
|
||||
if (th) i = th.length;
|
||||
@@ -136,13 +249,12 @@ function makeSortable(table) {
|
||||
while (--i >= 0) (function (i) {
|
||||
th[i].onclick = function (e) {
|
||||
ev(e);
|
||||
sortTable(table, i);
|
||||
sortTable(table, i, cb);
|
||||
};
|
||||
}(i));
|
||||
}
|
||||
|
||||
|
||||
|
||||
(function () {
|
||||
var ops = document.querySelectorAll('#ops>a');
|
||||
for (var a = 0; a < ops.length; a++) {
|
||||
@@ -193,7 +305,10 @@ function goto(dest) {
|
||||
goto();
|
||||
var op = sread('opmode');
|
||||
if (op !== null && op !== '.')
|
||||
goto(op);
|
||||
try {
|
||||
goto(op);
|
||||
}
|
||||
catch (ex) { }
|
||||
})();
|
||||
|
||||
|
||||
@@ -273,6 +388,7 @@ function unix2iso(ts) {
|
||||
|
||||
|
||||
function s2ms(s) {
|
||||
s = Math.floor(s);
|
||||
var m = Math.floor(s / 60);
|
||||
return m + ":" + ("0" + (s - m * 60)).slice(-2);
|
||||
}
|
||||
@@ -359,8 +475,10 @@ function bcfg_upd_ui(name, val) {
|
||||
|
||||
if (o.getAttribute('type') == 'checkbox')
|
||||
o.checked = val;
|
||||
else if (o)
|
||||
o.setAttribute('class', val ? 'on' : '');
|
||||
else if (o) {
|
||||
var fun = val ? 'add' : 'remove';
|
||||
o.classList[fun]('on');
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
242
docs/music-analysis.sh
Normal file
@@ -0,0 +1,242 @@
|
||||
#!/bin/bash
|
||||
echo please dont actually run this as a script
|
||||
exit 1
|
||||
|
||||
|
||||
# dependency-heavy, not particularly good fit
|
||||
pacman -S llvm10
|
||||
python3 -m pip install --user librosa
|
||||
git clone https://github.com/librosa/librosa.git
|
||||
|
||||
|
||||
# correct bpm for tracks with bad tags
|
||||
br='
|
||||
/Trip Trip Trip\(Hardcore Edit\).mp3/ {v=176}
|
||||
/World!!.BIG_SOS/ {v=175}
|
||||
/\/08\..*\(BIG_SOS Bootleg\)\.mp3/ {v=175}
|
||||
/もってけ!セーラ服.Asterisk DnB/ {v=175}
|
||||
/Rondo\(Asterisk DnB Re.mp3/ {v=175}
|
||||
/Ray Nautica 175 Edit/ {v=175;x="thunk"}
|
||||
/TOKIMEKI Language.Jauz/ {v=174}
|
||||
/YUPPUN Hardcore Remix\).mp3/ {v=174;x="keeps drifting"}
|
||||
/(èâAâï.î╧ûδ|バーチャリアル.狐耶)J-Core Remix\).mp3/ {v=172;x="hard"}
|
||||
/lucky train..Freezer/ {v=170}
|
||||
/Alf zero Bootleg ReMix/ {v=170}
|
||||
/Prisoner of Love.Kacky/ {v=170}
|
||||
/火炎 .Qota/ {v=170}
|
||||
/\(hu-zin Bootleg\)\.mp3/ {v=170}
|
||||
/15. STRAIGHT BET\(Milynn Bootleg\)\.mp3/ {v=170}
|
||||
/\/13.*\(Milynn Bootleg\)\.mp3/ {v=167;x="way hard"}
|
||||
/COLOR PLANET .10SAI . nijikon Remix\)\.mp3/ {v=165}
|
||||
/11\. (朝はご飯派|Æ⌐é═é▓ö╤öh)\.mp3/ {v=162}
|
||||
/09\. Where.s the core/ {v=160}
|
||||
/PLANET\(Koushif Jersey Club Bootleg\)remaster.mp3/ {v=160;x="starts ez turns bs"}
|
||||
/kened Soul - Madeon x Angel Beats!.mp3/ {v=160}
|
||||
/Dear Moments\(Mother Harlot Bootleg\)\.mp3/ {v=150}
|
||||
/POWER.Ringos UKG/ {v=140}
|
||||
/ブルー・フィールド\(Ringos UKG Remix\).mp3/ {v=135}
|
||||
/プラチナジェット.Ringo Remix..mp3/ {v=131.2}
|
||||
/Mirrorball Love \(TKM Bootleg Mix\).mp3/ {v=130}
|
||||
/Photon Melodies \(TKM Bootleg Mix\).mp3/ {v=128}
|
||||
/Trap of Love \(TKM Bootleg Mix\).mp3/ {v=128}
|
||||
/One Step \(TKM Bootleg Mix\)\.mp3/ {v=126}
|
||||
/04 (トリカムイ岩|âgâèâJâÇâCèΓ).mp3/ {v=125}
|
||||
/Get your Wish \(NAWN REMIX\)\.mp3/ {v=95}
|
||||
/Flicker .Nitro Fun/ {v=92}
|
||||
/\/14\..*suicat Remix/ {v=85.5;x="tricky"}
|
||||
/Yanagi Nagi - Harumodoki \(EO Remix\)\.mp3/ {v=150}
|
||||
/Azure - Nicology\.mp3/ {v=128;x="off by 5 how"}
|
||||
'
|
||||
|
||||
|
||||
# afun host, collects/grades the results
|
||||
runfun() { cores=8; touch run; rm -f /dev/shm/mres.*; t00=$(date +%s); tbc() { bc | sed -r 's/(\.[0-9]{2}).*/\1/'; }; for ((core=0; core<$cores; core++)); do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db 'select dur.w, dur.v, bpm.v from mt bpm join mt dur on bpm.w = dur.w where bpm.k = ".bpm" and dur.k = ".dur" order by dur.w' | uniq -w16 | while IFS=\| read w dur bpm; do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db "select rd, fn from up where substr(w,1,16) = '$w'" | sed -r "s/^/$bpm /"; done | grep mir/cr | tr \| / | awk '{v=$1;sub(/[^ ]+ /,"")} '"$br"' {printf "%s %s\n",v,$0}' | while read bpm fn; do [ -e run ] || break; n=$((n+1)); ncore=$((n%cores)); [ $ncore -eq $core ] || continue; t0=$(date +%s.%N); (afun || exit 1; t=$(date +%s.%N); td=$(echo "scale=3; $t - $t0" | tbc); bd=$(echo "scale=3; $bpm / $py" | tbc); printf '%4s sec, %4s orig, %6s py, %4s div, %s\n' $td $bpm $py $bd "$fn") | tee -a /dev/shm/mres.$ncore; rv=${PIPESTATUS[0]}; [ $rv -eq 0 ] || { echo "FAULT($rv): $fn"; }; done & done; wait 2>/dev/null; cat /dev/shm/mres.* | awk 'function prt(c) {printf "\033[3%sm%s\033[0m\n",c,$0} $8!="div,"{next} $5!~/^[0-9\.]+/{next} {meta=$3;det=$5;div=meta/det} div<0.7{det/=2} div>1.3{det*=2} {idet=sprintf("%.0f",det)} {idiff=idet-meta} meta>idet{idiff=meta-idet} idiff==0{n0++;prt(6);next} idiff==1{n1++;prt(3);next} idiff>10{nx++;prt(1);next} {n10++;prt(5)} END {printf "ok: %d 1off: %2s (%3s) 10off: %2s (%3s) fail: %2s\n",n0,n1,n0+n1,n10,n0+n1+n10,nx}'; te=$(date +%s); echo $((te-t00)) sec spent; }
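# the awk at the end of runfun above is what grades each detection; the same
# core logic as a small python sketch (mine, for readability only):
def grade(tagged_bpm, detected_bpm):
    ratio = tagged_bpm / detected_bpm
    if ratio < 0.7:      # detector ran double-time, fold it back down
        detected_bpm /= 2
    elif ratio > 1.3:    # detector ran half-time, fold it back up
        detected_bpm *= 2
    diff = abs(tagged_bpm - round(detected_bpm))
    if diff == 0: return "ok"
    if diff == 1: return "1off"
    return "fail" if diff > 10 else "10off"
# grade(175, 87.5) -> "ok" (half/double-tempo detections count as correct)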
|
||||
|
||||
|
||||
# ok: 8 1off: 62 ( 70) 10off: 86 (156) fail: 25 # 105 sec, librosa @ 8c archvm on 3700x w10
|
||||
# ok: 4 1off: 59 ( 63) 10off: 65 (128) fail: 53 # using original tags (bad)
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -t 60 /dev/shm/$core.wav || return 1; py="$(/home/ed/src/librosa/examples/beat_tracker.py /dev/shm/$core.wav x 2>&1 | awk 'BEGIN {v=1} /^Estimated tempo: /{v=$3} END {print v}')"; } runfun
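# (aside, mine: beat_tracker.py is librosa's bundled example; calling the
#  library directly should give roughly the same "Estimated tempo" figure)
# import librosa
# y, sr = librosa.load('/dev/shm/0.wav')
# tempo, beats = librosa.beat.beat_track(y=y, sr=sr)
# print(round(float(tempo), 1))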
|
||||
|
||||
|
||||
# ok: 119 1off: 5 (124) 10off: 8 (132) fail: 49 # 51 sec, vamp-example-fixedtempo
|
||||
# ok: 109 1off: 4 (113) 10off: 9 (122) fail: 59 # bad-tags
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 22050 -f f32le /dev/shm/$core.pcm || return 1; py="$(python3 -c 'import vamp; import numpy as np; f = open("/dev/shm/'$core'.pcm", "rb"); d = np.fromfile(f, dtype=np.float32); c = vamp.collect(d, 22050, "vamp-example-plugins:fixedtempo", parameters={"maxdflen":40}); print(c["list"][0]["label"].split(" ")[0])')"; }; runfun
|
||||
|
||||
|
||||
# ok: 102 1off: 61 (163) 10off: 12 (175) fail: 6 # 61 sec, vamp-qm-tempotracker
|
||||
# ok: 80 1off: 48 (128) 10off: 11 (139) fail: 42 # bad-tags
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 22050 -f f32le /dev/shm/$core.pcm || return 1; py="$(python3 -c 'import vamp; import numpy as np; f = open("/dev/shm/'$core'.pcm", "rb"); d = np.fromfile(f, dtype=np.float32); c = vamp.collect(d, 22050, "qm-vamp-plugins:qm-tempotracker", parameters={"inputtempo":150}); v = [float(x["label"].split(" ")[0]) for x in c["list"] if x["label"]]; v = list(sorted(v))[len(v)//4:-len(v)//4]; print(round(sum(v) / len(v), 1))')"; }; runfun
|
||||
|
||||
|
||||
# ok: 133 1off: 32 (165) 10off: 12 (177) fail: 3 # 51 sec, vamp-beatroot
|
||||
# ok: 101 1off: 22 (123) 10off: 16 (139) fail: 39 # bad-tags
|
||||
# note: some tracks fully fail to analyze (unlike the others which always provide a guess)
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 22050 -f f32le /dev/shm/$core.pcm || return 1; py="$(python3 -c 'import vamp; import numpy as np; f = open("/dev/shm/'$core'.pcm", "rb"); d = np.fromfile(f, dtype=np.float32); c = vamp.collect(d, 22050, "beatroot-vamp:beatroot"); cl=c["list"]; print(round(60*((len(cl)-1)/(float(cl[-1]["timestamp"]-cl[1]["timestamp"]))), 2))')"; }; runfun
|
||||
|
||||
|
||||
# ok: 124 1off: 9 (133) 10off: 40 (173) fail: 8 # 231 sec, essentia/full
|
||||
# ok: 109 1off: 8 (117) 10off: 22 (139) fail: 42 # bad-tags
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 /dev/shm/$core.wav || return 1; py="$(python3 -c 'import essentia; import essentia.standard as es; fe, fef = es.MusicExtractor(lowlevelStats=["mean", "stdev"], rhythmStats=["mean", "stdev"], tonalStats=["mean", "stdev"])("/dev/shm/'$core'.wav"); print("{:.2f}".format(fe["rhythm.bpm"]))')"; }; runfun
|
||||
|
||||
|
||||
# ok: 113 1off: 18 (131) 10off: 46 (177) fail: 4 # 134 sec, essentia/re2013
|
||||
# ok: 101 1off: 15 (116) 10off: 26 (142) fail: 39 # bad-tags
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 /dev/shm/$core.wav || return 1; py="$(python3 -c 'from essentia.standard import *; a=MonoLoader(filename="/dev/shm/'$core'.wav")(); bpm,beats,confidence,_,intervals=RhythmExtractor2013(method="multifeature")(a); print("{:.2f}".format(bpm))')"; }; runfun
|
||||
|
||||
|
||||
|
||||
########################################################################
|
||||
##
|
||||
## key detection
|
||||
##
|
||||
########################################################################
|
||||
|
||||
|
||||
|
||||
# console scriptlet reusing keytabs from browser.js
|
||||
var m=''; for (var a=0; a<24; a++) m += 's/\\|(' + maps["traktor_sharps"][a].trim() + "|" + maps["rekobo_classic"][a].trim() + "|" + maps["traktor_musical"][a].trim() + "|" + maps["traktor_open"][a].trim() + ')$/|' + maps["rekobo_alnum"][a].trim() + '/;'; console.log(m);
|
||||
|
||||
|
||||
# translate to camelot
|
||||
re='s/\|(B|B|B|6d)$/|1B/;s/\|(F#|F#|Gb|7d)$/|2B/;s/\|(C#|Db|Db|8d)$/|3B/;s/\|(G#|Ab|Ab|9d)$/|4B/;s/\|(D#|Eb|Eb|10d)$/|5B/;s/\|(A#|Bb|Bb|11d)$/|6B/;s/\|(F|F|F|12d)$/|7B/;s/\|(C|C|C|1d)$/|8B/;s/\|(G|G|G|2d)$/|9B/;s/\|(D|D|D|3d)$/|10B/;s/\|(A|A|A|4d)$/|11B/;s/\|(E|E|E|5d)$/|12B/;s/\|(G#m|Abm|Abm|6m)$/|1A/;s/\|(D#m|Ebm|Ebm|7m)$/|2A/;s/\|(A#m|Bbm|Bbm|8m)$/|3A/;s/\|(Fm|Fm|Fm|9m)$/|4A/;s/\|(Cm|Cm|Cm|10m)$/|5A/;s/\|(Gm|Gm|Gm|11m)$/|6A/;s/\|(Dm|Dm|Dm|12m)$/|7A/;s/\|(Am|Am|Am|1m)$/|8A/;s/\|(Em|Em|Em|2m)$/|9A/;s/\|(Bm|Bm|Bm|3m)$/|10A/;s/\|(F#m|F#m|Gbm|4m)$/|11A/;s/\|(C#m|Dbm|Dbm|5m)$/|12A/;'
|
||||
|
||||
|
||||
# runner/wrapper
|
||||
runfun() { cores=8; touch run; tbc() { bc | sed -r 's/(\.[0-9]{2}).*/\1/'; }; for ((core=0; core<$cores; core++)); do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db 'select dur.w, dur.v, key.v from mt key join mt dur on key.w = dur.w where key.k = "key" and dur.k = ".dur" order by dur.w' | uniq -w16 | grep -vE '(Off-Key|None)$' | sed -r "s/ //g;$re" | uniq -w16 | while IFS=\| read w dur bpm; do sqlite3 /mnt/Users/ed/Music/.hist/up2k.db "select rd, fn from up where substr(w,1,16) = '$w'" | sed -r "s/^/$bpm /"; done| grep mir/cr | tr \| / | while read key fn; do [ -e run ] || break; n=$((n+1)); ncore=$((n%cores)); [ $ncore -eq $core ] || continue; t0=$(date +%s.%N); (afun || exit 1; t=$(date +%s.%N); td=$(echo "scale=3; $t - $t0" | tbc); [ "$key" = "$py" ] && c=2 || c=5; printf '%4s sec, %4s orig, \033[3%dm%4s py,\033[0m %s\n' $td "$key" $c "$py" "$fn") || break; done & done; time wait 2>/dev/null; }
|
||||
|
||||
|
||||
# ok: 26 1off: 10 2off: 1 fail: 3 # 15 sec, keyfinder
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 -t 60 /dev/shm/$core.wav || break; py="$(python3 -c 'import sys; import keyfinder; print(keyfinder.key(sys.argv[1]).camelot())' "/dev/shm/$core.wav")"; }; runfun
|
||||
|
||||
|
||||
# https://github.com/MTG/essentia/raw/master/src/examples/tutorial/example_key_by_steps_streaming.py
|
||||
# https://essentia.upf.edu/reference/std_Key.html # edma edmm braw bgate
|
||||
sed -ri 's/^(key = Key\().*/\1profileType="bgate")/' example_key_by_steps_streaming.py
|
||||
afun() { ffmpeg -hide_banner -v fatal -nostdin -ss $((dur/3)) -y -i /mnt/Users/ed/Music/"$fn" -ac 1 -ar 44100 -t 60 /dev/shm/$core.wav || break; py="$(python3 example_key_by_steps_streaming.py /dev/shm/$core.{wav,yml} 2>/dev/null | sed -r "s/ major//;s/ minor/m/;s/^/|/;$re;s/.//")"; }; runfun
|
||||
|
||||
|
||||
|
||||
########################################################################
|
||||
##
|
||||
## misc
|
||||
##
|
||||
########################################################################
|
||||
|
||||
|
||||
|
||||
python3 -m pip install --user vamp
|
||||
|
||||
import librosa
|
||||
d, r = librosa.load('/dev/shm/0.wav')
|
||||
d.dtype
|
||||
# dtype('float32')
|
||||
d.shape
|
||||
# (1323000,)
|
||||
d
|
||||
# array([-1.9614939e-08, 1.8037968e-08, -1.4106059e-08, ...,
|
||||
# 1.2024145e-01, 2.7462116e-01, 1.6202132e-01], dtype=float32)
|
||||
|
||||
|
||||
|
||||
import vamp
|
||||
c = vamp.collect(d, r, "vamp-example-plugins:fixedtempo")
|
||||
c
|
||||
# {'list': [{'timestamp': 0.005804988, 'duration': 9.999092971, 'label': '110.0 bpm', 'values': array([109.98116], dtype=float32)}]}
|
||||
|
||||
|
||||
|
||||
ffmpeg -ss 48 -i /mnt/Users/ed/Music/mir/cr-a/'I Beg You(ths Bootleg).wav' -ac 1 -ar 22050 -f f32le -t 60 /dev/shm/f32.pcm
|
||||
|
||||
import numpy as np
|
||||
f = open('/dev/shm/f32.pcm', 'rb')
|
||||
d = np.fromfile(f, dtype=np.float32)
|
||||
d
|
||||
array([-0.17803933, -0.27206388, -0.41586545, ..., -0.04940119,
|
||||
-0.0267825 , -0.03564296], dtype=float32)
|
||||
|
||||
d = np.reshape(d, [1, -1])
|
||||
d
|
||||
array([[-0.17803933, -0.27206388, -0.41586545, ..., -0.04940119,
|
||||
-0.0267825 , -0.03564296]], dtype=float32)
|
||||
|
||||
|
||||
|
||||
import vampyhost
|
||||
print("\n".join(vampyhost.list_plugins()))
|
||||
|
||||
mvamp:marsyas_bextract_centroid
|
||||
mvamp:marsyas_bextract_lpcc
|
||||
mvamp:marsyas_bextract_lsp
|
||||
mvamp:marsyas_bextract_mfcc
|
||||
mvamp:marsyas_bextract_rolloff
|
||||
mvamp:marsyas_bextract_scf
|
||||
mvamp:marsyas_bextract_sfm
|
||||
mvamp:marsyas_bextract_zero_crossings
|
||||
mvamp:marsyas_ibt
|
||||
mvamp:zerocrossing
|
||||
qm-vamp-plugins:qm-adaptivespectrogram
|
||||
qm-vamp-plugins:qm-barbeattracker
|
||||
qm-vamp-plugins:qm-chromagram
|
||||
qm-vamp-plugins:qm-constantq
|
||||
qm-vamp-plugins:qm-dwt
|
||||
qm-vamp-plugins:qm-keydetector
|
||||
qm-vamp-plugins:qm-mfcc
|
||||
qm-vamp-plugins:qm-onsetdetector
|
||||
qm-vamp-plugins:qm-segmenter
|
||||
qm-vamp-plugins:qm-similarity
|
||||
qm-vamp-plugins:qm-tempotracker
|
||||
qm-vamp-plugins:qm-tonalchange
|
||||
qm-vamp-plugins:qm-transcription
|
||||
vamp-aubio:aubiomelenergy
|
||||
vamp-aubio:aubiomfcc
|
||||
vamp-aubio:aubionotes
|
||||
vamp-aubio:aubioonset
|
||||
vamp-aubio:aubiopitch
|
||||
vamp-aubio:aubiosilence
|
||||
vamp-aubio:aubiospecdesc
|
||||
vamp-aubio:aubiotempo
|
||||
vamp-example-plugins:amplitudefollower
|
||||
vamp-example-plugins:fixedtempo
|
||||
vamp-example-plugins:percussiononsets
|
||||
vamp-example-plugins:powerspectrum
|
||||
vamp-example-plugins:spectralcentroid
|
||||
vamp-example-plugins:zerocrossing
|
||||
vamp-rubberband:rubberband
|
||||
|
||||
|
||||
|
||||
plug = vampyhost.load_plugin("vamp-example-plugins:fixedtempo", 22050, 0)
|
||||
plug.info
|
||||
{'apiVersion': 2, 'pluginVersion': 1, 'identifier': 'fixedtempo', 'name': 'Simple Fixed Tempo Estimator', 'description': 'Study a short section of audio and estimate its tempo, assuming the tempo is constant', 'maker': 'Vamp SDK Example Plugins', 'copyright': 'Code copyright 2008 Queen Mary, University of London. Freely redistributable (BSD license)'}
|
||||
plug = vampyhost.load_plugin("qm-vamp-plugins:qm-tempotracker", 22050, 0)
|
||||
from pprint import pprint; pprint(plug.parameters)
|
||||
|
||||
|
||||
|
||||
for c in plug.parameters: print("{} \033[36m{} [\033[33m{}\033[36m] = {}\033[0m".format(c["identifier"], c["name"], "\033[36m, \033[33m".join(c["valueNames"]), c["valueNames"][int(c["defaultValue"])])) if "valueNames" in c else print("{} \033[36m{} [\033[33m{}..{}\033[36m] = {}\033[0m".format(c["identifier"], c["name"], c["minValue"], c["maxValue"], c["defaultValue"]))
|
||||
|
||||
|
||||
|
||||
beatroot-vamp:beatroot
|
||||
cl=c["list"]; 60*((len(cl)-1)/(float(cl[-1]["timestamp"]-cl[1]["timestamp"])))
|
||||
|
||||
|
||||
|
||||
ffmpeg -ss 48 -i /mnt/Users/ed/Music/mir/cr-a/'I Beg You(ths Bootleg).wav' -ac 1 -ar 22050 -f f32le -t 60 /dev/shm/f32.pcm
|
||||
# 128 bpm, key 5A Cm
|
||||
|
||||
import vamp
|
||||
import numpy as np
|
||||
f = open('/dev/shm/f32.pcm', 'rb')
|
||||
d = np.fromfile(f, dtype=np.float32)
|
||||
c = vamp.collect(d, 22050, "vamp-example-plugins:fixedtempo", parameters={"maxdflen":40})
|
||||
c["list"][0]["label"]
|
||||
# 127.6 bpm
|
||||
|
||||
c = vamp.collect(d, 22050, "qm-vamp-plugins:qm-tempotracker", parameters={"inputtempo":150})
|
||||
print("\n".join([v["label"] for v in c["list"] if v["label"]]))
|
||||
v = [float(x["label"].split(' ')[0]) for x in c["list"] if x["label"]]
|
||||
v = list(sorted(v))[len(v)//4:-len(v)//4]
|
||||
v = sum(v) / len(v)
|
||||
# 128.1 bpm
|
||||
|
||||
@@ -67,6 +67,36 @@ wget -S --header='Accept-Encoding: gzip' -U 'MSIE 6.0; SV1' http://127.0.0.1:392
|
||||
shab64() { sp=$1; f="$2"; v=0; sz=$(stat -c%s "$f"); while true; do w=$((v+sp*1024*1024)); printf $(tail -c +$((v+1)) "$f" | head -c $((w-v)) | sha512sum | cut -c-64 | sed -r 's/ .*//;s/(..)/\\x\1/g') | base64 -w0 | cut -c-43 | tr '+/' '-_'; v=$w; [ $v -lt $sz ] || break; done; }
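# shab64 above appears to mirror the up2k per-chunk hash format (sha512 cut to
# 32 bytes, url-safe base64, first 43 chars); same transform as a python sketch,
# path and chunk size are just examples:
import hashlib, base64
def chunk_hashes(path, chunk_mib):
    hashes = []
    with open(path, 'rb') as f:
        while True:
            buf = f.read(chunk_mib * 1024 * 1024)
            if not buf:
                break
            hashes.append(base64.urlsafe_b64encode(hashlib.sha512(buf).digest()[:32]).decode()[:43])
    return hashes
# chunk_hashes('some.bin', 8)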
|
||||
|
||||
|
||||
##
|
||||
## poll url for performance issues
|
||||
|
||||
command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (time wget http://127.0.0.1:3923/?ls -qO- | jq -C '.files[]|{sz:.sz,ta:.tags.artist,tb:.tags.".bpm"}|del(.[]|select(.==null))' | awk -F\" '/"/{t[$2]++} END {for (k in t){v=t[k];p=sprintf("%" (v+1) "s",v);gsub(/ /,"#",p);printf "\033[36m%s\033[33m%s ",k,p}}') 2>&1 | awk -v ts=$t 'NR==1{t1=$0} NR==2{sub(/.*0m/,"");sub(/s$/,"");t2=$0;c=2; if(t2>0.3){c=3} if(t2>0.8){c=1} } END{sub(/[0-9]{6}$/,"",ts);printf "%s \033[3%dm%s %s\033[0m\n",ts,c,t2,t1}'; sleep 0.1 || break; done
|
||||
|
||||
|
||||
##
|
||||
## sqlite3 stuff
|
||||
|
||||
# find dupe metadata keys
|
||||
sqlite3 up2k.db 'select mt1.w, mt1.k, mt1.v, mt2.v from mt mt1 inner join mt mt2 on mt1.w = mt2.w where mt1.k = mt2.k and mt1.rowid != mt2.rowid'
|
||||
|
||||
# partial reindex by deleting all tags for a list of files
|
||||
time sqlite3 up2k.db 'select mt1.w from mt mt1 inner join mt mt2 on mt1.w = mt2.w where mt1.k = +mt2.k and mt1.rowid != mt2.rowid' > warks
|
||||
cat warks | while IFS= read -r x; do sqlite3 up2k.db "delete from mt where w = '$x'"; done
|
||||
|
||||
# dump all dbs
|
||||
find -iname up2k.db | while IFS= read -r x; do sqlite3 "$x" 'select substr(w,1,12), rd, fn from up' | sed -r 's/\|/ \| /g' | while IFS= read -r y; do printf '%s | %s\n' "$x" "$y"; done; done
|
||||
|
||||
|
||||
##
|
||||
## media
|
||||
|
||||
# split track into test files
|
||||
e=6; s=10; d=~/dev/copyparty/srv/aus; n=1; p=0; e=$((e*60)); rm -rf $d; mkdir $d; while true; do ffmpeg -hide_banner -ss $p -i 'nervous_testpilot - office.mp3' -c copy -t $s $d/$(printf %04d $n).mp3; n=$((n+1)); p=$((p+s)); [ $p -gt $e ] && break; done
|
||||
|
||||
-v srv/aus:aus:r:ce2dsa:ce2ts:cmtp=fgsfds=bin/mtag/sleep.py
|
||||
sqlite3 .hist/up2k.db 'select * from mt where k="fgsfds" or k="t:mtp"' | tee /dev/stderr | wc -l
|
||||
|
||||
|
||||
##
|
||||
## vscode
|
||||
|
||||
@@ -96,6 +126,18 @@ for d in /usr /var; do find $d -type f -size +30M 2>/dev/null; done | while IFS=
|
||||
brew install python@2
|
||||
pip install virtualenv
|
||||
|
||||
# readme toc
|
||||
cat README.md | awk '!/^#/{next} {lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab)} {printf "%" ((lv-1)*4+1) "s [%s](#%s)\n", "*",$0,bab}'
|
||||
|
||||
# fix firefox phantom breakpoints,
|
||||
# suggestions from bugtracker, doesnt work (debugger is not attachable)
|
||||
devtools settings >> advanced >> enable browser chrome debugging + enable remote debugging
|
||||
burger > developer >> browser toolbox (ctrl-alt-shift-i)
|
||||
iframe btn topright >> chrome://devtools/content/debugger/index.html
|
||||
dbg.asyncStore.pendingBreakpoints = {}
|
||||
|
||||
# fix firefox phantom breakpoints
|
||||
about:config >> devtools.debugger.prefs-schema-version = -1
|
||||
|
||||
##
|
||||
## http 206
|
||||
|
||||
@@ -20,6 +20,7 @@ set -e
|
||||
# -rwxr-xr-x 0 ed ed 183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py
|
||||
|
||||
|
||||
command -v gnutar && tar() { gnutar "$@"; }
|
||||
command -v gtar && tar() { gtar "$@"; }
|
||||
command -v gsed && sed() { gsed "$@"; }
|
||||
td="$(mktemp -d)"
|
||||
@@ -29,11 +30,11 @@ pwd
|
||||
|
||||
|
||||
dl_text() {
|
||||
command -v curl && exec curl "$@"
|
||||
command -v curl >/dev/null && exec curl "$@"
|
||||
exec wget -O- "$@"
|
||||
}
|
||||
dl_files() {
|
||||
command -v curl && exec curl -L --remote-name-all "$@"
|
||||
command -v curl >/dev/null && exec curl -L --remote-name-all "$@"
|
||||
exec wget "$@"
|
||||
}
|
||||
export -f dl_files
|
||||
|
||||
@@ -28,6 +28,13 @@ gtar=$(command -v gtar || command -v gnutar) || true
|
||||
unexpand() { gunexpand "$@"; }
|
||||
command -v grealpath >/dev/null &&
|
||||
realpath() { grealpath "$@"; }
|
||||
|
||||
[ -e /opt/local/bin/bzip2 ] &&
|
||||
bzip2() { /opt/local/bin/bzip2 "$@"; }
|
||||
}
|
||||
pybin=$(command -v python3 || command -v python) || {
|
||||
echo need python
|
||||
exit 1
|
||||
}
|
||||
|
||||
[ -e copyparty/__main__.py ] || cd ..
|
||||
@@ -38,11 +45,15 @@ gtar=$(command -v gtar || command -v gnutar) || true
|
||||
exit 1
|
||||
}
|
||||
|
||||
do_sh=1
|
||||
do_py=1
|
||||
while [ ! -z "$1" ]; do
|
||||
[ "$1" = clean ] && clean=1 && shift && continue
|
||||
[ "$1" = re ] && repack=1 && shift && continue
|
||||
[ "$1" = no-ogv ] && no_ogv=1 && shift && continue
|
||||
[ "$1" = no-cm ] && no_cm=1 && shift && continue
|
||||
[ "$1" = no-sh ] && do_sh= && shift && continue
|
||||
[ "$1" = no-py ] && do_py= && shift && continue
|
||||
break
|
||||
done
|
||||
|
||||
@@ -150,7 +161,7 @@ find .. -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
|
||||
find .. -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
|
||||
|
||||
echo use smol web deps
|
||||
rm -f copyparty/web/deps/*.full.*
|
||||
rm -f copyparty/web/deps/*.full.* copyparty/web/{Makefile,splash.js}
|
||||
|
||||
# it's fine dw
|
||||
grep -lE '\.full\.(js|css)' copyparty/web/* |
|
||||
@@ -169,10 +180,11 @@ done
|
||||
sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f"
|
||||
}
|
||||
|
||||
[ $repack ] ||
|
||||
find | grep -E '\.py$' |
|
||||
grep -vE '__version__' |
|
||||
tr '\n' '\0' |
|
||||
xargs -0 python ../scripts/uncomment.py
|
||||
xargs -0 $pybin ../scripts/uncomment.py
|
||||
|
||||
f=dep-j2/jinja2/constants.py
|
||||
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
|
||||
@@ -180,7 +192,7 @@ tmv "$f"
|
||||
|
||||
# up2k goes from 28k to 22k laff
|
||||
echo entabbening
|
||||
find | grep -E '\.(js|css|html|py)$' | while IFS= read -r f; do
|
||||
find | grep -E '\.(js|css|html)$' | while IFS= read -r f; do
|
||||
unexpand -t 4 --first-only <"$f" >t
|
||||
tmv "$f"
|
||||
done
|
||||
@@ -194,25 +206,36 @@ tar -cf tar "${args[@]}" --numeric-owner copyparty dep-j2
|
||||
|
||||
echo compressing tar
|
||||
# detect best level; bzip2 -7 is usually better than -9
|
||||
for n in {2..9}; do cp tar t.$n; bzip2 -$n t.$n & done; wait; mv -v $(ls -1S t.*.bz2 | tail -n 1) tar.bz2
|
||||
for n in {2..9}; do cp tar t.$n; xz -ze$n t.$n & done; wait; mv -v $(ls -1S t.*.xz | tail -n 1) tar.xz
|
||||
rm t.*
|
||||
[ $do_py ] && { for n in {2..9}; do cp tar t.$n; bzip2 -$n t.$n & done; wait; mv -v $(ls -1S t.*.bz2 | tail -n 1) tar.bz2; }
|
||||
[ $do_sh ] && { for n in {2..9}; do cp tar t.$n; xz -ze$n t.$n & done; wait; mv -v $(ls -1S t.*.xz | tail -n 1) tar.xz; }
|
||||
rm t.* || true
|
||||
exts=()
|
||||
|
||||
|
||||
[ $do_sh ] && {
|
||||
exts+=(sh)
|
||||
echo creating unix sfx
|
||||
(
|
||||
sed "s/PACK_TS/$ts/; s/PACK_HTS/$hts/; s/CPP_VER/$ver/" <../scripts/sfx.sh |
|
||||
grep -E '^sfx_eof$' -B 9001;
|
||||
cat tar.xz
|
||||
) >$sfx_out.sh
|
||||
}
|
||||
|
||||
|
||||
[ $do_py ] && {
|
||||
exts+=(py)
|
||||
echo creating generic sfx
|
||||
python ../scripts/sfx.py --sfx-make tar.bz2 $ver $ts
|
||||
$pybin ../scripts/sfx.py --sfx-make tar.bz2 $ver $ts
|
||||
mv sfx.out $sfx_out.py
|
||||
chmod 755 $sfx_out.*
|
||||
}
|
||||
|
||||
|
||||
printf "done:\n"
|
||||
printf " %s\n" "$(realpath $sfx_out)."{sh,py}
|
||||
# rm -rf *
|
||||
for ext in ${exts[@]}; do
|
||||
printf " %s\n" "$(realpath $sfx_out)."$ext
|
||||
done
|
||||
|
||||
# tar -tvf ../sfx/tar | sed -r 's/(.* ....-..-.. ..:.. )(.*)/\2 `` \1/' | sort | sed -r 's/(.*) `` (.*)/\2 \1/'| less
|
||||
# for n in {1..9}; do tar -tf tar | grep -vE '/$' | sed -r 's/(.*)\.(.*)/\2.\1/' | sort | sed -r 's/([^\.]+)\.(.*)/\2.\1/' | tar -cT- | bzip2 -c$n | wc -c; done
|
||||
# apk add bash python3 tar xz bzip2
|
||||
# while true; do ./make-sfx.sh; for f in ..//dist/copyparty-sfx.{sh,py}; do mv $f $f.$(wc -c <$f | awk '{print$1}'); done; done
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
# coding: latin-1
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import os, sys, time, shutil, signal, tarfile, hashlib, platform, tempfile
|
||||
import subprocess as sp
|
||||
import os, sys, time, shutil, runpy, tarfile, hashlib, platform, tempfile, traceback
|
||||
|
||||
"""
|
||||
run me with any version of python, i will unpack and run copyparty
|
||||
@@ -344,20 +343,24 @@ def get_payload():
|
||||
break
|
||||
|
||||
|
||||
def confirm():
|
||||
def confirm(rv):
|
||||
msg()
|
||||
msg(traceback.format_exc())
|
||||
msg("*** hit enter to exit ***")
|
||||
try:
|
||||
raw_input() if PY2 else input()
|
||||
except:
|
||||
pass
|
||||
|
||||
sys.exit(rv)
|
||||
|
||||
|
||||
def run(tmp, j2ver):
|
||||
global cpp
|
||||
|
||||
msg("jinja2:", j2ver or "bundled")
|
||||
msg("sfxdir:", tmp)
|
||||
msg()
|
||||
|
||||
# "systemd-tmpfiles-clean.timer"?? HOW do you even come up with this shit
|
||||
try:
|
||||
@@ -373,30 +376,16 @@ def run(tmp, j2ver):
|
||||
if j2ver:
|
||||
del ld[-1]
|
||||
|
||||
cmd = (
|
||||
"import sys, runpy; "
|
||||
+ "".join(['sys.path.insert(0, r"' + x + '"); ' for x in ld])
|
||||
+ 'runpy.run_module("copyparty", run_name="__main__")'
|
||||
)
|
||||
cmd = [sys.executable, "-c", cmd] + list(sys.argv[1:])
|
||||
for x in ld:
|
||||
sys.path.insert(0, x)
|
||||
|
||||
cmd = [str(x) for x in cmd]
|
||||
msg("\n", cmd, "\n")
|
||||
cpp = sp.Popen(cmd)
|
||||
try:
|
||||
cpp.wait()
|
||||
runpy.run_module(str("copyparty"), run_name=str("__main__"))
|
||||
except SystemExit as ex:
|
||||
if ex.code:
|
||||
confirm(ex.code)
|
||||
except:
|
||||
cpp.wait()
|
||||
|
||||
if cpp.returncode != 0:
|
||||
confirm()
|
||||
|
||||
sys.exit(cpp.returncode)
|
||||
|
||||
|
||||
def bye(sig, frame):
|
||||
if cpp is not None:
|
||||
cpp.terminate()
|
||||
confirm(1)
|
||||
|
||||
|
||||
def main():
|
||||
@@ -430,8 +419,6 @@ def main():
|
||||
|
||||
# skip 0
|
||||
|
||||
signal.signal(signal.SIGTERM, bye)
|
||||
|
||||
tmp = unpack()
|
||||
|
||||
try:
|
||||
@@ -439,7 +426,7 @@ def main():
|
||||
except:
|
||||
j2ver = None
|
||||
|
||||
return run(tmp, j2ver)
|
||||
run(tmp, j2ver)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
||||
@@ -18,7 +18,9 @@ from copyparty import util
|
||||
|
||||
class Cfg(Namespace):
|
||||
def __init__(self, a=[], v=[], c=None):
|
||||
ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr mte".split()}
|
||||
ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
|
||||
ex["mtp"] = []
|
||||
ex["mte"] = "a"
|
||||
super(Cfg, self).__init__(a=a, v=v, c=c, **ex)
|
||||
|
||||
|
||||
|
||||