Compare commits

...

9 Commits

Author SHA1 Message Date
ed
a900c36395 v1.3.15 2022-08-18 01:02:19 +02:00
ed
1d9b324d3e explain w/a wasm leaks in workers (chrome bug) 2022-08-18 01:02:06 +02:00
ed
539e7b8efe help chrome gc by reusing one filereader 2022-08-18 00:05:32 +02:00
ed
50a477ee47 up2k-hook-ytid: upload into subdirs by id 2022-08-15 21:52:41 +02:00
ed
7000123a8b v1.3.14 2022-08-15 20:25:31 +02:00
ed
d48a7d2398 provide tagparsers with uploader info 2022-08-15 20:23:17 +02:00
ed
389a00ce59 v1.3.13 2022-08-15 19:11:21 +02:00
ed
7a460de3c2 windows db fix 2022-08-15 18:01:28 +02:00
ed
8ea1f4a751 idx multimedia format/container type 2022-08-15 17:56:13 +02:00
14 changed files with 203 additions and 38 deletions

38
bin/mtag/mousepad.py Normal file
View File

@@ -0,0 +1,38 @@
#!/usr/bin/env python3
"""
mtp test -- opens a texteditor

usage:
  -vsrv/v1:v1:r:c,mte=+x1:c,mtp=x1=ad,p,bin/mtag/mousepad.py

explained:
  c,mte: list of tags to index in this volume
  c,mtp: add new tag provider
     x1: dummy tag to provide
     ad: dontcare if audio or not
      p: priority 1 (run after initial tag-scan with ffprobe or mutagen)
"""
import os
import sys
import subprocess as sp


def main():
    """Open mousepad to display either the uploaded file or the tags on stdin."""
    env = os.environ.copy()
    # assumes a local X server on :0.0 -- adjust if needed
    env["DISPLAY"] = ":0.0"

    if False:
        # open the uploaded file (path is the last argument)
        fp = sys.argv[-1]
    else:
        # display stdin contents (`oth_tags` json)
        fp = "/dev/stdin"

    # bugfix: env was built but never handed to the subprocess,
    # so the DISPLAY override had no effect
    p = sp.Popen(["/usr/bin/mousepad", fp], env=env)
    p.communicate()


if __name__ == "__main__":
    main()

View File

@@ -47,8 +47,8 @@ CONDITIONAL_UPLOAD = True
def main():
fp = sys.argv[1]
if CONDITIONAL_UPLOAD:
fp = sys.argv[1]
zb = sys.stdin.buffer.read()
zs = zb.decode("utf-8", "replace")
md = json.loads(zs)

View File

@@ -97,7 +97,7 @@ def main():
zs = (
"ffmpeg -y -hide_banner -nostdin -v warning"
+ " -err_detect +crccheck+bitstream+buffer+careful+compliant+aggressive+explode"
" -xerror -i"
+ " -xerror -i"
)
cmd = zs.encode("ascii").split(b" ") + [fsenc(fp)]

View File

@@ -51,6 +51,8 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
cname = name, // will clobber
sz = fobj.size,
ids = [],
fn_ids = [],
md_ids = [],
id_ok = false,
m;
@@ -71,7 +73,7 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
cname = cname.replace(m[1], '');
yt_ids.add(m[1]);
ids.push(m[1]);
fn_ids.unshift(m[1]);
}
// look for IDs in video metadata,
@@ -110,10 +112,13 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
console.log(`found ${m} @${bofs}, ${name} `);
yt_ids.add(m);
if (!has(ids, m)) {
ids.push(m);
if (!has(fn_ids, m) && !has(md_ids, m)) {
md_ids.push(m);
md_only.push(`${m} ${name}`);
}
else
// id appears several times; make it preferred
md_ids.unshift(m);
// bail after next iteration
chunk = nchunks - 1;
@@ -130,6 +135,13 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
}
}
}
for (var yi of md_ids)
ids.push(yi);
for (var yi of fn_ids)
if (!has(ids, yi))
ids.push(yi);
}
if (md_only.length)
@@ -164,6 +176,7 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
function process_id_list(txt) {
var wanted_ids = new Set(txt.trim().split('\n')),
name_id = {},
wanted_names = new Set(), // basenames with a wanted ID
wanted_files = new Set(); // filedrops
@@ -174,8 +187,11 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
wanted_files.add(good_files[a]);
var m = /(.*)\.(mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name);
if (m)
wanted_names.add(m[1]);
if (!m)
continue;
wanted_names.add(m[1]);
name_id[m[1]] = file_ids[a][b];
break;
}
@@ -189,6 +205,9 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
name = name.replace(/\.[^\.]+$/, '');
if (wanted_names.has(name)) {
wanted_files.add(good_files[a]);
var subdir = `${name_id[name]}-${Date.now()}-${a}`;
good_files[a][1] = subdir + '/' + good_files[a][1].split(/\//g).pop();
break;
}
}

View File

@@ -335,7 +335,7 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
except:
fk_salt = "hunter2"
cores = os.cpu_count() if hasattr(os, "cpu_count") else 4
cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 4
hcores = min(cores, 3) # 4% faster than 4+ on py3.9 @ r5-4500U
sects = [
@@ -630,9 +630,9 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
ap2.add_argument("--mtag-v", action="store_true", help="verbose tag scanning; print errors from mtp subprocesses and such")
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash")
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,fmt,res,.fps,ahash,vhash")
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.)",
default=".vq,.aq,vc,ac,res,.fps")
default=".vq,.aq,vc,ac,fmt,res,.fps")
ap2.add_argument("-mtp", metavar="M=[f,]BIN", type=u, action="append", help="read tag M using program BIN to parse the file")
ap2 = ap.add_argument_group('ui options')

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (1, 3, 12)
VERSION = (1, 3, 15)
CODENAME = "god dag"
BUILD_DT = (2022, 8, 13)
BUILD_DT = (2022, 8, 18)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -178,7 +178,7 @@ def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[
]
if typ == "format":
kvm = [["duration", ".dur"], ["bit_rate", ".q"]]
kvm = [["duration", ".dur"], ["bit_rate", ".q"], ["format_name", "fmt"]]
for sk, rk in kvm:
v1 = strm.get(sk)
@@ -239,6 +239,9 @@ def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[
if ".q" in ret:
del ret[".q"]
if "fmt" in ret:
ret["fmt"] = ret["fmt"].split(",")[0]
if ".resw" in ret and ".resh" in ret:
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])

View File

@@ -125,7 +125,7 @@ class Up2k(object):
self.mtp_parsers: dict[str, dict[str, MParser]] = {}
self.pending_tags: list[tuple[set[str], str, str, dict[str, Any]]] = []
self.hashq: Queue[tuple[str, str, str, str, float]] = Queue()
self.tagq: Queue[tuple[str, str, str, str]] = Queue()
self.tagq: Queue[tuple[str, str, str, str, str, float]] = Queue()
self.tag_event = threading.Condition()
self.n_hashq = 0
self.n_tagq = 0
@@ -775,7 +775,7 @@ class Up2k(object):
assert self.pp and self.mem_cur
self.pp.msg = "a{} {}".format(self.pp.n, cdir)
rd = cdir[len(top) + 1 :].strip("/")
rd = cdir[len(top) :].strip("/")
if WINDOWS:
rd = rd.replace("\\", "/").strip("/")
@@ -1288,8 +1288,8 @@ class Up2k(object):
with self.mutex:
try:
q = "select rd, fn from up where substr(w,1,16)=? and +w=?"
rd, fn = cur.execute(q, (w[:16], w)).fetchone()
q = "select rd, fn, ip, at from up where substr(w,1,16)=? and +w=?"
rd, fn, ip, at = cur.execute(q, (w[:16], w)).fetchone()
except:
# file modified/deleted since spooling
continue
@@ -1304,9 +1304,14 @@ class Up2k(object):
abspath = os.path.join(ptop, rd, fn)
self.pp.msg = "c{} {}".format(nq, abspath)
if not mpool:
n_tags = self._tagscan_file(cur, entags, w, abspath)
n_tags = self._tagscan_file(cur, entags, w, abspath, ip, at)
else:
mpool.put(Mpqe({}, entags, w, abspath, {}))
if ip:
oth_tags = {"up_ip": ip, "up_at": at}
else:
oth_tags = {}
mpool.put(Mpqe({}, entags, w, abspath, oth_tags))
with self.mutex:
n_tags = len(self._flush_mpool(cur))
@@ -1449,8 +1454,8 @@ class Up2k(object):
if w in in_progress:
continue
q = "select rd, fn from up where substr(w,1,16)=? limit 1"
rd, fn = cur.execute(q, (w,)).fetchone()
q = "select rd, fn, ip, at from up where substr(w,1,16)=? limit 1"
rd, fn, ip, at = cur.execute(q, (w,)).fetchone()
rd, fn = s3dec(rd, fn)
abspath = os.path.join(ptop, rd, fn)
@@ -1472,6 +1477,10 @@ class Up2k(object):
else:
oth_tags = {}
if ip:
oth_tags["up_ip"] = ip
oth_tags["up_at"] = at
jobs.append(Mpqe(parsers, set(), w, abspath, oth_tags))
in_progress[w] = True
@@ -1641,6 +1650,8 @@ class Up2k(object):
entags: set[str],
wark: str,
abspath: str,
ip: str,
at: float
) -> int:
"""will mutex"""
assert self.mtag
@@ -1654,6 +1665,10 @@ class Up2k(object):
self._log_tag_err("", abspath, ex)
return 0
if ip:
tags["up_ip"] = ip
tags["up_at"] = at
with self.mutex:
return self._tag_file(write_cur, entags, wark, abspath, tags)
@@ -2295,7 +2310,7 @@ class Up2k(object):
raise
if "e2t" in self.flags[ptop]:
self.tagq.put((ptop, wark, rd, fn))
self.tagq.put((ptop, wark, rd, fn, ip, at))
self.n_tagq += 1
return True
@@ -2941,7 +2956,7 @@ class Up2k(object):
with self.mutex:
self.n_tagq -= 1
ptop, wark, rd, fn = self.tagq.get()
ptop, wark, rd, fn, ip, at = self.tagq.get()
if "e2t" not in self.flags[ptop]:
continue
@@ -2952,6 +2967,8 @@ class Up2k(object):
ntags1 = len(tags)
parsers = self._get_parsers(ptop, tags, abspath)
if parsers:
tags["up_ip"] = ip
tags["up_at"] = at
tags.update(self.mtag.get_bin(parsers, abspath, tags))
except Exception as ex:
self._log_tag_err("", abspath, ex)

View File

@@ -11,6 +11,7 @@ var Ls = {
"q": "quality / bitrate",
"Ac": "audio codec",
"Vc": "video codec",
"Fmt": "format / container",
"Ahash": "audio checksum",
"Vhash": "video checksum",
"Res": "resolution",
@@ -317,6 +318,7 @@ var Ls = {
"u_ehssrch": "server rejected the request to perform search",
"u_ehsinit": "server rejected the request to initiate upload",
"u_ehsdf": "server ran out of disk space!\n\nwill keep retrying, in case someone\nfrees up enough space to continue",
"u_emtleak": "it looks like your webbrowser may have a memory leak;\nplease try the following:\n<ul><li>hit <code>F5</code> to refresh the page</li><li>then disable the &nbsp;<code>mt</code>&nbsp; button in the &nbsp;<code>⚙️ settings</code></li><li>and try that upload again</li></ul>Uploads will be a bit slower, but oh well.\nSorry for the trouble!",
"u_s404": "not found on server",
"u_expl": "explain",
"u_tu": '<p class="warn">WARNING: turbo enabled, <span>&nbsp;client may not detect and resume incomplete uploads; see turbo-button tooltip</span></p>',
@@ -348,6 +350,7 @@ var Ls = {
"q": "kvalitet / bitrate",
"Ac": "lyd-format",
"Vc": "video-format",
"Fmt": "format / innpakning",
"Ahash": "lyd-kontrollsum",
"Vhash": "video-kontrollsum",
"Res": "oppløsning",
@@ -654,6 +657,7 @@ var Ls = {
"u_ehssrch": "server nektet forespørselen om å utføre søk",
"u_ehsinit": "server nektet forespørselen om å begynne en ny opplastning",
"u_ehsdf": "serveren er full!\n\nprøver igjen regelmessig,\ni tilfelle noen rydder litt...",
"u_emtleak": "uff, det er mulig at nettleseren din har en minnelekkasje...\nForeslår at du prøver følgende:\n<ul><li>trykk F5 for å laste siden på nytt</li><li>så skru av &nbsp;<code>mt</code>&nbsp; bryteren under &nbsp;<code>⚙️ innstillinger</code></li><li>og forsøk den samme opplastningen igjen</li></ul>Opplastning vil gå litt tregere, men det får så være.\nBeklager bryderiet!",
"u_s404": "ikke funnet på serveren",
"u_expl": "forklar",
"u_tu": '<p class="warn">ADVARSEL: turbo er på, <span>&nbsp;avbrutte opplastninger vil muligens ikke oppdages og gjenopptas; hold musepekeren over turbo-knappen for mer info</span></p>',

View File

@@ -847,6 +847,7 @@ function up2k_init(subtle) {
},
"car": 0,
"slow_io": null,
"oserr": false,
"modn": 0,
"modv": 0,
"mod0": null
@@ -1365,6 +1366,14 @@ function up2k_init(subtle) {
etaskip = 0;
}
function got_oserr() {
if (!hws.length || !uc.hashw || st.oserr)
return;
st.oserr = true;
modal.alert(L.u_emtleak);
}
/////
////
/// actuator
@@ -1723,6 +1732,7 @@ function up2k_init(subtle) {
pvis.seth(t.n, 2, err + ' @ ' + car);
console.log('OS-error', reader.error, '@', car);
handled = true;
got_oserr();
}
if (handled) {
@@ -1841,6 +1851,8 @@ function up2k_init(subtle) {
pvis.seth(t.n, 1, d[1]);
pvis.seth(t.n, 2, d[2]);
console.log(d[1], d[2]);
if (d[1] == 'OS-error')
got_oserr();
pvis.move(t.n, 'ng');
apop(st.busy.hash, t);

View File

@@ -8,7 +8,7 @@ function hex2u8(txt) {
var subtle = null;
try {
subtle = crypto.subtle || crypto.webkitSubtle;
subtle = crypto.subtle;
subtle.digest('SHA-512', new Uint8Array(1)).then(
function (x) { },
function (x) { load_fb(); }
@@ -23,11 +23,20 @@ function load_fb() {
}
var reader = null,
busy = false;
onmessage = (d) => {
var [nchunk, fobj, car, cdr] = d.data,
t0 = Date.now(),
if (busy)
return postMessage(["panic", 'worker got another task while busy']);
if (!reader)
reader = new FileReader();
var [nchunk, fobj, car, cdr] = d.data,
t0 = Date.now();
reader.onload = function (e) {
try {
//console.log('[ w] %d HASH bgin', nchunk);
@@ -39,6 +48,7 @@ onmessage = (d) => {
}
};
reader.onerror = function () {
busy = false;
var err = reader.error + '';
if (err.indexOf('NotReadableError') !== -1 || // win10-chrome defender
@@ -49,12 +59,14 @@ onmessage = (d) => {
postMessage(["ferr", err]);
};
//console.log('[ w] %d read bgin', nchunk);
busy = true;
reader.readAsArrayBuffer(
File.prototype.slice.call(fobj, car, cdr));
var hash_calc = function (buf) {
var hash_done = function (hashbuf) {
busy = false;
try {
var hslice = new Uint8Array(hashbuf).subarray(0, 33);
//console.log('[ w] %d HASH DONE', nchunk);

View File

@@ -1,3 +1,43 @@
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0815-1825 `v1.3.14` fix windows db
after two exciting releases, time for something boring
* read-only demo server at https://a.ocv.me/pub/demo/
* latest gzip edition of the sfx: [v1.0.14](https://github.com/9001/copyparty/releases/tag/v1.0.14#:~:text=release-specific%20notes)
## new features
* upload-info (ip and timestamp) is provided to `mtp` tagparser plugins as json
* tagscanner will index `fmt` (file-format / container type) by default
* and `description` can be enabled in `-mte`
## bugfixes
* [v1.3.12](https://github.com/9001/copyparty/releases/tag/v1.3.12) broke file-indexing on windows if an entire HDD was mounted as a volume
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0812-2258 `v1.3.12` quickboot
* read-only demo server at https://a.ocv.me/pub/demo/
* latest gzip edition of the sfx: [v1.0.14](https://github.com/9001/copyparty/releases/tag/v1.0.14#:~:text=release-specific%20notes)
## new features
*but wait, there's more!*   not only do you get the [multithreaded file hashing](https://github.com/9001/copyparty/releases/tag/v1.3.11) but also --
* faster bootup and volume reindexing when `-e2ds` (file indexing) is enabled
* `3x` faster is probably the average on most instances; more files per folder = faster
* `9x` faster on a 36 TiB zfs music/media nas with `-e2ts` (metadata indexing), dropping from 46sec to 5sec
* and `34x` on another zfs box, 63sec -> 1.8sec
* new arg `--no-dhash` disables the speedhax in case it's buggy (skipping files or audio tags)
* add option `--exit idx` to abort and shutdown after volume indexing has finished
## bugfixes
* [u2cli](https://github.com/9001/copyparty/tree/hovudstraum/bin#up2kpy): detect and skip uploading from recursive symlinks
* stop reindexing empty files on startup
* support fips-compliant cpython builds
* replaces md5 with sha1, changing the filetype-associated colors in the gallery view
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0810-2135 `v1.3.11` webworkers

View File

@@ -69,6 +69,9 @@ pybin=$(command -v python3 || command -v python) || {
exit 1
}
[ $CSN ] ||
CSN=sfx
langs=
use_gz=
zopf=2560
@@ -99,9 +102,9 @@ stamp=$(
done | sort | tail -n 1 | sha1sum | cut -c-16
)
rm -rf sfx/*
mkdir -p sfx build
cd sfx
rm -rf $CSN/*
mkdir -p $CSN build
cd $CSN
tmpdir="$(
printf '%s\n' "$TMPDIR" /tmp |
@@ -237,7 +240,7 @@ ts=$(date -u +%s)
hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)
mkdir -p ../dist
sfx_out=../dist/copyparty-sfx
sfx_out=../dist/copyparty-$CSN
echo cleanup
find -name '*.pyc' -delete
@@ -371,7 +374,7 @@ gzres() {
}
zdir="$tmpdir/cpp-mksfx"
zdir="$tmpdir/cpp-mk$CSN"
[ -e "$zdir/$stamp" ] || rm -rf "$zdir"
mkdir -p "$zdir"
echo a > "$zdir/$stamp"
@@ -423,7 +426,7 @@ pe=bz2
echo compressing tar
# detect best level; bzip2 -7 is usually better than -9
for n in {2..9}; do cp tar t.$n; $pc -$n t.$n & done; wait; mv -v $(ls -1S t.*.$pe | tail -n 1) tar.bz2
for n in {2..9}; do cp tar t.$n; nice $pc -$n t.$n & done; wait; mv -v $(ls -1S t.*.$pe | tail -n 1) tar.bz2
rm t.* || true
exts=()

View File

@@ -1,6 +1,8 @@
#!/bin/bash
set -e
parallel=2
cd ~/dev/copyparty/scripts
v=$1
@@ -21,16 +23,31 @@ v=$1
./make-tgz-release.sh $v
}
rm -f ../dist/copyparty-sfx.*
rm -f ../dist/copyparty-sfx*
shift
./make-sfx.sh "$@"
f=../dist/copyparty-sfx.py
[ -e $f ] ||
f=../dist/copyparty-sfx-gz.py
f=../dist/copyparty-sfx
[ -e $f.py ] ||
f=../dist/copyparty-sfx-gz
$f.py -h >/dev/null
[ $parallel -gt 1 ] && {
printf '\033[%s' s 2r H "0;1;37;44mbruteforcing sfx size -- press enter to terminate" K u "7m $* " K $'27m\n'
trap "rm -f .sfx-run; printf '\033[%s' s r u" INT TERM EXIT
touch .sfx-run
for ((a=0; a<$parallel; a++)); do
while [ -e .sfx-run ]; do
CSN=sfx$a ./make-sfx.sh re "$@"
mv $f$a.py $f.$(wc -c <$f$a.py | awk '{print$1}').py
done &
done
read
exit
}
$f -h
while true; do
mv $f $f.$(wc -c <$f | awk '{print$1}')
mv $f.py $f.$(wc -c <$f.py | awk '{print$1}').py
./make-sfx.sh re "$@"
done