mirror of
https://github.com/9001/copyparty.git
synced 2025-10-25 00:53:47 +00:00
Compare commits
51 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
83127858ca | ||
|
|
d89329757e | ||
|
|
49ffec5320 | ||
|
|
2eaae2b66a | ||
|
|
ea4441e25c | ||
|
|
e5f34042f9 | ||
|
|
271096874a | ||
|
|
8efd780a72 | ||
|
|
41bcf7308d | ||
|
|
d102bb3199 | ||
|
|
d0bed95415 | ||
|
|
2528729971 | ||
|
|
292c18b3d0 | ||
|
|
0be7c5e2d8 | ||
|
|
eb5aaddba4 | ||
|
|
d8fd82bcb5 | ||
|
|
97be495861 | ||
|
|
8b53c159fc | ||
|
|
81e281f703 | ||
|
|
3948214050 | ||
|
|
c5e9a643e7 | ||
|
|
d25881d5c3 | ||
|
|
38d8d9733f | ||
|
|
118ebf668d | ||
|
|
a86f09fa46 | ||
|
|
dd4fb35c8f | ||
|
|
621eb4cf95 | ||
|
|
deea66ad0b | ||
|
|
bf99445377 | ||
|
|
7b54a63396 | ||
|
|
0fcb015f9a | ||
|
|
0a22b1ffb6 | ||
|
|
68cecc52ab | ||
|
|
53657ccfff | ||
|
|
96223fda01 | ||
|
|
374ff3433e | ||
|
|
5d63949e98 | ||
|
|
6b065d507d | ||
|
|
e79997498a | ||
|
|
f7ee02ec35 | ||
|
|
69dc433e1c | ||
|
|
c880cd848c | ||
|
|
5752b6db48 | ||
|
|
b36f905eab | ||
|
|
483dd527c6 | ||
|
|
e55678e28f | ||
|
|
3f4a8b9d6f | ||
|
|
02a856ecb4 | ||
|
|
4dff726310 | ||
|
|
cbc449036f | ||
|
|
8f53152220 |
53
README.md
53
README.md
@@ -13,7 +13,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
|
||||
* *resumable* uploads need `firefox 34+` / `chrome 41+` / `safari 7+` for full speed
|
||||
* code standard: `black`
|
||||
|
||||
📷 screenshots: [browser](#the-browser) // [upload](#uploading) // [md-viewer](#markdown-viewer) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [ie4](#browser-support)
|
||||
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [thumbnails](#thumbnails) // [md-viewer](#markdown-viewer) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [ie4](#browser-support)
|
||||
|
||||
|
||||
## readme toc
|
||||
@@ -29,6 +29,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
|
||||
* [tabs](#tabs)
|
||||
* [hotkeys](#hotkeys)
|
||||
* [tree-mode](#tree-mode)
|
||||
* [thumbnails](#thumbnails)
|
||||
* [zip downloads](#zip-downloads)
|
||||
* [uploading](#uploading)
|
||||
* [file-search](#file-search)
|
||||
@@ -43,6 +44,8 @@ turn your phone or raspi into a portable file server with resumable uploads/down
|
||||
* [client examples](#client-examples)
|
||||
* [up2k](#up2k)
|
||||
* [dependencies](#dependencies)
|
||||
* [optional dependencies](#optional-dependencies)
|
||||
* [install recommended deps](#install-recommended-deps)
|
||||
* [optional gpl stuff](#optional-gpl-stuff)
|
||||
* [sfx](#sfx)
|
||||
* [sfx repack](#sfx-repack)
|
||||
@@ -75,6 +78,8 @@ you may also want these, especially on servers:
|
||||
|
||||
## status
|
||||
|
||||
summary: all planned features work! now please enjoy the bloatening
|
||||
|
||||
* backend stuff
|
||||
* ☑ sanic multipart parser
|
||||
* ☑ load balancer (multiprocessing)
|
||||
@@ -92,7 +97,10 @@ you may also want these, especially on servers:
|
||||
* browser
|
||||
* ☑ tree-view
|
||||
* ☑ media player
|
||||
* ✖ thumbnails
|
||||
* ☑ thumbnails
|
||||
* ☑ images using Pillow
|
||||
* ☑ videos using FFmpeg
|
||||
* ☑ cache eviction (max-age; maybe max-size eventually)
|
||||
* ☑ SPA (browse while uploading)
|
||||
* if you use the file-tree on the left only, not folders in the file list
|
||||
* server indexing
|
||||
@@ -103,8 +111,6 @@ you may also want these, especially on servers:
|
||||
* ☑ viewer
|
||||
* ☑ editor (sure why not)
|
||||
|
||||
summary: it works! you can use it! (but technically not even close to beta)
|
||||
|
||||
|
||||
# bugs
|
||||
|
||||
@@ -145,11 +151,16 @@ summary: it works! you can use it! (but technically not even close to beta)
|
||||
the browser has the following hotkeys
|
||||
* `I/K` prev/next folder
|
||||
* `P` parent folder
|
||||
* `G` toggle list / grid view
|
||||
* `T` toggle thumbnails / icons
|
||||
* when playing audio:
|
||||
* `0..9` jump to 10%..90%
|
||||
* `U/O` skip 10sec back/forward
|
||||
* `J/L` prev/next song
|
||||
* `J` also starts playing the folder
|
||||
* in the grid view:
|
||||
* `S` toggle multiselect
|
||||
* `A/D` zoom
|
||||
|
||||
|
||||
## tree-mode
|
||||
@@ -159,6 +170,13 @@ by default there's a breadcrumbs path; you can replace this with a tree-browser
|
||||
click `[-]` and `[+]` to adjust the size, and the `[a]` toggles if the tree should widen dynamically as you go deeper or stay fixed-size
|
||||
|
||||
|
||||
## thumbnails
|
||||
|
||||

|
||||
|
||||
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how destructive your users are
|
||||
|
||||
|
||||
## zip downloads
|
||||
|
||||
the `zip` link next to folders can produce various types of zip/tar files using these alternatives in the browser settings tab:
|
||||
@@ -276,6 +294,8 @@ the same arguments can be set as volume flags, in addition to `d2d` and `d2t` fo
|
||||
|
||||
`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those
|
||||
|
||||
the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
|
||||
|
||||
|
||||
## metadata from audio files
|
||||
|
||||
@@ -293,6 +313,7 @@ see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copy
|
||||
`--no-mutagen` disables mutagen and uses ffprobe instead, which...
|
||||
* is about 20x slower than mutagen
|
||||
* catches a few tags that mutagen doesn't
|
||||
* melodic key, video resolution, framerate, pixfmt
|
||||
* avoids pulling any GPL code into copyparty
|
||||
* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve
|
||||
|
||||
@@ -308,6 +329,7 @@ copyparty can invoke external programs to collect additional metadata for files
|
||||
*but wait, there's more!* `-mtp` can be used for non-audio files as well using the `a` flag: `ay` only do audio files, `an` only do non-audio files, or `ad` do all files (d as in dontcare)
|
||||
|
||||
* `-mtp ext=an,~/bin/file-ext.py` runs `~/bin/file-ext.py` to get the `ext` tag only if file is not audio (`an`)
|
||||
* `-mtp arch,built,ver,orig=an,eexe,edll,~/bin/exe.py` runs `~/bin/exe.py` to get properties about windows-binaries only if file is not audio (`an`) and file extension is exe or dll
|
||||
|
||||
|
||||
## complete examples
|
||||
@@ -401,13 +423,31 @@ quick outline of the up2k protocol, see [uploading](#uploading) for the web-clie
|
||||
|
||||
* `jinja2` (is built into the SFX)
|
||||
|
||||
**optional,** enables music tags:
|
||||
|
||||
## optional dependencies
|
||||
|
||||
enable music tags:
|
||||
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
|
||||
* or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
|
||||
|
||||
**optional,** will eventually enable thumbnails:
|
||||
enable image thumbnails:
|
||||
* `Pillow` (requires py2.7 or py3.5+)
|
||||
|
||||
enable video thumbnails:
|
||||
* `ffmpeg` and `ffprobe` somewhere in `$PATH`
|
||||
|
||||
enable reading HEIF pictures:
|
||||
* `pyheif-pillow-opener` (requires Linux or a C compiler)
|
||||
|
||||
enable reading AVIF pictures:
|
||||
* `pillow-avif-plugin`
|
||||
|
||||
|
||||
## install recommended deps
|
||||
```
|
||||
python -m pip install --user -U jinja2 mutagen Pillow
|
||||
```
|
||||
|
||||
|
||||
## optional gpl stuff
|
||||
|
||||
@@ -487,7 +527,6 @@ roughly sorted by priority
|
||||
* start from a chunk index and just go
|
||||
* terminate client on bad data
|
||||
* `os.copy_file_range` for up2k cloning
|
||||
* support pillow-simd
|
||||
* single sha512 across all up2k chunks? maybe
|
||||
* figure out the deal with pixel3a not being connectable as hotspot
|
||||
* pixel3a having unpredictable 3sec latency in general :||||
|
||||
|
||||
@@ -45,3 +45,18 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
|
||||
# [`mtag/`](mtag/)
|
||||
* standalone programs which perform misc. file analysis
|
||||
* copyparty can Popen programs like these during file indexing to collect additional metadata
|
||||
|
||||
|
||||
# [`dbtool.py`](dbtool.py)
|
||||
upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty recommends to wipe the DB and reindex because it now collects additional metadata during analysis, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
|
||||
|
||||
for that example (upgrading to v0.11.0), first move the old db aside, launch copyparty, let it rebuild the db until the point where it starts running mtp (colored messages as it adds the mtp tags), then CTRL-C and patch in the old mtp tags from the old db instead
|
||||
|
||||
so assuming you have `-mtp` parsers to provide the tags `key` and `.bpm`:
|
||||
|
||||
```
|
||||
~/bin/dbtool.py -ls up2k.db
|
||||
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -cmp
|
||||
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy key
|
||||
~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy .bpm -vac
|
||||
```
|
||||
|
||||
198
bin/dbtool.py
Executable file
198
bin/dbtool.py
Executable file
@@ -0,0 +1,198 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import sqlite3
|
||||
import argparse
|
||||
|
||||
DB_VER = 3
|
||||
|
||||
|
||||
def die(msg):
    """Print *msg* wrapped in red ANSI color codes, then abort with exit status 1."""
    colored = "\033[31m\n" + msg + "\n\033[0m"
    print(colored)
    sys.exit(1)
|
||||
|
||||
|
||||
def read_ver(db):
    """Return the schema version stored in *db* as an int, or the string
    "corrupt" if no version row can be found.

    The version row moved between tables across copyparty releases, so both
    the old ("ki") and the current ("kv") table names are probed.
    """
    for tab in ["ki", "kv"]:
        try:
            c = db.execute(r"select v from {} where k = 'sver'".format(tab))
        except sqlite3.Error:
            # table does not exist in this db; try the next candidate
            # (was a bare `except:` which would also hide programming errors)
            continue

        rows = c.fetchall()
        if rows:
            return int(rows[0][0])

    # neither table held an 'sver' row
    return "corrupt"
|
||||
|
||||
|
||||
def ls(db):
    """Print a summary of *db*: file/tag row counts plus per-tag frequencies."""
    n_files = db.execute("select count(w) from up").fetchone()[0]
    n_tags = db.execute("select count(w) from mt").fetchone()[0]
    print(f"{n_files} files")
    print(f"{n_tags} tags\n")

    for line in [
        "number of occurences for each tag,",
        " 'x' = file has no tags",
        " 't:mtp' = the mtp flag (file not mtp processed yet)",
        "",
    ]:
        print(line)

    q = "select k, count(k) from mt group by k order by k"
    for tag, cnt in db.execute(q):
        print(f"{cnt:9} {tag}")
|
||||
|
||||
|
||||
def compare(n1, d1, n2, d2, verbose):
    """Report files and tags that exist in db *d1* (labeled *n1*) but are
    missing from db *d2* (labeled *n2*); *verbose* lists each mismatch.
    """
    # pass 1: files, matched on the first 16 chars of the up2k hash
    total = next(d1.execute("select count(w) from up"))[0]
    seen = 0
    nmiss_files = 0
    for w, rd, fn in d1.execute("select w, rd, fn from up"):
        seen += 1
        if seen % 25_000 == 0:
            m = f"\033[36mchecked {seen:,} of {total:,} files in {n1} against {n2}\033[0m"
            print(m)

        q = "select w from up where substr(w,1,16) = ?"
        if not d2.execute(q, (w[:16],)).fetchone():
            nmiss_files += 1
            if verbose:
                print(f"file in {n1} missing in {n2}: [{w}] {rd}/{fn}")

    print(f" {nmiss_files} files in {n1} missing in {n2}\n")

    # pass 2: tags, matched on (warks, key)
    total = next(d1.execute("select count(w) from mt"))[0]
    seen = 0
    per_tag = {}
    nmiss = 0
    for w, k, v in d1.execute("select * from mt"):
        seen += 1
        if seen % 100_000 == 0:
            m = f"\033[36mchecked {seen:,} of {total:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m"
            print(m)

        row = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
        v2 = row[0] if row else None

        if v2 is not None:
            if k.startswith("."):
                # numeric tags (leading dot): tolerate drift between parsers
                # NOTE(review): threshold is float(v)/0.9 (110% of v), which
                # is very lax -- possibly meant to be v*0.1; preserved as-is
                try:
                    if abs(float(v) - float(v2)) > float(v) / 0.9:
                        v2 = None
                    else:
                        v2 = v
                except:
                    pass

            if v != v2:
                v2 = None

        if v2 is None:
            nmiss += 1
            per_tag[k] = per_tag.get(k, 0) + 1

            if verbose:
                q = "select rd, fn from up where substr(w,1,16) = ?"
                rd, fn = d1.execute(q, (w,)).fetchone()
                print(f"missing in {n2}: [{w}] [{rd}/{fn}] {k} = {v}")

    for k, cnt in sorted(per_tag.items()):
        if cnt:
            print(f"{n1} has {cnt:6} more {k:<6} tags than {n2}")

    print(f"in total, {nmiss} missing tags in {n2}\n")
|
||||
|
||||
|
||||
def copy_mtp(d1, d2, tag, rm):
    """Copy every *tag* row from db *d1* into db *d2*, replacing stale
    values; when *rm* is set, also drop the 't:mtp' marker on copied files
    so copyparty will not re-run its mtp parsers on them.
    """
    total = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0]
    seen = 0
    ndone = 0
    for w, k, v in d1.execute("select * from mt where k = ?", (tag,)):
        seen += 1
        if seen % 25_000 == 0:
            m = f"\033[36m{seen:,} of {total:,} tags checked, so far {ndone} copied\033[0m"
            print(m)

        row = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
        cur = row[0] if row else None

        if cur == v:
            continue  # destination already has this exact value

        ndone += 1
        if cur is not None:
            d2.execute("delete from mt where w = ? and +k = ?", (w, k))

        d2.execute("insert into mt values (?,?,?)", (w, k, v))
        if rm:
            d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w,))

    d2.commit()
    print(f"copied {ndone} {tag} tags over")
|
||||
|
||||
|
||||
def main():
    """CLI entry point: inspect, compare, and patch copyparty up2k databases."""
    os.system("")  # enables ANSI color codes on windows terminals
    print()

    ap = argparse.ArgumentParser()
    ap.add_argument("db", help="database to work on")
    ap.add_argument("-src", metavar="DB", type=str, help="database to copy from")

    ap2 = ap.add_argument_group("informational / read-only stuff")
    ap2.add_argument("-v", action="store_true", help="verbose")
    ap2.add_argument("-ls", action="store_true", help="list summary for db")
    ap2.add_argument("-cmp", action="store_true", help="compare databases")

    ap2 = ap.add_argument_group("options which modify target db")
    ap2.add_argument("-copy", metavar="TAG", type=str, help="mtp tag to copy over")
    ap2.add_argument(
        "-rm-mtp-flag",
        action="store_true",
        help="when an mtp tag is copied over, also mark that as done, so copyparty won't run mtp on it",
    )
    ap2.add_argument("-vac", action="store_true", help="optimize DB")

    ar = ap.parse_args()

    for v in [ar.db, ar.src]:
        if v and not os.path.exists(v):
            die("database must exist")

    db = sqlite3.connect(ar.db)
    ds = sqlite3.connect(ar.src) if ar.src else None

    # both databases must be intact and at the supported schema version
    for d, n in [[ds, "src"], [db, "dst"]]:
        if not d:
            continue

        ver = read_ver(d)
        if ver == "corrupt":
            # bugfix: the "{}" placeholder was never filled in before
            die("{} database appears to be corrupt, sorry".format(n))

        if ver != DB_VER:
            m = f"{n} db is version {ver}, this tool only supports version {DB_VER}, please upgrade it with copyparty first"
            die(m)

    if ar.ls:
        ls(db)

    if ar.cmp:
        if not ds:
            die("need src db to compare against")

        compare("src", ds, "dst", db, ar.v)
        compare("dst", db, "src", ds, ar.v)

    if ar.copy:
        if not ds:
            # bugfix: -copy without -src used to crash with AttributeError
            die("need src db to copy from")

        copy_mtp(ds, db, ar.copy, ar.rm_mtp_flag)

    if ar.vac:
        # bugfix: -vac was accepted and documented but previously did nothing
        print("vacuuming...")
        db.execute("vacuum")
        db.commit()


if __name__ == "__main__":
    main()
|
||||
96
bin/mtag/exe.py
Normal file
96
bin/mtag/exe.py
Normal file
@@ -0,0 +1,96 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import sys
|
||||
import time
|
||||
import json
|
||||
import pefile
|
||||
|
||||
"""
|
||||
retrieve exe info,
|
||||
example for multivalue providers
|
||||
"""
|
||||
|
||||
|
||||
def unk(v):
    """Render an unrecognized numeric field as an 'unk(xxxx)' hex placeholder."""
    return f"unk({v:04x})"
|
||||
|
||||
|
||||
class PE2(pefile.PE):
    """pefile.PE with the slow / unneeded parsing stages stubbed out,
    keeping only what the tag extraction below actually reads."""

    # parser stages to disable; resources stay enabled so FileInfo
    # (version strings) still gets populated
    _SKIP = (
        # -- parse_data_directories:
        "parse_import_directory",
        "parse_export_directory",
        # "parse_resources_directory",
        "parse_debug_directory",
        "parse_relocations_directory",
        "parse_directory_tls",
        "parse_directory_load_config",
        "parse_delay_import_directory",
        "parse_directory_bound_imports",
        # -- full_load:
        "parse_rich_header",
    )

    def __init__(self, *a, **ka):
        # shadow each unwanted stage with a no-op on this instance
        for name in self._SKIP:
            setattr(self, name, self.noop)

        super(PE2, self).__init__(*a, **ka)

    def noop(*a, **ka):
        pass
|
||||
|
||||
|
||||
# bail out silently on unparsable input; exit 0 so the indexer treats
# this as "no tags for this file" rather than a parser failure
try:
    pe = PE2(sys.argv[1], fast_load=False)
except:
    sys.exit(0)

machine = pe.FILE_HEADER.Machine
arch = {0x14C: "x86", 0x8664: "x64"}.get(machine, unk(machine))

try:
    t = time.gmtime(pe.FILE_HEADER.TimeDateStamp)
    buildtime = time.strftime("%Y-%m-%d_%H:%M:%S", t)
except:
    buildtime = "invalid"

subsys = pe.OPTIONAL_HEADER.Subsystem
ui = {2: "GUI", 3: "cmdline"}.get(subsys, unk(subsys))

# collect version-resource strings, if the binary has any
extra = {}
for fi in getattr(pe, "FileInfo", []):
    for entry in fi:
        if entry.name != "StringFileInfo":
            continue

        for table in entry.StringTable:
            for key, raw in table.entries.items():
                val = raw.decode("utf-8", "replace").strip()
                if not val:
                    continue

                # pefile hands back bytes keys; later variants win
                if key in (b"FileVersion", b"ProductVersion"):
                    extra["ver"] = val

                if key in (b"OriginalFilename", b"InternalName"):
                    extra["orig"] = val

info = {
    "arch": arch,
    "built": buildtime,
    "ui": ui,
    "cksum": "{:08x}".format(pe.OPTIONAL_HEADER.CheckSum),
}
info.update(extra)

print(json.dumps(info, indent=4))
|
||||
@@ -2,6 +2,7 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import platform
|
||||
import time
|
||||
import sys
|
||||
import os
|
||||
|
||||
@@ -23,6 +24,7 @@ MACOS = platform.system() == "Darwin"
|
||||
|
||||
class EnvParams(object):
|
||||
def __init__(self):
|
||||
self.t0 = time.time()
|
||||
self.mod = os.path.dirname(os.path.realpath(__file__))
|
||||
if self.mod.endswith("__init__"):
|
||||
self.mod = os.path.dirname(self.mod)
|
||||
|
||||
@@ -249,6 +249,21 @@ def run_argparse(argv, formatter):
|
||||
ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms")
|
||||
ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
|
||||
|
||||
ap2 = ap.add_argument_group('admin panel options')
|
||||
ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
|
||||
ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)")
|
||||
|
||||
ap2 = ap.add_argument_group('thumbnail options')
|
||||
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
|
||||
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
|
||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
|
||||
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
|
||||
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
||||
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
||||
ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
|
||||
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval")
|
||||
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
|
||||
|
||||
ap2 = ap.add_argument_group('database options')
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
|
||||
@@ -260,7 +275,7 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
|
||||
ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q")
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,ac,vc,res,.fps")
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]bin", action="append", type=str, help="read tag M using bin")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
|
||||
|
||||
@@ -276,8 +291,9 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
|
||||
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
|
||||
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
|
||||
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing")
|
||||
ap2.add_argument("--ihead", metavar="HEADER", action='append', help="dump incoming header")
|
||||
ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/", help="dont log URLs matching")
|
||||
ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
|
||||
|
||||
return ap.parse_args(args=argv[1:])
|
||||
# fmt: on
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# coding: utf-8
|
||||
|
||||
VERSION = (0, 10, 22)
|
||||
CODENAME = "zip it"
|
||||
BUILD_DT = (2021, 5, 18)
|
||||
VERSION = (0, 11, 4)
|
||||
CODENAME = "the grid"
|
||||
BUILD_DT = (2021, 6, 1)
|
||||
|
||||
S_VERSION = ".".join(map(str, VERSION))
|
||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
|
||||
@@ -14,11 +14,12 @@ from .util import IMPLICATIONS, undot, Pebkac, fsdec, fsenc, statdir, nuprint
|
||||
class VFS(object):
|
||||
"""single level in the virtual fs"""
|
||||
|
||||
def __init__(self, realpath, vpath, uread=[], uwrite=[], flags={}):
|
||||
def __init__(self, realpath, vpath, uread=[], uwrite=[], uadm=[], flags={}):
|
||||
self.realpath = realpath # absolute path on host filesystem
|
||||
self.vpath = vpath # absolute path in the virtual filesystem
|
||||
self.uread = uread # users who can read this
|
||||
self.uwrite = uwrite # users who can write this
|
||||
self.uadm = uadm # users who are regular admins
|
||||
self.flags = flags # config switches
|
||||
self.nodes = {} # child nodes
|
||||
self.all_vols = {vpath: self} # flattened recursive
|
||||
@@ -27,7 +28,7 @@ class VFS(object):
|
||||
return "VFS({})".format(
|
||||
", ".join(
|
||||
"{}={!r}".format(k, self.__dict__[k])
|
||||
for k in "realpath vpath uread uwrite flags".split()
|
||||
for k in "realpath vpath uread uwrite uadm flags".split()
|
||||
)
|
||||
)
|
||||
|
||||
@@ -52,6 +53,7 @@ class VFS(object):
|
||||
"{}/{}".format(self.vpath, name).lstrip("/"),
|
||||
self.uread,
|
||||
self.uwrite,
|
||||
self.uadm,
|
||||
self.flags,
|
||||
)
|
||||
self._trk(vn)
|
||||
@@ -133,7 +135,7 @@ class VFS(object):
|
||||
#
|
||||
return os.path.realpath(rp)
|
||||
|
||||
def ls(self, rem, uname, scandir, lstat=False):
|
||||
def ls(self, rem, uname, scandir, incl_wo=False, lstat=False):
|
||||
"""return user-readable [fsdir,real,virt] items at vpath"""
|
||||
virt_vis = {} # nodes readable by user
|
||||
abspath = self.canonical(rem)
|
||||
@@ -141,12 +143,12 @@ class VFS(object):
|
||||
real.sort()
|
||||
if not rem:
|
||||
for name, vn2 in sorted(self.nodes.items()):
|
||||
if (
|
||||
uname in vn2.uread
|
||||
or "*" in vn2.uread
|
||||
or uname in vn2.uwrite
|
||||
or "*" in vn2.uwrite
|
||||
):
|
||||
ok = uname in vn2.uread or "*" in vn2.uread
|
||||
|
||||
if not ok and incl_wo:
|
||||
ok = uname in vn2.uwrite or "*" in vn2.uwrite
|
||||
|
||||
if ok:
|
||||
virt_vis[name] = vn2
|
||||
|
||||
# no vfs nodes in the list of real inodes
|
||||
@@ -160,7 +162,7 @@ class VFS(object):
|
||||
rel is a unix-style user-defined vpath (not vfs-related)
|
||||
"""
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, lstat)
|
||||
fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, False, lstat)
|
||||
rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
|
||||
rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
||||
|
||||
@@ -226,15 +228,19 @@ class VFS(object):
|
||||
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
|
||||
yield f
|
||||
|
||||
def user_tree(self, uname, readable=False, writable=False):
|
||||
def user_tree(self, uname, readable=False, writable=False, admin=False):
|
||||
ret = []
|
||||
opt1 = readable and (uname in self.uread or "*" in self.uread)
|
||||
opt2 = writable and (uname in self.uwrite or "*" in self.uwrite)
|
||||
if opt1 or opt2:
|
||||
ret.append(self.vpath)
|
||||
if admin:
|
||||
if opt1 and opt2:
|
||||
ret.append(self.vpath)
|
||||
else:
|
||||
if opt1 or opt2:
|
||||
ret.append(self.vpath)
|
||||
|
||||
for _, vn in sorted(self.nodes.items()):
|
||||
ret.extend(vn.user_tree(uname, readable, writable))
|
||||
ret.extend(vn.user_tree(uname, readable, writable, admin))
|
||||
|
||||
return ret
|
||||
|
||||
@@ -269,7 +275,7 @@ class AuthSrv(object):
|
||||
|
||||
yield prev, True
|
||||
|
||||
def _parse_config_file(self, fd, user, mread, mwrite, mflags, mount):
|
||||
def _parse_config_file(self, fd, user, mread, mwrite, madm, mflags, mount):
|
||||
vol_src = None
|
||||
vol_dst = None
|
||||
self.line_ctr = 0
|
||||
@@ -301,6 +307,7 @@ class AuthSrv(object):
|
||||
mount[vol_dst] = vol_src
|
||||
mread[vol_dst] = []
|
||||
mwrite[vol_dst] = []
|
||||
madm[vol_dst] = []
|
||||
mflags[vol_dst] = {}
|
||||
continue
|
||||
|
||||
@@ -311,10 +318,15 @@ class AuthSrv(object):
|
||||
uname = "*"
|
||||
|
||||
self._read_vol_str(
|
||||
lvl, uname, mread[vol_dst], mwrite[vol_dst], mflags[vol_dst]
|
||||
lvl,
|
||||
uname,
|
||||
mread[vol_dst],
|
||||
mwrite[vol_dst],
|
||||
madm[vol_dst],
|
||||
mflags[vol_dst],
|
||||
)
|
||||
|
||||
def _read_vol_str(self, lvl, uname, mr, mw, mf):
|
||||
def _read_vol_str(self, lvl, uname, mr, mw, ma, mf):
|
||||
if lvl == "c":
|
||||
cval = True
|
||||
if "=" in uname:
|
||||
@@ -332,6 +344,9 @@ class AuthSrv(object):
|
||||
if lvl in "wa":
|
||||
mw.append(uname)
|
||||
|
||||
if lvl == "a":
|
||||
ma.append(uname)
|
||||
|
||||
def _read_volflag(self, flags, name, value, is_list):
|
||||
if name not in ["mtp"]:
|
||||
flags[name] = value
|
||||
@@ -355,6 +370,7 @@ class AuthSrv(object):
|
||||
user = {} # username:password
|
||||
mread = {} # mountpoint:[username]
|
||||
mwrite = {} # mountpoint:[username]
|
||||
madm = {} # mountpoint:[username]
|
||||
mflags = {} # mountpoint:[flag]
|
||||
mount = {} # dst:src (mountpoint:realpath)
|
||||
|
||||
@@ -378,17 +394,22 @@ class AuthSrv(object):
|
||||
mount[dst] = src
|
||||
mread[dst] = []
|
||||
mwrite[dst] = []
|
||||
madm[dst] = []
|
||||
mflags[dst] = {}
|
||||
|
||||
perms = perms.split(":")
|
||||
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
|
||||
self._read_vol_str(lvl, uname, mread[dst], mwrite[dst], mflags[dst])
|
||||
self._read_vol_str(
|
||||
lvl, uname, mread[dst], mwrite[dst], madm[dst], mflags[dst]
|
||||
)
|
||||
|
||||
if self.args.c:
|
||||
for cfg_fn in self.args.c:
|
||||
with open(cfg_fn, "rb") as f:
|
||||
try:
|
||||
self._parse_config_file(f, user, mread, mwrite, mflags, mount)
|
||||
self._parse_config_file(
|
||||
f, user, mread, mwrite, madm, mflags, mount
|
||||
)
|
||||
except:
|
||||
m = "\n\033[1;31m\nerror in config file {} on line {}:\n\033[0m"
|
||||
print(m.format(cfg_fn, self.line_ctr))
|
||||
@@ -410,12 +431,15 @@ class AuthSrv(object):
|
||||
|
||||
if dst == "":
|
||||
# rootfs was mapped; fully replaces the default CWD vfs
|
||||
vfs = VFS(mount[dst], dst, mread[dst], mwrite[dst], mflags[dst])
|
||||
vfs = VFS(
|
||||
mount[dst], dst, mread[dst], mwrite[dst], madm[dst], mflags[dst]
|
||||
)
|
||||
continue
|
||||
|
||||
v = vfs.add(mount[dst], dst)
|
||||
v.uread = mread[dst]
|
||||
v.uwrite = mwrite[dst]
|
||||
v.uadm = madm[dst]
|
||||
v.flags = mflags[dst]
|
||||
|
||||
missing_users = {}
|
||||
@@ -475,8 +499,10 @@ class AuthSrv(object):
|
||||
# verify tags mentioned by -mt[mp] are used by -mte
|
||||
local_mtp = {}
|
||||
local_only_mtp = {}
|
||||
for a in vol.flags.get("mtp", []) + vol.flags.get("mtm", []):
|
||||
a = a.split("=")[0]
|
||||
tags = vol.flags.get("mtp", []) + vol.flags.get("mtm", [])
|
||||
tags = [x.split("=")[0] for x in tags]
|
||||
tags = [y for x in tags for y in x.split(",")]
|
||||
for a in tags:
|
||||
local_mtp[a] = True
|
||||
local = True
|
||||
for b in self.args.mtp or []:
|
||||
@@ -505,8 +531,10 @@ class AuthSrv(object):
|
||||
self.log(m.format(vol.vpath, mtp), 1)
|
||||
errors = True
|
||||
|
||||
for mtp in self.args.mtp or []:
|
||||
mtp = mtp.split("=")[0]
|
||||
tags = self.args.mtp or []
|
||||
tags = [x.split("=")[0] for x in tags]
|
||||
tags = [y for x in tags for y in x.split(",")]
|
||||
for mtp in tags:
|
||||
if mtp not in all_mte:
|
||||
m = 'metadata tag "{}" is defined by "-mtm" or "-mtp", but is not used by "-mte" (or by any "cmte" volume-flag)'
|
||||
self.log(m.format(mtp), 1)
|
||||
|
||||
@@ -10,6 +10,7 @@ import json
|
||||
import string
|
||||
import socket
|
||||
import ctypes
|
||||
import traceback
|
||||
from datetime import datetime
|
||||
import calendar
|
||||
|
||||
@@ -22,6 +23,10 @@ if not PY2:
|
||||
unicode = str
|
||||
|
||||
|
||||
NO_CACHE = {"Cache-Control": "no-cache"}
|
||||
NO_STORE = {"Cache-Control": "no-store; max-age=0"}
|
||||
|
||||
|
||||
class HttpCli(object):
|
||||
"""
|
||||
Spawned by HttpConn to process one http transaction
|
||||
@@ -36,6 +41,8 @@ class HttpCli(object):
|
||||
self.addr = conn.addr
|
||||
self.args = conn.args
|
||||
self.auth = conn.auth
|
||||
self.ico = conn.ico
|
||||
self.thumbcli = conn.thumbcli
|
||||
self.log_func = conn.log_func
|
||||
self.log_src = conn.log_src
|
||||
self.tls = hasattr(self.s, "cipher")
|
||||
@@ -149,6 +156,7 @@ class HttpCli(object):
|
||||
if self.uname:
|
||||
self.rvol = self.auth.vfs.user_tree(self.uname, readable=True)
|
||||
self.wvol = self.auth.vfs.user_tree(self.uname, writable=True)
|
||||
self.avol = self.auth.vfs.user_tree(self.uname, True, True, True)
|
||||
|
||||
ua = self.headers.get("user-agent", "")
|
||||
self.is_rclone = ua.startswith("rclone/")
|
||||
@@ -158,7 +166,7 @@ class HttpCli(object):
|
||||
uparam["b"] = False
|
||||
cookies["b"] = False
|
||||
|
||||
self.do_log = not self.conn.lf_url or not self.conn.lf_url.match(self.req)
|
||||
self.do_log = not self.conn.lf_url or not self.conn.lf_url.search(self.req)
|
||||
|
||||
try:
|
||||
if self.mode in ["GET", "HEAD"]:
|
||||
@@ -283,6 +291,9 @@ class HttpCli(object):
|
||||
|
||||
# "embedded" resources
|
||||
if self.vpath.startswith(".cpr"):
|
||||
if self.vpath.startswith(".cpr/ico/"):
|
||||
return self.tx_ico(self.vpath.split("/")[-1], exact=True)
|
||||
|
||||
static_path = os.path.join(E.mod, "web/", self.vpath[5:])
|
||||
return self.tx_file(static_path)
|
||||
|
||||
@@ -317,6 +328,12 @@ class HttpCli(object):
|
||||
self.vpath = None
|
||||
return self.tx_mounts()
|
||||
|
||||
if "scan" in self.uparam:
|
||||
return self.scanvol()
|
||||
|
||||
if "stack" in self.uparam:
|
||||
return self.tx_stack()
|
||||
|
||||
return self.tx_browser()
|
||||
|
||||
def handle_options(self):
|
||||
@@ -423,7 +440,7 @@ class HttpCli(object):
|
||||
fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
|
||||
path = os.path.join(fdir, fn)
|
||||
|
||||
with open(path, "wb", 512 * 1024) as f:
|
||||
with open(fsenc(path), "wb", 512 * 1024) as f:
|
||||
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
|
||||
|
||||
self.conn.hsrv.broker.put(
|
||||
@@ -543,9 +560,9 @@ class HttpCli(object):
|
||||
if sub:
|
||||
try:
|
||||
dst = os.path.join(vfs.realpath, rem)
|
||||
os.makedirs(dst)
|
||||
os.makedirs(fsenc(dst))
|
||||
except:
|
||||
if not os.path.isdir(dst):
|
||||
if not os.path.isdir(fsenc(dst)):
|
||||
raise Pebkac(400, "some file got your folder name")
|
||||
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
|
||||
@@ -583,8 +600,9 @@ class HttpCli(object):
|
||||
taglist = {}
|
||||
else:
|
||||
# search by query params
|
||||
self.log("qj: " + repr(body))
|
||||
hits, taglist = idx.search(vols, body)
|
||||
q = body["q"]
|
||||
self.log("qj: " + q)
|
||||
hits, taglist = idx.search(vols, q)
|
||||
msg = len(hits)
|
||||
|
||||
idx.p_end = time.time()
|
||||
@@ -633,7 +651,7 @@ class HttpCli(object):
|
||||
|
||||
reader = read_socket(self.sr, remains)
|
||||
|
||||
with open(path, "rb+", 512 * 1024) as f:
|
||||
with open(fsenc(path), "rb+", 512 * 1024) as f:
|
||||
f.seek(cstart[0])
|
||||
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
|
||||
|
||||
@@ -676,7 +694,7 @@ class HttpCli(object):
|
||||
times = (int(time.time()), int(lastmod))
|
||||
self.log("no more chunks, setting times {}".format(times))
|
||||
try:
|
||||
os.utime(path, times)
|
||||
os.utime(fsenc(path), times)
|
||||
except:
|
||||
self.log("failed to utime ({}, {})".format(path, times))
|
||||
|
||||
@@ -927,16 +945,16 @@ class HttpCli(object):
|
||||
mdir, mfile = os.path.split(fp)
|
||||
mfile2 = "{}.{:.3f}.md".format(mfile[:-3], srv_lastmod)
|
||||
try:
|
||||
os.mkdir(os.path.join(mdir, ".hist"))
|
||||
os.mkdir(fsenc(os.path.join(mdir, ".hist")))
|
||||
except:
|
||||
pass
|
||||
os.rename(fp, os.path.join(mdir, ".hist", mfile2))
|
||||
os.rename(fsenc(fp), fsenc(os.path.join(mdir, ".hist", mfile2)))
|
||||
|
||||
p_field, _, p_data = next(self.parser.gen)
|
||||
if p_field != "body":
|
||||
raise Pebkac(400, "expected body, got {}".format(p_field))
|
||||
|
||||
with open(fp, "wb", 512 * 1024) as f:
|
||||
with open(fsenc(fp), "wb", 512 * 1024) as f:
|
||||
sz, sha512, _ = hashcopy(self.conn, p_data, f)
|
||||
|
||||
new_lastmod = os.stat(fsenc(fp)).st_mtime
|
||||
@@ -952,14 +970,11 @@ class HttpCli(object):
|
||||
return True
|
||||
|
||||
def _chk_lastmod(self, file_ts):
|
||||
date_fmt = "%a, %d %b %Y %H:%M:%S GMT"
|
||||
file_dt = datetime.utcfromtimestamp(file_ts)
|
||||
file_lastmod = file_dt.strftime(date_fmt)
|
||||
|
||||
file_lastmod = http_ts(file_ts)
|
||||
cli_lastmod = self.headers.get("if-modified-since")
|
||||
if cli_lastmod:
|
||||
try:
|
||||
cli_dt = time.strptime(cli_lastmod, date_fmt)
|
||||
cli_dt = time.strptime(cli_lastmod, HTTP_TS_FMT)
|
||||
cli_ts = calendar.timegm(cli_dt)
|
||||
return file_lastmod, int(file_ts) > int(cli_ts)
|
||||
except Exception as ex:
|
||||
@@ -1106,13 +1121,13 @@ class HttpCli(object):
|
||||
# send reply
|
||||
|
||||
if not is_compressed:
|
||||
self.out_headers["Cache-Control"] = "no-cache"
|
||||
self.out_headers.update(NO_CACHE)
|
||||
|
||||
self.out_headers["Accept-Ranges"] = "bytes"
|
||||
self.send_headers(
|
||||
length=upper - lower,
|
||||
status=status,
|
||||
mime=guess_mime(req_path)[0] or "application/octet-stream",
|
||||
mime=guess_mime(req_path),
|
||||
)
|
||||
|
||||
logmsg += unicode(status) + logtail
|
||||
@@ -1202,6 +1217,34 @@ class HttpCli(object):
|
||||
self.log("{}, {}".format(logmsg, spd))
|
||||
return True
|
||||
|
||||
def tx_ico(self, ext, exact=False):
|
||||
if ext.endswith("/"):
|
||||
ext = "folder"
|
||||
exact = True
|
||||
|
||||
bad = re.compile(r"[](){}/[]|^[0-9_-]*$")
|
||||
n = ext.split(".")[::-1]
|
||||
if not exact:
|
||||
n = n[:-1]
|
||||
|
||||
ext = ""
|
||||
for v in n:
|
||||
if len(v) > 7 or bad.search(v):
|
||||
break
|
||||
|
||||
ext = "{}.{}".format(v, ext)
|
||||
|
||||
ext = ext.rstrip(".") or "unk"
|
||||
if len(ext) > 11:
|
||||
ext = "⋯" + ext[-9:]
|
||||
|
||||
mime, ico = self.ico.get(ext, not exact)
|
||||
|
||||
dt = datetime.utcfromtimestamp(E.t0)
|
||||
lm = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
|
||||
self.reply(ico, mime=mime, headers={"Last-Modified": lm})
|
||||
return True
|
||||
|
||||
def tx_md(self, fs_path):
|
||||
logmsg = "{:4} {} ".format("", self.req)
|
||||
|
||||
@@ -1224,7 +1267,7 @@ class HttpCli(object):
|
||||
file_ts = max(ts_md, ts_html)
|
||||
file_lastmod, do_send = self._chk_lastmod(file_ts)
|
||||
self.out_headers["Last-Modified"] = file_lastmod
|
||||
self.out_headers["Cache-Control"] = "no-cache"
|
||||
self.out_headers.update(NO_CACHE)
|
||||
status = 200 if do_send else 304
|
||||
|
||||
boundary = "\roll\tide"
|
||||
@@ -1236,7 +1279,7 @@ class HttpCli(object):
|
||||
"md_chk_rate": self.args.mcr,
|
||||
"md": boundary,
|
||||
}
|
||||
html = template.render(**targs).encode("utf-8")
|
||||
html = template.render(**targs).encode("utf-8", "replace")
|
||||
html = html.split(boundary.encode("utf-8"))
|
||||
if len(html) != 2:
|
||||
raise Exception("boundary appears in " + html_path)
|
||||
@@ -1268,12 +1311,66 @@ class HttpCli(object):
|
||||
|
||||
def tx_mounts(self):
|
||||
suf = self.urlq(rm=["h"])
|
||||
rvol = [x + "/" if x else x for x in self.rvol]
|
||||
wvol = [x + "/" if x else x for x in self.wvol]
|
||||
html = self.j2("splash", this=self, rvol=rvol, wvol=wvol, url_suf=suf)
|
||||
self.reply(html.encode("utf-8"))
|
||||
rvol, wvol, avol = [
|
||||
[("/" + x).rstrip("/") + "/" for x in y]
|
||||
for y in [self.rvol, self.wvol, self.avol]
|
||||
]
|
||||
|
||||
vstate = {}
|
||||
if self.avol and not self.args.no_rescan:
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.get_volstate")
|
||||
vstate = json.loads(x.get())
|
||||
vstate = {("/" + k).rstrip("/") + "/": v for k, v in vstate.items()}
|
||||
|
||||
html = self.j2(
|
||||
"splash",
|
||||
this=self,
|
||||
rvol=rvol,
|
||||
wvol=wvol,
|
||||
avol=avol,
|
||||
vstate=vstate,
|
||||
url_suf=suf,
|
||||
)
|
||||
self.reply(html.encode("utf-8"), headers=NO_STORE)
|
||||
return True
|
||||
|
||||
def scanvol(self):
|
||||
if not self.readable or not self.writable:
|
||||
raise Pebkac(403, "not admin")
|
||||
|
||||
if self.args.no_rescan:
|
||||
raise Pebkac(403, "disabled by argv")
|
||||
|
||||
vn, _ = self.auth.vfs.get(self.vpath, self.uname, True, True)
|
||||
|
||||
args = [self.auth.vfs.all_vols, [vn.vpath]]
|
||||
x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args)
|
||||
x = x.get()
|
||||
if not x:
|
||||
self.redirect("", "?h")
|
||||
return ""
|
||||
|
||||
raise Pebkac(500, x)
|
||||
|
||||
def tx_stack(self):
|
||||
if not self.readable or not self.writable:
|
||||
raise Pebkac(403, "not admin")
|
||||
|
||||
if self.args.no_stack:
|
||||
raise Pebkac(403, "disabled by argv")
|
||||
|
||||
ret = []
|
||||
names = dict([(t.ident, t.name) for t in threading.enumerate()])
|
||||
for tid, stack in sys._current_frames().items():
|
||||
ret.append("\n\n# {} ({:x})".format(names.get(tid), tid))
|
||||
for fn, lno, name, line in traceback.extract_stack(stack):
|
||||
ret.append('File: "{}", line {}, in {}'.format(fn, lno, name))
|
||||
if line:
|
||||
ret.append(" " + str(line.strip()))
|
||||
|
||||
ret = ("<pre>" + "\n".join(ret)).encode("utf-8")
|
||||
self.reply(ret)
|
||||
|
||||
def tx_tree(self):
|
||||
top = self.uparam["tree"] or ""
|
||||
dst = self.vpath
|
||||
@@ -1303,7 +1400,9 @@ class HttpCli(object):
|
||||
|
||||
try:
|
||||
vn, rem = self.auth.vfs.get(top, self.uname, True, False)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(
|
||||
rem, self.uname, not self.args.no_scandir, True
|
||||
)
|
||||
except:
|
||||
vfs_ls = []
|
||||
vfs_virt = {}
|
||||
@@ -1346,10 +1445,31 @@ class HttpCli(object):
|
||||
)
|
||||
abspath = vn.canonical(rem)
|
||||
|
||||
if not os.path.exists(fsenc(abspath)):
|
||||
# print(abspath)
|
||||
try:
|
||||
st = os.stat(fsenc(abspath))
|
||||
except:
|
||||
raise Pebkac(404)
|
||||
|
||||
if self.readable and not stat.S_ISDIR(st.st_mode):
|
||||
if rem.startswith(".hist/up2k."):
|
||||
raise Pebkac(403)
|
||||
|
||||
th_fmt = self.uparam.get("th")
|
||||
if th_fmt is not None:
|
||||
thp = None
|
||||
if self.thumbcli:
|
||||
thp = self.thumbcli.get(vn.realpath, rem, int(st.st_mtime), th_fmt)
|
||||
|
||||
if thp:
|
||||
return self.tx_file(thp)
|
||||
|
||||
return self.tx_ico(rem)
|
||||
|
||||
if abspath.endswith(".md") and "raw" not in self.uparam:
|
||||
return self.tx_md(abspath)
|
||||
|
||||
return self.tx_file(abspath)
|
||||
|
||||
srv_info = []
|
||||
|
||||
try:
|
||||
@@ -1368,7 +1488,7 @@ class HttpCli(object):
|
||||
)
|
||||
srv_info.append(humansize(bfree.value) + " free")
|
||||
else:
|
||||
sv = os.statvfs(abspath)
|
||||
sv = os.statvfs(fsenc(abspath))
|
||||
free = humansize(sv.f_frsize * sv.f_bfree, True)
|
||||
total = humansize(sv.f_frsize * sv.f_blocks, True)
|
||||
|
||||
@@ -1428,31 +1548,28 @@ class HttpCli(object):
|
||||
if not self.readable:
|
||||
if is_ls:
|
||||
ret = json.dumps(ls_ret)
|
||||
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
|
||||
self.reply(
|
||||
ret.encode("utf-8", "replace"),
|
||||
mime="application/json",
|
||||
headers=NO_STORE,
|
||||
)
|
||||
return True
|
||||
|
||||
if not os.path.isdir(fsenc(abspath)):
|
||||
if not stat.S_ISDIR(st.st_mode):
|
||||
raise Pebkac(404)
|
||||
|
||||
html = self.j2(tpl, **j2a)
|
||||
self.reply(html.encode("utf-8", "replace"))
|
||||
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
|
||||
return True
|
||||
|
||||
if not os.path.isdir(fsenc(abspath)):
|
||||
if abspath.endswith(".md") and "raw" not in self.uparam:
|
||||
return self.tx_md(abspath)
|
||||
|
||||
if rem.startswith(".hist/up2k."):
|
||||
raise Pebkac(403)
|
||||
|
||||
return self.tx_file(abspath)
|
||||
|
||||
for k in ["zip", "tar"]:
|
||||
v = self.uparam.get(k)
|
||||
if v is not None:
|
||||
return self.tx_zip(k, v, vn, rem, [], self.args.ed)
|
||||
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(rem, self.uname, not self.args.no_scandir)
|
||||
fsroot, vfs_ls, vfs_virt = vn.ls(
|
||||
rem, self.uname, not self.args.no_scandir, True
|
||||
)
|
||||
stats = {k: v for k, v in vfs_ls}
|
||||
vfs_ls = [x[0] for x in vfs_ls]
|
||||
vfs_ls.extend(vfs_virt.keys())
|
||||
@@ -1582,7 +1699,11 @@ class HttpCli(object):
|
||||
ls_ret["files"] = files
|
||||
ls_ret["taglist"] = taglist
|
||||
ret = json.dumps(ls_ret)
|
||||
self.reply(ret.encode("utf-8", "replace"), mime="application/json")
|
||||
self.reply(
|
||||
ret.encode("utf-8", "replace"),
|
||||
mime="application/json",
|
||||
headers=NO_STORE,
|
||||
)
|
||||
return True
|
||||
|
||||
j2a["files"] = dirs + files
|
||||
@@ -1592,5 +1713,5 @@ class HttpCli(object):
|
||||
j2a["tag_order"] = json.dumps(vn.flags["mte"].split(","))
|
||||
|
||||
html = self.j2(tpl, **j2a)
|
||||
self.reply(html.encode("utf-8", "replace"))
|
||||
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
|
||||
return True
|
||||
|
||||
@@ -17,6 +17,9 @@ from .__init__ import E
|
||||
from .util import Unrecv
|
||||
from .httpcli import HttpCli
|
||||
from .u2idx import U2idx
|
||||
from .th_cli import ThumbCli
|
||||
from .th_srv import HAVE_PIL
|
||||
from .ico import Ico
|
||||
|
||||
|
||||
class HttpConn(object):
|
||||
@@ -34,6 +37,10 @@ class HttpConn(object):
|
||||
self.auth = hsrv.auth
|
||||
self.cert_path = hsrv.cert_path
|
||||
|
||||
enth = HAVE_PIL and not self.args.no_thumb
|
||||
self.thumbcli = ThumbCli(hsrv.broker) if enth else None
|
||||
self.ico = Ico(self.args)
|
||||
|
||||
self.t0 = time.time()
|
||||
self.nbyte = 0
|
||||
self.workload = 0
|
||||
|
||||
39
copyparty/ico.py
Normal file
39
copyparty/ico.py
Normal file
@@ -0,0 +1,39 @@
|
||||
import hashlib
|
||||
import colorsys
|
||||
|
||||
from .__init__ import PY2
|
||||
|
||||
|
||||
class Ico(object):
|
||||
def __init__(self, args):
|
||||
self.args = args
|
||||
|
||||
def get(self, ext, as_thumb):
|
||||
"""placeholder to make thumbnails not break"""
|
||||
|
||||
h = hashlib.md5(ext.encode("utf-8")).digest()[:2]
|
||||
if PY2:
|
||||
h = [ord(x) for x in h]
|
||||
|
||||
c1 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 0.3)
|
||||
c2 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 1)
|
||||
c = list(c1) + list(c2)
|
||||
c = [int(x * 255) for x in c]
|
||||
c = "".join(["{:02x}".format(x) for x in c])
|
||||
|
||||
h = 30
|
||||
if not self.args.th_no_crop and as_thumb:
|
||||
w, h = self.args.th_size.split("x")
|
||||
h = int(100 / (float(w) / float(h)))
|
||||
|
||||
svg = """\
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g>
|
||||
<rect width="100%" height="100%" fill="#{}" />
|
||||
<text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" xml:space="preserve"
|
||||
fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
|
||||
</g></svg>
|
||||
"""
|
||||
svg = svg.format(h, c[:6], c[6:], ext).encode("utf-8")
|
||||
|
||||
return ["image/svg+xml", svg]
|
||||
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
|
||||
@@ -14,6 +15,204 @@ if not PY2:
|
||||
unicode = str
|
||||
|
||||
|
||||
def have_ff(cmd):
|
||||
if PY2:
|
||||
cmd = (cmd + " -version").encode("ascii").split(b" ")
|
||||
try:
|
||||
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE).communicate()
|
||||
return True
|
||||
except:
|
||||
return False
|
||||
else:
|
||||
return bool(shutil.which(cmd))
|
||||
|
||||
|
||||
HAVE_FFMPEG = have_ff("ffmpeg")
|
||||
HAVE_FFPROBE = have_ff("ffprobe")
|
||||
|
||||
|
||||
class MParser(object):
|
||||
def __init__(self, cmdline):
|
||||
self.tag, args = cmdline.split("=", 1)
|
||||
self.tags = self.tag.split(",")
|
||||
|
||||
self.timeout = 30
|
||||
self.force = False
|
||||
self.audio = "y"
|
||||
self.ext = []
|
||||
|
||||
while True:
|
||||
try:
|
||||
bp = os.path.expanduser(args)
|
||||
if os.path.exists(bp):
|
||||
self.bin = bp
|
||||
return
|
||||
except:
|
||||
pass
|
||||
|
||||
arg, args = args.split(",", 1)
|
||||
arg = arg.lower()
|
||||
|
||||
if arg.startswith("a"):
|
||||
self.audio = arg[1:] # [r]equire [n]ot [d]ontcare
|
||||
continue
|
||||
|
||||
if arg == "f":
|
||||
self.force = True
|
||||
continue
|
||||
|
||||
if arg.startswith("t"):
|
||||
self.timeout = int(arg[1:])
|
||||
continue
|
||||
|
||||
if arg.startswith("e"):
|
||||
self.ext.append(arg[1:])
|
||||
continue
|
||||
|
||||
raise Exception()
|
||||
|
||||
|
||||
def ffprobe(abspath):
|
||||
cmd = [
|
||||
b"ffprobe",
|
||||
b"-hide_banner",
|
||||
b"-show_streams",
|
||||
b"-show_format",
|
||||
b"--",
|
||||
fsenc(abspath),
|
||||
]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
r = p.communicate()
|
||||
txt = r[0].decode("utf-8", "replace")
|
||||
return parse_ffprobe(txt)
|
||||
|
||||
|
||||
def parse_ffprobe(txt):
|
||||
"""ffprobe -show_format -show_streams"""
|
||||
streams = []
|
||||
fmt = {}
|
||||
g = None
|
||||
for ln in [x.rstrip("\r") for x in txt.split("\n")]:
|
||||
try:
|
||||
k, v = ln.split("=", 1)
|
||||
g[k] = v
|
||||
continue
|
||||
except:
|
||||
pass
|
||||
|
||||
if ln == "[STREAM]":
|
||||
g = {}
|
||||
streams.append(g)
|
||||
|
||||
if ln == "[FORMAT]":
|
||||
g = {"codec_type": "format"} # heh
|
||||
fmt = g
|
||||
|
||||
streams = [fmt] + streams
|
||||
ret = {} # processed
|
||||
md = {} # raw tags
|
||||
|
||||
have = {}
|
||||
for strm in streams:
|
||||
typ = strm.get("codec_type")
|
||||
if typ in have:
|
||||
continue
|
||||
|
||||
have[typ] = True
|
||||
kvm = []
|
||||
|
||||
if typ == "audio":
|
||||
kvm = [
|
||||
["codec_name", "ac"],
|
||||
["channel_layout", "chs"],
|
||||
["sample_rate", ".hz"],
|
||||
["bit_rate", ".aq"],
|
||||
["duration", ".dur"],
|
||||
]
|
||||
|
||||
if typ == "video":
|
||||
if strm.get("DISPOSITION:attached_pic") == "1" or fmt.get(
|
||||
"format_name"
|
||||
) in ["mp3", "ogg", "flac"]:
|
||||
continue
|
||||
|
||||
kvm = [
|
||||
["codec_name", "vc"],
|
||||
["pix_fmt", "pixfmt"],
|
||||
["r_frame_rate", ".fps"],
|
||||
["bit_rate", ".vq"],
|
||||
["width", ".resw"],
|
||||
["height", ".resh"],
|
||||
["duration", ".dur"],
|
||||
]
|
||||
|
||||
if typ == "format":
|
||||
kvm = [["duration", ".dur"], ["bit_rate", ".q"]]
|
||||
|
||||
for sk, rk in kvm:
|
||||
v = strm.get(sk)
|
||||
if v is None:
|
||||
continue
|
||||
|
||||
if rk.startswith("."):
|
||||
try:
|
||||
v = float(v)
|
||||
v2 = ret.get(rk)
|
||||
if v2 is None or v > v2:
|
||||
ret[rk] = v
|
||||
except:
|
||||
# sqlite doesnt care but the code below does
|
||||
if v not in ["N/A"]:
|
||||
ret[rk] = v
|
||||
else:
|
||||
ret[rk] = v
|
||||
|
||||
if ret.get("vc") == "ansi": # shellscript
|
||||
return {}, {}
|
||||
|
||||
for strm in streams:
|
||||
for k, v in strm.items():
|
||||
if not k.startswith("TAG:"):
|
||||
continue
|
||||
|
||||
k = k[4:].strip()
|
||||
v = v.strip()
|
||||
if k and v:
|
||||
md[k] = [v]
|
||||
|
||||
for k in [".q", ".vq", ".aq"]:
|
||||
if k in ret:
|
||||
ret[k] /= 1000 # bit_rate=320000
|
||||
|
||||
for k in [".q", ".vq", ".aq", ".resw", ".resh"]:
|
||||
if k in ret:
|
||||
ret[k] = int(ret[k])
|
||||
|
||||
if ".fps" in ret:
|
||||
fps = ret[".fps"]
|
||||
if "/" in fps:
|
||||
fa, fb = fps.split("/")
|
||||
fps = int(fa) * 1.0 / int(fb)
|
||||
|
||||
if fps < 1000 and fmt.get("format_name") not in ["image2", "png_pipe"]:
|
||||
ret[".fps"] = round(fps, 3)
|
||||
else:
|
||||
del ret[".fps"]
|
||||
|
||||
if ".dur" in ret:
|
||||
if ret[".dur"] < 0.1:
|
||||
del ret[".dur"]
|
||||
if ".q" in ret:
|
||||
del ret[".q"]
|
||||
|
||||
if ".resw" in ret and ".resh" in ret:
|
||||
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])
|
||||
|
||||
ret = {k: [0, v] for k, v in ret.items()}
|
||||
|
||||
return ret, md
|
||||
|
||||
|
||||
class MTag(object):
|
||||
def __init__(self, log_func, args):
|
||||
self.log_func = log_func
|
||||
@@ -35,15 +234,7 @@ class MTag(object):
|
||||
self.get = self.get_ffprobe
|
||||
self.prefer_mt = True
|
||||
# about 20x slower
|
||||
if PY2:
|
||||
cmd = [b"ffprobe", b"-version"]
|
||||
try:
|
||||
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
except:
|
||||
self.usable = False
|
||||
else:
|
||||
if not shutil.which("ffprobe"):
|
||||
self.usable = False
|
||||
self.usable = HAVE_FFPROBE
|
||||
|
||||
if self.usable and WINDOWS and sys.version_info < (3, 8):
|
||||
self.usable = False
|
||||
@@ -52,8 +243,10 @@ class MTag(object):
|
||||
self.log(msg, c=1)
|
||||
|
||||
if not self.usable:
|
||||
msg = "need mutagen{} to read media tags so please run this:\n {} -m pip install --user mutagen"
|
||||
self.log(msg.format(or_ffprobe, os.path.basename(sys.executable)), c=1)
|
||||
msg = "need mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
|
||||
self.log(
|
||||
msg.format(or_ffprobe, " " * 37, os.path.basename(sys.executable)), c=1
|
||||
)
|
||||
return
|
||||
|
||||
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||
@@ -201,7 +394,7 @@ class MTag(object):
|
||||
import mutagen
|
||||
|
||||
try:
|
||||
md = mutagen.File(abspath, easy=True)
|
||||
md = mutagen.File(fsenc(abspath), easy=True)
|
||||
x = md.info.length
|
||||
except Exception as ex:
|
||||
return {}
|
||||
@@ -212,7 +405,7 @@ class MTag(object):
|
||||
try:
|
||||
q = int(md.info.bitrate / 1024)
|
||||
except:
|
||||
q = int((os.path.getsize(abspath) / dur) / 128)
|
||||
q = int((os.path.getsize(fsenc(abspath)) / dur) / 128)
|
||||
|
||||
ret[".dur"] = [0, dur]
|
||||
ret[".q"] = [0, q]
|
||||
@@ -222,101 +415,7 @@ class MTag(object):
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_ffprobe(self, abspath):
|
||||
cmd = [b"ffprobe", b"-hide_banner", b"--", fsenc(abspath)]
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||
r = p.communicate()
|
||||
txt = r[1].decode("utf-8", "replace")
|
||||
txt = [x.rstrip("\r") for x in txt.split("\n")]
|
||||
|
||||
"""
|
||||
note:
|
||||
tags which contain newline will be truncated on first \n,
|
||||
ffprobe emits \n and spacepads the : to align visually
|
||||
note:
|
||||
the Stream ln always mentions Audio: if audio
|
||||
the Stream ln usually has kb/s, is more accurate
|
||||
the Duration ln always has kb/s
|
||||
the Metadata: after Chapter may contain BPM info,
|
||||
title : Tempo: 126.0
|
||||
|
||||
Input #0, wav,
|
||||
Metadata:
|
||||
date : <OK>
|
||||
Duration:
|
||||
Chapter #
|
||||
Metadata:
|
||||
title : <NG>
|
||||
|
||||
Input #0, mp3,
|
||||
Metadata:
|
||||
album : <OK>
|
||||
Duration:
|
||||
Stream #0:0: Audio:
|
||||
Stream #0:1: Video:
|
||||
Metadata:
|
||||
comment : <NG>
|
||||
"""
|
||||
|
||||
ptn_md_beg = re.compile("^( +)Metadata:$")
|
||||
ptn_md_kv = re.compile("^( +)([^:]+) *: (.*)")
|
||||
ptn_dur = re.compile("^ *Duration: ([^ ]+)(, |$)")
|
||||
ptn_br1 = re.compile("^ *Duration: .*, bitrate: ([0-9]+) kb/s(, |$)")
|
||||
ptn_br2 = re.compile("^ *Stream.*: Audio:.* ([0-9]+) kb/s(, |$)")
|
||||
ptn_audio = re.compile("^ *Stream .*: Audio: ")
|
||||
ptn_au_parent = re.compile("^ *(Input #|Stream .*: Audio: )")
|
||||
|
||||
ret = {}
|
||||
md = {}
|
||||
in_md = False
|
||||
is_audio = False
|
||||
au_parent = False
|
||||
for ln in txt:
|
||||
m = ptn_md_kv.match(ln)
|
||||
if m and in_md and len(m.group(1)) == in_md:
|
||||
_, k, v = [x.strip() for x in m.groups()]
|
||||
if k != "" and v != "":
|
||||
md[k] = [v]
|
||||
continue
|
||||
else:
|
||||
in_md = False
|
||||
|
||||
m = ptn_md_beg.match(ln)
|
||||
if m and au_parent:
|
||||
in_md = len(m.group(1)) + 2
|
||||
continue
|
||||
|
||||
au_parent = bool(ptn_au_parent.search(ln))
|
||||
|
||||
if ptn_audio.search(ln):
|
||||
is_audio = True
|
||||
|
||||
m = ptn_dur.search(ln)
|
||||
if m:
|
||||
sec = 0
|
||||
tstr = m.group(1)
|
||||
if tstr.lower() != "n/a":
|
||||
try:
|
||||
tf = tstr.split(",")[0].split(".")[0].split(":")
|
||||
for f in tf:
|
||||
sec *= 60
|
||||
sec += int(f)
|
||||
except:
|
||||
self.log("invalid timestr from ffprobe: [{}]".format(tstr), c=3)
|
||||
|
||||
ret[".dur"] = sec
|
||||
m = ptn_br1.search(ln)
|
||||
if m:
|
||||
ret[".q"] = m.group(1)
|
||||
|
||||
m = ptn_br2.search(ln)
|
||||
if m:
|
||||
ret[".q"] = m.group(1)
|
||||
|
||||
if not is_audio:
|
||||
return {}
|
||||
|
||||
ret = {k: [0, v] for k, v in ret.items()}
|
||||
|
||||
ret, md = ffprobe(abspath)
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_bin(self, parsers, abspath):
|
||||
@@ -327,10 +426,10 @@ class MTag(object):
|
||||
env["PYTHONPATH"] = pypath
|
||||
|
||||
ret = {}
|
||||
for tagname, (binpath, timeout) in parsers.items():
|
||||
for tagname, mp in parsers.items():
|
||||
try:
|
||||
cmd = [sys.executable, binpath, abspath]
|
||||
args = {"env": env, "timeout": timeout}
|
||||
cmd = [sys.executable, mp.bin, abspath]
|
||||
args = {"env": env, "timeout": mp.timeout}
|
||||
|
||||
if WINDOWS:
|
||||
args["creationflags"] = 0x4000
|
||||
@@ -339,8 +438,16 @@ class MTag(object):
|
||||
|
||||
cmd = [fsenc(x) for x in cmd]
|
||||
v = sp.check_output(cmd, **args).strip()
|
||||
if v:
|
||||
if not v:
|
||||
continue
|
||||
|
||||
if "," not in tagname:
|
||||
ret[tagname] = v.decode("utf-8")
|
||||
else:
|
||||
v = json.loads(v)
|
||||
for tag in tagname.split(","):
|
||||
if tag and tag in v:
|
||||
ret[tag] = v[tag]
|
||||
except:
|
||||
pass
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import threading
|
||||
@@ -9,9 +10,11 @@ from datetime import datetime, timedelta
|
||||
import calendar
|
||||
|
||||
from .__init__ import PY2, WINDOWS, MACOS, VT100
|
||||
from .util import mp
|
||||
from .authsrv import AuthSrv
|
||||
from .tcpsrv import TcpSrv
|
||||
from .up2k import Up2k
|
||||
from .util import mp
|
||||
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP
|
||||
|
||||
|
||||
class SvcHub(object):
|
||||
@@ -34,9 +37,27 @@ class SvcHub(object):
|
||||
|
||||
self.log = self._log_disabled if args.q else self._log_enabled
|
||||
|
||||
# jank goes here
|
||||
auth = AuthSrv(self.args, self.log, False)
|
||||
|
||||
# initiate all services to manage
|
||||
self.tcpsrv = TcpSrv(self)
|
||||
self.up2k = Up2k(self)
|
||||
self.up2k = Up2k(self, auth.vfs.all_vols)
|
||||
|
||||
self.thumbsrv = None
|
||||
if not args.no_thumb:
|
||||
if HAVE_PIL:
|
||||
if not HAVE_WEBP:
|
||||
args.th_no_webp = True
|
||||
msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old"
|
||||
self.log("thumb", msg, c=3)
|
||||
|
||||
self.thumbsrv = ThumbSrv(self, auth.vfs.all_vols)
|
||||
else:
|
||||
msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n"
|
||||
self.log(
|
||||
"thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3
|
||||
)
|
||||
|
||||
# decide which worker impl to use
|
||||
if self.check_mp_enable():
|
||||
@@ -63,6 +84,17 @@ class SvcHub(object):
|
||||
|
||||
self.tcpsrv.shutdown()
|
||||
self.broker.shutdown()
|
||||
if self.thumbsrv:
|
||||
self.thumbsrv.shutdown()
|
||||
|
||||
for n in range(200): # 10s
|
||||
time.sleep(0.05)
|
||||
if self.thumbsrv.stopped():
|
||||
break
|
||||
|
||||
if n == 3:
|
||||
print("waiting for thumbsrv...")
|
||||
|
||||
print("nailed it")
|
||||
|
||||
def _log_disabled(self, src, msg, c=0):
|
||||
|
||||
49
copyparty/th_cli.py
Normal file
49
copyparty/th_cli.py
Normal file
@@ -0,0 +1,49 @@
|
||||
import os
|
||||
import time
|
||||
|
||||
from .util import Cooldown
|
||||
from .th_srv import thumb_path, THUMBABLE, FMT_FF
|
||||
|
||||
|
||||
class ThumbCli(object):
|
||||
def __init__(self, broker):
|
||||
self.broker = broker
|
||||
self.args = broker.args
|
||||
|
||||
# cache on both sides for less broker spam
|
||||
self.cooldown = Cooldown(self.args.th_poke)
|
||||
|
||||
def get(self, ptop, rem, mtime, fmt):
|
||||
ext = rem.rsplit(".")[-1].lower()
|
||||
if ext not in THUMBABLE:
|
||||
return None
|
||||
|
||||
if self.args.no_vthumb and ext in FMT_FF:
|
||||
return None
|
||||
|
||||
if fmt == "j" and self.args.th_no_jpg:
|
||||
fmt = "w"
|
||||
|
||||
if fmt == "w" and self.args.th_no_webp:
|
||||
fmt = "j"
|
||||
|
||||
tpath = thumb_path(ptop, rem, mtime, fmt)
|
||||
ret = None
|
||||
try:
|
||||
st = os.stat(tpath)
|
||||
if st.st_size:
|
||||
ret = tpath
|
||||
else:
|
||||
return None
|
||||
except:
|
||||
pass
|
||||
|
||||
if ret:
|
||||
tdir = os.path.dirname(tpath)
|
||||
if self.cooldown.poke(tdir):
|
||||
self.broker.put(False, "thumbsrv.poke", tdir)
|
||||
|
||||
return ret
|
||||
|
||||
x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt)
|
||||
return x.get()
|
||||
375
copyparty/th_srv.py
Normal file
375
copyparty/th_srv.py
Normal file
@@ -0,0 +1,375 @@
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import shutil
|
||||
import base64
|
||||
import hashlib
|
||||
import threading
|
||||
import subprocess as sp
|
||||
|
||||
from .__init__ import PY2
|
||||
from .util import fsenc, mchkcmd, Queue, Cooldown, BytesIO
|
||||
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
||||
|
||||
|
||||
if not PY2:
|
||||
unicode = str
|
||||
|
||||
|
||||
HAVE_PIL = False
|
||||
HAVE_HEIF = False
|
||||
HAVE_AVIF = False
|
||||
HAVE_WEBP = False
|
||||
|
||||
try:
|
||||
from PIL import Image, ImageOps
|
||||
|
||||
HAVE_PIL = True
|
||||
try:
|
||||
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
|
||||
HAVE_WEBP = True
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
from pyheif_pillow_opener import register_heif_opener
|
||||
|
||||
register_heif_opener()
|
||||
HAVE_HEIF = True
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
import pillow_avif
|
||||
|
||||
HAVE_AVIF = True
|
||||
except:
|
||||
pass
|
||||
except:
|
||||
pass
|
||||
|
||||
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||
# ffmpeg -formats
|
||||
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
|
||||
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
|
||||
|
||||
if HAVE_HEIF:
|
||||
FMT_PIL += " heif heifs heic heics"
|
||||
|
||||
if HAVE_AVIF:
|
||||
FMT_PIL += " avif avifs"
|
||||
|
||||
FMT_PIL, FMT_FF = [{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FF]]
|
||||
|
||||
|
||||
THUMBABLE = {}
|
||||
|
||||
if HAVE_PIL:
|
||||
THUMBABLE.update(FMT_PIL)
|
||||
|
||||
if HAVE_FFMPEG and HAVE_FFPROBE:
|
||||
THUMBABLE.update(FMT_FF)
|
||||
|
||||
|
||||
def thumb_path(ptop, rem, mtime, fmt):
|
||||
# base16 = 16 = 256
|
||||
# b64-lc = 38 = 1444
|
||||
# base64 = 64 = 4096
|
||||
try:
|
||||
rd, fn = rem.rsplit("/", 1)
|
||||
except:
|
||||
rd = ""
|
||||
fn = rem
|
||||
|
||||
if rd:
|
||||
h = hashlib.sha512(fsenc(rd)).digest()[:24]
|
||||
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
|
||||
else:
|
||||
rd = "top"
|
||||
|
||||
# could keep original filenames but this is safer re pathlen
|
||||
h = hashlib.sha512(fsenc(fn)).digest()[:24]
|
||||
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||
|
||||
return "{}/.hist/th/{}/{}.{:x}.{}".format(
|
||||
ptop, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
|
||||
)
|
||||
|
||||
|
||||
class ThumbSrv(object):
|
||||
def __init__(self, hub, vols):
|
||||
self.hub = hub
|
||||
self.vols = [v.realpath for v in vols.values()]
|
||||
|
||||
self.args = hub.args
|
||||
self.log_func = hub.log
|
||||
|
||||
res = hub.args.th_size.split("x")
|
||||
self.res = tuple([int(x) for x in res])
|
||||
self.poke_cd = Cooldown(self.args.th_poke)
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.busy = {}
|
||||
self.stopping = False
|
||||
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
self.q = Queue(self.nthr * 4)
|
||||
for _ in range(self.nthr):
|
||||
t = threading.Thread(target=self.worker)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE):
|
||||
missing = []
|
||||
if not HAVE_FFMPEG:
|
||||
missing.append("ffmpeg")
|
||||
|
||||
if not HAVE_FFPROBE:
|
||||
missing.append("ffprobe")
|
||||
|
||||
msg = "cannot create video thumbnails because some of the required programs are not available: "
|
||||
msg += ", ".join(missing)
|
||||
self.log(msg, c=1)
|
||||
|
||||
t = threading.Thread(target=self.cleaner)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
def log(self, msg, c=0):
|
||||
self.log_func("thumb", msg, c)
|
||||
|
||||
def shutdown(self):
|
||||
self.stopping = True
|
||||
for _ in range(self.nthr):
|
||||
self.q.put(None)
|
||||
|
||||
def stopped(self):
|
||||
with self.mutex:
|
||||
return not self.nthr
|
||||
|
||||
def get(self, ptop, rem, mtime, fmt):
|
||||
tpath = thumb_path(ptop, rem, mtime, fmt)
|
||||
abspath = os.path.join(ptop, rem)
|
||||
cond = threading.Condition()
|
||||
with self.mutex:
|
||||
try:
|
||||
self.busy[tpath].append(cond)
|
||||
self.log("wait {}".format(tpath))
|
||||
except:
|
||||
thdir = os.path.dirname(tpath)
|
||||
try:
|
||||
os.makedirs(thdir)
|
||||
except:
|
||||
pass
|
||||
|
||||
inf_path = os.path.join(thdir, "dir.txt")
|
||||
if not os.path.exists(inf_path):
|
||||
with open(inf_path, "wb") as f:
|
||||
f.write(fsenc(os.path.dirname(abspath)))
|
||||
|
||||
self.busy[tpath] = [cond]
|
||||
self.q.put([abspath, tpath])
|
||||
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
|
||||
|
||||
while not self.stopping:
|
||||
with self.mutex:
|
||||
if tpath not in self.busy:
|
||||
break
|
||||
|
||||
with cond:
|
||||
cond.wait()
|
||||
|
||||
try:
|
||||
st = os.stat(tpath)
|
||||
if st.st_size:
|
||||
return tpath
|
||||
except:
|
||||
pass
|
||||
|
||||
return None
|
||||
|
||||
def worker(self):
|
||||
while not self.stopping:
|
||||
task = self.q.get()
|
||||
if not task:
|
||||
break
|
||||
|
||||
abspath, tpath = task
|
||||
ext = abspath.split(".")[-1].lower()
|
||||
fun = None
|
||||
if not os.path.exists(tpath):
|
||||
if ext in FMT_PIL:
|
||||
fun = self.conv_pil
|
||||
elif ext in FMT_FF:
|
||||
fun = self.conv_ffmpeg
|
||||
|
||||
if fun:
|
||||
try:
|
||||
fun(abspath, tpath)
|
||||
except Exception as ex:
|
||||
msg = "{} failed on {}\n {!r}"
|
||||
self.log(msg.format(fun.__name__, abspath, ex), 3)
|
||||
with open(tpath, "wb") as _:
|
||||
pass
|
||||
|
||||
with self.mutex:
|
||||
subs = self.busy[tpath]
|
||||
del self.busy[tpath]
|
||||
|
||||
for x in subs:
|
||||
with x:
|
||||
x.notify_all()
|
||||
|
||||
with self.mutex:
|
||||
self.nthr -= 1
|
||||
|
||||
def conv_pil(self, abspath, tpath):
|
||||
with Image.open(fsenc(abspath)) as im:
|
||||
crop = not self.args.th_no_crop
|
||||
res2 = self.res
|
||||
if crop:
|
||||
res2 = (res2[0] * 2, res2[1] * 2)
|
||||
|
||||
try:
|
||||
im.thumbnail(res2, resample=Image.LANCZOS)
|
||||
if crop:
|
||||
iw, ih = im.size
|
||||
dw, dh = self.res
|
||||
res = (min(iw, dw), min(ih, dh))
|
||||
im = ImageOps.fit(im, res, method=Image.LANCZOS)
|
||||
except:
|
||||
im.thumbnail(self.res)
|
||||
|
||||
if im.mode not in ("RGB", "L"):
|
||||
im = im.convert("RGB")
|
||||
|
||||
if tpath.endswith(".webp"):
|
||||
# quality 80 = pillow-default
|
||||
# quality 75 = ffmpeg-default
|
||||
# method 0 = pillow-default, fast
|
||||
# method 4 = ffmpeg-default
|
||||
# method 6 = max, slow
|
||||
im.save(tpath, quality=40, method=6)
|
||||
else:
|
||||
im.save(tpath, quality=40) # default=75
|
||||
|
||||
def conv_ffmpeg(self, abspath, tpath):
|
||||
ret, _ = ffprobe(abspath)
|
||||
|
||||
dur = ret[".dur"][1] if ".dur" in ret else 4
|
||||
seek = "{:.0f}".format(dur / 3)
|
||||
|
||||
scale = "scale={0}:{1}:force_original_aspect_ratio="
|
||||
if self.args.th_no_crop:
|
||||
scale += "decrease,setsar=1:1"
|
||||
else:
|
||||
scale += "increase,crop={0}:{1},setsar=1:1"
|
||||
|
||||
scale = scale.format(*list(self.res)).encode("utf-8")
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-hide_banner",
|
||||
b"-ss",
|
||||
seek,
|
||||
b"-i",
|
||||
fsenc(abspath),
|
||||
b"-vf",
|
||||
scale,
|
||||
b"-vframes",
|
||||
b"1",
|
||||
]
|
||||
|
||||
if tpath.endswith(".jpg"):
|
||||
cmd += [
|
||||
b"-q:v",
|
||||
b"6", # default=??
|
||||
]
|
||||
else:
|
||||
cmd += [
|
||||
b"-q:v",
|
||||
b"50", # default=75
|
||||
b"-compression_level:v",
|
||||
b"6", # default=4, 0=fast, 6=max
|
||||
]
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
|
||||
mchkcmd(cmd)
|
||||
|
||||
def poke(self, tdir):
|
||||
if not self.poke_cd.poke(tdir):
|
||||
return
|
||||
|
||||
ts = int(time.time())
|
||||
try:
|
||||
p1 = os.path.dirname(tdir)
|
||||
p2 = os.path.dirname(p1)
|
||||
for dp in [tdir, p1, p2]:
|
||||
os.utime(fsenc(dp), (ts, ts))
|
||||
except:
|
||||
pass
|
||||
|
||||
def cleaner(self):
|
||||
interval = self.args.th_clean
|
||||
while True:
|
||||
time.sleep(interval)
|
||||
for vol in self.vols:
|
||||
vol += "/.hist/th"
|
||||
self.log("\033[Jcln {}/\033[A".format(vol))
|
||||
self.clean(vol)
|
||||
|
||||
self.log("\033[Jcln ok")
|
||||
|
||||
def clean(self, vol):
|
||||
# self.log("cln {}".format(vol))
|
||||
maxage = self.args.th_maxage
|
||||
now = time.time()
|
||||
prev_b64 = None
|
||||
prev_fp = None
|
||||
try:
|
||||
ents = os.listdir(vol)
|
||||
except:
|
||||
return
|
||||
|
||||
for f in sorted(ents):
|
||||
fp = os.path.join(vol, f)
|
||||
cmp = fp.lower().replace("\\", "/")
|
||||
|
||||
# "top" or b64 prefix/full (a folder)
|
||||
if len(f) <= 3 or len(f) == 24:
|
||||
age = now - os.path.getmtime(fp)
|
||||
if age > maxage:
|
||||
with self.mutex:
|
||||
safe = True
|
||||
for k in self.busy.keys():
|
||||
if k.lower().replace("\\", "/").startswith(cmp):
|
||||
safe = False
|
||||
break
|
||||
|
||||
if safe:
|
||||
self.log("rm -rf [{}]".format(fp))
|
||||
shutil.rmtree(fp, ignore_errors=True)
|
||||
else:
|
||||
self.clean(fp)
|
||||
continue
|
||||
|
||||
# thumb file
|
||||
try:
|
||||
b64, ts, ext = f.split(".")
|
||||
if len(b64) != 24 or len(ts) != 8 or ext not in ["jpg", "webp"]:
|
||||
raise Exception()
|
||||
|
||||
ts = int(ts, 16)
|
||||
except:
|
||||
if f != "dir.txt":
|
||||
self.log("foreign file in thumbs dir: [{}]".format(fp), 1)
|
||||
|
||||
continue
|
||||
|
||||
if b64 == prev_b64:
|
||||
self.log("rm replaced [{}]".format(fp))
|
||||
os.unlink(prev_fp)
|
||||
|
||||
prev_b64 = b64
|
||||
prev_fp = fp
|
||||
@@ -47,11 +47,11 @@ class U2idx(object):
|
||||
fhash = body["hash"]
|
||||
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
|
||||
|
||||
uq = "substr(w,1,16) = ? and w = ?"
|
||||
uq = "where substr(w,1,16) = ? and w = ?"
|
||||
uv = [wark[:16], wark]
|
||||
|
||||
try:
|
||||
return self.run_query(vols, uq, uv, {})[0]
|
||||
return self.run_query(vols, uq, uv)[0]
|
||||
except Exception as ex:
|
||||
raise Pebkac(500, repr(ex))
|
||||
|
||||
@@ -67,37 +67,120 @@ class U2idx(object):
|
||||
self.cur[ptop] = cur
|
||||
return cur
|
||||
|
||||
def search(self, vols, body):
|
||||
def search(self, vols, uq):
|
||||
"""search by query params"""
|
||||
if not HAVE_SQLITE3:
|
||||
return []
|
||||
|
||||
qobj = {}
|
||||
_conv_sz(qobj, body, "sz_min", "up.sz >= ?")
|
||||
_conv_sz(qobj, body, "sz_max", "up.sz <= ?")
|
||||
_conv_dt(qobj, body, "dt_min", "up.mt >= ?")
|
||||
_conv_dt(qobj, body, "dt_max", "up.mt <= ?")
|
||||
for seg, dk in [["path", "up.rd"], ["name", "up.fn"]]:
|
||||
if seg in body:
|
||||
_conv_txt(qobj, body, seg, dk)
|
||||
q = ""
|
||||
va = []
|
||||
joins = ""
|
||||
is_key = True
|
||||
is_size = False
|
||||
is_date = False
|
||||
kw_key = ["(", ")", "and ", "or ", "not "]
|
||||
kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "]
|
||||
ptn_mt = re.compile(r"^\.?[a-z]+$")
|
||||
mt_ctr = 0
|
||||
mt_keycmp = "substr(up.w,1,16)"
|
||||
mt_keycmp2 = None
|
||||
|
||||
uq, uv = _sqlize(qobj)
|
||||
while True:
|
||||
uq = uq.strip()
|
||||
if not uq:
|
||||
break
|
||||
|
||||
qobj = {}
|
||||
if "tags" in body:
|
||||
_conv_txt(qobj, body, "tags", "mt.v")
|
||||
ok = False
|
||||
for kw in kw_key + kw_val:
|
||||
if uq.startswith(kw):
|
||||
is_key = kw in kw_key
|
||||
uq = uq[len(kw) :]
|
||||
ok = True
|
||||
q += kw
|
||||
break
|
||||
|
||||
if "adv" in body:
|
||||
_conv_adv(qobj, body, "adv")
|
||||
if ok:
|
||||
continue
|
||||
|
||||
v, uq = (uq + " ").split(" ", 1)
|
||||
if is_key:
|
||||
is_key = False
|
||||
|
||||
if v == "size":
|
||||
v = "up.sz"
|
||||
is_size = True
|
||||
|
||||
elif v == "date":
|
||||
v = "up.mt"
|
||||
is_date = True
|
||||
|
||||
elif v == "path":
|
||||
v = "up.rd"
|
||||
|
||||
elif v == "name":
|
||||
v = "up.fn"
|
||||
|
||||
elif v == "tags" or ptn_mt.match(v):
|
||||
mt_ctr += 1
|
||||
mt_keycmp2 = "mt{}.w".format(mt_ctr)
|
||||
joins += "inner join mt mt{} on {} = {} ".format(
|
||||
mt_ctr, mt_keycmp, mt_keycmp2
|
||||
)
|
||||
if v == "tags":
|
||||
v = "mt{0}.v".format(mt_ctr)
|
||||
else:
|
||||
v = "mt{0}.k = '{1}' and mt{0}.v".format(mt_ctr, v)
|
||||
|
||||
else:
|
||||
raise Pebkac(400, "invalid key [" + v + "]")
|
||||
|
||||
q += v + " "
|
||||
continue
|
||||
|
||||
head = ""
|
||||
tail = ""
|
||||
|
||||
if is_date:
|
||||
is_date = False
|
||||
v = v.upper().rstrip("Z").replace(",", " ").replace("T", " ")
|
||||
while " " in v:
|
||||
v = v.replace(" ", " ")
|
||||
|
||||
for fmt in [
|
||||
"%Y-%m-%d %H:%M:%S",
|
||||
"%Y-%m-%d %H:%M",
|
||||
"%Y-%m-%d %H",
|
||||
"%Y-%m-%d",
|
||||
]:
|
||||
try:
|
||||
v = datetime.strptime(v, fmt).timestamp()
|
||||
break
|
||||
except:
|
||||
pass
|
||||
|
||||
elif is_size:
|
||||
is_size = False
|
||||
v = int(float(v) * 1024 * 1024)
|
||||
|
||||
else:
|
||||
if v.startswith("*"):
|
||||
head = "'%'||"
|
||||
v = v[1:]
|
||||
|
||||
if v.endswith("*"):
|
||||
tail = "||'%'"
|
||||
v = v[:-1]
|
||||
|
||||
q += " {}?{} ".format(head, tail)
|
||||
va.append(v)
|
||||
is_key = True
|
||||
|
||||
try:
|
||||
return self.run_query(vols, uq, uv, qobj)
|
||||
return self.run_query(vols, joins + "where " + q, va)
|
||||
except Exception as ex:
|
||||
raise Pebkac(500, repr(ex))
|
||||
|
||||
def run_query(self, vols, uq, uv, targs):
|
||||
self.log("qs: {} {} , {}".format(uq, repr(uv), repr(targs)))
|
||||
|
||||
def run_query(self, vols, uq, uv):
|
||||
done_flag = []
|
||||
self.active_id = "{:.6f}_{}".format(
|
||||
time.time(), threading.current_thread().ident
|
||||
@@ -112,35 +195,14 @@ class U2idx(object):
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
if not targs:
|
||||
if not uq:
|
||||
q = "select * from up"
|
||||
v = ()
|
||||
else:
|
||||
q = "select * from up where " + uq
|
||||
v = tuple(uv)
|
||||
if not uq:
|
||||
q = "select * from up"
|
||||
v = ()
|
||||
else:
|
||||
q = "select up.* from up"
|
||||
keycmp = "substr(up.w,1,16)"
|
||||
where = []
|
||||
v = []
|
||||
ctr = 0
|
||||
for tq, tv in sorted(targs.items()):
|
||||
ctr += 1
|
||||
tq = tq.split("\n")[0]
|
||||
keycmp2 = "mt{}.w".format(ctr)
|
||||
q += " inner join mt mt{} on {} = {}".format(ctr, keycmp, keycmp2)
|
||||
keycmp = keycmp2
|
||||
where.append(tq.replace("mt.", keycmp[:-1]))
|
||||
v.append(tv)
|
||||
q = "select up.* from up " + uq
|
||||
v = tuple(uv)
|
||||
|
||||
if uq:
|
||||
where.append(uq)
|
||||
v.extend(uv)
|
||||
|
||||
q += " where " + (" and ".join(where))
|
||||
|
||||
# self.log("q2: {} {}".format(q, repr(v)))
|
||||
self.log("qs: {!r} {!r}".format(q, v))
|
||||
|
||||
ret = []
|
||||
lim = 1000
|
||||
@@ -163,7 +225,7 @@ class U2idx(object):
|
||||
if rd.startswith("//") or fn.startswith("//"):
|
||||
rd, fn = s3dec(rd, fn)
|
||||
|
||||
rp = os.path.join(vtop, rd, fn).replace("\\", "/")
|
||||
rp = "/".join([x for x in [vtop, rd, fn] if x])
|
||||
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
|
||||
|
||||
for hit in sret:
|
||||
@@ -204,78 +266,3 @@ def _open(ptop):
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
if os.path.exists(db_path):
|
||||
return sqlite3.connect(db_path).cursor()
|
||||
|
||||
|
||||
def _conv_sz(q, body, k, sql):
|
||||
if k in body:
|
||||
q[sql] = int(float(body[k]) * 1024 * 1024)
|
||||
|
||||
|
||||
def _conv_dt(q, body, k, sql):
|
||||
if k not in body:
|
||||
return
|
||||
|
||||
v = body[k].upper().rstrip("Z").replace(",", " ").replace("T", " ")
|
||||
while " " in v:
|
||||
v = v.replace(" ", " ")
|
||||
|
||||
for fmt in ["%Y-%m-%d %H:%M:%S", "%Y-%m-%d %H:%M", "%Y-%m-%d %H", "%Y-%m-%d"]:
|
||||
try:
|
||||
ts = datetime.strptime(v, fmt).timestamp()
|
||||
break
|
||||
except:
|
||||
ts = None
|
||||
|
||||
if ts:
|
||||
q[sql] = ts
|
||||
|
||||
|
||||
def _conv_txt(q, body, k, sql):
|
||||
for v in body[k].split(" "):
|
||||
inv = ""
|
||||
if v.startswith("-"):
|
||||
inv = "not"
|
||||
v = v[1:]
|
||||
|
||||
if not v:
|
||||
continue
|
||||
|
||||
head = "'%'||"
|
||||
if v.startswith("^"):
|
||||
head = ""
|
||||
v = v[1:]
|
||||
|
||||
tail = "||'%'"
|
||||
if v.endswith("$"):
|
||||
tail = ""
|
||||
v = v[:-1]
|
||||
|
||||
qk = "{} {} like {}?{}".format(sql, inv, head, tail)
|
||||
q[qk + "\n" + v] = u8safe(v)
|
||||
|
||||
|
||||
def _conv_adv(q, body, k):
|
||||
ptn = re.compile(r"^(\.?[a-z]+) *(==?|!=|<=?|>=?) *(.*)$")
|
||||
|
||||
parts = body[k].split(" ")
|
||||
parts = [x.strip() for x in parts if x.strip()]
|
||||
|
||||
for part in parts:
|
||||
m = ptn.match(part)
|
||||
if not m:
|
||||
p = html_escape(part)
|
||||
raise Pebkac(400, "invalid argument [" + p + "]")
|
||||
|
||||
k, op, v = m.groups()
|
||||
qk = "mt.k = '{}' and mt.v {} ?".format(k, op)
|
||||
q[qk + "\n" + v] = u8safe(v)
|
||||
|
||||
|
||||
def _sqlize(qobj):
|
||||
keys = []
|
||||
values = []
|
||||
for k, v in sorted(qobj.items()):
|
||||
keys.append(k.split("\n")[0])
|
||||
values.append(v)
|
||||
|
||||
return " and ".join(keys), values
|
||||
|
||||
@@ -31,8 +31,7 @@ from .util import (
|
||||
statdir,
|
||||
s2hms,
|
||||
)
|
||||
from .mtag import MTag
|
||||
from .authsrv import AuthSrv
|
||||
from .mtag import MTag, MParser
|
||||
|
||||
try:
|
||||
HAVE_SQLITE3 = True
|
||||
@@ -49,24 +48,26 @@ class Up2k(object):
|
||||
* ~/.config flatfiles for active jobs
|
||||
"""
|
||||
|
||||
def __init__(self, broker):
|
||||
self.broker = broker
|
||||
self.args = broker.args
|
||||
self.log_func = broker.log
|
||||
def __init__(self, hub, all_vols):
|
||||
self.hub = hub
|
||||
self.args = hub.args
|
||||
self.log_func = hub.log
|
||||
|
||||
# config
|
||||
self.salt = broker.args.salt
|
||||
self.salt = self.args.salt
|
||||
|
||||
# state
|
||||
self.mutex = threading.Lock()
|
||||
self.hashq = Queue()
|
||||
self.tagq = Queue()
|
||||
self.volstate = {}
|
||||
self.registry = {}
|
||||
self.entags = {}
|
||||
self.flags = {}
|
||||
self.cur = {}
|
||||
self.mtag = None
|
||||
self.pending_tags = None
|
||||
self.mtp_parsers = {}
|
||||
|
||||
self.mem_cur = None
|
||||
self.sqlite_ver = None
|
||||
@@ -92,9 +93,15 @@ class Up2k(object):
|
||||
if not HAVE_SQLITE3:
|
||||
self.log("could not initialize sqlite3, will use in-memory registry only")
|
||||
|
||||
# this is kinda jank
|
||||
auth = AuthSrv(self.args, self.log_func, False)
|
||||
have_e2d = self.init_indexes(auth)
|
||||
if self.args.no_fastboot:
|
||||
self.deferred_init(all_vols)
|
||||
else:
|
||||
t = threading.Thread(target=self.deferred_init, args=(all_vols,))
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
def deferred_init(self, all_vols):
|
||||
have_e2d = self.init_indexes(all_vols)
|
||||
|
||||
if have_e2d:
|
||||
thr = threading.Thread(target=self._snapshot)
|
||||
@@ -117,6 +124,19 @@ class Up2k(object):
|
||||
def log(self, msg, c=0):
|
||||
self.log_func("up2k", msg + "\033[K", c)
|
||||
|
||||
def get_volstate(self):
|
||||
return json.dumps(self.volstate, indent=4)
|
||||
|
||||
def rescan(self, all_vols, scan_vols):
|
||||
if hasattr(self, "pp"):
|
||||
return "cannot initiate; scan is already in progress"
|
||||
|
||||
args = (all_vols, scan_vols)
|
||||
t = threading.Thread(target=self.init_indexes, args=args)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
return None
|
||||
|
||||
def _vis_job_progress(self, job):
|
||||
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
|
||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
@@ -139,9 +159,9 @@ class Up2k(object):
|
||||
|
||||
return True, ret
|
||||
|
||||
def init_indexes(self, auth):
|
||||
def init_indexes(self, all_vols, scan_vols=[]):
|
||||
self.pp = ProgressPrinter()
|
||||
vols = auth.vfs.all_vols.values()
|
||||
vols = all_vols.values()
|
||||
t0 = time.time()
|
||||
have_e2d = False
|
||||
|
||||
@@ -161,24 +181,35 @@ class Up2k(object):
|
||||
for vol in vols:
|
||||
try:
|
||||
os.listdir(vol.realpath)
|
||||
live_vols.append(vol)
|
||||
except:
|
||||
self.volstate[vol.vpath] = "OFFLINE (cannot access folder)"
|
||||
self.log("cannot access " + vol.realpath, c=1)
|
||||
continue
|
||||
|
||||
if not self.register_vpath(vol.realpath, vol.flags):
|
||||
# self.log("db not enabled for {}".format(m, vol.realpath))
|
||||
continue
|
||||
|
||||
if vol.vpath in scan_vols or not scan_vols:
|
||||
live_vols.append(vol)
|
||||
|
||||
if vol.vpath not in self.volstate:
|
||||
self.volstate[vol.vpath] = "OFFLINE (pending initialization)"
|
||||
|
||||
vols = live_vols
|
||||
need_vac = {}
|
||||
|
||||
need_mtag = False
|
||||
for vol in vols:
|
||||
if "e2t" in vol.flags:
|
||||
need_mtag = True
|
||||
|
||||
if need_mtag:
|
||||
if need_mtag and not self.mtag:
|
||||
self.mtag = MTag(self.log_func, self.args)
|
||||
if not self.mtag.usable:
|
||||
self.mtag = None
|
||||
|
||||
# e2ds(a) volumes first,
|
||||
# also covers tags where e2ts is set
|
||||
# e2ds(a) volumes first
|
||||
for vol in vols:
|
||||
en = {}
|
||||
if "mte" in vol.flags:
|
||||
@@ -190,26 +221,45 @@ class Up2k(object):
|
||||
have_e2d = True
|
||||
|
||||
if "e2ds" in vol.flags:
|
||||
r = self._build_file_index(vol, vols)
|
||||
if not r:
|
||||
needed_mutagen = True
|
||||
self.volstate[vol.vpath] = "busy (hashing files)"
|
||||
_, vac = self._build_file_index(vol, list(all_vols.values()))
|
||||
if vac:
|
||||
need_vac[vol] = True
|
||||
|
||||
if "e2ts" not in vol.flags:
|
||||
m = "online, idle"
|
||||
else:
|
||||
m = "online (tags pending)"
|
||||
|
||||
self.volstate[vol.vpath] = m
|
||||
|
||||
# open the rest + do any e2ts(a)
|
||||
needed_mutagen = False
|
||||
for vol in vols:
|
||||
r = self.register_vpath(vol.realpath, vol.flags)
|
||||
if not r or "e2ts" not in vol.flags:
|
||||
if "e2ts" not in vol.flags:
|
||||
continue
|
||||
|
||||
cur, db_path, sz0 = r
|
||||
n_add, n_rm, success = self._build_tags_index(vol.realpath)
|
||||
m = "online (reading tags)"
|
||||
self.volstate[vol.vpath] = m
|
||||
self.log("{} [{}]".format(m, vol.realpath))
|
||||
|
||||
nadd, nrm, success = self._build_tags_index(vol)
|
||||
if not success:
|
||||
needed_mutagen = True
|
||||
|
||||
if n_add or n_rm:
|
||||
self.vac(cur, db_path, n_add, n_rm, sz0)
|
||||
if nadd or nrm:
|
||||
need_vac[vol] = True
|
||||
|
||||
self.volstate[vol.vpath] = "online (mtp soon)"
|
||||
|
||||
for vol in need_vac:
|
||||
cur, _ = self.register_vpath(vol.realpath, vol.flags)
|
||||
with self.mutex:
|
||||
cur.connection.commit()
|
||||
cur.execute("vacuum")
|
||||
|
||||
self.pp.end = True
|
||||
|
||||
msg = "{} volumes in {:.2f} sec"
|
||||
self.log(msg.format(len(vols), time.time() - t0))
|
||||
|
||||
@@ -217,105 +267,104 @@ class Up2k(object):
|
||||
msg = "could not read tags because no backends are available (mutagen or ffprobe)"
|
||||
self.log(msg, c=1)
|
||||
|
||||
thr = None
|
||||
if self.mtag:
|
||||
m = "online (running mtp)"
|
||||
if scan_vols:
|
||||
thr = threading.Thread(target=self._run_all_mtp)
|
||||
thr.daemon = True
|
||||
else:
|
||||
del self.pp
|
||||
m = "online, idle"
|
||||
|
||||
for vol in vols:
|
||||
self.volstate[vol.vpath] = m
|
||||
|
||||
if thr:
|
||||
thr.start()
|
||||
|
||||
return have_e2d
|
||||
|
||||
def register_vpath(self, ptop, flags):
|
||||
with self.mutex:
|
||||
if ptop in self.registry:
|
||||
return None
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
if ptop in self.registry:
|
||||
return [self.cur[ptop], db_path]
|
||||
|
||||
_, flags = self._expr_idx_filter(flags)
|
||||
_, flags = self._expr_idx_filter(flags)
|
||||
|
||||
ft = "\033[0;32m{}{:.0}"
|
||||
ff = "\033[0;35m{}{:.0}"
|
||||
fv = "\033[0;36m{}:\033[1;30m{}"
|
||||
a = [
|
||||
(ft if v is True else ff if v is False else fv).format(k, str(v))
|
||||
for k, v in flags.items()
|
||||
]
|
||||
if a:
|
||||
self.log(" ".join(sorted(a)) + "\033[0m")
|
||||
ft = "\033[0;32m{}{:.0}"
|
||||
ff = "\033[0;35m{}{:.0}"
|
||||
fv = "\033[0;36m{}:\033[1;30m{}"
|
||||
a = [
|
||||
(ft if v is True else ff if v is False else fv).format(k, str(v))
|
||||
for k, v in flags.items()
|
||||
]
|
||||
if a:
|
||||
self.log(" ".join(sorted(a)) + "\033[0m")
|
||||
|
||||
reg = {}
|
||||
path = os.path.join(ptop, ".hist", "up2k.snap")
|
||||
if "e2d" in flags and os.path.exists(path):
|
||||
with gzip.GzipFile(path, "rb") as f:
|
||||
j = f.read().decode("utf-8")
|
||||
reg = {}
|
||||
path = os.path.join(ptop, ".hist", "up2k.snap")
|
||||
if "e2d" in flags and os.path.exists(path):
|
||||
with gzip.GzipFile(path, "rb") as f:
|
||||
j = f.read().decode("utf-8")
|
||||
|
||||
reg = json.loads(j)
|
||||
for _, job in reg.items():
|
||||
reg2 = json.loads(j)
|
||||
for k, job in reg2.items():
|
||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
if os.path.exists(fsenc(path)):
|
||||
reg[k] = job
|
||||
job["poke"] = time.time()
|
||||
else:
|
||||
self.log("ign deleted file in snap: [{}]".format(path))
|
||||
|
||||
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
|
||||
m = [m] + self._vis_reg_progress(reg)
|
||||
self.log("\n".join(m))
|
||||
|
||||
self.flags[ptop] = flags
|
||||
self.registry[ptop] = reg
|
||||
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
|
||||
return None
|
||||
|
||||
try:
|
||||
os.mkdir(os.path.join(ptop, ".hist"))
|
||||
except:
|
||||
pass
|
||||
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
if ptop in self.cur:
|
||||
return None
|
||||
|
||||
try:
|
||||
sz0 = 0
|
||||
if os.path.exists(db_path):
|
||||
sz0 = os.path.getsize(db_path) // 1024
|
||||
|
||||
cur = self._open_db(db_path)
|
||||
self.cur[ptop] = cur
|
||||
return [cur, db_path, sz0]
|
||||
except:
|
||||
msg = "cannot use database at [{}]:\n{}"
|
||||
self.log(msg.format(ptop, traceback.format_exc()))
|
||||
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
|
||||
m = [m] + self._vis_reg_progress(reg)
|
||||
self.log("\n".join(m))
|
||||
|
||||
self.flags[ptop] = flags
|
||||
self.registry[ptop] = reg
|
||||
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
|
||||
return None
|
||||
|
||||
try:
|
||||
os.mkdir(os.path.join(ptop, ".hist"))
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
cur = self._open_db(db_path)
|
||||
self.cur[ptop] = cur
|
||||
return [cur, db_path]
|
||||
except:
|
||||
msg = "cannot use database at [{}]:\n{}"
|
||||
self.log(msg.format(ptop, traceback.format_exc()))
|
||||
|
||||
return None
|
||||
|
||||
def _build_file_index(self, vol, all_vols):
|
||||
do_vac = False
|
||||
top = vol.realpath
|
||||
reg = self.register_vpath(top, vol.flags)
|
||||
if not reg:
|
||||
return
|
||||
with self.mutex:
|
||||
cur, _ = self.register_vpath(top, vol.flags)
|
||||
|
||||
_, db_path, sz0 = reg
|
||||
dbw = [reg[0], 0, time.time()]
|
||||
self.pp.n = next(dbw[0].execute("select count(w) from up"))[0]
|
||||
dbw = [cur, 0, time.time()]
|
||||
self.pp.n = next(dbw[0].execute("select count(w) from up"))[0]
|
||||
|
||||
excl = [
|
||||
vol.realpath + "/" + d.vpath[len(vol.vpath) :].lstrip("/")
|
||||
for d in all_vols
|
||||
if d != vol and (d.vpath.startswith(vol.vpath + "/") or not vol.vpath)
|
||||
]
|
||||
n_add = self._build_dir(dbw, top, set(excl), top)
|
||||
n_rm = self._drop_lost(dbw[0], top)
|
||||
if dbw[1]:
|
||||
self.log("commit {} new files".format(dbw[1]))
|
||||
dbw[0].connection.commit()
|
||||
excl = [
|
||||
vol.realpath + "/" + d.vpath[len(vol.vpath) :].lstrip("/")
|
||||
for d in all_vols
|
||||
if d != vol and (d.vpath.startswith(vol.vpath + "/") or not vol.vpath)
|
||||
]
|
||||
if WINDOWS:
|
||||
excl = [x.replace("/", "\\") for x in excl]
|
||||
|
||||
n_add, n_rm, success = self._build_tags_index(vol.realpath)
|
||||
n_add = self._build_dir(dbw, top, set(excl), top)
|
||||
n_rm = self._drop_lost(dbw[0], top)
|
||||
if dbw[1]:
|
||||
self.log("commit {} new files".format(dbw[1]))
|
||||
dbw[0].connection.commit()
|
||||
|
||||
dbw[0].connection.commit()
|
||||
if n_add or n_rm or do_vac:
|
||||
self.vac(dbw[0], db_path, n_add, n_rm, sz0)
|
||||
|
||||
return success
|
||||
|
||||
def vac(self, cur, db_path, n_add, n_rm, sz0):
|
||||
sz1 = os.path.getsize(db_path) // 1024
|
||||
cur.execute("vacuum")
|
||||
sz2 = os.path.getsize(db_path) // 1024
|
||||
msg = "{} new, {} del, {} kB vacced, {} kB gain, {} kB now".format(
|
||||
n_add, n_rm, sz1 - sz2, sz2 - sz0, sz2
|
||||
)
|
||||
self.log(msg)
|
||||
return True, n_add or n_rm or do_vac
|
||||
|
||||
def _build_dir(self, dbw, top, excl, cdir):
|
||||
self.pp.msg = "a{} {}".format(self.pp.n, cdir)
|
||||
@@ -410,45 +459,53 @@ class Up2k(object):
|
||||
|
||||
return len(rm)
|
||||
|
||||
def _build_tags_index(self, ptop):
|
||||
entags = self.entags[ptop]
|
||||
flags = self.flags[ptop]
|
||||
cur = self.cur[ptop]
|
||||
def _build_tags_index(self, vol):
|
||||
ptop = vol.realpath
|
||||
with self.mutex:
|
||||
_, db_path = self.register_vpath(ptop, vol.flags)
|
||||
entags = self.entags[ptop]
|
||||
flags = self.flags[ptop]
|
||||
cur = self.cur[ptop]
|
||||
|
||||
n_add = 0
|
||||
n_rm = 0
|
||||
n_buf = 0
|
||||
last_write = time.time()
|
||||
|
||||
if "e2tsr" in flags:
|
||||
n_rm = cur.execute("select count(w) from mt").fetchone()[0]
|
||||
if n_rm:
|
||||
self.log("discarding {} media tags for a full rescan".format(n_rm))
|
||||
cur.execute("delete from mt")
|
||||
else:
|
||||
self.log("volume has e2tsr but there are no media tags to discard")
|
||||
with self.mutex:
|
||||
n_rm = cur.execute("select count(w) from mt").fetchone()[0]
|
||||
if n_rm:
|
||||
self.log("discarding {} media tags for a full rescan".format(n_rm))
|
||||
cur.execute("delete from mt")
|
||||
|
||||
# integrity: drop tags for tracks that were deleted
|
||||
if "e2t" in flags:
|
||||
drops = []
|
||||
c2 = cur.connection.cursor()
|
||||
up_q = "select w from up where substr(w,1,16) = ?"
|
||||
for (w,) in cur.execute("select w from mt"):
|
||||
if not c2.execute(up_q, (w,)).fetchone():
|
||||
drops.append(w[:16])
|
||||
c2.close()
|
||||
with self.mutex:
|
||||
drops = []
|
||||
c2 = cur.connection.cursor()
|
||||
up_q = "select w from up where substr(w,1,16) = ?"
|
||||
for (w,) in cur.execute("select w from mt"):
|
||||
if not c2.execute(up_q, (w,)).fetchone():
|
||||
drops.append(w[:16])
|
||||
c2.close()
|
||||
|
||||
if drops:
|
||||
msg = "discarding media tags for {} deleted files"
|
||||
self.log(msg.format(len(drops)))
|
||||
n_rm += len(drops)
|
||||
for w in drops:
|
||||
cur.execute("delete from mt where w = ?", (w,))
|
||||
if drops:
|
||||
msg = "discarding media tags for {} deleted files"
|
||||
self.log(msg.format(len(drops)))
|
||||
n_rm += len(drops)
|
||||
for w in drops:
|
||||
cur.execute("delete from mt where w = ?", (w,))
|
||||
|
||||
# bail if a volume flag disables indexing
|
||||
if "d2t" in flags or "d2d" in flags:
|
||||
return n_add, n_rm, True
|
||||
|
||||
# add tags for new files
|
||||
gcur = cur
|
||||
with self.mutex:
|
||||
gcur.connection.commit()
|
||||
|
||||
if "e2ts" in flags:
|
||||
if not self.mtag:
|
||||
return n_add, n_rm, False
|
||||
@@ -457,8 +514,10 @@ class Up2k(object):
|
||||
if self.mtag.prefer_mt and not self.args.no_mtag_mt:
|
||||
mpool = self._start_mpool()
|
||||
|
||||
c2 = cur.connection.cursor()
|
||||
c3 = cur.connection.cursor()
|
||||
conn = sqlite3.connect(db_path, timeout=15)
|
||||
cur = conn.cursor()
|
||||
c2 = conn.cursor()
|
||||
c3 = conn.cursor()
|
||||
n_left = cur.execute("select count(w) from up").fetchone()[0]
|
||||
for w, rd, fn in cur.execute("select w, rd, fn from up"):
|
||||
n_left -= 1
|
||||
@@ -480,7 +539,8 @@ class Up2k(object):
|
||||
n_tags = self._tag_file(c3, *args)
|
||||
else:
|
||||
mpool.put(["mtag"] + args)
|
||||
n_tags = len(self._flush_mpool(c3))
|
||||
with self.mutex:
|
||||
n_tags = len(self._flush_mpool(c3))
|
||||
|
||||
n_add += n_tags
|
||||
n_buf += n_tags
|
||||
@@ -492,28 +552,32 @@ class Up2k(object):
|
||||
last_write = time.time()
|
||||
n_buf = 0
|
||||
|
||||
self._stop_mpool(mpool, c3)
|
||||
self._stop_mpool(mpool)
|
||||
with self.mutex:
|
||||
n_add += len(self._flush_mpool(c3))
|
||||
|
||||
conn.commit()
|
||||
c3.close()
|
||||
c2.close()
|
||||
cur.close()
|
||||
conn.close()
|
||||
|
||||
with self.mutex:
|
||||
gcur.connection.commit()
|
||||
|
||||
return n_add, n_rm, True
|
||||
|
||||
def _flush_mpool(self, wcur):
|
||||
with self.mutex:
|
||||
ret = []
|
||||
for x in self.pending_tags:
|
||||
self._tag_file(wcur, *x)
|
||||
ret.append(x[1])
|
||||
ret = []
|
||||
for x in self.pending_tags:
|
||||
self._tag_file(wcur, *x)
|
||||
ret.append(x[1])
|
||||
|
||||
self.pending_tags = []
|
||||
return ret
|
||||
self.pending_tags = []
|
||||
return ret
|
||||
|
||||
def _run_all_mtp(self):
|
||||
t0 = time.time()
|
||||
self.mtp_audio = {}
|
||||
self.mtp_force = {}
|
||||
self.mtp_parsers = {}
|
||||
for ptop, flags in self.flags.items():
|
||||
if "mtp" in flags:
|
||||
self._run_one_mtp(ptop)
|
||||
@@ -522,58 +586,26 @@ class Up2k(object):
|
||||
msg = "mtp finished in {:.2f} sec ({})"
|
||||
self.log(msg.format(td, s2hms(td, True)))
|
||||
|
||||
def _run_one_mtp(self, ptop):
|
||||
db_path = os.path.join(ptop, ".hist", "up2k.db")
|
||||
sz0 = os.path.getsize(db_path) // 1024
|
||||
del self.pp
|
||||
for k in list(self.volstate.keys()):
|
||||
if "OFFLINE" not in self.volstate[k]:
|
||||
self.volstate[k] = "online, idle"
|
||||
|
||||
def _run_one_mtp(self, ptop):
|
||||
entags = self.entags[ptop]
|
||||
|
||||
audio = {} # [r]equire [n]ot [d]ontcare
|
||||
force = {} # bool
|
||||
timeout = {} # int
|
||||
parsers = {}
|
||||
for parser in self.flags[ptop]["mtp"]:
|
||||
orig = parser
|
||||
tag, parser = parser.split("=", 1)
|
||||
if tag not in entags:
|
||||
continue
|
||||
try:
|
||||
parser = MParser(parser)
|
||||
except:
|
||||
self.log("invalid argument: " + parser, 1)
|
||||
return
|
||||
|
||||
audio[tag] = "y"
|
||||
for tag in entags:
|
||||
if tag in parser.tags:
|
||||
parsers[parser.tag] = parser
|
||||
|
||||
while True:
|
||||
try:
|
||||
bp = os.path.expanduser(parser)
|
||||
if os.path.exists(bp):
|
||||
parsers[tag] = [bp, timeout.get(tag, 30)]
|
||||
break
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
arg, parser = parser.split(",", 1)
|
||||
arg = arg.lower()
|
||||
|
||||
if arg.startswith("a"):
|
||||
audio[tag] = arg[1:]
|
||||
continue
|
||||
|
||||
if arg == "f":
|
||||
force[tag] = True
|
||||
continue
|
||||
|
||||
if arg.startswith("t"):
|
||||
timeout[tag] = int(arg[1:])
|
||||
continue
|
||||
|
||||
raise Exception()
|
||||
|
||||
except:
|
||||
self.log("invalid argument: " + orig, 1)
|
||||
return
|
||||
|
||||
# todo audio/force => parser attributes
|
||||
self.mtp_audio[ptop] = audio
|
||||
self.mtp_force[ptop] = force
|
||||
self.mtp_parsers[ptop] = parsers
|
||||
|
||||
q = "select count(w) from mt where k = 't:mtp'"
|
||||
@@ -606,7 +638,7 @@ class Up2k(object):
|
||||
have = cur.execute(q, (w,)).fetchall()
|
||||
have = [x[0] for x in have]
|
||||
|
||||
parsers = self._get_parsers(ptop, have)
|
||||
parsers = self._get_parsers(ptop, have, abspath)
|
||||
if not parsers:
|
||||
to_delete[w] = True
|
||||
n_left -= 1
|
||||
@@ -618,9 +650,8 @@ class Up2k(object):
|
||||
jobs.append([parsers, None, w, abspath])
|
||||
in_progress[w] = True
|
||||
|
||||
done = self._flush_mpool(wcur)
|
||||
|
||||
with self.mutex:
|
||||
done = self._flush_mpool(wcur)
|
||||
for w in done:
|
||||
to_delete[w] = True
|
||||
in_progress.pop(w)
|
||||
@@ -661,48 +692,60 @@ class Up2k(object):
|
||||
with self.mutex:
|
||||
cur.connection.commit()
|
||||
|
||||
done = self._stop_mpool(mpool, wcur)
|
||||
self._stop_mpool(mpool)
|
||||
with self.mutex:
|
||||
done = self._flush_mpool(wcur)
|
||||
for w in done:
|
||||
q = "delete from mt where w = ? and k = 't:mtp'"
|
||||
cur.execute(q, (w,))
|
||||
|
||||
cur.connection.commit()
|
||||
if n_done:
|
||||
self.vac(cur, db_path, n_done, 0, sz0)
|
||||
cur.execute("vacuum")
|
||||
|
||||
wcur.close()
|
||||
cur.close()
|
||||
|
||||
def _get_parsers(self, ptop, have):
|
||||
def _get_parsers(self, ptop, have, abspath):
|
||||
try:
|
||||
all_parsers = self.mtp_parsers[ptop]
|
||||
except:
|
||||
return {}
|
||||
|
||||
audio = self.mtp_audio[ptop]
|
||||
force = self.mtp_force[ptop]
|
||||
entags = self.entags[ptop]
|
||||
parsers = {}
|
||||
for k, v in all_parsers.items():
|
||||
if ".dur" in entags:
|
||||
if ".dur" in have:
|
||||
if "ac" in entags or ".aq" in entags:
|
||||
if "ac" in have or ".aq" in have:
|
||||
# is audio, require non-audio?
|
||||
if audio[k] == "n":
|
||||
if v.audio == "n":
|
||||
continue
|
||||
# is not audio, require audio?
|
||||
elif audio[k] == "y":
|
||||
elif v.audio == "y":
|
||||
continue
|
||||
|
||||
if v.ext:
|
||||
match = False
|
||||
for ext in v.ext:
|
||||
if abspath.lower().endswith("." + ext):
|
||||
match = True
|
||||
break
|
||||
|
||||
if not match:
|
||||
continue
|
||||
|
||||
parsers[k] = v
|
||||
|
||||
parsers = {k: v for k, v in parsers.items() if k in force or k not in have}
|
||||
parsers = {k: v for k, v in parsers.items() if v.force or k not in have}
|
||||
return parsers
|
||||
|
||||
def _start_mpool(self):
|
||||
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
|
||||
# both do crazy runahead so lets reinvent another wheel
|
||||
nw = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
if self.args.no_mtag_mt:
|
||||
nw = 1
|
||||
|
||||
if self.pending_tags is None:
|
||||
self.log("using {}x {}".format(nw, self.mtag.backend))
|
||||
self.pending_tags = []
|
||||
@@ -715,7 +758,7 @@ class Up2k(object):
|
||||
|
||||
return mpool
|
||||
|
||||
def _stop_mpool(self, mpool, wcur):
|
||||
def _stop_mpool(self, mpool):
|
||||
if not mpool:
|
||||
return
|
||||
|
||||
@@ -723,8 +766,6 @@ class Up2k(object):
|
||||
mpool.put(None)
|
||||
|
||||
mpool.join()
|
||||
done = self._flush_mpool(wcur)
|
||||
return done
|
||||
|
||||
def _tag_thr(self, q):
|
||||
while True:
|
||||
@@ -742,7 +783,8 @@ class Up2k(object):
|
||||
vtags = [
|
||||
"\033[36m{} \033[33m{}".format(k, v) for k, v in tags.items()
|
||||
]
|
||||
self.log("{}\033[0m [{}]".format(" ".join(vtags), abspath))
|
||||
if vtags:
|
||||
self.log("{}\033[0m [{}]".format(" ".join(vtags), abspath))
|
||||
|
||||
with self.mutex:
|
||||
self.pending_tags.append([entags, wark, abspath, tags])
|
||||
@@ -944,7 +986,7 @@ class Up2k(object):
|
||||
if dp_dir.startswith("//") or dp_fn.startswith("//"):
|
||||
dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
|
||||
|
||||
dp_abs = os.path.join(cj["ptop"], dp_dir, dp_fn).replace("\\", "/")
|
||||
dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
|
||||
# relying on path.exists to return false on broken symlinks
|
||||
if os.path.exists(fsenc(dp_abs)):
|
||||
job = {
|
||||
@@ -970,7 +1012,7 @@ class Up2k(object):
|
||||
for fn in names:
|
||||
path = os.path.join(job["ptop"], job["prel"], fn)
|
||||
try:
|
||||
if os.path.getsize(path) > 0:
|
||||
if os.path.getsize(fsenc(path)) > 0:
|
||||
# upload completed or both present
|
||||
break
|
||||
except:
|
||||
@@ -1094,6 +1136,9 @@ class Up2k(object):
|
||||
raise Pebkac(400, "unknown wark")
|
||||
|
||||
if chash not in job["need"]:
|
||||
msg = "chash = {} , need:\n".format(chash)
|
||||
msg += "\n".join(job["need"])
|
||||
self.log(msg)
|
||||
raise Pebkac(400, "already got that but thanks??")
|
||||
|
||||
nchunk = [n for n, v in enumerate(job["hash"]) if v == chash]
|
||||
@@ -1199,12 +1244,15 @@ class Up2k(object):
|
||||
return wark
|
||||
|
||||
def _hashlist_from_file(self, path):
|
||||
fsz = os.path.getsize(path)
|
||||
pp = self.pp if hasattr(self, "pp") else None
|
||||
fsz = os.path.getsize(fsenc(path))
|
||||
csz = up2k_chunksize(fsz)
|
||||
ret = []
|
||||
with open(path, "rb", 512 * 1024) as f:
|
||||
with open(fsenc(path), "rb", 512 * 1024) as f:
|
||||
while fsz > 0:
|
||||
self.pp.msg = "{} MB, {}".format(int(fsz / 1024 / 1024), path)
|
||||
if pp:
|
||||
pp.msg = "{} MB, {}".format(int(fsz / 1024 / 1024), path)
|
||||
|
||||
hashobj = hashlib.sha512()
|
||||
rem = min(csz, fsz)
|
||||
fsz -= rem
|
||||
@@ -1293,13 +1341,13 @@ class Up2k(object):
|
||||
try:
|
||||
# remove the filename reservation
|
||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||
if os.path.getsize(path) == 0:
|
||||
os.unlink(path)
|
||||
if os.path.getsize(fsenc(path)) == 0:
|
||||
os.unlink(fsenc(path))
|
||||
|
||||
if len(job["hash"]) == len(job["need"]):
|
||||
# PARTIAL is empty, delete that too
|
||||
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
|
||||
os.unlink(path)
|
||||
os.unlink(fsenc(path))
|
||||
except:
|
||||
pass
|
||||
|
||||
@@ -1307,8 +1355,8 @@ class Up2k(object):
|
||||
if not reg:
|
||||
if k not in prev or prev[k] is not None:
|
||||
prev[k] = None
|
||||
if os.path.exists(path):
|
||||
os.unlink(path)
|
||||
if os.path.exists(fsenc(path)):
|
||||
os.unlink(fsenc(path))
|
||||
return
|
||||
|
||||
newest = max(x["poke"] for _, x in reg.items()) if reg else 0
|
||||
@@ -1340,7 +1388,7 @@ class Up2k(object):
|
||||
abspath = os.path.join(ptop, rd, fn)
|
||||
tags = self.mtag.get(abspath)
|
||||
ntags1 = len(tags)
|
||||
parsers = self._get_parsers(ptop, tags)
|
||||
parsers = self._get_parsers(ptop, tags, abspath)
|
||||
if parsers:
|
||||
tags.update(self.mtag.get_bin(parsers, abspath))
|
||||
|
||||
|
||||
@@ -15,6 +15,7 @@ import threading
|
||||
import mimetypes
|
||||
import contextlib
|
||||
import subprocess as sp # nosec
|
||||
from datetime import datetime
|
||||
|
||||
from .__init__ import PY2, WINDOWS, ANYWIN
|
||||
from .stolen import surrogateescape
|
||||
@@ -34,10 +35,12 @@ if not PY2:
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from queue import Queue
|
||||
from io import BytesIO
|
||||
else:
|
||||
from urllib import unquote # pylint: disable=no-name-in-module
|
||||
from urllib import quote # pylint: disable=no-name-in-module
|
||||
from Queue import Queue # pylint: disable=import-error,no-name-in-module
|
||||
from StringIO import StringIO as BytesIO
|
||||
|
||||
surrogateescape.register_surrogateescape()
|
||||
FS_ENCODING = sys.getfilesystemencoding()
|
||||
@@ -45,6 +48,9 @@ if WINDOWS and PY2:
|
||||
FS_ENCODING = "utf-8"
|
||||
|
||||
|
||||
HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT"
|
||||
|
||||
|
||||
HTTPCODE = {
|
||||
200: "OK",
|
||||
204: "No Content",
|
||||
@@ -73,6 +79,13 @@ IMPLICATIONS = [
|
||||
]
|
||||
|
||||
|
||||
MIMES = {
|
||||
"md": "text/plain; charset=UTF-8",
|
||||
"opus": "audio/ogg; codecs=opus",
|
||||
"webp": "image/webp",
|
||||
}
|
||||
|
||||
|
||||
REKOBO_KEY = {
|
||||
v: ln.split(" ", 1)[0]
|
||||
for ln in """
|
||||
@@ -124,6 +137,32 @@ class Counter(object):
|
||||
self.v = absval
|
||||
|
||||
|
||||
class Cooldown(object):
|
||||
def __init__(self, maxage):
|
||||
self.maxage = maxage
|
||||
self.mutex = threading.Lock()
|
||||
self.hist = {}
|
||||
self.oldest = 0
|
||||
|
||||
def poke(self, key):
|
||||
with self.mutex:
|
||||
now = time.time()
|
||||
|
||||
ret = False
|
||||
v = self.hist.get(key, 0)
|
||||
if now - v > self.maxage:
|
||||
self.hist[key] = now
|
||||
ret = True
|
||||
|
||||
if self.oldest - now > self.maxage * 2:
|
||||
self.hist = {
|
||||
k: v for k, v in self.hist.items() if now - v < self.maxage
|
||||
}
|
||||
self.oldest = sorted(self.hist.values())[0]
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
class Unrecv(object):
|
||||
"""
|
||||
undo any number of socket recv ops
|
||||
@@ -243,7 +282,7 @@ def ren_open(fname, *args, **kwargs):
|
||||
else:
|
||||
fpath = fname
|
||||
|
||||
if suffix and os.path.exists(fpath):
|
||||
if suffix and os.path.exists(fsenc(fpath)):
|
||||
fpath += suffix
|
||||
fname += suffix
|
||||
ext += suffix
|
||||
@@ -617,6 +656,11 @@ def exclude_dotfiles(filepaths):
|
||||
return [x for x in filepaths if not x.split("/")[-1].startswith(".")]
|
||||
|
||||
|
||||
def http_ts(ts):
|
||||
file_dt = datetime.utcfromtimestamp(ts)
|
||||
return file_dt.strftime(HTTP_TS_FMT)
|
||||
|
||||
|
||||
def html_escape(s, quote=False, crlf=False):
|
||||
"""html.escape but also newlines"""
|
||||
s = s.replace("&", "&").replace("<", "<").replace(">", ">")
|
||||
@@ -723,6 +767,8 @@ def s3dec(rd, fn):
|
||||
|
||||
|
||||
def atomic_move(src, dst):
|
||||
src = fsenc(src)
|
||||
dst = fsenc(dst)
|
||||
if not PY2:
|
||||
os.replace(src, dst)
|
||||
else:
|
||||
@@ -914,11 +960,13 @@ def unescape_cookie(orig):
|
||||
return ret
|
||||
|
||||
|
||||
def guess_mime(url):
|
||||
if url.endswith(".md"):
|
||||
return ["text/plain; charset=UTF-8"]
|
||||
def guess_mime(url, fallback="application/octet-stream"):
|
||||
try:
|
||||
_, ext = url.rsplit(".", 1)
|
||||
except:
|
||||
return fallback
|
||||
|
||||
return mimetypes.guess_type(url)
|
||||
return MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback
|
||||
|
||||
|
||||
def runcmd(*argv):
|
||||
@@ -937,6 +985,17 @@ def chkcmd(*argv):
|
||||
return sout, serr
|
||||
|
||||
|
||||
def mchkcmd(argv, timeout=10):
|
||||
if PY2:
|
||||
with open(os.devnull, "wb") as f:
|
||||
rv = sp.call(argv, stdout=f, stderr=f)
|
||||
else:
|
||||
rv = sp.call(argv, stdout=sp.DEVNULL, stderr=sp.DEVNULL, timeout=timeout)
|
||||
|
||||
if rv:
|
||||
raise sp.CalledProcessError(rv, (argv[0], b"...", argv[-1]))
|
||||
|
||||
|
||||
def gzip_orig_sz(fn):
|
||||
with open(fsenc(fn), "rb") as f:
|
||||
f.seek(-4, 2)
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
:root {
|
||||
--grid-sz: 10em;
|
||||
}
|
||||
* {
|
||||
line-height: 1.2em;
|
||||
}
|
||||
@@ -64,6 +67,11 @@ a, #files tbody div a:last-child {
|
||||
background: #161616;
|
||||
text-decoration: underline;
|
||||
}
|
||||
#files thead {
|
||||
background: #333;
|
||||
position: sticky;
|
||||
top: 0;
|
||||
}
|
||||
#files thead a {
|
||||
color: #999;
|
||||
font-weight: normal;
|
||||
@@ -180,11 +188,32 @@ a, #files tbody div a:last-child {
|
||||
color: #840;
|
||||
text-shadow: 0 0 .3em #b80;
|
||||
}
|
||||
#files tbody tr.sel td {
|
||||
#files tbody tr.sel td,
|
||||
#ggrid a.sel,
|
||||
html.light #ggrid a.sel {
|
||||
color: #fff;
|
||||
background: #925;
|
||||
border-color: #c37;
|
||||
}
|
||||
#files tbody tr.sel:hover td,
|
||||
#ggrid a.sel:hover,
|
||||
html.light #ggrid a.sel:hover {
|
||||
color: #fff;
|
||||
background: #d39;
|
||||
border-color: #d48;
|
||||
text-shadow: 1px 1px 0 #804;
|
||||
}
|
||||
#ggrid a.sel,
|
||||
html.light #ggrid a.sel {
|
||||
border-top: 1px solid #d48;
|
||||
box-shadow: 0 .1em 1.2em #b36;
|
||||
transition: all 0.2s cubic-bezier(.2, 2.2, .5, 1); /* https://cubic-bezier.com/#.4,2,.7,1 */
|
||||
}
|
||||
#ggrid a.sel img {
|
||||
opacity: .7;
|
||||
box-shadow: 0 0 1em #b36;
|
||||
filter: contrast(130%) brightness(107%);
|
||||
}
|
||||
#files tr.sel a {
|
||||
color: #fff;
|
||||
}
|
||||
@@ -252,7 +281,10 @@ a, #files tbody div a:last-child {
|
||||
background: #3c3c3c;
|
||||
}
|
||||
#wtico {
|
||||
cursor: url(/.cpr/dd/1.png), pointer;
|
||||
cursor: url(/.cpr/dd/4.png), pointer;
|
||||
animation: cursor 500ms;
|
||||
}
|
||||
#wtico:hover {
|
||||
animation: cursor 500ms infinite;
|
||||
}
|
||||
@keyframes cursor {
|
||||
@@ -260,7 +292,7 @@ a, #files tbody div a:last-child {
|
||||
30% {cursor: url(/.cpr/dd/3.png), pointer}
|
||||
50% {cursor: url(/.cpr/dd/4.png), pointer}
|
||||
75% {cursor: url(/.cpr/dd/5.png), pointer}
|
||||
85% {cursor: url(/.cpr/dd/1.png), pointer}
|
||||
85% {cursor: url(/.cpr/dd/4.png), pointer}
|
||||
}
|
||||
@keyframes spin {
|
||||
100% {transform: rotate(360deg)}
|
||||
@@ -497,6 +529,17 @@ input[type="checkbox"]:checked+label {
|
||||
height: 1em;
|
||||
margin: .2em 0 -1em 1.6em;
|
||||
}
|
||||
#tq_raw {
|
||||
width: calc(100% - 2em);
|
||||
margin: .3em 0 0 1.4em;
|
||||
}
|
||||
#tq_raw td+td {
|
||||
width: 100%;
|
||||
}
|
||||
#op_search #q_raw {
|
||||
width: 100%;
|
||||
display: block;
|
||||
}
|
||||
#files td div span {
|
||||
color: #fff;
|
||||
padding: 0 .4em;
|
||||
@@ -550,8 +593,7 @@ input[type="checkbox"]:checked+label {
|
||||
left: -1.7em;
|
||||
width: calc(100% + 1.3em);
|
||||
}
|
||||
.tglbtn,
|
||||
#tree>a+a {
|
||||
.btn {
|
||||
padding: .2em .4em;
|
||||
font-size: 1.2em;
|
||||
background: #2a2a2a;
|
||||
@@ -561,12 +603,10 @@ input[type="checkbox"]:checked+label {
|
||||
position: relative;
|
||||
top: -.2em;
|
||||
}
|
||||
.tglbtn:hover,
|
||||
#tree>a+a:hover {
|
||||
.btn:hover {
|
||||
background: #805;
|
||||
}
|
||||
.tglbtn.on,
|
||||
#tree>a+a.on {
|
||||
.tgl.btn.on {
|
||||
background: #fc4;
|
||||
color: #400;
|
||||
text-shadow: none;
|
||||
@@ -711,6 +751,69 @@ input[type="checkbox"]:checked+label {
|
||||
font-family: monospace, monospace;
|
||||
line-height: 2em;
|
||||
}
|
||||
#griden.on+#thumbs {
|
||||
opacity: .3;
|
||||
}
|
||||
#ghead {
|
||||
background: #3c3c3c;
|
||||
border: 1px solid #444;
|
||||
border-radius: .3em;
|
||||
padding: .5em;
|
||||
margin: 0 1.5em 1em .4em;
|
||||
position: sticky;
|
||||
top: -.3em;
|
||||
}
|
||||
html.light #ghead {
|
||||
background: #f7f7f7;
|
||||
border-color: #ddd;
|
||||
}
|
||||
#ghead .btn {
|
||||
position: relative;
|
||||
top: 0;
|
||||
}
|
||||
#ggrid {
|
||||
padding-top: .5em;
|
||||
}
|
||||
#ggrid a {
|
||||
display: inline-block;
|
||||
width: var(--grid-sz);
|
||||
vertical-align: top;
|
||||
overflow-wrap: break-word;
|
||||
background: #383838;
|
||||
border: 1px solid #444;
|
||||
border-top: 1px solid #555;
|
||||
box-shadow: 0 .1em .2em #222;
|
||||
border-radius: .3em;
|
||||
padding: .3em;
|
||||
margin: .5em;
|
||||
}
|
||||
#ggrid a img {
|
||||
border-radius: .2em;
|
||||
max-width: var(--grid-sz);
|
||||
max-height: calc(var(--grid-sz)/1.25);
|
||||
margin: 0 auto;
|
||||
display: block;
|
||||
}
|
||||
#ggrid a span {
|
||||
padding: .2em .3em;
|
||||
display: block;
|
||||
}
|
||||
#ggrid a:hover {
|
||||
background: #444;
|
||||
border-color: #555;
|
||||
color: #fd9;
|
||||
}
|
||||
html.light #ggrid a {
|
||||
background: #f7f7f7;
|
||||
border-color: #ddd;
|
||||
box-shadow: 0 .1em .2em #ddd;
|
||||
}
|
||||
html.light #ggrid a:hover {
|
||||
background: #fff;
|
||||
border-color: #ccc;
|
||||
color: #015;
|
||||
box-shadow: 0 .1em .5em #aaa;
|
||||
}
|
||||
#pvol,
|
||||
#barbuf,
|
||||
#barpos,
|
||||
@@ -725,6 +828,21 @@ input[type="checkbox"]:checked+label {
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
html.light {
|
||||
color: #333;
|
||||
background: #eee;
|
||||
@@ -746,18 +864,15 @@ html.light #ops a.act {
|
||||
html.light #op_cfg h3 {
|
||||
border-color: #ccc;
|
||||
}
|
||||
html.light .tglbtn,
|
||||
html.light #tree > a + a {
|
||||
html.light .btn {
|
||||
color: #666;
|
||||
background: #ddd;
|
||||
box-shadow: none;
|
||||
}
|
||||
html.light .tglbtn:hover,
|
||||
html.light #tree > a + a:hover {
|
||||
html.light .btn:hover {
|
||||
background: #caf;
|
||||
}
|
||||
html.light .tglbtn.on,
|
||||
html.light #tree > a + a.on {
|
||||
html.light .tgl.btn.on {
|
||||
background: #4a0;
|
||||
color: #fff;
|
||||
}
|
||||
|
||||
@@ -41,8 +41,10 @@
|
||||
<div id="op_cfg" class="opview opbox">
|
||||
<h3>switches</h3>
|
||||
<div>
|
||||
<a id="tooltips" class="tglbtn" href="#">tooltips</a>
|
||||
<a id="lightmode" class="tglbtn" href="#">lightmode</a>
|
||||
<a id="tooltips" class="tgl btn" href="#">tooltips</a>
|
||||
<a id="lightmode" class="tgl btn" href="#">lightmode</a>
|
||||
<a id="griden" class="tgl btn" href="#">the grid</a>
|
||||
<a id="thumbs" class="tgl btn" href="#">thumbs</a>
|
||||
</div>
|
||||
{%- if have_zip %}
|
||||
<h3>folder download</h3>
|
||||
@@ -61,9 +63,9 @@
|
||||
|
||||
<div id="tree">
|
||||
<a href="#" id="detree">🍞...</a>
|
||||
<a href="#" step="2" id="twobytwo">+</a>
|
||||
<a href="#" step="-2" id="twig">–</a>
|
||||
<a href="#" class="tglbtn" id="dyntree">a</a>
|
||||
<a href="#" class="btn" step="2" id="twobytwo">+</a>
|
||||
<a href="#" class="btn" step="-2" id="twig">–</a>
|
||||
<a href="#" class="tgl btn" id="dyntree">a</a>
|
||||
<ul id="treeul"></ul>
|
||||
<div id="thx_ff"> </div>
|
||||
</div>
|
||||
|
||||
@@ -29,6 +29,19 @@ ebi('widget').innerHTML = (
|
||||
);
|
||||
|
||||
|
||||
var have_webp = null;
|
||||
(function () {
|
||||
var img = new Image();
|
||||
img.onload = function () {
|
||||
have_webp = img.width > 0 && img.height > 0;
|
||||
};
|
||||
img.onerror = function () {
|
||||
have_webp = false;
|
||||
};
|
||||
img.src = "data:image/webp;base64,UklGRiIAAABXRUJQVlA4IBYAAAAwAQCdASoBAAEADsD+JaQAA3AAAAAA";
|
||||
})();
|
||||
|
||||
|
||||
// extract songs + add play column
|
||||
function MPlayer() {
|
||||
this.id = Date.now();
|
||||
@@ -696,6 +709,188 @@ function autoplay_blocked(seek) {
|
||||
})();
|
||||
|
||||
|
||||
var thegrid = (function () {
|
||||
var lfiles = ebi('files');
|
||||
var gfiles = document.createElement('div');
|
||||
gfiles.setAttribute('id', 'gfiles');
|
||||
gfiles.style.display = 'none';
|
||||
gfiles.innerHTML = (
|
||||
'<div id="ghead">' +
|
||||
'<a href="#" class="tgl btn" id="gridsel">multiselect</a> zoom ' +
|
||||
'<a href="#" class="btn" z="-1.2">–</a> ' +
|
||||
'<a href="#" class="btn" z="1.2">+</a> sort by: ' +
|
||||
'<a href="#" s="href">name</a>, ' +
|
||||
'<a href="#" s="sz">size</a>, ' +
|
||||
'<a href="#" s="ts">date</a>, ' +
|
||||
'<a href="#" s="ext">type</a>' +
|
||||
'</div>' +
|
||||
'<div id="ggrid"></div>'
|
||||
);
|
||||
lfiles.parentNode.insertBefore(gfiles, lfiles);
|
||||
|
||||
var r = {
|
||||
'thumbs': bcfg_get('thumbs', true),
|
||||
'en': bcfg_get('griden', false),
|
||||
'sel': bcfg_get('gridsel', false),
|
||||
'sz': fcfg_get('gridsz', 10),
|
||||
'isdirty': true
|
||||
};
|
||||
|
||||
ebi('thumbs').onclick = function (e) {
|
||||
ev(e);
|
||||
r.thumbs = !r.thumbs;
|
||||
bcfg_set('thumbs', r.thumbs);
|
||||
if (r.en) {
|
||||
loadgrid();
|
||||
}
|
||||
};
|
||||
|
||||
ebi('griden').onclick = function (e) {
|
||||
ev(e);
|
||||
r.en = !r.en;
|
||||
bcfg_set('griden', r.en);
|
||||
if (r.en) {
|
||||
loadgrid();
|
||||
}
|
||||
else {
|
||||
lfiles.style.display = '';
|
||||
gfiles.style.display = 'none';
|
||||
}
|
||||
};
|
||||
|
||||
var btnclick = function (e) {
|
||||
ev(e);
|
||||
var s = this.getAttribute('s'),
|
||||
z = this.getAttribute('z');
|
||||
|
||||
if (z)
|
||||
return setsz(z > 0 ? r.sz * z : r.sz / (-z));
|
||||
|
||||
var t = lfiles.tHead.rows[0].cells;
|
||||
for (var a = 0; a < t.length; a++)
|
||||
if (t[a].getAttribute('name') == s) {
|
||||
t[a].click();
|
||||
break;
|
||||
}
|
||||
|
||||
r.setdirty();
|
||||
};
|
||||
|
||||
var links = QSA('#ghead>a');
|
||||
for (var a = 0; a < links.length; a++)
|
||||
links[a].onclick = btnclick;
|
||||
|
||||
ebi('gridsel').onclick = function (e) {
|
||||
ev(e);
|
||||
r.sel = !r.sel;
|
||||
bcfg_set('gridsel', r.sel);
|
||||
r.loadsel();
|
||||
};
|
||||
|
||||
r.setvis = function (vis) {
|
||||
(r.en ? gfiles : lfiles).style.display = vis ? '' : 'none';
|
||||
}
|
||||
|
||||
r.setdirty = function () {
|
||||
r.dirty = true;
|
||||
if (r.en) {
|
||||
loadgrid();
|
||||
}
|
||||
}
|
||||
|
||||
function setsz(v) {
|
||||
if (v !== undefined) {
|
||||
r.sz = v;
|
||||
swrite('gridsz', r.sz);
|
||||
}
|
||||
document.documentElement.style.setProperty('--grid-sz', r.sz + 'em');
|
||||
}
|
||||
setsz();
|
||||
|
||||
function seltgl(e) {
|
||||
if (e && e.ctrlKey)
|
||||
return true;
|
||||
|
||||
ev(e);
|
||||
var oth = ebi(this.getAttribute('ref')),
|
||||
td = oth.parentNode.nextSibling,
|
||||
tr = td.parentNode;
|
||||
|
||||
td.click();
|
||||
this.setAttribute('class', tr.getAttribute('class'));
|
||||
}
|
||||
|
||||
r.loadsel = function () {
|
||||
var ths = QSA('#ggrid>a');
|
||||
for (var a = 0, aa = ths.length; a < aa; a++) {
|
||||
ths[a].onclick = r.sel ? seltgl : null;
|
||||
ths[a].setAttribute('class', ebi(ths[a].getAttribute('ref')).parentNode.parentNode.getAttribute('class'));
|
||||
}
|
||||
var uns = QS('#ggrid a[ref="unsearch"]');
|
||||
if (uns)
|
||||
uns.onclick = function () {
|
||||
ebi('unsearch').click();
|
||||
};
|
||||
}
|
||||
|
||||
function loadgrid() {
|
||||
if (have_webp === null)
|
||||
return setTimeout(loadgrid, 50);
|
||||
|
||||
if (!r.dirty)
|
||||
return r.loadsel();
|
||||
|
||||
var html = [];
|
||||
var files = QSA('#files>tbody>tr>td:nth-child(2) a[id]');
|
||||
for (var a = 0, aa = files.length; a < aa; a++) {
|
||||
var ao = files[a],
|
||||
href = esc(ao.getAttribute('href')),
|
||||
ref = ao.getAttribute('id'),
|
||||
isdir = href.split('?')[0].slice(-1)[0] == '/',
|
||||
ihref = href;
|
||||
|
||||
if (isdir) {
|
||||
ihref = '/.cpr/ico/folder'
|
||||
}
|
||||
else if (r.thumbs) {
|
||||
ihref += (ihref.indexOf('?') === -1 ? '?' : '&') + 'th=' + (have_webp ? 'w' : 'j');
|
||||
}
|
||||
else {
|
||||
var ar = href.split('?')[0].split('.');
|
||||
if (ar.length > 1)
|
||||
ar = ar.slice(1);
|
||||
|
||||
ihref = '';
|
||||
ar.reverse();
|
||||
for (var b = 0; b < ar.length; b++) {
|
||||
if (ar[b].length > 7)
|
||||
break;
|
||||
|
||||
ihref = ar[b] + '.' + ihref;
|
||||
}
|
||||
if (!ihref) {
|
||||
ihref = 'unk.';
|
||||
}
|
||||
ihref = '/.cpr/ico/' + ihref.slice(0, -1);
|
||||
}
|
||||
|
||||
html.push('<a href="' + href + '" ref="' + ref + '"><img src="' +
|
||||
ihref + '" /><span>' + ao.innerHTML + '</span></a>');
|
||||
}
|
||||
lfiles.style.display = 'none';
|
||||
gfiles.style.display = 'block';
|
||||
ebi('ggrid').innerHTML = html.join('\n');
|
||||
r.loadsel();
|
||||
}
|
||||
|
||||
if (r.en) {
|
||||
loadgrid();
|
||||
}
|
||||
|
||||
return r;
|
||||
})();
|
||||
|
||||
|
||||
function tree_neigh(n) {
|
||||
var links = QSA('#treeul li>a+a');
|
||||
if (!links.length) {
|
||||
@@ -763,6 +958,23 @@ document.onkeydown = function (e) {
|
||||
|
||||
if (k == 'KeyP')
|
||||
return tree_up();
|
||||
|
||||
if (k == 'KeyG')
|
||||
return ebi('griden').click();
|
||||
|
||||
if (k == 'KeyT')
|
||||
return ebi('thumbs').click();
|
||||
|
||||
if (window['thegrid'] && thegrid.en) {
|
||||
if (k == 'KeyS')
|
||||
return ebi('gridsel').click();
|
||||
|
||||
if (k == 'KeyA')
|
||||
return QSA('#ghead>a[z]')[0].click();
|
||||
|
||||
if (k == 'KeyD')
|
||||
return QSA('#ghead>a[z]')[1].click();
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -819,6 +1031,7 @@ document.onkeydown = function (e) {
|
||||
for (var a = 0; a < trs.length; a += 2) {
|
||||
html.push('<table>' + (trs[a].concat(trs[a + 1])).join('\n') + '</table>');
|
||||
}
|
||||
html.push('<table id="tq_raw"><tr><td>raw</td><td><input id="q_raw" type="text" name="q" /></td></tr></table>');
|
||||
ebi('srch_form').innerHTML = html.join('\n');
|
||||
|
||||
var o = QSA('#op_search input');
|
||||
@@ -843,33 +1056,83 @@ document.onkeydown = function (e) {
|
||||
var chk = ebi(id.slice(0, -1) + 'c');
|
||||
chk.checked = ((v + '').length > 0);
|
||||
}
|
||||
|
||||
if (id != "q_raw")
|
||||
encode_query();
|
||||
|
||||
clearTimeout(search_timeout);
|
||||
if (Date.now() - search_in_progress > 30 * 1000)
|
||||
search_timeout = setTimeout(do_search, 200);
|
||||
}
|
||||
|
||||
function encode_query() {
|
||||
var q = '';
|
||||
for (var a = 0; a < sconf.length; a++) {
|
||||
for (var b = 1; b < sconf[a].length; b++) {
|
||||
var k = sconf[a][b][0],
|
||||
chk = 'srch_' + k + 'c',
|
||||
tvs = ebi('srch_' + k + 'v').value.split(/ /g);
|
||||
|
||||
if (!ebi(chk).checked)
|
||||
continue;
|
||||
|
||||
for (var c = 0; c < tvs.length; c++) {
|
||||
var tv = tvs[c];
|
||||
if (!tv.length)
|
||||
break;
|
||||
|
||||
q += ' and ';
|
||||
|
||||
if (k == 'adv') {
|
||||
q += tv.replace(/ /g, " and ").replace(/([=!><]=?)/, " $1 ");
|
||||
continue;
|
||||
}
|
||||
|
||||
if (k.length == 3) {
|
||||
q += k.replace(/sz/, 'size').replace(/dt/, 'date').replace(/l$/, ' >= ').replace(/u$/, ' <= ') + tv;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (k == 'path' || k == 'name' || k == 'tags') {
|
||||
var not = ' ';
|
||||
if (tv.slice(0, 1) == '-') {
|
||||
tv = tv.slice(1);
|
||||
not = ' not ';
|
||||
}
|
||||
|
||||
if (tv.slice(0, 1) == '^') {
|
||||
tv = tv.slice(1);
|
||||
}
|
||||
else {
|
||||
tv = '*' + tv;
|
||||
}
|
||||
|
||||
if (tv.slice(-1) == '$') {
|
||||
tv = tv.slice(0, -1);
|
||||
}
|
||||
else {
|
||||
tv += '*';
|
||||
}
|
||||
|
||||
q += k + not + 'like ' + tv;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
ebi('q_raw').value = q.slice(5);
|
||||
}
|
||||
|
||||
function do_search() {
|
||||
search_in_progress = Date.now();
|
||||
srch_msg(false, "searching...");
|
||||
clearTimeout(search_timeout);
|
||||
|
||||
var params = {},
|
||||
o = QSA('#op_search input[type="text"]');
|
||||
|
||||
for (var a = 0; a < o.length; a++) {
|
||||
var chk = ebi(o[a].getAttribute('id').slice(0, -1) + 'c');
|
||||
if (!chk.checked)
|
||||
continue;
|
||||
|
||||
params[o[a].getAttribute('name')] = o[a].value;
|
||||
}
|
||||
// ebi('srch_q').textContent = JSON.stringify(params, null, 4);
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', '/?srch', true);
|
||||
xhr.setRequestHeader('Content-Type', 'text/plain');
|
||||
xhr.onreadystatechange = xhr_search_results;
|
||||
xhr.ts = Date.now();
|
||||
xhr.send(JSON.stringify(params));
|
||||
xhr.send(JSON.stringify({ "q": ebi('q_raw').value }));
|
||||
}
|
||||
|
||||
function xhr_search_results() {
|
||||
@@ -918,7 +1181,7 @@ document.onkeydown = function (e) {
|
||||
sz = esc(r.sz + ''),
|
||||
rp = esc(r.rp + ''),
|
||||
ext = rp.lastIndexOf('.') > 0 ? rp.split('.').slice(-1)[0] : '%',
|
||||
links = linksplit(rp);
|
||||
links = linksplit(r.rp + '');
|
||||
|
||||
if (ext.length > 8)
|
||||
ext = '%';
|
||||
@@ -1226,8 +1489,10 @@ var treectl = (function () {
|
||||
nodes = sortfiles(nodes);
|
||||
for (var a = 0; a < nodes.length; a++) {
|
||||
var r = nodes[a],
|
||||
ln = ['<tr><td>' + r.lead + '</td><td><a href="' +
|
||||
top + r.href + '">' + esc(uricom_dec(r.href)[0]) + '</a>', r.sz];
|
||||
hname = esc(uricom_dec(r.href)[0]),
|
||||
sortv = (r.href.slice(-1) == '/' ? '\t' : '') + hname,
|
||||
ln = ['<tr><td>' + r.lead + '</td><td sortv="' + sortv +
|
||||
'"><a href="' + top + r.href + '">' + hname + '</a>', r.sz];
|
||||
|
||||
for (var b = 0; b < res.taglist.length; b++) {
|
||||
var k = res.taglist[b],
|
||||
@@ -1256,6 +1521,8 @@ var treectl = (function () {
|
||||
ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : "";
|
||||
ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : "";
|
||||
|
||||
document.title = '⇆🎉 ' + uricom_dec(document.location.pathname.slice(1, -1))[0];
|
||||
|
||||
filecols.set_style();
|
||||
mukey.render();
|
||||
msel.render();
|
||||
@@ -1391,7 +1658,7 @@ function apply_perms(perms) {
|
||||
up2k.set_fsearch();
|
||||
|
||||
ebi('widget').style.display = have_read ? '' : 'none';
|
||||
ebi('files').style.display = have_read ? '' : 'none';
|
||||
thegrid.setvis(have_read);
|
||||
if (!have_read)
|
||||
goto('up2k');
|
||||
}
|
||||
@@ -1784,6 +2051,8 @@ var msel = (function () {
|
||||
}
|
||||
function selui() {
|
||||
clmod(ebi('wtoggle'), 'sel', getsel().length);
|
||||
if (window['thegrid'])
|
||||
thegrid.loadsel();
|
||||
}
|
||||
function seltgl(e) {
|
||||
ev(e);
|
||||
@@ -1842,6 +2111,23 @@ var msel = (function () {
|
||||
})();
|
||||
|
||||
|
||||
(function () {
|
||||
try {
|
||||
var tr = ebi('files').tBodies[0].rows;
|
||||
for (var a = 0; a < tr.length; a++) {
|
||||
var td = tr[a].cells[1],
|
||||
ao = td.firstChild,
|
||||
href = ao.getAttribute('href'),
|
||||
isdir = href.split('?')[0].slice(-1)[0] == '/',
|
||||
txt = ao.textContent;
|
||||
|
||||
td.setAttribute('sortv', (isdir ? '\t' : '') + txt);
|
||||
}
|
||||
}
|
||||
catch (ex) { }
|
||||
})();
|
||||
|
||||
|
||||
function ev_row_tgl(e) {
|
||||
ev(e);
|
||||
filecols.toggle(this.parentElement.parentElement.getElementsByTagName('span')[0].textContent);
|
||||
@@ -1892,6 +2178,8 @@ function reload_browser(not_mp) {
|
||||
|
||||
if (window['up2k'])
|
||||
up2k.set_fsearch();
|
||||
|
||||
thegrid.setdirty();
|
||||
}
|
||||
reload_browser(true);
|
||||
mukey.render();
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 248 B |
@@ -26,6 +26,13 @@ a {
|
||||
border-radius: .2em;
|
||||
padding: .2em .8em;
|
||||
}
|
||||
td, th {
|
||||
padding: .3em .6em;
|
||||
text-align: left;
|
||||
}
|
||||
.btns {
|
||||
margin: 1em 0;
|
||||
}
|
||||
|
||||
|
||||
html.dark,
|
||||
|
||||
@@ -13,11 +13,28 @@
|
||||
<div id="wrap">
|
||||
<p>hello {{ this.uname }}</p>
|
||||
|
||||
{%- if avol %}
|
||||
<h1>admin panel:</h1>
|
||||
<table>
|
||||
<thead><tr><th>vol</th><th>action</th><th>status</th></tr></thead>
|
||||
<tbody>
|
||||
{% for mp in avol %}
|
||||
{%- if mp in vstate and vstate[mp] %}
|
||||
<tr><td><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a href="{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
|
||||
{%- endif %}
|
||||
{% endfor %}
|
||||
</tbody>
|
||||
</table>
|
||||
<div class="btns">
|
||||
<a href="{{ avol[0] }}?stack">dump stack</a>
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
{%- if rvol %}
|
||||
<h1>you can browse these:</h1>
|
||||
<ul>
|
||||
{% for mp in rvol %}
|
||||
<li><a href="/{{ mp }}{{ url_suf }}">/{{ mp }}</a></li>
|
||||
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{%- endif %}
|
||||
@@ -26,7 +43,7 @@
|
||||
<h1>you can upload to:</h1>
|
||||
<ul>
|
||||
{% for mp in wvol %}
|
||||
<li><a href="/{{ mp }}{{ url_suf }}">/{{ mp }}</a></li>
|
||||
<li><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></li>
|
||||
{% endfor %}
|
||||
</ul>
|
||||
{%- endif %}
|
||||
|
||||
@@ -696,7 +696,7 @@ function up2k_init(subtle) {
|
||||
|
||||
pvis.addfile([
|
||||
fsearch ? esc(entry.name) : linksplit(
|
||||
esc(uricom_dec(entry.purl)[0] + entry.name)).join(' '),
|
||||
uricom_dec(entry.purl)[0] + entry.name).join(' '),
|
||||
'📐 hash',
|
||||
''
|
||||
], fobj.size);
|
||||
@@ -1023,7 +1023,7 @@ function up2k_init(subtle) {
|
||||
else {
|
||||
smsg = 'found';
|
||||
var hit = response.hits[0],
|
||||
msg = linksplit(esc(hit.rp)).join(''),
|
||||
msg = linksplit(hit.rp).join(''),
|
||||
tr = unix2iso(hit.ts),
|
||||
tu = unix2iso(t.lmod),
|
||||
diff = parseInt(t.lmod) - parseInt(hit.ts),
|
||||
@@ -1045,7 +1045,7 @@ function up2k_init(subtle) {
|
||||
if (response.name !== t.name) {
|
||||
// file exists; server renamed us
|
||||
t.name = response.name;
|
||||
pvis.seth(t.n, 0, linksplit(esc(t.purl + t.name)).join(' '));
|
||||
pvis.seth(t.n, 0, linksplit(t.purl + t.name).join(' '));
|
||||
}
|
||||
|
||||
var chunksize = get_chunksize(t.size),
|
||||
|
||||
@@ -135,7 +135,7 @@ function clmod(obj, cls, add) {
|
||||
|
||||
|
||||
function sortfiles(nodes) {
|
||||
var sopts = jread('fsort', [["lead", -1, ""], ["href", 1, ""]]);
|
||||
var sopts = jread('fsort', [["href", 1, ""]]);
|
||||
|
||||
try {
|
||||
var is_srch = false;
|
||||
@@ -152,6 +152,9 @@ function sortfiles(nodes) {
|
||||
if (!name)
|
||||
continue;
|
||||
|
||||
if (name == 'ts')
|
||||
typ = 'int';
|
||||
|
||||
if (name.indexOf('tags/') === 0) {
|
||||
name = name.slice(5);
|
||||
for (var b = 0, bb = nodes.length; b < bb; b++)
|
||||
@@ -163,8 +166,12 @@ function sortfiles(nodes) {
|
||||
|
||||
if ((v + '').indexOf('<a ') === 0)
|
||||
v = v.split('>')[1];
|
||||
else if (name == "href" && v)
|
||||
else if (name == "href" && v) {
|
||||
if (v.slice(-1) == '/')
|
||||
v = '\t' + v;
|
||||
|
||||
v = uricom_dec(v)[0]
|
||||
}
|
||||
|
||||
nodes[b]._sv = v;
|
||||
}
|
||||
@@ -198,6 +205,8 @@ function sortfiles(nodes) {
|
||||
}
|
||||
catch (ex) {
|
||||
console.log("failed to apply sort config: " + ex);
|
||||
console.log("resetting fsort " + sread('fsort'))
|
||||
localStorage.removeItem('fsort');
|
||||
}
|
||||
return nodes;
|
||||
}
|
||||
@@ -349,12 +358,16 @@ function linksplit(rp) {
|
||||
link = rp.slice(0, ofs + 1);
|
||||
rp = rp.slice(ofs + 1);
|
||||
}
|
||||
var vlink = link;
|
||||
if (link.indexOf('/') !== -1)
|
||||
vlink = link.slice(0, -1) + '<span>/</span>';
|
||||
var vlink = esc(link),
|
||||
elink = uricom_enc(link);
|
||||
|
||||
ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
|
||||
apath += link;
|
||||
if (link.indexOf('/') !== -1) {
|
||||
vlink = vlink.slice(0, -1) + '<span>/</span>';
|
||||
elink = elink.slice(0, -3) + '/';
|
||||
}
|
||||
|
||||
ret.push('<a href="' + apath + elink + '">' + vlink + '</a>');
|
||||
apath += elink;
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
@@ -456,11 +469,15 @@ function jwrite(key, val) {
|
||||
}
|
||||
|
||||
function icfg_get(name, defval) {
|
||||
return parseInt(fcfg_get(name, defval));
|
||||
}
|
||||
|
||||
function fcfg_get(name, defval) {
|
||||
var o = ebi(name);
|
||||
|
||||
var val = parseInt(sread(name));
|
||||
var val = parseFloat(sread(name));
|
||||
if (isNaN(val))
|
||||
return parseInt(o ? o.value : defval);
|
||||
return parseFloat(o ? o.value : defval);
|
||||
|
||||
if (o)
|
||||
o.value = val;
|
||||
|
||||
@@ -80,6 +80,13 @@ command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (ti
|
||||
var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n"));
|
||||
|
||||
|
||||
##
|
||||
## bash oneliners
|
||||
|
||||
# get the size and video-id of all youtube vids in folder, assuming filename ends with -id.ext, and create a copyparty search query
|
||||
find -maxdepth 1 -printf '%s %p\n' | sort -n | awk '!/-([0-9a-zA-Z_-]{11})\.(mkv|mp4|webm)$/{next} {sub(/\.[^\.]+$/,"");n=length($0);v=substr($0,n-10);print $1, v}' | tee /dev/stderr | awk 'BEGIN {p="("} {printf("%s name like *-%s.* ",p,$2);p="or"} END {print ")\n"}' | cat >&2
|
||||
|
||||
|
||||
##
|
||||
## sqlite3 stuff
|
||||
|
||||
|
||||
82
docs/nuitka.txt
Normal file
82
docs/nuitka.txt
Normal file
@@ -0,0 +1,82 @@
|
||||
# recipe for building an exe with nuitka (extreme jank edition)
|
||||
#
|
||||
# NOTE: win7 and win10 builds both work on win10 but
|
||||
# on win7 they immediately c0000005 in kernelbase.dll
|
||||
#
|
||||
# first install python-3.6.8-amd64.exe
|
||||
# [x] add to path
|
||||
#
|
||||
# copypaste the rest of this file into cmd
|
||||
|
||||
rem from pypi
|
||||
cd \users\ed\downloads
|
||||
python -m pip install --user Nuitka-0.6.14.7.tar.gz
|
||||
|
||||
rem https://github.com/brechtsanders/winlibs_mingw/releases/download/10.2.0-11.0.0-8.0.0-r5/winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\10.2.0-11.0.0-8.0.0-r5\
|
||||
copy c:\users\ed\downloads\winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\10.2.0-11.0.0-8.0.0-r5\winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip
|
||||
|
||||
rem https://github.com/ccache/ccache/releases/download/v3.7.12/ccache-3.7.12-windows-32.zip
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\v3.7.12\
|
||||
copy c:\users\ed\downloads\ccache-3.7.12-windows-32.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\v3.7.12\ccache-3.7.12-windows-32.zip
|
||||
|
||||
rem https://dependencywalker.com/depends22_x64.zip
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\
|
||||
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\x86_64\
|
||||
copy c:\users\ed\downloads\depends22_x64.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\x86_64\depends22_x64.zip
|
||||
|
||||
cd \
|
||||
rd /s /q %appdata%\..\local\temp\pe-copyparty
|
||||
cd \users\ed\downloads
|
||||
python copyparty-sfx.py -h
|
||||
cd %appdata%\..\local\temp\pe-copyparty\copyparty
|
||||
|
||||
python
|
||||
import os, re
|
||||
os.rename('../dep-j2/jinja2', '../jinja2')
|
||||
os.rename('../dep-j2/markupsafe', '../markupsafe')
|
||||
|
||||
print("# nuitka dies if .__init__.stuff is imported")
|
||||
with open('__init__.py','r',encoding='utf-8') as f:
|
||||
t1 = f.read()
|
||||
|
||||
with open('util.py','r',encoding='utf-8') as f:
|
||||
t2 = f.read().split('\n')[3:]
|
||||
|
||||
t2 = [x for x in t2 if 'from .__init__' not in x]
|
||||
t = t1 + '\n'.join(t2)
|
||||
with open('__init__.py','w',encoding='utf-8') as f:
|
||||
f.write('\n')
|
||||
|
||||
with open('util.py','w',encoding='utf-8') as f:
|
||||
f.write(t)
|
||||
|
||||
print("# local-imports fail, prefix module names")
|
||||
ptn = re.compile(r'^( *from )(\.[^ ]+ import .*)')
|
||||
for d, _, fs in os.walk('.'):
|
||||
for f in fs:
|
||||
fp = os.path.join(d, f)
|
||||
if not fp.endswith('.py'):
|
||||
continue
|
||||
t = ''
|
||||
with open(fp,'r',encoding='utf-8') as f:
|
||||
for ln in [x.rstrip('\r\n') for x in f]:
|
||||
m = ptn.match(ln)
|
||||
if not m:
|
||||
t += ln + '\n'
|
||||
continue
|
||||
p1, p2 = m.groups()
|
||||
t += "{}copyparty{}\n".format(p1, p2).replace("__init__", "util")
|
||||
with open(fp,'w',encoding='utf-8') as f:
|
||||
f.write(t)
|
||||
|
||||
exit()
|
||||
|
||||
cd ..
|
||||
|
||||
rd /s /q bout & python -m nuitka --standalone --onefile --windows-onefile-tempdir --python-flag=no_site --assume-yes-for-downloads --include-data-dir=copyparty\web=copyparty\web --include-data-dir=copyparty\res=copyparty\res --run --output-dir=bout --mingw64 --include-package=markupsafe --include-package=jinja2 copyparty
|
||||
55
setup.py
55
setup.py
@@ -5,22 +5,7 @@ from __future__ import print_function
|
||||
import os
|
||||
import sys
|
||||
from shutil import rmtree
|
||||
|
||||
setuptools_available = True
|
||||
try:
|
||||
# need setuptools to build wheel
|
||||
from setuptools import setup, Command, find_packages
|
||||
|
||||
except ImportError:
|
||||
# works in a pinch
|
||||
setuptools_available = False
|
||||
from distutils.core import setup, Command
|
||||
|
||||
from distutils.spawn import spawn
|
||||
|
||||
if "bdist_wheel" in sys.argv and not setuptools_available:
|
||||
print("cannot build wheel without setuptools")
|
||||
sys.exit(1)
|
||||
from setuptools import setup, Command, find_packages
|
||||
|
||||
|
||||
NAME = "copyparty"
|
||||
@@ -100,9 +85,8 @@ args = {
|
||||
"author_email": "copyparty@ocv.me",
|
||||
"url": "https://github.com/9001/copyparty",
|
||||
"license": "MIT",
|
||||
"data_files": data_files,
|
||||
"classifiers": [
|
||||
"Development Status :: 3 - Alpha",
|
||||
"Development Status :: 4 - Beta",
|
||||
"License :: OSI Approved :: MIT License",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 2",
|
||||
@@ -120,35 +104,16 @@ args = {
|
||||
"Environment :: Console",
|
||||
"Environment :: No Input/Output (Daemon)",
|
||||
"Topic :: Communications :: File Sharing",
|
||||
"Topic :: Internet :: WWW/HTTP :: HTTP Servers",
|
||||
],
|
||||
"include_package_data": True,
|
||||
"data_files": data_files,
|
||||
"packages": find_packages(),
|
||||
"install_requires": ["jinja2"],
|
||||
"extras_require": {"thumbnails": ["Pillow"], "audiotags": ["mutagen"]},
|
||||
"entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]},
|
||||
"scripts": ["bin/copyparty-fuse.py"],
|
||||
"cmdclass": {"clean2": clean2},
|
||||
}
|
||||
|
||||
|
||||
if setuptools_available:
|
||||
args.update(
|
||||
{
|
||||
"packages": find_packages(),
|
||||
"install_requires": ["jinja2"],
|
||||
"extras_require": {"thumbnails": ["Pillow"]},
|
||||
"include_package_data": True,
|
||||
"entry_points": {
|
||||
"console_scripts": ["copyparty = copyparty.__main__:main"]
|
||||
},
|
||||
"scripts": ["bin/copyparty-fuse.py"],
|
||||
}
|
||||
)
|
||||
else:
|
||||
args.update(
|
||||
{
|
||||
"packages": ["copyparty", "copyparty.stolen"],
|
||||
"scripts": ["bin/copyparty-fuse.py"],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
# import pprint
|
||||
# pprint.PrettyPrinter().pprint(args)
|
||||
# sys.exit(0)
|
||||
|
||||
setup(**args)
|
||||
|
||||
@@ -32,6 +32,8 @@ class Cfg(Namespace):
|
||||
no_zip=False,
|
||||
no_scandir=False,
|
||||
no_sendfile=True,
|
||||
no_rescan=True,
|
||||
ihead=False,
|
||||
nih=True,
|
||||
mtp=[],
|
||||
mte="a",
|
||||
|
||||
@@ -91,7 +91,10 @@ class VHttpConn(object):
|
||||
self.auth = auth
|
||||
self.log_func = log
|
||||
self.log_src = "a"
|
||||
self.lf_url = None
|
||||
self.hsrv = VHttpSrv()
|
||||
self.nbyte = 0
|
||||
self.workload = 0
|
||||
self.ico = None
|
||||
self.thumbcli = None
|
||||
self.t0 = time.time()
|
||||
Reference in New Issue
Block a user