mirror of https://github.com/9001/copyparty.git (synced 2025-10-27 01:53:44 +00:00)

Compare commits: 76 commits
Commit SHA1s (the author, date and message columns were not captured in this view):

eb5aaddba4, d8fd82bcb5, 97be495861, 8b53c159fc, 81e281f703, 3948214050, c5e9a643e7, d25881d5c3,
38d8d9733f, 118ebf668d, a86f09fa46, dd4fb35c8f, 621eb4cf95, deea66ad0b, bf99445377, 7b54a63396,
0fcb015f9a, 0a22b1ffb6, 68cecc52ab, 53657ccfff, 96223fda01, 374ff3433e, 5d63949e98, 6b065d507d,
e79997498a, f7ee02ec35, 69dc433e1c, c880cd848c, 5752b6db48, b36f905eab, 483dd527c6, e55678e28f,
3f4a8b9d6f, 02a856ecb4, 4dff726310, cbc449036f, 8f53152220, bbb1e165d6, fed8d94885, 58040cc0ed,
03d692db66, 903f8e8453, 405ae1308e, 8a0f583d71, b6d7017491, 0f0217d203, a203e33347, 3b8f697dd4,
78ba16f722, 0fcfe79994, c0e6df4b63, 322abdcb43, 31100787ce, c57d721be4, 3b5a03e977, ed807ee43e,
073c130ae6, 8810e0be13, f93016ab85, b19cf260c2, db03e1e7eb, e0d975e36a, cfeb15259f, 3b3f8fc8fb,
88bd2c084c, bd367389b0, 58ba71a76f, d03e34d55d, 24f239a46c, 2c0826f85a, c061461d01, e7982a04fe,
33b91a7513, 9bb1323e44, e62bb807a5, 3fc0d2cc4a
.vscode/launch.py (vendored), 2 lines changed:

@@ -12,7 +12,7 @@ sys.path.insert(0, os.getcwd())
 import jstyleson
 from copyparty.__main__ import main as copyparty
 
-with open(".vscode/launch.json", "r") as f:
+with open(".vscode/launch.json", "r", encoding="utf-8") as f:
     tj = f.read()
 
 oj = jstyleson.loads(tj)
README.md, 115 lines changed:

@@ -9,10 +9,11 @@
 turn your phone or raspi into a portable file server with resumable uploads/downloads using IE6 or any other browser
 
 * server runs on anything with `py2.7` or `py3.3+`
-* *resumable* uploads need `firefox 12+` / `chrome 6+` / `safari 6+` / `IE 10+`
+* browse/upload with IE4 / netscape4.0 on win3.11 (heh)
+* *resumable* uploads need `firefox 34+` / `chrome 41+` / `safari 7+` for full speed
 * code standard: `black`
 
-📷 screenshots: [browser](#the-browser) // [upload](#uploading) // [md-viewer](#markdown-viewer) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [ie4](#browser-support)
+📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [thumbnails](#thumbnails) // [md-viewer](#markdown-viewer) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [ie4](#browser-support)
 
 
 ## readme toc

@@ -22,11 +23,13 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * [notes](#notes)
 * [status](#status)
 * [bugs](#bugs)
+* [general bugs](#general-bugs)
 * [not my bugs](#not-my-bugs)
 * [the browser](#the-browser)
 * [tabs](#tabs)
 * [hotkeys](#hotkeys)
 * [tree-mode](#tree-mode)
+* [thumbnails](#thumbnails)
 * [zip downloads](#zip-downloads)
 * [uploading](#uploading)
 * [file-search](#file-search)

@@ -41,6 +44,8 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * [client examples](#client-examples)
 * [up2k](#up2k)
 * [dependencies](#dependencies)
+* [optional dependencies](#optional-dependencies)
+* [install recommended deps](#install-recommended-deps)
 * [optional gpl stuff](#optional-gpl-stuff)
 * [sfx](#sfx)
 * [sfx repack](#sfx-repack)

@@ -54,11 +59,11 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 
 download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) and you're all set!
 
-running the sfx without arguments (for example doubleclicking it on Windows) will let anyone access the current folder; see `-h` for help if you want accounts and volumes etc
+running the sfx without arguments (for example doubleclicking it on Windows) will give everyone full access to the current folder; see `-h` for help if you want accounts and volumes etc
 
 you may also want these, especially on servers:
 * [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
-* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for legit https)
+* [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)
 
 
 ## notes

@@ -73,6 +78,8 @@ you may also want these, especially on servers:
 
 ## status
 
+summary: all planned features work! now please enjoy the bloatening
+
 * backend stuff
 * ☑ sanic multipart parser
 * ☑ load balancer (multiprocessing)

@@ -90,9 +97,12 @@ you may also want these, especially on servers:
 * browser
 * ☑ tree-view
 * ☑ media player
-* ✖ thumbnails
-* ✖ SPA (browse while uploading)
-* currently safe using the file-tree on the left only, not folders in the file list
+* ☑ thumbnails
+* ☑ images using Pillow
+* ☑ videos using FFmpeg
+* ☑ cache eviction (max-age; maybe max-size eventually)
+* ☑ SPA (browse while uploading)
+* if you use the file-tree on the left only, not folders in the file list
 * server indexing
 * ☑ locate files by contents
 * ☑ search by name/path/date/size

@@ -101,14 +111,17 @@ you may also want these, especially on servers:
 * ☑ viewer
 * ☑ editor (sure why not)
 
-summary: it works! you can use it! (but technically not even close to beta)
 
 
 # bugs
 
 * Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
 * Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
 * Windows: python 2.7 cannot handle filenames with mojibake
 
+## general bugs
+
+* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
+* cannot mount something at `/d1/d2/d3` unless `d2` exists inside `d1`
 * hiding the contents at url `/d1/d2/d3` using `-v :d1/d2/d3:cd2d` has the side-effect of creating databases (for files/tags) inside folders d1 and d2, and those databases take precedence over the main db at the top of the vfs - this means all files in d2 and below will be reindexed unless you already had a vfs entry at or below d2
 * probably more, pls let me know
 

@@ -138,11 +151,16 @@ summary: it works! you can use it! (but technically not even close to beta)
 the browser has the following hotkeys
 * `I/K` prev/next folder
 * `P` parent folder
+* `G` toggle list / grid view
+* `T` toggle thumbnails / icons
 * when playing audio:
 * `0..9` jump to 10%..90%
 * `U/O` skip 10sec back/forward
 * `J/L` prev/next song
 * `J` also starts playing the folder
+* in the grid view:
+* `S` toggle multiselect
+* `A/D` zoom
 
 
 ## tree-mode

@@ -152,6 +170,13 @@ by default there's a breadcrumbs path; you can replace this with a tree-browser
 click `[-]` and `[+]` to adjust the size, and the `[a]` toggles if the tree should widen dynamically as you go deeper or stay fixed-size
 
 
+## thumbnails
+
+
+
+it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how destructive your users are
+
+
 ## zip downloads
 
 the `zip` link next to folders can produce various types of zip/tar files using these alternatives in the browser settings tab:
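
side note on the thumbnails section above: the `--th-size 320x256` default and the `--th-no-crop` flag appear in the `__main__.py` hunks further down, and a minimal sketch of the Pillow half could look like the following. this is an illustration only, not copyparty's actual thumbnailer, and the filenames are made up.

```python
# minimal sketch, assuming Pillow is installed; not the actual copyparty code
from PIL import Image, ImageOps

def make_thumb(src, dst, res=(320, 256), crop=True):
    with Image.open(src) as im:
        im = im.convert("RGB")  # flatten palette/alpha so jpg/webp can encode it
        if crop:
            # fill the box and crop the overflow: fixed-size tiles for the grid view
            im = ImageOps.fit(im, res)
        else:
            # --th-no-crop style: keep aspect ratio, dynamic height
            im.thumbnail(res)
        im.save(dst, "WEBP", quality=50)

make_thumb("cover.jpg", "thumbs/cover.webp")
```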
@@ -175,8 +200,8 @@ you can also zip a selection of files or folders by clicking them in the browser
 ## uploading
 
 two upload methods are available in the html client:
-* 🎈 bup, the basic uploader, supports almost every browser since netscape 4.0
-* 🚀 up2k, the fancy one
+* `🎈 bup`, the basic uploader, supports almost every browser since netscape 4.0
+* `🚀 up2k`, the fancy one
 
 up2k has several advantages:
 * you can drop folders into the browser (files are added recursively)

@@ -190,6 +215,8 @@ see [up2k](#up2k) for details on how it works
 
 
 
+**protip:** you can avoid scaring away users with [docs/minimal-up2k.html](docs/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
+
 the up2k UI is the epitome of polished inutitive experiences:
 * "parallel uploads" specifies how many chunks to upload at the same time
 * `[🏃]` analysis of other files should continue while one is uploading

@@ -205,20 +232,18 @@ and then theres the tabs below it,
 * plus up to 3 entries each from `[done]` and `[que]` for context
 * `[que]` is all the files that are still queued
 
-protip: you can avoid scaring away users by hiding some of the UI with hacks like [docs/minimal-up2k.html](docs/minimal-up2k.html)
-
 
 ### file-search
 
 
 
-in the 🚀 up2k tab, after toggling the `[🔎]` switch green, any files/folders you drop onto the dropzone will be hashed on the client-side. Each hash is sent to the server which checks if that file exists somewhere already
+in the `[🚀 up2k]` tab, after toggling the `[🔎]` switch green, any files/folders you drop onto the dropzone will be hashed on the client-side. Each hash is sent to the server which checks if that file exists somewhere already
 
 files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
-* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else
+* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much
 
 adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files
 
-note that since up2k has to read the file twice, 🎈 bup can be up to 2x faster if your internet connection is faster than the read-speed of your HDD
+note that since up2k has to read the file twice, `[🎈 bup]` can be up to 2x faster in extreme cases (if your internet connection is faster than the read-speed of your HDD)
 
 up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well thanks to tls also functioning as an integrity check
 

@@ -286,6 +311,7 @@ see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copy
 `--no-mutagen` disables mutagen and uses ffprobe instead, which...
 * is about 20x slower than mutagen
 * catches a few tags that mutagen doesn't
+* melodic key, video resolution, framerate, pixfmt
 * avoids pulling any GPL code into copyparty
 * more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve
 

@@ -298,6 +324,11 @@ copyparty can invoke external programs to collect additional metadata for files
 * `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
 * `-v ~/music::r:cmtp=.bpm=~/bin/audio-bpm.py:cmtp=key=f,t5,~/bin/audio-key.py` both as a per-volume config wow this is getting ugly
 
+*but wait, there's more!* `-mtp` can be used for non-audio files as well using the `a` flag: `ay` only do audio files, `an` only do non-audio files, or `ad` do all files (d as in dontcare)
+
+* `-mtp ext=an,~/bin/file-ext.py` runs `~/bin/file-ext.py` to get the `ext` tag only if file is not audio (`an`)
+* `-mtp arch,built,ver,orig=an,eexe,edll,~/bin/exe.py` runs `~/bin/exe.py` to get properties about windows-binaries only if file is not audio (`an`) and file extension is exe or dll
+
 
 ## complete examples
 
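
the provider contract is visible in the two new scripts added later in this changeset (`bin/mtag/file-ext.py`, `bin/mtag/exe.py`): copyparty runs the program with the file path as `argv[1]` and reads the tag value(s) from stdout, plain text for a single tag or JSON for several. a hypothetical single-tag provider in the same style (the name and the tag are invented for illustration):

```python
#!/usr/bin/env python
# hypothetical provider, not part of this changeset: prints one value on stdout,
# so e.g. `-mtp lines=an,~/bin/file-lines.py` would index a "lines" tag
# for non-audio files (flag syntax as described above)

import sys

with open(sys.argv[1], "rb") as f:
    print(sum(1 for _ in f))
```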
@@ -307,7 +338,7 @@ copyparty can invoke external programs to collect additional metadata for files
 
 # browser support
 
 
 
 `ie` = internet-explorer, `ff` = firefox, `c` = chrome, `iOS` = iPhone/iPad, `Andr` = Android
 

@@ -333,14 +364,18 @@
 * `*2` using a wasm decoder which can sometimes get stuck and consumes a bit more power
 
 quick summary of more eccentric web-browsers trying to view a directory index:
-* safari (14.0.3/macos) is chrome with janky wasm, so playing opus can deadlock the javascript engine
-* safari (14.0.1/iOS) same as macos, except it recovers from the deadlocks if you poke it a bit
-* links (2.21/macports) can browse, login, upload/mkdir/msg
-* lynx (2.8.9/macports) can browse, login, upload/mkdir/msg
-* w3m (0.5.3/macports) can browse, login, upload at 100kB/s, mkdir/msg
-* netsurf (3.10/arch) is basically ie6 with much better css (javascript has almost no effect)
-* ie4 and netscape 4.0 can browse (text is yellow on white), upload with `?b=u`
-* SerenityOS (22d13d8) hits a page fault, works with `?b=u`, file input not-impl, url params are multiplying
+
+| browser | will it blend |
+| ------- | ------------- |
+| **safari** (14.0.3/macos) | is chrome with janky wasm, so playing opus can deadlock the javascript engine |
+| **safari** (14.0.1/iOS) | same as macos, except it recovers from the deadlocks if you poke it a bit |
+| **links** (2.21/macports) | can browse, login, upload/mkdir/msg |
+| **lynx** (2.8.9/macports) | can browse, login, upload/mkdir/msg |
+| **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
+| **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
+| **ie4** and **netscape** 4.0 | can browse (text is yellow on white), upload with `?b=u` |
+| **SerenityOS** (22d13d8) | hits a page fault, works with `?b=u`, file input not-impl, url params are multiplying |
 
 
 # client examples
 

@@ -371,7 +406,7 @@ copyparty returns a truncated sha512sum of your PUT/POST as base64; you can gene
 quick outline of the up2k protocol, see [uploading](#uploading) for the web-client
 * the up2k client splits a file into an "optimal" number of chunks
 * 1 MiB each, unless that becomes more than 256 chunks
-* tries 1.5M, 2M, 3, 4, 6, ... until <= 256# or chunksize >= 32M
+* tries 1.5M, 2M, 3, 4, 6, ... until <= 256 chunks or size >= 32M
 * client posts the list of hashes, filename, size, last-modified
 * server creates the `wark`, an identifier for this upload
 * `sha512( salt + filesize + chunk_hashes )`
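
taking those two bullets literally, the chunk sizing and the wark could be sketched like this; it is a reading of the outline above, not the actual client/server code, and both the continuation of the size sequence and the hash encoding are assumptions:

```python
# rough sketch under the assumptions stated above
import hashlib

def up2k_chunksize(filesize):
    # "1 MiB each, unless that becomes more than 256 chunks", then
    # "tries 1.5M, 2M, 3, 4, 6, ... until <= 256 chunks or size >= 32M"
    mib = 1024 * 1024
    steps = [1, 1.5, 2, 3, 4, 6, 8, 12, 16, 24, 32]  # continuation past 6 is guessed
    for s in steps:
        cs = int(s * mib)
        if filesize / cs <= 256 or cs >= 32 * mib:
            return cs
    return 32 * mib

def wark(salt, filesize, chunk_hashes):
    # "sha512( salt + filesize + chunk_hashes )"; the concatenation and the
    # output encoding are not specified in the outline, so they are made up here
    h = hashlib.sha512()
    h.update("{}{}{}".format(salt, filesize, "".join(chunk_hashes)).encode("utf-8"))
    return h.hexdigest()
```

under this reading, a 1 GiB file would land on 256 chunks of 4 MiB each.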
@@ -386,13 +421,31 @@
 
 * `jinja2` (is built into the SFX)
 
-**optional,** enables music tags:
+
+## optional dependencies
+
+enable music tags:
 * either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
 * or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
 
-**optional,** will eventually enable thumbnails:
+enable image thumbnails:
 * `Pillow` (requires py2.7 or py3.5+)
 
+enable video thumbnails:
+* `ffmpeg` and `ffprobe` somewhere in `$PATH`
+
+enable reading HEIF pictures:
+* `pyheif-pillow-opener` (requires Linux or a C compiler)
+
+enable reading AVIF pictures:
+* `pillow-avif-plugin`
+
+
+## install recommended deps
+```
+python -m pip install --user -U jinja2 mutagen Pillow
+```
 
 
 ## optional gpl stuff
 

@@ -404,8 +457,8 @@ these are standalone programs and will never be imported / evaluated by copypart
 # sfx
 
 currently there are two self-contained "binaries":
-* [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere
-* [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos
+* [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere, **recommended**
+* [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos, kinda deprecated
 
 launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course
 

@@ -472,7 +525,7 @@ roughly sorted by priority
 * start from a chunk index and just go
 * terminate client on bad data
 * `os.copy_file_range` for up2k cloning
-* support pillow-simd
+* single sha512 across all up2k chunks? maybe
 * figure out the deal with pixel3a not being connectable as hotspot
 * pixel3a having unpredictable 3sec latency in general :||||
 
bin/mtag/exe.py, new file, 96 lines:

@@ -0,0 +1,96 @@
#!/usr/bin/env python

import sys
import time
import json
import pefile

"""
retrieve exe info,
example for multivalue providers
"""


def unk(v):
    return "unk({:04x})".format(v)


class PE2(pefile.PE):
    def __init__(self, *a, **ka):
        for k in [
            # -- parse_data_directories:
            "parse_import_directory",
            "parse_export_directory",
            # "parse_resources_directory",
            "parse_debug_directory",
            "parse_relocations_directory",
            "parse_directory_tls",
            "parse_directory_load_config",
            "parse_delay_import_directory",
            "parse_directory_bound_imports",
            # -- full_load:
            "parse_rich_header",
        ]:
            setattr(self, k, self.noop)

        super(PE2, self).__init__(*a, **ka)

    def noop(*a, **ka):
        pass


try:
    pe = PE2(sys.argv[1], fast_load=False)
except:
    sys.exit(0)

arch = pe.FILE_HEADER.Machine
if arch == 0x14C:
    arch = "x86"
elif arch == 0x8664:
    arch = "x64"
else:
    arch = unk(arch)

try:
    buildtime = time.gmtime(pe.FILE_HEADER.TimeDateStamp)
    buildtime = time.strftime("%Y-%m-%d_%H:%M:%S", buildtime)
except:
    buildtime = "invalid"

ui = pe.OPTIONAL_HEADER.Subsystem
if ui == 2:
    ui = "GUI"
elif ui == 3:
    ui = "cmdline"
else:
    ui = unk(ui)

extra = {}
if hasattr(pe, "FileInfo"):
    for v1 in pe.FileInfo:
        for v2 in v1:
            if v2.name != "StringFileInfo":
                continue

            for v3 in v2.StringTable:
                for k, v in v3.entries.items():
                    v = v.decode("utf-8", "replace").strip()
                    if not v:
                        continue

                    if k in [b"FileVersion", b"ProductVersion"]:
                        extra["ver"] = v

                    if k in [b"OriginalFilename", b"InternalName"]:
                        extra["orig"] = v

r = {
    "arch": arch,
    "built": buildtime,
    "ui": ui,
    "cksum": "{:08x}".format(pe.OPTIONAL_HEADER.CheckSum),
}
r.update(extra)

print(json.dumps(r, indent=4))
bin/mtag/file-ext.py, new file, 9 lines:

@@ -0,0 +1,9 @@
#!/usr/bin/env python

import sys

"""
example that just prints the file extension
"""

print(sys.argv[1].split(".")[-1])
copyparty/__init__.py (file header not captured; filename inferred from the content):

@@ -2,6 +2,7 @@
 from __future__ import print_function, unicode_literals
 
 import platform
+import time
 import sys
 import os
 

@@ -23,6 +24,7 @@ MACOS = platform.system() == "Darwin"
 
 class EnvParams(object):
     def __init__(self):
+        self.t0 = time.time()
         self.mod = os.path.dirname(os.path.realpath(__file__))
         if self.mod.endswith("__init__"):
             self.mod = os.path.dirname(self.mod)
copyparty/__main__.py (file header not captured; filename inferred from the content):

@@ -237,20 +237,29 @@ def run_argparse(argv, formatter):
     ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
     ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
     ap.add_argument("-q", action="store_true", help="quiet")
-    ap.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
     ap.add_argument("-ed", action="store_true", help="enable ?dots")
     ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
     ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
     ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
     ap.add_argument("-nih", action="store_true", help="no info hostname")
     ap.add_argument("-nid", action="store_true", help="no info disk-usage")
+    ap.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
     ap.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
-    ap.add_argument("--no-sendfile", action="store_true", help="disable sendfile (for debugging)")
-    ap.add_argument("--no-scandir", action="store_true", help="disable scandir (for debugging)")
     ap.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
     ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms")
     ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
 
+    ap2 = ap.add_argument_group('thumbnail options')
+    ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
+    ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
+    ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
+    ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
+    ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
+    ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
+    ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
+    ap2.add_argument("--th-clean", metavar="SEC", type=int, default=1800, help="cleanup interval")
+    ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
+
     ap2 = ap.add_argument_group('database options')
     ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
     ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")

@@ -262,7 +271,7 @@ def run_argparse(argv, formatter):
     ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
     ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
     ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
-        default="circle,album,.tn,artist,title,.bpm,key,.dur,.q")
+        default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,ac,vc,res,.fps")
     ap2.add_argument("-mtp", metavar="M=[f,]bin", action="append", type=str, help="read tag M using bin")
     ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
 

@@ -273,6 +282,13 @@ def run_argparse(argv, formatter):
     ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
     ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
     ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
 
+    ap2 = ap.add_argument_group('debug options')
+    ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
+    ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
+    ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
+    ap2.add_argument("--ihead", metavar="HEADER", action='append', help="dump incoming header")
+    ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
+
     return ap.parse_args(args=argv[1:])
 # fmt: on
copyparty/__version__.py (file header not captured; filename inferred from the content):

@@ -1,8 +1,8 @@
 # coding: utf-8
 
-VERSION = (0, 10, 17)
-CODENAME = "zip it"
-BUILD_DT = (2021, 5, 12)
+VERSION = (0, 11, 0)
+CODENAME = "the grid"
+BUILD_DT = (2021, 5, 29)
 
 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
copyparty/authsrv.py (file header not captured; filename inferred from the content):

@@ -475,8 +475,10 @@ class AuthSrv(object):
             # verify tags mentioned by -mt[mp] are used by -mte
             local_mtp = {}
             local_only_mtp = {}
-            for a in vol.flags.get("mtp", []) + vol.flags.get("mtm", []):
-                a = a.split("=")[0]
+            tags = vol.flags.get("mtp", []) + vol.flags.get("mtm", [])
+            tags = [x.split("=")[0] for x in tags]
+            tags = [y for x in tags for y in x.split(",")]
+            for a in tags:
                 local_mtp[a] = True
                 local = True
                 for b in self.args.mtp or []:

@@ -505,8 +507,10 @@ class AuthSrv(object):
                     self.log(m.format(vol.vpath, mtp), 1)
                     errors = True
 
-        for mtp in self.args.mtp or []:
-            mtp = mtp.split("=")[0]
+        tags = self.args.mtp or []
+        tags = [x.split("=")[0] for x in tags]
+        tags = [y for x in tags for y in x.split(",")]
+        for mtp in tags:
             if mtp not in all_mte:
                 m = 'metadata tag "{}" is defined by "-mtm" or "-mtp", but is not used by "-mte" (or by any "cmte" volume-flag)'
                 self.log(m.format(mtp), 1)
copyparty/httpcli.py (file header not captured; filename inferred from the content):

@@ -22,6 +22,10 @@ if not PY2:
     unicode = str
 
 
+NO_CACHE = {"Cache-Control": "no-cache"}
+NO_STORE = {"Cache-Control": "no-store; max-age=0"}
+
+
 class HttpCli(object):
     """
     Spawned by HttpConn to process one http transaction

@@ -36,6 +40,8 @@ class HttpCli(object):
         self.addr = conn.addr
         self.args = conn.args
         self.auth = conn.auth
+        self.ico = conn.ico
+        self.thumbcli = conn.thumbcli
         self.log_func = conn.log_func
         self.log_src = conn.log_src
         self.tls = hasattr(self.s, "cipher")

@@ -100,6 +106,16 @@ class HttpCli(object):
                 self.ip = v.split(",")[0]
                 self.log_src = self.conn.set_rproxy(self.ip)
 
+        if self.args.ihead:
+            keys = self.args.ihead
+            if "*" in keys:
+                keys = list(sorted(self.headers.keys()))
+
+            for k in keys:
+                v = self.headers.get(k)
+                if v is not None:
+                    self.log("[H] {}: \033[33m[{}]".format(k, v), 6)
+
         # split req into vpath + uparam
         uparam = {}
         if "?" not in self.req:

@@ -120,29 +136,35 @@ class HttpCli(object):
             else:
                 uparam[k.lower()] = False
 
+        self.ouparam = {k: v for k, v in uparam.items()}
+
+        cookies = self.headers.get("cookie") or {}
+        if cookies:
+            cookies = [x.split("=", 1) for x in cookies.split(";") if "=" in x]
+            cookies = {k.strip(): unescape_cookie(v) for k, v in cookies}
+            for kc, ku in [["cppwd", "pw"], ["b", "b"]]:
+                if kc in cookies and ku not in uparam:
+                    uparam[ku] = cookies[kc]
+
         self.uparam = uparam
+        self.cookies = cookies
         self.vpath = unquotep(vpath)
 
-        pwd = None
-        if "cookie" in self.headers:
-            cookies = self.headers["cookie"].split(";")
-            for k, v in [x.split("=", 1) for x in cookies]:
-                if k.strip() != "cppwd":
-                    continue
-
-                pwd = unescape_cookie(v)
-                break
-
-        pwd = uparam.get("pw", pwd)
+        pwd = uparam.get("pw")
         self.uname = self.auth.iuser.get(pwd, "*")
         if self.uname:
            self.rvol = self.auth.vfs.user_tree(self.uname, readable=True)
            self.wvol = self.auth.vfs.user_tree(self.uname, writable=True)
 
         ua = self.headers.get("user-agent", "")
-        if ua.startswith("rclone/"):
+        self.is_rclone = ua.startswith("rclone/")
+        if self.is_rclone:
             uparam["raw"] = False
             uparam["dots"] = False
+            uparam["b"] = False
+            cookies["b"] = False
+
+        self.do_log = not self.conn.lf_url or not self.conn.lf_url.search(self.req)
 
         try:
             if self.mode in ["GET", "HEAD"]:

@@ -218,7 +240,14 @@ class HttpCli(object):
         removing anything in rm, adding pairs in add
         """
 
-        kv = {k: v for k, v in self.uparam.items() if k not in rm}
+        if self.is_rclone:
+            return ""
+
+        kv = {
+            k: v
+            for k, v in self.uparam.items()
+            if k not in rm and self.cookies.get(k) != v
+        }
         kv.update(add)
         if not kv:
             return ""

@@ -226,21 +255,43 @@ class HttpCli(object):
         r = ["{}={}".format(k, quotep(v)) if v else k for k, v in kv.items()]
         return "?" + "&".join(r)
 
+    def redirect(
+        self, vpath, suf="", msg="aight", flavor="go to", click=True, use302=False
+    ):
+        html = self.j2(
+            "msg",
+            h2='<a href="/{}">{} /{}</a>'.format(
+                quotep(vpath) + suf, flavor, html_escape(vpath, crlf=True) + suf
+            ),
+            pre=msg,
+            click=click,
+        ).encode("utf-8", "replace")
+
+        if use302:
+            h = {"Location": "/" + vpath, "Cache-Control": "no-cache"}
+            self.reply(html, status=302, headers=h)
+        else:
+            self.reply(html)
+
     def handle_get(self):
-        logmsg = "{:4} {}".format(self.mode, self.req)
+        if self.do_log:
+            logmsg = "{:4} {}".format(self.mode, self.req)
 
         if "range" in self.headers:
             try:
                 rval = self.headers["range"].split("=", 1)[1]
             except:
                 rval = self.headers["range"]
 
             logmsg += " [\033[36m" + rval + "\033[0m]"
 
         self.log(logmsg)
 
         # "embedded" resources
         if self.vpath.startswith(".cpr"):
+            if self.vpath.startswith(".cpr/ico/"):
+                return self.tx_ico(self.vpath.split("/")[-1], exact=True)
+
             static_path = os.path.join(E.mod, "web/", self.vpath[5:])
             return self.tx_file(static_path)
 
@@ -248,16 +299,18 @@ class HttpCli(object):
             return self.tx_tree()
 
         # conditional redirect to single volumes
-        if self.vpath == "" and not self.uparam:
+        if self.vpath == "" and not self.ouparam:
             nread = len(self.rvol)
             nwrite = len(self.wvol)
             if nread + nwrite == 1 or (self.rvol == self.wvol and nread == 1):
                 if nread == 1:
-                    self.vpath = self.rvol[0]
+                    vpath = self.rvol[0]
                 else:
-                    self.vpath = self.wvol[0]
+                    vpath = self.wvol[0]
 
-                self.absolute_urls = True
+                if self.vpath != vpath:
+                    self.redirect(vpath, flavor="redirecting to", use302=True)
+                    return True
 
         self.readable, self.writable = self.conn.auth.vfs.can_access(
             self.vpath, self.uname

@@ -276,7 +329,9 @@ class HttpCli(object):
             return self.tx_browser()
 
     def handle_options(self):
-        self.log("OPTIONS " + self.req)
+        if self.do_log:
+            self.log("OPTIONS " + self.req)
+
         self.send_headers(
             None,
             204,

@@ -377,7 +432,7 @@ class HttpCli(object):
         fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
         path = os.path.join(fdir, fn)
 
-        with open(path, "wb", 512 * 1024) as f:
+        with open(fsenc(path), "wb", 512 * 1024) as f:
             post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
 
         self.conn.hsrv.broker.put(

@@ -497,9 +552,9 @@ class HttpCli(object):
         if sub:
             try:
                 dst = os.path.join(vfs.realpath, rem)
-                os.makedirs(dst)
+                os.makedirs(fsenc(dst))
             except:
-                if not os.path.isdir(dst):
+                if not os.path.isdir(fsenc(dst)):
                     raise Pebkac(400, "some file got your folder name")
 
         x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)

@@ -587,7 +642,7 @@
 
         reader = read_socket(self.sr, remains)
 
-        with open(path, "rb+", 512 * 1024) as f:
+        with open(fsenc(path), "rb+", 512 * 1024) as f:
             f.seek(cstart[0])
             post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
 

@@ -630,7 +685,7 @@
             times = (int(time.time()), int(lastmod))
             self.log("no more chunks, setting times {}".format(times))
             try:
-                os.utime(path, times)
+                os.utime(fsenc(path), times)
             except:
                 self.log("failed to utime ({}, {})".format(path, times))
 

@@ -645,13 +700,16 @@
 
         if pwd in self.auth.iuser:
             msg = "login ok"
+            dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365)
+            exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
         else:
             msg = "naw dude"
             pwd = "x"  # nosec
+            exp = "Fri, 15 Aug 1997 01:00:00 GMT"
 
-        h = {"Set-Cookie": "cppwd={}; Path=/; SameSite=Lax".format(pwd)}
+        ck = "cppwd={}; Path=/; Expires={}; SameSite=Lax".format(pwd, exp)
         html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
-        self.reply(html.encode("utf-8"), headers=h)
+        self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
         return True
 
     def handle_mkdir(self):

@@ -680,14 +738,7 @@
             raise Pebkac(500, "mkdir failed, check the logs")
 
         vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
-        esc_paths = [quotep(vpath), html_escape(vpath, crlf=True)]
-        html = self.j2(
-            "msg",
-            h2='<a href="/{}">go to /{}</a>'.format(*esc_paths),
-            pre="aight",
-            click=True,
-        )
-        self.reply(html.encode("utf-8", "replace"))
+        self.redirect(vpath)
         return True
 
     def handle_new_md(self):

@@ -714,15 +765,7 @@
             f.write(b"`GRUNNUR`\n")
 
         vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
-        html = self.j2(
-            "msg",
-            h2='<a href="/{}?edit">go to /{}?edit</a>'.format(
-                quotep(vpath), html_escape(vpath)
-            ),
-            pre="aight",
-            click=True,
-        )
-        self.reply(html.encode("utf-8", "replace"))
+        self.redirect(vpath, "?edit")
         return True
 
     def handle_plain_upload(self):

@@ -763,7 +806,7 @@
                 if sz == 0:
                     raise Pebkac(400, "empty files in post")
 
-                files.append([sz, sha512_hex])
+                files.append([sz, sha512_hex, p_file, fname])
                 self.conn.hsrv.broker.put(
                     False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fname
                 )

@@ -772,12 +815,16 @@
         except Pebkac:
             if fname != os.devnull:
                 fp = os.path.join(fdir, fname)
+                fp2 = fp
+                if self.args.dotpart:
+                    fp2 = os.path.join(fdir, "." + fname)
+
                 suffix = ".PARTIAL"
                 try:
-                    os.rename(fsenc(fp), fsenc(fp + suffix))
+                    os.rename(fsenc(fp), fsenc(fp2 + suffix))
                 except:
-                    fp = fp[: -len(suffix)]
-                    os.rename(fsenc(fp), fsenc(fp + suffix))
+                    fp2 = fp2[: -len(suffix) - 1]
+                    os.rename(fsenc(fp), fsenc(fp2 + suffix))
 
             raise
 

@@ -794,10 +841,13 @@
             errmsg = "ERROR: " + errmsg
             status = "ERROR"
 
-        msg = "{0} // {1} bytes // {2:.3f} MiB/s\n".format(status, sz_total, spd)
+        msg = "{} // {} bytes // {:.3f} MiB/s\n".format(status, sz_total, spd)
 
-        for sz, sha512 in files:
-            msg += "sha512: {0} // {1} bytes\n".format(sha512[:56], sz)
+        for sz, sha512, ofn, lfn in files:
+            vpath = self.vpath + "/" + lfn
+            msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format(
+                sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True)
+            )
             # truncated SHA-512 prevents length extension attacks;
             # using SHA-512/224, optionally SHA-512/256 = :64
 
@@ -805,32 +855,13 @@ class HttpCli(object):
|
|||||||
self.log("{} {}".format(vspd, msg))
|
self.log("{} {}".format(vspd, msg))
|
||||||
|
|
||||||
if not nullwrite:
|
if not nullwrite:
|
||||||
# TODO this is bad
|
|
||||||
log_fn = "up.{:.6f}.txt".format(t0)
|
log_fn = "up.{:.6f}.txt".format(t0)
|
||||||
with open(log_fn, "wb") as f:
|
with open(log_fn, "wb") as f:
|
||||||
f.write(
|
ft = "{}:{}".format(self.ip, self.addr[1])
|
||||||
(
|
ft = "{}\n{}\n{}\n".format(ft, msg.rstrip(), errmsg)
|
||||||
"\n".join(
|
f.write(ft.encode("utf-8"))
|
||||||
unicode(x)
|
|
||||||
for x in [
|
|
||||||
":".join(unicode(x) for x in [self.ip, self.addr[1]]),
|
|
||||||
msg.rstrip(),
|
|
||||||
]
|
|
||||||
)
|
|
||||||
+ "\n"
|
|
||||||
+ errmsg
|
|
||||||
+ "\n"
|
|
||||||
).encode("utf-8")
|
|
||||||
)
|
|
||||||
|
|
||||||
html = self.j2(
|
self.redirect(self.vpath, msg=msg, flavor="return to", click=False)
|
||||||
"msg",
|
|
||||||
h2='<a href="/{}">return to /{}</a>'.format(
|
|
||||||
quotep(self.vpath), html_escape(self.vpath)
|
|
||||||
),
|
|
||||||
pre=msg,
|
|
||||||
)
|
|
||||||
self.reply(html.encode("utf-8", "replace"))
|
|
||||||
self.parser.drop()
|
self.parser.drop()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -905,16 +936,16 @@ class HttpCli(object):
|
|||||||
mdir, mfile = os.path.split(fp)
|
mdir, mfile = os.path.split(fp)
|
||||||
mfile2 = "{}.{:.3f}.md".format(mfile[:-3], srv_lastmod)
|
mfile2 = "{}.{:.3f}.md".format(mfile[:-3], srv_lastmod)
|
||||||
try:
|
try:
|
||||||
os.mkdir(os.path.join(mdir, ".hist"))
|
os.mkdir(fsenc(os.path.join(mdir, ".hist")))
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
os.rename(fp, os.path.join(mdir, ".hist", mfile2))
|
os.rename(fsenc(fp), fsenc(os.path.join(mdir, ".hist", mfile2)))
|
||||||
|
|
||||||
p_field, _, p_data = next(self.parser.gen)
|
p_field, _, p_data = next(self.parser.gen)
|
||||||
if p_field != "body":
|
if p_field != "body":
|
||||||
raise Pebkac(400, "expected body, got {}".format(p_field))
|
raise Pebkac(400, "expected body, got {}".format(p_field))
|
||||||
|
|
||||||
with open(fp, "wb", 512 * 1024) as f:
|
with open(fsenc(fp), "wb", 512 * 1024) as f:
|
||||||
sz, sha512, _ = hashcopy(self.conn, p_data, f)
|
sz, sha512, _ = hashcopy(self.conn, p_data, f)
|
||||||
|
|
||||||
new_lastmod = os.stat(fsenc(fp)).st_mtime
|
new_lastmod = os.stat(fsenc(fp)).st_mtime
|
||||||
@@ -930,13 +961,11 @@ class HttpCli(object):
return True

def _chk_lastmod(self, file_ts):
-file_dt = datetime.utcfromtimestamp(file_ts)
-file_lastmod = file_dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
+file_lastmod = http_ts(file_ts)

cli_lastmod = self.headers.get("if-modified-since")
if cli_lastmod:
try:
-cli_dt = time.strptime(cli_lastmod, "%a, %d %b %Y %H:%M:%S GMT")
+cli_dt = time.strptime(cli_lastmod, HTTP_TS_FMT)
cli_ts = calendar.timegm(cli_dt)
return file_lastmod, int(file_ts) > int(cli_ts)
except Exception as ex:

@@ -1083,19 +1112,21 @@ class HttpCli(object):
# send reply

if not is_compressed:
-self.out_headers["Cache-Control"] = "no-cache"
+self.out_headers.update(NO_CACHE)

self.out_headers["Accept-Ranges"] = "bytes"
self.send_headers(
length=upper - lower,
status=status,
-mime=guess_mime(req_path)[0] or "application/octet-stream",
+mime=guess_mime(req_path),
)

logmsg += unicode(status) + logtail

if self.mode == "HEAD" or not do_send:
-self.log(logmsg)
+if self.do_log:
+self.log(logmsg)

return True

ret = True

@@ -1109,7 +1140,9 @@ class HttpCli(object):
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"

spd = self._spd((upper - lower) - remains)
-self.log("{}, {}".format(logmsg, spd))
+if self.do_log:
+self.log("{}, {}".format(logmsg, spd))

return ret

def tx_zip(self, fmt, uarg, vn, rem, items, dots):

@@ -1175,6 +1208,34 @@ class HttpCli(object):
self.log("{}, {}".format(logmsg, spd))
return True

+def tx_ico(self, ext, exact=False):
+if ext.endswith("/"):
+ext = "folder"
+exact = True
+
+bad = re.compile(r"[](){}/[]|^[0-9_-]*$")
+n = ext.split(".")[::-1]
+if not exact:
+n = n[:-1]
+
+ext = ""
+for v in n:
+if len(v) > 7 or bad.search(v):
+break
+
+ext = "{}.{}".format(v, ext)
+
+ext = ext.rstrip(".") or "unk"
+if len(ext) > 11:
+ext = "⋯" + ext[-9:]
+
+mime, ico = self.ico.get(ext, not exact)
+
+dt = datetime.utcfromtimestamp(E.t0)
+lm = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
+self.reply(ico, mime=mime, headers={"Last-Modified": lm})
+return True
+
def tx_md(self, fs_path):
logmsg = "{:4} {} ".format("", self.req)
@@ -1197,7 +1258,7 @@ class HttpCli(object):
file_ts = max(ts_md, ts_html)
file_lastmod, do_send = self._chk_lastmod(file_ts)
self.out_headers["Last-Modified"] = file_lastmod
-self.out_headers["Cache-Control"] = "no-cache"
+self.out_headers.update(NO_CACHE)
status = 200 if do_send else 304

boundary = "\roll\tide"

@@ -1209,7 +1270,7 @@ class HttpCli(object):
"md_chk_rate": self.args.mcr,
"md": boundary,
}
-html = template.render(**targs).encode("utf-8")
+html = template.render(**targs).encode("utf-8", "replace")
html = html.split(boundary.encode("utf-8"))
if len(html) != 2:
raise Exception("boundary appears in " + html_path)

@@ -1218,7 +1279,9 @@ class HttpCli(object):

logmsg += unicode(status)
if self.mode == "HEAD" or not do_send:
-self.log(logmsg)
+if self.do_log:
+self.log(logmsg)

return True

try:

@@ -1232,7 +1295,9 @@ class HttpCli(object):
self.log(logmsg + " \033[31md/c\033[0m")
return False

-self.log(logmsg + " " + unicode(len(html)))
+if self.do_log:
+self.log(logmsg + " " + unicode(len(html)))

return True

def tx_mounts(self):
@@ -1240,7 +1305,7 @@ class HttpCli(object):
rvol = [x + "/" if x else x for x in self.rvol]
wvol = [x + "/" if x else x for x in self.wvol]
html = self.j2("splash", this=self, rvol=rvol, wvol=wvol, url_suf=suf)
-self.reply(html.encode("utf-8"))
+self.reply(html.encode("utf-8"), headers=NO_STORE)
return True

def tx_tree(self):

@@ -1315,10 +1380,31 @@ class HttpCli(object):
)
abspath = vn.canonical(rem)

-if not os.path.exists(fsenc(abspath)):
-# print(abspath)
+try:
+st = os.stat(fsenc(abspath))
+except:
raise Pebkac(404)

+if self.readable and not stat.S_ISDIR(st.st_mode):
+if rem.startswith(".hist/up2k."):
+raise Pebkac(403)
+
+th_fmt = self.uparam.get("th")
+if th_fmt is not None:
+thp = None
+if self.thumbcli:
+thp = self.thumbcli.get(vn.realpath, rem, int(st.st_mtime), th_fmt)
+
+if thp:
+return self.tx_file(thp)
+
+return self.tx_ico(rem)
+
+if abspath.endswith(".md") and "raw" not in self.uparam:
+return self.tx_md(abspath)
+
+return self.tx_file(abspath)
+
srv_info = []

try:
@@ -1337,7 +1423,7 @@ class HttpCli(object):
)
srv_info.append(humansize(bfree.value) + " free")
else:
-sv = os.statvfs(abspath)
+sv = os.statvfs(fsenc(abspath))
free = humansize(sv.f_frsize * sv.f_bfree, True)
total = humansize(sv.f_frsize * sv.f_blocks, True)

@@ -1397,25 +1483,20 @@ class HttpCli(object):
if not self.readable:
if is_ls:
ret = json.dumps(ls_ret)
-self.reply(ret.encode("utf-8", "replace"), mime="application/json")
+self.reply(
+ret.encode("utf-8", "replace"),
+mime="application/json",
+headers=NO_STORE,
+)
return True

-if not os.path.isdir(fsenc(abspath)):
+if not stat.S_ISDIR(st.st_mode):
raise Pebkac(404)

html = self.j2(tpl, **j2a)
-self.reply(html.encode("utf-8", "replace"))
+self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
return True

-if not os.path.isdir(fsenc(abspath)):
-if abspath.endswith(".md") and "raw" not in self.uparam:
-return self.tx_md(abspath)
-
-if rem.startswith(".hist/up2k."):
-raise Pebkac(403)
-
-return self.tx_file(abspath)
-
for k in ["zip", "tar"]:
v = self.uparam.get(k)
if v is not None:

@@ -1551,7 +1632,11 @@ class HttpCli(object):
ls_ret["files"] = files
ls_ret["taglist"] = taglist
ret = json.dumps(ls_ret)
-self.reply(ret.encode("utf-8", "replace"), mime="application/json")
+self.reply(
+ret.encode("utf-8", "replace"),
+mime="application/json",
+headers=NO_STORE,
+)
return True

j2a["files"] = dirs + files

@@ -1561,5 +1646,5 @@ class HttpCli(object):
j2a["tag_order"] = json.dumps(vn.flags["mte"].split(","))

html = self.j2(tpl, **j2a)
-self.reply(html.encode("utf-8", "replace"))
+self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
return True
@@ -1,6 +1,7 @@
# coding: utf-8
from __future__ import print_function, unicode_literals

+import re
import os
import sys
import time

@@ -16,6 +17,9 @@ from .__init__ import E
from .util import Unrecv
from .httpcli import HttpCli
from .u2idx import U2idx
+from .th_cli import ThumbCli
+from .th_srv import HAVE_PIL
+from .ico import Ico


class HttpConn(object):

@@ -33,11 +37,16 @@ class HttpConn(object):
self.auth = hsrv.auth
self.cert_path = hsrv.cert_path

+enth = HAVE_PIL and not self.args.no_thumb
+self.thumbcli = ThumbCli(hsrv.broker) if enth else None
+self.ico = Ico(self.args)

self.t0 = time.time()
self.nbyte = 0
self.workload = 0
self.u2idx = None
self.log_func = hsrv.log
+self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None
self.set_rproxy()

def set_rproxy(self, ip=None):
39
copyparty/ico.py
Normal file
@@ -0,0 +1,39 @@
+import hashlib
+import colorsys
+
+from .__init__ import PY2
+
+
+class Ico(object):
+def __init__(self, args):
+self.args = args
+
+def get(self, ext, as_thumb):
+"""placeholder to make thumbnails not break"""
+
+h = hashlib.md5(ext.encode("utf-8")).digest()[:2]
+if PY2:
+h = [ord(x) for x in h]
+
+c1 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 0.3)
+c2 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 1)
+c = list(c1) + list(c2)
+c = [int(x * 255) for x in c]
+c = "".join(["{:02x}".format(x) for x in c])
+
+h = 30
+if not self.args.th_no_crop and as_thumb:
+w, h = self.args.th_size.split("x")
+h = int(100 / (float(w) / float(h)))
+
+svg = """\
+<?xml version="1.0" encoding="UTF-8"?>
+<svg version="1.1" viewBox="0 0 100 {}" xmlns="http://www.w3.org/2000/svg"><g>
+<rect width="100%" height="100%" fill="#{}" />
+<text x="50%" y="50%" dominant-baseline="middle" text-anchor="middle" xml:space="preserve"
+fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text>
+</g></svg>
+"""
+svg = svg.format(h, c[:6], c[6:], ext).encode("utf-8")
+
+return ["image/svg+xml", svg]
@@ -4,6 +4,7 @@ from __future__ import print_function, unicode_literals
import re
import os
import sys
+import json
import shutil
import subprocess as sp

@@ -14,6 +15,204 @@ if not PY2:
|
|||||||
unicode = str
|
unicode = str
|
||||||
|
|
||||||
|
|
||||||
|
def have_ff(cmd):
|
||||||
|
if PY2:
|
||||||
|
cmd = (cmd + " -version").encode("ascii").split(b" ")
|
||||||
|
try:
|
||||||
|
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE).communicate()
|
||||||
|
return True
|
||||||
|
except:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return bool(shutil.which(cmd))
|
||||||
|
|
||||||
|
|
||||||
|
HAVE_FFMPEG = have_ff("ffmpeg")
|
||||||
|
HAVE_FFPROBE = have_ff("ffprobe")
|
||||||
|
|
||||||
|
|
||||||
|
class MParser(object):
|
||||||
|
def __init__(self, cmdline):
|
||||||
|
self.tag, args = cmdline.split("=", 1)
|
||||||
|
self.tags = self.tag.split(",")
|
||||||
|
|
||||||
|
self.timeout = 30
|
||||||
|
self.force = False
|
||||||
|
self.audio = "y"
|
||||||
|
self.ext = []
|
||||||
|
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
bp = os.path.expanduser(args)
|
||||||
|
if os.path.exists(bp):
|
||||||
|
self.bin = bp
|
||||||
|
return
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
arg, args = args.split(",", 1)
|
||||||
|
arg = arg.lower()
|
||||||
|
|
||||||
|
if arg.startswith("a"):
|
||||||
|
self.audio = arg[1:] # [r]equire [n]ot [d]ontcare
|
||||||
|
continue
|
||||||
|
|
||||||
|
if arg == "f":
|
||||||
|
self.force = True
|
||||||
|
continue
|
||||||
|
|
||||||
|
if arg.startswith("t"):
|
||||||
|
self.timeout = int(arg[1:])
|
||||||
|
continue
|
||||||
|
|
||||||
|
if arg.startswith("e"):
|
||||||
|
self.ext.append(arg[1:])
|
||||||
|
continue
|
||||||
|
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
|
|
||||||
|
def ffprobe(abspath):
|
||||||
|
cmd = [
|
||||||
|
b"ffprobe",
|
||||||
|
b"-hide_banner",
|
||||||
|
b"-show_streams",
|
||||||
|
b"-show_format",
|
||||||
|
b"--",
|
||||||
|
fsenc(abspath),
|
||||||
|
]
|
||||||
|
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
||||||
|
r = p.communicate()
|
||||||
|
txt = r[0].decode("utf-8", "replace")
|
||||||
|
return parse_ffprobe(txt)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_ffprobe(txt):
|
||||||
|
"""ffprobe -show_format -show_streams"""
|
||||||
|
streams = []
|
||||||
|
fmt = {}
|
||||||
|
g = None
|
||||||
|
for ln in [x.rstrip("\r") for x in txt.split("\n")]:
|
||||||
|
try:
|
||||||
|
k, v = ln.split("=", 1)
|
||||||
|
g[k] = v
|
||||||
|
continue
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if ln == "[STREAM]":
|
||||||
|
g = {}
|
||||||
|
streams.append(g)
|
||||||
|
|
||||||
|
if ln == "[FORMAT]":
|
||||||
|
g = {"codec_type": "format"} # heh
|
||||||
|
fmt = g
|
||||||
|
|
||||||
|
streams = [fmt] + streams
|
||||||
|
ret = {} # processed
|
||||||
|
md = {} # raw tags
|
||||||
|
|
||||||
|
have = {}
|
||||||
|
for strm in streams:
|
||||||
|
typ = strm.get("codec_type")
|
||||||
|
if typ in have:
|
||||||
|
continue
|
||||||
|
|
||||||
|
have[typ] = True
|
||||||
|
kvm = []
|
||||||
|
|
||||||
|
if typ == "audio":
|
||||||
|
kvm = [
|
||||||
|
["codec_name", "ac"],
|
||||||
|
["channel_layout", "chs"],
|
||||||
|
["sample_rate", ".hz"],
|
||||||
|
["bit_rate", ".aq"],
|
||||||
|
["duration", ".dur"],
|
||||||
|
]
|
||||||
|
|
||||||
|
if typ == "video":
|
||||||
|
if strm.get("DISPOSITION:attached_pic") == "1" or fmt.get(
|
||||||
|
"format_name"
|
||||||
|
) in ["mp3", "ogg", "flac"]:
|
||||||
|
continue
|
||||||
|
|
||||||
|
kvm = [
|
||||||
|
["codec_name", "vc"],
|
||||||
|
["pix_fmt", "pixfmt"],
|
||||||
|
["r_frame_rate", ".fps"],
|
||||||
|
["bit_rate", ".vq"],
|
||||||
|
["width", ".resw"],
|
||||||
|
["height", ".resh"],
|
||||||
|
["duration", ".dur"],
|
||||||
|
]
|
||||||
|
|
||||||
|
if typ == "format":
|
||||||
|
kvm = [["duration", ".dur"], ["bit_rate", ".q"]]
|
||||||
|
|
||||||
|
for sk, rk in kvm:
|
||||||
|
v = strm.get(sk)
|
||||||
|
if v is None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if rk.startswith("."):
|
||||||
|
try:
|
||||||
|
v = float(v)
|
||||||
|
v2 = ret.get(rk)
|
||||||
|
if v2 is None or v > v2:
|
||||||
|
ret[rk] = v
|
||||||
|
except:
|
||||||
|
# sqlite doesnt care but the code below does
|
||||||
|
if v not in ["N/A"]:
|
||||||
|
ret[rk] = v
|
||||||
|
else:
|
||||||
|
ret[rk] = v
|
||||||
|
|
||||||
|
if ret.get("vc") == "ansi": # shellscript
|
||||||
|
return {}, {}
|
||||||
|
|
||||||
|
for strm in streams:
|
||||||
|
for k, v in strm.items():
|
||||||
|
if not k.startswith("TAG:"):
|
||||||
|
continue
|
||||||
|
|
||||||
|
k = k[4:].strip()
|
||||||
|
v = v.strip()
|
||||||
|
if k and v:
|
||||||
|
md[k] = [v]
|
||||||
|
|
||||||
|
for k in [".q", ".vq", ".aq"]:
|
||||||
|
if k in ret:
|
||||||
|
ret[k] /= 1000 # bit_rate=320000
|
||||||
|
|
||||||
|
for k in [".q", ".vq", ".aq", ".resw", ".resh"]:
|
||||||
|
if k in ret:
|
||||||
|
ret[k] = int(ret[k])
|
||||||
|
|
||||||
|
if ".fps" in ret:
|
||||||
|
fps = ret[".fps"]
|
||||||
|
if "/" in fps:
|
||||||
|
fa, fb = fps.split("/")
|
||||||
|
fps = int(fa) * 1.0 / int(fb)
|
||||||
|
|
||||||
|
if fps < 1000 and fmt.get("format_name") not in ["image2", "png_pipe"]:
|
||||||
|
ret[".fps"] = round(fps, 3)
|
||||||
|
else:
|
||||||
|
del ret[".fps"]
|
||||||
|
|
||||||
|
if ".dur" in ret:
|
||||||
|
if ret[".dur"] < 0.1:
|
||||||
|
del ret[".dur"]
|
||||||
|
if ".q" in ret:
|
||||||
|
del ret[".q"]
|
||||||
|
|
||||||
|
if ".resw" in ret and ".resh" in ret:
|
||||||
|
ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"])
|
||||||
|
|
||||||
|
ret = {k: [0, v] for k, v in ret.items()}
|
||||||
|
|
||||||
|
return ret, md
|
||||||
|
|
||||||
|
|
||||||
class MTag(object):
|
class MTag(object):
|
||||||
def __init__(self, log_func, args):
|
def __init__(self, log_func, args):
|
||||||
self.log_func = log_func
|
self.log_func = log_func
|
||||||
@@ -35,15 +234,7 @@ class MTag(object):
|
|||||||
self.get = self.get_ffprobe
|
self.get = self.get_ffprobe
|
||||||
self.prefer_mt = True
|
self.prefer_mt = True
|
||||||
# about 20x slower
|
# about 20x slower
|
||||||
if PY2:
|
self.usable = HAVE_FFPROBE
|
||||||
cmd = [b"ffprobe", b"-version"]
|
|
||||||
try:
|
|
||||||
sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
|
||||||
except:
|
|
||||||
self.usable = False
|
|
||||||
else:
|
|
||||||
if not shutil.which("ffprobe"):
|
|
||||||
self.usable = False
|
|
||||||
|
|
||||||
if self.usable and WINDOWS and sys.version_info < (3, 8):
|
if self.usable and WINDOWS and sys.version_info < (3, 8):
|
||||||
self.usable = False
|
self.usable = False
|
||||||
@@ -52,8 +243,10 @@ class MTag(object):
|
|||||||
self.log(msg, c=1)
|
self.log(msg, c=1)
|
||||||
|
|
||||||
if not self.usable:
|
if not self.usable:
|
||||||
msg = "need mutagen{} to read media tags so please run this:\n {} -m pip install --user mutagen"
|
msg = "need mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
|
||||||
self.log(msg.format(or_ffprobe, os.path.basename(sys.executable)), c=1)
|
self.log(
|
||||||
|
msg.format(or_ffprobe, " " * 37, os.path.basename(sys.executable)), c=1
|
||||||
|
)
|
||||||
return
|
return
|
||||||
|
|
||||||
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
|
||||||
@@ -201,7 +394,7 @@ class MTag(object):
|
|||||||
import mutagen
|
import mutagen
|
||||||
|
|
||||||
try:
|
try:
|
||||||
md = mutagen.File(abspath, easy=True)
|
md = mutagen.File(fsenc(abspath), easy=True)
|
||||||
x = md.info.length
|
x = md.info.length
|
||||||
except Exception as ex:
|
except Exception as ex:
|
||||||
return {}
|
return {}
|
||||||
@@ -212,7 +405,7 @@ class MTag(object):
|
|||||||
try:
|
try:
|
||||||
q = int(md.info.bitrate / 1024)
|
q = int(md.info.bitrate / 1024)
|
||||||
except:
|
except:
|
||||||
q = int((os.path.getsize(abspath) / dur) / 128)
|
q = int((os.path.getsize(fsenc(abspath)) / dur) / 128)
|
||||||
|
|
||||||
ret[".dur"] = [0, dur]
|
ret[".dur"] = [0, dur]
|
||||||
ret[".q"] = [0, q]
|
ret[".q"] = [0, q]
|
||||||
@@ -222,101 +415,7 @@ class MTag(object):
|
|||||||
return self.normalize_tags(ret, md)
|
return self.normalize_tags(ret, md)
|
||||||
|
|
||||||
def get_ffprobe(self, abspath):
|
def get_ffprobe(self, abspath):
|
||||||
cmd = [b"ffprobe", b"-hide_banner", b"--", fsenc(abspath)]
|
ret, md = ffprobe(abspath)
|
||||||
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
|
|
||||||
r = p.communicate()
|
|
||||||
txt = r[1].decode("utf-8", "replace")
|
|
||||||
txt = [x.rstrip("\r") for x in txt.split("\n")]
|
|
||||||
|
|
||||||
"""
|
|
||||||
note:
|
|
||||||
tags which contain newline will be truncated on first \n,
|
|
||||||
ffprobe emits \n and spacepads the : to align visually
|
|
||||||
note:
|
|
||||||
the Stream ln always mentions Audio: if audio
|
|
||||||
the Stream ln usually has kb/s, is more accurate
|
|
||||||
the Duration ln always has kb/s
|
|
||||||
the Metadata: after Chapter may contain BPM info,
|
|
||||||
title : Tempo: 126.0
|
|
||||||
|
|
||||||
Input #0, wav,
|
|
||||||
Metadata:
|
|
||||||
date : <OK>
|
|
||||||
Duration:
|
|
||||||
Chapter #
|
|
||||||
Metadata:
|
|
||||||
title : <NG>
|
|
||||||
|
|
||||||
Input #0, mp3,
|
|
||||||
Metadata:
|
|
||||||
album : <OK>
|
|
||||||
Duration:
|
|
||||||
Stream #0:0: Audio:
|
|
||||||
Stream #0:1: Video:
|
|
||||||
Metadata:
|
|
||||||
comment : <NG>
|
|
||||||
"""
|
|
||||||
|
|
||||||
ptn_md_beg = re.compile("^( +)Metadata:$")
|
|
||||||
ptn_md_kv = re.compile("^( +)([^:]+) *: (.*)")
|
|
||||||
ptn_dur = re.compile("^ *Duration: ([^ ]+)(, |$)")
|
|
||||||
ptn_br1 = re.compile("^ *Duration: .*, bitrate: ([0-9]+) kb/s(, |$)")
|
|
||||||
ptn_br2 = re.compile("^ *Stream.*: Audio:.* ([0-9]+) kb/s(, |$)")
|
|
||||||
ptn_audio = re.compile("^ *Stream .*: Audio: ")
|
|
||||||
ptn_au_parent = re.compile("^ *(Input #|Stream .*: Audio: )")
|
|
||||||
|
|
||||||
ret = {}
|
|
||||||
md = {}
|
|
||||||
in_md = False
|
|
||||||
is_audio = False
|
|
||||||
au_parent = False
|
|
||||||
for ln in txt:
|
|
||||||
m = ptn_md_kv.match(ln)
|
|
||||||
if m and in_md and len(m.group(1)) == in_md:
|
|
||||||
_, k, v = [x.strip() for x in m.groups()]
|
|
||||||
if k != "" and v != "":
|
|
||||||
md[k] = [v]
|
|
||||||
continue
|
|
||||||
else:
|
|
||||||
in_md = False
|
|
||||||
|
|
||||||
m = ptn_md_beg.match(ln)
|
|
||||||
if m and au_parent:
|
|
||||||
in_md = len(m.group(1)) + 2
|
|
||||||
continue
|
|
||||||
|
|
||||||
au_parent = bool(ptn_au_parent.search(ln))
|
|
||||||
|
|
||||||
if ptn_audio.search(ln):
|
|
||||||
is_audio = True
|
|
||||||
|
|
||||||
m = ptn_dur.search(ln)
|
|
||||||
if m:
|
|
||||||
sec = 0
|
|
||||||
tstr = m.group(1)
|
|
||||||
if tstr.lower() != "n/a":
|
|
||||||
try:
|
|
||||||
tf = tstr.split(",")[0].split(".")[0].split(":")
|
|
||||||
for f in tf:
|
|
||||||
sec *= 60
|
|
||||||
sec += int(f)
|
|
||||||
except:
|
|
||||||
self.log("invalid timestr from ffprobe: [{}]".format(tstr), c=3)
|
|
||||||
|
|
||||||
ret[".dur"] = sec
|
|
||||||
m = ptn_br1.search(ln)
|
|
||||||
if m:
|
|
||||||
ret[".q"] = m.group(1)
|
|
||||||
|
|
||||||
m = ptn_br2.search(ln)
|
|
||||||
if m:
|
|
||||||
ret[".q"] = m.group(1)
|
|
||||||
|
|
||||||
if not is_audio:
|
|
||||||
return {}
|
|
||||||
|
|
||||||
ret = {k: [0, v] for k, v in ret.items()}
|
|
||||||
|
|
||||||
return self.normalize_tags(ret, md)
|
return self.normalize_tags(ret, md)
|
||||||
|
|
||||||
def get_bin(self, parsers, abspath):
|
def get_bin(self, parsers, abspath):
|
||||||
@@ -327,10 +426,10 @@ class MTag(object):
|
|||||||
env["PYTHONPATH"] = pypath
|
env["PYTHONPATH"] = pypath
|
||||||
|
|
||||||
ret = {}
|
ret = {}
|
||||||
for tagname, (binpath, timeout) in parsers.items():
|
for tagname, mp in parsers.items():
|
||||||
try:
|
try:
|
||||||
cmd = [sys.executable, binpath, abspath]
|
cmd = [sys.executable, mp.bin, abspath]
|
||||||
args = {"env": env, "timeout": timeout}
|
args = {"env": env, "timeout": mp.timeout}
|
||||||
|
|
||||||
if WINDOWS:
|
if WINDOWS:
|
||||||
args["creationflags"] = 0x4000
|
args["creationflags"] = 0x4000
|
||||||
@@ -339,8 +438,16 @@ class MTag(object):
|
|||||||
|
|
||||||
cmd = [fsenc(x) for x in cmd]
|
cmd = [fsenc(x) for x in cmd]
|
||||||
v = sp.check_output(cmd, **args).strip()
|
v = sp.check_output(cmd, **args).strip()
|
||||||
if v:
|
if not v:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if "," not in tagname:
|
||||||
ret[tagname] = v.decode("utf-8")
|
ret[tagname] = v.decode("utf-8")
|
||||||
|
else:
|
||||||
|
v = json.loads(v)
|
||||||
|
for tag in tagname.split(","):
|
||||||
|
if tag and tag in v:
|
||||||
|
ret[tag] = v[tag]
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -2,6 +2,7 @@
from __future__ import print_function, unicode_literals

import re
+import os
import sys
import time
import threading

@@ -9,9 +10,11 @@ from datetime import datetime, timedelta
import calendar

from .__init__ import PY2, WINDOWS, MACOS, VT100
+from .util import mp
+from .authsrv import AuthSrv
from .tcpsrv import TcpSrv
from .up2k import Up2k
-from .util import mp
+from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP


class SvcHub(object):
@@ -34,9 +37,27 @@ class SvcHub(object):
|
|||||||
|
|
||||||
self.log = self._log_disabled if args.q else self._log_enabled
|
self.log = self._log_disabled if args.q else self._log_enabled
|
||||||
|
|
||||||
|
# jank goes here
|
||||||
|
auth = AuthSrv(self.args, self.log, False)
|
||||||
|
|
||||||
# initiate all services to manage
|
# initiate all services to manage
|
||||||
self.tcpsrv = TcpSrv(self)
|
self.tcpsrv = TcpSrv(self)
|
||||||
self.up2k = Up2k(self)
|
self.up2k = Up2k(self, auth.vfs.all_vols)
|
||||||
|
|
||||||
|
self.thumbsrv = None
|
||||||
|
if not args.no_thumb:
|
||||||
|
if HAVE_PIL:
|
||||||
|
if not HAVE_WEBP:
|
||||||
|
args.th_no_webp = True
|
||||||
|
msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old"
|
||||||
|
self.log("thumb", msg, c=3)
|
||||||
|
|
||||||
|
self.thumbsrv = ThumbSrv(self, auth.vfs.all_vols)
|
||||||
|
else:
|
||||||
|
msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n"
|
||||||
|
self.log(
|
||||||
|
"thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3
|
||||||
|
)
|
||||||
|
|
||||||
# decide which worker impl to use
|
# decide which worker impl to use
|
||||||
if self.check_mp_enable():
|
if self.check_mp_enable():
|
||||||
@@ -63,6 +84,17 @@ class SvcHub(object):
|
|||||||
|
|
||||||
self.tcpsrv.shutdown()
|
self.tcpsrv.shutdown()
|
||||||
self.broker.shutdown()
|
self.broker.shutdown()
|
||||||
|
if self.thumbsrv:
|
||||||
|
self.thumbsrv.shutdown()
|
||||||
|
|
||||||
|
for n in range(200): # 10s
|
||||||
|
time.sleep(0.05)
|
||||||
|
if self.thumbsrv.stopped():
|
||||||
|
break
|
||||||
|
|
||||||
|
if n == 3:
|
||||||
|
print("waiting for thumbsrv...")
|
||||||
|
|
||||||
print("nailed it")
|
print("nailed it")
|
||||||
|
|
||||||
def _log_disabled(self, src, msg, c=0):
|
def _log_disabled(self, src, msg, c=0):
|
||||||
|
|||||||
49
copyparty/th_cli.py
Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
import os
|
||||||
|
import time
|
||||||
|
|
||||||
|
from .util import Cooldown
|
||||||
|
from .th_srv import thumb_path, THUMBABLE, FMT_FF
|
||||||
|
|
||||||
|
|
||||||
|
class ThumbCli(object):
|
||||||
|
def __init__(self, broker):
|
||||||
|
self.broker = broker
|
||||||
|
self.args = broker.args
|
||||||
|
|
||||||
|
# cache on both sides for less broker spam
|
||||||
|
self.cooldown = Cooldown(self.args.th_poke)
|
||||||
|
|
||||||
|
def get(self, ptop, rem, mtime, fmt):
|
||||||
|
ext = rem.rsplit(".")[-1].lower()
|
||||||
|
if ext not in THUMBABLE:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if self.args.no_vthumb and ext in FMT_FF:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if fmt == "j" and self.args.th_no_jpg:
|
||||||
|
fmt = "w"
|
||||||
|
|
||||||
|
if fmt == "w" and self.args.th_no_webp:
|
||||||
|
fmt = "j"
|
||||||
|
|
||||||
|
tpath = thumb_path(ptop, rem, mtime, fmt)
|
||||||
|
ret = None
|
||||||
|
try:
|
||||||
|
st = os.stat(tpath)
|
||||||
|
if st.st_size:
|
||||||
|
ret = tpath
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if ret:
|
||||||
|
tdir = os.path.dirname(tpath)
|
||||||
|
if self.cooldown.poke(tdir):
|
||||||
|
self.broker.put(False, "thumbsrv.poke", tdir)
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt)
|
||||||
|
return x.get()
|
||||||
375
copyparty/th_srv.py
Normal file
@@ -0,0 +1,375 @@
|
|||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import shutil
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import threading
|
||||||
|
import subprocess as sp
|
||||||
|
|
||||||
|
from .__init__ import PY2
|
||||||
|
from .util import fsenc, mchkcmd, Queue, Cooldown, BytesIO
|
||||||
|
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe
|
||||||
|
|
||||||
|
|
||||||
|
if not PY2:
|
||||||
|
unicode = str
|
||||||
|
|
||||||
|
|
||||||
|
HAVE_PIL = False
|
||||||
|
HAVE_HEIF = False
|
||||||
|
HAVE_AVIF = False
|
||||||
|
HAVE_WEBP = False
|
||||||
|
|
||||||
|
try:
|
||||||
|
from PIL import Image, ImageOps
|
||||||
|
|
||||||
|
HAVE_PIL = True
|
||||||
|
try:
|
||||||
|
Image.new("RGB", (2, 2)).save(BytesIO(), format="webp")
|
||||||
|
HAVE_WEBP = True
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
from pyheif_pillow_opener import register_heif_opener
|
||||||
|
|
||||||
|
register_heif_opener()
|
||||||
|
HAVE_HEIF = True
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
try:
|
||||||
|
import pillow_avif
|
||||||
|
|
||||||
|
HAVE_AVIF = True
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
|
||||||
|
# ffmpeg -formats
|
||||||
|
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
|
||||||
|
FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
|
||||||
|
|
||||||
|
if HAVE_HEIF:
|
||||||
|
FMT_PIL += " heif heifs heic heics"
|
||||||
|
|
||||||
|
if HAVE_AVIF:
|
||||||
|
FMT_PIL += " avif avifs"
|
||||||
|
|
||||||
|
FMT_PIL, FMT_FF = [{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FF]]
|
||||||
|
|
||||||
|
|
||||||
|
THUMBABLE = {}
|
||||||
|
|
||||||
|
if HAVE_PIL:
|
||||||
|
THUMBABLE.update(FMT_PIL)
|
||||||
|
|
||||||
|
if HAVE_FFMPEG and HAVE_FFPROBE:
|
||||||
|
THUMBABLE.update(FMT_FF)
|
||||||
|
|
||||||
|
|
||||||
|
def thumb_path(ptop, rem, mtime, fmt):
|
||||||
|
# base16 = 16 = 256
|
||||||
|
# b64-lc = 38 = 1444
|
||||||
|
# base64 = 64 = 4096
|
||||||
|
try:
|
||||||
|
rd, fn = rem.rsplit("/", 1)
|
||||||
|
except:
|
||||||
|
rd = ""
|
||||||
|
fn = rem
|
||||||
|
|
||||||
|
if rd:
|
||||||
|
h = hashlib.sha512(fsenc(rd)).digest()[:24]
|
||||||
|
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||||
|
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
|
||||||
|
else:
|
||||||
|
rd = "top"
|
||||||
|
|
||||||
|
# could keep original filenames but this is safer re pathlen
|
||||||
|
h = hashlib.sha512(fsenc(fn)).digest()[:24]
|
||||||
|
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
|
||||||
|
|
||||||
|
return "{}/.hist/th/{}/{}.{:x}.{}".format(
|
||||||
|
ptop, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ThumbSrv(object):
|
||||||
|
def __init__(self, hub, vols):
|
||||||
|
self.hub = hub
|
||||||
|
self.vols = [v.realpath for v in vols.values()]
|
||||||
|
|
||||||
|
self.args = hub.args
|
||||||
|
self.log_func = hub.log
|
||||||
|
|
||||||
|
res = hub.args.th_size.split("x")
|
||||||
|
self.res = tuple([int(x) for x in res])
|
||||||
|
self.poke_cd = Cooldown(self.args.th_poke)
|
||||||
|
|
||||||
|
self.mutex = threading.Lock()
|
||||||
|
self.busy = {}
|
||||||
|
self.stopping = False
|
||||||
|
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||||
|
self.q = Queue(self.nthr * 4)
|
||||||
|
for _ in range(self.nthr):
|
||||||
|
t = threading.Thread(target=self.worker)
|
||||||
|
t.daemon = True
|
||||||
|
t.start()
|
||||||
|
|
||||||
|
if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE):
|
||||||
|
missing = []
|
||||||
|
if not HAVE_FFMPEG:
|
||||||
|
missing.append("ffmpeg")
|
||||||
|
|
||||||
|
if not HAVE_FFPROBE:
|
||||||
|
missing.append("ffprobe")
|
||||||
|
|
||||||
|
msg = "cannot create video thumbnails because some of the required programs are not available: "
|
||||||
|
msg += ", ".join(missing)
|
||||||
|
self.log(msg, c=1)
|
||||||
|
|
||||||
|
t = threading.Thread(target=self.cleaner)
|
||||||
|
t.daemon = True
|
||||||
|
t.start()
|
||||||
|
|
||||||
|
def log(self, msg, c=0):
|
||||||
|
self.log_func("thumb", msg, c)
|
||||||
|
|
||||||
|
def shutdown(self):
|
||||||
|
self.stopping = True
|
||||||
|
for _ in range(self.nthr):
|
||||||
|
self.q.put(None)
|
||||||
|
|
||||||
|
def stopped(self):
|
||||||
|
with self.mutex:
|
||||||
|
return not self.nthr
|
||||||
|
|
||||||
|
def get(self, ptop, rem, mtime, fmt):
|
||||||
|
tpath = thumb_path(ptop, rem, mtime, fmt)
|
||||||
|
abspath = os.path.join(ptop, rem)
|
||||||
|
cond = threading.Condition()
|
||||||
|
with self.mutex:
|
||||||
|
try:
|
||||||
|
self.busy[tpath].append(cond)
|
||||||
|
self.log("wait {}".format(tpath))
|
||||||
|
except:
|
||||||
|
thdir = os.path.dirname(tpath)
|
||||||
|
try:
|
||||||
|
os.makedirs(thdir)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
inf_path = os.path.join(thdir, "dir.txt")
|
||||||
|
if not os.path.exists(inf_path):
|
||||||
|
with open(inf_path, "wb") as f:
|
||||||
|
f.write(fsenc(os.path.dirname(abspath)))
|
||||||
|
|
||||||
|
self.busy[tpath] = [cond]
|
||||||
|
self.q.put([abspath, tpath])
|
||||||
|
self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
|
||||||
|
|
||||||
|
while not self.stopping:
|
||||||
|
with self.mutex:
|
||||||
|
if tpath not in self.busy:
|
||||||
|
break
|
||||||
|
|
||||||
|
with cond:
|
||||||
|
cond.wait()
|
||||||
|
|
||||||
|
try:
|
||||||
|
st = os.stat(tpath)
|
||||||
|
if st.st_size:
|
||||||
|
return tpath
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def worker(self):
|
||||||
|
while not self.stopping:
|
||||||
|
task = self.q.get()
|
||||||
|
if not task:
|
||||||
|
break
|
||||||
|
|
||||||
|
abspath, tpath = task
|
||||||
|
ext = abspath.split(".")[-1].lower()
|
||||||
|
fun = None
|
||||||
|
if not os.path.exists(tpath):
|
||||||
|
if ext in FMT_PIL:
|
||||||
|
fun = self.conv_pil
|
||||||
|
elif ext in FMT_FF:
|
||||||
|
fun = self.conv_ffmpeg
|
||||||
|
|
||||||
|
if fun:
|
||||||
|
try:
|
||||||
|
fun(abspath, tpath)
|
||||||
|
except Exception as ex:
|
||||||
|
msg = "{} failed on {}\n {!r}"
|
||||||
|
self.log(msg.format(fun.__name__, abspath, ex), 3)
|
||||||
|
with open(tpath, "wb") as _:
|
||||||
|
pass
|
||||||
|
|
||||||
|
with self.mutex:
|
||||||
|
subs = self.busy[tpath]
|
||||||
|
del self.busy[tpath]
|
||||||
|
|
||||||
|
for x in subs:
|
||||||
|
with x:
|
||||||
|
x.notify_all()
|
||||||
|
|
||||||
|
with self.mutex:
|
||||||
|
self.nthr -= 1
|
||||||
|
|
||||||
|
def conv_pil(self, abspath, tpath):
|
||||||
|
with Image.open(fsenc(abspath)) as im:
|
||||||
|
crop = not self.args.th_no_crop
|
||||||
|
res2 = self.res
|
||||||
|
if crop:
|
||||||
|
res2 = (res2[0] * 2, res2[1] * 2)
|
||||||
|
|
||||||
|
try:
|
||||||
|
im.thumbnail(res2, resample=Image.LANCZOS)
|
||||||
|
if crop:
|
||||||
|
iw, ih = im.size
|
||||||
|
dw, dh = self.res
|
||||||
|
res = (min(iw, dw), min(ih, dh))
|
||||||
|
im = ImageOps.fit(im, res, method=Image.LANCZOS)
|
||||||
|
except:
|
||||||
|
im.thumbnail(self.res)
|
||||||
|
|
||||||
|
if im.mode not in ("RGB", "L"):
|
||||||
|
im = im.convert("RGB")
|
||||||
|
|
||||||
|
if tpath.endswith(".webp"):
|
||||||
|
# quality 80 = pillow-default
|
||||||
|
# quality 75 = ffmpeg-default
|
||||||
|
# method 0 = pillow-default, fast
|
||||||
|
# method 4 = ffmpeg-default
|
||||||
|
# method 6 = max, slow
|
||||||
|
im.save(tpath, quality=40, method=6)
|
||||||
|
else:
|
||||||
|
im.save(tpath, quality=40) # default=75
|
||||||
|
|
||||||
|
def conv_ffmpeg(self, abspath, tpath):
|
||||||
|
ret, _ = ffprobe(abspath)
|
||||||
|
|
||||||
|
dur = ret[".dur"][1] if ".dur" in ret else 4
|
||||||
|
seek = "{:.0f}".format(dur / 3)
|
||||||
|
|
||||||
|
scale = "scale={0}:{1}:force_original_aspect_ratio="
|
||||||
|
if self.args.th_no_crop:
|
||||||
|
scale += "decrease,setsar=1:1"
|
||||||
|
else:
|
||||||
|
scale += "increase,crop={0}:{1},setsar=1:1"
|
||||||
|
|
||||||
|
scale = scale.format(*list(self.res)).encode("utf-8")
|
||||||
|
cmd = [
|
||||||
|
b"ffmpeg",
|
||||||
|
b"-nostdin",
|
||||||
|
b"-hide_banner",
|
||||||
|
b"-ss",
|
||||||
|
seek,
|
||||||
|
b"-i",
|
||||||
|
fsenc(abspath),
|
||||||
|
b"-vf",
|
||||||
|
scale,
|
||||||
|
b"-vframes",
|
||||||
|
b"1",
|
||||||
|
]
|
||||||
|
|
||||||
|
if tpath.endswith(".jpg"):
|
||||||
|
cmd += [
|
||||||
|
b"-q:v",
|
||||||
|
b"6", # default=??
|
||||||
|
]
|
||||||
|
else:
|
||||||
|
cmd += [
|
||||||
|
b"-q:v",
|
||||||
|
b"50", # default=75
|
||||||
|
b"-compression_level:v",
|
||||||
|
b"6", # default=4, 0=fast, 6=max
|
||||||
|
]
|
||||||
|
|
||||||
|
cmd += [fsenc(tpath)]
|
||||||
|
|
||||||
|
mchkcmd(cmd)
|
||||||
|
|
||||||
|
def poke(self, tdir):
|
||||||
|
if not self.poke_cd.poke(tdir):
|
||||||
|
return
|
||||||
|
|
||||||
|
ts = int(time.time())
|
||||||
|
try:
|
||||||
|
p1 = os.path.dirname(tdir)
|
||||||
|
p2 = os.path.dirname(p1)
|
||||||
|
for dp in [tdir, p1, p2]:
|
||||||
|
os.utime(fsenc(dp), (ts, ts))
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def cleaner(self):
|
||||||
|
interval = self.args.th_clean
|
||||||
|
while True:
|
||||||
|
time.sleep(interval)
|
||||||
|
for vol in self.vols:
|
||||||
|
vol += "/.hist/th"
|
||||||
|
self.log("cln {}/".format(vol))
|
||||||
|
self.clean(vol)
|
||||||
|
|
||||||
|
self.log("cln ok")
|
||||||
|
|
||||||
|
def clean(self, vol):
|
||||||
|
# self.log("cln {}".format(vol))
|
||||||
|
maxage = self.args.th_maxage
|
||||||
|
now = time.time()
|
||||||
|
prev_b64 = None
|
||||||
|
prev_fp = None
|
||||||
|
try:
|
||||||
|
ents = os.listdir(vol)
|
||||||
|
except:
|
||||||
|
return
|
||||||
|
|
||||||
|
for f in sorted(ents):
|
||||||
|
fp = os.path.join(vol, f)
|
||||||
|
cmp = fp.lower().replace("\\", "/")
|
||||||
|
|
||||||
|
# "top" or b64 prefix/full (a folder)
|
||||||
|
if len(f) <= 3 or len(f) == 24:
|
||||||
|
age = now - os.path.getmtime(fp)
|
||||||
|
if age > maxage:
|
||||||
|
with self.mutex:
|
||||||
|
safe = True
|
||||||
|
for k in self.busy.keys():
|
||||||
|
if k.lower().replace("\\", "/").startswith(cmp):
|
||||||
|
safe = False
|
||||||
|
break
|
||||||
|
|
||||||
|
if safe:
|
||||||
|
self.log("rm -rf [{}]".format(fp))
|
||||||
|
shutil.rmtree(fp, ignore_errors=True)
|
||||||
|
else:
|
||||||
|
self.clean(fp)
|
||||||
|
continue
|
||||||
|
|
||||||
|
# thumb file
|
||||||
|
try:
|
||||||
|
b64, ts, ext = f.split(".")
|
||||||
|
if len(b64) != 24 or len(ts) != 8 or ext not in ["jpg", "webp"]:
|
||||||
|
raise Exception()
|
||||||
|
|
||||||
|
ts = int(ts, 16)
|
||||||
|
except:
|
||||||
|
if f != "dir.txt":
|
||||||
|
self.log("foreign file in thumbs dir: [{}]".format(fp), 1)
|
||||||
|
|
||||||
|
continue
|
||||||
|
|
||||||
|
if b64 == prev_b64:
|
||||||
|
self.log("rm replaced [{}]".format(fp))
|
||||||
|
os.unlink(prev_fp)
|
||||||
|
|
||||||
|
prev_b64 = b64
|
||||||
|
prev_fp = fp
|
||||||
@@ -163,7 +163,7 @@ class U2idx(object):
if rd.startswith("//") or fn.startswith("//"):
rd, fn = s3dec(rd, fn)

-rp = os.path.join(vtop, rd, fn).replace("\\", "/")
+rp = "/".join([vtop, rd, fn])
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})

for hit in sret:
|
|||||||
@@ -31,8 +31,7 @@ from .util import (
|
|||||||
statdir,
|
statdir,
|
||||||
s2hms,
|
s2hms,
|
||||||
)
|
)
|
||||||
from .mtag import MTag
|
from .mtag import MTag, MParser
|
||||||
from .authsrv import AuthSrv
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
HAVE_SQLITE3 = True
|
HAVE_SQLITE3 = True
|
||||||
@@ -49,13 +48,14 @@ class Up2k(object):
|
|||||||
* ~/.config flatfiles for active jobs
|
* ~/.config flatfiles for active jobs
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, broker):
|
def __init__(self, hub, all_vols):
|
||||||
self.broker = broker
|
self.hub = hub
|
||||||
self.args = broker.args
|
self.args = hub.args
|
||||||
self.log_func = broker.log
|
self.log_func = hub.log
|
||||||
|
self.all_vols = all_vols
|
||||||
|
|
||||||
# config
|
# config
|
||||||
self.salt = broker.args.salt
|
self.salt = self.args.salt
|
||||||
|
|
||||||
# state
|
# state
|
||||||
self.mutex = threading.Lock()
|
self.mutex = threading.Lock()
|
||||||
@@ -92,9 +92,7 @@ class Up2k(object):
|
|||||||
if not HAVE_SQLITE3:
|
if not HAVE_SQLITE3:
|
||||||
self.log("could not initialize sqlite3, will use in-memory registry only")
|
self.log("could not initialize sqlite3, will use in-memory registry only")
|
||||||
|
|
||||||
# this is kinda jank
|
have_e2d = self.init_indexes()
|
||||||
auth = AuthSrv(self.args, self.log_func, False)
|
|
||||||
have_e2d = self.init_indexes(auth)
|
|
||||||
|
|
||||||
if have_e2d:
|
if have_e2d:
|
||||||
thr = threading.Thread(target=self._snapshot)
|
thr = threading.Thread(target=self._snapshot)
|
||||||
@@ -139,9 +137,9 @@ class Up2k(object):
|
|||||||
|
|
||||||
return True, ret
|
return True, ret
|
||||||
|
|
||||||
def init_indexes(self, auth):
|
def init_indexes(self):
|
||||||
self.pp = ProgressPrinter()
|
self.pp = ProgressPrinter()
|
||||||
vols = auth.vfs.all_vols.values()
|
vols = self.all_vols.values()
|
||||||
t0 = time.time()
|
t0 = time.time()
|
||||||
have_e2d = False
|
have_e2d = False
|
||||||
|
|
||||||
@@ -242,9 +240,14 @@ class Up2k(object):
|
|||||||
with gzip.GzipFile(path, "rb") as f:
|
with gzip.GzipFile(path, "rb") as f:
|
||||||
j = f.read().decode("utf-8")
|
j = f.read().decode("utf-8")
|
||||||
|
|
||||||
reg = json.loads(j)
|
reg2 = json.loads(j)
|
||||||
for _, job in reg.items():
|
for k, job in reg2.items():
|
||||||
job["poke"] = time.time()
|
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||||
|
if os.path.exists(fsenc(path)):
|
||||||
|
reg[k] = job
|
||||||
|
job["poke"] = time.time()
|
||||||
|
else:
|
||||||
|
self.log("ign deleted file in snap: [{}]".format(path))
|
||||||
|
|
||||||
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
|
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
|
||||||
m = [m] + self._vis_reg_progress(reg)
|
m = [m] + self._vis_reg_progress(reg)
|
||||||
@@ -511,7 +514,6 @@ class Up2k(object):
|
|||||||
|
|
||||||
def _run_all_mtp(self):
|
def _run_all_mtp(self):
|
||||||
t0 = time.time()
|
t0 = time.time()
|
||||||
self.mtp_force = {}
|
|
||||||
self.mtp_parsers = {}
|
self.mtp_parsers = {}
|
||||||
for ptop, flags in self.flags.items():
|
for ptop, flags in self.flags.items():
|
||||||
if "mtp" in flags:
|
if "mtp" in flags:
|
||||||
@@ -527,43 +529,18 @@ class Up2k(object):
|
|||||||
|
|
||||||
entags = self.entags[ptop]
|
entags = self.entags[ptop]
|
||||||
|
|
||||||
force = {}
|
|
||||||
timeout = {}
|
|
||||||
parsers = {}
|
parsers = {}
|
||||||
for parser in self.flags[ptop]["mtp"]:
|
for parser in self.flags[ptop]["mtp"]:
|
||||||
orig = parser
|
try:
|
||||||
tag, parser = parser.split("=", 1)
|
parser = MParser(parser)
|
||||||
if tag not in entags:
|
except:
|
||||||
continue
|
self.log("invalid argument: " + parser, 1)
|
||||||
|
return
|
||||||
|
|
||||||
while True:
|
for tag in entags:
|
||||||
try:
|
if tag in parser.tags:
|
||||||
bp = os.path.expanduser(parser)
|
parsers[parser.tag] = parser
|
||||||
if os.path.exists(bp):
|
|
||||||
parsers[tag] = [bp, timeout.get(tag, 30)]
|
|
||||||
break
|
|
||||||
except:
|
|
||||||
pass
|
|
||||||
|
|
||||||
try:
|
|
||||||
arg, parser = parser.split(",", 1)
|
|
||||||
arg = arg.lower()
|
|
||||||
|
|
||||||
if arg == "f":
|
|
||||||
force[tag] = True
|
|
||||||
continue
|
|
||||||
|
|
||||||
if arg.startswith("t"):
|
|
||||||
timeout[tag] = int(arg[1:])
|
|
||||||
continue
|
|
||||||
|
|
||||||
raise Exception()
|
|
||||||
|
|
||||||
except:
|
|
||||||
self.log("invalid argument: " + orig, 1)
|
|
||||||
return
|
|
||||||
|
|
||||||
self.mtp_force[ptop] = force
|
|
||||||
self.mtp_parsers[ptop] = parsers
|
self.mtp_parsers[ptop] = parsers
|
||||||
|
|
||||||
q = "select count(w) from mt where k = 't:mtp'"
|
q = "select count(w) from mt where k = 't:mtp'"
|
||||||
@@ -596,8 +573,8 @@ class Up2k(object):
|
|||||||
have = cur.execute(q, (w,)).fetchall()
|
have = cur.execute(q, (w,)).fetchall()
|
||||||
have = [x[0] for x in have]
|
have = [x[0] for x in have]
|
||||||
|
|
||||||
if ".dur" not in have and ".dur" in entags:
|
parsers = self._get_parsers(ptop, have, abspath)
|
||||||
# skip non-audio
|
if not parsers:
|
||||||
to_delete[w] = True
|
to_delete[w] = True
|
||||||
n_left -= 1
|
n_left -= 1
|
||||||
continue
|
continue
|
||||||
@@ -605,10 +582,7 @@ class Up2k(object):
|
|||||||
if w in in_progress:
|
if w in in_progress:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
task_parsers = {
|
jobs.append([parsers, None, w, abspath])
|
||||||
k: v for k, v in parsers.items() if k in force or k not in have
|
|
||||||
}
|
|
||||||
jobs.append([task_parsers, None, w, abspath])
|
|
||||||
in_progress[w] = True
|
in_progress[w] = True
|
||||||
|
|
||||||
done = self._flush_mpool(wcur)
|
done = self._flush_mpool(wcur)
|
||||||
@@ -667,10 +641,46 @@ class Up2k(object):
|
|||||||
wcur.close()
|
wcur.close()
|
||||||
cur.close()
|
cur.close()
|
||||||
|
|
||||||
|
def _get_parsers(self, ptop, have, abspath):
|
||||||
|
try:
|
||||||
|
all_parsers = self.mtp_parsers[ptop]
|
||||||
|
except:
|
||||||
|
return {}
|
||||||
|
|
||||||
|
entags = self.entags[ptop]
|
||||||
|
parsers = {}
|
||||||
|
for k, v in all_parsers.items():
|
||||||
|
if "ac" in entags or ".aq" in entags:
|
||||||
|
if "ac" in have or ".aq" in have:
|
||||||
|
# is audio, require non-audio?
|
||||||
|
if v.audio == "n":
|
||||||
|
continue
|
||||||
|
# is not audio, require audio?
|
||||||
|
elif v.audio == "y":
|
||||||
|
continue
|
||||||
|
|
||||||
|
if v.ext:
|
||||||
|
match = False
|
||||||
|
for ext in v.ext:
|
||||||
|
if abspath.lower().endswith("." + ext):
|
||||||
|
match = True
|
||||||
|
break
|
||||||
|
|
||||||
|
if not match:
|
||||||
|
continue
|
||||||
|
|
||||||
|
parsers[k] = v
|
||||||
|
|
||||||
|
parsers = {k: v for k, v in parsers.items() if v.force or k not in have}
|
||||||
|
return parsers
|
||||||
|
|
||||||
def _start_mpool(self):
|
def _start_mpool(self):
|
||||||
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
|
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
|
||||||
# both do crazy runahead so lets reinvent another wheel
|
# both do crazy runahead so lets reinvent another wheel
|
||||||
nw = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
nw = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||||
|
if self.args.no_mtag_mt:
|
||||||
|
nw = 1
|
||||||
|
|
||||||
if self.pending_tags is None:
|
if self.pending_tags is None:
|
||||||
self.log("using {}x {}".format(nw, self.mtag.backend))
|
self.log("using {}x {}".format(nw, self.mtag.backend))
|
||||||
self.pending_tags = []
|
self.pending_tags = []
|
||||||
@@ -710,7 +720,8 @@ class Up2k(object):
|
|||||||
vtags = [
|
vtags = [
|
||||||
"\033[36m{} \033[33m{}".format(k, v) for k, v in tags.items()
|
"\033[36m{} \033[33m{}".format(k, v) for k, v in tags.items()
|
||||||
]
|
]
|
||||||
self.log("{}\033[0m [{}]".format(" ".join(vtags), abspath))
|
if vtags:
|
||||||
|
self.log("{}\033[0m [{}]".format(" ".join(vtags), abspath))
|
||||||
|
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
self.pending_tags.append([entags, wark, abspath, tags])
|
self.pending_tags.append([entags, wark, abspath, tags])
|
||||||
@@ -912,7 +923,7 @@ class Up2k(object):
|
|||||||
if dp_dir.startswith("//") or dp_fn.startswith("//"):
|
if dp_dir.startswith("//") or dp_fn.startswith("//"):
|
||||||
dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
|
dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
|
||||||
|
|
||||||
dp_abs = os.path.join(cj["ptop"], dp_dir, dp_fn).replace("\\", "/")
|
dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
|
||||||
# relying on path.exists to return false on broken symlinks
|
# relying on path.exists to return false on broken symlinks
|
||||||
if os.path.exists(fsenc(dp_abs)):
|
if os.path.exists(fsenc(dp_abs)):
|
||||||
job = {
|
job = {
|
||||||
@@ -938,7 +949,7 @@ class Up2k(object):
|
|||||||
for fn in names:
|
for fn in names:
|
||||||
path = os.path.join(job["ptop"], job["prel"], fn)
|
path = os.path.join(job["ptop"], job["prel"], fn)
|
||||||
try:
|
try:
|
||||||
if os.path.getsize(path) > 0:
|
if os.path.getsize(fsenc(path)) > 0:
|
||||||
# upload completed or both present
|
# upload completed or both present
|
||||||
break
|
break
|
||||||
except:
|
except:
|
||||||
@@ -1062,6 +1073,9 @@ class Up2k(object):
|
|||||||
raise Pebkac(400, "unknown wark")
|
raise Pebkac(400, "unknown wark")
|
||||||
|
|
||||||
if chash not in job["need"]:
|
if chash not in job["need"]:
|
||||||
|
msg = "chash = {} , need:\n".format(chash)
|
||||||
|
msg += "\n".join(job["need"])
|
||||||
|
self.log(msg)
|
||||||
raise Pebkac(400, "already got that but thanks??")
|
raise Pebkac(400, "already got that but thanks??")
|
||||||
|
|
||||||
nchunk = [n for n, v in enumerate(job["hash"]) if v == chash]
|
nchunk = [n for n, v in enumerate(job["hash"]) if v == chash]
|
||||||
@@ -1167,10 +1181,10 @@ class Up2k(object):
|
|||||||
return wark
|
return wark
|
||||||
|
|
||||||
def _hashlist_from_file(self, path):
|
def _hashlist_from_file(self, path):
|
||||||
fsz = os.path.getsize(path)
|
fsz = os.path.getsize(fsenc(path))
|
||||||
csz = up2k_chunksize(fsz)
|
csz = up2k_chunksize(fsz)
|
||||||
ret = []
|
ret = []
|
||||||
with open(path, "rb", 512 * 1024) as f:
|
with open(fsenc(path), "rb", 512 * 1024) as f:
|
||||||
while fsz > 0:
|
while fsz > 0:
|
||||||
self.pp.msg = "{} MB, {}".format(int(fsz / 1024 / 1024), path)
|
self.pp.msg = "{} MB, {}".format(int(fsz / 1024 / 1024), path)
|
||||||
hashobj = hashlib.sha512()
|
hashobj = hashlib.sha512()
|
||||||
@@ -1198,6 +1212,9 @@ class Up2k(object):
|
|||||||
# raise Exception("aaa")
|
# raise Exception("aaa")
|
||||||
|
|
||||||
tnam = job["name"] + ".PARTIAL"
|
tnam = job["name"] + ".PARTIAL"
|
||||||
|
if self.args.dotpart:
|
||||||
|
tnam = "." + tnam
|
||||||
|
|
||||||
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
|
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
|
||||||
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
|
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
|
||||||
f, job["tnam"] = f["orz"]
|
f, job["tnam"] = f["orz"]
|
||||||
@@ -1258,13 +1275,13 @@ class Up2k(object):
|
|||||||
try:
|
try:
|
||||||
# remove the filename reservation
|
# remove the filename reservation
|
||||||
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
path = os.path.join(job["ptop"], job["prel"], job["name"])
|
||||||
if os.path.getsize(path) == 0:
|
if os.path.getsize(fsenc(path)) == 0:
|
||||||
os.unlink(path)
|
os.unlink(fsenc(path))
|
||||||
|
|
||||||
if len(job["hash"]) == len(job["need"]):
|
if len(job["hash"]) == len(job["need"]):
|
||||||
# PARTIAL is empty, delete that too
|
# PARTIAL is empty, delete that too
|
||||||
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
|
path = os.path.join(job["ptop"], job["prel"], job["tnam"])
|
||||||
os.unlink(path)
|
os.unlink(fsenc(path))
|
||||||
except:
|
except:
|
||||||
pass
|
pass
|
||||||
|
|
||||||
@@ -1272,8 +1289,8 @@ class Up2k(object):
|
|||||||
if not reg:
|
if not reg:
|
||||||
if k not in prev or prev[k] is not None:
|
if k not in prev or prev[k] is not None:
|
||||||
prev[k] = None
|
prev[k] = None
|
||||||
if os.path.exists(path):
|
if os.path.exists(fsenc(path)):
|
||||||
os.unlink(path)
|
os.unlink(fsenc(path))
|
||||||
return
|
return
|
||||||
|
|
||||||
newest = max(x["poke"] for _, x in reg.items()) if reg else 0
|
newest = max(x["poke"] for _, x in reg.items()) if reg else 0
|
||||||
@@ -1305,13 +1322,9 @@ class Up2k(object):
|
|||||||
abspath = os.path.join(ptop, rd, fn)
|
abspath = os.path.join(ptop, rd, fn)
|
||||||
tags = self.mtag.get(abspath)
|
tags = self.mtag.get(abspath)
|
||||||
ntags1 = len(tags)
|
ntags1 = len(tags)
|
||||||
if self.mtp_parsers.get(ptop, {}):
|
parsers = self._get_parsers(ptop, tags, abspath)
|
||||||
parser = {
|
if parsers:
|
||||||
k: v
|
tags.update(self.mtag.get_bin(parsers, abspath))
|
||||||
for k, v in self.mtp_parsers[ptop].items()
|
|
||||||
if k in self.mtp_force[ptop] or k not in tags
|
|
||||||
}
|
|
||||||
tags.update(self.mtag.get_bin(parser, abspath))
|
|
||||||
|
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
cur = self.cur[ptop]
|
cur = self.cur[ptop]
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ import threading
|
|||||||
import mimetypes
|
import mimetypes
|
||||||
import contextlib
|
import contextlib
|
||||||
import subprocess as sp # nosec
|
import subprocess as sp # nosec
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
from .__init__ import PY2, WINDOWS, ANYWIN
|
from .__init__ import PY2, WINDOWS, ANYWIN
|
||||||
from .stolen import surrogateescape
|
from .stolen import surrogateescape
|
||||||
@@ -34,10 +35,12 @@ if not PY2:
|
|||||||
from urllib.parse import unquote_to_bytes as unquote
|
from urllib.parse import unquote_to_bytes as unquote
|
||||||
from urllib.parse import quote_from_bytes as quote
|
from urllib.parse import quote_from_bytes as quote
|
||||||
from queue import Queue
|
from queue import Queue
|
||||||
|
from io import BytesIO
|
||||||
else:
|
else:
|
||||||
from urllib import unquote # pylint: disable=no-name-in-module
|
from urllib import unquote # pylint: disable=no-name-in-module
|
||||||
from urllib import quote # pylint: disable=no-name-in-module
|
from urllib import quote # pylint: disable=no-name-in-module
|
||||||
from Queue import Queue # pylint: disable=import-error,no-name-in-module
|
from Queue import Queue # pylint: disable=import-error,no-name-in-module
|
||||||
|
from StringIO import StringIO as BytesIO
|
||||||
|
|
||||||
surrogateescape.register_surrogateescape()
|
surrogateescape.register_surrogateescape()
|
||||||
FS_ENCODING = sys.getfilesystemencoding()
|
FS_ENCODING = sys.getfilesystemencoding()
|
||||||
@@ -45,10 +48,14 @@ if WINDOWS and PY2:
|
|||||||
FS_ENCODING = "utf-8"
|
FS_ENCODING = "utf-8"
|
||||||
|
|
||||||
|
|
||||||
|
HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT"
|
||||||
|
|
||||||
|
|
||||||
HTTPCODE = {
|
HTTPCODE = {
|
||||||
200: "OK",
|
200: "OK",
|
||||||
204: "No Content",
|
204: "No Content",
|
||||||
206: "Partial Content",
|
206: "Partial Content",
|
||||||
|
302: "Found",
|
||||||
304: "Not Modified",
|
304: "Not Modified",
|
||||||
400: "Bad Request",
|
400: "Bad Request",
|
||||||
403: "Forbidden",
|
403: "Forbidden",
|
||||||
@@ -72,6 +79,13 @@ IMPLICATIONS = [
|
|||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
MIMES = {
|
||||||
|
"md": "text/plain; charset=UTF-8",
|
||||||
|
"opus": "audio/ogg; codecs=opus",
|
||||||
|
"webp": "image/webp",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
REKOBO_KEY = {
|
REKOBO_KEY = {
|
||||||
v: ln.split(" ", 1)[0]
|
v: ln.split(" ", 1)[0]
|
||||||
for ln in """
|
for ln in """
|
||||||
@@ -123,6 +137,32 @@ class Counter(object):
|
|||||||
self.v = absval
|
self.v = absval
|
||||||
|
|
||||||
|
|
||||||
|
class Cooldown(object):
|
||||||
|
def __init__(self, maxage):
|
||||||
|
self.maxage = maxage
|
||||||
|
self.mutex = threading.Lock()
|
||||||
|
self.hist = {}
|
||||||
|
self.oldest = 0
|
||||||
|
|
||||||
|
def poke(self, key):
|
||||||
|
with self.mutex:
|
||||||
|
now = time.time()
|
||||||
|
|
||||||
|
ret = False
|
||||||
|
v = self.hist.get(key, 0)
|
||||||
|
if now - v > self.maxage:
|
||||||
|
self.hist[key] = now
|
||||||
|
ret = True
|
||||||
|
|
||||||
|
if self.oldest - now > self.maxage * 2:
|
||||||
|
self.hist = {
|
||||||
|
k: v for k, v in self.hist.items() if now - v < self.maxage
|
||||||
|
}
|
||||||
|
self.oldest = sorted(self.hist.values())[0]
|
||||||
|
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
class Unrecv(object):
|
class Unrecv(object):
|
||||||
"""
|
"""
|
||||||
undo any number of socket recv ops
|
undo any number of socket recv ops
|
||||||
@@ -242,7 +282,7 @@ def ren_open(fname, *args, **kwargs):
     else:
         fpath = fname
 
-    if suffix and os.path.exists(fpath):
+    if suffix and os.path.exists(fsenc(fpath)):
         fpath += suffix
         fname += suffix
         ext += suffix
@@ -592,8 +632,8 @@ def sanitize_fn(fn, ok="", bad=[]):
         ["?", "？"],
         ["*", "＊"],
     ]
-    for bad, good in [x for x in remap if x[0] not in ok]:
-        fn = fn.replace(bad, good)
+    for a, b in [x for x in remap if x[0] not in ok]:
+        fn = fn.replace(a, b)
 
     bad.extend(["con", "prn", "aux", "nul"])
     for n in range(1, 10):
@@ -616,6 +656,11 @@ def exclude_dotfiles(filepaths):
     return [x for x in filepaths if not x.split("/")[-1].startswith(".")]
 
 
+def http_ts(ts):
+    file_dt = datetime.utcfromtimestamp(ts)
+    return file_dt.strftime(HTTP_TS_FMT)
+
+
 def html_escape(s, quote=False, crlf=False):
     """html.escape but also newlines"""
     s = s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
@@ -722,6 +767,8 @@ def s3dec(rd, fn):
 
 
 def atomic_move(src, dst):
+    src = fsenc(src)
+    dst = fsenc(dst)
     if not PY2:
         os.replace(src, dst)
     else:
@@ -913,11 +960,13 @@ def unescape_cookie(orig):
     return ret
 
 
-def guess_mime(url):
-    if url.endswith(".md"):
-        return ["text/plain; charset=UTF-8"]
+def guess_mime(url, fallback="application/octet-stream"):
+    try:
+        _, ext = url.rsplit(".", 1)
+    except:
+        return fallback
 
-    return mimetypes.guess_type(url)
+    return MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback
 
 
 def runcmd(*argv):
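
A worked example, not in the diff, of the new lookup order in `guess_mime`: the `MIMES` override wins, then `mimetypes`, then the `fallback` argument; the sample filenames are made up:

```python
# illustrative re-statement of the new guess_mime behavior
import mimetypes

MIMES = {
    "md": "text/plain; charset=UTF-8",
    "opus": "audio/ogg; codecs=opus",
    "webp": "image/webp",
}

def guess_mime(url, fallback="application/octet-stream"):
    try:
        _, ext = url.rsplit(".", 1)
    except:
        return fallback
    return MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback

print(guess_mime("notes.md"))    # text/plain; charset=UTF-8   (MIMES override)
print(guess_mime("photo.jpg"))   # image/jpeg                  (mimetypes)
print(guess_mime("Makefile"))    # application/octet-stream    (no extension -> fallback)
```
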
@@ -936,6 +985,17 @@ def chkcmd(*argv):
     return sout, serr
 
 
+def mchkcmd(argv, timeout=10):
+    if PY2:
+        with open(os.devnull, "wb") as f:
+            rv = sp.call(argv, stdout=f, stderr=f)
+    else:
+        rv = sp.call(argv, stdout=sp.DEVNULL, stderr=sp.DEVNULL, timeout=timeout)
+
+    if rv:
+        raise sp.CalledProcessError(rv, (argv[0], b"...", argv[-1]))
+
+
 def gzip_orig_sz(fn):
     with open(fsenc(fn), "rb") as f:
         f.seek(-4, 2)
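
The `f.seek(-4, 2)` in `gzip_orig_sz` works because a gzip stream ends with a 4-byte little-endian ISIZE field holding the uncompressed length mod 2^32. A standalone sketch of the same idea (the temp-file path is arbitrary):

```python
# sketch of the trick used by gzip_orig_sz above
import gzip
import struct

with gzip.open("/tmp/demo.gz", "wb") as f:   # arbitrary sample file
    f.write(b"x" * 12345)

with open("/tmp/demo.gz", "rb") as f:
    f.seek(-4, 2)                            # 2 = os.SEEK_END
    isize = struct.unpack("<I", f.read(4))[0]

print(isize)  # 12345
```
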
|
|||||||
@@ -1,3 +1,6 @@
|
|||||||
|
:root {
|
||||||
|
--grid-sz: 10em;
|
||||||
|
}
|
||||||
* {
|
* {
|
||||||
line-height: 1.2em;
|
line-height: 1.2em;
|
||||||
}
|
}
|
||||||
@@ -64,6 +67,11 @@ a, #files tbody div a:last-child {
|
|||||||
background: #161616;
|
background: #161616;
|
||||||
text-decoration: underline;
|
text-decoration: underline;
|
||||||
}
|
}
|
||||||
|
#files thead {
|
||||||
|
background: #333;
|
||||||
|
position: sticky;
|
||||||
|
top: 0;
|
||||||
|
}
|
||||||
#files thead a {
|
#files thead a {
|
||||||
color: #999;
|
color: #999;
|
||||||
font-weight: normal;
|
font-weight: normal;
|
||||||
@@ -180,11 +188,32 @@ a, #files tbody div a:last-child {
|
|||||||
color: #840;
|
color: #840;
|
||||||
text-shadow: 0 0 .3em #b80;
|
text-shadow: 0 0 .3em #b80;
|
||||||
}
|
}
|
||||||
#files tbody tr.sel td {
|
#files tbody tr.sel td,
|
||||||
|
#ggrid a.sel,
|
||||||
|
html.light #ggrid a.sel {
|
||||||
color: #fff;
|
color: #fff;
|
||||||
background: #925;
|
background: #925;
|
||||||
border-color: #c37;
|
border-color: #c37;
|
||||||
}
|
}
|
||||||
|
#files tbody tr.sel:hover td,
|
||||||
|
#ggrid a.sel:hover,
|
||||||
|
html.light #ggrid a.sel:hover {
|
||||||
|
color: #fff;
|
||||||
|
background: #d39;
|
||||||
|
border-color: #d48;
|
||||||
|
text-shadow: 1px 1px 0 #804;
|
||||||
|
}
|
||||||
|
#ggrid a.sel,
|
||||||
|
html.light #ggrid a.sel {
|
||||||
|
border-top: 1px solid #d48;
|
||||||
|
box-shadow: 0 .1em 1.2em #b36;
|
||||||
|
transition: all 0.2s cubic-bezier(.2, 2.2, .5, 1); /* https://cubic-bezier.com/#.4,2,.7,1 */
|
||||||
|
}
|
||||||
|
#ggrid a.sel img {
|
||||||
|
opacity: .7;
|
||||||
|
box-shadow: 0 0 1em #b36;
|
||||||
|
filter: contrast(130%) brightness(107%);
|
||||||
|
}
|
||||||
#files tr.sel a {
|
#files tr.sel a {
|
||||||
color: #fff;
|
color: #fff;
|
||||||
}
|
}
|
||||||
@@ -252,7 +281,10 @@ a, #files tbody div a:last-child {
|
|||||||
background: #3c3c3c;
|
background: #3c3c3c;
|
||||||
}
|
}
|
||||||
#wtico {
|
#wtico {
|
||||||
cursor: url(/.cpr/dd/1.png), pointer;
|
cursor: url(/.cpr/dd/4.png), pointer;
|
||||||
|
animation: cursor 500ms;
|
||||||
|
}
|
||||||
|
#wtico:hover {
|
||||||
animation: cursor 500ms infinite;
|
animation: cursor 500ms infinite;
|
||||||
}
|
}
|
||||||
@keyframes cursor {
|
@keyframes cursor {
|
||||||
@@ -260,7 +292,7 @@ a, #files tbody div a:last-child {
|
|||||||
30% {cursor: url(/.cpr/dd/3.png), pointer}
|
30% {cursor: url(/.cpr/dd/3.png), pointer}
|
||||||
50% {cursor: url(/.cpr/dd/4.png), pointer}
|
50% {cursor: url(/.cpr/dd/4.png), pointer}
|
||||||
75% {cursor: url(/.cpr/dd/5.png), pointer}
|
75% {cursor: url(/.cpr/dd/5.png), pointer}
|
||||||
85% {cursor: url(/.cpr/dd/1.png), pointer}
|
85% {cursor: url(/.cpr/dd/4.png), pointer}
|
||||||
}
|
}
|
||||||
@keyframes spin {
|
@keyframes spin {
|
||||||
100% {transform: rotate(360deg)}
|
100% {transform: rotate(360deg)}
|
||||||
@@ -281,29 +313,48 @@ a, #files tbody div a:last-child {
|
|||||||
padding: .2em 0 0 .07em;
|
padding: .2em 0 0 .07em;
|
||||||
color: #fff;
|
color: #fff;
|
||||||
}
|
}
|
||||||
#wzip {
|
#wzip, #wnp {
|
||||||
display: none;
|
display: none;
|
||||||
margin-right: .3em;
|
margin-right: .3em;
|
||||||
padding-right: .3em;
|
padding-right: .3em;
|
||||||
border-right: .1em solid #555;
|
border-right: .1em solid #555;
|
||||||
}
|
}
|
||||||
|
#wnp a {
|
||||||
|
position: relative;
|
||||||
|
font-size: .47em;
|
||||||
|
margin: 0 .1em;
|
||||||
|
top: -.4em;
|
||||||
|
}
|
||||||
|
#wnp a+a {
|
||||||
|
margin-left: .33em;
|
||||||
|
}
|
||||||
#wtoggle,
|
#wtoggle,
|
||||||
#wtoggle * {
|
#wtoggle * {
|
||||||
line-height: 1em;
|
line-height: 1em;
|
||||||
}
|
}
|
||||||
|
#wtoggle.np {
|
||||||
|
width: 5.5em;
|
||||||
|
}
|
||||||
#wtoggle.sel {
|
#wtoggle.sel {
|
||||||
width: 6.4em;
|
width: 6.4em;
|
||||||
}
|
}
|
||||||
#wtoggle.sel #wzip {
|
#wtoggle.sel #wzip,
|
||||||
|
#wtoggle.np #wnp {
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
}
|
}
|
||||||
#wtoggle.sel #wzip a {
|
#wtoggle.sel.np #wnp {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
#wzip a {
|
||||||
font-size: .4em;
|
font-size: .4em;
|
||||||
padding: 0 .3em;
|
padding: 0 .3em;
|
||||||
margin: -.3em .2em;
|
margin: -.3em .2em;
|
||||||
position: relative;
|
position: relative;
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
}
|
}
|
||||||
|
#wzip a+a {
|
||||||
|
margin-left: .8em;
|
||||||
|
}
|
||||||
#wtoggle.sel #wzip #selzip {
|
#wtoggle.sel #wzip #selzip {
|
||||||
top: -.6em;
|
top: -.6em;
|
||||||
padding: .4em .3em;
|
padding: .4em .3em;
|
||||||
@@ -418,6 +469,7 @@ a, #files tbody div a:last-child {
|
|||||||
padding: .3em .6em;
|
padding: .3em .6em;
|
||||||
border-radius: .3em;
|
border-radius: .3em;
|
||||||
border-width: .15em 0;
|
border-width: .15em 0;
|
||||||
|
white-space: nowrap;
|
||||||
}
|
}
|
||||||
.opbox {
|
.opbox {
|
||||||
background: #2d2d2d;
|
background: #2d2d2d;
|
||||||
@@ -507,7 +559,6 @@ input[type="checkbox"]:checked+label {
|
|||||||
left: 0;
|
left: 0;
|
||||||
bottom: 0;
|
bottom: 0;
|
||||||
top: 7em;
|
top: 7em;
|
||||||
padding-top: .2em;
|
|
||||||
overflow-y: auto;
|
overflow-y: auto;
|
||||||
-ms-scroll-chaining: none;
|
-ms-scroll-chaining: none;
|
||||||
overscroll-behavior-y: none;
|
overscroll-behavior-y: none;
|
||||||
@@ -531,8 +582,7 @@ input[type="checkbox"]:checked+label {
|
|||||||
left: -1.7em;
|
left: -1.7em;
|
||||||
width: calc(100% + 1.3em);
|
width: calc(100% + 1.3em);
|
||||||
}
|
}
|
||||||
.tglbtn,
|
.btn {
|
||||||
#tree>a+a {
|
|
||||||
padding: .2em .4em;
|
padding: .2em .4em;
|
||||||
font-size: 1.2em;
|
font-size: 1.2em;
|
||||||
background: #2a2a2a;
|
background: #2a2a2a;
|
||||||
@@ -542,12 +592,10 @@ input[type="checkbox"]:checked+label {
|
|||||||
position: relative;
|
position: relative;
|
||||||
top: -.2em;
|
top: -.2em;
|
||||||
}
|
}
|
||||||
.tglbtn:hover,
|
.btn:hover {
|
||||||
#tree>a+a:hover {
|
|
||||||
background: #805;
|
background: #805;
|
||||||
}
|
}
|
||||||
.tglbtn.on,
|
.tgl.btn.on {
|
||||||
#tree>a+a.on {
|
|
||||||
background: #fc4;
|
background: #fc4;
|
||||||
color: #400;
|
color: #400;
|
||||||
text-shadow: none;
|
text-shadow: none;
|
||||||
@@ -555,6 +603,7 @@ input[type="checkbox"]:checked+label {
|
|||||||
#detree {
|
#detree {
|
||||||
padding: .3em .5em;
|
padding: .3em .5em;
|
||||||
font-size: 1.5em;
|
font-size: 1.5em;
|
||||||
|
line-height: 1.5em;
|
||||||
}
|
}
|
||||||
#tree ul,
|
#tree ul,
|
||||||
#tree li {
|
#tree li {
|
||||||
@@ -691,6 +740,69 @@ input[type="checkbox"]:checked+label {
|
|||||||
font-family: monospace, monospace;
|
font-family: monospace, monospace;
|
||||||
line-height: 2em;
|
line-height: 2em;
|
||||||
}
|
}
|
||||||
|
#griden.on+#thumbs {
|
||||||
|
opacity: .3;
|
||||||
|
}
|
||||||
|
#ghead {
|
||||||
|
background: #3c3c3c;
|
||||||
|
border: 1px solid #444;
|
||||||
|
border-radius: .3em;
|
||||||
|
padding: .5em;
|
||||||
|
margin: 0 1.5em 1em .4em;
|
||||||
|
position: sticky;
|
||||||
|
top: -.3em;
|
||||||
|
}
|
||||||
|
html.light #ghead {
|
||||||
|
background: #f7f7f7;
|
||||||
|
border-color: #ddd;
|
||||||
|
}
|
||||||
|
#ghead .btn {
|
||||||
|
position: relative;
|
||||||
|
top: 0;
|
||||||
|
}
|
||||||
|
#ggrid {
|
||||||
|
padding-top: .5em;
|
||||||
|
}
|
||||||
|
#ggrid a {
|
||||||
|
display: inline-block;
|
||||||
|
width: var(--grid-sz);
|
||||||
|
vertical-align: top;
|
||||||
|
overflow-wrap: break-word;
|
||||||
|
background: #383838;
|
||||||
|
border: 1px solid #444;
|
||||||
|
border-top: 1px solid #555;
|
||||||
|
box-shadow: 0 .1em .2em #222;
|
||||||
|
border-radius: .3em;
|
||||||
|
padding: .3em;
|
||||||
|
margin: .5em;
|
||||||
|
}
|
||||||
|
#ggrid a img {
|
||||||
|
border-radius: .2em;
|
||||||
|
max-width: var(--grid-sz);
|
||||||
|
max-height: calc(var(--grid-sz)/1.25);
|
||||||
|
margin: 0 auto;
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
#ggrid a span {
|
||||||
|
padding: .2em .3em;
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
#ggrid a:hover {
|
||||||
|
background: #444;
|
||||||
|
border-color: #555;
|
||||||
|
color: #fd9;
|
||||||
|
}
|
||||||
|
html.light #ggrid a {
|
||||||
|
background: #f7f7f7;
|
||||||
|
border-color: #ddd;
|
||||||
|
box-shadow: 0 .1em .2em #ddd;
|
||||||
|
}
|
||||||
|
html.light #ggrid a:hover {
|
||||||
|
background: #fff;
|
||||||
|
border-color: #ccc;
|
||||||
|
color: #015;
|
||||||
|
box-shadow: 0 .1em .5em #aaa;
|
||||||
|
}
|
||||||
#pvol,
|
#pvol,
|
||||||
#barbuf,
|
#barbuf,
|
||||||
#barpos,
|
#barpos,
|
||||||
@@ -705,6 +817,21 @@ input[type="checkbox"]:checked+label {
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
html.light {
|
html.light {
|
||||||
color: #333;
|
color: #333;
|
||||||
background: #eee;
|
background: #eee;
|
||||||
@@ -726,18 +853,15 @@ html.light #ops a.act {
|
|||||||
html.light #op_cfg h3 {
|
html.light #op_cfg h3 {
|
||||||
border-color: #ccc;
|
border-color: #ccc;
|
||||||
}
|
}
|
||||||
html.light .tglbtn,
|
html.light .btn {
|
||||||
html.light #tree > a + a {
|
|
||||||
color: #666;
|
color: #666;
|
||||||
background: #ddd;
|
background: #ddd;
|
||||||
box-shadow: none;
|
box-shadow: none;
|
||||||
}
|
}
|
||||||
html.light .tglbtn:hover,
|
html.light .btn:hover {
|
||||||
html.light #tree > a + a:hover {
|
|
||||||
background: #caf;
|
background: #caf;
|
||||||
}
|
}
|
||||||
html.light .tglbtn.on,
|
html.light .tgl.btn.on {
|
||||||
html.light #tree > a + a.on {
|
|
||||||
background: #4a0;
|
background: #4a0;
|
||||||
color: #fff;
|
color: #fff;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -21,7 +21,7 @@
     {%- endif %}
     <a href="#" data-perm="write" data-dest="bup" data-desc="bup: basic uploader, even supports netscape 4.0">🎈</a>
     <a href="#" data-perm="write" data-dest="mkdir" data-desc="mkdir: create a new directory">📂</a>
-    <a href="#" data-perm="write" data-dest="new_md" data-desc="new-md: create a new markdown document">📝</a>
+    <a href="#" data-perm="read write" data-dest="new_md" data-desc="new-md: create a new markdown document">📝</a>
     <a href="#" data-perm="write" data-dest="msg" data-desc="msg: send a message to the server log">📟</a>
     <a href="#" data-dest="cfg" data-desc="configuration options">⚙️</a>
     <div id="opdesc"></div>
@@ -41,8 +41,10 @@
 <div id="op_cfg" class="opview opbox">
     <h3>switches</h3>
     <div>
-        <a id="tooltips" class="tglbtn" href="#">tooltips</a>
-        <a id="lightmode" class="tglbtn" href="#">lightmode</a>
+        <a id="tooltips" class="tgl btn" href="#">tooltips</a>
+        <a id="lightmode" class="tgl btn" href="#">lightmode</a>
+        <a id="griden" class="tgl btn" href="#">the grid</a>
+        <a id="thumbs" class="tgl btn" href="#">thumbs</a>
     </div>
     {%- if have_zip %}
     <h3>folder download</h3>
@@ -61,9 +63,9 @@
 
 <div id="tree">
     <a href="#" id="detree">🍞...</a>
-    <a href="#" step="2" id="twobytwo">+</a>
-    <a href="#" step="-2" id="twig">–</a>
-    <a href="#" class="tglbtn" id="dyntree">a</a>
+    <a href="#" class="btn" step="2" id="twobytwo">+</a>
+    <a href="#" class="btn" step="-2" id="twig">–</a>
+    <a href="#" class="tgl btn" id="dyntree">a</a>
     <ul id="treeul"></ul>
     <div id="thx_ff"> </div>
 </div>
@@ -114,22 +116,7 @@
 <div id="srv_info"><span>{{ srv_info }}</span></div>
 {%- endif %}
 
-<div id="widget">
-    <div id="wtoggle">
-        <span id="wzip">
-            <a href="#" id="selall">sel.<br />all</a>
-            <a href="#" id="selinv">sel.<br />inv.</a>
-            <a href="#" id="selzip">zip</a>
-        </span><a
-            href="#" id="wtico">♫</a>
-    </div>
-    <div id="widgeti">
-        <div id="pctl"><a href="#" id="bprev">⏮</a><a href="#" id="bplay">▶</a><a href="#" id="bnext">⏭</a></div>
-        <canvas id="pvol" width="288" height="38"></canvas>
-        <canvas id="barpos"></canvas>
-        <canvas id="barbuf"></canvas>
-    </div>
-</div>
+<div id="widget"></div>
 
 <script>
     var tag_order_cfg = {{ tag_order }};
|
|||||||
@@ -7,6 +7,41 @@ function dbg(msg) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// add widget buttons
|
||||||
|
ebi('widget').innerHTML = (
|
||||||
|
'<div id="wtoggle">' +
|
||||||
|
'<span id="wzip"><a' +
|
||||||
|
' href="#" id="selall">sel.<br />all</a><a' +
|
||||||
|
' href="#" id="selinv">sel.<br />inv.</a><a' +
|
||||||
|
' href="#" id="selzip">zip</a>' +
|
||||||
|
'</span><span id="wnp"><a' +
|
||||||
|
' href="#" id="npirc">📋irc</a><a' +
|
||||||
|
' href="#" id="nptxt">📋txt</a>' +
|
||||||
|
'</span><a' +
|
||||||
|
' href="#" id="wtico">♫</a>' +
|
||||||
|
'</div>' +
|
||||||
|
'<div id="widgeti">' +
|
||||||
|
' <div id="pctl"><a href="#" id="bprev">⏮</a><a href="#" id="bplay">▶</a><a href="#" id="bnext">⏭</a></div>' +
|
||||||
|
' <canvas id="pvol" width="288" height="38"></canvas>' +
|
||||||
|
' <canvas id="barpos"></canvas>' +
|
||||||
|
' <canvas id="barbuf"></canvas>' +
|
||||||
|
'</div>'
|
||||||
|
);
|
||||||
|
|
||||||
|
|
||||||
|
var have_webp = null;
|
||||||
|
(function () {
|
||||||
|
var img = new Image();
|
||||||
|
img.onload = function () {
|
||||||
|
have_webp = img.width > 0 && img.height > 0;
|
||||||
|
};
|
||||||
|
img.onerror = function () {
|
||||||
|
have_webp = false;
|
||||||
|
};
|
||||||
|
img.src = "data:image/webp;base64,UklGRiIAAABXRUJQVlA4IBYAAAAwAQCdASoBAAEADsD+JaQAA3AAAAAA";
|
||||||
|
})();
|
||||||
|
|
||||||
|
|
||||||
// extract songs + add play column
|
// extract songs + add play column
|
||||||
function MPlayer() {
|
function MPlayer() {
|
||||||
this.id = Date.now();
|
this.id = Date.now();
|
||||||
@@ -79,6 +114,8 @@ var widget = (function () {
|
|||||||
var ret = {},
|
var ret = {},
|
||||||
widget = ebi('widget'),
|
widget = ebi('widget'),
|
||||||
wtico = ebi('wtico'),
|
wtico = ebi('wtico'),
|
||||||
|
nptxt = ebi('nptxt'),
|
||||||
|
npirc = ebi('npirc'),
|
||||||
touchmode = false,
|
touchmode = false,
|
||||||
side_open = false,
|
side_open = false,
|
||||||
was_paused = true;
|
was_paused = true;
|
||||||
@@ -116,6 +153,35 @@ var widget = (function () {
|
|||||||
|
|
||||||
return false;
|
return false;
|
||||||
};
|
};
|
||||||
|
npirc.onclick = nptxt.onclick = function (e) {
|
||||||
|
ev(e);
|
||||||
|
var th = ebi('files').tHead.rows[0].cells,
|
||||||
|
tr = QS('#files tr.play').cells,
|
||||||
|
irc = this.getAttribute('id') == 'npirc',
|
||||||
|
ck = irc ? '06' : '',
|
||||||
|
cv = irc ? '07' : '',
|
||||||
|
m = ck + 'np: ';
|
||||||
|
|
||||||
|
for (var a = 1, aa = th.length; a < aa; a++) {
|
||||||
|
var tk = a == 1 ? '' : th[a].getAttribute('name').split('/').slice(-1)[0];
|
||||||
|
var tv = tr[a].getAttribute('html') || tr[a].textContent;
|
||||||
|
m += tk + '(' + cv + tv + ck + ') // ';
|
||||||
|
}
|
||||||
|
|
||||||
|
m += '[' + cv + s2ms(mp.au.currentTime) + ck + '/' + cv + s2ms(mp.au.duration) + ck + ']';
|
||||||
|
|
||||||
|
var o = document.createElement('input');
|
||||||
|
o.style.cssText = 'position:fixed;top:45%;left:48%;padding:1em;z-index:9';
|
||||||
|
o.value = m;
|
||||||
|
document.body.appendChild(o);
|
||||||
|
o.focus();
|
||||||
|
o.select();
|
||||||
|
document.execCommand("copy");
|
||||||
|
o.value = 'copied to clipboard ';
|
||||||
|
setTimeout(function () {
|
||||||
|
document.body.removeChild(o);
|
||||||
|
}, 500);
|
||||||
|
};
|
||||||
return ret;
|
return ret;
|
||||||
})();
|
})();
|
||||||
|
|
||||||
@@ -189,8 +255,8 @@ var pbar = (function () {
|
|||||||
};
|
};
|
||||||
|
|
||||||
r.drawpos = function () {
|
r.drawpos = function () {
|
||||||
if (!mp.au)
|
if (!mp.au || isNaN(mp.au.duration) || isNaN(mp.au.currentTime))
|
||||||
return;
|
return; // not-init || unsupp-codec
|
||||||
|
|
||||||
var bc = r.buf,
|
var bc = r.buf,
|
||||||
pc = r.pos,
|
pc = r.pos,
|
||||||
@@ -513,6 +579,7 @@ function play(tid, seek, call_depth) {
|
|||||||
clmod(trs[a], 'play');
|
clmod(trs[a], 'play');
|
||||||
}
|
}
|
||||||
ebi(oid).parentElement.parentElement.className += ' play';
|
ebi(oid).parentElement.parentElement.className += ' play';
|
||||||
|
clmod(ebi('wtoggle'), 'np', 1);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
if (attempt_play)
|
if (attempt_play)
|
||||||
@@ -642,6 +709,183 @@ function autoplay_blocked(seek) {
|
|||||||
})();
|
})();
|
||||||
|
|
||||||
|
|
||||||
|
var thegrid = (function () {
|
||||||
|
var lfiles = ebi('files');
|
||||||
|
var gfiles = document.createElement('div');
|
||||||
|
gfiles.setAttribute('id', 'gfiles');
|
||||||
|
gfiles.style.display = 'none';
|
||||||
|
gfiles.innerHTML = (
|
||||||
|
'<div id="ghead">' +
|
||||||
|
'<a href="#" class="tgl btn" id="gridsel">multiselect</a> zoom ' +
|
||||||
|
'<a href="#" class="btn" z="-1.2">–</a> ' +
|
||||||
|
'<a href="#" class="btn" z="1.2">+</a> sort by: ' +
|
||||||
|
'<a href="#" s="href">name</a>, ' +
|
||||||
|
'<a href="#" s="sz">size</a>, ' +
|
||||||
|
'<a href="#" s="ts">date</a>, ' +
|
||||||
|
'<a href="#" s="ext">type</a>' +
|
||||||
|
'</div>' +
|
||||||
|
'<div id="ggrid"></div>'
|
||||||
|
);
|
||||||
|
lfiles.parentNode.insertBefore(gfiles, lfiles);
|
||||||
|
|
||||||
|
var r = {
|
||||||
|
'thumbs': bcfg_get('thumbs', true),
|
||||||
|
'en': bcfg_get('griden', false),
|
||||||
|
'sel': bcfg_get('gridsel', false),
|
||||||
|
'sz': fcfg_get('gridsz', 10),
|
||||||
|
'isdirty': true
|
||||||
|
};
|
||||||
|
|
||||||
|
ebi('thumbs').onclick = function (e) {
|
||||||
|
ev(e);
|
||||||
|
r.thumbs = !r.thumbs;
|
||||||
|
bcfg_set('thumbs', r.thumbs);
|
||||||
|
if (r.en) {
|
||||||
|
loadgrid();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
ebi('griden').onclick = function (e) {
|
||||||
|
ev(e);
|
||||||
|
r.en = !r.en;
|
||||||
|
bcfg_set('griden', r.en);
|
||||||
|
if (r.en) {
|
||||||
|
loadgrid();
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
lfiles.style.display = '';
|
||||||
|
gfiles.style.display = 'none';
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
var btnclick = function (e) {
|
||||||
|
ev(e);
|
||||||
|
var s = this.getAttribute('s'),
|
||||||
|
z = this.getAttribute('z');
|
||||||
|
|
||||||
|
if (z)
|
||||||
|
return setsz(z > 0 ? r.sz * z : r.sz / (-z));
|
||||||
|
|
||||||
|
var t = lfiles.tHead.rows[0].cells;
|
||||||
|
for (var a = 0; a < t.length; a++)
|
||||||
|
if (t[a].getAttribute('name') == s) {
|
||||||
|
t[a].click();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
r.setdirty();
|
||||||
|
};
|
||||||
|
|
||||||
|
var links = QSA('#ghead>a');
|
||||||
|
for (var a = 0; a < links.length; a++)
|
||||||
|
links[a].onclick = btnclick;
|
||||||
|
|
||||||
|
ebi('gridsel').onclick = function (e) {
|
||||||
|
ev(e);
|
||||||
|
r.sel = !r.sel;
|
||||||
|
bcfg_set('gridsel', r.sel);
|
||||||
|
r.loadsel();
|
||||||
|
};
|
||||||
|
|
||||||
|
r.setvis = function (vis) {
|
||||||
|
(r.en ? gfiles : lfiles).style.display = vis ? '' : 'none';
|
||||||
|
}
|
||||||
|
|
||||||
|
r.setdirty = function () {
|
||||||
|
r.dirty = true;
|
||||||
|
if (r.en) {
|
||||||
|
loadgrid();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function setsz(v) {
|
||||||
|
if (v !== undefined) {
|
||||||
|
r.sz = v;
|
||||||
|
swrite('gridsz', r.sz);
|
||||||
|
}
|
||||||
|
document.documentElement.style.setProperty('--grid-sz', r.sz + 'em');
|
||||||
|
}
|
||||||
|
setsz();
|
||||||
|
|
||||||
|
function seltgl(e) {
|
||||||
|
if (e && e.ctrlKey)
|
||||||
|
return true;
|
||||||
|
|
||||||
|
ev(e);
|
||||||
|
var oth = ebi(this.getAttribute('ref')),
|
||||||
|
td = oth.parentNode.nextSibling,
|
||||||
|
tr = td.parentNode;
|
||||||
|
|
||||||
|
td.click();
|
||||||
|
this.setAttribute('class', tr.getAttribute('class'));
|
||||||
|
}
|
||||||
|
|
||||||
|
r.loadsel = function () {
|
||||||
|
var ths = QSA('#ggrid>a');
|
||||||
|
for (var a = 0, aa = ths.length; a < aa; a++) {
|
||||||
|
ths[a].onclick = r.sel ? seltgl : null;
|
||||||
|
ths[a].setAttribute('class', ebi(ths[a].getAttribute('ref')).parentNode.parentNode.getAttribute('class'));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function loadgrid() {
|
||||||
|
if (have_webp === null)
|
||||||
|
return setTimeout(loadgrid, 50);
|
||||||
|
|
||||||
|
if (!r.dirty)
|
||||||
|
return r.loadsel();
|
||||||
|
|
||||||
|
var html = [];
|
||||||
|
var tr = lfiles.tBodies[0].rows;
|
||||||
|
for (var a = 0; a < tr.length; a++) {
|
||||||
|
var ao = tr[a].cells[1].firstChild,
|
||||||
|
href = esc(ao.getAttribute('href')),
|
||||||
|
ref = ao.getAttribute('id'),
|
||||||
|
isdir = href.split('?')[0].slice(-1)[0] == '/',
|
||||||
|
ihref = href;
|
||||||
|
|
||||||
|
if (isdir) {
|
||||||
|
ihref = '/.cpr/ico/folder'
|
||||||
|
}
|
||||||
|
else if (r.thumbs) {
|
||||||
|
ihref += (ihref.indexOf('?') === -1 ? '?' : '&') + 'th=' + (have_webp ? 'w' : 'j');
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
var ar = href.split('?')[0].split('.');
|
||||||
|
if (ar.length > 1)
|
||||||
|
ar = ar.slice(1);
|
||||||
|
|
||||||
|
ihref = '';
|
||||||
|
ar.reverse();
|
||||||
|
for (var b = 0; b < ar.length; b++) {
|
||||||
|
if (ar[b].length > 7)
|
||||||
|
break;
|
||||||
|
|
||||||
|
ihref = ar[b] + '.' + ihref;
|
||||||
|
}
|
||||||
|
if (!ihref) {
|
||||||
|
ihref = 'unk.';
|
||||||
|
}
|
||||||
|
ihref = '/.cpr/ico/' + ihref.slice(0, -1);
|
||||||
|
}
|
||||||
|
|
||||||
|
html.push('<a href="' + href + '" ref="' + ref + '"><img src="' +
|
||||||
|
ihref + '" /><span>' + ao.innerHTML + '</span></a>');
|
||||||
|
}
|
||||||
|
lfiles.style.display = 'none';
|
||||||
|
gfiles.style.display = 'block';
|
||||||
|
ebi('ggrid').innerHTML = html.join('\n');
|
||||||
|
r.loadsel();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (r.en) {
|
||||||
|
loadgrid();
|
||||||
|
}
|
||||||
|
|
||||||
|
return r;
|
||||||
|
})();
|
||||||
|
|
||||||
|
|
||||||
function tree_neigh(n) {
|
function tree_neigh(n) {
|
||||||
var links = QSA('#treeul li>a+a');
|
var links = QSA('#treeul li>a+a');
|
||||||
if (!links.length) {
|
if (!links.length) {
|
||||||
@@ -709,6 +953,23 @@ document.onkeydown = function (e) {
|
|||||||
|
|
||||||
if (k == 'KeyP')
|
if (k == 'KeyP')
|
||||||
return tree_up();
|
return tree_up();
|
||||||
|
|
||||||
|
if (k == 'KeyG')
|
||||||
|
return ebi('griden').click();
|
||||||
|
|
||||||
|
if (k == 'KeyT')
|
||||||
|
return ebi('thumbs').click();
|
||||||
|
|
||||||
|
if (window['thegrid'] && thegrid.en) {
|
||||||
|
if (k == 'KeyS')
|
||||||
|
return ebi('gridsel').click();
|
||||||
|
|
||||||
|
if (k == 'KeyA')
|
||||||
|
return QSA('#ghead>a[z]')[0].click();
|
||||||
|
|
||||||
|
if (k == 'KeyD')
|
||||||
|
return QSA('#ghead>a[z]')[1].click();
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
||||||
@@ -864,7 +1125,7 @@ document.onkeydown = function (e) {
|
|||||||
sz = esc(r.sz + ''),
|
sz = esc(r.sz + ''),
|
||||||
rp = esc(r.rp + ''),
|
rp = esc(r.rp + ''),
|
||||||
ext = rp.lastIndexOf('.') > 0 ? rp.split('.').slice(-1)[0] : '%',
|
ext = rp.lastIndexOf('.') > 0 ? rp.split('.').slice(-1)[0] : '%',
|
||||||
links = linksplit(rp);
|
links = linksplit(r.rp + '');
|
||||||
|
|
||||||
if (ext.length > 8)
|
if (ext.length > 8)
|
||||||
ext = '%';
|
ext = '%';
|
||||||
@@ -928,7 +1189,6 @@ var treectl = (function () {
|
|||||||
treesz = icfg_get('treesz', 16);
|
treesz = icfg_get('treesz', 16);
|
||||||
|
|
||||||
treesz = Math.min(Math.max(treesz, 4), 50);
|
treesz = Math.min(Math.max(treesz, 4), 50);
|
||||||
console.log('treesz [' + treesz + ']');
|
|
||||||
|
|
||||||
function entree(e) {
|
function entree(e) {
|
||||||
ev(e);
|
ev(e);
|
||||||
@@ -987,7 +1247,7 @@ var treectl = (function () {
|
|||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
var top = Math.max(0, parseInt(wrap.offsetTop)),
|
var top = Math.max(0, parseInt(wrap.offsetTop)),
|
||||||
treeh = (winh - atop) - 4;
|
treeh = winh - atop;
|
||||||
|
|
||||||
tree.style.top = top + 'px';
|
tree.style.top = top + 'px';
|
||||||
tree.style.height = treeh < 10 ? '' : treeh + 'px';
|
tree.style.height = treeh < 10 ? '' : treeh + 'px';
|
||||||
@@ -1173,8 +1433,10 @@ var treectl = (function () {
|
|||||||
nodes = sortfiles(nodes);
|
nodes = sortfiles(nodes);
|
||||||
for (var a = 0; a < nodes.length; a++) {
|
for (var a = 0; a < nodes.length; a++) {
|
||||||
var r = nodes[a],
|
var r = nodes[a],
|
||||||
ln = ['<tr><td>' + r.lead + '</td><td><a href="' +
|
hname = esc(uricom_dec(r.href)[0]),
|
||||||
top + r.href + '">' + esc(uricom_dec(r.href)[0]) + '</a>', r.sz];
|
sortv = (r.href.slice(-1) == '/' ? '\t' : '') + hname,
|
||||||
|
ln = ['<tr><td>' + r.lead + '</td><td sortv="' + sortv +
|
||||||
|
'"><a href="' + top + r.href + '">' + hname + '</a>', r.sz];
|
||||||
|
|
||||||
for (var b = 0; b < res.taglist.length; b++) {
|
for (var b = 0; b < res.taglist.length; b++) {
|
||||||
var k = res.taglist[b],
|
var k = res.taglist[b],
|
||||||
@@ -1203,6 +1465,8 @@ var treectl = (function () {
|
|||||||
ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : "";
|
ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : "";
|
||||||
ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : "";
|
ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : "";
|
||||||
|
|
||||||
|
document.title = '⇆🎉 ' + uricom_dec(document.location.pathname.slice(1, -1))[0];
|
||||||
|
|
||||||
filecols.set_style();
|
filecols.set_style();
|
||||||
mukey.render();
|
mukey.render();
|
||||||
msel.render();
|
msel.render();
|
||||||
@@ -1306,22 +1570,21 @@ function despin(sel) {
|
|||||||
function apply_perms(perms) {
|
function apply_perms(perms) {
|
||||||
perms = perms || [];
|
perms = perms || [];
|
||||||
|
|
||||||
var o = QSA('#ops>a[data-perm]');
|
var o = QSA('#ops>a[data-perm], #u2footfoot');
|
||||||
for (var a = 0; a < o.length; a++)
|
for (var a = 0; a < o.length; a++) {
|
||||||
o[a].style.display = 'none';
|
var display = 'inline';
|
||||||
|
var needed = o[a].getAttribute('data-perm').split(' ');
|
||||||
for (var a = 0; a < perms.length; a++) {
|
for (var b = 0; b < needed.length; b++) {
|
||||||
o = QSA('#ops>a[data-perm="' + perms[a] + '"]');
|
if (!has(perms, needed[b])) {
|
||||||
for (var b = 0; b < o.length; b++)
|
display = 'none';
|
||||||
o[b].style.display = 'inline';
|
}
|
||||||
|
}
|
||||||
|
o[a].style.display = display;
|
||||||
}
|
}
|
||||||
|
|
||||||
var act = QS('#ops>a.act');
|
var act = QS('#ops>a.act');
|
||||||
if (act) {
|
if (act && act.style.display === 'none')
|
||||||
var areq = act.getAttribute('data-perm');
|
goto();
|
||||||
if (areq && !has(perms, areq))
|
|
||||||
goto();
|
|
||||||
}
|
|
||||||
|
|
||||||
document.body.setAttribute('perms', perms.join(' '));
|
document.body.setAttribute('perms', perms.join(' '));
|
||||||
|
|
||||||
@@ -1339,7 +1602,7 @@ function apply_perms(perms) {
|
|||||||
up2k.set_fsearch();
|
up2k.set_fsearch();
|
||||||
|
|
||||||
ebi('widget').style.display = have_read ? '' : 'none';
|
ebi('widget').style.display = have_read ? '' : 'none';
|
||||||
ebi('files').style.display = have_read ? '' : 'none';
|
thegrid.setvis(have_read);
|
||||||
if (!have_read)
|
if (!have_read)
|
||||||
goto('up2k');
|
goto('up2k');
|
||||||
}
|
}
|
||||||
@@ -1732,6 +1995,8 @@ var msel = (function () {
|
|||||||
}
|
}
|
||||||
function selui() {
|
function selui() {
|
||||||
clmod(ebi('wtoggle'), 'sel', getsel().length);
|
clmod(ebi('wtoggle'), 'sel', getsel().length);
|
||||||
|
if (window['thegrid'])
|
||||||
|
thegrid.loadsel();
|
||||||
}
|
}
|
||||||
function seltgl(e) {
|
function seltgl(e) {
|
||||||
ev(e);
|
ev(e);
|
||||||
@@ -1790,6 +2055,23 @@ var msel = (function () {
|
|||||||
})();
|
})();
|
||||||
|
|
||||||
|
|
||||||
|
(function () {
|
||||||
|
try {
|
||||||
|
var tr = ebi('files').tBodies[0].rows;
|
||||||
|
for (var a = 0; a < tr.length; a++) {
|
||||||
|
var td = tr[a].cells[1],
|
||||||
|
ao = td.firstChild,
|
||||||
|
href = ao.getAttribute('href'),
|
||||||
|
isdir = href.split('?')[0].slice(-1)[0] == '/',
|
||||||
|
txt = ao.textContent;
|
||||||
|
|
||||||
|
td.setAttribute('sortv', (isdir ? '\t' : '') + txt);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (ex) { }
|
||||||
|
})();
|
||||||
|
|
||||||
|
|
||||||
function ev_row_tgl(e) {
|
function ev_row_tgl(e) {
|
||||||
ev(e);
|
ev(e);
|
||||||
filecols.toggle(this.parentElement.parentElement.getElementsByTagName('span')[0].textContent);
|
filecols.toggle(this.parentElement.parentElement.getElementsByTagName('span')[0].textContent);
|
||||||
@@ -1840,6 +2122,8 @@ function reload_browser(not_mp) {
|
|||||||
|
|
||||||
if (window['up2k'])
|
if (window['up2k'])
|
||||||
up2k.set_fsearch();
|
up2k.set_fsearch();
|
||||||
|
|
||||||
|
thegrid.setdirty();
|
||||||
}
|
}
|
||||||
reload_browser(true);
|
reload_browser(true);
|
||||||
mukey.render();
|
mukey.render();
|
||||||
|
|||||||
@@ -6,6 +6,11 @@
     <title>{{ title }}</title>
     <meta http-equiv="X-UA-Compatible" content="IE=edge">
     <meta name="viewport" content="width=device-width, initial-scale=0.8">
+    <style>
+        html{font-family:sans-serif}
+        td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
+        a{display:block}
+    </style>
 </head>
 
 <body>
@@ -49,7 +54,7 @@
     <div>{{ logues[1] }}</div><br />
     {%- endif %}
 
-    <h2><a href="{{ url_suf }}&h">control-panel</a></h2>
+    <h2><a href="{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
 
 </body>
 </html>
Binary file not shown. (before: 248 B)
@@ -13,19 +13,23 @@
 <div id="wrap">
     <p>hello {{ this.uname }}</p>
 
+    {%- if rvol %}
     <h1>you can browse these:</h1>
     <ul>
     {% for mp in rvol %}
         <li><a href="/{{ mp }}{{ url_suf }}">/{{ mp }}</a></li>
     {% endfor %}
     </ul>
+    {%- endif %}
 
+    {%- if wvol %}
     <h1>you can upload to:</h1>
     <ul>
     {% for mp in wvol %}
         <li><a href="/{{ mp }}{{ url_suf }}">/{{ mp }}</a></li>
     {% endfor %}
     </ul>
+    {%- endif %}
 
     <h1>login for more:</h1>
     <ul>
@@ -18,14 +18,14 @@ function goto_up2k() {
     // usually it's undefined but some chromes throw on invoke
     var up2k = null;
     try {
-        crypto.subtle.digest(
-            'SHA-512', new Uint8Array(1)
-        ).then(
-            function (x) { up2k = up2k_init(true) },
-            function (x) { up2k = up2k_init(false) }
+        var cf = crypto.subtle || crypto.webkitSubtle;
+        cf.digest('SHA-512', new Uint8Array(1)).then(
+            function (x) { console.log('sha-ok'); up2k = up2k_init(cf); },
+            function (x) { console.log('sha-ng:', x); up2k = up2k_init(false); }
         );
     }
     catch (ex) {
+        console.log('sha-na:', ex);
         try {
             up2k = up2k_init(false);
         }
@@ -401,9 +401,7 @@ function U2pvis(act, btns) {
 }
 
 
-function up2k_init(have_crypto) {
-    //have_crypto = false;
-
+function up2k_init(subtle) {
     // show modal message
     function showmodal(msg) {
         ebi('u2notbtn').innerHTML = msg;
@@ -426,12 +424,12 @@ function up2k_init(have_crypto) {
     is_https = (window.location + '').indexOf('https:') === 0;
 
     if (is_https)
-        // chrome<37 firefox<34 edge<12 ie<11 opera<24 safari<10.1
+        // chrome<37 firefox<34 edge<12 opera<24 safari<7
         shame = 'your browser is impressively ancient';
 
     // upload ui hidden by default, clicking the header shows it
     function init_deps() {
-        if (!have_crypto && !window.asmCrypto) {
+        if (!subtle && !window.asmCrypto) {
             showmodal('<h1>loading sha512.js</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
             import_js('/.cpr/deps/sha512.js', unmodal);
 
@@ -443,8 +441,8 @@ function up2k_init(have_crypto) {
     }
 
     // show uploader if the user only has write-access
-    var perms = (document.body.getAttribute('perms') + '').split(' ');
-    if (!has(perms, 'read'))
+    var perms = document.body.getAttribute('perms');
+    if (perms && !has(perms.split(' '), 'read'))
         goto('up2k');
 
     // shows or clears a message in the basic uploader ui
@@ -698,7 +696,7 @@ function up2k_init(have_crypto) {
 
         pvis.addfile([
             fsearch ? esc(entry.name) : linksplit(
-                esc(uricom_dec(entry.purl)[0] + entry.name)).join(' '),
+                uricom_dec(entry.purl)[0] + entry.name).join(' '),
             '📐 hash',
             ''
         ], fobj.size);
@@ -986,14 +984,14 @@ function up2k_init(have_crypto) {
                 st.todo.handshake.push(t);
             };
 
-            if (have_crypto)
-                crypto.subtle.digest('SHA-512', buf).then(hash_done);
-            else {
+            if (subtle)
+                subtle.digest('SHA-512', buf).then(hash_done);
+            else setTimeout(function () {
                 var hasher = new asmCrypto.Sha512();
                 hasher.process(new Uint8Array(buf));
                 hasher.finish();
                 hash_done(hasher.result);
-            }
+            }, 1);
         };
 
         t.t1 = Date.now();
@@ -1025,7 +1023,7 @@ function up2k_init(have_crypto) {
             else {
                 smsg = 'found';
                 var hit = response.hits[0],
-                    msg = linksplit(esc(hit.rp)).join(''),
+                    msg = linksplit(hit.rp).join(''),
                     tr = unix2iso(hit.ts),
                     tu = unix2iso(t.lmod),
                     diff = parseInt(t.lmod) - parseInt(hit.ts),
@@ -1047,7 +1045,7 @@ function up2k_init(have_crypto) {
             if (response.name !== t.name) {
                 // file exists; server renamed us
                 t.name = response.name;
-                pvis.seth(t.n, 0, linksplit(esc(t.purl + t.name)).join(' '));
+                pvis.seth(t.n, 0, linksplit(t.purl + t.name).join(' '));
             }
 
             var chunksize = get_chunksize(t.size),
@@ -1242,6 +1240,10 @@ function up2k_init(have_crypto) {
     onresize();
 
     function desc_show(e) {
+        var cfg = sread('tooltips');
+        if (cfg !== null && cfg != '1')
+            return;
+
         var msg = this.getAttribute('alt'),
             cdesc = ebi('u2cdesc');
 
@@ -1310,14 +1312,21 @@ function up2k_init(have_crypto) {
 
     function set_fsearch(new_state) {
         var perms = document.body.getAttribute('perms'),
-            read_only = false;
+            fixed = false;
 
         if (!ebi('fsearch')) {
             new_state = false;
         }
-        else if (perms && perms.indexOf('write') === -1) {
-            new_state = true;
-            read_only = true;
+        else if (perms) {
+            perms = perms.split(' ');
+            if (!has(perms, 'write')) {
+                new_state = true;
+                fixed = true;
+            }
+            if (!has(perms, 'read')) {
+                new_state = false;
+                fixed = true;
+            }
         }
 
         if (new_state !== undefined) {
@@ -1326,7 +1335,7 @@ function up2k_init(have_crypto) {
         }
 
         try {
-            QS('label[for="fsearch"]').style.opacity = read_only ? '0' : '1';
+            QS('label[for="fsearch"]').style.display = QS('#fsearch').style.display = fixed ? 'none' : '';
         }
         catch (ex) { }
 
@@ -22,6 +22,7 @@
 #u2err.msg {
     color: #999;
     padding: .5em;
+    font-size: .9em;
 }
 #u2btn {
     color: #eee;
@@ -96,6 +97,7 @@
 #u2cards {
     padding: 1em 0 .3em 1em;
     margin: 1.5em auto -2.5em auto;
+    white-space: nowrap;
     text-align: center;
     overflow: hidden;
 }
@@ -36,7 +36,7 @@
 
 <table id="u2conf">
     <tr>
-        <td>parallel uploads:</td>
+        <td><br />parallel uploads:</td>
         <td rowspan="2">
             <input type="checkbox" id="multitask" />
             <label for="multitask" alt="continue hashing other files while uploading">🏃</label>
@@ -61,7 +61,7 @@
         <td>
             <a href="#" id="nthread_sub">–</a><input
                 class="txtbox" id="nthread" value="2"/><a
-                href="#" id="nthread_add">+</a>
+                href="#" id="nthread_add">+</a><br />
         </td>
     </tr>
 </table>
@@ -99,5 +99,5 @@
 </table>
 
 <p id="u2foot"></p>
-<p id="u2footfoot">( you can use the <a href="#" id="u2nope">basic uploader</a> if you don't need lastmod timestamps, resumable uploads, or progress bars )</p>
+<p id="u2footfoot" data-perm="write">( you can use the <a href="#" id="u2nope">basic uploader</a> if you don't need lastmod timestamps, resumable uploads, or progress bars )</p>
 </div>
@@ -135,7 +135,7 @@ function clmod(obj, cls, add) {
 
 
 function sortfiles(nodes) {
-    var sopts = jread('fsort', [["lead", -1, ""], ["href", 1, ""]]);
+    var sopts = jread('fsort', [["href", 1, ""]]);
 
     try {
         var is_srch = false;
@@ -152,6 +152,9 @@ function sortfiles(nodes) {
             if (!name)
                 continue;
 
+            if (name == 'ts')
+                typ = 'int';
+
             if (name.indexOf('tags/') === 0) {
                 name = name.slice(5);
                 for (var b = 0, bb = nodes.length; b < bb; b++)
@@ -163,8 +166,12 @@ function sortfiles(nodes) {
 
                 if ((v + '').indexOf('<a ') === 0)
                     v = v.split('>')[1];
-                else if (name == "href" && v)
+                else if (name == "href" && v) {
+                    if (v.slice(-1) == '/')
+                        v = '\t' + v;
+
                     v = uricom_dec(v)[0]
+                }
 
                 nodes[b]._sv = v;
             }
@@ -198,6 +205,8 @@ function sortfiles(nodes) {
     }
     catch (ex) {
         console.log("failed to apply sort config: " + ex);
+        console.log("resetting fsort " + sread('fsort'))
+        localStorage.removeItem('fsort');
     }
     return nodes;
 }
@@ -349,12 +358,16 @@ function linksplit(rp) {
             link = rp.slice(0, ofs + 1);
             rp = rp.slice(ofs + 1);
         }
-        var vlink = link;
-        if (link.indexOf('/') !== -1)
-            vlink = link.slice(0, -1) + '<span>/</span>';
+        var vlink = esc(link),
+            elink = uricom_enc(link);
 
-        ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
-        apath += link;
+        if (link.indexOf('/') !== -1) {
+            vlink = vlink.slice(0, -1) + '<span>/</span>';
+            elink = elink.slice(0, -3) + '/';
+        }
+
+        ret.push('<a href="' + apath + elink + '">' + vlink + '</a>');
+        apath += elink;
     }
     return ret;
 }
@@ -456,11 +469,15 @@ function jwrite(key, val) {
 }
 
 function icfg_get(name, defval) {
+    return parseInt(fcfg_get(name, defval));
+}
+
+function fcfg_get(name, defval) {
     var o = ebi(name);
 
-    var val = parseInt(sread(name));
+    var val = parseFloat(sread(name));
     if (isNaN(val))
-        return parseInt(o ? o.value : defval);
+        return parseFloat(o ? o.value : defval);
 
     if (o)
         o.value = val;
@@ -1,29 +1,32 @@
 <!--
-  save this as .epilogue.html inside a
-  write-only folder to declutter the UI
+  save this as .epilogue.html inside a write-only folder to declutter the UI, makes it look like
+  https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png
 -->
 
 <style>
 
 /* make the up2k ui REALLY minimal by hiding a bunch of stuff: */
 
-#ops, #tree, #path, /* main tabs and navigators (tree/breadcrumbs) */
+#ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */
 
 #u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
 
 #u2cards /* and the upload progress tabs */
 
-{display:none!important} /* do it! */
+{display: none !important} /* do it! */
 
 
 /* add some margins because now it's weird */
 .opview {margin-top: 2.5em}
-#op_up2k {margin-top: 5em}
+#op_up2k {margin-top: 3em}
 
 /* and embiggen the upload button */
 #u2conf #u2btn, #u2btn {padding:1.5em 0}
+
+/* adjust the button area a bit */
+#u2conf.has_btn {width: 35em !important; margin: 5em auto}
+
 </style>
 
 <a href="#" onclick="this.parentNode.innerHTML='';">show advanced options</a>
82
docs/nuitka.txt
Normal file
82
docs/nuitka.txt
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
# recipe for building an exe with nuitka (extreme jank edition)
|
||||||
|
#
|
||||||
|
# NOTE: win7 and win10 builds both work on win10 but
|
||||||
|
# on win7 they immediately c0000005 in kernelbase.dll
|
||||||
|
#
|
||||||
|
# first install python-3.6.8-amd64.exe
|
||||||
|
# [x] add to path
|
||||||
|
#
|
||||||
|
# copypaste the rest of this file into cmd
|
||||||
|
|
||||||
|
rem from pypi
|
||||||
|
cd \users\ed\downloads
|
||||||
|
python -m pip install --user Nuitka-0.6.14.7.tar.gz
|
||||||
|
|
||||||
|
rem https://github.com/brechtsanders/winlibs_mingw/releases/download/10.2.0-11.0.0-8.0.0-r5/winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip
|
||||||
|
mkdir C:\Users\ed\AppData\Local\Nuitka\
|
||||||
|
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\
|
||||||
|
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\
|
||||||
|
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\
|
||||||
|
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\10.2.0-11.0.0-8.0.0-r5\
|
||||||
|
copy c:\users\ed\downloads\winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\gcc\x86_64\10.2.0-11.0.0-8.0.0-r5\winlibs-x86_64-posix-seh-gcc-10.2.0-llvm-11.0.0-mingw-w64-8.0.0-r5.zip
|
||||||
|
|
||||||
|
rem https://github.com/ccache/ccache/releases/download/v3.7.12/ccache-3.7.12-windows-32.zip
|
||||||
|
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\
|
||||||
|
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\v3.7.12\
|
||||||
|
copy c:\users\ed\downloads\ccache-3.7.12-windows-32.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\ccache\v3.7.12\ccache-3.7.12-windows-32.zip
|
||||||
|
|
||||||
|
rem https://dependencywalker.com/depends22_x64.zip
|
||||||
|
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\
|
||||||
|
mkdir C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\x86_64\
|
||||||
|
copy c:\users\ed\downloads\depends22_x64.zip C:\Users\ed\AppData\Local\Nuitka\Nuitka\depends\x86_64\depends22_x64.zip
|
||||||
|
|
||||||
|
cd \
|
||||||
|
rd /s /q %appdata%\..\local\temp\pe-copyparty
|
||||||
|
cd \users\ed\downloads
|
||||||
|
python copyparty-sfx.py -h
|
||||||
|
cd %appdata%\..\local\temp\pe-copyparty\copyparty
|
||||||
|
|
||||||
|
python
|
||||||
|
import os, re
|
||||||
|
os.rename('../dep-j2/jinja2', '../jinja2')
|
||||||
|
os.rename('../dep-j2/markupsafe', '../markupsafe')
|
||||||
|
|
||||||
|
print("# nuitka dies if .__init__.stuff is imported")
|
||||||
|
with open('__init__.py','r',encoding='utf-8') as f:
|
||||||
|
t1 = f.read()
|
||||||
|
|
||||||
|
with open('util.py','r',encoding='utf-8') as f:
|
||||||
|
t2 = f.read().split('\n')[3:]
|
||||||
|
|
||||||
|
t2 = [x for x in t2 if 'from .__init__' not in x]
|
||||||
|
t = t1 + '\n'.join(t2)
|
||||||
|
with open('__init__.py','w',encoding='utf-8') as f:
|
||||||
|
f.write('\n')
|
||||||
|
|
||||||
|
with open('util.py','w',encoding='utf-8') as f:
|
||||||
|
f.write(t)
|
||||||
|
|
||||||
|
print("# local-imports fail, prefix module names")
|
||||||
|
ptn = re.compile(r'^( *from )(\.[^ ]+ import .*)')
|
||||||
|
for d, _, fs in os.walk('.'):
|
||||||
|
for f in fs:
|
||||||
|
fp = os.path.join(d, f)
|
||||||
|
if not fp.endswith('.py'):
|
||||||
|
continue
|
||||||
|
t = ''
|
||||||
|
with open(fp,'r',encoding='utf-8') as f:
|
||||||
|
for ln in [x.rstrip('\r\n') for x in f]:
|
||||||
|
m = ptn.match(ln)
|
||||||
|
if not m:
|
||||||
|
t += ln + '\n'
|
||||||
|
continue
|
||||||
|
p1, p2 = m.groups()
|
||||||
|
t += "{}copyparty{}\n".format(p1, p2).replace("__init__", "util")
|
||||||
|
with open(fp,'w',encoding='utf-8') as f:
|
||||||
|
f.write(t)
|
||||||
|
|
||||||
|
exit()
|
||||||
|
|
||||||
|
cd ..
|
||||||
|
|
||||||
|
rd /s /q bout & python -m nuitka --standalone --onefile --windows-onefile-tempdir --python-flag=no_site --assume-yes-for-downloads --include-data-dir=copyparty\web=copyparty\web --include-data-dir=copyparty\res=copyparty\res --run --output-dir=bout --mingw64 --include-package=markupsafe --include-package=jinja2 copyparty
|
||||||
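
The nuitka recipe above (docs/nuitka.txt) rewrites copyparty's relative imports into absolute ones before compiling. A quick demo, not part of the recipe, of what that regex substitution does to a single line; the sample import is hypothetical:

```python
# demo of the relative-import rewrite used in the nuitka recipe above
import re

ptn = re.compile(r'^( *from )(\.[^ ]+ import .*)')

line = "from .authsrv import AuthSrv"   # hypothetical source line
m = ptn.match(line)
if m:
    p1, p2 = m.groups()
    line = "{}copyparty{}".format(p1, p2).replace("__init__", "util")

print(line)  # from copyparty.authsrv import AuthSrv
```
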
@@ -163,7 +163,7 @@ find .. -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
 find .. -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
 
 echo use smol web deps
-rm -f copyparty/web/deps/*.full.* copyparty/web/{Makefile,splash.js}
+rm -f copyparty/web/deps/*.full.* copyparty/web/Makefile
 
 # it's fine dw
 grep -lE '\.full\.(js|css)' copyparty/web/* |
@@ -17,14 +17,15 @@ __license__ = "MIT"
 __url__ = "https://github.com/9001/copyparty/"
 
 
-def get_spd(nbyte, nsec):
+def get_spd(nbyte, nfiles, nsec):
     if not nsec:
-        return "0.000 MB 0.000 sec 0.000 MB/s"
+        return "0.000 MB 0 files 0.000 sec 0.000 MB/s 0.000 f/s"
 
     mb = nbyte / (1024 * 1024.0)
     spd = mb / nsec
+    nspd = nfiles / nsec
 
-    return f"{mb:.3f} MB {nsec:.3f} sec {spd:.3f} MB/s"
+    return f"{mb:.3f} MB {nfiles} files {nsec:.3f} sec {spd:.3f} MB/s {nspd:.3f} f/s"
 
 
 class Inf(object):
@@ -36,6 +37,7 @@ class Inf(object):
         self.mtx_reports = threading.Lock()
 
         self.n_byte = 0
+        self.n_file = 0
        self.n_sec = 0
         self.n_done = 0
         self.t0 = t0
@@ -63,7 +65,8 @@ class Inf(object):
                 continue
 
             msgs = msgs[-64:]
-            msgs = [f"{get_spd(self.n_byte, self.n_sec)} {x}" for x in msgs]
+            spd = get_spd(self.n_byte, len(self.reports), self.n_sec)
+            msgs = [f"{spd} {x}" for x in msgs]
             print("\n".join(msgs))
 
     def report(self, fn, n_byte, n_sec):
@@ -131,8 +134,9 @@ def main():
 
     num_threads = 8
     read_sz = 32 * 1024
+    targs = (q, inf, read_sz)
     for _ in range(num_threads):
-        thr = threading.Thread(target=worker, args=(q, inf, read_sz,))
+        thr = threading.Thread(target=worker, args=targs)
         thr.daemon = True
         thr.start()
 
@@ -151,14 +155,14 @@ def main():
     log = inf.reports
     log.sort()
     for nbyte, nsec, fn in log[-64:]:
-        print(f"{get_spd(nbyte, nsec)} {fn}")
+        spd = get_spd(nbyte, len(log), nsec)
+        print(f"{spd} {fn}")
 
     print()
     print("\n".join(inf.errors))
 
-    print(get_spd(inf.n_byte, t2 - t0))
+    print(get_spd(inf.n_byte, len(log), t2 - t0))
 
 
 if __name__ == "__main__":
     main()
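
For reference, a worked example (not part of the diff) of the extended `get_spd` summary with made-up numbers:

```python
# sample of the new per-file throughput summary
def get_spd(nbyte, nfiles, nsec):
    mb = nbyte / (1024 * 1024.0)
    return f"{mb:.3f} MB {nfiles} files {nsec:.3f} sec {mb / nsec:.3f} MB/s {nfiles / nsec:.3f} f/s"

print(get_spd(8 * 1024 * 1024, 4, 2.0))
# 8.000 MB 4 files 2.000 sec 4.000 MB/s 2.000 f/s
```
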
|
||||||
|
|||||||
setup.py (55 changed lines)
@@ -5,22 +5,7 @@ from __future__ import print_function
 import os
 import sys
 from shutil import rmtree
+from setuptools import setup, Command, find_packages
-setuptools_available = True
-try:
-    # need setuptools to build wheel
-    from setuptools import setup, Command, find_packages
-
-except ImportError:
-    # works in a pinch
-    setuptools_available = False
-    from distutils.core import setup, Command
-
-from distutils.spawn import spawn
-
-if "bdist_wheel" in sys.argv and not setuptools_available:
-    print("cannot build wheel without setuptools")
-    sys.exit(1)
 
 
 NAME = "copyparty"
@@ -100,9 +85,8 @@ args = {
     "author_email": "copyparty@ocv.me",
     "url": "https://github.com/9001/copyparty",
     "license": "MIT",
-    "data_files": data_files,
     "classifiers": [
-        "Development Status :: 3 - Alpha",
+        "Development Status :: 4 - Beta",
         "License :: OSI Approved :: MIT License",
         "Programming Language :: Python",
         "Programming Language :: Python :: 2",
@@ -120,35 +104,16 @@ args = {
         "Environment :: Console",
         "Environment :: No Input/Output (Daemon)",
         "Topic :: Communications :: File Sharing",
+        "Topic :: Internet :: WWW/HTTP :: HTTP Servers",
     ],
+    "include_package_data": True,
+    "data_files": data_files,
+    "packages": find_packages(),
+    "install_requires": ["jinja2"],
+    "extras_require": {"thumbnails": ["Pillow"], "audiotags": ["mutagen"]},
+    "entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]},
+    "scripts": ["bin/copyparty-fuse.py"],
     "cmdclass": {"clean2": clean2},
 }
 
 
-if setuptools_available:
-    args.update(
-        {
-            "packages": find_packages(),
-            "install_requires": ["jinja2"],
-            "extras_require": {"thumbnails": ["Pillow"]},
-            "include_package_data": True,
-            "entry_points": {
-                "console_scripts": ["copyparty = copyparty.__main__:main"]
-            },
-            "scripts": ["bin/copyparty-fuse.py"],
-        }
-    )
-else:
-    args.update(
-        {
-            "packages": ["copyparty", "copyparty.stolen"],
-            "scripts": ["bin/copyparty-fuse.py"],
-        }
-    )
-
-
-# import pprint
-# pprint.PrettyPrinter().pprint(args)
-# sys.exit(0)
-
 setup(**args)