mirror of https://github.com/9001/copyparty.git (synced 2025-10-23 16:14:10 +00:00)

Compare commits: 138 commits

5649d26077 92f923effe 0d46d548b9 062df3f0c3 789fb53b8e 351db5a18f
aabbd271c8 aae8e0171e 45827a2458 726030296f 6659ab3881 c6a103609e
c6b3f035e5 2b0a7e378e b75ce909c8 229c3f5dab ec73094506 c7650c9326
d94c6d4e72 3cc8760733 a2f6973495 f8648fa651 177aa038df e0a14ec881
9366512f2f ea38b8041a f1870daf0d 9722441aad 9d014087f4 83b4038b85
1e0a448feb fb81de3b36 aa4f352301 f1a1c2ea45 6249bd4163 2579dc64ce
356512270a bed27f2b43 54013d861b ec100210dc 3ab1acf32c 8c28266418
7f8b8dcb92 6dd39811d4 35e2138e3e 239b4e9fe6 2fcd0e7e72 357347ce3a
36dc1107fb 0a3bbc4b4a 855b93dcf6 89b79ba267 f5651b7d94 1881019ede
caba4e974c bc3c9613bc 15a3ee252e be055961ae e3031bdeec 75917b9f7c
910732e02c 264b497681 372b949622 789a602914 093e955100 c32a89bebf
c0bebe9f9f 57579b2fe5 51d14a6b4d c50f1b64e5 98aaab02c5 0fc7973d8b
10362aa02e 0a8e759fe6 d70981cdd1 e08c03b886 56086e8984 1aa9033022
076e103d53 38c00ea8fc 415757af43 e72ed8c0ed 32f9c6b5bb 6251584ef6
f3e413bc28 6f6cc8f3f8 8b081e9e69 c8a510d10e 6f834f6679 cf2d6650ac
cd52dea488 6ea75df05d 4846e1e8d6 fc024f789d 473e773aea 48a2e1a353
6da63fbd79 5bec37fcee 3fd0ba0a31 241a143366 a537064da7 f3dfd24c92
fa0a7f50bb 44a78a7e21 6b75cbf747 e7b18ab9fe aa12830015 f156e00064
d53c212516 ca27f8587c 88ce008e16 081d2cc5d7 60ac68d000 fbe656957d
5534c78c17 a45a53fdce 972a56e738 5e03b3ca38 1078d933b4 d6bf300d80
a359d64d44 22396e8c33 5ded5a4516 79c7639aaf 5bbf875385 5e159432af
1d6ae409f6 9d729d3d1a 4dd5d4e1b7 acd8149479 b97a1088fa b77bed3324
a2b7c85a1f b28533f850 bd8c7e538a 89e48cff24 ae90a7b7b6 6fc1be04da
.vscode/launch.json (vendored): 17 changed lines

@@ -16,12 +16,9 @@
 "-e2ts",
 "-mtp",
 ".bpm=f,bin/mtag/audio-bpm.py",
-"-a",
-"ed:wark",
-"-v",
-"srv::r:aed:cnodupe",
-"-v",
-"dist:dist:r"
+"-aed:wark",
+"-vsrv::r:aed:cnodupe",
+"-vdist:dist:r"
 ]
 },
 {
@@ -43,5 +40,13 @@
 "${file}"
 ]
 },
+{
+"name": "Python: Current File",
+"type": "python",
+"request": "launch",
+"program": "${file}",
+"console": "integratedTerminal",
+"justMyCode": false
+},
 ]
 }
.vscode/launch.py (vendored): 28 changed lines

@@ -3,14 +3,16 @@
 # launches 10x faster than mspython debugpy
 # and is stoppable with ^C
 
+import re
 import os
 import sys
+
+print(sys.executable)
+
 import shlex
-
-sys.path.insert(0, os.getcwd())
-
 import jstyleson
-from copyparty.__main__ import main as copyparty
+import subprocess as sp
 
+
 with open(".vscode/launch.json", "r", encoding="utf-8") as f:
     tj = f.read()
@@ -25,11 +27,19 @@ except:
     pass
 
 argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv]
-try:
-    copyparty(["a"] + argv)
-except SystemExit as ex:
-    if ex.code:
-        raise
+
+if re.search(" -j ?[0-9]", " ".join(argv)):
+    argv = [sys.executable, "-m", "copyparty"] + argv
+    sp.check_call(argv)
+else:
+    sys.path.insert(0, os.getcwd())
+    from copyparty.__main__ import main as copyparty
+
+    try:
+        copyparty(["a"] + argv)
+    except SystemExit as ex:
+        if ex.code:
+            raise
 
 print("\n\033[32mokke\033[0m")
 sys.exit(1)
README.md: 126 changed lines

@@ -20,6 +20,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 
 * top
 * [quickstart](#quickstart)
+    * [on debian](#on-debian)
 * [notes](#notes)
 * [status](#status)
 * [bugs](#bugs)
@@ -37,6 +38,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * [other tricks](#other-tricks)
 * [searching](#searching)
 * [search configuration](#search-configuration)
+* [database location](#database-location)
 * [metadata from audio files](#metadata-from-audio-files)
 * [file parser plugins](#file-parser-plugins)
 * [complete examples](#complete-examples)
@@ -50,8 +52,10 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * [sfx](#sfx)
 * [sfx repack](#sfx-repack)
 * [install on android](#install-on-android)
-* [dev env setup](#dev-env-setup)
-* [how to release](#how-to-release)
+* [building](#building)
+* [dev env setup](#dev-env-setup)
+* [just the sfx](#just-the-sfx)
+* [complete release](#complete-release)
 * [todo](#todo)
 
 
@@ -61,19 +65,45 @@ download [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/do
 
 running the sfx without arguments (for example doubleclicking it on Windows) will give everyone full access to the current folder; see `-h` for help if you want accounts and volumes etc
 
+some recommended options:
+* `-e2dsa` enables general file indexing, see [search configuration](#search-configuration)
+* `-e2ts` enables audio metadata indexing (needs either FFprobe or mutagen), see [optional dependencies](#optional-dependencies)
+* `-v /mnt/music:/music:r:afoo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, with user `foo` as `a`dmin (read/write), password `bar`
+  * the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set
+  * replace `:r:afoo` with `:rfoo` to only make the folder readable by `foo` and nobody else
+  * in addition to `r`ead and `a`dmin, `w`rite makes a folder write-only, so cannot list/access files in it
+* `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access
+
 you may also want these, especially on servers:
 * [contrib/systemd/copyparty.service](contrib/systemd/copyparty.service) to run copyparty as a systemd service
 * [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)
 
 
+### on debian
+
+recommended steps to enable audio metadata and thumbnails (from images and videos):
+
+* as root, run the following:
+  `apt install python3 python3-pip python3-dev ffmpeg`
+
+* then, as the user which will be running copyparty (so hopefully not root), run this:
+  `python3 -m pip install --user -U Pillow pillow-avif-plugin`
+
+(skipped `pyheif-pillow-opener` because apparently debian is too old to build it)
+
+
 ## notes
 
+general:
+* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
+  * because no browsers currently implement the media-query to do this properly orz
+
+browser-specific:
 * iPhone/iPad: use Firefox to download files
 * Android-Chrome: increase "parallel uploads" for higher speed (android bug)
 * Android-Firefox: takes a while to select files (their fix for ☝️)
 * Desktop-Firefox: ~~may use gigabytes of RAM if your files are massive~~ *seems to be OK now*
-* paper-printing is affected by dark/light-mode! use lightmode for color, darkmode for grayscale
-  * because no browsers currently implement the media-query to do this properly orz
+* Desktop-Firefox: may stop you from deleting folders you've uploaded until you visit `about:memory` and click `Minimize memory usage`
 
 
 ## status
@@ -96,11 +126,12 @@ summary: all planned features work! now please enjoy the bloatening
 * ☑ FUSE client (read-only)
 * browser
 * ☑ tree-view
-* ☑ media player
+* ☑ audio player (with OS media controls)
 * ☑ thumbnails
 * ☑ images using Pillow
 * ☑ videos using FFmpeg
 * ☑ cache eviction (max-age; maybe max-size eventually)
+* ☑ image gallery
 * ☑ SPA (browse while uploading)
 * if you use the file-tree on the left only, not folders in the file list
 * server indexing
@@ -117,16 +148,19 @@ summary: all planned features work! now please enjoy the bloatening
 * Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
 * Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
 * Windows: python 2.7 cannot handle filenames with mojibake
+* MacOS: `--th-ff-jpg` may fix thumbnails using macports-FFmpeg
 
 ## general bugs
 
 * all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
 * cannot mount something at `/d1/d2/d3` unless `d2` exists inside `d1`
-* hiding the contents at url `/d1/d2/d3` using `-v :d1/d2/d3:cd2d` has the side-effect of creating databases (for files/tags) inside folders d1 and d2, and those databases take precedence over the main db at the top of the vfs - this means all files in d2 and below will be reindexed unless you already had a vfs entry at or below d2
 * probably more, pls let me know
 
 ## not my bugs
 
+* Windows: folders cannot be accessed if the name ends with `.`
+  * python or windows bug
+
 * Windows: msys2-python 3.8.6 occasionally throws "RuntimeError: release unlocked lock" when leaving a scoped mutex in up2k
 * this is an msys2 bug, the regular windows edition of python is fine
@@ -149,25 +183,28 @@ summary: all planned features work! now please enjoy the bloatening
 ## hotkeys
 
 the browser has the following hotkeys
+* `B` toggle breadcrumbs / directory tree
 * `I/K` prev/next folder
-* `P` parent folder
+* `M` parent folder
 * `G` toggle list / grid view
 * `T` toggle thumbnails / icons
 * when playing audio:
 * `0..9` jump to 10%..90%
 * `U/O` skip 10sec back/forward
 * `J/L` prev/next song
-* `J` also starts playing the folder
+* `P` play/pause (also starts playing the folder)
+* when tree-sidebar is open:
+* `A/D` adjust tree width
 * in the grid view:
 * `S` toggle multiselect
-* `A/D` zoom
+* shift+`A/D` zoom
 
 
 ## tree-mode
 
-by default there's a breadcrumbs path; you can replace this with a tree-browser sidebar thing by clicking the 🌲
+by default there's a breadcrumbs path; you can replace this with a tree-browser sidebar thing by clicking the `🌲` or pressing the `B` hotkey
 
-click `[-]` and `[+]` to adjust the size, and the `[a]` toggles if the tree should widen dynamically as you go deeper or stay fixed-size
+click `[-]` and `[+]` (or hotkeys `A`/`D`) to adjust the size, and the `[a]` toggles if the tree should widen dynamically as you go deeper or stay fixed-size
 
 
 ## thumbnails
@@ -176,6 +213,10 @@ click `[-]` and `[+]` to adjust the size, and the `[a]` toggles if the tree shou
 
 it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how destructive your users are
 
+images named `folder.jpg` and `folder.png` become the thumbnail of the folder they're in
+
+in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
+
 
 ## zip downloads
@@ -259,6 +300,8 @@ up2k has saved a few uploads from becoming corrupted in-transfer already; caught
 
 * you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`
 
+* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
+
 
 # searching
 
@@ -292,9 +335,29 @@ the same arguments can be set as volume flags, in addition to `d2d` and `d2t` fo
 * `-v ~/music::r:cd2d` disables **all** indexing, even if any `-e2*` are on
 * `-v ~/music::r:cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`
 
-`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those
+note:
+* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those
+* the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
 
-the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
+you can choose to only index filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash` or the volume-flag `cdhash`, this has the following consequences:
+* initial indexing is way faster, especially when the volume is on a networked disk
+* makes it impossible to [file-search](#file-search)
+* if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected
+
+if you set `--no-hash`, you can enable hashing for specific volumes using flag `cehash`
+
+
+## database location
+
+copyparty creates a subfolder named `.hist` inside each volume where it stores the database, thumbnails, and some other stuff
+
+this can instead be kept in a single place using the `--hist` argument, or the `hist=` volume flag, or a mix of both:
+* `--hist ~/.cache/copyparty -v ~/music::r:chist=-` sets `~/.cache/copyparty` as the default place to put volume info, but `~/music` gets the regular `.hist` subfolder (`-` restores default behavior)
+
+note:
+* markdown edits are always stored in a local `.hist` subdirectory
+* on windows the volflag path is cyglike, so `/c/temp` means `C:\temp` but use regular paths for `--hist`
+* you can use cygpaths for volumes too, `-v C:\Users::r` and `-v /c/users::r` both work
 
 
 ## metadata from audio files
@@ -401,7 +464,7 @@ quick summary of more eccentric web-browsers trying to view a directory index:
 
 copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uplaods:
 
-b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
+b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
 b512 <movie.mkv
 
 
|
|||||||
after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux
|
after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux
|
||||||
|
|
||||||
|
|
||||||
# dev env setup
|
# building
|
||||||
|
|
||||||
|
## dev env setup
|
||||||
|
|
||||||
|
mostly optional; if you need a working env for vscode or similar
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
python3 -m venv .venv
|
python3 -m venv .venv
|
||||||
. .venv/bin/activate
|
. .venv/bin/activate
|
||||||
pip install jinja2 # mandatory deps
|
pip install jinja2 # mandatory
|
||||||
pip install Pillow # thumbnail deps
|
pip install mutagen # audio metadata
|
||||||
|
pip install Pillow pyheif-pillow-opener pillow-avif-plugin # thumbnails
|
||||||
pip install black bandit pylint flake8 # vscode tooling
|
pip install black bandit pylint flake8 # vscode tooling
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
||||||
# how to release
|
## just the sfx
|
||||||
|
|
||||||
|
unless you need to modify something in the web-dependencies, it's faster to grab those from a previous release:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
rm -rf copyparty/web/deps
|
||||||
|
curl -L https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py >x.py
|
||||||
|
python3 x.py -h
|
||||||
|
rm x.py
|
||||||
|
mv /tmp/pe-copyparty/copyparty/web/deps/ copyparty/web/
|
||||||
|
```
|
||||||
|
|
||||||
|
then build the sfx using any of the following examples:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
./scripts/make-sfx.sh # both python and sh editions
|
||||||
|
./scripts/make-sfx.sh no-sh gz # just python with gzip
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## complete release
|
||||||
|
|
||||||
|
also builds the sfx so disregard the sfx section above
|
||||||
|
|
||||||
in the `scripts` folder:
|
in the `scripts` folder:
|
||||||
|
|
||||||
|
@@ -48,15 +48,16 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
 
 
 # [`dbtool.py`](dbtool.py)
-upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty recommends to wipe the DB and reindex because it now collects additional metadata during analysis, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
+upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty is incompatible with the old DB and automatically rebuilds the DB from scratch, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
 
-for that example (upgrading to v0.11.0), first move the old db aside, launch copyparty, let it rebuild the db until the point where it starts running mtp (colored messages as it adds the mtp tags), then CTRL-C and patch in the old mtp tags from the old db instead
+for that example (upgrading to v0.11.20), first launch the new version of copyparty like usual, let it make a backup of the old db and rebuild the new db until the point where it starts running mtp (colored messages as it adds the mtp tags), that's when you hit CTRL-C and patch in the old mtp tags from the old db instead
 
 so assuming you have `-mtp` parsers to provide the tags `key` and `.bpm`:
 
 ```
-~/bin/dbtool.py -ls up2k.db
-~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -cmp
-~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy key
-~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy .bpm -vac
+cd /mnt/nas/music/.hist
+~/src/copyparty/bin/dbtool.py -ls up2k.db
+~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -cmp
+~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy key
+~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy .bpm -vac
 ```
@@ -54,6 +54,12 @@ MACOS = platform.system() == "Darwin"
 info = log = dbg = None
 
 
+print("{} v{} @ {}".format(
+    platform.python_implementation(),
+    ".".join([str(x) for x in sys.version_info]),
+    sys.executable))
+
+
 try:
     from fuse import FUSE, FuseOSError, Operations
 except:
@@ -2,10 +2,13 @@
 
 import os
 import sys
+import time
+import shutil
 import sqlite3
 import argparse
 
-DB_VER = 3
+DB_VER1 = 3
+DB_VER2 = 4
 
 
 def die(msg):
@@ -45,18 +48,21 @@ def compare(n1, d1, n2, d2, verbose):
     nt = next(d1.execute("select count(w) from up"))[0]
     n = 0
     miss = 0
-    for w, rd, fn in d1.execute("select w, rd, fn from up"):
+    for w1, rd, fn in d1.execute("select w, rd, fn from up"):
         n += 1
         if n % 25_000 == 0:
             m = f"\033[36mchecked {n:,} of {nt:,} files in {n1} against {n2}\033[0m"
             print(m)
 
-        q = "select w from up where substr(w,1,16) = ?"
-        hit = d2.execute(q, (w[:16],)).fetchone()
+        if rd.split("/", 1)[0] == ".hist":
+            continue
+
+        q = "select w from up where rd = ? and fn = ?"
+        hit = d2.execute(q, (rd, fn)).fetchone()
         if not hit:
             miss += 1
             if verbose:
-                print(f"file in {n1} missing in {n2}: [{w}] {rd}/{fn}")
+                print(f"file in {n1} missing in {n2}: [{w1}] {rd}/{fn}")
 
     print(f" {miss} files in {n1} missing in {n2}\n")
 
@@ -64,15 +70,30 @@ def compare(n1, d1, n2, d2, verbose):
     n = 0
     miss = {}
     nmiss = 0
-    for w, k, v in d1.execute("select * from mt"):
+    for w1, k, v in d1.execute("select * from mt"):
         n += 1
         if n % 100_000 == 0:
             m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m"
             print(m)
 
-        v2 = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
-        if v2:
-            v2 = v2[0]
+        q = "select rd, fn from up where substr(w,1,16) = ?"
+        rd, fn = d1.execute(q, (w1,)).fetchone()
+        if rd.split("/", 1)[0] == ".hist":
+            continue
+
+        q = "select substr(w,1,16) from up where rd = ? and fn = ?"
+        w2 = d2.execute(q, (rd, fn)).fetchone()
+        if w2:
+            w2 = w2[0]
+
+        v2 = None
+        if w2:
+            v2 = d2.execute(
+                "select v from mt where w = ? and +k = ?", (w2, k)
+            ).fetchone()
+            if v2:
+                v2 = v2[0]
 
         # if v != v2 and v2 and k in [".bpm", "key"] and n2 == "src":
         #     print(f"{w} [{rd}/{fn}] {k} = [{v}] / [{v2}]")
@@ -99,9 +120,7 @@ def compare(n1, d1, n2, d2, verbose):
             miss[k] = 1
 
             if verbose:
-                q = "select rd, fn from up where substr(w,1,16) = ?"
-                rd, fn = d1.execute(q, (w,)).fetchone()
-                print(f"missing in {n2}: [{w}] [{rd}/{fn}] {k} = {v}")
+                print(f"missing in {n2}: [{w1}] [{rd}/{fn}] {k} = {v}")
 
     for k, v in sorted(miss.items()):
         if v:
@@ -114,24 +133,35 @@ def copy_mtp(d1, d2, tag, rm):
     nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0]
     n = 0
     ndone = 0
-    for w, k, v in d1.execute("select * from mt where k = ?", (tag,)):
+    for w1, k, v in d1.execute("select * from mt where k = ?", (tag,)):
         n += 1
         if n % 25_000 == 0:
            m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ndone} copied\033[0m"
            print(m)
 
-        hit = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
+        q = "select rd, fn from up where substr(w,1,16) = ?"
+        rd, fn = d1.execute(q, (w1,)).fetchone()
+        if rd.split("/", 1)[0] == ".hist":
+            continue
+
+        q = "select substr(w,1,16) from up where rd = ? and fn = ?"
+        w2 = d2.execute(q, (rd, fn)).fetchone()
+        if not w2:
+            continue
+
+        w2 = w2[0]
+        hit = d2.execute("select v from mt where w = ? and +k = ?", (w2, k)).fetchone()
        if hit:
            hit = hit[0]
 
        if hit != v:
            ndone += 1
            if hit is not None:
-                d2.execute("delete from mt where w = ? and +k = ?", (w, k))
+                d2.execute("delete from mt where w = ? and +k = ?", (w2, k))
 
-            d2.execute("insert into mt values (?,?,?)", (w, k, v))
+            d2.execute("insert into mt values (?,?,?)", (w2, k, v))
            if rm:
-                d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w,))
+                d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w2,))
 
    d2.commit()
    print(f"copied {ndone} {tag} tags over")
@@ -140,7 +170,7 @@ def copy_mtp(d1, d2, tag, rm):
 def main():
     os.system("")
     print()
 
     ap = argparse.ArgumentParser()
     ap.add_argument("db", help="database to work on")
     ap.add_argument("-src", metavar="DB", type=str, help="database to copy from")
@@ -168,6 +198,23 @@ def main():
     db = sqlite3.connect(ar.db)
     ds = sqlite3.connect(ar.src) if ar.src else None
 
+    # revert journals
+    for d, p in [[db, ar.db], [ds, ar.src]]:
+        if not d:
+            continue
+
+        pj = "{}-journal".format(p)
+        if not os.path.exists(pj):
+            continue
+
+        d.execute("create table foo (bar int)")
+        d.execute("drop table foo")
+
+    if ar.copy:
+        db.close()
+        shutil.copy2(ar.db, "{}.bak.dbtool.{:x}".format(ar.db, int(time.time())))
+        db = sqlite3.connect(ar.db)
+
     for d, n in [[ds, "src"], [db, "dst"]]:
         if not d:
             continue
@@ -176,8 +223,8 @@
         if ver == "corrupt":
             die("{} database appears to be corrupt, sorry")
 
-        if ver != DB_VER:
-            m = f"{n} db is version {ver}, this tool only supports version {DB_VER}, please upgrade it with copyparty first"
+        if ver < DB_VER1 or ver > DB_VER2:
+            m = f"{n} db is version {ver}, this tool only supports versions between {DB_VER1} and {DB_VER2}, please upgrade it with copyparty first"
             die(m)
 
     if ar.ls:
@@ -60,7 +60,7 @@ def main():
     try:
         det(tf)
     except:
-        pass
+        pass # mute
     finally:
         os.unlink(tf)
 
bin/mtag/audio-key-slicing.py: new executable file, 123 lines

@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+
+import re
+import os
+import sys
+import tempfile
+import subprocess as sp
+
+import keyfinder
+
+from copyparty.util import fsenc
+
+"""
+dep: github/mixxxdj/libkeyfinder
+dep: pypi/keyfinder
+dep: ffmpeg
+
+note: this is a janky edition of the regular audio-key.py,
+slicing the files at 20sec intervals and keeping 5sec from each,
+surprisingly accurate but still garbage (446 ok, 69 bad, 13% miss)
+
+it is fast tho
+"""
+
+
+def get_duration():
+    # TODO provide ffprobe tags to mtp as json
+
+    # fmt: off
+    dur = sp.check_output([
+        "ffprobe",
+        "-hide_banner",
+        "-v", "fatal",
+        "-show_streams",
+        "-show_format",
+        fsenc(sys.argv[1])
+    ])
+    # fmt: on
+
+    dur = dur.decode("ascii", "replace").split("\n")
+    dur = [x.split("=")[1] for x in dur if x.startswith("duration=")]
+    dur = [float(x) for x in dur if re.match(r"^[0-9\.,]+$", x)]
+    return list(sorted(dur))[-1] if dur else None
+
+
+def get_segs(dur):
+    # keep first 5s of each 20s,
+    # keep entire last segment
+    ofs = 0
+    segs = []
+    while True:
+        seg = [ofs, 5]
+        segs.append(seg)
+        if dur - ofs < 20:
+            seg[-1] = int(dur - seg[0])
+            break
+
+        ofs += 20
+
+    return segs
+
+
+def slice(tf):
+    dur = get_duration()
+    dur = min(dur, 600) # max 10min
+    segs = get_segs(dur)
+
+    # fmt: off
+    cmd = [
+        "ffmpeg",
+        "-nostdin",
+        "-hide_banner",
+        "-v", "fatal",
+        "-y"
+    ]
+
+    for seg in segs:
+        cmd.extend([
+            "-ss", str(seg[0]),
+            "-i", fsenc(sys.argv[1])
+        ])
+
+    filt = ""
+    for n, seg in enumerate(segs):
+        filt += "[{}:a:0]atrim=duration={}[a{}]; ".format(n, seg[1], n)
+
+    prev = "a0"
+    for n in range(1, len(segs)):
+        nxt = "b{}".format(n)
+        filt += "[{}][a{}]acrossfade=d=0.5[{}]; ".format(prev, n, nxt)
+        prev = nxt
+
+    cmd.extend([
+        "-filter_complex", filt[:-2],
+        "-map", "[{}]".format(nxt),
+        "-sample_fmt", "s16",
+        tf
+    ])
+    # fmt: on
+
+    # print(cmd)
+    sp.check_call(cmd)
+
+
+def det(tf):
+    slice(tf)
+    print(keyfinder.key(tf).camelot())
+
+
+def main():
+    with tempfile.NamedTemporaryFile(suffix=".flac", delete=False) as f:
+        f.write(b"h")
+        tf = f.name
+
+    try:
+        det(tf)
+    finally:
+        os.unlink(tf)
+        pass
+
+
+if __name__ == "__main__":
+    main()
@@ -1,18 +1,54 @@
 #!/usr/bin/env python
 
+import os
 import sys
+import tempfile
+import subprocess as sp
 import keyfinder
 
+from copyparty.util import fsenc
+
 """
 dep: github/mixxxdj/libkeyfinder
 dep: pypi/keyfinder
 dep: ffmpeg
-
-note: cannot fsenc
 """
 
-try:
-    print(keyfinder.key(sys.argv[1]).camelot())
-except:
-    pass
+# tried trimming the first/last 5th, bad idea,
+# misdetects 9a law field (Sphere Caliber) as 10b,
+# obvious when mixing 9a ghostly parapara ship
+
+
+def det(tf):
+    # fmt: off
+    sp.check_call([
+        "ffmpeg",
+        "-nostdin",
+        "-hide_banner",
+        "-v", "fatal",
+        "-y", "-i", fsenc(sys.argv[1]),
+        "-t", "300",
+        "-sample_fmt", "s16",
+        tf
+    ])
+    # fmt: on
+
+    print(keyfinder.key(tf).camelot())
+
+
+def main():
+    with tempfile.NamedTemporaryFile(suffix=".flac", delete=False) as f:
+        f.write(b"h")
+        tf = f.name
+
+    try:
+        det(tf)
+    except:
+        pass # mute
+    finally:
+        os.unlink(tf)
+
+
+if __name__ == "__main__":
+    main()
@@ -1,3 +1,8 @@
+# when running copyparty behind a reverse-proxy,
+# make sure that copyparty allows at least as many clients as the proxy does,
+# so run copyparty with -nc 512 if your nginx has the default limits
+# (worker_processes 1, worker_connections 512)
+
 upstream cpp {
     server 127.0.0.1:3923;
     keepalive 120;
@@ -23,7 +23,7 @@ from textwrap import dedent
 from .__init__ import E, WINDOWS, VT100, PY2
 from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
 from .svchub import SvcHub
-from .util import py_desc, align_tab, IMPLICATIONS
+from .util import py_desc, align_tab, IMPLICATIONS, alltrace
 
 HAVE_SSL = True
 try:
|
|||||||
print("\n".join(msg))
|
print("\n".join(msg))
|
||||||
|
|
||||||
|
|
||||||
|
def stackmon(fp, ival):
|
||||||
|
ctr = 0
|
||||||
|
while True:
|
||||||
|
ctr += 1
|
||||||
|
time.sleep(ival)
|
||||||
|
st = "{}, {}\n{}".format(ctr, time.time(), alltrace())
|
||||||
|
with open(fp, "wb") as f:
|
||||||
|
f.write(st.encode("utf-8", "replace"))
|
||||||
|
|
||||||
|
|
||||||
def run_argparse(argv, formatter):
|
def run_argparse(argv, formatter):
|
||||||
ap = argparse.ArgumentParser(
|
ap = argparse.ArgumentParser(
|
||||||
formatter_class=formatter,
|
formatter_class=formatter,
|
||||||
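the new `stackmon()` helper above periodically writes a full thread dump (via copyparty's own `alltrace()`) to a file, driven by the new `--stackmon P,S` flag further down; as a rough standalone sketch of the same technique using only the standard library (hypothetical code, not from the repo):

```python
# sketch: periodically dump every thread's stack to a file, stdlib-only
import sys
import time
import threading
import traceback


def dump_stacks(fp, ival):
    ctr = 0
    while True:
        ctr += 1
        time.sleep(ival)
        frames = sys._current_frames()  # {thread_id: top frame}
        lines = ["{}, {}".format(ctr, time.time())]
        for tid, frame in frames.items():
            lines.append("--- thread {} ---".format(tid))
            lines.extend(traceback.format_stack(frame))

        with open(fp, "w", encoding="utf-8") as f:
            f.write("\n".join(lines))


# run it as a daemon thread so it never blocks shutdown,
# same as the t.daemon = True / t.start() wiring later in main()
t = threading.Thread(target=dump_stacks, args=("stacks.txt", 30), daemon=True)
t.start()
```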
@@ -222,10 +232,6 @@
 "print,get" prints the data in the log and returns GET
 (leave out the ",get" to return an error instead)
 
---ciphers help = available ssl/tls ciphers,
---ssl-ver help = available ssl/tls versions,
-default is what python considers safe, usually >= TLS1
-
 values for --ls:
 "USR" is a user to browse as; * is anonymous, ** is all users
 "VOL" is a single volume to scan, default is * (all vols)
@@ -238,29 +244,51 @@
 --ls '**' # list all files which are possible to read
 --ls '**,*,ln' # check for dangerous symlinks
 --ls '**,*,ln,p,r' # check, then start normally if safe
+\033[0m
 """
 ),
 )
 # fmt: off
 ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
-ap.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
-ap.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
 ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
 ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
-ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account")
-ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume")
-ap.add_argument("-q", action="store_true", help="quiet")
+ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account, USER:PASS; example [ed:wark")
+ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
 ap.add_argument("-ed", action="store_true", help="enable ?dots")
 ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
 ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
-ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
-ap.add_argument("-nih", action="store_true", help="no info hostname")
-ap.add_argument("-nid", action="store_true", help="no info disk-usage")
 ap.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
-ap.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
 ap.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
-ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms")
-ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
+ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
+
+ap2 = ap.add_argument_group('network options')
+ap2.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
+ap2.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
+ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")
+
+ap2 = ap.add_argument_group('SSL/TLS options')
+ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
+ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
+ap2.add_argument("--ssl-ver", metavar="LIST", type=str, help="set allowed ssl/tls versions; [help] shows available versions; default is what your python version considers safe")
+ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ssl/tls ciphers; [help] shows available ciphers")
+ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
+ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
+
+ap2 = ap.add_argument_group('opt-outs')
+ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
+ap2.add_argument("-nih", action="store_true", help="no info hostname")
+ap2.add_argument("-nid", action="store_true", help="no info disk-usage")
+ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
+
+ap2 = ap.add_argument_group('safety options')
+ap2.add_argument("--ls", metavar="U[,V[,F]]", help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
+ap2.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
+
+ap2 = ap.add_argument_group('logging options')
+ap2.add_argument("-q", action="store_true", help="quiet")
+ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
+ap2.add_argument("--ihead", metavar="HEADER", action='append', help="dump incoming header")
+ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
+
 ap2 = ap.add_argument_group('admin panel options')
 ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
@@ -273,6 +301,7 @@
 ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
 ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
 ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
+ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs")
 ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
 ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval")
 ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
@@ -284,6 +313,8 @@
 ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
 ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
 ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
+ap2.add_argument("--hist", metavar="PATH", type=str, help="where to store volume state")
+ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
 ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead")
 ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
 ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
@@ -292,22 +323,14 @@
 ap2.add_argument("-mtp", metavar="M=[f,]bin", action="append", type=str, help="read tag M using bin")
 ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
 
-ap2 = ap.add_argument_group('SSL/TLS options')
-ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
-ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
-ap2.add_argument("--ssl-ver", metavar="LIST", type=str, help="ssl/tls versions to allow")
-ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
-ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
-ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
+ap2 = ap.add_argument_group('appearance options')
+ap2.add_argument("--css-browser", metavar="L", help="URL to additional CSS to include")
 
 ap2 = ap.add_argument_group('debug options')
-ap2.add_argument("--ls", metavar="U[,V[,F]]", help="scan all volumes")
-ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
 ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
 ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
 ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing")
-ap2.add_argument("--ihead", metavar="HEADER", action='append', help="dump incoming header")
-ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
+ap2.add_argument("--stackmon", metavar="P,S", help="write stacktrace to Path every S second")
 
 return ap.parse_args(args=argv[1:])
 # fmt: on
@@ -347,6 +370,16 @@
     except AssertionError:
         al = run_argparse(argv, Dodge11874)
 
+    if al.stackmon:
+        fp, f = al.stackmon.rsplit(",", 1)
+        f = int(f)
+        t = threading.Thread(
+            target=stackmon,
+            args=(fp, f),
+        )
+        t.daemon = True
+        t.start()
+
     # propagate implications
     for k1, k2 in IMPLICATIONS:
         if getattr(al, k1):
@@ -377,6 +410,9 @@
         + " (if you crash with codec errors then that is why)"
     )
 
+    if sys.version_info < (3, 6):
+        al.no_scandir = True
+
     # signal.signal(signal.SIGINT, sighandler)
 
     SvcHub(al).run()
@@ -1,8 +1,8 @@
 # coding: utf-8
 
-VERSION = (0, 11, 8)
+VERSION = (0, 11, 28)
 CODENAME = "the grid"
-BUILD_DT = (2021, 6, 6)
+BUILD_DT = (2021, 6, 28)
 
 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
@@ -5,10 +5,12 @@ import re
 import os
 import sys
 import stat
+import base64
+import hashlib
 import threading
 
-from .__init__ import PY2, WINDOWS
-from .util import IMPLICATIONS, undot, Pebkac, fsdec, fsenc, statdir, nuprint
+from .__init__ import WINDOWS
+from .util import IMPLICATIONS, uncyg, undot, Pebkac, fsdec, fsenc, statdir, nuprint
 
 
 class VFS(object):
@@ -22,7 +24,15 @@ class VFS(object):
         self.uadm = uadm  # users who are regular admins
         self.flags = flags  # config switches
         self.nodes = {}  # child nodes
-        self.all_vols = {vpath: self}  # flattened recursive
+        self.histtab = None  # all realpath->histpath
+        self.dbv = None  # closest full/non-jump parent
+
+        if realpath:
+            self.histpath = os.path.join(realpath, ".hist")  # db / thumbcache
+            self.all_vols = {vpath: self}  # flattened recursive
+        else:
+            self.histpath = None
+            self.all_vols = None
 
     def __repr__(self):
         return "VFS({})".format(
@@ -32,9 +42,12 @@ class VFS(object):
             )
         )
 
-    def _trk(self, vol):
-        self.all_vols[vol.vpath] = vol
-        return vol
+    def get_all_vols(self, outdict):
+        if self.realpath:
+            outdict[self.vpath] = self
+
+        for v in self.nodes.values():
+            v.get_all_vols(outdict)
 
     def add(self, src, dst):
         """get existing, or add new path to the vfs"""
@@ -46,19 +59,19 @@ class VFS(object):
             name, dst = dst.split("/", 1)
             if name in self.nodes:
                 # exists; do not manipulate permissions
-                return self._trk(self.nodes[name].add(src, dst))
+                return self.nodes[name].add(src, dst)
 
             vn = VFS(
-                "{}/{}".format(self.realpath, name),
+                os.path.join(self.realpath, name) if self.realpath else None,
                 "{}/{}".format(self.vpath, name).lstrip("/"),
                 self.uread,
                 self.uwrite,
                 self.uadm,
-                self.flags,
+                self._copy_flags(name),
             )
-            self._trk(vn)
+            vn.dbv = self.dbv or self
             self.nodes[name] = vn
-            return self._trk(vn.add(src, dst))
+            return vn.add(src, dst)
 
         if dst in self.nodes:
             # leaf exists; return as-is
@@ -67,8 +80,26 @@ class VFS(object):
         # leaf does not exist; create and keep permissions blank
         vp = "{}/{}".format(self.vpath, dst).lstrip("/")
         vn = VFS(src, vp)
+        vn.dbv = self.dbv or self
         self.nodes[dst] = vn
-        return self._trk(vn)
+        return vn
+
+    def _copy_flags(self, name):
+        flags = {k: v for k, v in self.flags.items()}
+        hist = flags.get("hist")
+        if hist and hist != "-":
+            flags["hist"] = "{}/{}".format(hist.rstrip("/"), name)
+
+        return flags
+
+    def bubble_flags(self):
+        if self.dbv:
+            for k, v in self.dbv.flags.items():
+                if k not in ["hist"]:
+                    self.flags[k] = v
+
+        for v in self.nodes.values():
+            v.bubble_flags()
 
     def _find(self, vpath):
         """return [vfs,remainder]"""
@@ -96,6 +127,7 @@ class VFS(object):
         ]
 
     def get(self, vpath, uname, will_read, will_write):
+        # type: (str, str, bool, bool) -> tuple[VFS, str]
         """returns [vfsnode,fs_remainder] if user has the requested permissions"""
         vn, rem = self._find(vpath)
 
@@ -107,6 +139,15 @@ class VFS(object):
 
         return vn, rem
 
+    def get_dbv(self, vrem):
+        dbv = self.dbv
+        if not dbv:
+            return self, vrem
+
+        vrem = [self.vpath[len(dbv.vpath) + 1 :], vrem]
+        vrem = "/".join([x for x in vrem if x])
+        return dbv, vrem
+
     def canonical(self, rem):
         """returns the canonical path (fully-resolved absolute fs path)"""
         rp = self.realpath
@@ -136,6 +177,7 @@ class VFS(object):
         return os.path.realpath(rp)
 
     def ls(self, rem, uname, scandir, incl_wo=False, lstat=False):
+        # type: (str, str, bool, bool, bool) -> tuple[str, str, dict[str, VFS]]
         """return user-readable [fsdir,real,virt] items at vpath"""
         virt_vis = {}  # nodes readable by user
         abspath = self.canonical(rem)
@@ -156,13 +198,21 @@ class VFS(object):
 
         return [abspath, real, virt_vis]
 
-    def walk(self, rel, rem, uname, dots, scandir, lstat=False):
+    def walk(self, rel, rem, seen, uname, dots, scandir, lstat):
         """
         recursively yields from ./rem;
         rel is a unix-style user-defined vpath (not vfs-related)
         """
 
-        fsroot, vfs_ls, vfs_virt = self.ls(rem, uname, scandir, False, lstat)
+        fsroot, vfs_ls, vfs_virt = self.ls(
+            rem, uname, scandir, incl_wo=False, lstat=lstat
+        )
+
+        if seen and not fsroot.startswith(seen[-1]) and fsroot in seen:
+            print("bailing from symlink loop,\n {}\n {}".format(seen[-1], fsroot))
+            return
+
+        seen = seen[:] + [fsroot]
         rfiles = [x for x in vfs_ls if not stat.S_ISDIR(x[1].st_mode)]
         rdirs = [x for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
|
||||||
|
|
||||||
@@ -177,7 +227,7 @@ class VFS(object):
|
|||||||
|
|
||||||
wrel = (rel + "/" + rdir).lstrip("/")
|
wrel = (rel + "/" + rdir).lstrip("/")
|
||||||
wrem = (rem + "/" + rdir).lstrip("/")
|
wrem = (rem + "/" + rdir).lstrip("/")
|
||||||
for x in self.walk(wrel, wrem, uname, scandir, lstat):
|
for x in self.walk(wrel, wrem, seen, uname, dots, scandir, lstat):
|
||||||
yield x
|
yield x
|
||||||
|
|
||||||
for n, vfs in sorted(vfs_virt.items()):
|
for n, vfs in sorted(vfs_virt.items()):
|
||||||
@@ -185,14 +235,16 @@ class VFS(object):
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
wrel = (rel + "/" + n).lstrip("/")
|
wrel = (rel + "/" + n).lstrip("/")
|
||||||
for x in vfs.walk(wrel, "", uname, scandir, lstat):
|
for x in vfs.walk(wrel, "", seen, uname, dots, scandir, lstat):
|
||||||
yield x
|
yield x
|
||||||
|
|
||||||
def zipgen(self, vrem, flt, uname, dots, scandir):
|
def zipgen(self, vrem, flt, uname, dots, scandir):
|
||||||
if flt:
|
if flt:
|
||||||
flt = {k: True for k in flt}
|
flt = {k: True for k in flt}
|
||||||
|
|
||||||
for vpath, apath, files, rd, vd in self.walk("", vrem, uname, dots, scandir):
|
for vpath, apath, files, rd, vd in self.walk(
|
||||||
|
"", vrem, [], uname, dots, scandir, False
|
||||||
|
):
|
||||||
if flt:
|
if flt:
|
||||||
files = [x for x in files if x[0] in flt]
|
files = [x for x in files if x[0] in flt]
|
||||||
|
|
||||||
@@ -261,7 +313,8 @@ class AuthSrv(object):
|
|||||||
self.reload()
|
self.reload()
|
||||||
|
|
||||||
def log(self, msg, c=0):
|
def log(self, msg, c=0):
|
||||||
self.log_func("auth", msg, c)
|
if self.log_func:
|
||||||
|
self.log_func("auth", msg, c)
|
||||||
|
|
||||||
def laggy_iter(self, iterable):
|
def laggy_iter(self, iterable):
|
||||||
"""returns [value,isFinalValue]"""
|
"""returns [value,isFinalValue]"""
|
||||||
@@ -386,6 +439,9 @@ class AuthSrv(object):
|
|||||||
raise Exception("invalid -v argument: [{}]".format(v_str))
|
raise Exception("invalid -v argument: [{}]".format(v_str))
|
||||||
|
|
||||||
src, dst, perms = m.groups()
|
src, dst, perms = m.groups()
|
||||||
|
if WINDOWS:
|
||||||
|
src = uncyg(src)
|
||||||
|
|
||||||
# print("\n".join([src, dst, perms]))
|
# print("\n".join([src, dst, perms]))
|
||||||
src = fsdec(os.path.abspath(fsenc(src)))
|
src = fsdec(os.path.abspath(fsenc(src)))
|
||||||
dst = dst.strip("/")
|
dst = dst.strip("/")
|
||||||
@@ -413,12 +469,23 @@ class AuthSrv(object):
|
|||||||
print(m.format(cfg_fn, self.line_ctr))
|
print(m.format(cfg_fn, self.line_ctr))
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
# case-insensitive; normalize
|
||||||
|
if WINDOWS:
|
||||||
|
cased = {}
|
||||||
|
for k, v in mount.items():
|
||||||
|
try:
|
||||||
|
cased[k] = fsdec(os.path.realpath(fsenc(v)))
|
||||||
|
except:
|
||||||
|
cased[k] = v
|
||||||
|
|
||||||
|
mount = cased
|
||||||
|
|
||||||
if not mount:
|
if not mount:
|
||||||
# -h says our defaults are CWD at root and read/write for everyone
|
# -h says our defaults are CWD at root and read/write for everyone
|
||||||
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
|
vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
|
||||||
elif "" not in mount:
|
elif "" not in mount:
|
||||||
# there's volumes but no root; make root inaccessible
|
# there's volumes but no root; make root inaccessible
|
||||||
vfs = VFS(os.path.abspath("."), "")
|
vfs = VFS(None, "")
|
||||||
vfs.flags["d2d"] = True
|
vfs.flags["d2d"] = True
|
||||||
|
|
||||||
maxdepth = 0
|
maxdepth = 0
|
||||||
@@ -439,6 +506,10 @@ class AuthSrv(object):
|
|||||||
v.uwrite = mwrite[dst]
|
v.uwrite = mwrite[dst]
|
||||||
v.uadm = madm[dst]
|
v.uadm = madm[dst]
|
||||||
v.flags = mflags[dst]
|
v.flags = mflags[dst]
|
||||||
|
v.dbv = None
|
||||||
|
|
||||||
|
vfs.all_vols = {}
|
||||||
|
vfs.get_all_vols(vfs.all_vols)
|
||||||
|
|
||||||
missing_users = {}
|
missing_users = {}
|
||||||
for d in [mread, mwrite]:
|
for d in [mread, mwrite]:
|
||||||
@@ -455,6 +526,67 @@ class AuthSrv(object):
|
|||||||
)
|
)
|
||||||
raise Exception("invalid config")
|
raise Exception("invalid config")
|
||||||
|
|
||||||
|
promote = []
|
||||||
|
demote = []
|
||||||
|
for vol in vfs.all_vols.values():
|
||||||
|
hid = hashlib.sha512(fsenc(vol.realpath)).digest()
|
||||||
|
hid = base64.b32encode(hid).decode("ascii").lower()
|
||||||
|
vflag = vol.flags.get("hist")
|
||||||
|
if vflag == "-":
|
||||||
|
pass
|
||||||
|
elif vflag:
|
||||||
|
vol.histpath = uncyg(vflag) if WINDOWS else vflag
|
||||||
|
elif self.args.hist:
|
||||||
|
for nch in range(len(hid)):
|
||||||
|
hpath = os.path.join(self.args.hist, hid[: nch + 1])
|
||||||
|
try:
|
||||||
|
os.makedirs(hpath)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
powner = os.path.join(hpath, "owner.txt")
|
||||||
|
try:
|
||||||
|
with open(powner, "rb") as f:
|
||||||
|
owner = f.read().rstrip()
|
||||||
|
except:
|
||||||
|
owner = None
|
||||||
|
|
||||||
|
me = fsenc(vol.realpath).rstrip()
|
||||||
|
if owner not in [None, me]:
|
||||||
|
continue
|
||||||
|
|
||||||
|
if owner is None:
|
||||||
|
with open(powner, "wb") as f:
|
||||||
|
f.write(me)
|
||||||
|
|
||||||
|
vol.histpath = hpath
|
||||||
|
break
|
||||||
|
|
||||||
|
vol.histpath = os.path.realpath(vol.histpath)
|
||||||
|
if vol.dbv:
|
||||||
|
if os.path.exists(os.path.join(vol.histpath, "up2k.db")):
|
||||||
|
promote.append(vol)
|
||||||
|
vol.dbv = None
|
||||||
|
else:
|
||||||
|
demote.append(vol)
|
||||||
|
|
||||||
|
# discard jump-vols
|
||||||
|
for v in demote:
|
||||||
|
vfs.all_vols.pop(v.vpath)
|
||||||
|
|
||||||
|
if promote:
|
||||||
|
msg = [
|
||||||
|
"\n the following jump-volumes were generated to assist the vfs.\n As they contain a database (probably from v0.11.11 or older),\n they are promoted to full volumes:"
|
||||||
|
]
|
||||||
|
for vol in promote:
|
||||||
|
msg.append(
|
||||||
|
" /{} ({}) ({})".format(vol.vpath, vol.realpath, vol.histpath)
|
||||||
|
)
|
||||||
|
|
||||||
|
self.log("\n\n".join(msg) + "\n", c=3)
|
||||||
|
|
||||||
|
vfs.histtab = {v.realpath: v.histpath for v in vfs.all_vols.values()}
|
||||||
|
|
||||||
all_mte = {}
|
all_mte = {}
|
||||||
errors = False
|
errors = False
|
||||||
for vol in vfs.all_vols.values():
|
for vol in vfs.all_vols.values():
|
||||||
@@ -464,6 +596,10 @@ class AuthSrv(object):
|
|||||||
if self.args.e2d or "e2ds" in vol.flags:
|
if self.args.e2d or "e2ds" in vol.flags:
|
||||||
vol.flags["e2d"] = True
|
vol.flags["e2d"] = True
|
||||||
|
|
||||||
|
if self.args.no_hash:
|
||||||
|
if "ehash" not in vol.flags:
|
||||||
|
vol.flags["dhash"] = True
|
||||||
|
|
||||||
for k in ["e2t", "e2ts", "e2tsr"]:
|
for k in ["e2t", "e2ts", "e2tsr"]:
|
||||||
if getattr(self.args, k):
|
if getattr(self.args, k):
|
||||||
vol.flags[k] = True
|
vol.flags[k] = True
|
||||||
@@ -541,6 +677,8 @@ class AuthSrv(object):
|
|||||||
if errors:
|
if errors:
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
vfs.bubble_flags()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
v, _ = vfs.get("/", "*", False, True)
|
v, _ = vfs.get("/", "*", False, True)
|
||||||
if self.warn_anonwrite and os.getcwd() == v.realpath:
|
if self.warn_anonwrite and os.getcwd() == v.realpath:
|
||||||
@@ -555,6 +693,11 @@ class AuthSrv(object):
|
|||||||
self.user = user
|
self.user = user
|
||||||
self.iuser = {v: k for k, v in user.items()}
|
self.iuser = {v: k for k, v in user.items()}
|
||||||
|
|
||||||
|
self.re_pwd = None
|
||||||
|
pwds = [re.escape(x) for x in self.iuser.keys()]
|
||||||
|
if pwds:
|
||||||
|
self.re_pwd = re.compile("=(" + "|".join(pwds) + ")([]&; ]|$)")
|
||||||
|
|
||||||
# import pprint
|
# import pprint
|
||||||
# pprint.pprint({"usr": user, "rd": mread, "wr": mwrite, "mnt": mount})
|
# pprint.pprint({"usr": user, "rd": mread, "wr": mwrite, "mnt": mount})
|
||||||
|
|
||||||
@@ -616,13 +759,13 @@ class AuthSrv(object):
|
|||||||
continue
|
continue
|
||||||
|
|
||||||
atop = vn.realpath
|
atop = vn.realpath
|
||||||
g = vn.walk("", "", u, True, not self.args.no_scandir, lstat=False)
|
g = vn.walk("", "", [], u, True, not self.args.no_scandir, False)
|
||||||
for vpath, apath, files, _, _ in g:
|
for vpath, apath, files, _, _ in g:
|
||||||
fnames = [n[0] for n in files]
|
fnames = [n[0] for n in files]
|
||||||
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
|
vpaths = [vpath + "/" + n for n in fnames] if vpath else fnames
|
||||||
vpaths = [vtop + x for x in vpaths]
|
vpaths = [vtop + x for x in vpaths]
|
||||||
apaths = [os.path.join(apath, n) for n in fnames]
|
apaths = [os.path.join(apath, n) for n in fnames]
|
||||||
files = list(zip(vpaths, apaths))
|
files = [[vpath + "/", apath + os.sep]] + list(zip(vpaths, apaths))
|
||||||
|
|
||||||
if flag_ln:
|
if flag_ln:
|
||||||
files = [x for x in files if not x[1].startswith(atop + os.sep)]
|
files = [x for x in files if not x[1].startswith(atop + os.sep)]
|
||||||
|
@@ -44,7 +44,9 @@ class BrokerMp(object):
|
|||||||
proc.clients = {}
|
proc.clients = {}
|
||||||
proc.workload = 0
|
proc.workload = 0
|
||||||
|
|
||||||
thr = threading.Thread(target=self.collector, args=(proc,))
|
thr = threading.Thread(
|
||||||
|
target=self.collector, args=(proc,), name="mp-collector"
|
||||||
|
)
|
||||||
thr.daemon = True
|
thr.daemon = True
|
||||||
thr.start()
|
thr.start()
|
||||||
|
|
||||||
@@ -52,14 +54,19 @@ class BrokerMp(object):
|
|||||||
proc.start()
|
proc.start()
|
||||||
|
|
||||||
if not self.args.q:
|
if not self.args.q:
|
||||||
thr = threading.Thread(target=self.debug_load_balancer)
|
thr = threading.Thread(
|
||||||
|
target=self.debug_load_balancer, name="mp-dbg-loadbalancer"
|
||||||
|
)
|
||||||
thr.daemon = True
|
thr.daemon = True
|
||||||
thr.start()
|
thr.start()
|
||||||
|
|
||||||
def shutdown(self):
|
def shutdown(self):
|
||||||
self.log("broker", "shutting down")
|
self.log("broker", "shutting down")
|
||||||
for proc in self.procs:
|
for n, proc in enumerate(self.procs):
|
||||||
thr = threading.Thread(target=proc.q_pend.put([0, "shutdown", []]))
|
thr = threading.Thread(
|
||||||
|
target=proc.q_pend.put([0, "shutdown", []]),
|
||||||
|
name="mp-shutdown-{}-{}".format(n, len(self.procs)),
|
||||||
|
)
|
||||||
thr.start()
|
thr.start()
|
||||||
|
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
|
@@ -1,5 +1,6 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
from copyparty.authsrv import AuthSrv
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
@@ -27,20 +28,23 @@ class MpWorker(object):
|
|||||||
self.retpend = {}
|
self.retpend = {}
|
||||||
self.retpend_mutex = threading.Lock()
|
self.retpend_mutex = threading.Lock()
|
||||||
self.mutex = threading.Lock()
|
self.mutex = threading.Lock()
|
||||||
self.workload_thr_active = False
|
self.workload_thr_alive = False
|
||||||
|
|
||||||
# we inherited signal_handler from parent,
|
# we inherited signal_handler from parent,
|
||||||
# replace it with something harmless
|
# replace it with something harmless
|
||||||
if not FAKE_MP:
|
if not FAKE_MP:
|
||||||
signal.signal(signal.SIGINT, self.signal_handler)
|
signal.signal(signal.SIGINT, self.signal_handler)
|
||||||
|
|
||||||
|
# starting to look like a good idea
|
||||||
|
self.asrv = AuthSrv(args, None, False)
|
||||||
|
|
||||||
# instantiate all services here (TODO: inheritance?)
|
# instantiate all services here (TODO: inheritance?)
|
||||||
self.httpsrv = HttpSrv(self)
|
self.httpsrv = HttpSrv(self, True)
|
||||||
self.httpsrv.disconnect_func = self.httpdrop
|
self.httpsrv.disconnect_func = self.httpdrop
|
||||||
|
|
||||||
# on winxp and some other platforms,
|
# on winxp and some other platforms,
|
||||||
# use thr.join() to block all signals
|
# use thr.join() to block all signals
|
||||||
thr = threading.Thread(target=self.main)
|
thr = threading.Thread(target=self.main, name="mpw-main")
|
||||||
thr.daemon = True
|
thr.daemon = True
|
||||||
thr.start()
|
thr.start()
|
||||||
thr.join()
|
thr.join()
|
||||||
@@ -64,6 +68,7 @@ class MpWorker(object):
|
|||||||
|
|
||||||
# self.logw("work: [{}]".format(d[0]))
|
# self.logw("work: [{}]".format(d[0]))
|
||||||
if dest == "shutdown":
|
if dest == "shutdown":
|
||||||
|
self.httpsrv.shutdown()
|
||||||
self.logw("ok bye")
|
self.logw("ok bye")
|
||||||
sys.exit(0)
|
sys.exit(0)
|
||||||
return
|
return
|
||||||
@@ -75,13 +80,15 @@ class MpWorker(object):
|
|||||||
|
|
||||||
if self.args.log_conn:
|
if self.args.log_conn:
|
||||||
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
|
self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
|
||||||
|
|
||||||
self.httpsrv.accept(sck, addr)
|
self.httpsrv.accept(sck, addr)
|
||||||
|
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
if not self.workload_thr_active:
|
if not self.workload_thr_alive:
|
||||||
self.workload_thr_alive = True
|
self.workload_thr_alive = True
|
||||||
thr = threading.Thread(target=self.thr_workload)
|
thr = threading.Thread(
|
||||||
|
target=self.thr_workload, name="mpw-workload"
|
||||||
|
)
|
||||||
thr.daemon = True
|
thr.daemon = True
|
||||||
thr.start()
|
thr.start()
|
||||||
|
|
||||||
|
@@ -3,6 +3,7 @@ from __future__ import print_function, unicode_literals
|
|||||||
|
|
||||||
import threading
|
import threading
|
||||||
|
|
||||||
|
from .authsrv import AuthSrv
|
||||||
from .httpsrv import HttpSrv
|
from .httpsrv import HttpSrv
|
||||||
from .broker_util import ExceptionalQueue, try_exec
|
from .broker_util import ExceptionalQueue, try_exec
|
||||||
|
|
||||||
@@ -14,6 +15,7 @@ class BrokerThr(object):
|
|||||||
self.hub = hub
|
self.hub = hub
|
||||||
self.log = hub.log
|
self.log = hub.log
|
||||||
self.args = hub.args
|
self.args = hub.args
|
||||||
|
self.asrv = hub.asrv
|
||||||
|
|
||||||
self.mutex = threading.Lock()
|
self.mutex = threading.Lock()
|
||||||
|
|
||||||
@@ -23,6 +25,7 @@ class BrokerThr(object):
|
|||||||
|
|
||||||
def shutdown(self):
|
def shutdown(self):
|
||||||
# self.log("broker", "shutting down")
|
# self.log("broker", "shutting down")
|
||||||
|
self.httpsrv.shutdown()
|
||||||
pass
|
pass
|
||||||
|
|
||||||
def put(self, want_retval, dest, *args):
|
def put(self, want_retval, dest, *args):
|
||||||
|
@@ -10,12 +10,12 @@ import json
|
|||||||
import string
|
import string
|
||||||
import socket
|
import socket
|
||||||
import ctypes
|
import ctypes
|
||||||
import traceback
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
import calendar
|
import calendar
|
||||||
|
|
||||||
from .__init__ import E, PY2, WINDOWS, ANYWIN
|
from .__init__ import E, PY2, WINDOWS, ANYWIN
|
||||||
from .util import * # noqa # pylint: disable=unused-wildcard-import
|
from .util import * # noqa # pylint: disable=unused-wildcard-import
|
||||||
|
from .authsrv import AuthSrv
|
||||||
from .szip import StreamZip
|
from .szip import StreamZip
|
||||||
from .star import StreamTar
|
from .star import StreamTar
|
||||||
|
|
||||||
@@ -35,12 +35,13 @@ class HttpCli(object):
|
|||||||
def __init__(self, conn):
|
def __init__(self, conn):
|
||||||
self.t0 = time.time()
|
self.t0 = time.time()
|
||||||
self.conn = conn
|
self.conn = conn
|
||||||
self.s = conn.s
|
self.s = conn.s # type: socket
|
||||||
self.sr = conn.sr
|
self.sr = conn.sr # type: Unrecv
|
||||||
self.ip = conn.addr[0]
|
self.ip = conn.addr[0]
|
||||||
self.addr = conn.addr
|
self.addr = conn.addr # type: tuple[str, int]
|
||||||
self.args = conn.args
|
self.args = conn.args
|
||||||
self.auth = conn.auth
|
self.is_mp = conn.is_mp
|
||||||
|
self.asrv = conn.asrv # type: AuthSrv
|
||||||
self.ico = conn.ico
|
self.ico = conn.ico
|
||||||
self.thumbcli = conn.thumbcli
|
self.thumbcli = conn.thumbcli
|
||||||
self.log_func = conn.log_func
|
self.log_func = conn.log_func
|
||||||
@@ -48,12 +49,21 @@ class HttpCli(object):
|
|||||||
self.tls = hasattr(self.s, "cipher")
|
self.tls = hasattr(self.s, "cipher")
|
||||||
|
|
||||||
self.bufsz = 1024 * 32
|
self.bufsz = 1024 * 32
|
||||||
|
self.hint = None
|
||||||
self.absolute_urls = False
|
self.absolute_urls = False
|
||||||
self.out_headers = {"Access-Control-Allow-Origin": "*"}
|
self.out_headers = {"Access-Control-Allow-Origin": "*"}
|
||||||
|
|
||||||
def log(self, msg, c=0):
|
def log(self, msg, c=0):
|
||||||
|
ptn = self.asrv.re_pwd
|
||||||
|
if ptn and ptn.search(msg):
|
||||||
|
msg = ptn.sub(self.unpwd, msg)
|
||||||
|
|
||||||
self.log_func(self.log_src, msg, c)
|
self.log_func(self.log_src, msg, c)
|
||||||
|
|
||||||
|
def unpwd(self, m):
|
||||||
|
a, b = m.groups()
|
||||||
|
return "=\033[7m {} \033[27m{}".format(self.asrv.iuser[a], b)
|
||||||
|
|
||||||
def _check_nonfatal(self, ex):
|
def _check_nonfatal(self, ex):
|
||||||
return ex.code < 400 or ex.code in [404, 429]
|
return ex.code < 400 or ex.code in [404, 429]
|
||||||
|
|
||||||
@@ -70,6 +80,7 @@ class HttpCli(object):
|
|||||||
"""returns true if connection can be reused"""
|
"""returns true if connection can be reused"""
|
||||||
self.keepalive = False
|
self.keepalive = False
|
||||||
self.headers = {}
|
self.headers = {}
|
||||||
|
self.hint = None
|
||||||
try:
|
try:
|
||||||
headerlines = read_header(self.sr)
|
headerlines = read_header(self.sr)
|
||||||
if not headerlines:
|
if not headerlines:
|
||||||
@@ -102,10 +113,21 @@ class HttpCli(object):
|
|||||||
v = self.headers.get("connection", "").lower()
|
v = self.headers.get("connection", "").lower()
|
||||||
self.keepalive = not v.startswith("close") and self.http_ver != "HTTP/1.0"
|
self.keepalive = not v.startswith("close") and self.http_ver != "HTTP/1.0"
|
||||||
|
|
||||||
v = self.headers.get("x-forwarded-for", None)
|
n = self.args.rproxy
|
||||||
if v is not None and self.conn.addr[0] in ["127.0.0.1", "::1"]:
|
if n:
|
||||||
self.ip = v.split(",")[0]
|
v = self.headers.get("x-forwarded-for")
|
||||||
self.log_src = self.conn.set_rproxy(self.ip)
|
if v and self.conn.addr[0] in ["127.0.0.1", "::1"]:
|
||||||
|
if n > 0:
|
||||||
|
n -= 1
|
||||||
|
|
||||||
|
vs = v.split(",")
|
||||||
|
try:
|
||||||
|
self.ip = vs[n].strip()
|
||||||
|
except:
|
||||||
|
self.ip = vs[0].strip()
|
||||||
|
self.log("rproxy={} oob x-fwd {}".format(self.args.rproxy, v), c=3)
|
||||||
|
|
||||||
|
self.log_src = self.conn.set_rproxy(self.ip)
|
||||||
|
|
||||||
if self.args.ihead:
|
if self.args.ihead:
|
||||||
keys = self.args.ihead
|
keys = self.args.ihead
|
||||||
@@ -117,6 +139,9 @@ class HttpCli(object):
|
|||||||
if v is not None:
|
if v is not None:
|
||||||
self.log("[H] {}: \033[33m[{}]".format(k, v), 6)
|
self.log("[H] {}: \033[33m[{}]".format(k, v), 6)
|
||||||
|
|
||||||
|
if "&" in self.req and "?" not in self.req:
|
||||||
|
self.hint = "did you mean '?' instead of '&'"
|
||||||
|
|
||||||
# split req into vpath + uparam
|
# split req into vpath + uparam
|
||||||
uparam = {}
|
uparam = {}
|
||||||
if "?" not in self.req:
|
if "?" not in self.req:
|
||||||
@@ -152,9 +177,12 @@ class HttpCli(object):
|
|||||||
self.vpath = unquotep(vpath)
|
self.vpath = unquotep(vpath)
|
||||||
|
|
||||||
pwd = uparam.get("pw")
|
pwd = uparam.get("pw")
|
||||||
self.uname = self.auth.iuser.get(pwd, "*")
|
self.uname = self.asrv.iuser.get(pwd, "*")
|
||||||
self.rvol, self.wvol, self.avol = [[], [], []]
|
self.rvol, self.wvol, self.avol = [[], [], []]
|
||||||
self.auth.vfs.user_tree(self.uname, self.rvol, self.wvol, self.avol)
|
self.asrv.vfs.user_tree(self.uname, self.rvol, self.wvol, self.avol)
|
||||||
|
|
||||||
|
if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"):
|
||||||
|
self.out_headers["Set-Cookie"] = self.get_pwd_cookie(pwd)[0]
|
||||||
|
|
||||||
ua = self.headers.get("user-agent", "")
|
ua = self.headers.get("user-agent", "")
|
||||||
self.is_rclone = ua.startswith("rclone/")
|
self.is_rclone = ua.startswith("rclone/")
|
||||||
@@ -186,6 +214,9 @@ class HttpCli(object):
|
|||||||
|
|
||||||
self.log("{}\033[0m, {}".format(str(ex), self.vpath), 3)
|
self.log("{}\033[0m, {}".format(str(ex), self.vpath), 3)
|
||||||
msg = "<pre>{}\r\nURL: {}\r\n".format(str(ex), self.vpath)
|
msg = "<pre>{}\r\nURL: {}\r\n".format(str(ex), self.vpath)
|
||||||
|
if self.hint:
|
||||||
|
msg += "hint: {}\r\n".format(self.hint)
|
||||||
|
|
||||||
self.reply(msg.encode("utf-8", "replace"), status=ex.code)
|
self.reply(msg.encode("utf-8", "replace"), status=ex.code)
|
||||||
return self.keepalive
|
return self.keepalive
|
||||||
except Pebkac:
|
except Pebkac:
|
||||||
@@ -243,10 +274,11 @@ class HttpCli(object):
|
|||||||
if self.is_rclone:
|
if self.is_rclone:
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
|
cmap = {"pw": "cppwd"}
|
||||||
kv = {
|
kv = {
|
||||||
k: v
|
k: v
|
||||||
for k, v in self.uparam.items()
|
for k, v in self.uparam.items()
|
||||||
if k not in rm and self.cookies.get(k) != v
|
if k not in rm and self.cookies.get(cmap.get(k, k)) != v
|
||||||
}
|
}
|
||||||
kv.update(add)
|
kv.update(add)
|
||||||
if not kv:
|
if not kv:
|
||||||
@@ -256,7 +288,14 @@ class HttpCli(object):
|
|||||||
return "?" + "&".join(r)
|
return "?" + "&".join(r)
|
||||||
|
|
||||||
def redirect(
|
def redirect(
|
||||||
self, vpath, suf="", msg="aight", flavor="go to", click=True, use302=False
|
self,
|
||||||
|
vpath,
|
||||||
|
suf="",
|
||||||
|
msg="aight",
|
||||||
|
flavor="go to",
|
||||||
|
click=True,
|
||||||
|
status=200,
|
||||||
|
use302=False,
|
||||||
):
|
):
|
||||||
html = self.j2(
|
html = self.j2(
|
||||||
"msg",
|
"msg",
|
||||||
@@ -271,7 +310,7 @@ class HttpCli(object):
|
|||||||
h = {"Location": "/" + vpath, "Cache-Control": "no-cache"}
|
h = {"Location": "/" + vpath, "Cache-Control": "no-cache"}
|
||||||
self.reply(html, status=302, headers=h)
|
self.reply(html, status=302, headers=h)
|
||||||
else:
|
else:
|
||||||
self.reply(html)
|
self.reply(html, status=status)
|
||||||
|
|
||||||
def handle_get(self):
|
def handle_get(self):
|
||||||
if self.do_log:
|
if self.do_log:
|
||||||
@@ -312,9 +351,7 @@ class HttpCli(object):
|
|||||||
self.redirect(vpath, flavor="redirecting to", use302=True)
|
self.redirect(vpath, flavor="redirecting to", use302=True)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
self.readable, self.writable = self.conn.auth.vfs.can_access(
|
self.readable, self.writable = self.asrv.vfs.can_access(self.vpath, self.uname)
|
||||||
self.vpath, self.uname
|
|
||||||
)
|
|
||||||
if not self.readable and not self.writable:
|
if not self.readable and not self.writable:
|
||||||
if self.vpath:
|
if self.vpath:
|
||||||
self.log("inaccessible: [{}]".format(self.vpath))
|
self.log("inaccessible: [{}]".format(self.vpath))
|
||||||
@@ -431,7 +468,7 @@ class HttpCli(object):
|
|||||||
|
|
||||||
def dump_to_file(self):
|
def dump_to_file(self):
|
||||||
reader, remains = self.get_body_reader()
|
reader, remains = self.get_body_reader()
|
||||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
|
||||||
fdir = os.path.join(vfs.realpath, rem)
|
fdir = os.path.join(vfs.realpath, rem)
|
||||||
|
|
||||||
addr = self.ip.replace(":", ".")
|
addr = self.ip.replace(":", ".")
|
||||||
@@ -441,8 +478,10 @@ class HttpCli(object):
|
|||||||
with open(fsenc(path), "wb", 512 * 1024) as f:
|
with open(fsenc(path), "wb", 512 * 1024) as f:
|
||||||
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
|
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
|
||||||
|
|
||||||
|
vfs, vrem = vfs.get_dbv(rem)
|
||||||
|
|
||||||
self.conn.hsrv.broker.put(
|
self.conn.hsrv.broker.put(
|
||||||
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fn
|
False, "up2k.hash_file", vfs.realpath, vfs.flags, vrem, fn
|
||||||
)
|
)
|
||||||
|
|
||||||
return post_sz, sha_b64, remains, path
|
return post_sz, sha_b64, remains, path
|
||||||
@@ -498,7 +537,7 @@ class HttpCli(object):
|
|||||||
if v is None:
|
if v is None:
|
||||||
raise Pebkac(422, "need zip or tar keyword")
|
raise Pebkac(422, "need zip or tar keyword")
|
||||||
|
|
||||||
vn, rem = self.auth.vfs.get(self.vpath, self.uname, True, False)
|
vn, rem = self.asrv.vfs.get(self.vpath, self.uname, True, False)
|
||||||
items = self.parser.require("files", 1024 * 1024)
|
items = self.parser.require("files", 1024 * 1024)
|
||||||
if not items:
|
if not items:
|
||||||
raise Pebkac(422, "need files list")
|
raise Pebkac(422, "need files list")
|
||||||
@@ -506,6 +545,7 @@ class HttpCli(object):
|
|||||||
items = items.replace("\r", "").split("\n")
|
items = items.replace("\r", "").split("\n")
|
||||||
items = [unquotep(x) for x in items if items]
|
items = [unquotep(x) for x in items if items]
|
||||||
|
|
||||||
|
self.parser.drop()
|
||||||
return self.tx_zip(k, v, vn, rem, items, self.args.ed)
|
return self.tx_zip(k, v, vn, rem, items, self.args.ed)
|
||||||
|
|
||||||
def handle_post_json(self):
|
def handle_post_json(self):
|
||||||
@@ -547,22 +587,32 @@ class HttpCli(object):
|
|||||||
self.vpath = "/".join([self.vpath, sub]).strip("/")
|
self.vpath = "/".join([self.vpath, sub]).strip("/")
|
||||||
body["name"] = name
|
body["name"] = name
|
||||||
|
|
||||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
|
||||||
|
dbv, vrem = vfs.get_dbv(rem)
|
||||||
|
|
||||||
body["vtop"] = vfs.vpath
|
body["vtop"] = dbv.vpath
|
||||||
body["ptop"] = vfs.realpath
|
body["ptop"] = dbv.realpath
|
||||||
body["prel"] = rem
|
body["prel"] = vrem
|
||||||
body["addr"] = self.ip
|
body["addr"] = self.ip
|
||||||
body["vcfg"] = vfs.flags
|
body["vcfg"] = dbv.flags
|
||||||
|
|
||||||
if sub:
|
if sub:
|
||||||
try:
|
try:
|
||||||
dst = os.path.join(vfs.realpath, rem)
|
dst = os.path.join(vfs.realpath, rem)
|
||||||
os.makedirs(fsenc(dst))
|
|
||||||
except:
|
|
||||||
if not os.path.isdir(fsenc(dst)):
|
if not os.path.isdir(fsenc(dst)):
|
||||||
|
os.makedirs(fsenc(dst))
|
||||||
|
except OSError as ex:
|
||||||
|
self.log("makedirs failed [{}]".format(dst))
|
||||||
|
if ex.errno == 13:
|
||||||
|
raise Pebkac(500, "the server OS denied write-access")
|
||||||
|
|
||||||
|
if ex.errno == 17:
|
||||||
raise Pebkac(400, "some file got your folder name")
|
raise Pebkac(400, "some file got your folder name")
|
||||||
|
|
||||||
|
raise Pebkac(500, min_ex())
|
||||||
|
except:
|
||||||
|
raise Pebkac(500, min_ex())
|
||||||
|
|
||||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
|
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
|
||||||
ret = x.get()
|
ret = x.get()
|
||||||
if sub:
|
if sub:
|
||||||
@@ -575,8 +625,14 @@ class HttpCli(object):
|
|||||||
|
|
||||||
def handle_search(self, body):
|
def handle_search(self, body):
|
||||||
vols = []
|
vols = []
|
||||||
|
seen = {}
|
||||||
for vtop in self.rvol:
|
for vtop in self.rvol:
|
||||||
vfs, _ = self.conn.auth.vfs.get(vtop, self.uname, True, False)
|
vfs, _ = self.asrv.vfs.get(vtop, self.uname, True, False)
|
||||||
|
vfs = vfs.dbv or vfs
|
||||||
|
if vfs in seen:
|
||||||
|
continue
|
||||||
|
|
||||||
|
seen[vfs] = True
|
||||||
vols.append([vfs.vpath, vfs.realpath, vfs.flags])
|
vols.append([vfs.vpath, vfs.realpath, vfs.flags])
|
||||||
|
|
||||||
idx = self.conn.get_u2idx()
|
idx = self.conn.get_u2idx()
|
||||||
@@ -585,7 +641,7 @@ class HttpCli(object):
|
|||||||
penalty = 0.7
|
penalty = 0.7
|
||||||
t_idle = t0 - idx.p_end
|
t_idle = t0 - idx.p_end
|
||||||
if idx.p_dur > 0.7 and t_idle < penalty:
|
if idx.p_dur > 0.7 and t_idle < penalty:
|
||||||
m = "rate-limit ({:.1f} sec), cost {:.2f}, idle {:.2f}"
|
m = "rate-limit {:.1f} sec, cost {:.2f}, idle {:.2f}"
|
||||||
raise Pebkac(429, m.format(penalty, idx.p_dur, t_idle))
|
raise Pebkac(429, m.format(penalty, idx.p_dur, t_idle))
|
||||||
|
|
||||||
if "srch" in body:
|
if "srch" in body:
|
||||||
@@ -632,8 +688,8 @@ class HttpCli(object):
|
|||||||
except KeyError:
|
except KeyError:
|
||||||
raise Pebkac(400, "need hash and wark headers for binary POST")
|
raise Pebkac(400, "need hash and wark headers for binary POST")
|
||||||
|
|
||||||
vfs, _ = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
vfs, _ = self.asrv.vfs.get(self.vpath, self.uname, False, True)
|
||||||
ptop = vfs.realpath
|
ptop = (vfs.dbv or vfs).realpath
|
||||||
|
|
||||||
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", ptop, wark, chash)
|
x = self.conn.hsrv.broker.put(True, "up2k.handle_chunk", ptop, wark, chash)
|
||||||
response = x.get()
|
response = x.get()
|
||||||
@@ -705,7 +761,13 @@ class HttpCli(object):
|
|||||||
pwd = self.parser.require("cppwd", 64)
|
pwd = self.parser.require("cppwd", 64)
|
||||||
self.parser.drop()
|
self.parser.drop()
|
||||||
|
|
||||||
if pwd in self.auth.iuser:
|
ck, msg = self.get_pwd_cookie(pwd)
|
||||||
|
html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
|
||||||
|
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
|
||||||
|
return True
|
||||||
|
|
||||||
|
def get_pwd_cookie(self, pwd):
|
||||||
|
if pwd in self.asrv.iuser:
|
||||||
msg = "login ok"
|
msg = "login ok"
|
||||||
dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365)
|
dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365)
|
||||||
exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
|
exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
|
||||||
@@ -715,16 +777,14 @@ class HttpCli(object):
|
|||||||
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
|
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
|
||||||
|
|
||||||
ck = "cppwd={}; Path=/; Expires={}; SameSite=Lax".format(pwd, exp)
|
ck = "cppwd={}; Path=/; Expires={}; SameSite=Lax".format(pwd, exp)
|
||||||
html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
|
return [ck, msg]
|
||||||
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
|
|
||||||
return True
|
|
||||||
|
|
||||||
def handle_mkdir(self):
|
def handle_mkdir(self):
|
||||||
new_dir = self.parser.require("name", 512)
|
new_dir = self.parser.require("name", 512)
|
||||||
self.parser.drop()
|
self.parser.drop()
|
||||||
|
|
||||||
nullwrite = self.args.nw
|
nullwrite = self.args.nw
|
||||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
|
||||||
self._assert_safe_rem(rem)
|
self._assert_safe_rem(rem)
|
||||||
|
|
||||||
sanitized = sanitize_fn(new_dir)
|
sanitized = sanitize_fn(new_dir)
|
||||||
@@ -741,8 +801,13 @@ class HttpCli(object):
|
|||||||
|
|
||||||
try:
|
try:
|
||||||
os.mkdir(fsenc(fn))
|
os.mkdir(fsenc(fn))
|
||||||
|
except OSError as ex:
|
||||||
|
if ex.errno == 13:
|
||||||
|
raise Pebkac(500, "the server OS denied write-access")
|
||||||
|
|
||||||
|
raise Pebkac(500, "mkdir failed:\n" + min_ex())
|
||||||
except:
|
except:
|
||||||
raise Pebkac(500, "mkdir failed, check the logs")
|
raise Pebkac(500, min_ex())
|
||||||
|
|
||||||
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
|
||||||
self.redirect(vpath)
|
self.redirect(vpath)
|
||||||
@@ -753,7 +818,7 @@ class HttpCli(object):
|
|||||||
self.parser.drop()
|
self.parser.drop()
|
||||||
|
|
||||||
nullwrite = self.args.nw
|
nullwrite = self.args.nw
|
||||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
|
||||||
self._assert_safe_rem(rem)
|
self._assert_safe_rem(rem)
|
||||||
|
|
||||||
if not new_file.endswith(".md"):
|
if not new_file.endswith(".md"):
|
||||||
@@ -777,7 +842,7 @@ class HttpCli(object):
|
|||||||
|
|
||||||
def handle_plain_upload(self):
|
def handle_plain_upload(self):
|
||||||
nullwrite = self.args.nw
|
nullwrite = self.args.nw
|
||||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
|
||||||
self._assert_safe_rem(rem)
|
self._assert_safe_rem(rem)
|
||||||
|
|
||||||
files = []
|
files = []
|
||||||
@@ -814,8 +879,14 @@ class HttpCli(object):
|
|||||||
raise Pebkac(400, "empty files in post")
|
raise Pebkac(400, "empty files in post")
|
||||||
|
|
||||||
files.append([sz, sha512_hex, p_file, fname])
|
files.append([sz, sha512_hex, p_file, fname])
|
||||||
|
dbv, vrem = vfs.get_dbv(rem)
|
||||||
self.conn.hsrv.broker.put(
|
self.conn.hsrv.broker.put(
|
||||||
False, "up2k.hash_file", vfs.realpath, vfs.flags, rem, fname
|
False,
|
||||||
|
"up2k.hash_file",
|
||||||
|
dbv.realpath,
|
||||||
|
dbv.flags,
|
||||||
|
vrem,
|
||||||
|
fname,
|
||||||
)
|
)
|
||||||
self.conn.nbyte += sz
|
self.conn.nbyte += sz
|
||||||
|
|
||||||
@@ -845,12 +916,16 @@ class HttpCli(object):
|
|||||||
status = "OK"
|
status = "OK"
|
||||||
if errmsg:
|
if errmsg:
|
||||||
self.log(errmsg)
|
self.log(errmsg)
|
||||||
errmsg = "ERROR: " + errmsg
|
|
||||||
status = "ERROR"
|
status = "ERROR"
|
||||||
|
|
||||||
msg = "{} // {} bytes // {:.3f} MiB/s\n".format(status, sz_total, spd)
|
msg = "{} // {} bytes // {:.3f} MiB/s\n".format(status, sz_total, spd)
|
||||||
jmsg = {"status": status, "sz": sz_total, "mbps": round(spd, 3), "files": []}
|
jmsg = {"status": status, "sz": sz_total, "mbps": round(spd, 3), "files": []}
|
||||||
|
|
||||||
|
if errmsg:
|
||||||
|
msg += errmsg + "\n"
|
||||||
|
jmsg["error"] = errmsg
|
||||||
|
errmsg = "ERROR: " + errmsg
|
||||||
|
|
||||||
for sz, sha512, ofn, lfn in files:
|
for sz, sha512, ofn, lfn in files:
|
||||||
vpath = (self.vpath + "/" if self.vpath else "") + lfn
|
vpath = (self.vpath + "/" if self.vpath else "") + lfn
|
||||||
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format(
|
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format(
|
||||||
@@ -882,11 +957,21 @@ class HttpCli(object):
|
|||||||
ft = "{}\n{}\n{}\n".format(ft, msg.rstrip(), errmsg)
|
ft = "{}\n{}\n{}\n".format(ft, msg.rstrip(), errmsg)
|
||||||
f.write(ft.encode("utf-8"))
|
f.write(ft.encode("utf-8"))
|
||||||
|
|
||||||
|
status = 400 if errmsg else 200
|
||||||
if "j" in self.uparam:
|
if "j" in self.uparam:
|
||||||
jtxt = json.dumps(jmsg, indent=2, sort_keys=True)
|
jtxt = json.dumps(jmsg, indent=2, sort_keys=True).encode("utf-8", "replace")
|
||||||
self.reply(jtxt.encode("utf-8", "replace"), mime="application/json")
|
self.reply(jtxt, mime="application/json", status=status)
|
||||||
else:
|
else:
|
||||||
self.redirect(self.vpath, msg=msg, flavor="return to", click=False)
|
self.redirect(
|
||||||
|
self.vpath,
|
||||||
|
msg=msg,
|
||||||
|
flavor="return to",
|
||||||
|
click=False,
|
||||||
|
status=status,
|
||||||
|
)
|
||||||
|
|
||||||
|
if errmsg:
|
||||||
|
return False
|
||||||
|
|
||||||
self.parser.drop()
|
self.parser.drop()
|
||||||
return True
|
return True
|
||||||
@@ -898,7 +983,7 @@ class HttpCli(object):
|
|||||||
raise Pebkac(400, "could not read lastmod from request")
|
raise Pebkac(400, "could not read lastmod from request")
|
||||||
|
|
||||||
nullwrite = self.args.nw
|
nullwrite = self.args.nw
|
||||||
vfs, rem = self.conn.auth.vfs.get(self.vpath, self.uname, False, True)
|
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
|
||||||
self._assert_safe_rem(rem)
|
self._assert_safe_rem(rem)
|
||||||
|
|
||||||
# TODO:
|
# TODO:
|
||||||
@@ -991,6 +1076,8 @@ class HttpCli(object):
|
|||||||
cli_lastmod = self.headers.get("if-modified-since")
|
cli_lastmod = self.headers.get("if-modified-since")
|
||||||
if cli_lastmod:
|
if cli_lastmod:
|
||||||
try:
|
try:
|
||||||
|
# some browser append "; length=573"
|
||||||
|
cli_lastmod = cli_lastmod.split(";")[0].strip()
|
||||||
cli_dt = time.strptime(cli_lastmod, HTTP_TS_FMT)
|
cli_dt = time.strptime(cli_lastmod, HTTP_TS_FMT)
|
||||||
cli_ts = calendar.timegm(cli_dt)
|
cli_ts = calendar.timegm(cli_dt)
|
||||||
return file_lastmod, int(file_ts) > int(cli_ts)
|
return file_lastmod, int(file_ts) > int(cli_ts)
|
||||||
@@ -1137,7 +1224,7 @@ class HttpCli(object):
|
|||||||
#
|
#
|
||||||
# send reply
|
# send reply
|
||||||
|
|
||||||
if not is_compressed:
|
if not is_compressed and "cache" not in self.uparam:
|
||||||
self.out_headers.update(NO_CACHE)
|
self.out_headers.update(NO_CACHE)
|
||||||
|
|
||||||
self.out_headers["Accept-Ranges"] = "bytes"
|
self.out_headers["Accept-Ranges"] = "bytes"
|
||||||
@@ -1160,7 +1247,8 @@ class HttpCli(object):
|
|||||||
if use_sendfile:
|
if use_sendfile:
|
||||||
remains = sendfile_kern(lower, upper, f, self.s)
|
remains = sendfile_kern(lower, upper, f, self.s)
|
||||||
else:
|
else:
|
||||||
remains = sendfile_py(lower, upper, f, self.s)
|
actor = self.conn if self.is_mp else None
|
||||||
|
remains = sendfile_py(lower, upper, f, self.s, actor)
|
||||||
|
|
||||||
if remains > 0:
|
if remains > 0:
|
||||||
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
|
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
|
||||||
@@ -1239,7 +1327,7 @@ class HttpCli(object):
|
|||||||
ext = "folder"
|
ext = "folder"
|
||||||
exact = True
|
exact = True
|
||||||
|
|
||||||
bad = re.compile(r"[](){}/[]|^[0-9_-]*$")
|
bad = re.compile(r"[](){}/ []|^[0-9_-]*$")
|
||||||
n = ext.split(".")[::-1]
|
n = ext.split(".")[::-1]
|
||||||
if not exact:
|
if not exact:
|
||||||
n = n[:-1]
|
n = n[:-1]
|
||||||
@@ -1333,11 +1421,13 @@ class HttpCli(object):
|
|||||||
for y in [self.rvol, self.wvol, self.avol]
|
for y in [self.rvol, self.wvol, self.avol]
|
||||||
]
|
]
|
||||||
|
|
||||||
vstate = {}
|
|
||||||
if self.avol and not self.args.no_rescan:
|
if self.avol and not self.args.no_rescan:
|
||||||
x = self.conn.hsrv.broker.put(True, "up2k.get_volstate")
|
x = self.conn.hsrv.broker.put(True, "up2k.get_state")
|
||||||
vstate = json.loads(x.get())
|
vs = json.loads(x.get())
|
||||||
vstate = {("/" + k).rstrip("/") + "/": v for k, v in vstate.items()}
|
vstate = {("/" + k).rstrip("/") + "/": v for k, v in vs["volstate"].items()}
|
||||||
|
else:
|
||||||
|
vstate = {}
|
||||||
|
vs = {"scanning": None, "hashq": None, "tagq": None, "mtpq": None}
|
||||||
|
|
||||||
html = self.j2(
|
html = self.j2(
|
||||||
"splash",
|
"splash",
|
||||||
@@ -1346,6 +1436,10 @@ class HttpCli(object):
|
|||||||
wvol=wvol,
|
wvol=wvol,
|
||||||
avol=avol,
|
avol=avol,
|
||||||
vstate=vstate,
|
vstate=vstate,
|
||||||
|
scanning=vs["scanning"],
|
||||||
|
hashq=vs["hashq"],
|
||||||
|
tagq=vs["tagq"],
|
||||||
|
mtpq=vs["mtpq"],
|
||||||
url_suf=suf,
|
url_suf=suf,
|
||||||
)
|
)
|
||||||
self.reply(html.encode("utf-8"), headers=NO_STORE)
|
self.reply(html.encode("utf-8"), headers=NO_STORE)
|
||||||
@@ -1358,9 +1452,10 @@ class HttpCli(object):
|
|||||||
if self.args.no_rescan:
|
if self.args.no_rescan:
|
||||||
raise Pebkac(403, "disabled by argv")
|
raise Pebkac(403, "disabled by argv")
|
||||||
|
|
||||||
vn, _ = self.auth.vfs.get(self.vpath, self.uname, True, True)
|
vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True)
|
||||||
|
|
||||||
|
args = [self.asrv.vfs.all_vols, [vn.vpath]]
|
||||||
|
|
||||||
args = [self.auth.vfs.all_vols, [vn.vpath]]
|
|
||||||
x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args)
|
x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args)
|
||||||
x = x.get()
|
x = x.get()
|
||||||
if not x:
|
if not x:
|
||||||
@@ -1376,17 +1471,8 @@ class HttpCli(object):
|
|||||||
if self.args.no_stack:
|
if self.args.no_stack:
|
||||||
raise Pebkac(403, "disabled by argv")
|
raise Pebkac(403, "disabled by argv")
|
||||||
|
|
||||||
ret = []
|
ret = "<pre>{}\n{}".format(time.time(), alltrace())
|
||||||
names = dict([(t.ident, t.name) for t in threading.enumerate()])
|
self.reply(ret.encode("utf-8"))
|
||||||
for tid, stack in sys._current_frames().items():
|
|
||||||
ret.append("\n\n# {} ({:x})".format(names.get(tid), tid))
|
|
||||||
for fn, lno, name, line in traceback.extract_stack(stack):
|
|
||||||
ret.append('File: "{}", line {}, in {}'.format(fn, lno, name))
|
|
||||||
if line:
|
|
||||||
ret.append(" " + str(line.strip()))
|
|
||||||
|
|
||||||
ret = ("<pre>" + "\n".join(ret)).encode("utf-8")
|
|
||||||
self.reply(ret)
|
|
||||||
|
|
||||||
def tx_tree(self):
|
def tx_tree(self):
|
||||||
top = self.uparam["tree"] or ""
|
top = self.uparam["tree"] or ""
|
||||||
@@ -1416,9 +1502,9 @@ class HttpCli(object):
|
|||||||
ret["k" + quotep(excl)] = sub
|
ret["k" + quotep(excl)] = sub
|
||||||
|
|
||||||
try:
|
try:
|
||||||
vn, rem = self.auth.vfs.get(top, self.uname, True, False)
|
vn, rem = self.asrv.vfs.get(top, self.uname, True, False)
|
||||||
fsroot, vfs_ls, vfs_virt = vn.ls(
|
fsroot, vfs_ls, vfs_virt = vn.ls(
|
||||||
rem, self.uname, not self.args.no_scandir, True
|
rem, self.uname, not self.args.no_scandir, incl_wo=True
|
||||||
)
|
)
|
||||||
except:
|
except:
|
||||||
vfs_ls = []
|
vfs_ls = []
|
||||||
@@ -1457,35 +1543,51 @@ class HttpCli(object):
|
|||||||
|
|
||||||
vpnodes.append([quotep(vpath) + "/", html_escape(node, crlf=True)])
|
vpnodes.append([quotep(vpath) + "/", html_escape(node, crlf=True)])
|
||||||
|
|
||||||
vn, rem = self.auth.vfs.get(
|
vn, rem = self.asrv.vfs.get(
|
||||||
self.vpath, self.uname, self.readable, self.writable
|
self.vpath, self.uname, self.readable, self.writable
|
||||||
)
|
)
|
||||||
abspath = vn.canonical(rem)
|
abspath = vn.canonical(rem)
|
||||||
|
dbv, vrem = vn.get_dbv(rem)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
st = os.stat(fsenc(abspath))
|
st = os.stat(fsenc(abspath))
|
||||||
except:
|
except:
|
||||||
raise Pebkac(404)
|
raise Pebkac(404)
|
||||||
|
|
||||||
if self.readable and not stat.S_ISDIR(st.st_mode):
|
if self.readable:
|
||||||
if rem.startswith(".hist/up2k."):
|
if rem.startswith(".hist/up2k."):
|
||||||
raise Pebkac(403)
|
raise Pebkac(403)
|
||||||
|
|
||||||
|
is_dir = stat.S_ISDIR(st.st_mode)
|
||||||
th_fmt = self.uparam.get("th")
|
th_fmt = self.uparam.get("th")
|
||||||
if th_fmt is not None:
|
if th_fmt is not None:
|
||||||
|
if is_dir:
|
||||||
|
for fn in ["folder.png", "folder.jpg"]:
|
||||||
|
fp = os.path.join(abspath, fn)
|
||||||
|
if os.path.exists(fp):
|
||||||
|
vrem = "{}/{}".format(vrem.rstrip("/"), fn)
|
||||||
|
is_dir = False
|
||||||
|
break
|
||||||
|
|
||||||
|
if is_dir:
|
||||||
|
return self.tx_ico("a.folder")
|
||||||
|
|
||||||
thp = None
|
thp = None
|
||||||
if self.thumbcli:
|
if self.thumbcli:
|
||||||
thp = self.thumbcli.get(vn.realpath, rem, int(st.st_mtime), th_fmt)
|
thp = self.thumbcli.get(
|
||||||
|
dbv.realpath, vrem, int(st.st_mtime), th_fmt
|
||||||
|
)
|
||||||
|
|
||||||
if thp:
|
if thp:
|
||||||
return self.tx_file(thp)
|
return self.tx_file(thp)
|
||||||
|
|
||||||
return self.tx_ico(rem)
|
return self.tx_ico(rem)
|
||||||
|
|
||||||
if abspath.endswith(".md") and "raw" not in self.uparam:
|
if not is_dir:
|
||||||
return self.tx_md(abspath)
|
if abspath.endswith(".md") and "raw" not in self.uparam:
|
||||||
|
return self.tx_md(abspath)
|
||||||
|
|
||||||
return self.tx_file(abspath)
|
return self.tx_file(abspath)
|
||||||
|
|
||||||
srv_info = []
|
srv_info = []
|
||||||
|
|
||||||
@@ -1585,7 +1687,7 @@ class HttpCli(object):
|
|||||||
return self.tx_zip(k, v, vn, rem, [], self.args.ed)
|
return self.tx_zip(k, v, vn, rem, [], self.args.ed)
|
||||||
|
|
||||||
fsroot, vfs_ls, vfs_virt = vn.ls(
|
fsroot, vfs_ls, vfs_virt = vn.ls(
|
||||||
rem, self.uname, not self.args.no_scandir, True
|
rem, self.uname, not self.args.no_scandir, incl_wo=True
|
||||||
)
|
)
|
||||||
stats = {k: v for k, v in vfs_ls}
|
stats = {k: v for k, v in vfs_ls}
|
||||||
vfs_ls = [x[0] for x in vfs_ls]
|
vfs_ls = [x[0] for x in vfs_ls]
|
||||||
@@ -1618,7 +1720,7 @@ class HttpCli(object):
|
|||||||
icur = None
|
icur = None
|
||||||
if "e2t" in vn.flags:
|
if "e2t" in vn.flags:
|
||||||
idx = self.conn.get_u2idx()
|
idx = self.conn.get_u2idx()
|
||||||
icur = idx.get_cur(vn.realpath)
|
icur = idx.get_cur(dbv.realpath)
|
||||||
|
|
||||||
dirs = []
|
dirs = []
|
||||||
files = []
|
files = []
|
||||||
@@ -1685,25 +1787,44 @@ class HttpCli(object):
|
|||||||
fn = f["name"]
|
fn = f["name"]
|
||||||
rd = f["rd"]
|
rd = f["rd"]
|
||||||
del f["rd"]
|
del f["rd"]
|
||||||
if icur:
|
if not icur:
|
||||||
q = "select w from up where rd = ? and fn = ?"
|
break
|
||||||
|
|
||||||
|
if vn != dbv:
|
||||||
|
_, rd = vn.get_dbv(rd)
|
||||||
|
|
||||||
|
q = "select w from up where rd = ? and fn = ?"
|
||||||
|
r = None
|
||||||
|
try:
|
||||||
|
r = icur.execute(q, (rd, fn)).fetchone()
|
||||||
|
except Exception as ex:
|
||||||
|
if "database is locked" in str(ex):
|
||||||
|
break
|
||||||
|
|
||||||
try:
|
try:
|
||||||
r = icur.execute(q, (rd, fn)).fetchone()
|
|
||||||
except:
|
|
||||||
args = s3enc(idx.mem_cur, rd, fn)
|
args = s3enc(idx.mem_cur, rd, fn)
|
||||||
r = icur.execute(q, args).fetchone()
|
r = icur.execute(q, args).fetchone()
|
||||||
|
except:
|
||||||
|
m = "tag list error, {}/{}\n{}"
|
||||||
|
self.log(m.format(rd, fn, min_ex()))
|
||||||
|
break
|
||||||
|
|
||||||
tags = {}
|
tags = {}
|
||||||
f["tags"] = tags
|
f["tags"] = tags
|
||||||
|
|
||||||
if not r:
|
if not r:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
w = r[0][:16]
|
w = r[0][:16]
|
||||||
q = "select k, v from mt where w = ? and k != 'x'"
|
q = "select k, v from mt where w = ? and k != 'x'"
|
||||||
|
try:
|
||||||
for k, v in icur.execute(q, (w,)):
|
for k, v in icur.execute(q, (w,)):
|
||||||
taglist[k] = True
|
taglist[k] = True
|
||||||
tags[k] = v
|
tags[k] = v
|
||||||
|
except:
|
||||||
|
m = "tag read error, {}/{} [{}]:\n{}"
|
||||||
|
self.log(m.format(rd, fn, w, min_ex()))
|
||||||
|
break
|
||||||
|
|
||||||
if icur:
|
if icur:
|
||||||
taglist = [k for k in vn.flags.get("mte", "").split(",") if k in taglist]
|
taglist = [k for k in vn.flags.get("mte", "").split(",") if k in taglist]
|
||||||
@@ -1726,9 +1847,13 @@ class HttpCli(object):
|
|||||||
j2a["files"] = dirs + files
|
j2a["files"] = dirs + files
|
||||||
j2a["logues"] = logues
|
j2a["logues"] = logues
|
||||||
j2a["taglist"] = taglist
|
j2a["taglist"] = taglist
|
||||||
|
|
||||||
if "mte" in vn.flags:
|
if "mte" in vn.flags:
|
||||||
j2a["tag_order"] = json.dumps(vn.flags["mte"].split(","))
|
j2a["tag_order"] = json.dumps(vn.flags["mte"].split(","))
|
||||||
|
|
||||||
|
if self.args.css_browser:
|
||||||
|
j2a["css"] = self.args.css_browser
|
||||||
|
|
||||||
html = self.j2(tpl, **j2a)
|
html = self.j2(tpl, **j2a)
|
||||||
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
|
self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
|
||||||
return True
|
return True
|
||||||
|
@@ -3,7 +3,6 @@ from __future__ import print_function, unicode_literals
|
|||||||
|
|
||||||
import re
|
import re
|
||||||
import os
|
import os
|
||||||
import sys
|
|
||||||
import time
|
import time
|
||||||
import socket
|
import socket
|
||||||
|
|
||||||
@@ -34,7 +33,8 @@ class HttpConn(object):
|
|||||||
self.hsrv = hsrv
|
self.hsrv = hsrv
|
||||||
|
|
||||||
self.args = hsrv.args
|
self.args = hsrv.args
|
||||||
self.auth = hsrv.auth
|
self.asrv = hsrv.asrv
|
||||||
|
self.is_mp = hsrv.is_mp
|
||||||
self.cert_path = hsrv.cert_path
|
self.cert_path = hsrv.cert_path
|
||||||
|
|
||||||
enth = HAVE_PIL and not self.args.no_thumb
|
enth = HAVE_PIL and not self.args.no_thumb
|
||||||
@@ -42,6 +42,7 @@ class HttpConn(object):
|
|||||||
self.ico = Ico(self.args)
|
self.ico = Ico(self.args)
|
||||||
|
|
||||||
self.t0 = time.time()
|
self.t0 = time.time()
|
||||||
|
self.stopping = False
|
||||||
self.nbyte = 0
|
self.nbyte = 0
|
||||||
self.workload = 0
|
self.workload = 0
|
||||||
self.u2idx = None
|
self.u2idx = None
|
||||||
@@ -49,6 +50,14 @@ class HttpConn(object):
|
|||||||
self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None
|
self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None
|
||||||
self.set_rproxy()
|
self.set_rproxy()
|
||||||
|
|
||||||
|
def shutdown(self):
|
||||||
|
self.stopping = True
|
||||||
|
try:
|
||||||
|
self.s.shutdown(socket.SHUT_RDWR)
|
||||||
|
self.s.close()
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
def set_rproxy(self, ip=None):
|
def set_rproxy(self, ip=None):
|
||||||
if ip is None:
|
if ip is None:
|
||||||
color = 36
|
color = 36
|
||||||
@@ -70,7 +79,7 @@ class HttpConn(object):
|
|||||||
|
|
||||||
def get_u2idx(self):
|
def get_u2idx(self):
|
||||||
if not self.u2idx:
|
if not self.u2idx:
|
||||||
self.u2idx = U2idx(self.args, self.log_func)
|
self.u2idx = U2idx(self)
|
||||||
|
|
||||||
return self.u2idx
|
return self.u2idx
|
||||||
|
|
||||||
@@ -162,7 +171,7 @@ class HttpConn(object):
|
|||||||
self.log("client rejected our certificate (nice)")
|
self.log("client rejected our certificate (nice)")
|
||||||
|
|
||||||
elif "ALERT_CERTIFICATE_UNKNOWN" in em:
|
elif "ALERT_CERTIFICATE_UNKNOWN" in em:
|
||||||
# chrome-android keeps doing this
|
# android-chrome keeps doing this
|
||||||
pass
|
pass
|
||||||
|
|
||||||
else:
|
else:
|
||||||
@@ -173,7 +182,12 @@ class HttpConn(object):
|
|||||||
if not self.sr:
|
if not self.sr:
|
||||||
self.sr = Unrecv(self.s)
|
self.sr = Unrecv(self.s)
|
||||||
|
|
||||||
while True:
|
while not self.stopping:
|
||||||
|
if self.is_mp:
|
||||||
|
self.workload += 50
|
||||||
|
if self.workload >= 2 ** 31:
|
||||||
|
self.workload = 100
|
||||||
|
|
||||||
cli = HttpCli(self)
|
cli = HttpCli(self)
|
||||||
if not cli.run():
|
if not cli.run():
|
||||||
return
|
return
|
||||||
|
@@ -25,8 +25,8 @@ except ImportError:
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
from .__init__ import E, MACOS
|
from .__init__ import E, MACOS
|
||||||
from .httpconn import HttpConn
|
|
||||||
from .authsrv import AuthSrv
|
from .authsrv import AuthSrv
|
||||||
|
from .httpconn import HttpConn
|
||||||
|
|
||||||
|
|
||||||
class HttpSrv(object):
|
class HttpSrv(object):
|
||||||
@@ -35,10 +35,12 @@ class HttpSrv(object):
     relying on MpSrv for performance (HttpSrv is just plain threads)
     """

-    def __init__(self, broker):
+    def __init__(self, broker, is_mp=False):
         self.broker = broker
+        self.is_mp = is_mp
         self.args = broker.args
         self.log = broker.log
+        self.asrv = broker.asrv

         self.disconnect_func = None
         self.mutex = threading.Lock()
@@ -46,7 +48,6 @@ class HttpSrv(object):
         self.clients = {}
         self.workload = 0
         self.workload_thr_alive = False
-        self.auth = AuthSrv(self.args, self.log)

         env = jinja2.Environment()
         env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
@@ -66,7 +67,11 @@ class HttpSrv(object):
         if self.args.log_conn:
             self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30")

-        thr = threading.Thread(target=self.thr_client, args=(sck, addr))
+        thr = threading.Thread(
+            target=self.thr_client,
+            args=(sck, addr),
+            name="httpsrv-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]),
+        )
         thr.daemon = True
         thr.start()

@@ -75,7 +80,14 @@ class HttpSrv(object):
             return len(self.clients)

     def shutdown(self):
-        self.log("ok bye")
+        clients = list(self.clients.keys())
+        for cli in clients:
+            try:
+                cli.shutdown()
+            except:
+                pass
+
+        self.log("httpsrv-n", "ok bye")

     def thr_client(self, sck, addr):
         """thread managing one tcp client"""
@@ -84,32 +96,46 @@ class HttpSrv(object):
         cli = HttpConn(sck, addr, self)
         with self.mutex:
             self.clients[cli] = 0
-            self.workload += 50

-            if not self.workload_thr_alive:
-                self.workload_thr_alive = True
-                thr = threading.Thread(target=self.thr_workload)
-                thr.daemon = True
-                thr.start()
+            if self.is_mp:
+                self.workload += 50
+                if not self.workload_thr_alive:
+                    self.workload_thr_alive = True
+                    thr = threading.Thread(
+                        target=self.thr_workload, name="httpsrv-workload"
+                    )
+                    thr.daemon = True
+                    thr.start()

+        fno = sck.fileno()
         try:
             if self.args.log_conn:
                 self.log("%s %s" % addr, "|%sC-crun" % ("-" * 6,), c="1;30")

             cli.run()

+        except (OSError, socket.error) as ex:
+            if ex.errno not in [10038, 10054, 107, 57, 9]:
+                self.log(
+                    "%s %s" % addr,
+                    "run({}): {}".format(fno, ex),
+                    c=6,
+                )
+
         finally:
+            sck = cli.s
             if self.args.log_conn:
                 self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30")

             try:
+                fno = sck.fileno()
                 sck.shutdown(socket.SHUT_RDWR)
                 sck.close()
             except (OSError, socket.error) as ex:
                 if not MACOS:
                     self.log(
                         "%s %s" % addr,
-                        "shut({}): {}".format(sck.fileno(), ex),
+                        "shut({}): {}".format(fno, ex),
                         c="1;30",
                     )
                 if ex.errno not in [10038, 10054, 107, 57, 9]:
@@ -1,3 +1,6 @@
+# coding: utf-8
+from __future__ import print_function, unicode_literals
+
 import hashlib
 import colorsys

@@ -1,7 +1,6 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals

-import re
 import os
 import sys
 import json
@@ -9,7 +8,7 @@ import shutil
 import subprocess as sp

 from .__init__ import PY2, WINDOWS
-from .util import fsenc, fsdec, REKOBO_LKEY
+from .util import fsenc, fsdec, uncyg, REKOBO_LKEY

 if not PY2:
     unicode = str
@@ -17,6 +16,7 @@ if not PY2:

 def have_ff(cmd):
     if PY2:
+        print("# checking {}".format(cmd))
         cmd = (cmd + " -version").encode("ascii").split(b" ")
         try:
             sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE).communicate()
@@ -44,6 +44,9 @@ class MParser(object):
         while True:
             try:
                 bp = os.path.expanduser(args)
+                if WINDOWS:
+                    bp = uncyg(bp)
+
                 if os.path.exists(bp):
                     self.bin = bp
                     return
@@ -112,6 +115,19 @@ def parse_ffprobe(txt):
     ret = {}  # processed
     md = {}  # raw tags

+    is_audio = fmt.get("format_name") in ["mp3", "ogg", "flac", "wav"]
+    if fmt.get("filename", "").split(".")[-1].lower() in ["m4a", "aac"]:
+        is_audio = True
+
+    # if audio file, ensure audio stream appears first
+    if (
+        is_audio
+        and len(streams) > 2
+        and streams[1].get("codec_type") != "audio"
+        and streams[2].get("codec_type") == "audio"
+    ):
+        streams = [fmt, streams[2], streams[1]] + streams[3:]
+
     have = {}
     for strm in streams:
         typ = strm.get("codec_type")
@@ -131,9 +147,7 @@ def parse_ffprobe(txt):
         ]

         if typ == "video":
-            if strm.get("DISPOSITION:attached_pic") == "1" or fmt.get(
-                "format_name"
-            ) in ["mp3", "ogg", "flac"]:
+            if strm.get("DISPOSITION:attached_pic") == "1" or is_audio:
                 continue

         kvm = [
@@ -177,7 +191,7 @@ def parse_ffprobe(txt):

         k = k[4:].strip()
         v = v.strip()
-        if k and v:
+        if k and v and k not in md:
             md[k] = [v]

     for k in [".q", ".vq", ".aq"]:
@@ -1,3 +1,6 @@
+# coding: utf-8
+from __future__ import print_function, unicode_literals
+
 import os
 import tarfile
 import threading
@@ -42,7 +45,7 @@ class StreamTar(object):
         fmt = tarfile.GNU_FORMAT
         self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt)

-        w = threading.Thread(target=self._gen)
+        w = threading.Thread(target=self._gen, name="star-gen")
         w.daemon = True
         w.start()

@@ -1,3 +1,6 @@
+# coding: utf-8
+from __future__ import print_function, unicode_literals
+
 import os
 import time
 import tempfile
@@ -37,14 +37,13 @@ class SvcHub(object):

         self.log = self._log_disabled if args.q else self._log_enabled

-        # jank goes here
-        auth = AuthSrv(self.args, self.log, False)
-        if args.ls:
-            auth.dbg_ls()
-
         # initiate all services to manage
+        self.asrv = AuthSrv(self.args, self.log, False)
+        if args.ls:
+            self.asrv.dbg_ls()
+
         self.tcpsrv = TcpSrv(self)
-        self.up2k = Up2k(self, auth.vfs.all_vols)
+        self.up2k = Up2k(self)

         self.thumbsrv = None
         if not args.no_thumb:
@@ -54,7 +53,7 @@ class SvcHub(object):
                     msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old"
                     self.log("thumb", msg, c=3)

-                self.thumbsrv = ThumbSrv(self, auth.vfs.all_vols)
+                self.thumbsrv = ThumbSrv(self)
             else:
                 msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n"
                 self.log(
@@ -71,7 +70,7 @@ class SvcHub(object):
         self.broker = Broker(self)

     def run(self):
-        thr = threading.Thread(target=self.tcpsrv.run)
+        thr = threading.Thread(target=self.tcpsrv.run, name="svchub-main")
         thr.daemon = True
         thr.start()

@@ -95,9 +94,11 @@ class SvcHub(object):
                         break

                     if n == 3:
-                        print("waiting for thumbsrv...")
+                        print("waiting for thumbsrv (10sec)...")

-            print("nailed it")
+            print("nailed it", end="")
+        finally:
+            print("\033[0m")

     def _log_disabled(self, src, msg, c=0):
         pass
@@ -1,3 +1,6 @@
+# coding: utf-8
+from __future__ import print_function, unicode_literals
+
 import os
 import time
 import zlib
@@ -21,6 +21,7 @@ class TcpSrv(object):
         self.log = hub.log

         self.num_clients = Counter()
+        self.stopping = False

         ip = "127.0.0.1"
         eps = {ip: "local only"}
@@ -67,7 +68,7 @@ class TcpSrv(object):
             ip, port = srv.getsockname()
             self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port))

-        while True:
+        while not self.stopping:
             if self.args.log_conn:
                 self.log("tcpsrv", "|%sC-ncli" % ("-" * 1,), c="1;30")

@@ -78,8 +79,18 @@ class TcpSrv(object):
             if self.args.log_conn:
                 self.log("tcpsrv", "|%sC-acc1" % ("-" * 2,), c="1;30")

-            ready, _, _ = select.select(self.srv, [], [])
+            try:
+                # macos throws bad-fd
+                ready, _, _ = select.select(self.srv, [], [])
+            except:
+                ready = []
+                if not self.stopping:
+                    raise
+
             for srv in ready:
+                if self.stopping:
+                    break
+
                 sck, addr = srv.accept()
                 sip, sport = srv.getsockname()
                 if self.args.log_conn:
@@ -95,6 +106,13 @@ class TcpSrv(object):
                 self.hub.broker.put(False, "httpconn", sck, addr)

     def shutdown(self):
+        self.stopping = True
+        try:
+            for srv in self.srv:
+                srv.close()
+        except:
+            pass
+
         self.log("tcpsrv", "ok bye")

     def detect_interfaces(self, listen_ips):
@@ -1,5 +1,7 @@
+# coding: utf-8
+from __future__ import print_function, unicode_literals
+
 import os
-import time

 from .util import Cooldown
 from .th_srv import thumb_path, THUMBABLE, FMT_FF
@@ -9,6 +11,7 @@ class ThumbCli(object):
     def __init__(self, broker):
         self.broker = broker
         self.args = broker.args
+        self.asrv = broker.asrv

         # cache on both sides for less broker spam
         self.cooldown = Cooldown(self.args.th_poke)
@@ -18,16 +21,19 @@ class ThumbCli(object):
         if ext not in THUMBABLE:
             return None

-        if self.args.no_vthumb and ext in FMT_FF:
+        is_vid = ext in FMT_FF
+        if is_vid and self.args.no_vthumb:
             return None

         if fmt == "j" and self.args.th_no_jpg:
             fmt = "w"

-        if fmt == "w" and self.args.th_no_webp:
-            fmt = "j"
+        if fmt == "w":
+            if self.args.th_no_webp or (is_vid and self.args.th_ff_jpg):
+                fmt = "j"

-        tpath = thumb_path(ptop, rem, mtime, fmt)
+        histpath = self.asrv.vfs.histtab[ptop]
+        tpath = thumb_path(histpath, rem, mtime, fmt)
         ret = None
         try:
             st = os.stat(tpath)
@@ -1,5 +1,7 @@
+# coding: utf-8
+from __future__ import print_function, unicode_literals
+
 import os
-import sys
 import time
 import shutil
 import base64
@@ -8,7 +10,7 @@ import threading
 import subprocess as sp

 from .__init__ import PY2
-from .util import fsenc, mchkcmd, Queue, Cooldown, BytesIO
+from .util import fsenc, runcmd, Queue, Cooldown, BytesIO, min_ex
 from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe


@@ -51,7 +53,7 @@ except:
 # https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
 # ffmpeg -formats
 FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
-FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
+FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"

 if HAVE_HEIF:
     FMT_PIL += " heif heifs heic heics"
@@ -71,7 +73,7 @@ if HAVE_FFMPEG and HAVE_FFPROBE:
     THUMBABLE.update(FMT_FF)


-def thumb_path(ptop, rem, mtime, fmt):
+def thumb_path(histpath, rem, mtime, fmt):
     # base16 = 16 = 256
     # b64-lc = 38 = 1444
     # base64 = 64 = 4096
@@ -82,26 +84,25 @@ def thumb_path(ptop, rem, mtime, fmt):
         fn = rem

     if rd:
-        h = hashlib.sha512(fsenc(rd)).digest()[:24]
+        h = hashlib.sha512(fsenc(rd)).digest()
         b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
         rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
     else:
         rd = "top"

     # could keep original filenames but this is safer re pathlen
-    h = hashlib.sha512(fsenc(fn)).digest()[:24]
+    h = hashlib.sha512(fsenc(fn)).digest()
     fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]

-    return "{}/.hist/th/{}/{}.{:x}.{}".format(
-        ptop, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
+    return "{}/th/{}/{}.{:x}.{}".format(
+        histpath, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg"
     )


 class ThumbSrv(object):
-    def __init__(self, hub, vols):
+    def __init__(self, hub):
         self.hub = hub
-        self.vols = [v.realpath for v in vols.values()]
-
+        self.asrv = hub.asrv
         self.args = hub.args
         self.log_func = hub.log

@@ -114,8 +115,10 @@ class ThumbSrv(object):
         self.stopping = False
         self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
         self.q = Queue(self.nthr * 4)
-        for _ in range(self.nthr):
-            t = threading.Thread(target=self.worker)
+        for n in range(self.nthr):
+            t = threading.Thread(
+                target=self.worker, name="thumb-{}-{}".format(n, self.nthr)
+            )
             t.daemon = True
             t.start()

@@ -131,7 +134,7 @@ class ThumbSrv(object):
             msg += ", ".join(missing)
             self.log(msg, c=3)

-        t = threading.Thread(target=self.cleaner)
+        t = threading.Thread(target=self.cleaner, name="thumb-cleaner")
         t.daemon = True
         t.start()

@@ -148,9 +151,11 @@ class ThumbSrv(object):
         return not self.nthr

     def get(self, ptop, rem, mtime, fmt):
-        tpath = thumb_path(ptop, rem, mtime, fmt)
+        histpath = self.asrv.vfs.histtab[ptop]
+        tpath = thumb_path(histpath, rem, mtime, fmt)
         abspath = os.path.join(ptop, rem)
-        cond = threading.Condition()
+        cond = threading.Condition(self.mutex)
+        do_conv = False
         with self.mutex:
             try:
                 self.busy[tpath].append(cond)
@@ -168,8 +173,11 @@ class ThumbSrv(object):
                     f.write(fsenc(os.path.dirname(abspath)))

                 self.busy[tpath] = [cond]
-                self.q.put([abspath, tpath])
-                self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)
+                do_conv = True
+
+        if do_conv:
+            self.q.put([abspath, tpath])
+            self.log("conv {} \033[0m{}".format(tpath, abspath), c=6)

         while not self.stopping:
             with self.mutex:
@@ -177,7 +185,7 @@ class ThumbSrv(object):
                     break

             with cond:
-                cond.wait()
+                cond.wait(3)

         try:
             st = os.stat(tpath)
@@ -206,9 +214,9 @@ class ThumbSrv(object):
             if fun:
                 try:
                     fun(abspath, tpath)
-                except Exception as ex:
-                    msg = "{} failed on {}\n {!r}"
-                    self.log(msg.format(fun.__name__, abspath, ex), 3)
+                except:
+                    msg = "{} failed on {}\n{}"
+                    self.log(msg.format(fun.__name__, abspath, min_ex()), 3)
                     with open(tpath, "wb") as _:
                         pass

@@ -240,8 +248,8 @@ class ThumbSrv(object):
         except:
             im.thumbnail(self.res)

-        if im.mode not in ("RGB", "L"):
-            im = im.convert("RGB")
+        fmts = ["RGB", "L"]
+        args = {"quality": 40}

         if tpath.endswith(".webp"):
             # quality 80 = pillow-default
@@ -249,15 +257,27 @@ class ThumbSrv(object):
             # method 0 = pillow-default, fast
             # method 4 = ffmpeg-default
             # method 6 = max, slow
-            im.save(tpath, quality=40, method=6)
+            fmts += ["RGBA", "LA"]
+            args["method"] = 6
         else:
-            im.save(tpath, quality=40)  # default=75
+            pass  # default q = 75
+
+        if im.mode not in fmts:
+            print("conv {}".format(im.mode))
+            im = im.convert("RGB")
+
+        im.save(tpath, quality=40, method=6)

     def conv_ffmpeg(self, abspath, tpath):
         ret, _ = ffprobe(abspath)

-        dur = ret[".dur"][1] if ".dur" in ret else 4
-        seek = "{:.0f}".format(dur / 3)
+        ext = abspath.rsplit(".")[-1]
+        if ext in ["h264", "h265"]:
+            seek = []
+        else:
+            dur = ret[".dur"][1] if ".dur" in ret else 4
+            seek = "{:.0f}".format(dur / 3)
+            seek = [b"-ss", seek.encode("utf-8")]

         scale = "scale={0}:{1}:force_original_aspect_ratio="
         if self.args.th_no_crop:
@@ -266,19 +286,20 @@ class ThumbSrv(object):
             scale += "increase,crop={0}:{1},setsar=1:1"

         scale = scale.format(*list(self.res)).encode("utf-8")
+        # fmt: off
         cmd = [
             b"ffmpeg",
             b"-nostdin",
-            b"-hide_banner",
-            b"-ss",
-            seek,
-            b"-i",
-            fsenc(abspath),
-            b"-vf",
-            scale,
-            b"-vframes",
-            b"1",
+            b"-v", b"error",
+            b"-hide_banner"
         ]
+        cmd += seek
+        cmd += [
+            b"-i", fsenc(abspath),
+            b"-vf", scale,
+            b"-vframes", b"1",
+        ]
+        # fmt: on

         if tpath.endswith(".jpg"):
             cmd += [
@@ -295,7 +316,11 @@ class ThumbSrv(object):

         cmd += [fsenc(tpath)]

-        mchkcmd(cmd)
+        ret, sout, serr = runcmd(*cmd)
+        if ret != 0:
+            msg = ["ff: {}".format(x) for x in serr.split("\n")]
+            self.log("FFmpeg failed:\n" + "\n".join(msg), c="1;30")
+            raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))

     def poke(self, tdir):
         if not self.poke_cd.poke(tdir):
@@ -314,26 +339,32 @@ class ThumbSrv(object):
         interval = self.args.th_clean
         while True:
             time.sleep(interval)
-            for vol in self.vols:
-                vol += "/.hist/th"
-                self.log("\033[Jcln {}/\033[A".format(vol))
-                self.clean(vol)
+            ndirs = 0
+            for vol, histpath in self.asrv.vfs.histtab.items():
+                if histpath.startswith(vol):
+                    self.log("\033[Jcln {}/\033[A".format(histpath))
+                else:
+                    self.log("\033[Jcln {} ({})/\033[A".format(histpath, vol))

-            self.log("\033[Jcln ok")
+                ndirs += self.clean(histpath)

-    def clean(self, vol):
-        # self.log("cln {}".format(vol))
+            self.log("\033[Jcln ok; rm {} dirs".format(ndirs))
+
+    def clean(self, histpath):
+        thumbpath = os.path.join(histpath, "th")
+        # self.log("cln {}".format(thumbpath))
         maxage = self.args.th_maxage
         now = time.time()
         prev_b64 = None
         prev_fp = None
         try:
-            ents = os.listdir(vol)
+            ents = os.listdir(thumbpath)
         except:
-            return
+            return 0

+        ndirs = 0
         for f in sorted(ents):
-            fp = os.path.join(vol, f)
+            fp = os.path.join(thumbpath, f)
             cmp = fp.lower().replace("\\", "/")

             # "top" or b64 prefix/full (a folder)
@@ -348,10 +379,11 @@ class ThumbSrv(object):
                         break

                 if safe:
+                    ndirs += 1
                     self.log("rm -rf [{}]".format(fp))
                     shutil.rmtree(fp, ignore_errors=True)
                 else:
-                    self.clean(fp)
+                    ndirs += self.clean(fp)
                 continue

             # thumb file
@@ -373,3 +405,5 @@ class ThumbSrv(object):

             prev_b64 = b64
             prev_fp = fp
+
+        return ndirs
@@ -7,7 +7,7 @@ import time
 import threading
 from datetime import datetime

-from .util import u8safe, s3dec, html_escape, Pebkac
+from .util import s3dec, Pebkac, min_ex
 from .up2k import up2k_wark_from_hashlist


@@ -19,13 +19,14 @@ except:


 class U2idx(object):
-    def __init__(self, args, log_func):
-        self.args = args
-        self.log_func = log_func
-        self.timeout = args.srch_time
+    def __init__(self, conn):
+        self.log_func = conn.log_func
+        self.asrv = conn.asrv
+        self.args = conn.args
+        self.timeout = self.args.srch_time

         if not HAVE_SQLITE3:
-            self.log("could not load sqlite3; searchign wqill be disabled")
+            self.log("your python does not have sqlite3; searching will be disabled")
             return

         self.cur = {}
@@ -52,18 +53,23 @@ class U2idx(object):

         try:
             return self.run_query(vols, uq, uv)[0]
-        except Exception as ex:
-            raise Pebkac(500, repr(ex))
+        except:
+            raise Pebkac(500, min_ex())

     def get_cur(self, ptop):
+        if not HAVE_SQLITE3:
+            return None
+
         cur = self.cur.get(ptop)
         if cur:
             return cur

-        cur = _open(ptop)
-        if not cur:
+        histpath = self.asrv.vfs.histtab[ptop]
+        db_path = os.path.join(histpath, "up2k.db")
+        if not os.path.exists(db_path):
             return None

+        cur = sqlite3.connect(db_path, 2).cursor()
         self.cur[ptop] = cur
         return cur

@@ -192,6 +198,7 @@ class U2idx(object):
                 self.active_id,
                 done_flag,
             ),
+            name="u2idx-terminator",
         )
         thr.daemon = True
         thr.start()
@@ -241,6 +248,7 @@ class U2idx(object):
                 hit["tags"] = tags

             ret.extend(sret)
+            # print("[{}] {}".format(ptop, sret))

         done_flag.append(True)
         self.active_id = None
@@ -261,9 +269,3 @@ class U2idx(object):

         if identifier == self.active_id:
             self.active_cur.connection.interrupt()
-
-
-def _open(ptop):
-    db_path = os.path.join(ptop, ".hist", "up2k.db")
-    if os.path.exists(db_path):
-        return sqlite3.connect(db_path).cursor()
@@ -16,7 +16,7 @@ import traceback
 import subprocess as sp
 from copy import deepcopy

-from .__init__ import WINDOWS, ANYWIN
+from .__init__ import WINDOWS, ANYWIN, PY2
 from .util import (
     Pebkac,
     Queue,
@@ -30,6 +30,7 @@ from .util import (
     s3dec,
     statdir,
     s2hms,
+    min_ex,
 )
 from .mtag import MTag, MParser

@@ -39,6 +40,8 @@ try:
 except:
     HAVE_SQLITE3 = False

+DB_VER = 4
+

 class Up2k(object):
     """
@@ -48,8 +51,9 @@ class Up2k(object):
     * ~/.config flatfiles for active jobs
     """

-    def __init__(self, hub, all_vols):
+    def __init__(self, hub):
         self.hub = hub
+        self.asrv = hub.asrv
         self.args = hub.args
         self.log_func = hub.log

@@ -60,6 +64,8 @@ class Up2k(object):
         self.mutex = threading.Lock()
         self.hashq = Queue()
         self.tagq = Queue()
+        self.n_hashq = 0
+        self.n_tagq = 0
         self.volstate = {}
         self.registry = {}
         self.entags = {}
@@ -83,56 +89,84 @@ class Up2k(object):
         if ANYWIN:
             # usually fails to set lastmod too quickly
             self.lastmod_q = Queue()
-            thr = threading.Thread(target=self._lastmodder)
+            thr = threading.Thread(target=self._lastmodder, name="up2k-lastmod")
             thr.daemon = True
             thr.start()

         # static
-        self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
+        self.r_hash = re.compile("^[0-9a-zA-Z_-]{44}$")

         if not HAVE_SQLITE3:
             self.log("could not initialize sqlite3, will use in-memory registry only")

         if self.args.no_fastboot:
-            self.deferred_init(all_vols)
+            self.deferred_init()
         else:
-            t = threading.Thread(target=self.deferred_init, args=(all_vols,))
+            t = threading.Thread(
+                target=self.deferred_init,
+                name="up2k-deferred-init",
+            )
             t.daemon = True
             t.start()

-    def deferred_init(self, all_vols):
+    def deferred_init(self):
+        all_vols = self.asrv.vfs.all_vols
         have_e2d = self.init_indexes(all_vols)

         if have_e2d:
-            thr = threading.Thread(target=self._snapshot)
+            thr = threading.Thread(target=self._snapshot, name="up2k-snapshot")
             thr.daemon = True
             thr.start()

-            thr = threading.Thread(target=self._hasher)
+            thr = threading.Thread(target=self._hasher, name="up2k-hasher")
             thr.daemon = True
             thr.start()

             if self.mtag:
-                thr = threading.Thread(target=self._tagger)
+                thr = threading.Thread(target=self._tagger, name="up2k-tagger")
                 thr.daemon = True
                 thr.start()

-                thr = threading.Thread(target=self._run_all_mtp)
+                thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-init")
                 thr.daemon = True
                 thr.start()

     def log(self, msg, c=0):
         self.log_func("up2k", msg + "\033[K", c)

-    def get_volstate(self):
-        return json.dumps(self.volstate, indent=4)
+    def get_state(self):
+        mtpq = 0
+        q = "select count(w) from mt where k = 't:mtp'"
+        got_lock = False if PY2 else self.mutex.acquire(timeout=0.5)
+        if got_lock:
+            for cur in self.cur.values():
+                try:
+                    mtpq += cur.execute(q).fetchone()[0]
+                except:
+                    pass
+            self.mutex.release()
+        else:
+            mtpq = "?"
+
+        ret = {
+            "volstate": self.volstate,
+            "scanning": hasattr(self, "pp"),
+            "hashq": self.n_hashq,
+            "tagq": self.n_tagq,
+            "mtpq": mtpq,
+        }
+        return json.dumps(ret, indent=4)

     def rescan(self, all_vols, scan_vols):
         if hasattr(self, "pp"):
             return "cannot initiate; scan is already in progress"

         args = (all_vols, scan_vols)
-        t = threading.Thread(target=self.init_indexes, args=args)
+        t = threading.Thread(
+            target=self.init_indexes,
+            args=args,
+            name="up2k-rescan-{}".format(scan_vols[0]),
+        )
         t.daemon = True
         t.start()
         return None
@@ -178,23 +212,27 @@ class Up2k(object):
             self.log(msg, c=3)

         live_vols = []
-        for vol in vols:
-            try:
-                os.listdir(vol.realpath)
-            except:
-                self.volstate[vol.vpath] = "OFFLINE (cannot access folder)"
-                self.log("cannot access " + vol.realpath, c=1)
-                continue
+        with self.mutex:
+            # only need to protect register_vpath but all in one go feels right
+            for vol in vols:
+                try:
+                    os.listdir(vol.realpath)
+                except:
+                    self.volstate[vol.vpath] = "OFFLINE (cannot access folder)"
+                    self.log("cannot access " + vol.realpath, c=1)
+                    continue

-            if not self.register_vpath(vol.realpath, vol.flags):
-                # self.log("db not enabled for {}".format(m, vol.realpath))
-                continue
+                if scan_vols and vol.vpath not in scan_vols:
+                    continue

-            if vol.vpath in scan_vols or not scan_vols:
+                if not self.register_vpath(vol.realpath, vol.flags):
+                    # self.log("db not enable for {}".format(m, vol.realpath))
+                    continue
+
                 live_vols.append(vol)

                 if vol.vpath not in self.volstate:
                     self.volstate[vol.vpath] = "OFFLINE (pending initialization)"

         vols = live_vols
         need_vac = {}
@@ -271,7 +309,7 @@ class Up2k(object):
         if self.mtag:
             m = "online (running mtp)"
             if scan_vols:
-                thr = threading.Thread(target=self._run_all_mtp)
+                thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-scan")
                 thr.daemon = True
         else:
             del self.pp
@@ -286,9 +324,13 @@ class Up2k(object):
         return have_e2d

     def register_vpath(self, ptop, flags):
-        db_path = os.path.join(ptop, ".hist", "up2k.db")
+        histpath = self.asrv.vfs.histtab[ptop]
+        db_path = os.path.join(histpath, "up2k.db")
         if ptop in self.registry:
-            return [self.cur[ptop], db_path]
+            try:
+                return [self.cur[ptop], db_path]
+            except:
+                return None

         _, flags = self._expr_idx_filter(flags)

@@ -303,7 +345,7 @@ class Up2k(object):
             self.log(" ".join(sorted(a)) + "\033[0m")

         reg = {}
-        path = os.path.join(ptop, ".hist", "up2k.snap")
+        path = os.path.join(histpath, "up2k.snap")
         if "e2d" in flags and os.path.exists(path):
             with gzip.GzipFile(path, "rb") as f:
                 j = f.read().decode("utf-8")
@@ -327,7 +369,7 @@ class Up2k(object):
             return None

         try:
-            os.mkdir(os.path.join(ptop, ".hist"))
+            os.makedirs(histpath)
         except:
             pass

@@ -344,6 +386,7 @@ class Up2k(object):
     def _build_file_index(self, vol, all_vols):
         do_vac = False
         top = vol.realpath
+        nohash = "dhash" in vol.flags
         with self.mutex:
             cur, _ = self.register_vpath(top, vol.flags)

@@ -358,7 +401,7 @@ class Up2k(object):
             if WINDOWS:
                 excl = [x.replace("/", "\\") for x in excl]

-            n_add = self._build_dir(dbw, top, set(excl), top)
+            n_add = self._build_dir(dbw, top, set(excl), top, nohash)
             n_rm = self._drop_lost(dbw[0], top)
             if dbw[1]:
                 self.log("commit {} new files".format(dbw[1]))
@@ -366,23 +409,28 @@ class Up2k(object):

         return True, n_add or n_rm or do_vac

-    def _build_dir(self, dbw, top, excl, cdir):
+    def _build_dir(self, dbw, top, excl, cdir, nohash):
         self.pp.msg = "a{} {}".format(self.pp.n, cdir)
-        histdir = os.path.join(top, ".hist")
+        histpath = self.asrv.vfs.histtab[top]
         ret = 0
-        for iname, inf in statdir(self.log, not self.args.no_scandir, False, cdir):
+        g = statdir(self.log, not self.args.no_scandir, False, cdir)
+        for iname, inf in sorted(g):
             abspath = os.path.join(cdir, iname)
             lmod = int(inf.st_mtime)
+            sz = inf.st_size
             if stat.S_ISDIR(inf.st_mode):
-                if abspath in excl or abspath == histdir:
+                if abspath in excl or abspath == histpath:
                     continue
                 # self.log(" dir: {}".format(abspath))
-                ret += self._build_dir(dbw, top, excl, abspath)
+                ret += self._build_dir(dbw, top, excl, abspath, nohash)
             else:
                 # self.log("file: {}".format(abspath))
-                rp = abspath[len(top) :].replace("\\", "/").strip("/")
+                rp = abspath[len(top) + 1 :]
+                if WINDOWS:
+                    rp = rp.replace("\\", "/").strip("/")
+
                 rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp]
-                sql = "select * from up where rd = ? and fn = ?"
+                sql = "select w, mt, sz from up where rd = ? and fn = ?"
                 try:
                     c = dbw[0].execute(sql, (rd, fn))
                 except:
@@ -391,18 +439,18 @@ class Up2k(object):
                 in_db = list(c.fetchall())
                 if in_db:
                     self.pp.n -= 1
-                    _, dts, dsz, _, _ = in_db[0]
+                    dw, dts, dsz = in_db[0]
                     if len(in_db) > 1:
                         m = "WARN: multiple entries: [{}] => [{}] |{}|\n{}"
                         rep_db = "\n".join([repr(x) for x in in_db])
                         self.log(m.format(top, rp, len(in_db), rep_db))
                         dts = -1

-                    if dts == lmod and dsz == inf.st_size:
+                    if dts == lmod and dsz == sz and (nohash or dw[0] != "#"):
                         continue

                     m = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
-                        top, rp, dts, lmod, dsz, inf.st_size
+                        top, rp, dts, lmod, dsz, sz
                     )
                     self.log(m)
                     self.db_rm(dbw[0], rd, fn)
@@ -411,17 +459,22 @@ class Up2k(object):
                     in_db = None

                 self.pp.msg = "a{} {}".format(self.pp.n, abspath)
-                if inf.st_size > 1024 * 1024:
-                    self.log("file: {}".format(abspath))

-                try:
-                    hashes = self._hashlist_from_file(abspath)
-                except Exception as ex:
-                    self.log("hash: {} @ [{}]".format(repr(ex), abspath))
-                    continue
+                if nohash:
+                    wark = up2k_wark_from_metadata(self.salt, sz, lmod, rd, fn)
+                else:
+                    if sz > 1024 * 1024:
+                        self.log("file: {}".format(abspath))
+
+                    try:
+                        hashes = self._hashlist_from_file(abspath)
+                    except Exception as ex:
+                        self.log("hash: {} @ [{}]".format(repr(ex), abspath))
+                        continue

-                wark = up2k_wark_from_hashlist(self.salt, inf.st_size, hashes)
-                self.db_add(dbw[0], wark, rd, fn, lmod, inf.st_size)
+                    wark = up2k_wark_from_hashlist(self.salt, sz, hashes)
+
+                self.db_add(dbw[0], wark, rd, fn, lmod, sz)
                 dbw[1] += 1
                 ret += 1
                 td = time.time() - dbw[2]
@@ -600,7 +653,7 @@ class Up2k(object):
             try:
                 parser = MParser(parser)
             except:
-                self.log("invalid argument: " + parser, 1)
+                self.log("invalid argument (could not find program): " + parser, 1)
                 return

             for tag in entags:
@@ -753,7 +806,9 @@ class Up2k(object):

         mpool = Queue(nw)
         for _ in range(nw):
-            thr = threading.Thread(target=self._tag_thr, args=(mpool,))
+            thr = threading.Thread(
+                target=self._tag_thr, args=(mpool,), name="up2k-mpool"
+            )
             thr.daemon = True
             thr.start()

@@ -838,59 +893,31 @@ class Up2k(object):
         if not existed and ver is None:
             return self._create_db(db_path, cur)

-        orig_ver = ver
-        if not ver or ver < 3:
-            bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
-            db = cur.connection
-            cur.close()
-            db.close()
-            msg = "creating new DB (old is bad); backup: {}"
-            if ver:
-                msg = "creating backup before upgrade: {}"
-
-            self.log(msg.format(bak))
-            shutil.copy2(db_path, bak)
-            cur = self._orz(db_path)
-
-        if ver == 1:
-            cur = self._upgrade_v1(cur, db_path)
-            if cur:
-                ver = 2
-
-        if ver == 2:
-            cur = self._create_v3(cur)
-            ver = self._read_ver(cur) if cur else None
-
-        if ver == 3:
-            if orig_ver != ver:
-                cur.connection.commit()
-                cur.execute("vacuum")
-                cur.connection.commit()
-
+        if ver == DB_VER:
             try:
                 nfiles = next(cur.execute("select count(w) from up"))[0]
                 self.log("OK: {} |{}|".format(db_path, nfiles))
                 return cur
-            except Exception as ex:
-                self.log("WARN: could not list files, DB corrupt?\n " + repr(ex))
+            except:
+                self.log("WARN: could not list files; DB corrupt?\n" + min_ex())

-        if cur:
-            db = cur.connection
-            cur.close()
-            db.close()
+        if (ver or 0) > DB_VER:
+            m = "database is version {}, this copyparty only supports versions <= {}"
+            raise Exception(m.format(ver, DB_VER))
+
+        bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
+        db = cur.connection
+        cur.close()
+        db.close()
+        msg = "creating new DB (old is bad); backup: {}"
+        if ver:
+            msg = "creating new DB (too old to upgrade); backup: {}"
+
+        self.log(msg.format(bak))
+        os.rename(fsenc(db_path), fsenc(bak))

         return self._create_db(db_path, None)

-    def _create_db(self, db_path, cur):
-        if not cur:
-            cur = self._orz(db_path)
-
-        self._create_v2(cur)
-        self._create_v3(cur)
-        cur.connection.commit()
-        self.log("created DB at {}".format(db_path))
-        return cur
-
     def _read_ver(self, cur):
         for tab in ["ki", "kv"]:
             try:
@@ -902,69 +929,43 @@ class Up2k(object):
                 if rows:
                     return int(rows[0][0])

-    def _create_v2(self, cur):
-        for cmd in [
-            r"create table up (w text, mt int, sz int, rd text, fn text)",
-            r"create index up_rd on up(rd)",
-            r"create index up_fn on up(fn)",
-        ]:
-            cur.execute(cmd)
-        return cur
-
-    def _create_v3(self, cur):
+    def _create_db(self, db_path, cur):
         """
         collision in 2^(n/2) files where n = bits (6 bits/ch)
-        10*6/2 = 2^30 = 1'073'741'824, 24.1mb idx
+        10*6/2 = 2^30 = 1'073'741'824, 24.1mb idx  1<<(3*10)
         12*6/2 = 2^36 = 68'719'476'736, 24.8mb idx
         16*6/2 = 2^48 = 281'474'976'710'656, 26.1mb idx
         """
-        for c, ks in [["drop table k", "isv"], ["drop index up_", "w"]]:
-            for k in ks:
-                try:
-                    cur.execute(c + k)
-                except:
-                    pass
+        if not cur:
+            cur = self._orz(db_path)

         idx = r"create index up_w on up(substr(w,1,16))"
         if self.no_expr_idx:
             idx = r"create index up_w on up(w)"

         for cmd in [
+            r"create table up (w text, mt int, sz int, rd text, fn text)",
+            r"create index up_rd on up(rd)",
+            r"create index up_fn on up(fn)",
             idx,
             r"create table mt (w text, k text, v int)",
             r"create index mt_w on mt(w)",
             r"create index mt_k on mt(k)",
             r"create index mt_v on mt(v)",
             r"create table kv (k text, v int)",
-            r"insert into kv values ('sver', 3)",
+            r"insert into kv values ('sver', {})".format(DB_VER),
         ]:
             cur.execute(cmd)

+        cur.connection.commit()
+        self.log("created DB at {}".format(db_path))
         return cur

-    def _upgrade_v1(self, odb, db_path):
-        npath = db_path + ".next"
-        if os.path.exists(npath):
-            os.unlink(npath)
-
-        ndb = self._orz(npath)
-        self._create_v2(ndb)
-
-        c = odb.execute("select * from up")
-        for wark, ts, sz, rp in c:
-            rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp]
-            v = (wark, ts, sz, rd, fn)
-            ndb.execute("insert into up values (?,?,?,?,?)", v)
-
-        ndb.connection.commit()
-        ndb.connection.close()
-        odb.connection.close()
-        atomic_move(npath, db_path)
-        return self._orz(db_path)
-
     def handle_json(self, cj):
-        if not self.register_vpath(cj["ptop"], cj["vcfg"]):
-            if cj["ptop"] not in self.registry:
-                raise Pebkac(410, "location unavailable")
+        with self.mutex:
+            if not self.register_vpath(cj["ptop"], cj["vcfg"]):
+                if cj["ptop"] not in self.registry:
+                    raise Pebkac(410, "location unavailable")

         cj["name"] = sanitize_fn(cj["name"], bad=[".prologue.html", ".epilogue.html"])
         cj["poke"] = time.time()
@@ -972,7 +973,7 @@ class Up2k(object):
         now = time.time()
         job = None
         with self.mutex:
-            cur = self.cur.get(cj["ptop"], None)
+            cur = self.cur.get(cj["ptop"])
             reg = self.registry[cj["ptop"]]
             if cur:
                 if self.no_expr_idx:
@@ -1130,7 +1131,7 @@ class Up2k(object):

     def handle_chunk(self, ptop, wark, chash):
         with self.mutex:
-            job = self.registry[ptop].get(wark, None)
+            job = self.registry[ptop].get(wark)
             if not job:
                 known = " ".join([x for x in self.registry[ptop].keys()])
                 self.log("unknown wark [{}], known: {}".format(wark, known))
@@ -1195,7 +1196,7 @@ class Up2k(object):
         return ret, dst

     def idx_wark(self, ptop, wark, rd, fn, lmod, sz):
-        cur = self.cur.get(ptop, None)
+        cur = self.cur.get(ptop)
         if not cur:
             return False

@@ -1205,6 +1206,7 @@ class Up2k(object):

         if "e2t" in self.flags[ptop]:
             self.tagq.put([ptop, wark, rd, fn])
+            self.n_tagq += 1

         return True

@@ -1265,9 +1267,9 @@ class Up2k(object):
                 hashobj.update(buf)
                 rem -= len(buf)

-            digest = hashobj.digest()[:32]
+            digest = hashobj.digest()[:33]
             digest = base64.urlsafe_b64encode(digest)
-            ret.append(digest.decode("utf-8").rstrip("="))
+            ret.append(digest.decode("utf-8"))

         return ret

@@ -1330,11 +1332,12 @@ class Up2k(object):
             for k, reg in self.registry.items():
                 self._snap_reg(prev, k, reg, discard_interval)

-    def _snap_reg(self, prev, k, reg, discard_interval):
+    def _snap_reg(self, prev, ptop, reg, discard_interval):
         now = time.time()
+        histpath = self.asrv.vfs.histtab[ptop]
         rm = [x for x in reg.values() if now - x["poke"] > discard_interval]
         if rm:
-            m = "dropping {} abandoned uploads in {}".format(len(rm), k)
+            m = "dropping {} abandoned uploads in {}".format(len(rm), ptop)
             vis = [self._vis_job_progress(x) for x in rm]
             self.log("\n".join([m] + vis))
             for job in rm:
@@ -1352,21 +1355,21 @@ class Up2k(object):
             except:
                 pass

-        path = os.path.join(k, ".hist", "up2k.snap")
+        path = os.path.join(histpath, "up2k.snap")
         if not reg:
-            if k not in prev or prev[k] is not None:
-                prev[k] = None
+            if ptop not in prev or prev[ptop] is not None:
+                prev[ptop] = None
                 if os.path.exists(fsenc(path)):
                     os.unlink(fsenc(path))
             return

         newest = max(x["poke"] for _, x in reg.items()) if reg else 0
         etag = [len(reg), newest]
-        if etag == prev.get(k, None):
+        if etag == prev.get(ptop):
             return

         try:
-            os.mkdir(os.path.join(k, ".hist"))
+            os.makedirs(histpath)
         except:
             pass

@@ -1378,14 +1381,21 @@ class Up2k(object):
         atomic_move(path2, path)

         self.log("snap: {} |{}|".format(path, len(reg.keys())))
-        prev[k] = etag
+        prev[ptop] = etag

     def _tagger(self):
+        with self.mutex:
+            self.n_tagq += 1
+
         while True:
+            with self.mutex:
+                self.n_tagq -= 1
+
             ptop, wark, rd, fn = self.tagq.get()
             if "e2t" not in self.flags[ptop]:
                 continue

+            # self.log("\n " + repr([ptop, rd, fn]))
             abspath = os.path.join(ptop, rd, fn)
             tags = self.mtag.get(abspath)
             ntags1 = len(tags)
@@ -1411,8 +1421,16 @@ class Up2k(object):
             self.log("tagged {} ({}+{})".format(abspath, ntags1, len(tags) - ntags1))

     def _hasher(self):
+        with self.mutex:
+            self.n_hashq += 1
+
         while True:
+            with self.mutex:
+                self.n_hashq -= 1
+                # self.log("hashq {}".format(self.n_hashq))
+
             ptop, rd, fn = self.hashq.get()
+            # self.log("hashq {} pop {}/{}/{}".format(self.n_hashq, ptop, rd, fn))
             if "e2d" not in self.flags[ptop]:
                 continue

@@ -1425,8 +1443,11 @@ class Up2k(object):
             self.idx_wark(ptop, wark, rd, fn, inf.st_mtime, inf.st_size)

     def hash_file(self, ptop, flags, rd, fn):
-        self.register_vpath(ptop, flags)
-        self.hashq.put([ptop, rd, fn])
+        with self.mutex:
+            self.register_vpath(ptop, flags)
+            self.hashq.put([ptop, rd, fn])
+            self.n_hashq += 1
+            # self.log("hashq {} push {}/{}/{}".format(self.n_hashq, ptop, rd, fn))


 def up2k_chunksize(filesize):
@@ -1448,9 +1469,12 @@ def up2k_wark_from_hashlist(salt, filesize, hashes):
     ident.extend(hashes)
     ident = "\n".join(ident)

-    hasher = hashlib.sha512()
-    hasher.update(ident.encode("utf-8"))
-    digest = hasher.digest()[:32]
+    wark = hashlib.sha512(ident.encode("utf-8")).digest()[:33]
+    wark = base64.urlsafe_b64encode(wark)
+    return wark.decode("ascii")

-    wark = base64.urlsafe_b64encode(digest)
-    return wark.decode("utf-8").rstrip("=")
+
+def up2k_wark_from_metadata(salt, sz, lastmod, rd, fn):
+    ret = fsenc("{}\n{}\n{}\n{}\n{}".format(salt, lastmod, sz, rd, fn))
+    ret = base64.urlsafe_b64encode(hashlib.sha512(ret).digest())
+    return "#{}".format(ret.decode("ascii"))[:44]
@@ -193,7 +193,7 @@ class ProgressPrinter(threading.Thread):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
threading.Thread.__init__(self)
|
threading.Thread.__init__(self, name="pp")
|
||||||
self.daemon = True
|
self.daemon = True
|
||||||
self.msg = None
|
self.msg = None
|
||||||
self.end = False
|
self.end = False
|
||||||
@@ -208,6 +208,8 @@ class ProgressPrinter(threading.Thread):
|
|||||||
|
|
||||||
msg = self.msg
|
msg = self.msg
|
||||||
uprint(" {}\033[K\r".format(msg))
|
uprint(" {}\033[K\r".format(msg))
|
||||||
|
if PY2:
|
||||||
|
sys.stdout.flush()
|
||||||
|
|
||||||
print("\033[K", end="")
|
print("\033[K", end="")
|
||||||
sys.stdout.flush() # necessary on win10 even w/ stderr btw
|
sys.stdout.flush() # necessary on win10 even w/ stderr btw
|
||||||
@@ -252,6 +254,45 @@ def trace(*args, **kwargs):
     nuprint(msg)


+def alltrace():
+    threads = {}
+    names = dict([(t.ident, t.name) for t in threading.enumerate()])
+    for tid, stack in sys._current_frames().items():
+        name = "{} ({:x})".format(names.get(tid), tid)
+        threads[name] = stack
+
+    rret = []
+    bret = []
+    for name, stack in sorted(threads.items()):
+        ret = ["\n\n# {}".format(name)]
+        pad = None
+        for fn, lno, name, line in traceback.extract_stack(stack):
+            fn = os.sep.join(fn.split(os.sep)[-3:])
+            ret.append('File: "{}", line {}, in {}'.format(fn, lno, name))
+            if line:
+                ret.append("  " + str(line.strip()))
+            if "self.not_empty.wait()" in line:
+                pad = " " * 4
+
+        if pad:
+            bret += [ret[0]] + [pad + x for x in ret[1:]]
+        else:
+            rret += ret
+
+    return "\n".join(rret + bret)
+
+
+def min_ex():
+    et, ev, tb = sys.exc_info()
+    tb = traceback.extract_tb(tb, 2)
+    ex = [
+        "{} @ {} <{}>: {}".format(fp.split(os.sep)[-1], ln, fun, txt)
+        for fp, ln, fun, txt in tb
+    ]
+    ex.append("{}: {}".format(et.__name__, ev))
+    return "\n".join(ex)
+
+
 @contextlib.contextmanager
 def ren_open(fname, *args, **kwargs):
     fdir = kwargs.pop("fdir", None)
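
note: the new alltrace() walks sys._current_frames() and formats one stack per live thread, pushing threads that are merely parked in Queue.get ("self.not_empty.wait()") to the bottom of the dump. A hedged, trimmed-down usage sketch (how copyparty actually invokes it is not shown in this hunk):

    # dump all thread stacks on demand; simplified re-statement, not copyparty's wiring
    import sys, threading, traceback

    def dump_threads():
        out = []
        names = dict((t.ident, t.name) for t in threading.enumerate())
        for tid, frame in sys._current_frames().items():
            out.append("\n# {} ({:x})\n".format(names.get(tid), tid))
            out.extend(traceback.format_stack(frame))
        return "".join(out)

    print(dump_threads())
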
@@ -310,7 +351,7 @@ def ren_open(fname, *args, **kwargs):
             if not b64:
                 b64 = (bname + ext).encode("utf-8", "replace")
                 b64 = hashlib.sha512(b64).digest()[:12]
-                b64 = base64.urlsafe_b64encode(b64).decode("utf-8").rstrip("=")
+                b64 = base64.urlsafe_b64encode(b64).decode("utf-8")

             badlen = len(fname)
             while len(fname) >= badlen:
@@ -566,8 +607,10 @@ def read_header(sr):
         else:
             continue

-    sr.unrecv(ret[ofs + 4 :])
-    return ret[:ofs].decode("utf-8", "surrogateescape").split("\r\n")
+    if len(ret) > ofs + 4:
+        sr.unrecv(ret[ofs + 4 :])
+
+    return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")


 def humansize(sz, terse=False):
@@ -605,6 +648,16 @@ def s2hms(s, optional_h=False):
     return "{}:{:02}:{:02}".format(h, m, s)


+def uncyg(path):
+    if len(path) < 2 or not path.startswith("/"):
+        return path
+
+    if len(path) > 2 and path[2] != "/":
+        return path
+
+    return "{}:\\{}".format(path[1], path[3:])
+
+
 def undot(path):
     ret = []
     for node in path.split("/"):
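
note: uncyg() maps cygwin/msys-style absolute paths back to drive-letter form and leaves everything else alone. A few worked examples of the added function (inputs are illustrative):

    def uncyg(path):
        if len(path) < 2 or not path.startswith("/"):
            return path

        if len(path) > 2 and path[2] != "/":
            return path

        return "{}:\\{}".format(path[1], path[3:])

    print(uncyg("/c/Users/ed/Music"))  # c:\Users/ed/Music
    print(uncyg("srv/media"))          # unchanged: relative path
    print(uncyg("/usr/bin"))           # unchanged: not a single drive letter
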
@@ -852,30 +905,36 @@ def yieldfile(fn):


 def hashcopy(actor, fin, fout):
-    u32_lim = int((2 ** 31) * 0.9)
+    is_mp = actor.is_mp
     hashobj = hashlib.sha512()
     tlen = 0
     for buf in fin:
-        actor.workload += 1
-        if actor.workload > u32_lim:
-            actor.workload = 100  # prevent overflow
+        if is_mp:
+            actor.workload += 1
+            if actor.workload > 2 ** 31:
+                actor.workload = 100

         tlen += len(buf)
         hashobj.update(buf)
         fout.write(buf)

-    digest32 = hashobj.digest()[:32]
-    digest_b64 = base64.urlsafe_b64encode(digest32).decode("utf-8").rstrip("=")
+    digest = hashobj.digest()[:33]
+    digest_b64 = base64.urlsafe_b64encode(digest).decode("utf-8")

     return tlen, hashobj.hexdigest(), digest_b64


-def sendfile_py(lower, upper, f, s):
+def sendfile_py(lower, upper, f, s, actor=None):
     remains = upper - lower
     f.seek(lower)
     while remains > 0:
+        if actor:
+            actor.workload += 1
+            if actor.workload > 2 ** 31:
+                actor.workload = 100
+
         # time.sleep(0.01)
-        buf = f.read(min(4096, remains))
+        buf = f.read(min(1024 * 32, remains))
         if not buf:
             return remains

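
note: both hunks above now bump the actor's workload counter only when an actor is present / multiprocessing is active, and clamp it back to 100 before it can exceed 2 ** 31. A minimal sketch of that clamp pattern; Actor here is an illustrative stand-in, not copyparty's worker class:

    # clamped activity counter, as in hashcopy/sendfile_py above
    class Actor(object):
        def __init__(self, is_mp):
            self.is_mp = is_mp
            self.workload = 0

    def touch(actor):
        if actor.is_mp:
            actor.workload += 1
            if actor.workload > 2 ** 31:
                actor.workload = 100  # reset instead of overflowing a 32-bit value

    a = Actor(is_mp=True)
    for _ in range(3):
        touch(a)
    print(a.workload)  # 3
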
@@ -971,14 +1030,20 @@ def guess_mime(url, fallback="application/octet-stream"):
     except:
         return fallback

-    return MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback
+    ret = MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback
+
+    if ";" not in ret:
+        if ret.startswith("text/") or ret.endswith("/javascript"):
+            ret += "; charset=UTF-8"
+
+    return ret


 def runcmd(*argv):
     p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
     stdout, stderr = p.communicate()
-    stdout = stdout.decode("utf-8")
-    stderr = stderr.decode("utf-8")
+    stdout = stdout.decode("utf-8", "replace")
+    stderr = stderr.decode("utf-8", "replace")
     return [p.returncode, stdout, stderr]

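
note: with this change, text-like responses get an explicit UTF-8 charset appended unless the mapping already carries one. A quick check using only the stdlib fallback path; the project's own MIMES table is not shown in this hunk, so it is left out of the sketch:

    import mimetypes

    def guess(url, fallback="application/octet-stream"):
        ret = mimetypes.guess_type(url)[0] or fallback
        if ";" not in ret:
            if ret.startswith("text/") or ret.endswith("/javascript"):
                ret += "; charset=UTF-8"
        return ret

    print(guess("readme.txt"))  # text/plain; charset=UTF-8
    print(guess("photo.jpeg"))  # image/jpeg
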
|
583
copyparty/web/baguettebox.js
Normal file
583
copyparty/web/baguettebox.js
Normal file
@@ -0,0 +1,583 @@
|
|||||||
|
/*!
|
||||||
|
* baguetteBox.js
|
||||||
|
* @author feimosi
|
||||||
|
* @version 1.11.1-mod
|
||||||
|
* @url https://github.com/feimosi/baguetteBox.js
|
||||||
|
*/
|
||||||
|
|
||||||
|
window.baguetteBox = (function () {
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
var options = {},
|
||||||
|
defaults = {
|
||||||
|
captions: true,
|
||||||
|
buttons: 'auto',
|
||||||
|
noScrollbars: false,
|
||||||
|
bodyClass: 'baguetteBox-open',
|
||||||
|
titleTag: false,
|
||||||
|
async: false,
|
||||||
|
preload: 2,
|
||||||
|
animation: 'slideIn',
|
||||||
|
afterShow: null,
|
||||||
|
afterHide: null,
|
||||||
|
onChange: null,
|
||||||
|
},
|
||||||
|
overlay, slider, previousButton, nextButton, closeButton,
|
||||||
|
currentGallery = [],
|
||||||
|
currentIndex = 0,
|
||||||
|
isOverlayVisible = false,
|
||||||
|
touch = {}, // start-pos
|
||||||
|
touchFlag = false, // busy
|
||||||
|
regex = /.+\.(gif|jpe?g|png|webp)/i,
|
||||||
|
data = {}, // all galleries
|
||||||
|
imagesElements = [],
|
||||||
|
documentLastFocus = null;
|
||||||
|
|
||||||
|
var overlayClickHandler = function (event) {
|
||||||
|
if (event.target.id.indexOf('baguette-img') !== -1) {
|
||||||
|
hideOverlay();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
var touchstartHandler = function (event) {
|
||||||
|
touch.count++;
|
||||||
|
if (touch.count > 1) {
|
||||||
|
touch.multitouch = true;
|
||||||
|
}
|
||||||
|
touch.startX = event.changedTouches[0].pageX;
|
||||||
|
touch.startY = event.changedTouches[0].pageY;
|
||||||
|
};
|
||||||
|
var touchmoveHandler = function (event) {
|
||||||
|
if (touchFlag || touch.multitouch) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
event.preventDefault ? event.preventDefault() : event.returnValue = false;
|
||||||
|
var touchEvent = event.touches[0] || event.changedTouches[0];
|
||||||
|
if (touchEvent.pageX - touch.startX > 40) {
|
||||||
|
touchFlag = true;
|
||||||
|
showPreviousImage();
|
||||||
|
} else if (touchEvent.pageX - touch.startX < -40) {
|
||||||
|
touchFlag = true;
|
||||||
|
showNextImage();
|
||||||
|
} else if (touch.startY - touchEvent.pageY > 100) {
|
||||||
|
hideOverlay();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
var touchendHandler = function () {
|
||||||
|
touch.count--;
|
||||||
|
if (touch.count <= 0) {
|
||||||
|
touch.multitouch = false;
|
||||||
|
}
|
||||||
|
touchFlag = false;
|
||||||
|
};
|
||||||
|
var contextmenuHandler = function () {
|
||||||
|
touchendHandler();
|
||||||
|
};
|
||||||
|
|
||||||
|
var trapFocusInsideOverlay = function (event) {
|
||||||
|
if (overlay.style.display === 'block' && (overlay.contains && !overlay.contains(event.target))) {
|
||||||
|
event.stopPropagation();
|
||||||
|
initFocus();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
function run(selector, userOptions) {
|
||||||
|
buildOverlay();
|
||||||
|
removeFromCache(selector);
|
||||||
|
return bindImageClickListeners(selector, userOptions);
|
||||||
|
}
|
||||||
|
|
||||||
|
function bindImageClickListeners(selector, userOptions) {
|
||||||
|
var galleryNodeList = document.querySelectorAll(selector);
|
||||||
|
var selectorData = {
|
||||||
|
galleries: [],
|
||||||
|
nodeList: galleryNodeList
|
||||||
|
};
|
||||||
|
data[selector] = selectorData;
|
||||||
|
|
||||||
|
[].forEach.call(galleryNodeList, function (galleryElement) {
|
||||||
|
if (userOptions && userOptions.filter) {
|
||||||
|
regex = userOptions.filter;
|
||||||
|
}
|
||||||
|
|
||||||
|
var tagsNodeList = [];
|
||||||
|
if (galleryElement.tagName === 'A') {
|
||||||
|
tagsNodeList = [galleryElement];
|
||||||
|
} else {
|
||||||
|
tagsNodeList = galleryElement.getElementsByTagName('a');
|
||||||
|
}
|
||||||
|
|
||||||
|
tagsNodeList = [].filter.call(tagsNodeList, function (element) {
|
||||||
|
if (element.className.indexOf(userOptions && userOptions.ignoreClass) === -1) {
|
||||||
|
return regex.test(element.href);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
if (tagsNodeList.length === 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
var gallery = [];
|
||||||
|
[].forEach.call(tagsNodeList, function (imageElement, imageIndex) {
|
||||||
|
var imageElementClickHandler = function (event) {
|
||||||
|
if (event && event.ctrlKey)
|
||||||
|
return true;
|
||||||
|
|
||||||
|
event.preventDefault ? event.preventDefault() : event.returnValue = false;
|
||||||
|
prepareOverlay(gallery, userOptions);
|
||||||
|
showOverlay(imageIndex);
|
||||||
|
};
|
||||||
|
var imageItem = {
|
||||||
|
eventHandler: imageElementClickHandler,
|
||||||
|
imageElement: imageElement
|
||||||
|
};
|
||||||
|
bind(imageElement, 'click', imageElementClickHandler);
|
||||||
|
gallery.push(imageItem);
|
||||||
|
});
|
||||||
|
selectorData.galleries.push(gallery);
|
||||||
|
});
|
||||||
|
|
||||||
|
return selectorData.galleries;
|
||||||
|
}
|
||||||
|
|
||||||
|
function clearCachedData() {
|
||||||
|
for (var selector in data) {
|
||||||
|
if (data.hasOwnProperty(selector)) {
|
||||||
|
removeFromCache(selector);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function removeFromCache(selector) {
|
||||||
|
if (!data.hasOwnProperty(selector)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
var galleries = data[selector].galleries;
|
||||||
|
[].forEach.call(galleries, function (gallery) {
|
||||||
|
[].forEach.call(gallery, function (imageItem) {
|
||||||
|
unbind(imageItem.imageElement, 'click', imageItem.eventHandler);
|
||||||
|
});
|
||||||
|
|
||||||
|
if (currentGallery === gallery) {
|
||||||
|
currentGallery = [];
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
delete data[selector];
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildOverlay() {
|
||||||
|
overlay = ebi('baguetteBox-overlay');
|
||||||
|
if (overlay) {
|
||||||
|
slider = ebi('baguetteBox-slider');
|
||||||
|
previousButton = ebi('previous-button');
|
||||||
|
nextButton = ebi('next-button');
|
||||||
|
closeButton = ebi('close-button');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
overlay = mknod('div');
|
||||||
|
overlay.setAttribute('role', 'dialog');
|
||||||
|
overlay.id = 'baguetteBox-overlay';
|
||||||
|
document.getElementsByTagName('body')[0].appendChild(overlay);
|
||||||
|
|
||||||
|
slider = mknod('div');
|
||||||
|
slider.id = 'baguetteBox-slider';
|
||||||
|
overlay.appendChild(slider);
|
||||||
|
|
||||||
|
previousButton = mknod('button');
|
||||||
|
previousButton.setAttribute('type', 'button');
|
||||||
|
previousButton.id = 'previous-button';
|
||||||
|
previousButton.setAttribute('aria-label', 'Previous');
|
||||||
|
previousButton.innerHTML = '<';
|
||||||
|
overlay.appendChild(previousButton);
|
||||||
|
|
||||||
|
nextButton = mknod('button');
|
||||||
|
nextButton.setAttribute('type', 'button');
|
||||||
|
nextButton.id = 'next-button';
|
||||||
|
nextButton.setAttribute('aria-label', 'Next');
|
||||||
|
nextButton.innerHTML = '>';
|
||||||
|
overlay.appendChild(nextButton);
|
||||||
|
|
||||||
|
closeButton = mknod('button');
|
||||||
|
closeButton.setAttribute('type', 'button');
|
||||||
|
closeButton.id = 'close-button';
|
||||||
|
closeButton.setAttribute('aria-label', 'Close');
|
||||||
|
closeButton.innerHTML = '×';
|
||||||
|
overlay.appendChild(closeButton);
|
||||||
|
|
||||||
|
previousButton.className = nextButton.className = closeButton.className = 'baguetteBox-button';
|
||||||
|
|
||||||
|
bindEvents();
|
||||||
|
}
|
||||||
|
|
||||||
|
function keyDownHandler(event) {
|
||||||
|
switch (event.keyCode) {
|
||||||
|
case 37: // Left
|
||||||
|
showPreviousImage();
|
||||||
|
break;
|
||||||
|
case 39: // Right
|
||||||
|
showNextImage();
|
||||||
|
break;
|
||||||
|
case 27: // Esc
|
||||||
|
hideOverlay();
|
||||||
|
break;
|
||||||
|
case 36: // Home
|
||||||
|
showFirstImage(event);
|
||||||
|
break;
|
||||||
|
case 35: // End
|
||||||
|
showLastImage(event);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
var passiveSupp = false;
|
||||||
|
try {
|
||||||
|
var opts = {
|
||||||
|
get passive() {
|
||||||
|
passiveSupp = true;
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
window.addEventListener('test', null, opts);
|
||||||
|
window.removeEventListener('test', null, opts);
|
||||||
|
}
|
||||||
|
catch (ex) {
|
||||||
|
passiveSupp = false;
|
||||||
|
}
|
||||||
|
var passiveEvent = passiveSupp ? { passive: false } : null;
|
||||||
|
var nonPassiveEvent = passiveSupp ? { passive: true } : null;
|
||||||
|
|
||||||
|
function bindEvents() {
|
||||||
|
bind(overlay, 'click', overlayClickHandler);
|
||||||
|
bind(previousButton, 'click', showPreviousImage);
|
||||||
|
bind(nextButton, 'click', showNextImage);
|
||||||
|
bind(closeButton, 'click', hideOverlay);
|
||||||
|
bind(slider, 'contextmenu', contextmenuHandler);
|
||||||
|
bind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
|
||||||
|
bind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
|
||||||
|
bind(overlay, 'touchend', touchendHandler);
|
||||||
|
bind(document, 'focus', trapFocusInsideOverlay, true);
|
||||||
|
}
|
||||||
|
|
||||||
|
function unbindEvents() {
|
||||||
|
unbind(overlay, 'click', overlayClickHandler);
|
||||||
|
unbind(previousButton, 'click', showPreviousImage);
|
||||||
|
unbind(nextButton, 'click', showNextImage);
|
||||||
|
unbind(closeButton, 'click', hideOverlay);
|
||||||
|
unbind(slider, 'contextmenu', contextmenuHandler);
|
||||||
|
unbind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
|
||||||
|
unbind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
|
||||||
|
unbind(overlay, 'touchend', touchendHandler);
|
||||||
|
unbind(document, 'focus', trapFocusInsideOverlay, true);
|
||||||
|
}
|
||||||
|
|
||||||
|
function prepareOverlay(gallery, userOptions) {
|
||||||
|
if (currentGallery === gallery) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
currentGallery = gallery;
|
||||||
|
setOptions(userOptions);
|
||||||
|
slider.innerHTML = '';
|
||||||
|
imagesElements.length = 0;
|
||||||
|
|
||||||
|
var imagesFiguresIds = [];
|
||||||
|
var imagesCaptionsIds = [];
|
||||||
|
for (var i = 0, fullImage; i < gallery.length; i++) {
|
||||||
|
fullImage = mknod('div');
|
||||||
|
fullImage.className = 'full-image';
|
||||||
|
fullImage.id = 'baguette-img-' + i;
|
||||||
|
imagesElements.push(fullImage);
|
||||||
|
|
||||||
|
imagesFiguresIds.push('baguetteBox-figure-' + i);
|
||||||
|
imagesCaptionsIds.push('baguetteBox-figcaption-' + i);
|
||||||
|
slider.appendChild(imagesElements[i]);
|
||||||
|
}
|
||||||
|
overlay.setAttribute('aria-labelledby', imagesFiguresIds.join(' '));
|
||||||
|
overlay.setAttribute('aria-describedby', imagesCaptionsIds.join(' '));
|
||||||
|
}
|
||||||
|
|
||||||
|
function setOptions(newOptions) {
|
||||||
|
if (!newOptions) {
|
||||||
|
newOptions = {};
|
||||||
|
}
|
||||||
|
for (var item in defaults) {
|
||||||
|
options[item] = defaults[item];
|
||||||
|
if (typeof newOptions[item] !== 'undefined') {
|
||||||
|
options[item] = newOptions[item];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
slider.style.transition = (options.animation === 'fadeIn' ? 'opacity .4s ease' :
|
||||||
|
options.animation === 'slideIn' ? '' : 'none');
|
||||||
|
|
||||||
|
if (options.buttons === 'auto' && ('ontouchstart' in window || currentGallery.length === 1)) {
|
||||||
|
options.buttons = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
previousButton.style.display = nextButton.style.display = (options.buttons ? '' : 'none');
|
||||||
|
}
|
||||||
|
|
||||||
|
function showOverlay(chosenImageIndex) {
|
||||||
|
if (options.noScrollbars) {
|
||||||
|
document.documentElement.style.overflowY = 'hidden';
|
||||||
|
document.body.style.overflowY = 'scroll';
|
||||||
|
}
|
||||||
|
if (overlay.style.display === 'block') {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
bind(document, 'keydown', keyDownHandler);
|
||||||
|
currentIndex = chosenImageIndex;
|
||||||
|
touch = {
|
||||||
|
count: 0,
|
||||||
|
startX: null,
|
||||||
|
startY: null
|
||||||
|
};
|
||||||
|
loadImage(currentIndex, function () {
|
||||||
|
preloadNext(currentIndex);
|
||||||
|
preloadPrev(currentIndex);
|
||||||
|
});
|
||||||
|
|
||||||
|
updateOffset();
|
||||||
|
overlay.style.display = 'block';
|
||||||
|
// Fade in overlay
|
||||||
|
setTimeout(function () {
|
||||||
|
overlay.className = 'visible';
|
||||||
|
if (options.bodyClass && document.body.classList) {
|
||||||
|
document.body.classList.add(options.bodyClass);
|
||||||
|
}
|
||||||
|
if (options.afterShow) {
|
||||||
|
options.afterShow();
|
||||||
|
}
|
||||||
|
}, 50);
|
||||||
|
if (options.onChange) {
|
||||||
|
options.onChange(currentIndex, imagesElements.length);
|
||||||
|
}
|
||||||
|
documentLastFocus = document.activeElement;
|
||||||
|
initFocus();
|
||||||
|
isOverlayVisible = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
function initFocus() {
|
||||||
|
if (options.buttons) {
|
||||||
|
previousButton.focus();
|
||||||
|
} else {
|
||||||
|
closeButton.focus();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function hideOverlay(e) {
|
||||||
|
ev(e);
|
||||||
|
if (options.noScrollbars) {
|
||||||
|
document.documentElement.style.overflowY = 'auto';
|
||||||
|
document.body.style.overflowY = 'auto';
|
||||||
|
}
|
||||||
|
if (overlay.style.display === 'none') {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
unbind(document, 'keydown', keyDownHandler);
|
||||||
|
// Fade out and hide the overlay
|
||||||
|
overlay.className = '';
|
||||||
|
setTimeout(function () {
|
||||||
|
overlay.style.display = 'none';
|
||||||
|
if (options.bodyClass && document.body.classList) {
|
||||||
|
document.body.classList.remove(options.bodyClass);
|
||||||
|
}
|
||||||
|
if (options.afterHide) {
|
||||||
|
options.afterHide();
|
||||||
|
}
|
||||||
|
documentLastFocus && documentLastFocus.focus();
|
||||||
|
isOverlayVisible = false;
|
||||||
|
}, 500);
|
||||||
|
}
|
||||||
|
|
||||||
|
function loadImage(index, callback) {
|
||||||
|
var imageContainer = imagesElements[index];
|
||||||
|
var galleryItem = currentGallery[index];
|
||||||
|
|
||||||
|
if (typeof imageContainer === 'undefined' || typeof galleryItem === 'undefined') {
|
||||||
|
return; // out-of-bounds or gallery dirty
|
||||||
|
}
|
||||||
|
|
||||||
|
if (imageContainer.getElementsByTagName('img')[0]) {
|
||||||
|
// image is loaded, cb and bail
|
||||||
|
if (callback) {
|
||||||
|
callback();
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
var imageElement = galleryItem.imageElement,
|
||||||
|
imageSrc = imageElement.href,
|
||||||
|
thumbnailElement = imageElement.getElementsByTagName('img')[0],
|
||||||
|
imageCaption = typeof options.captions === 'function' ?
|
||||||
|
options.captions.call(currentGallery, imageElement) :
|
||||||
|
imageElement.getAttribute('data-caption') || imageElement.title;
|
||||||
|
|
||||||
|
var figure = mknod('figure');
|
||||||
|
figure.id = 'baguetteBox-figure-' + index;
|
||||||
|
figure.innerHTML = '<div class="baguetteBox-spinner">' +
|
||||||
|
'<div class="baguetteBox-double-bounce1"></div>' +
|
||||||
|
'<div class="baguetteBox-double-bounce2"></div>' +
|
||||||
|
'</div>';
|
||||||
|
|
||||||
|
if (options.captions && imageCaption) {
|
||||||
|
var figcaption = mknod('figcaption');
|
||||||
|
figcaption.id = 'baguetteBox-figcaption-' + index;
|
||||||
|
figcaption.innerHTML = imageCaption;
|
||||||
|
figure.appendChild(figcaption);
|
||||||
|
}
|
||||||
|
imageContainer.appendChild(figure);
|
||||||
|
|
||||||
|
var image = mknod('img');
|
||||||
|
image.onload = function () {
|
||||||
|
// Remove loader element
|
||||||
|
var spinner = document.querySelector('#baguette-img-' + index + ' .baguetteBox-spinner');
|
||||||
|
figure.removeChild(spinner);
|
||||||
|
if (!options.async && callback) {
|
||||||
|
callback();
|
||||||
|
}
|
||||||
|
};
|
||||||
|
image.setAttribute('src', imageSrc);
|
||||||
|
image.alt = thumbnailElement ? thumbnailElement.alt || '' : '';
|
||||||
|
if (options.titleTag && imageCaption) {
|
||||||
|
image.title = imageCaption;
|
||||||
|
}
|
||||||
|
figure.appendChild(image);
|
||||||
|
|
||||||
|
if (options.async && callback) {
|
||||||
|
callback();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function showNextImage(e) {
|
||||||
|
ev(e);
|
||||||
|
return show(currentIndex + 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
function showPreviousImage(e) {
|
||||||
|
ev(e);
|
||||||
|
return show(currentIndex - 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
function showFirstImage(event) {
|
||||||
|
if (event) {
|
||||||
|
event.preventDefault();
|
||||||
|
}
|
||||||
|
return show(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
function showLastImage(event) {
|
||||||
|
if (event) {
|
||||||
|
event.preventDefault();
|
||||||
|
}
|
||||||
|
return show(currentGallery.length - 1);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Move the gallery to a specific index
|
||||||
|
* @param `index` {number} - the position of the image
|
||||||
|
* @param `gallery` {array} - gallery which should be opened, if omitted assumes the currently opened one
|
||||||
|
* @return {boolean} - true on success or false if the index is invalid
|
||||||
|
*/
|
||||||
|
function show(index, gallery) {
|
||||||
|
if (!isOverlayVisible && index >= 0 && index < gallery.length) {
|
||||||
|
prepareOverlay(gallery, options);
|
||||||
|
showOverlay(index);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
if (index < 0) {
|
||||||
|
if (options.animation) {
|
||||||
|
bounceAnimation('left');
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
if (index >= imagesElements.length) {
|
||||||
|
if (options.animation) {
|
||||||
|
bounceAnimation('right');
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
currentIndex = index;
|
||||||
|
loadImage(currentIndex, function () {
|
||||||
|
preloadNext(currentIndex);
|
||||||
|
preloadPrev(currentIndex);
|
||||||
|
});
|
||||||
|
updateOffset();
|
||||||
|
|
||||||
|
if (options.onChange) {
|
||||||
|
options.onChange(currentIndex, imagesElements.length);
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Triggers the bounce animation
|
||||||
|
* @param {('left'|'right')} direction - Direction of the movement
|
||||||
|
*/
|
||||||
|
function bounceAnimation(direction) {
|
||||||
|
slider.className = 'bounce-from-' + direction;
|
||||||
|
setTimeout(function () {
|
||||||
|
slider.className = '';
|
||||||
|
}, 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
function updateOffset() {
|
||||||
|
var offset = -currentIndex * 100 + '%';
|
||||||
|
if (options.animation === 'fadeIn') {
|
||||||
|
slider.style.opacity = 0;
|
||||||
|
setTimeout(function () {
|
||||||
|
slider.style.transform = 'translate3d(' + offset + ',0,0)';
|
||||||
|
slider.style.opacity = 1;
|
||||||
|
}, 400);
|
||||||
|
} else {
|
||||||
|
slider.style.transform = 'translate3d(' + offset + ',0,0)';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function preloadNext(index) {
|
||||||
|
if (index - currentIndex >= options.preload) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
loadImage(index + 1, function () {
|
||||||
|
preloadNext(index + 1);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function preloadPrev(index) {
|
||||||
|
if (currentIndex - index >= options.preload) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
loadImage(index - 1, function () {
|
||||||
|
preloadPrev(index - 1);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function bind(element, event, callback, options) {
|
||||||
|
element.addEventListener(event, callback, options);
|
||||||
|
}
|
||||||
|
|
||||||
|
function unbind(element, event, callback, options) {
|
||||||
|
element.removeEventListener(event, callback, options);
|
||||||
|
}
|
||||||
|
|
||||||
|
function destroyPlugin() {
|
||||||
|
unbindEvents();
|
||||||
|
clearCachedData();
|
||||||
|
unbind(document, 'keydown', keyDownHandler);
|
||||||
|
document.getElementsByTagName('body')[0].removeChild(ebi('baguetteBox-overlay'));
|
||||||
|
data = {};
|
||||||
|
currentGallery = [];
|
||||||
|
currentIndex = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
run: run,
|
||||||
|
show: show,
|
||||||
|
showNext: showNextImage,
|
||||||
|
showPrevious: showPreviousImage,
|
||||||
|
hide: hideOverlay,
|
||||||
|
destroy: destroyPlugin
|
||||||
|
};
|
||||||
|
})();
|
@@ -25,6 +25,35 @@ html, body {
|
|||||||
body {
|
body {
|
||||||
padding-bottom: 5em;
|
padding-bottom: 5em;
|
||||||
}
|
}
|
||||||
|
#tt {
|
||||||
|
position: fixed;
|
||||||
|
max-width: 34em;
|
||||||
|
background: #222;
|
||||||
|
border: 0 solid #555;
|
||||||
|
overflow: hidden;
|
||||||
|
margin-top: 1em;
|
||||||
|
padding: 0 1em;
|
||||||
|
height: 0;
|
||||||
|
opacity: .1;
|
||||||
|
transition: opacity 0.14s, height 0.14s, padding 0.14s;
|
||||||
|
box-shadow: 0 .2em .5em #222;
|
||||||
|
border-radius: .4em;
|
||||||
|
z-index: 9001;
|
||||||
|
}
|
||||||
|
#tt.show {
|
||||||
|
padding: 1em;
|
||||||
|
height: auto;
|
||||||
|
border-width: .2em 0;
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
#tt code {
|
||||||
|
background: #3c3c3c;
|
||||||
|
padding: .2em .3em;
|
||||||
|
border-top: 1px solid #777;
|
||||||
|
border-radius: .3em;
|
||||||
|
font-family: monospace, monospace;
|
||||||
|
line-height: 2em;
|
||||||
|
}
|
||||||
#path,
|
#path,
|
||||||
#path * {
|
#path * {
|
||||||
font-size: 1em;
|
font-size: 1em;
|
||||||
@@ -53,6 +82,7 @@ body {
|
|||||||
#files tbody a {
|
#files tbody a {
|
||||||
display: block;
|
display: block;
|
||||||
padding: .3em 0;
|
padding: .3em 0;
|
||||||
|
scroll-margin-top: 45vh;
|
||||||
}
|
}
|
||||||
#files tbody div a {
|
#files tbody div a {
|
||||||
color: #f5a;
|
color: #f5a;
|
||||||
@@ -68,7 +98,6 @@ a, #files tbody div a:last-child {
|
|||||||
text-decoration: underline;
|
text-decoration: underline;
|
||||||
}
|
}
|
||||||
#files thead {
|
#files thead {
|
||||||
background: #333;
|
|
||||||
position: sticky;
|
position: sticky;
|
||||||
top: 0;
|
top: 0;
|
||||||
}
|
}
|
||||||
@@ -76,29 +105,30 @@ a, #files tbody div a:last-child {
|
|||||||
color: #999;
|
color: #999;
|
||||||
font-weight: normal;
|
font-weight: normal;
|
||||||
}
|
}
|
||||||
#files tr:hover {
|
#files tr:hover td {
|
||||||
background: #1c1c1c;
|
background: #1c1c1c;
|
||||||
}
|
}
|
||||||
#files thead th {
|
#files thead th {
|
||||||
padding: .5em 1.3em .3em 1.3em;
|
padding: .5em .3em .3em .3em;
|
||||||
|
border-right: 2px solid #3c3c3c;
|
||||||
|
border-bottom: 2px solid #444;
|
||||||
|
background: #333;
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
}
|
}
|
||||||
|
#files thead th+th {
|
||||||
|
border-left: 2px solid #2a2a2a;
|
||||||
|
}
|
||||||
#files thead th:last-child {
|
#files thead th:last-child {
|
||||||
background: #444;
|
border-right: none;
|
||||||
border-radius: .7em .7em 0 0;
|
|
||||||
}
|
}
|
||||||
#files thead th:first-child {
|
#files tbody {
|
||||||
background: #222;
|
background: #222;
|
||||||
}
|
}
|
||||||
#files tbody,
|
|
||||||
#files thead th:nth-child(2) {
|
|
||||||
background: #222;
|
|
||||||
border-radius: 0 .7em 0 0;
|
|
||||||
}
|
|
||||||
#files td {
|
#files td {
|
||||||
margin: 0;
|
margin: 0;
|
||||||
padding: 0 .5em;
|
padding: 0 .5em;
|
||||||
border-bottom: 1px solid #111;
|
border-bottom: 1px solid #111;
|
||||||
|
border-left: 1px solid #2c2c2c;
|
||||||
}
|
}
|
||||||
#files td+td+td {
|
#files td+td+td {
|
||||||
max-width: 30em;
|
max-width: 30em;
|
||||||
@@ -185,9 +215,17 @@ a, #files tbody div a:last-child {
|
|||||||
margin: -.2em;
|
margin: -.2em;
|
||||||
}
|
}
|
||||||
#files tbody a.play.act {
|
#files tbody a.play.act {
|
||||||
color: #840;
|
color: #720;
|
||||||
text-shadow: 0 0 .3em #b80;
|
text-shadow: 0 0 .3em #b80;
|
||||||
}
|
}
|
||||||
|
#ggrid a.play,
|
||||||
|
html.light #ggrid a.play {
|
||||||
|
color: #fff;
|
||||||
|
background: #750;
|
||||||
|
border-color: #c90;
|
||||||
|
border-top: 1px solid #da4;
|
||||||
|
box-shadow: 0 .1em 1.2em #b83;
|
||||||
|
}
|
||||||
#files tbody tr.sel td,
|
#files tbody tr.sel td,
|
||||||
#ggrid a.sel,
|
#ggrid a.sel,
|
||||||
html.light #ggrid a.sel {
|
html.light #ggrid a.sel {
|
||||||
@@ -209,11 +247,17 @@ html.light #ggrid a.sel {
|
|||||||
box-shadow: 0 .1em 1.2em #b36;
|
box-shadow: 0 .1em 1.2em #b36;
|
||||||
transition: all 0.2s cubic-bezier(.2, 2.2, .5, 1); /* https://cubic-bezier.com/#.4,2,.7,1 */
|
transition: all 0.2s cubic-bezier(.2, 2.2, .5, 1); /* https://cubic-bezier.com/#.4,2,.7,1 */
|
||||||
}
|
}
|
||||||
#ggrid a.sel img {
|
#ggrid a.sel img,
|
||||||
|
#ggrid a.play img {
|
||||||
opacity: .7;
|
opacity: .7;
|
||||||
box-shadow: 0 0 1em #b36;
|
|
||||||
filter: contrast(130%) brightness(107%);
|
filter: contrast(130%) brightness(107%);
|
||||||
}
|
}
|
||||||
|
#ggrid a.sel img {
|
||||||
|
box-shadow: 0 0 1em #b36;
|
||||||
|
}
|
||||||
|
#ggrid a.play img {
|
||||||
|
box-shadow: 0 0 1em #b83;
|
||||||
|
}
|
||||||
#files tr.sel a {
|
#files tr.sel a {
|
||||||
color: #fff;
|
color: #fff;
|
||||||
}
|
}
|
||||||
@@ -267,6 +311,7 @@ html.light #ggrid a.sel {
|
|||||||
height: 6em;
|
height: 6em;
|
||||||
width: 100%;
|
width: 100%;
|
||||||
z-index: 3;
|
z-index: 3;
|
||||||
|
touch-action: none;
|
||||||
transition: bottom 0.15s;
|
transition: bottom 0.15s;
|
||||||
}
|
}
|
||||||
#widget.open {
|
#widget.open {
|
||||||
@@ -483,20 +528,56 @@ html.light #ggrid a.sel {
|
|||||||
margin: .5em;
|
margin: .5em;
|
||||||
}
|
}
|
||||||
.opview input[type=text] {
|
.opview input[type=text] {
|
||||||
color: #fff;
|
|
||||||
background: #383838;
|
background: #383838;
|
||||||
|
color: #fff;
|
||||||
border: none;
|
border: none;
|
||||||
box-shadow: 0 0 .3em #222;
|
box-shadow: 0 0 .3em #222;
|
||||||
border-bottom: 1px solid #fc5;
|
border-bottom: 1px solid #fc5;
|
||||||
border-radius: .2em;
|
border-radius: .2em;
|
||||||
padding: .2em .3em;
|
padding: .2em .3em;
|
||||||
}
|
}
|
||||||
|
.opview input.err,
|
||||||
|
html.light .opview input[type="text"].err {
|
||||||
|
color: #fff;
|
||||||
|
background: #a20;
|
||||||
|
border-color: #f00;
|
||||||
|
box-shadow: 0 0 .7em #f00;
|
||||||
|
text-shadow: 1px 1px 0 #500;
|
||||||
|
outline: none;
|
||||||
|
}
|
||||||
input[type="checkbox"]+label {
|
input[type="checkbox"]+label {
|
||||||
color: #f5a;
|
color: #f5a;
|
||||||
}
|
}
|
||||||
input[type="checkbox"]:checked+label {
|
input[type="checkbox"]:checked+label {
|
||||||
color: #fc5;
|
color: #fc5;
|
||||||
}
|
}
|
||||||
|
input[type="radio"]:checked+label {
|
||||||
|
color: #fc0;
|
||||||
|
}
|
||||||
|
html.light input[type="radio"]:checked+label {
|
||||||
|
color: #07c;
|
||||||
|
}
|
||||||
|
input.eq_gain {
|
||||||
|
width: 3em;
|
||||||
|
text-align: center;
|
||||||
|
margin: 0 .6em;
|
||||||
|
}
|
||||||
|
#audio_eq table {
|
||||||
|
border-collapse: collapse;
|
||||||
|
}
|
||||||
|
#audio_eq td {
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
#audio_eq a.eq_step {
|
||||||
|
font-size: 1.5em;
|
||||||
|
display: block;
|
||||||
|
padding: 0;
|
||||||
|
}
|
||||||
|
#au_eq {
|
||||||
|
display: block;
|
||||||
|
margin-top: .5em;
|
||||||
|
padding: 1.3em .3em;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -563,6 +644,7 @@ input[type="checkbox"]:checked+label {
|
|||||||
}
|
}
|
||||||
#wrap {
|
#wrap {
|
||||||
margin-top: 2em;
|
margin-top: 2em;
|
||||||
|
min-height: 90vh;
|
||||||
}
|
}
|
||||||
#tree {
|
#tree {
|
||||||
display: none;
|
display: none;
|
||||||
@@ -575,8 +657,15 @@ input[type="checkbox"]:checked+label {
|
|||||||
overscroll-behavior-y: none;
|
overscroll-behavior-y: none;
|
||||||
scrollbar-color: #eb0 #333;
|
scrollbar-color: #eb0 #333;
|
||||||
}
|
}
|
||||||
|
#treeh {
|
||||||
|
background: #333;
|
||||||
|
position: sticky;
|
||||||
|
z-index: 1;
|
||||||
|
top: 0;
|
||||||
|
}
|
||||||
#thx_ff {
|
#thx_ff {
|
||||||
padding: 5em 0;
|
padding: 5em 0;
|
||||||
|
/* widget */
|
||||||
}
|
}
|
||||||
#tree::-webkit-scrollbar-track,
|
#tree::-webkit-scrollbar-track,
|
||||||
#tree::-webkit-scrollbar {
|
#tree::-webkit-scrollbar {
|
||||||
@@ -600,6 +689,7 @@ input[type="checkbox"]:checked+label {
|
|||||||
box-shadow: 0 .1em .2em #222 inset;
|
box-shadow: 0 .1em .2em #222 inset;
|
||||||
border-radius: .3em;
|
border-radius: .3em;
|
||||||
margin: .2em;
|
margin: .2em;
|
||||||
|
white-space: pre;
|
||||||
position: relative;
|
position: relative;
|
||||||
top: -.2em;
|
top: -.2em;
|
||||||
}
|
}
|
||||||
@@ -636,15 +726,14 @@ input[type="checkbox"]:checked+label {
|
|||||||
#treeul a.hl {
|
#treeul a.hl {
|
||||||
color: #400;
|
color: #400;
|
||||||
background: #fc4;
|
background: #fc4;
|
||||||
border-radius: .3em;
|
|
||||||
text-shadow: none;
|
text-shadow: none;
|
||||||
}
|
}
|
||||||
#treeul a {
|
#treeul a {
|
||||||
|
border-radius: .3em;
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
}
|
}
|
||||||
#treeul a+a {
|
#treeul a+a {
|
||||||
width: calc(100% - 2em);
|
width: calc(100% - 2em);
|
||||||
background: #333;
|
|
||||||
line-height: 1em;
|
line-height: 1em;
|
||||||
}
|
}
|
||||||
#treeul a+a:hover {
|
#treeul a+a:hover {
|
||||||
@@ -668,34 +757,20 @@ input[type="checkbox"]:checked+label {
|
|||||||
font-size: 2em;
|
font-size: 2em;
|
||||||
white-space: nowrap;
|
white-space: nowrap;
|
||||||
}
|
}
|
||||||
#files th:hover .cfg,
|
#files th:hover .cfg {
|
||||||
#files th.min .cfg {
|
|
||||||
display: block;
|
display: block;
|
||||||
width: 1em;
|
width: 1em;
|
||||||
border-radius: .2em;
|
border-radius: .2em;
|
||||||
margin: -1.3em auto 0 auto;
|
margin: -1.3em auto 0 auto;
|
||||||
background: #444;
|
background: #444;
|
||||||
}
|
}
|
||||||
#files th.min .cfg {
|
#files>thead>tr>th.min,
|
||||||
margin: -.6em;
|
#files td.min {
|
||||||
}
|
display: none;
|
||||||
#files>thead>tr>th.min span {
|
|
||||||
position: absolute;
|
|
||||||
transform: rotate(270deg);
|
|
||||||
background: linear-gradient(90deg, rgba(68,68,68,0), rgba(68,68,68,0.5) 70%, #444);
|
|
||||||
margin-left: -4.6em;
|
|
||||||
padding: .4em;
|
|
||||||
top: 5.4em;
|
|
||||||
width: 8em;
|
|
||||||
text-align: right;
|
|
||||||
letter-spacing: .04em;
|
|
||||||
}
|
}
|
||||||
#files td:nth-child(2n) {
|
#files td:nth-child(2n) {
|
||||||
color: #f5a;
|
color: #f5a;
|
||||||
}
|
}
|
||||||
#files td.min a {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
#files tr.play td,
|
#files tr.play td,
|
||||||
#files tr.play div a {
|
#files tr.play div a {
|
||||||
background: #fc4;
|
background: #fc4;
|
||||||
@@ -710,50 +785,40 @@ input[type="checkbox"]:checked+label {
|
|||||||
color: #300;
|
color: #300;
|
||||||
background: #fea;
|
background: #fea;
|
||||||
}
|
}
|
||||||
#op_cfg {
|
.opwide {
|
||||||
max-width: none;
|
max-width: none;
|
||||||
margin-right: 1.5em;
|
margin-right: 1.5em;
|
||||||
}
|
}
|
||||||
#op_cfg>div>a {
|
.opwide>div {
|
||||||
|
display: inline-block;
|
||||||
|
vertical-align: top;
|
||||||
|
border-left: .2em solid #4c4c4c;
|
||||||
|
margin-left: .5em;
|
||||||
|
padding-left: .5em;
|
||||||
|
}
|
||||||
|
.opwide>div.fill {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
.opwide>div>div>a {
|
||||||
line-height: 2em;
|
line-height: 2em;
|
||||||
}
|
}
|
||||||
#op_cfg>div>span {
|
#op_cfg>div>div>span {
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
padding: .2em .4em;
|
padding: .2em .4em;
|
||||||
}
|
}
|
||||||
#op_cfg h3 {
|
.opbox h3 {
|
||||||
margin: .8em 0 0 .6em;
|
margin: .8em 0 0 .6em;
|
||||||
padding: 0;
|
padding: 0;
|
||||||
border-bottom: 1px solid #555;
|
border-bottom: 1px solid #555;
|
||||||
}
|
}
|
||||||
#opdesc {
|
#thumbs,
|
||||||
display: none;
|
#au_osd_cv {
|
||||||
}
|
|
||||||
#ops:hover #opdesc {
|
|
||||||
display: block;
|
|
||||||
background: linear-gradient(0deg,#555, #4c4c4c 80%, #444);
|
|
||||||
box-shadow: 0 .3em 1em #222;
|
|
||||||
padding: 1em;
|
|
||||||
border-radius: .3em;
|
|
||||||
position: absolute;
|
|
||||||
z-index: 3;
|
|
||||||
top: 6em;
|
|
||||||
right: 1.5em;
|
|
||||||
}
|
|
||||||
#ops:hover #opdesc.off {
|
|
||||||
display: none;
|
|
||||||
}
|
|
||||||
#opdesc code {
|
|
||||||
background: #3c3c3c;
|
|
||||||
padding: .2em .3em;
|
|
||||||
border-top: 1px solid #777;
|
|
||||||
border-radius: .3em;
|
|
||||||
font-family: monospace, monospace;
|
|
||||||
line-height: 2em;
|
|
||||||
}
|
|
||||||
#griden.on+#thumbs {
|
|
||||||
opacity: .3;
|
opacity: .3;
|
||||||
}
|
}
|
||||||
|
#griden.on+#thumbs,
|
||||||
|
#au_os_ctl.on+#au_osd_cv {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
#ghead {
|
#ghead {
|
||||||
background: #3c3c3c;
|
background: #3c3c3c;
|
||||||
border: 1px solid #444;
|
border: 1px solid #444;
|
||||||
@@ -798,6 +863,12 @@ html.light #ghead {
|
|||||||
padding: .2em .3em;
|
padding: .2em .3em;
|
||||||
display: block;
|
display: block;
|
||||||
}
|
}
|
||||||
|
#ggrid span.dir:before {
|
||||||
|
content: '📂';
|
||||||
|
line-height: 0;
|
||||||
|
font-size: 2em;
|
||||||
|
margin: -.7em .1em -.5em -.3em;
|
||||||
|
}
|
||||||
#ggrid a:hover {
|
#ggrid a:hover {
|
||||||
background: #444;
|
background: #444;
|
||||||
border-color: #555;
|
border-color: #555;
|
||||||
@@ -848,6 +919,15 @@ html.light {
|
|||||||
background: #eee;
|
background: #eee;
|
||||||
text-shadow: none;
|
text-shadow: none;
|
||||||
}
|
}
|
||||||
|
html.light #tt {
|
||||||
|
background: #fff;
|
||||||
|
border-color: #888;
|
||||||
|
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
|
||||||
|
}
|
||||||
|
html.light #tt code {
|
||||||
|
background: #060;
|
||||||
|
color: #fff;
|
||||||
|
}
|
||||||
html.light #ops,
|
html.light #ops,
|
||||||
html.light .opbox,
|
html.light .opbox,
|
||||||
html.light #srch_form {
|
html.light #srch_form {
|
||||||
@@ -891,8 +971,14 @@ html.light #treeul a.hl {
|
|||||||
background: #07a;
|
background: #07a;
|
||||||
color: #fff;
|
color: #fff;
|
||||||
}
|
}
|
||||||
|
html.light #treeul a.hl:hover {
|
||||||
|
background: #059;
|
||||||
|
}
|
||||||
html.light #tree li {
|
html.light #tree li {
|
||||||
border-color: #ddd #fff #f7f7f7 #fff;
|
border-color: #f7f7f7 #fff #ddd #fff;
|
||||||
|
}
|
||||||
|
html.light #tree a:hover {
|
||||||
|
background: #fff;
|
||||||
}
|
}
|
||||||
html.light #tree ul {
|
html.light #tree ul {
|
||||||
border-color: #ccc;
|
border-color: #ccc;
|
||||||
@@ -910,12 +996,14 @@ html.light #files {
|
|||||||
}
|
}
|
||||||
html.light #files thead th {
|
html.light #files thead th {
|
||||||
background: #eee;
|
background: #eee;
|
||||||
|
border: 1px solid #ccc;
|
||||||
|
border-top: none;
|
||||||
}
|
}
|
||||||
html.light #files tr td {
|
html.light #files thead th+th {
|
||||||
border-top: 1px solid #ddd;
|
border-left: 1px solid #f7f7f7;
|
||||||
}
|
}
|
||||||
html.light #files td {
|
html.light #files td {
|
||||||
border-bottom: 1px solid #f7f7f7;
|
border-color: #fff #fff #ddd #ddd;
|
||||||
}
|
}
|
||||||
html.light #files tbody tr:last-child td {
|
html.light #files tbody tr:last-child td {
|
||||||
border-bottom: .2em solid #ccc;
|
border-bottom: .2em solid #ccc;
|
||||||
@@ -923,25 +1011,28 @@ html.light #files tbody tr:last-child td {
|
|||||||
html.light #files td:nth-child(2n) {
|
html.light #files td:nth-child(2n) {
|
||||||
color: #d38;
|
color: #d38;
|
||||||
}
|
}
|
||||||
html.light #files tr:hover td {
|
html.light #files tr.play td:nth-child(2n) {
|
||||||
background: #fff;
|
color: #c16;
|
||||||
}
|
}
|
||||||
html.light #files tbody a.play {
|
html.light #files tbody a.play {
|
||||||
color: #c0f;
|
color: #c0f;
|
||||||
}
|
}
|
||||||
html.light tr.play td {
|
html.light #files tbody a.play.act {
|
||||||
|
color: #90c;
|
||||||
|
}
|
||||||
|
html.light #files tr.play td {
|
||||||
background: #fc5;
|
background: #fc5;
|
||||||
|
border-color: #eb1;
|
||||||
|
}
|
||||||
|
html.light #files tr:hover td {
|
||||||
|
background: #fff;
|
||||||
}
|
}
|
||||||
html.light tr.play a {
|
html.light tr.play a {
|
||||||
color: #406;
|
color: #406;
|
||||||
}
|
}
|
||||||
html.light #files th:hover .cfg,
|
html.light #files th:hover .cfg {
|
||||||
html.light #files th.min .cfg {
|
|
||||||
background: #ccc;
|
background: #ccc;
|
||||||
}
|
}
|
||||||
html.light #files > thead > tr > th.min span {
|
|
||||||
background: linear-gradient(90deg, rgba(204,204,204,0), rgba(204,204,204,0.5) 70%, #ccc);
|
|
||||||
}
|
|
||||||
html.light #blocked {
|
html.light #blocked {
|
||||||
background: #eee;
|
background: #eee;
|
||||||
}
|
}
|
||||||
@@ -951,7 +1042,24 @@ html.light #blk_abrt a {
|
|||||||
box-shadow: 0 .2em .4em #ddd;
|
box-shadow: 0 .2em .4em #ddd;
|
||||||
}
|
}
|
||||||
html.light #widget a {
|
html.light #widget a {
|
||||||
color: #fc5;
|
color: #06a;
|
||||||
|
}
|
||||||
|
html.light #wtoggle,
|
||||||
|
html.light #widgeti {
|
||||||
|
background: #eee;
|
||||||
|
}
|
||||||
|
html.light #wtoggle {
|
||||||
|
box-shadow: 0 0 .5em #bbb;
|
||||||
|
}
|
||||||
|
html.light #widget.open {
|
||||||
|
border-top: .2em solid #f7f7f7;
|
||||||
|
}
|
||||||
|
html.light #wzip,
|
||||||
|
html.light #wnp {
|
||||||
|
border-color: #ccc;
|
||||||
|
}
|
||||||
|
html.light #barbuf {
|
||||||
|
background: none;
|
||||||
}
|
}
|
||||||
html.light #files tr.sel:hover td {
|
html.light #files tr.sel:hover td {
|
||||||
background: #c37;
|
background: #c37;
|
||||||
@@ -968,20 +1076,15 @@ html.light #files tr.sel a.play.act {
|
|||||||
html.light input[type="checkbox"] + label {
|
html.light input[type="checkbox"] + label {
|
||||||
color: #333;
|
color: #333;
|
||||||
}
|
}
|
||||||
|
html.light .opwide>div {
|
||||||
|
border-color: #ccc;
|
||||||
|
}
|
||||||
html.light .opview input[type="text"] {
|
html.light .opview input[type="text"] {
|
||||||
background: #fff;
|
background: #fff;
|
||||||
color: #333;
|
color: #333;
|
||||||
box-shadow: 0 0 2px #888;
|
box-shadow: 0 0 2px #888;
|
||||||
border-color: #38d;
|
border-color: #38d;
|
||||||
}
|
}
|
||||||
html.light #ops:hover #opdesc {
|
|
||||||
background: #fff;
|
|
||||||
box-shadow: 0 .3em 1em #ccc;
|
|
||||||
}
|
|
||||||
html.light #opdesc code {
|
|
||||||
background: #060;
|
|
||||||
color: #fff;
|
|
||||||
}
|
|
||||||
html.light #u2tab a>span,
|
html.light #u2tab a>span,
|
||||||
html.light #files td div span {
|
html.light #files td div span {
|
||||||
color: #000;
|
color: #000;
|
||||||
@@ -991,9 +1094,6 @@ html.light #path {
|
|||||||
text-shadow: none;
|
text-shadow: none;
|
||||||
box-shadow: 0 0 .3em #bbb;
|
box-shadow: 0 0 .3em #bbb;
|
||||||
}
|
}
|
||||||
html.light #path a {
|
|
||||||
color: #333;
|
|
||||||
}
|
|
||||||
html.light #path a:not(:last-child)::after {
|
html.light #path a:not(:last-child)::after {
|
||||||
border-color: #ccc;
|
border-color: #ccc;
|
||||||
background: none;
|
background: none;
|
||||||
@@ -1002,7 +1102,7 @@ html.light #path a:not(:last-child)::after {
|
|||||||
}
|
}
|
||||||
html.light #path a:hover {
|
html.light #path a:hover {
|
||||||
background: none;
|
background: none;
|
||||||
color: #60a;
|
color: #90d;
|
||||||
}
|
}
|
||||||
html.light #files tbody div a {
|
html.light #files tbody div a {
|
||||||
color: #d38;
|
color: #d38;
|
||||||
@@ -1012,6 +1112,9 @@ html.light #files tr.sel a:hover {
|
|||||||
color: #000;
|
color: #000;
|
||||||
background: #fff;
|
background: #fff;
|
||||||
}
|
}
|
||||||
|
html.light #treeh {
|
||||||
|
background: #eee;
|
||||||
|
}
|
||||||
html.light #tree {
|
html.light #tree {
|
||||||
scrollbar-color: #a70 #ddd;
|
scrollbar-color: #a70 #ddd;
|
||||||
}
|
}
|
||||||
@@ -1021,4 +1124,162 @@ html.light #tree::-webkit-scrollbar {
|
|||||||
}
|
}
|
||||||
#tree::-webkit-scrollbar-thumb {
|
#tree::-webkit-scrollbar-thumb {
|
||||||
background: #da0;
|
background: #da0;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
#baguetteBox-overlay {
|
||||||
|
display: none;
|
||||||
|
opacity: 0;
|
||||||
|
position: fixed;
|
||||||
|
overflow: hidden;
|
||||||
|
touch-action: none;
|
||||||
|
top: 0;
|
||||||
|
left: 0;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
z-index: 1000000;
|
||||||
|
background: rgba(0, 0, 0, 0.8);
|
||||||
|
transition: opacity .3s ease;
|
||||||
|
}
|
||||||
|
#baguetteBox-overlay.visible {
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
#baguetteBox-overlay .full-image {
|
||||||
|
display: inline-block;
|
||||||
|
position: relative;
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
#baguetteBox-overlay .full-image figure {
|
||||||
|
display: inline;
|
||||||
|
margin: 0;
|
||||||
|
height: 100%;
|
||||||
|
}
|
||||||
|
#baguetteBox-overlay .full-image img {
|
||||||
|
display: inline-block;
|
||||||
|
width: auto;
|
||||||
|
height: auto;
|
||||||
|
max-height: 100%;
|
||||||
|
max-width: 100%;
|
||||||
|
vertical-align: middle;
|
||||||
|
box-shadow: 0 0 8px rgba(0, 0, 0, 0.6);
|
||||||
|
}
|
||||||
|
#baguetteBox-overlay .full-image figcaption {
|
||||||
|
display: block;
|
||||||
|
position: absolute;
|
||||||
|
bottom: 0;
|
||||||
|
width: 100%;
|
||||||
|
text-align: center;
|
||||||
|
line-height: 1.8;
|
||||||
|
white-space: normal;
|
||||||
|
color: #ccc;
|
||||||
|
}
|
||||||
|
#baguetteBox-overlay figcaption a {
|
||||||
|
background: rgba(0, 0, 0, 0.6);
|
||||||
|
border-radius: .4em;
|
||||||
|
padding: .3em .6em;
|
||||||
|
}
|
||||||
|
#baguetteBox-overlay .full-image:before {
|
||||||
|
content: "";
|
||||||
|
display: inline-block;
|
||||||
|
height: 50%;
|
||||||
|
width: 1px;
|
||||||
|
margin-right: -1px;
|
||||||
|
}
|
||||||
|
#baguetteBox-slider {
|
||||||
|
position: absolute;
|
||||||
|
left: 0;
|
||||||
|
top: 0;
|
||||||
|
height: 100%;
|
||||||
|
width: 100%;
|
||||||
|
white-space: nowrap;
|
||||||
|
transition: left .2s ease, transform .2s ease;
|
||||||
|
}
|
||||||
|
#baguetteBox-slider.bounce-from-right {
|
||||||
|
animation: bounceFromRight .4s ease-out;
|
||||||
|
}
|
||||||
|
#baguetteBox-slider.bounce-from-left {
|
||||||
|
animation: bounceFromLeft .4s ease-out;
|
||||||
|
}
|
||||||
|
@keyframes bounceFromRight {
|
||||||
|
0% {margin-left: 0}
|
||||||
|
50% {margin-left: -30px}
|
||||||
|
100% {margin-left: 0}
|
||||||
|
}
|
||||||
|
@keyframes bounceFromLeft {
|
||||||
|
0% {margin-left: 0}
|
||||||
|
50% {margin-left: 30px}
|
||||||
|
100% {margin-left: 0}
|
||||||
|
}
|
||||||
|
.baguetteBox-button#next-button,
|
||||||
|
.baguetteBox-button#previous-button {
|
||||||
|
top: 50%;
|
||||||
|
top: calc(50% - 30px);
|
||||||
|
width: 44px;
|
||||||
|
height: 60px;
|
||||||
|
}
|
||||||
|
.baguetteBox-button {
|
||||||
|
position: absolute;
|
||||||
|
cursor: pointer;
|
||||||
|
outline: none;
|
||||||
|
padding: 0;
|
||||||
|
margin: 0;
|
||||||
|
border: 0;
|
||||||
|
border-radius: 15%;
|
||||||
|
background: rgba(50, 50, 50, 0.5);
|
||||||
|
color: #ddd;
|
||||||
|
font: 1.6em sans-serif;
|
||||||
|
transition: background-color .3s ease;
|
||||||
|
}
|
||||||
|
.baguetteBox-button:focus,
|
||||||
|
.baguetteBox-button:hover {
|
||||||
|
background: rgba(50, 50, 50, 0.9);
|
||||||
|
}
|
||||||
|
#next-button {
|
||||||
|
right: 2%;
|
||||||
|
}
|
||||||
|
#previous-button {
|
||||||
|
left: 2%;
|
||||||
|
}
|
||||||
|
#close-button {
|
||||||
|
top: 20px;
|
||||||
|
right: 2%;
|
||||||
|
width: 30px;
|
||||||
|
height: 30px;
|
||||||
|
}
|
||||||
|
.baguetteBox-button svg {
|
||||||
|
position: absolute;
|
||||||
|
left: 0;
|
||||||
|
top: 0;
|
||||||
|
}
|
||||||
|
.baguetteBox-spinner {
|
||||||
|
width: 40px;
|
||||||
|
height: 40px;
|
||||||
|
display: inline-block;
|
||||||
|
position: absolute;
|
||||||
|
top: 50%;
|
||||||
|
left: 50%;
|
||||||
|
margin-top: -20px;
|
||||||
|
margin-left: -20px;
|
||||||
|
}
|
||||||
|
.baguetteBox-double-bounce1,
|
||||||
|
.baguetteBox-double-bounce2 {
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
border-radius: 50%;
|
||||||
|
background-color: #fff;
|
||||||
|
opacity: .6;
|
||||||
|
position: absolute;
|
||||||
|
top: 0;
|
||||||
|
left: 0;
|
||||||
|
animation: bounce 2s infinite ease-in-out;
|
||||||
|
}
|
||||||
|
.baguetteBox-double-bounce2 {
|
||||||
|
animation-delay: -1s;
|
||||||
|
}
|
||||||
|
@keyframes bounce {
|
||||||
|
0%, 100% {transform: scale(0)}
|
||||||
|
50% {transform: scale(1)}
|
||||||
|
}
|
||||||
|
@@ -2,131 +2,134 @@
|
|||||||
<html lang="en">
|
<html lang="en">
|
||||||
|
|
||||||
<head>
|
<head>
|
||||||
<meta charset="utf-8">
|
<meta charset="utf-8">
|
||||||
<title>⇆🎉 {{ title }}</title>
|
<title>⇆🎉 {{ title }}</title>
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css{{ ts }}">
|
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css{{ ts }}">
|
||||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css{{ ts }}">
|
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css{{ ts }}">
|
||||||
|
{%- if css %}
|
||||||
|
<link rel="stylesheet" type="text/css" media="screen" href="{{ css }}{{ ts }}">
|
||||||
|
{%- endif %}
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
<div id="ops">
|
<div id="ops"></div>
|
||||||
<a href="#" data-dest="" data-desc="close submenu">---</a>
|
|
||||||
{%- if have_up2k_idx %}
|
|
||||||
<a href="#" data-perm="read" data-dest="search" data-desc="search for files by attributes, path/name, music tags, or any combination of those.<br /><br /><code>foo bar</code> = must contain both foo and bar,<br /><code>foo -bar</code> = must contain foo but not bar,<br /><code>^yana .opus$</code> = must start with yana and have the opus extension">🔎</a>
|
|
||||||
<a href="#" data-dest="up2k" data-desc="up2k: upload files (if you have write-access) or toggle into the search-mode and drag files onto the search button to see if they exist somewhere on the server">🚀</a>
|
|
||||||
{%- else %}
|
|
||||||
<a href="#" data-perm="write" data-dest="up2k" data-desc="up2k: upload files with resume support (close your browser and drop the same files in later)">🚀</a>
|
|
||||||
{%- endif %}
|
|
||||||
<a href="#" data-perm="write" data-dest="bup" data-desc="bup: basic uploader, even supports netscape 4.0">🎈</a>
|
|
||||||
<a href="#" data-perm="write" data-dest="mkdir" data-desc="mkdir: create a new directory">📂</a>
|
|
||||||
<a href="#" data-perm="read write" data-dest="new_md" data-desc="new-md: create a new markdown document">📝</a>
|
|
||||||
<a href="#" data-perm="write" data-dest="msg" data-desc="msg: send a message to the server log">📟</a>
|
|
||||||
<a href="#" data-dest="cfg" data-desc="configuration options">⚙️</a>
|
|
||||||
<div id="opdesc"></div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div id="op_search" class="opview">
|
<div id="op_search" class="opview">
|
||||||
{%- if have_tags_idx %}
|
{%- if have_tags_idx %}
|
||||||
<div id="srch_form" class="tags"></div>
|
<div id="srch_form" class="tags"></div>
|
||||||
{%- else %}
|
{%- else %}
|
||||||
<div id="srch_form"></div>
|
<div id="srch_form"></div>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
<div id="srch_q"></div>
|
<div id="srch_q"></div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{%- include 'upload.html' %}
|
<div id="op_player" class="opview opbox opwide"></div>
|
||||||
|
|
||||||
<div id="op_cfg" class="opview opbox">
|
<div id="op_bup" class="opview opbox act">
|
||||||
<h3>switches</h3>
|
<div id="u2err"></div>
|
||||||
<div>
|
-<a id="tooltips" class="tgl btn" href="#">tooltips</a>
-<a id="lightmode" class="tgl btn" href="#">lightmode</a>
-<a id="griden" class="tgl btn" href="#">the grid</a>
-<a id="thumbs" class="tgl btn" href="#">thumbs</a>
-</div>
-{%- if have_zip %}
-<h3>folder download</h3>
-<div id="arc_fmt"></div>
-{%- endif %}
-<h3>key notation</h3>
-<div id="key_notation"></div>
-</div>
-
-<h1 id="path">
-<a href="#" id="entree">🌲</a>
-{%- for n in vpnodes %}
-<a href="/{{ n[0] }}">{{ n[1] }}</a>
-{%- endfor %}
-</h1>
-
-<div id="tree">
-<a href="#" id="detree">🍞...</a>
-<a href="#" class="btn" step="2" id="twobytwo">+</a>
-<a href="#" class="btn" step="-2" id="twig">–</a>
-<a href="#" class="tgl btn" id="dyntree">a</a>
-<ul id="treeul"></ul>
-<div id="thx_ff"> </div>
-</div>
+<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
+<input type="hidden" name="act" value="bput" />
+<input type="file" name="f" multiple><br />
+<input type="submit" value="start upload">
+</form>
+</div>
+
+<div id="op_mkdir" class="opview opbox act">
+<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
+<input type="hidden" name="act" value="mkdir" />
+<input type="text" name="name" size="30">
+<input type="submit" value="mkdir">
+</form>
+</div>
+
+<div id="op_new_md" class="opview opbox">
+<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
+<input type="hidden" name="act" value="new_md" />
+<input type="text" name="name" size="30">
+<input type="submit" value="create doc">
+</form>
+</div>
+
+<div id="op_msg" class="opview opbox act">
+<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
+<input type="text" name="msg" size="30">
+<input type="submit" value="send msg">
+</form>
+</div>
+
+<div id="op_up2k" class="opview"></div>
+
+<div id="op_cfg" class="opview opbox opwide"></div>
+
+<h1 id="path">
+<a href="#" id="entree" tt="show directory tree$NHotkey: B">🌲</a>
+{%- for n in vpnodes %}
+<a href="/{{ n[0] }}">{{ n[1] }}</a>
+{%- endfor %}
+</h1>
+
+<div id="tree"></div>
+
 <div id="wrap">
 
 <div id="pro" class="logue">{{ logues[0] }}</div>
 
 <table id="files">
 <thead>
 <tr>
 <th name="lead"><span>c</span></th>
 <th name="href"><span>File Name</span></th>
 <th name="sz" sort="int"><span>Size</span></th>
 {%- for k in taglist %}
 {%- if k.startswith('.') %}
 <th name="tags/{{ k }}" sort="int"><span>{{ k[1:] }}</span></th>
 {%- else %}
 <th name="tags/{{ k }}"><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
 {%- endif %}
 {%- endfor %}
 <th name="ext"><span>T</span></th>
 <th name="ts"><span>Date</span></th>
 </tr>
 </thead>
 <tbody>
 
 {%- for f in files %}
 <tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
 {%- if f.tags is defined %}
 {%- for k in taglist %}
 <td>{{ f.tags[k] }}</td>
 {%- endfor %}
 {%- endif %}
 <td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
 {%- endfor %}
 
 </tbody>
 </table>
 
 <div id="epi" class="logue">{{ logues[1] }}</div>
 
-<h2><a href="?h">control-panel</a></h2>
+<h2><a href="/?h">control-panel</a></h2>
 
 </div>
 
 {%- if srv_info %}
 <div id="srv_info"><span>{{ srv_info }}</span></div>
 {%- endif %}
 
 <div id="widget"></div>
 
 <script>
-var tag_order_cfg = {{ tag_order }};
-</script>
-<script src="/.cpr/util.js{{ ts }}"></script>
-<script src="/.cpr/browser.js{{ ts }}"></script>
-<script src="/.cpr/up2k.js{{ ts }}"></script>
-<script>
-apply_perms({{ perms }});
-</script>
+var perms = {{ perms }},
+tag_order_cfg = {{ tag_order }},
+have_up2k_idx = {{ have_up2k_idx|tojson }},
+have_tags_idx = {{ have_tags_idx|tojson }},
+have_zip = {{ have_zip|tojson }};
+</script>
+<script src="/.cpr/util.js{{ ts }}"></script>
+<script src="/.cpr/browser.js{{ ts }}"></script>
+<script src="/.cpr/up2k.js{{ ts }}"></script>
 </body>
 
 </html>

File diff suppressed because it is too large
@@ -2,59 +2,59 @@
 <html lang="en">
 
 <head>
 <meta charset="utf-8">
 <title>{{ title }}</title>
 <meta http-equiv="X-UA-Compatible" content="IE=edge">
 <meta name="viewport" content="width=device-width, initial-scale=0.8">
 <style>
 html{font-family:sans-serif}
 td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
 a{display:block}
 </style>
 </head>
 
 <body>
 {%- if srv_info %}
 <p><span>{{ srv_info }}</span></p>
 {%- endif %}
 
 {%- if have_b_u %}
 <form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
 <input type="hidden" name="act" value="bput" />
 <input type="file" name="f" multiple /><br />
 <input type="submit" value="start upload" />
 </form>
 <br />
 {%- endif %}
 
 {%- if logues[0] %}
 <div>{{ logues[0] }}</div><br />
 {%- endif %}
 
 <table id="files">
 <thead>
 <tr>
 <th name="lead"><span>c</span></th>
 <th name="href"><span>File Name</span></th>
 <th name="sz" sort="int"><span>Size</span></th>
 <th name="ts"><span>Date</span></th>
 </tr>
 </thead>
 <tbody>
 <tr><td></td><td><a href="../{{ url_suf }}">parent folder</a></td><td>-</td><td>-</td></tr>
 
 {%- for f in files %}
 <tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}{{ url_suf }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.dt }}</td></tr>
 {%- endfor %}
 
 </tbody>
 </table>
 
 {%- if logues[1] %}
 <div>{{ logues[1] }}</div><br />
 {%- endif %}
 
-<h2><a href="{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
+<h2><a href="/{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
 
 </body>
 </html>
copyparty/web/dbg-audio.js (new file, 61 lines)
@@ -0,0 +1,61 @@
+var ofun = audio_eq.apply.bind(audio_eq);
+audio_eq.apply = function () {
+var ac1 = mp.ac;
+ofun();
+var ac = mp.ac,
+w = 2048,
+h = 256;
+
+if (!audio_eq.filters.length) {
+audio_eq.ana = null;
+return;
+}
+
+var can = ebi('fft_can');
+if (!can) {
+can = mknod('canvas');
+can.setAttribute('id', 'fft_can');
+can.style.cssText = 'position:absolute;left:0;bottom:5em;width:' + w + 'px;height:' + h + 'px;z-index:9001';
+document.body.appendChild(can);
+can.width = w;
+can.height = h;
+}
+var cc = can.getContext('2d');
+if (!ac)
+return;
+
+var ana = ac.createAnalyser();
+ana.smoothingTimeConstant = 0;
+ana.fftSize = 8192;
+
+audio_eq.filters[0].connect(ana);
+audio_eq.ana = ana;
+
+var buf = new Uint8Array(ana.frequencyBinCount),
+colw = can.width / buf.length;
+
+cc.fillStyle = '#fc0';
+function draw() {
+if (ana == audio_eq.ana)
+requestAnimationFrame(draw);
+
+ana.getByteFrequencyData(buf);
+
+cc.clearRect(0, 0, can.width, can.height);
+
+/*var x = 0, w = 1;
+for (var a = 0; a < buf.length; a++) {
+cc.fillRect(x, h - buf[a], w, h);
+x += w;
+}*/
+var mul = Math.pow(w, 4) / buf.length;
+for (var x = 0; x < w; x++) {
+var a = Math.floor(Math.pow(x, 4) / mul),
+v = buf[a];
+
+cc.fillRect(x, h - v, 1, v);
+}
+}
+draw();
+};
+audio_eq.apply();
@@ -26,10 +26,23 @@ a {
 border-radius: .2em;
 padding: .2em .8em;
 }
-td, th {
+table {
+border-collapse: collapse;
+}
+.vols td,
+.vols th {
 padding: .3em .6em;
 text-align: left;
 }
+.num {
+border-right: 1px solid #bbb;
+}
+.num td {
+padding: .1em .7em .1em 0;
+}
+.num td:first-child {
+text-align: right;
+}
 .btns {
 margin: 1em 0;
 }
@@ -57,4 +70,7 @@ html.dark input {
 border-radius: .5em;
 padding: .5em .7em;
 margin: 0 .5em 0 0;
+}
+html.dark .num {
+border-color: #777;
 }
@@ -15,16 +15,25 @@
 
 {%- if avol %}
 <h1>admin panel:</h1>
-<table>
-<thead><tr><th>vol</th><th>action</th><th>status</th></tr></thead>
-<tbody>
-{% for mp in avol %}
-{%- if mp in vstate and vstate[mp] %}
-<tr><td><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a href="{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
-{%- endif %}
-{% endfor %}
-</tbody>
-</table>
+<table><tr><td> <!-- hehehe -->
+<table class="num">
+<tr><td>scanning</td><td>{{ scanning }}</td></tr>
+<tr><td>hash-q</td><td>{{ hashq }}</td></tr>
+<tr><td>tag-q</td><td>{{ tagq }}</td></tr>
+<tr><td>mtp-q</td><td>{{ mtpq }}</td></tr>
+</table>
+</td><td>
+<table class="vols">
+<thead><tr><th>vol</th><th>action</th><th>status</th></tr></thead>
+<tbody>
+{% for mp in avol %}
+{%- if mp in vstate and vstate[mp] %}
+<tr><td><a href="{{ mp }}{{ url_suf }}">{{ mp }}</a></td><td><a href="{{ mp }}?scan">rescan</a></td><td>{{ vstate[mp] }}</td></tr>
+{%- endif %}
+{% endfor %}
+</tbody>
+</table>
+</td></tr></table>
 <div class="btns">
 <a href="{{ avol[0] }}?stack">dump stack</a>
 </div>
@@ -50,7 +59,7 @@
 
 <h1>login for more:</h1>
 <ul>
-<form method="post" enctype="multipart/form-data" action="/{{ url_suf }}">
+<form method="post" enctype="multipart/form-data" action="/">
 <input type="hidden" name="act" value="login" />
 <input type="password" name="cppwd" />
 <input type="submit" value="Login" />
@@ -444,8 +444,7 @@ function up2k_init(subtle) {
 }
 
 // show uploader if the user only has write-access
-var perms = document.body.getAttribute('perms');
-if (perms && !has(perms.split(' '), 'read'))
+if (perms.length && !has(perms, 'read'))
 goto('up2k');
 
 // shows or clears a message in the basic uploader ui
@@ -741,9 +740,17 @@ function up2k_init(subtle) {
 
 function handshakes_permitted() {
 var lim = multitask ? 1 : 0;
-return lim >=
+if (lim <
 st.todo.upload.length +
-st.busy.upload.length;
+st.busy.upload.length)
+return false;
+
+var cd = st.todo.handshake.length ? st.todo.handshake[0].cooldown : 0;
+if (cd && cd - Date.now() > 0)
+return false;
+
+return true;
 }
 
 function hashing_permitted() {
@@ -804,6 +811,14 @@ function up2k_init(subtle) {
 
 var mou_ikkai = false;
 
+if (st.busy.handshake.length > 0 &&
+st.busy.handshake[0].busied < Date.now() - 30 * 1000
+) {
+console.log("retrying stuck handshake");
+var t = st.busy.handshake.shift();
+st.todo.handshake.unshift(t);
+}
+
 if (st.todo.handshake.length > 0 &&
 st.busy.handshake.length == 0 && (
 st.todo.handshake[0].t4 || (
@@ -963,8 +978,8 @@ function up2k_init(subtle) {
 while (segm_next());
 
 var hash_done = function (hashbuf) {
-var hslice = new Uint8Array(hashbuf).subarray(0, 32),
-b64str = buf2b64(hslice).replace(/=$/, '');
+var hslice = new Uint8Array(hashbuf).subarray(0, 33),
+b64str = buf2b64(hslice);
 
 hashtab[nch] = b64str;
 t.hash.push(nch);
@@ -989,6 +1004,7 @@ function up2k_init(subtle) {
 pvis.seth(t.n, 1, '📦 wait');
 st.busy.hash.splice(st.busy.hash.indexOf(t), 1);
 st.todo.handshake.push(t);
+tasker();
 };
 
 if (subtle)
@@ -1019,11 +1035,28 @@ function up2k_init(subtle) {
 //
 
 function exec_handshake() {
-var t = st.todo.handshake.shift();
+var t = st.todo.handshake.shift(),
+me = Date.now();
+
 st.busy.handshake.push(t);
+t.busied = me;
 
 var xhr = new XMLHttpRequest();
+xhr.onerror = function () {
+if (t.busied != me) {
+console.log('zombie handshake onerror,', t);
+return;
+}
+console.log('handshake onerror, retrying');
+st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
+st.todo.handshake.unshift(t);
+tasker();
+};
 xhr.onload = function (e) {
+if (t.busied != me) {
+console.log('zombie handshake onload,', t);
+return;
+}
 if (xhr.status == 200) {
 var response = JSON.parse(xhr.responseText);
 
@@ -1130,6 +1163,15 @@ function up2k_init(subtle) {
 if (rsp.indexOf('<pre>') === 0)
 rsp = rsp.slice(5);
 
+if (rsp.indexOf('rate-limit ') !== -1) {
+var penalty = rsp.replace(/.*rate-limit /, "").split(' ')[0];
+console.log("rate-limit: " + penalty);
+t.cooldown = Date.now() + parseFloat(penalty) * 1000;
+st.busy.handshake.splice(st.busy.handshake.indexOf(t), 1);
+st.todo.handshake.unshift(t);
+return;
+}
+
 st.bytes.uploaded += t.size;
 if (rsp.indexOf('partial upload exists') !== -1 ||
 rsp.indexOf('file already exists') !== -1) {
@@ -1241,7 +1283,7 @@ function up2k_init(subtle) {
 fpx = parseInt(getComputedStyle(bar)['font-size']),
 wem = wpx * 1.0 / fpx,
 wide = wem > 54,
-parent = ebi(wide ? 'u2btn_cw' : 'u2btn_ct'),
+parent = ebi(wide && has(perms, 'write') ? 'u2btn_cw' : 'u2btn_ct'),
 btn = ebi('u2btn');
 
 //console.log([wpx, fpx, wem]);
@@ -1254,31 +1296,18 @@ function up2k_init(subtle) {
 window.addEventListener('resize', onresize);
 onresize();
 
-function desc_show(e) {
-var cfg = sread('tooltips');
-if (cfg !== null && cfg != '1')
-return;
-
-var msg = this.getAttribute('alt'),
-cdesc = ebi('u2cdesc');
-
-cdesc.innerHTML = msg.replace(/\$N/g, "<br />");
-cdesc.setAttribute('class', 'show');
-}
-function desc_hide(e) {
-ebi('u2cdesc').setAttribute('class', '');
-}
-
-var o = QSA('#u2conf *[alt]');
-for (var a = o.length - 1; a >= 0; a--) {
-o[a].parentNode.getElementsByTagName('input')[0].setAttribute('alt', o[a].getAttribute('alt'));
-}
-
-var o = QSA('#u2conf *[alt]');
-for (var a = 0; a < o.length; a++) {
-o[a].onfocus = desc_show;
-o[a].onblur = desc_hide;
-o[a].onmouseenter = desc_show;
-o[a].onmouseleave = desc_hide;
-}
+if (is_touch) {
+// android-chrome wobbles for a bit; firefox / iOS-safari are OK
+setTimeout(onresize, 20);
+setTimeout(onresize, 100);
+setTimeout(onresize, 500);
+}
+
+var o = QSA('#u2conf *[tt]');
+for (var a = o.length - 1; a >= 0; a--) {
+o[a].parentNode.getElementsByTagName('input')[0].setAttribute('tt', o[a].getAttribute('tt'));
+}
+tt.init();
 
 function bumpthread(dir) {
 try {
@@ -1326,14 +1355,12 @@ function up2k_init(subtle) {
 }
 
 function set_fsearch(new_state) {
-var perms = document.body.getAttribute('perms'),
-fixed = false;
+var fixed = false;
 
 if (!ebi('fsearch')) {
 new_state = false;
 }
-else if (perms) {
-perms = perms.split(' ');
+else if (perms.length) {
 if (!has(perms, 'write')) {
 new_state = true;
 fixed = true;
@@ -1363,6 +1390,8 @@ function up2k_init(subtle) {
 ebi('u2bm').innerHTML = ico + ' <sup>' + desc + '</sup>';
 }
 catch (ex) { }
+
+onresize();
 }
 
 function tgl_flag_en() {
@@ -1426,5 +1455,9 @@ function warn_uploader_busy(e) {
 }
 
+
+tt.init();
+
 if (QS('#op_up2k.act'))
 goto_up2k();
+
+apply_perms(perms);
@@ -211,29 +211,6 @@
 box-shadow: none;
 opacity: .2;
 }
-#u2cdesc {
-position: absolute;
-width: 34em;
-left: calc(50% - 15em);
-background: #222;
-border: 0 solid #555;
-text-align: center;
-overflow: hidden;
-margin: 0 -2em;
-padding: 0 1em;
-height: 0;
-opacity: .1;
-transition: all 0.14s ease-in-out;
-box-shadow: 0 .2em .5em #222;
-border-radius: .4em;
-z-index: 1;
-}
-#u2cdesc.show {
-padding: 1em;
-height: auto;
-border-width: .2em 0;
-opacity: 1;
-}
 #u2foot {
 color: #fff;
 font-style: italic;
@@ -286,10 +263,6 @@ html.light #u2conf .txtbox.err {
 background: #f96;
 color: #300;
 }
-html.light #u2cdesc {
-background: #fff;
-border: none;
-}
 html.light #op_up2k.srch #u2btn {
 border-color: #a80;
 }
@@ -1,103 +0,0 @@
-<div id="op_bup" class="opview opbox act">
-<div id="u2err"></div>
-<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
-<input type="hidden" name="act" value="bput" />
-<input type="file" name="f" multiple><br />
-<input type="submit" value="start upload">
-</form>
-</div>
-
-<div id="op_mkdir" class="opview opbox act">
-<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
-<input type="hidden" name="act" value="mkdir" />
-<input type="text" name="name" size="30">
-<input type="submit" value="mkdir">
-</form>
-</div>
-
-<div id="op_new_md" class="opview opbox">
-<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
-<input type="hidden" name="act" value="new_md" />
-<input type="text" name="name" size="30">
-<input type="submit" value="create doc">
-</form>
-</div>
-
-<div id="op_msg" class="opview opbox act">
-<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
-<input type="text" name="msg" size="30">
-<input type="submit" value="send msg">
-</form>
-</div>
-
-<div id="op_up2k" class="opview">
-<form id="u2form" method="post" enctype="multipart/form-data" onsubmit="return false;"></form>
-
-<table id="u2conf">
-<tr>
-<td><br />parallel uploads:</td>
-<td rowspan="2">
-<input type="checkbox" id="multitask" />
-<label for="multitask" alt="continue hashing other files while uploading">🏃</label>
-</td>
-<td rowspan="2">
-<input type="checkbox" id="ask_up" />
-<label for="ask_up" alt="ask for confirmation befofre upload starts">💭</label>
-</td>
-<td rowspan="2">
-<input type="checkbox" id="flag_en" />
-<label for="flag_en" alt="ensure only one tab is uploading at a time $N (other tabs must have this enabled too)">💤</label>
-</td>
-{%- if have_up2k_idx %}
-<td data-perm="read" rowspan="2">
-<input type="checkbox" id="fsearch" />
-<label for="fsearch" alt="don't actually upload, instead check if the files already $N exist on the server (will scan all folders you can read)">🔎</label>
-</td>
-{%- endif %}
-<td data-perm="read" rowspan="2" id="u2btn_cw"></td>
-</tr>
-<tr>
-<td>
-<a href="#" id="nthread_sub">–</a><input
-class="txtbox" id="nthread" value="2"/><a
-href="#" id="nthread_add">+</a><br />
-</td>
-</tr>
-</table>
-
-<div id="u2cdesc"></div>
-
-<div id="u2notbtn"></div>
-
-<div id="u2btn_ct">
-<div id="u2btn">
-<span id="u2bm"></span><br />
-drag/drop files<br />
-and folders here<br />
-(or click me)
-</div>
-</div>
-
-<div id="u2cards">
-<a href="#" act="ok">ok <span>0</span></a><a
-href="#" act="ng">ng <span>0</span></a><a
-href="#" act="done">done <span>0</span></a><a
-href="#" act="bz" class="act">busy <span>0</span></a><a
-href="#" act="q">que <span>0</span></a>
-</div>
-
-<table id="u2tab">
-<thead>
-<tr>
-<td>filename</td>
-<td>status</td>
-<td>progress<a href="#" id="u2cleanup">cleanup</a></td>
-</tr>
-</thead>
-<tbody></tbody>
-</table>
-
-<p id="u2foot"></p>
-<p id="u2footfoot" data-perm="write">( you can use the <a href="#" id="u2nope">basic uploader</a> if you don't need lastmod timestamps, resumable uploads, or progress bars )</p>
-</div>
@@ -6,7 +6,7 @@ if (!window['console'])
 };
 
 
-var clickev = window.Touch ? 'touchstart' : 'click',
+var is_touch = 'ontouchstart' in window,
 ANDROID = /(android)/i.test(navigator.userAgent);
 
 
@@ -67,6 +67,9 @@ function ev(e) {
 if (e.stopPropagation)
 e.stopPropagation();
 
+if (e.stopImmediatePropagation)
+e.stopImmediatePropagation();
+
 e.returnValue = false;
 return e;
 }
@@ -285,63 +288,6 @@ function makeSortable(table, cb) {
 }
 
 
-(function () {
-var ops = QSA('#ops>a');
-for (var a = 0; a < ops.length; a++) {
-ops[a].onclick = opclick;
-}
-})();
-
-
-function opclick(e) {
-ev(e);
-
-var dest = this.getAttribute('data-dest');
-goto(dest);
-
-swrite('opmode', dest || null);
-
-var input = QS('.opview.act input:not([type="hidden"])')
-if (input)
-input.focus();
-}
-
-
-function goto(dest) {
-var obj = QSA('.opview.act');
-for (var a = obj.length - 1; a >= 0; a--)
-clmod(obj[a], 'act');
-
-obj = QSA('#ops>a');
-for (var a = obj.length - 1; a >= 0; a--)
-clmod(obj[a], 'act');
-
-if (dest) {
-var ui = ebi('op_' + dest);
-clmod(ui, 'act', true);
-QS('#ops>a[data-dest=' + dest + ']').className += " act";
-
-var fn = window['goto_' + dest];
-if (fn)
-fn();
-}
-
-if (window['treectl'])
-treectl.onscroll();
-}
-
-
-(function () {
-goto();
-var op = sread('opmode');
-if (op !== null && op !== '.')
-try {
-goto(op);
-}
-catch (ex) { }
-})();
-
-
 function linksplit(rp) {
 var ret = [];
 var apath = '/';
@@ -416,6 +362,15 @@ function get_vpath() {
 }
 
 
+function get_pwd() {
+var pwd = ('; ' + document.cookie).split('; cppwd=');
+if (pwd.length < 2)
+return null;
+
+return pwd[1].split(';')[0];
+}
+
+
 function unix2iso(ts) {
 return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
 }
@@ -528,3 +483,67 @@ function hist_replace(url) {
 console.log("h-repl " + url);
 history.replaceState(url, url, url);
 }
+
+
+var tt = (function () {
+var r = {
+"tt": mknod("div"),
+"en": true
+};
+
+r.tt.setAttribute('id', 'tt');
+document.body.appendChild(r.tt);
+
+function show() {
+var cfg = sread('tooltips');
+if (cfg !== null && cfg != '1')
+return;
+
+var msg = this.getAttribute('tt');
+if (!msg)
+return;
+
+var pos = this.getBoundingClientRect(),
+left = pos.left < window.innerWidth / 2,
+top = pos.top < window.innerHeight / 2;
+
+r.tt.style.top = top ? pos.bottom + 'px' : 'auto';
+r.tt.style.bottom = top ? 'auto' : (window.innerHeight - pos.top) + 'px';
+r.tt.style.left = left ? pos.left + 'px' : 'auto';
+r.tt.style.right = left ? 'auto' : (window.innerWidth - pos.right) + 'px';
+
+r.tt.innerHTML = msg.replace(/\$N/g, "<br />");
+clmod(r.tt, 'show', 1);
+}
+
+function hide() {
+clmod(r.tt, 'show');
+}
+
+r.init = function () {
+var ttb = ebi('tooltips');
+if (ttb) {
+ttb.onclick = function (e) {
+ev(e);
+r.en = !r.en;
+bcfg_set('tooltips', r.en);
+r.init();
+};
+r.en = bcfg_get('tooltips', true)
+}
+
+var _show = r.en ? show : null,
+_hide = r.en ? hide : null;
+
+var o = QSA('*[tt]');
+for (var a = o.length - 1; a >= 0; a--) {
+o[a].onfocus = _show;
+o[a].onblur = _hide;
+o[a].onmouseenter = _show;
+o[a].onmouseleave = _hide;
+}
+hide();
+};
+
+return r;
+})();
docs/README.md (new file, 22 lines)
@@ -0,0 +1,22 @@
+# example `.epilogue.html`
+save one of these as `.epilogue.html` inside a folder to customize it:
+
+* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
+
+
+
+# example browser-css
+point `--css-browser` to one of these by URL:
+
+* [`browser.css`](browser.css) changes the background
+* [`browser-icons.css`](browser-icons.css) adds filetype icons
+
+
+
+# other stuff
+
+## [`rclone.md`](rclone.md)
+* notes on using rclone as a fuse client/server
+
+## [`example.conf`](example.conf)
+* example config file for `-c` which never really happened
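A usage sketch for the two customization hooks described in the docs/README.md above; only the `--css-browser` flag, the `.epilogue.html` filename and the two stylesheets come from the notes themselves, while the share layout, paths and port are assumptions for illustration:

# assumption: the docs folder is shared by copyparty, so /docs/browser.css resolves over http
./copyparty-sfx.py --css-browser /docs/browser.css

# per-folder tweak instead: save an epilogue inside the folder that should use it (hypothetical path)
cp docs/minimal-up2k.html /srv/share/incoming/.epilogue.html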
docs/biquad.html (new file, 95 lines)
@@ -0,0 +1,95 @@
+<!DOCTYPE html><html><head></head><body><script>
+
+setTimeout(location.reload.bind(location), 700);
+document.documentElement.scrollLeft = 0;
+
+var can = document.createElement('canvas'),
+cc = can.getContext('2d'),
+w = 2048,
+h = 1024;
+
+w = 2048;
+
+can.width = w;
+can.height = h;
+document.body.appendChild(can);
+can.style.cssText = 'width:' + w + 'px;height:' + h + 'px';
+
+cc.fillStyle = '#000';
+cc.fillRect(0, 0, w, h);
+
+var cfg = [ // hz, q, g
+[31.25 * 0.88, 0, 1.4], // shelf
+[31.25 * 1.04, 0.7, 0.96], // peak
+[62.5, 0.7, 1],
+[125, 0.8, 1],
+[250, 0.9, 1.03],
+[500, 0.9, 1.1],
+[1000, 0.9, 1.1],
+[2000, 0.9, 1.105],
+[4000, 0.88, 1.05],
+[8000 * 1.006, 0.73, 1.24],
+//[16000 * 1.00, 0.5, 1.75], // peak.v1
+//[16000 * 1.19, 0, 1.8] // shelf.v1
+[16000 * 0.89, 0.7, 1.26], // peak
+[16000 * 1.13, 0.82, 1.09], // peak
+[16000 * 1.205, 0, 1.9] // shelf
+];
+
+var freqs = new Float32Array(22000),
+sum = new Float32Array(freqs.length),
+ac = new AudioContext(),
+step = w / freqs.length,
+colors = [
+'rgba(255, 0, 0, 0.7)',
+'rgba(0, 224, 0, 0.7)',
+'rgba(0, 64, 255, 0.7)'
+];
+
+var order = [];
+
+for (var a = 0; a < cfg.length; a += 2)
+order.push(a);
+
+for (var a = 1; a < cfg.length; a += 2)
+order.push(a);
+
+for (var ia = 0; ia < order.length; ia++) {
+var a = order[ia],
+fi = ac.createBiquadFilter(),
+mag = new Float32Array(freqs.length),
+phase = new Float32Array(freqs.length);
+
+for (var b = 0; b < freqs.length; b++)
+freqs[b] = b;
+
+fi.type = a == 0 ? 'lowshelf' : a == cfg.length - 1 ? 'highshelf' : 'peaking';
+fi.frequency.value = cfg[a][0];
+fi.Q.value = cfg[a][1];
+fi.gain.value = 1;
+
+fi.getFrequencyResponse(freqs, mag, phase);
+cc.fillStyle = colors[a % colors.length];
+for (var b = 0; b < sum.length; b++) {
+mag[b] -= 1;
+sum[b] += mag[b] * cfg[a][2];
+var y = h - (mag[b] * h * 3);
+cc.fillRect(b * step, y, step, h - y);
+cc.fillRect(b * step - 1, y - 1, 3, 3);
+}
+}
+
+var min = 999999, max = 0;
+for (var a = 0; a < sum.length; a++) {
+min = Math.min(min, sum[a]);
+max = Math.max(max, sum[a]);
+}
+cc.fillStyle = 'rgba(255,255,255,1)';
+for (var a = 0; a < sum.length; a++) {
+var v = (sum[a] - min) / (max - min);
+cc.fillRect(a * step, 0, step, v * h / 2);
+}
+
+cc.fillRect(0, 460, w, 1);
+
+</script></body></html>
docs/browser-icons.css (new file, 66 lines)
@@ -0,0 +1,66 @@
+/* put filetype icons inline with text
+#ggrid>a>span:before,
+#ggrid>a>span.dir:before {
+display: inline;
+line-height: 0;
+font-size: 1.7em;
+margin: -.7em .1em -.5em -.6em;
+}
+*/
+
+
+/* move folder icons top-left */
+#ggrid>a>span.dir:before {
+content: initial;
+}
+#ggrid>a[href$="/"]:before {
+content: '📂';
+}
+
+
+/* put filetype icons top-left */
+#ggrid>a:before {
+display: block;
+position: absolute;
+padding: .3em 0;
+margin: -.4em;
+text-shadow: 0 0 .1em #000;
+background: linear-gradient(135deg,rgba(255,255,255,0) 50%,rgba(255,255,255,0.2));
+border-radius: .3em;
+font-size: 2em;
+}
+
+
+/* video */
+#ggrid>a:is(
+[href$=".mkv"i],
+[href$=".mp4"i],
+[href$=".webm"i],
+):before {
+content: '📺';
+}
+
+
+/* audio */
+#ggrid>a:is(
+[href$=".mp3"i],
+[href$=".ogg"i],
+[href$=".opus"i],
+[href$=".flac"i],
+[href$=".m4a"i],
+[href$=".aac"i],
+):before {
+content: '🎵';
+}
+
+
+/* image */
+#ggrid>a:is(
+[href$=".jpg"i],
+[href$=".jpeg"i],
+[href$=".png"i],
+[href$=".gif"i],
+[href$=".webp"i],
+):before {
+content: '🎨';
+}

docs/browser.css (new file, 29 lines)
@@ -0,0 +1,29 @@
+html {
+background: #333 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed;
+}
+#files th {
+background: rgba(32, 32, 32, 0.9) !important;
+}
+#ops,
+#treeul,
+#files td {
+background: rgba(32, 32, 32, 0.3) !important;
+}
+
+
+html.light {
+background: #eee url('/wp/wallhaven-dpxl6l.png') center / cover no-repeat fixed;
+}
+html.light #files th {
+background: rgba(255, 255, 255, 0.9) !important;
+}
+html.light #ops,
+html.light #treeul,
+html.light #files td {
+background: rgba(248, 248, 248, 0.8) !important;
+}
+
+
+#files * {
+background: transparent !important;
+}
@@ -86,6 +86,9 @@ var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.quer
 # get the size and video-id of all youtube vids in folder, assuming filename ends with -id.ext, and create a copyparty search query
 find -maxdepth 1 -printf '%s %p\n' | sort -n | awk '!/-([0-9a-zA-Z_-]{11})\.(mkv|mp4|webm)$/{next} {sub(/\.[^\.]+$/,"");n=length($0);v=substr($0,n-10);print $1, v}' | tee /dev/stderr | awk 'BEGIN {p="("} {printf("%s name like *-%s.* ",p,$2);p="or"} END {print ")\n"}' | cat >&2
 
+# unique stacks in a stackdump
+f=a; rm -rf stacks; mkdir stacks; grep -E '^#' $f | while IFS= read -r n; do awk -v n="$n" '!$0{o=0} o; $0==n{o=1}' <$f >stacks/f; h=$(sha1sum <stacks/f | cut -c-16); mv stacks/f stacks/$h-"$n"; done ; find stacks/ | sort | uniq -cw24
+
 
 ##
 ## sqlite3 stuff
@@ -100,6 +103,15 @@ cat warks | while IFS= read -r x; do sqlite3 up2k.db "delete from mt where w = '
 # dump all dbs
 find -iname up2k.db | while IFS= read -r x; do sqlite3 "$x" 'select substr(w,1,12), rd, fn from up' | sed -r 's/\|/ \| /g' | while IFS= read -r y; do printf '%s | %s\n' "$x" "$y"; done; done
 
+# unschedule mtp scan for all files somewhere under "enc/"
+sqlite3 -readonly up2k.db 'select substr(up.w,1,16) from up inner join mt on mt.w = substr(up.w,1,16) where rd like "enc/%" and +mt.k = "t:mtp"' > keys; awk '{printf "delete from mt where w = \"%s\" and +k = \"t:mtp\";\n", $0}' <keys | tee /dev/stderr | sqlite3 up2k.db
+
+# compare metadata key "key" between two databases
+sqlite3 -readonly up2k.db.key-full 'select w, v from mt where k = "key" order by w' > k1; sqlite3 -readonly up2k.db 'select w, v from mt where k = "key" order by w' > k2; ok=0; ng=0; while IFS='|' read w k2; do k1="$(grep -E "^$w" k1 | sed -r 's/.*\|//')"; [ "$k1" = "$k2" ] && ok=$((ok+1)) || { ng=$((ng+1)); printf '%3s %3s %s\n' "$k1" "$k2" "$(sqlite3 -readonly up2k.db.key-full "select * from up where substr(w,1,16) = '$w'" | sed -r 's/\|/ | /g')"; }; done < <(cat k2); echo "match $ok diff $ng"
+
+# actually this is much better
+sqlite3 -readonly up2k.db.key-full 'select w, v from mt where k = "key" order by w' > k1; sqlite3 -readonly up2k.db 'select mt.w, mt.v, up.rd, up.fn from mt inner join up on mt.w = substr(up.w,1,16) where mt.k = "key" order by up.rd, up.fn' > k2; ok=0; ng=0; while IFS='|' read w k2 path; do k1="$(grep -E "^$w" k1 | sed -r 's/.*\|//')"; [ "$k1" = "$k2" ] && ok=$((ok+1)) || { ng=$((ng+1)); printf '%3s %3s %s\n' "$k1" "$k2" "$path"; }; done < <(cat k2); echo "match $ok diff $ng"
+
 
 ##
 ## media
@@ -153,6 +165,9 @@ dbg.asyncStore.pendingBreakpoints = {}
 # fix firefox phantom breakpoints
 about:config >> devtools.debugger.prefs-schema-version = -1
 
+# determine server version
+git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
+
 
 ##
 ## http 206
@@ -194,3 +209,4 @@ mk() { rm -rf /tmp/foo; sudo -u ed bash -c 'mkdir /tmp/foo; echo hi > /tmp/foo/b
 mk && t0="$(date)" && while true; do date -s "$(date '+ 1 hour')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
 mk && sudo -u ed flock /tmp/foo sleep 40 & sleep 1; ps aux | grep -E 'sleep 40$' && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
 mk && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; tar -cf/dev/null /tmp/foo; done; echo "$t0"
+

docs/tcp-debug.sh (new file, 32 lines)
@@ -0,0 +1,32 @@
+(cd ~/dev/copyparty && strace -Tttyyvfs 256 -o strace.strace python3 -um copyparty -i 127.0.0.1 --http-only --stackmon /dev/shm/cpps,10 ) 2>&1 | tee /dev/stderr > ~/log-copyparty-$(date +%Y-%m%d-%H%M%S).txt
+
+14/Jun/2021:16:34:02 1623688447.212405 death
+14/Jun/2021:16:35:02 1623688502.420860 back
+
+tcpdump -nni lo -w /home/ed/lo.pcap
+
+# 16:35:25.324662 IP 127.0.0.1.48632 > 127.0.0.1.3920: Flags [F.], seq 849, ack 544, win 359, options [nop,nop,TS val 809396796 ecr 809396796], length 0
+
+tcpdump -nnr /home/ed/lo.pcap | awk '/ > 127.0.0.1.3920: /{sub(/ > .*/,"");sub(/.*\./,"");print}' | sort -n | uniq | while IFS= read -r port; do echo; tcpdump -nnr /home/ed/lo.pcap 2>/dev/null | grep -E "\.$port( > |: F)" | sed -r 's/ > .*, /, /'; done | grep -E '^16:35:0.*length [^0]' -C50
+
+16:34:02.441732 IP 127.0.0.1.48638, length 0
+16:34:02.441738 IP 127.0.0.1.3920, length 0
+16:34:02.441744 IP 127.0.0.1.48638, length 0
+16:34:02.441756 IP 127.0.0.1.48638, length 791
+16:34:02.441759 IP 127.0.0.1.3920, length 0
+16:35:02.445529 IP 127.0.0.1.48638, length 0
+16:35:02.489194 IP 127.0.0.1.3920, length 0
+16:35:02.515595 IP 127.0.0.1.3920, length 216
+16:35:02.515600 IP 127.0.0.1.48638, length 0
+
+grep 48638 "$(find ~ -maxdepth 1 -name log-copyparty-\*.txt | sort | tail -n 1)"
+
+1623688502.510380 48638 rh
+1623688502.511291 48638 Unrecv direct ...
+1623688502.511827 48638 rh = 791
+16:35:02.518 127.0.0.1 48638 shut(8): [Errno 107] Socket not connected
+Exception in thread httpsrv-0.1-48638:
+
+grep 48638 ~/dev/copyparty/strace.strace
+14561 16:35:02.506310 <... accept4 resumed> {sa_family=AF_INET, sin_port=htons(48638), sin_addr=inet_addr("127.0.0.1")}, [16], SOCK_CLOEXEC) = 8<TCP:[127.0.0.1:3920->127.0.0.1:48638]> <0.000012>
+15230 16:35:02.510725 write(1<pipe:[256639555]>, "1623688502.510380 48638 rh\n", 27 <unfinished ...>

@@ -92,20 +92,34 @@ chmod 755 \
 copyparty-extras/copyparty-*/{scripts,bin}/*
 
 
-# extract and repack the sfx with less features enabled
+# extract the sfx
 ( cd copyparty-extras/sfx-full/
 ./copyparty-sfx.py -h
-cd ../copyparty-*/
-./scripts/make-sfx.sh re no-ogv no-cm
 )
 
 
-# put new sfx into copyparty-extras/sfx-lite/,
-# fuse client into copyparty-extras/,
+repack() {
+
+# do the repack
+(cd copyparty-extras/copyparty-*/
+./scripts/make-sfx.sh $2
+)
+
+# put new sfx into copyparty-extras/$name/,
+( cd copyparty-extras/
+mv copyparty-*/dist/* $1/
+)
+}
+
+repack sfx-full "re gz no-sh"
+repack sfx-lite "re no-ogv no-cm"
+repack sfx-lite "re no-ogv no-cm gz no-sh"
+
+
+# move fuse client into copyparty-extras/,
 # copy lite-sfx.py to ./copyparty,
 # delete extracted source code
 ( cd copyparty-extras/
-mv copyparty-*/dist/* sfx-lite/
 mv copyparty-*/bin/copyparty-fuse.py .
 cp -pv sfx-lite/copyparty-sfx.py ../copyparty
 rm -rf copyparty-{0..9}*.*.*{0..9}
@@ -119,6 +133,7 @@ true
 
 
 # create the bundle
+printf '\n\n'
 fn=copyparty-$(date +%Y-%m%d-%H%M%S).tgz
 tar -czvf "$od/$fn" *
 cd "$od"

@@ -1,6 +1,7 @@
 FROM alpine:3.13
 WORKDIR /z
 ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
+ver_hashwasm=4.7.0 \
 ver_marked=1.1.0 \
 ver_ogvjs=1.8.0 \
 ver_mde=2.14.0 \
@@ -9,12 +10,6 @@ ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
 ver_zopfli=1.0.3
 
 
-# TODO
-# sha512.hw.js https://github.com/Daninet/hash-wasm
-# sha512.kc.js https://github.com/chm-diederichs/sha3-wasm
-# awk '/HMAC state/{o=1} /var HEAP/{o=0} /function hmac_reset/{o=1} /return \{/{o=0} /var __extends =/{o=1} /var Hash =/{o=0} /hmac_|pbkdf2_/{next} o{next} {gsub(/IllegalStateError/,"Exception")} {sub(/^ +/,"");sub(/^\/\/ .*/,"");sub(/;$/," ;")} 1' <sha512.ac.js.orig >sha512.ac.js; for fn in sha512.ac.js.orig sha512.ac.js; do wc -c <$fn; wc -c <$fn.gz ; for n in {1..9}; do printf '%8d %d bz\n' $(bzip2 -c$n <$fn | wc -c) $n; done; done
-
-
 # download;
 # the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
 RUN mkdir -p /z/dist/no-pk \
@@ -27,7 +22,11 @@ RUN mkdir -p /z/dist/no-pk \
 && wget https://github.com/codemirror/CodeMirror/archive/$ver_codemirror.tar.gz -O codemirror.tgz \
 && wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
 && wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
+&& wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \
 && unzip ogvjs.zip \
+&& (mkdir hash-wasm \
+&& cd hash-wasm \
+&& unzip ../hash-wasm.zip) \
 && (tar -xf asmcrypto.tgz \
 && cd asmcrypto.js-$ver_asmcrypto \
 && npm install ) \
@@ -64,7 +63,12 @@ RUN tar -xf zopfli.tgz \
 RUN cd asmcrypto.js-$ver_asmcrypto \
 && echo "export { Sha512 } from './hash/sha512/sha512';" > src/entry-export_all.ts \
 && node -r esm build.js \
-&& mv asmcrypto.all.es5.js /z/dist/sha512.js
+&& awk '/HMAC state/{o=1} /var HEAP/{o=0} /function hmac_reset/{o=1} /return \{/{o=0} /var __extends =/{o=1} /var Hash =/{o=0} /hmac_|pbkdf2_/{next} o{next} {gsub(/IllegalStateError/,"Exception")} {sub(/^ +/,"");sub(/^\/\/ .*/,"");sub(/;$/," ;")} 1' < asmcrypto.all.es5.js > /z/dist/sha512.ac.js
+
+
+# build hash-wasm
+RUN cd hash-wasm \
+&& mv sha512.umd.min.js /z/dist/sha512.hw.js
 
 
 # build ogvjs

@@ -11,6 +11,10 @@ echo
 # `re` does a repack of an sfx which you already executed once
 # (grabs files from the sfx-created tempdir), overrides `clean`
 #
+# `gz` creates a gzip-compressed python sfx instead of bzip2
+#
+# `no-sh` makes just the python sfx, skips the sh/unix sfx
+#
 # `no-ogv` saves ~500k by removing the opus/vorbis audio codecs
 # (only affects apple devices; everything else has native support)
 #
@@ -32,6 +36,10 @@ gtar=$(command -v gtar || command -v gnutar) || true
 [ -e /opt/local/bin/bzip2 ] &&
 bzip2() { /opt/local/bin/bzip2 "$@"; }
 }
+
+gawk=$(command -v gawk || command -v gnuawk || command -v awk)
+awk() { $gawk "$@"; }
+
 pybin=$(command -v python3 || command -v python) || {
 echo need python
 exit 1
@@ -163,7 +171,7 @@ find .. -type f \( -name .DS_Store -or -name ._.DS_Store \) -delete
 find .. -type f -name ._\* | while IFS= read -r f; do cmp <(printf '\x00\x05\x16') <(head -c 3 -- "$f") && rm -f -- "$f"; done
 
 echo use smol web deps
-rm -f copyparty/web/deps/*.full.* copyparty/web/Makefile
+rm -f copyparty/web/deps/*.full.* copyparty/web/dbg-* copyparty/web/Makefile
 
 # it's fine dw
 grep -lE '\.full\.(js|css)' copyparty/web/* |
@@ -194,11 +202,40 @@ tmv "$f"
 
 # up2k goes from 28k to 22k laff
 echo entabbening
-find | grep -E '\.(js|css|html)$' | while IFS= read -r f; do
+find | grep -E '\.css$' | while IFS= read -r f; do
+awk '{
+sub(/^[ \t]+/,"");
+sub(/[ \t]+$/,"");
+$0=gensub(/^([a-z-]+) *: *(.*[^ ]) *;$/,"\\1:\\2;","1");
+sub(/ +\{$/,"{");
+gsub(/, /,",")
+}
+!/\}$/ {printf "%s",$0;next}
+1
+' <$f | sed 's/;\}$/}/' >t
+tmv "$f"
+done
+find | grep -E '\.(js|html)$' | while IFS= read -r f; do
 unexpand -t 4 --first-only <"$f" >t
 tmv "$f"
 done
+
+
+gzres() {
+command -v pigz &&
+pk='pigz -11 -J 34 -I 100' ||
+pk='gzip'
+
+echo "$pk"
+find | grep -E '\.(js|css)$' | grep -vF /deps/ | while IFS= read -r f; do
+echo -n .
+$pk "$f"
+done
+echo
+}
+gzres
+
+
 echo gen tarlist
 for d in copyparty dep-j2; do find $d -type f; done |
 sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |

@@ -3,10 +3,13 @@ set -ex
 
 pids=()
 for py in python{2,3}; do
-$py -m unittest discover -s tests >/dev/null &
+nice $py -m unittest discover -s tests >/dev/null &
 pids+=($!)
 done
 
+python3 scripts/test/smoketest.py &
+pids+=($!)
+
 for pid in ${pids[@]}; do
 wait $pid
 done

@@ -47,7 +47,7 @@ grep -E '/(python|pypy)[0-9\.-]*$' >$dir/pys || true
 printf '\033[1;30mlooking for jinja2 in [%s]\033[0m\n' "$_py" >&2
 $_py -c 'import jinja2' 2>/dev/null || continue
 printf '%s\n' "$_py"
-mv $dir/{,x.}jinja2
+mv $dir/{,x.}dep-j2
 break
 done)"
105
scripts/test/race.py
Normal file
105
scripts/test/race.py
Normal file
@@ -0,0 +1,105 @@
#!/usr/bin/env python3
# connection-stampede test: opens a pile of parallel connections and
# hammers a copyparty instance with simultaneous thumbnail requests

import os
import sys
import time
import json
import threading
import http.client


class Conn(object):
    # one persistent http connection; self.st collects a timestamp
    # for each stage of the current request (sent, headers, body)
    def __init__(self, ip, port):
        self.s = http.client.HTTPConnection(ip, port, timeout=260)
        self.st = []

    def get(self, vpath):
        self.st = [time.time()]

        self.s.request("GET", vpath)
        self.st.append(time.time())

        ret = self.s.getresponse()
        self.st.append(time.time())

        if ret.status < 200 or ret.status >= 400:
            raise Exception(ret.status)

        ret = ret.read()
        self.st.append(time.time())

        return ret

    def get_json(self, vpath):
        ret = self.get(vpath)
        return json.loads(ret)


class CState(threading.Thread):
    # status painter: repaints one line showing each connection's local port,
    # colored by how far its current request has progressed
    def __init__(self, cs):
        threading.Thread.__init__(self)
        self.daemon = True
        self.cs = cs
        self.start()

    def run(self):
        colors = [5, 1, 3, 2, 7]
        remotes = []
        remotes_ok = False
        while True:
            time.sleep(0.001)
            if not remotes_ok:
                remotes = []
                remotes_ok = True
                for conn in self.cs:
                    try:
                        remotes.append(conn.s.sock.getsockname()[1])
                    except:
                        remotes.append("?")
                        remotes_ok = False

            m = []
            for conn, remote in zip(self.cs, remotes):
                stage = len(conn.st)
                m.append(f"\033[3{colors[stage]}m{remote}")

            m = " ".join(m)
            print(f"{m}\033[0m\n\033[A", end="")


def allget(cs, urls):
    # fire one request per connection in parallel, then wait for all of them
    thrs = []
    for c, url in zip(cs, urls):
        t = threading.Thread(target=c.get, args=(url,))
        t.start()
        thrs.append(t)

    for t in thrs:
        t.join()


def main():
    os.system("")  # enables ansi colors in the windows console

    ip, port = sys.argv[1].split(":")
    port = int(port)

    cs = []
    for _ in range(64):
        cs.append(Conn(ip, port))

    CState(cs)

    urlbase = "/doujin/c95"  # folder to list and thumbnail; site-specific
    j = cs[0].get_json(f"{urlbase}?ls")
    urls = []
    for d in j["dirs"]:
        urls.append(f"{urlbase}/{d['href']}?th=w")

    for n in range(100):
        print(n)
        allget(cs, urls)


if __name__ == "__main__":
    main()
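When run by hand, the race script takes a single host:port argument and otherwise only touches the folder hardcoded as urlbase in main(); a minimal invocation against a locally running copyparty, assuming the default port and that the hardcoded folder exists on it:

    python3 scripts/test/race.py 127.0.0.1:3923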
209  scripts/test/smoketest.py  Normal file
@@ -0,0 +1,209 @@
# end-to-end smoketest: boots a real copyparty instance in a subprocess,
# uploads a generated test video into a grid of volumes with different
# permissions, then verifies read/write/thumbnail/tag behavior over http

import os
import sys
import time
import shlex
import shutil
import signal
import tempfile
import requests
import threading
import subprocess as sp


CPP = []


class Cpp(object):
    # wraps one copyparty subprocess
    def __init__(self, args):
        args = [sys.executable, "-m", "copyparty"] + args
        print(" ".join([shlex.quote(x) for x in args]))

        self.ls_pre = set(list(os.listdir()))
        self.p = sp.Popen(args)
        # , stdout=sp.PIPE, stderr=sp.PIPE)

        self.t = threading.Thread(target=self._run)
        self.t.daemon = True
        self.t.start()

    def _run(self):
        self.so, self.se = self.p.communicate()

    def stop(self, wait):
        if wait:
            os.kill(self.p.pid, signal.SIGINT)
            self.t.join(timeout=2)
        else:
            self.p.kill()  # macos py3.8

    def clean(self):
        # remove partial-upload files that appeared in the cwd during the test
        t = os.listdir()
        for f in t:
            if f not in self.ls_pre and f.startswith("up."):
                os.unlink(f)

    def await_idle(self, ub, timeout):
        # poll the control page until the scanner and the hash/tag queues are empty
        req = ["scanning</td><td>False", "hash-q</td><td>0", "tag-q</td><td>0"]
        lim = int(timeout * 10)
        u = ub + "?h"
        for n in range(lim):
            try:
                time.sleep(0.1)
                r = requests.get(u, timeout=0.1)
                for x in req:
                    if x not in r.text:
                        print("ST: {}/{} miss {}".format(n, lim, x))
                        raise Exception()
                print("ST: idle")
                return
            except:
                pass


def tc1():
    ub = "http://127.0.0.1:4321/"
    td = os.path.join("srv", "smoketest")
    try:
        shutil.rmtree(td)
    except:
        if os.path.exists(td):
            raise

    for _ in range(10):
        try:
            os.mkdir(td)
            break
        except:
            time.sleep(0.1)  # win10

    assert os.path.exists(td)

    # generate a tiny 48x32 test video once, cached in the tempdir
    vidp = os.path.join(tempfile.gettempdir(), "smoketest.h264")
    if not os.path.exists(vidp):
        cmd = "ffmpeg -f lavfi -i testsrc=48x32:3 -t 1 -c:v libx264 -tune animation -preset veryslow -crf 69"
        sp.check_call(cmd.split(" ") + [vidp])

    with open(vidp, "rb") as f:
        ovid = f.read()

    args = [
        "-p4321",
        "-e2dsa",
        "-e2tsr",
        "--no-mutagen",
        "--th-ff-jpg",
        "--hist",
        os.path.join(td, "dbm"),
    ]
    pdirs = []
    hpaths = {}

    # volume layout: r/w/a at the top level, each with a nested j/r, j/w, j/a
    for d1 in ["r", "w", "a"]:
        pdirs.append("{}/{}".format(td, d1))
        pdirs.append("{}/{}/j".format(td, d1))
        for d2 in ["r", "w", "a"]:
            d = os.path.join(td, d1, "j", d2)
            pdirs.append(d)
            os.makedirs(d)

    pdirs = [x.replace("\\", "/") for x in pdirs]
    udirs = [x.split("/", 2)[2] for x in pdirs]
    perms = [x.rstrip("j/")[-1] for x in pdirs]
    for pd, ud, p in zip(pdirs, udirs, perms):
        if ud[-1] == "j":
            continue

        hp = None
        if pd.endswith("st/a"):
            hp = hpaths[ud] = os.path.join(td, "db1")
        elif pd[:-1].endswith("a/j/"):
            hpaths[ud] = os.path.join(td, "dbm")
            hp = None
        else:
            hp = "-"
            hpaths[ud] = os.path.join(pd, ".hist")

        arg = "{}:{}:{}".format(pd, ud, p)
        if hp:
            arg += ":chist=" + hp

        args += ["-v", arg]

    # return
    cpp = Cpp(args)
    CPP.append(cpp)
    cpp.await_idle(ub, 3)

    # upload the video into every volume, appending the volume name so each copy is unique
    for d in udirs:
        vid = ovid + "\n{}".format(d).encode("utf-8")
        try:
            requests.post(ub + d, data={"act": "bput"}, files={"f": ("a.h264", vid)})
        except:
            pass

    cpp.clean()

    # GET permission
    for d, p in zip(udirs, perms):
        u = "{}{}/a.h264".format(ub, d)
        r = requests.get(u)
        ok = bool(r)
        if ok != (p in ["a"]):
            raise Exception("get {} with perm {} at {}".format(ok, p, u))

    # stat filesystem
    for d, p in zip(pdirs, perms):
        u = "{}/a.h264".format(d)
        ok = os.path.exists(u)
        if ok != (p in ["a", "w"]):
            raise Exception("stat {} with perm {} at {}".format(ok, p, u))

    # GET thumbnail, verify contents
    for d, p in zip(udirs, perms):
        u = "{}{}/a.h264?th=j".format(ub, d)
        r = requests.get(u)
        ok = bool(r and r.content[:3] == b"\xff\xd8\xff")
        if ok != (p in ["a"]):
            raise Exception("thumb {} with perm {} at {}".format(ok, p, u))

    # check tags
    cpp.await_idle(ub, 5)
    for d, p in zip(udirs, perms):
        u = "{}{}?ls".format(ub, d)
        r = requests.get(u)
        j = r.json() if r else False
        tag = None
        if j:
            for f in j["files"]:
                tag = tag or f["tags"].get("res")

        r_ok = bool(j)
        w_ok = bool(r_ok and j.get("files"))

        if not r_ok or w_ok != (p in ["a"]):
            raise Exception("ls {} with perm {} at {}".format(w_ok, p, u))

        if (tag and p != "a") or (not tag and p == "a"):
            raise Exception("tag {} with perm {} at {}".format(tag, p, u))

        if tag is not None and tag != "48x32":
            raise Exception("tag [{}] at {}".format(tag, u))

    cpp.stop(True)


def run(tc):
    try:
        tc()
    finally:
        try:
            CPP[0].stop(False)
        except:
            pass


def main():
    run(tc1)


if __name__ == "__main__":
    main()
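The smoketest starts its own copyparty on port 4321 via "sys.executable -m copyparty" and creates its volumes under ./srv/smoketest relative to the working directory, so it is presumably meant to be launched from the repository root; it also needs the requests module and an ffmpeg binary on PATH for the generated test clip:

    # from the repository root; requires the requests module and ffmpeg in PATH
    python3 -m pip install requests
    python3 scripts/test/smoketest.py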
@@ -28,6 +28,7 @@ class Cfg(Namespace):
             a=a,
             v=v,
             c=c,
+            rproxy=0,
             ed=False,
             no_zip=False,
             no_scandir=False,
@@ -37,6 +38,9 @@ class Cfg(Namespace):
             nih=True,
             mtp=[],
             mte="a",
+            hist=None,
+            no_hash=False,
+            css_browser=None,
             **{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
         )
@@ -99,7 +103,7 @@ class TestHttpCli(unittest.TestCase):
         pprint.pprint(vcfg)

         self.args = Cfg(v=vcfg, a=["o:o", "x:x"])
-        self.auth = AuthSrv(self.args, self.log)
+        self.asrv = AuthSrv(self.args, self.log)
         vfiles = [x for x in allfiles if x.startswith(top)]
         for fp in vfiles:
             rok, wok = self.can_rw(fp)
@@ -188,12 +192,12 @@ class TestHttpCli(unittest.TestCase):
     def put(self, url):
         buf = "PUT /{0} HTTP/1.1\r\nCookie: cppwd=o\r\nConnection: close\r\nContent-Length: {1}\r\n\r\nok {0}\n"
         buf = buf.format(url, len(url) + 4).encode("utf-8")
-        conn = tu.VHttpConn(self.args, self.auth, self.log, buf)
+        conn = tu.VHttpConn(self.args, self.asrv, self.log, buf)
         HttpCli(conn).run()
         return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)

     def curl(self, url, binary=False):
-        conn = tu.VHttpConn(self.args, self.auth, self.log, hdr(url))
+        conn = tu.VHttpConn(self.args, self.asrv, self.log, hdr(url))
         HttpCli(conn).run()
         if binary:
             h, b = conn.s._reply.split(b"\r\n\r\n", 1)
@@ -11,15 +11,22 @@ from textwrap import dedent
 from argparse import Namespace

 from tests import util as tu
-from copyparty.authsrv import AuthSrv
+from copyparty.authsrv import AuthSrv, VFS
 from copyparty import util


 class Cfg(Namespace):
     def __init__(self, a=[], v=[], c=None):
         ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
-        ex["mtp"] = []
-        ex["mte"] = "a"
+        ex2 = {
+            "mtp": [],
+            "mte": "a",
+            "hist": None,
+            "no_hash": False,
+            "css_browser": None,
+            "rproxy": 0,
+        }
+        ex.update(ex2)
         super(Cfg, self).__init__(a=a, v=v, c=c, **ex)
@@ -47,6 +54,7 @@ class TestVFS(unittest.TestCase):
         self.assertEqual(util.undot(query), response)

     def ls(self, vfs, vpath, uname):
+        # type: (VFS, str, str) -> tuple[str, str, str]
         """helper for resolving and listing a folder"""
         vn, rem = vfs.get(vpath, uname, True, False)
         r1 = vn.ls(rem, uname, False)
@@ -112,13 +120,13 @@ class TestVFS(unittest.TestCase):
         n = vfs.nodes["a"]
         self.assertEqual(len(vfs.nodes), 1)
         self.assertEqual(n.vpath, "a")
-        self.assertEqual(n.realpath, td + "/a")
+        self.assertEqual(n.realpath, os.path.join(td, "a"))
         self.assertEqual(n.uread, ["*", "k"])
         self.assertEqual(n.uwrite, ["k"])
         n = n.nodes["ac"]
         self.assertEqual(len(vfs.nodes), 1)
         self.assertEqual(n.vpath, "a/ac")
-        self.assertEqual(n.realpath, td + "/a/ac")
+        self.assertEqual(n.realpath, os.path.join(td, "a", "ac"))
         self.assertEqual(n.uread, ["*", "k"])
         self.assertEqual(n.uwrite, ["k"])
         n = n.nodes["acb"]
@@ -250,7 +258,7 @@ class TestVFS(unittest.TestCase):
         n = au.vfs
         # root was not defined, so PWD with no access to anyone
         self.assertEqual(n.vpath, "")
-        self.assertEqual(n.realpath, td)
+        self.assertEqual(n.realpath, None)
         self.assertEqual(n.uread, [])
         self.assertEqual(n.uwrite, [])
         self.assertEqual(len(n.nodes), 1)
@@ -60,7 +60,7 @@ def get_ramdisk():

     if os.path.exists("/Volumes"):
         # hdiutil eject /Volumes/cptd/
-        devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://65536")
+        devname, _ = chkcmd("hdiutil", "attach", "-nomount", "ram://131072")
         devname = devname.strip()
         print("devname: [{}]".format(devname))
         for _ in range(10):
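For context on the size bump in the hdiutil line: ram://N allocates N 512-byte sectors, so this change should double the macOS test ramdisk from 32 MiB to 64 MiB; a quick sanity check of the arithmetic:

    # 512-byte sectors converted to MiB
    echo $((  65536 * 512 / 1048576 ))   # 32
    echo $(( 131072 * 512 / 1048576 ))   # 64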
@@ -110,12 +110,13 @@ class VHttpSrv(object):


 class VHttpConn(object):
-    def __init__(self, args, auth, log, buf):
+    def __init__(self, args, asrv, log, buf):
         self.s = VSock(buf)
         self.sr = Unrecv(self.s)
         self.addr = ("127.0.0.1", "42069")
         self.args = args
-        self.auth = auth
+        self.asrv = asrv
+        self.is_mp = False
         self.log_func = log
         self.log_src = "a"
         self.lf_url = None