mirror of https://github.com/9001/copyparty.git, synced 2025-11-07 07:13:16 +00:00
Compare commits
187 Commits
(only the abbreviated SHA1s were captured; the author, date, and commit-message columns are missing from this mirror)

f7dbd95a54 515ee2290b b0c78910bb f4ca62b664 8eb8043a3d 3e8541362a
789724e348 5125b9532f ebc9de02b0 ec788fa491 9b5e264574 57c297274b
e9bf092317 d173887324 99820d854c 62df0a0eb2 600e9ac947 3ca41be2b4
5c7debd900 7fa5b23ce3 ff82738aaf bf5ee9d643 72a8593ecd bc3bbe07d4
c7cb64bfef 629f537d06 9e988041b8 f9a8b5c9d7 b9c3538253 2bc0cdf017
02a91f60d4 fae83da197 0fe4aa6418 21a51bf0dc bcb353cc30 6af4508518
6a559bc28a 0f5026cd20 a91b80a311 ec534701c8 af5169f67f 18676c5e65
e2df6fda7b e9ae9782fe 016dba4ca9 39c7ef305f 849c1dc848 61414014fe
578a915884 eacafb8a63 4446760f74 6da2a083f9 8837c8f822 bac301ed66
061db3906d fd7df5c952 a270019147 55e0209901 2b255fbbed 8a2345a0fb
bfa9f535aa f757623ad8 3c7465e268 108665fc4f ed519c9138 2dd2e2c57e
6c3a976222 80cc26bd95 970fb84fd8 20cbcf6931 8fcde2a579 b32d1f8ad3
03513e0cb1 e041a2b197 d7d625be2a 4121266678 22971a6be4 efbf8d7e0d
397396ea4a e59b077c21 4bc39f3084 21c3570786 2f85c1fb18 1e27a4c2df
456f575637 51546c9e64 83b4b70ef4 a5120d4f6f c95941e14f 0dd531149d
67da1b5219 919bd16437 ecead109ab 765294c263 d6b5351207 a2009bcc6b
12709a8a0a c055baefd2 56522599b5 664f53b75d 87200d9f10 5c3d0b6520
bd49979f4a 7e606cdd9f 8b4b7fa794 05345ddf8b 66adb470ad e15c8fd146
0f09b98a39 b4d6f4e24d 3217fa625b e719ff8a47 9fcf528d45 1ddbf5a158
64bf4574b0 5649d26077 92f923effe 0d46d548b9 062df3f0c3 789fb53b8e
351db5a18f aabbd271c8 aae8e0171e 45827a2458 726030296f 6659ab3881
c6a103609e c6b3f035e5 2b0a7e378e b75ce909c8 229c3f5dab ec73094506
c7650c9326 d94c6d4e72 3cc8760733 a2f6973495 f8648fa651 177aa038df
e0a14ec881 9366512f2f ea38b8041a f1870daf0d 9722441aad 9d014087f4
83b4038b85 1e0a448feb fb81de3b36 aa4f352301 f1a1c2ea45 6249bd4163
2579dc64ce 356512270a bed27f2b43 54013d861b ec100210dc 3ab1acf32c
8c28266418 7f8b8dcb92 6dd39811d4 35e2138e3e 239b4e9fe6 2fcd0e7e72
357347ce3a 36dc1107fb 0a3bbc4b4a 855b93dcf6 89b79ba267 f5651b7d94
1881019ede caba4e974c bc3c9613bc 15a3ee252e be055961ae e3031bdeec
75917b9f7c 910732e02c 264b497681 372b949622 789a602914 093e955100
c32a89bebf c0bebe9f9f 57579b2fe5 51d14a6b4d c50f1b64e5 98aaab02c5
0fc7973d8b
.vscode/tasks.json (vendored; 5 lines changed)
@@ -9,7 +9,10 @@
 {
 "label": "no_dbg",
 "type": "shell",
-"command": "${config:python.pythonPath} .vscode/launch.py"
+"command": "${config:python.pythonPath}",
+"args": [
+".vscode/launch.py"
+]
 }
 ]
 }

README.md (232 lines changed)
@@ -20,8 +20,10 @@ turn your phone or raspi into a portable file server with resumable uploads/down

 * top
 * [quickstart](#quickstart)
+* [on debian](#on-debian)
 * [notes](#notes)
 * [status](#status)
+* [testimonials](#testimonials)
 * [bugs](#bugs)
 * [general bugs](#general-bugs)
 * [not my bugs](#not-my-bugs)
@@ -44,6 +46,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * [browser support](#browser-support)
 * [client examples](#client-examples)
 * [up2k](#up2k)
+* [performance](#performance)
 * [dependencies](#dependencies)
 * [optional dependencies](#optional-dependencies)
 * [install recommended deps](#install-recommended-deps)
@@ -51,9 +54,12 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * [sfx](#sfx)
 * [sfx repack](#sfx-repack)
 * [install on android](#install-on-android)
-* [dev env setup](#dev-env-setup)
-* [how to release](#how-to-release)
+* [building](#building)
+* [dev env setup](#dev-env-setup)
+* [just the sfx](#just-the-sfx)
+* [complete release](#complete-release)
 * [todo](#todo)
+* [discarded ideas](#discarded-ideas)


 ## quickstart
@@ -64,8 +70,9 @@ running the sfx without arguments (for example doubleclicking it on Windows) wil

 some recommended options:
 * `-e2dsa` enables general file indexing, see [search configuration](#search-configuration)
-* `-e2ts` enables audio metadata indexing (needs either FFprobe or mutagen), see [optional dependencies](#optional-dependencies)
+* `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen), see [optional dependencies](#optional-dependencies)
 * `-v /mnt/music:/music:r:afoo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, with user `foo` as `a`dmin (read/write), password `bar`
+* the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set
 * replace `:r:afoo` with `:rfoo` to only make the folder readable by `foo` and nobody else
 * in addition to `r`ead and `a`dmin, `w`rite makes a folder write-only, so cannot list/access files in it
 * `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access
@@ -75,6 +82,19 @@ you may also want these, especially on servers:
 * [contrib/nginx/copyparty.conf](contrib/nginx/copyparty.conf) to reverse-proxy behind nginx (for better https)


+### on debian
+
+recommended steps to enable audio metadata and thumbnails (from images and videos):
+
+* as root, run the following:
+`apt install python3 python3-pip python3-dev ffmpeg`
+
+* then, as the user which will be running copyparty (so hopefully not root), run this:
+`python3 -m pip install --user -U Pillow pillow-avif-plugin`
+
+(skipped `pyheif-pillow-opener` because apparently debian is too old to build it)
+
+
 ## notes

 general:
@@ -95,7 +115,7 @@ summary: all planned features work! now please enjoy the bloatening

 * backend stuff
 * ☑ sanic multipart parser
-* ☑ load balancer (multiprocessing)
+* ☑ multiprocessing (actual multithreading)
 * ☑ volumes (mountpoints)
 * ☑ accounts
 * upload
@@ -109,12 +129,12 @@ summary: all planned features work! now please enjoy the bloatening
 * ☑ FUSE client (read-only)
 * browser
 * ☑ tree-view
-* ☑ audio player
+* ☑ audio player (with OS media controls)
 * ☑ thumbnails
-* ☑ images using Pillow
-* ☑ videos using FFmpeg
+* ☑ ...of images using Pillow
+* ☑ ...of videos using FFmpeg
 * ☑ cache eviction (max-age; maybe max-size eventually)
-* ☑ image gallery
+* ☑ image gallery with webm player
 * ☑ SPA (browse while uploading)
 * if you use the file-tree on the left only, not folders in the file list
 * server indexing
@@ -126,24 +146,39 @@ summary: all planned features work! now please enjoy the bloatening
 * ☑ editor (sure why not)


+## testimonials
+
+small collection of user feedback
+
+`good enough`, `surprisingly correct`, `certified good software`, `just works`, `why`
+
+
 # bugs

-* Windows: python 3.7 and older cannot read tags with ffprobe, so use mutagen or upgrade
+* Windows: python 3.7 and older cannot read tags with FFprobe, so use Mutagen or upgrade
 * Windows: python 2.7 cannot index non-ascii filenames with `-e2d`
 * Windows: python 2.7 cannot handle filenames with mojibake
-* MacOS: `--th-ff-jpg` may fix thumbnails using macports-FFmpeg
+* `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions

 ## general bugs

 * all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
 * cannot mount something at `/d1/d2/d3` unless `d2` exists inside `d1`
+* dupe files will not have metadata (audio tags etc) displayed in the file listing
+* because they don't get `up` entries in the db (probably best fix) and `tx_browser` does not `lstat`
 * probably more, pls let me know

 ## not my bugs

-* Windows: msys2-python 3.8.6 occasionally throws "RuntimeError: release unlocked lock" when leaving a scoped mutex in up2k
+* Windows: folders cannot be accessed if the name ends with `.`
+* python or windows bug
+
+* Windows: msys2-python 3.8.6 occasionally throws `RuntimeError: release unlocked lock` when leaving a scoped mutex in up2k
 * this is an msys2 bug, the regular windows edition of python is fine

+* VirtualBox: sqlite throws `Disk I/O Error` when running in a VM and the up2k database is in a vboxsf
+* use `--hist` or the `hist` volflag (`-v [...]:chist=/tmp/foo`) to place the db inside the vm instead
+

 # the browser

@@ -157,40 +192,63 @@ summary: all planned features work! now please enjoy the bloatening
 * `[📂]` mkdir, create directories
 * `[📝]` new-md, create a new markdown document
 * `[📟]` send-msg, either to server-log or into textfiles if `--urlform save`
-* `[⚙️]` client configuration options
+* `[🎺]` audio-player config options
+* `[⚙️]` general client config options


 ## hotkeys

-the browser has the following hotkeys
+the browser has the following hotkeys (assumes qwerty, ignores actual layout)
+* `B` toggle breadcrumbs / directory tree
 * `I/K` prev/next folder
-* `P` parent folder
+* `M` parent folder (or unexpand current)
 * `G` toggle list / grid view
 * `T` toggle thumbnails / icons
 * when playing audio:
-* `0..9` jump to 10%..90%
-* `U/O` skip 10sec back/forward
 * `J/L` prev/next song
-* `M` play/pause (also starts playing the folder)
+* `U/O` skip 10sec back/forward
+* `0..9` jump to 0%..90%
+* `P` play/pause (also starts playing the folder)
+* when viewing images / playing videos:
+* `J/L, Left/Right` prev/next file
+* `Home/End` first/last file
+* `Esc` close viewer
+* videos:
+* `U/O` skip 10sec back/forward
+* `P/K/Space` play/pause
+* `F` fullscreen
+* `C` continue playing next video
+* `R` loop
+* `M` mute
+* when tree-sidebar is open:
+* `A/D` adjust tree width
 * in the grid view:
 * `S` toggle multiselect
-* `A/D` zoom
+* shift+`A/D` zoom
+* in the markdown editor:
+* `^s` save
+* `^h` header
+* `^k` autoformat table
+* `^u` jump to next unicode character
+* `^e` toggle editor / preview
+* `^up, ^down` jump paragraphs

 ## tree-mode

-by default there's a breadcrumbs path; you can replace this with a tree-browser sidebar thing by clicking the 🌲
+by default there's a breadcrumbs path; you can replace this with a tree-browser sidebar thing by clicking the `🌲` or pressing the `B` hotkey

-click `[-]` and `[+]` to adjust the size, and the `[a]` toggles if the tree should widen dynamically as you go deeper or stay fixed-size
+click `[-]` and `[+]` (or hotkeys `A`/`D`) to adjust the size, and the `[a]` toggles if the tree should widen dynamically as you go deeper or stay fixed-size


 ## thumbnails

 

-it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how destructive your users are
+it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are

-images named `folder.jpg` and `folder.png` become the thumbnail of the folder they're in
+images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`

+in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked


 ## zip downloads
@@ -205,9 +263,10 @@ the `zip` link next to folders can produce various types of zip/tar files using
 | `zip_crc` | `?zip=crc` | cp437 with crc32 computed early for truly ancient software |

 * hidden files (dotfiles) are excluded unless `-ed`
-* the up2k.db is always excluded
+* `up2k.db` and `dir.txt` is always excluded
 * `zip_crc` will take longer to download since the server has to read each file twice
-* please let me know if you find a program old enough to actually need this
+* this is only to support MS-DOS PKZIP v2.04g (october 1993) and older
+* how are you accessing copyparty actually

 you can also zip a selection of files or folders by clicking them in the browser, that brings up a selection editor and zip button in the bottom right

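Since these archive links are plain GET parameters, the same downloads also work outside the browser; a minimal sketch (server address and volume path are placeholders, and it assumes the folder is readable without logging in):

```py
import urllib.request

# "?zip=crc" selects the cp437+crc32 variant from the table above
url = "http://127.0.0.1:3923/music/?zip=crc"
urllib.request.urlretrieve(url, "music.zip")
```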
@@ -222,9 +281,11 @@ two upload methods are available in the html client:
 up2k has several advantages:
 * you can drop folders into the browser (files are added recursively)
 * files are processed in chunks, and each chunk is checksummed
-* uploads resume if they are interrupted (for example by a reboot)
+* uploads autoresume if they are interrupted by network issues
+* uploads resume if you reboot your browser or pc, just upload the same files again
 * server detects any corruption; the client reuploads affected chunks
 * the client doesn't upload anything that already exists on the server
+* much higher speeds than ftp/scp/tarpipe on some internet connections (mainly american ones) thanks to parallel connections
 * the last-modified timestamp of the file is preserved

 see [up2k](#up2k) for details on how it works
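To make the chunking idea concrete, here is a rough sketch of a client-side pass over a file; the chunk size and the hash encoding here are arbitrary placeholders, not the actual up2k wire format:

```py
import base64
import hashlib

def chunk_hashes(path, chunk_size=1024 * 1024):  # chunk size chosen only for the example
    out = []
    with open(path, "rb") as f:
        while True:
            buf = f.read(chunk_size)
            if not buf:
                break
            digest = hashlib.sha512(buf).digest()
            out.append(base64.urlsafe_b64encode(digest).decode("ascii"))
    return out  # the hashlist that a handshake would compare against the server's copy
```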
@@ -257,11 +318,11 @@ in the `[🚀 up2k]` tab, after toggling the `[🔎]` switch green, any files/fo
 files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
 * the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much

-adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files
+adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files (or just refresh the page)

 note that since up2k has to read the file twice, `[🎈 bup]` can be up to 2x faster in extreme cases (if your internet connection is faster than the read-speed of your HDD)

-up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well thanks to tls also functioning as an integrity check
+up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well (thanks to tls also functioning as an integrity check)


 ## markdown viewer
@@ -275,6 +336,8 @@ up2k has saved a few uploads from becoming corrupted in-transfer already; caught

 * you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`

+* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
+

 # searching

@@ -297,11 +360,11 @@ searching relies on two databases, the up2k filetree (`-e2d`) and the metadata t

 through arguments:
 * `-e2d` enables file indexing on upload
-* `-e2ds` scans writable folders on startup
+* `-e2ds` scans writable folders for new files on startup
 * `-e2dsa` scans all mounted volumes (including readonly ones)
 * `-e2t` enables metadata indexing on upload
 * `-e2ts` scans for tags in all files that don't have tags yet
-* `-e2tsr` deletes all existing tags, so a full reindex
+* `-e2tsr` deletes all existing tags, does a full reindex

 the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
 * `-v ~/music::r:ce2dsa:ce2tsr` does a full reindex of everything on startup
@@ -309,11 +372,11 @@ the same arguments can be set as volume flags, in addition to `d2d` and `d2t` fo
 * `-v ~/music::r:cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`

 note:
-* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those
+* `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
 * the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher

 you can choose to only index filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash` or the volume-flag `cdhash`, this has the following consequences:
-* initial indexing is way faster, especially when the volume is on a networked disk
+* initial indexing is way faster, especially when the volume is on a network disk
 * makes it impossible to [file-search](#file-search)
 * if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected

@@ -346,17 +409,17 @@ tags that start with a `.` such as `.bpm` and `.dur`(ation) indicate numeric val

 see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3,opus,flac,m4a,wav,aif,)

-`--no-mutagen` disables mutagen and uses ffprobe instead, which...
-* is about 20x slower than mutagen
-* catches a few tags that mutagen doesn't
+`--no-mutagen` disables Mutagen and uses FFprobe instead, which...
+* is about 20x slower than Mutagen
+* catches a few tags that Mutagen doesn't
 * melodic key, video resolution, framerate, pixfmt
 * avoids pulling any GPL code into copyparty
-* more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve
+* more importantly runs FFprobe on incoming files which is bad if your FFmpeg has a cve


 ## file parser plugins

-copyparty can invoke external programs to collect additional metadata for files using `mtp` (as argument or volume flag), there is a default timeout of 30sec
+copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volume flag), there is a default timeout of 30sec

 * `-mtp .bpm=~/bin/audio-bpm.py` will execute `~/bin/audio-bpm.py` with the audio file as argument 1 to provide the `.bpm` tag, if that does not exist in the audio metadata
 * `-mtp key=f,t5,~/bin/audio-key.py` uses `~/bin/audio-key.py` to get the `key` tag, replacing any existing metadata tag (`f,`), aborting if it takes longer than 5sec (`t5,`)
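For a sense of what such a plugin looks like, here is a hypothetical parser in the same style as the bundled `bin/mtag` scripts: copyparty passes the media file as argument 1 and reads whatever the script prints to stdout as the tag value. The script name, the `chans` tag, and the ffprobe-based approach are made up for the example (it would be registered as `-mtp chans=~/bin/audio-chans.py`):

```py
#!/usr/bin/env python
# hypothetical ~/bin/audio-chans.py
import sys
import subprocess as sp

def main():
    # ask ffprobe for the stream info of the file given as argument 1,
    # then print the channel count; stdout becomes the tag value
    cmd = ["ffprobe", "-v", "fatal", "-show_streams", sys.argv[1]]
    txt = sp.check_output(cmd).decode("utf-8", "replace")
    for ln in txt.splitlines():
        if ln.startswith("channels="):
            print(ln.split("=", 1)[1])
            return

if __name__ == "__main__":
    main()
```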
@@ -390,11 +453,13 @@ copyparty can invoke external programs to collect additional metadata for files
 | zip selection | - | yep | yep | yep | yep | yep | yep | yep |
 | directory tree | - | - | `*1` | yep | yep | yep | yep | yep |
 | up2k | - | - | yep | yep | yep | yep | yep | yep |
-| icons work | - | - | yep | yep | yep | yep | yep | yep |
 | markdown editor | - | - | yep | yep | yep | yep | yep | yep |
 | markdown viewer | - | - | yep | yep | yep | yep | yep | yep |
 | play mp3/m4a | - | yep | yep | yep | yep | yep | yep | yep |
 | play ogg/opus | - | - | - | - | yep | yep | `*2` | yep |
+| thumbnail view | - | - | - | - | yep | yep | yep | yep |
+| image viewer | - | - | - | - | yep | yep | yep | yep |
+| **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |

 * internet explorer 6 to 8 behave the same
 * firefox 52 and chrome 49 are the last winxp versions
@@ -412,7 +477,7 @@ quick summary of more eccentric web-browsers trying to view a directory index:
 | **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
 | **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
 | **ie4** and **netscape** 4.0 | can browse (text is yellow on white), upload with `?b=u` |
-| **SerenityOS** (22d13d8) | hits a page fault, works with `?b=u`, file input not-impl, url params are multiplying |
+| **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |


 # client examples
@@ -437,7 +502,7 @@ quick summary of more eccentric web-browsers trying to view a directory index:

 copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uplaods:

-b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;}
+b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
 b512 <movie.mkv


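The same checksum can be reproduced from Python as well; a sketch equivalent to the updated `b512` shell function above (sha512, urlsafe base64, first 44 characters):

```py
import base64
import hashlib
import sys

def b512(path):
    h = hashlib.sha512()
    with open(path, "rb") as f:
        for buf in iter(lambda: f.read(1024 * 64), b""):
            h.update(buf)
    return base64.urlsafe_b64encode(h.digest())[:44].decode("ascii")

if __name__ == "__main__":
    print(b512(sys.argv[1]))  # compare against the value returned by the PUT/POST
```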
@@ -457,6 +522,23 @@ quick outline of the up2k protocol, see [uploading](#uploading) for the web-clie
 * client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload


+# performance
+
+defaults are good for most cases, don't mind the `cannot efficiently use multiple CPU cores` message, it's very unlikely to be a problem
+
+below are some tweaks roughly ordered by usefulness:
+
+* `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file
+* `--http-only` or `--https-only` (unless you want to support both protocols) will reduce the delay before a new connection is established
+* `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
+* `--no-hash` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
+* `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example:
+* huge amount of short-lived connections
+* really heavy traffic (downloads/uploads)
+
+...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u
+
+
 # dependencies

 * `jinja2` (is built into the SFX)
@@ -466,18 +548,18 @@ quick outline of the up2k protocol, see [uploading](#uploading) for the web-clie

 enable music tags:
 * either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
-* or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
+* or `ffprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)

-enable image thumbnails:
+enable thumbnails of images:
 * `Pillow` (requires py2.7 or py3.5+)

-enable video thumbnails:
+enable thumbnails of videos:
 * `ffmpeg` and `ffprobe` somewhere in `$PATH`

-enable reading HEIF pictures:
+enable thumbnails of HEIF pictures:
 * `pyheif-pillow-opener` (requires Linux or a C compiler)

-enable reading AVIF pictures:
+enable thumbnails of AVIF pictures:
 * `pillow-avif-plugin`


@@ -491,7 +573,7 @@ python -m pip install --user -U jinja2 mutagen Pillow

 some bundled tools have copyleft dependencies, see [./bin/#mtag](bin/#mtag)

-these are standalone programs and will never be imported / evaluated by copyparty
+these are standalone programs and will never be imported / evaluated by copyparty, and must be enabled through `-mtp` configs


 # sfx
@@ -507,10 +589,10 @@ pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `

 ## sfx repack

-if you don't need all the features you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except for either msys2 or WSL if you're on windows)
-* `724K` original size as of v0.4.0
-* `256K` after `./scripts/make-sfx.sh re no-ogv`
-* `164K` after `./scripts/make-sfx.sh re no-ogv no-cm`
+if you don't need all the features, you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except if you're on windows then you need msys2 or WSL)
+* `525k` size of original sfx.py as of v0.11.30
+* `315k` after `./scripts/make-sfx.sh re no-ogv`
+* `223k` after `./scripts/make-sfx.sh re no-ogv no-cm`

 the features you can opt to drop are
 * `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files
@@ -532,18 +614,45 @@ echo $?
 after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux


-# dev env setup
+# building

+## dev env setup
+
+mostly optional; if you need a working env for vscode or similar
+
 ```sh
 python3 -m venv .venv
 . .venv/bin/activate
-pip install jinja2 # mandatory deps
-pip install Pillow # thumbnail deps
+pip install jinja2 # mandatory
+pip install mutagen # audio metadata
+pip install Pillow pyheif-pillow-opener pillow-avif-plugin # thumbnails
 pip install black bandit pylint flake8 # vscode tooling
 ```


-# how to release
+## just the sfx
+
+unless you need to modify something in the web-dependencies, it's faster to grab those from a previous release:
+
+```sh
+rm -rf copyparty/web/deps
+curl -L https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py >x.py
+python3 x.py -h
+rm x.py
+mv /tmp/pe-copyparty/copyparty/web/deps/ copyparty/web/deps/
+```
+
+then build the sfx using any of the following examples:
+
+```sh
+./scripts/make-sfx.sh # both python and sh editions
+./scripts/make-sfx.sh no-sh gz # just python with gzip
+```
+
+
+## complete release
+
+also builds the sfx so disregard the sfx section above

 in the `scripts` folder:

@@ -558,14 +667,18 @@ in the `scripts` folder:

 roughly sorted by priority

+* hls framework for Someone Else to drop code into :^)
 * readme.md as epilogue
-* single sha512 across all up2k chunks? maybe

+## discarded ideas

 * reduce up2k roundtrips
 * start from a chunk index and just go
 * terminate client on bad data
-discarded ideas
+* not worth the effort, just throw enough conncetions at it
+* single sha512 across all up2k chunks?
+* crypto.subtle cannot into streaming, would have to use hashwasm, expensive
 * separate sqlite table per tag
 * performance fixed by skipping some indexes (`+mt.k`)
 * audio fingerprinting
@@ -580,3 +693,6 @@ discarded ideas
 * nah
 * look into android thumbnail cache file format
 * absolutely not
+* indexedDB for hashes, cfg enable/clear/sz, 2gb avail, ~9k for 1g, ~4k for 100m, 500k items before autoeviction
+* blank hashlist when up-ok to skip handshake
+* too many confusing side-effects

(filename not captured)
@@ -48,15 +48,16 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas


 # [`dbtool.py`](dbtool.py)
-upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty recommends to wipe the DB and reindex because it now collects additional metadata during analysis, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
+upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty is incompatible with the old DB and automatically rebuilds the DB from scratch, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db

-for that example (upgrading to v0.11.0), first move the old db aside, launch copyparty, let it rebuild the db until the point where it starts running mtp (colored messages as it adds the mtp tags), then CTRL-C and patch in the old mtp tags from the old db instead
+for that example (upgrading to v0.11.20), first launch the new version of copyparty like usual, let it make a backup of the old db and rebuild the new db until the point where it starts running mtp (colored messages as it adds the mtp tags), that's when you hit CTRL-C and patch in the old mtp tags from the old db instead

 so assuming you have `-mtp` parsers to provide the tags `key` and `.bpm`:

 ```
-~/bin/dbtool.py -ls up2k.db
-~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -cmp
-~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy key
-~/bin/dbtool.py -src up2k.db.v0.10.22 up2k.db -rm-mtp-flag -copy .bpm -vac
+cd /mnt/nas/music/.hist
+~/src/copyparty/bin/dbtool.py -ls up2k.db
+~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -cmp
+~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy key
+~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy .bpm -vac
 ```
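Before copying, it can be reassuring to check how many of those tags the old database actually holds; a small sqlite3 sketch against the `mt` table that dbtool reads (the source filename is a placeholder):

```py
import sqlite3

src = sqlite3.connect("up2k.db.bak")  # placeholder: whichever old db you are copying from
for tag in ("key", ".bpm"):
    n = src.execute("select count(w) from mt where k = ?", (tag,)).fetchone()[0]
    print(n, "rows with tag", tag, "in the source db")
```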

(filename not captured)
@@ -345,7 +345,7 @@ class Gateway(object):
 except:
 pass

-def sendreq(self, *args, headers={}, **kwargs):
+def sendreq(self, meth, path, headers, **kwargs):
 if self.password:
 headers["Cookie"] = "=".join(["cppwd", self.password])

@@ -354,21 +354,21 @@ class Gateway(object):
 if c.rx_path:
 raise Exception()

-c.request(*list(args), headers=headers, **kwargs)
+c.request(meth, path, headers=headers, **kwargs)
 c.rx = c.getresponse()
 return c
 except:
 tid = threading.current_thread().ident
 dbg(
-"\033[1;37;44mbad conn {:x}\n {}\n {}\033[0m".format(
-tid, " ".join(str(x) for x in args), c.rx_path if c else "(null)"
+"\033[1;37;44mbad conn {:x}\n {} {}\n {}\033[0m".format(
+tid, meth, path, c.rx_path if c else "(null)"
 )
 )

 self.closeconn(c)
 c = self.getconn()
 try:
-c.request(*list(args), headers=headers, **kwargs)
+c.request(meth, path, headers=headers, **kwargs)
 c.rx = c.getresponse()
 return c
 except:
@@ -386,7 +386,7 @@ class Gateway(object):
 path = dewin(path)

 web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
-c = self.sendreq("GET", web_path)
+c = self.sendreq("GET", web_path, {})
 if c.rx.status != 200:
 self.closeconn(c)
 log(
@@ -440,7 +440,7 @@ class Gateway(object):
 )
 )

-c = self.sendreq("GET", web_path, headers={"Range": hdr_range})
+c = self.sendreq("GET", web_path, {"Range": hdr_range})
 if c.rx.status != http.client.PARTIAL_CONTENT:
 self.closeconn(c)
 raise Exception(
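One likely motivation for the new `sendreq(self, meth, path, headers, ...)` signature: the old `headers={}` default is a mutable default argument, which Python creates once and then shares between calls, so header entries such as the password cookie can leak from one request into the next. A small standalone demonstration of the pitfall (not copyparty code):

```py
def sendreq_old(path, headers={}):  # the default dict is created once, at definition time
    headers["Cookie"] = "cppwd=hunter2"
    return headers

def sendreq_new(path, headers):  # the caller owns the dict, nothing leaks between calls
    headers["Cookie"] = "cppwd=hunter2"
    return headers

a = sendreq_old("/a")
b = sendreq_old("/b")
print(a is b)  # True: both calls mutated the same shared default dict

print(sendreq_new("/a", {}) is sendreq_new("/b", {}))  # False: separate dicts
```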

(filename not captured)
@@ -54,10 +54,13 @@ MACOS = platform.system() == "Darwin"
 info = log = dbg = None


-print("{} v{} @ {}".format(
-platform.python_implementation(),
-".".join([str(x) for x in sys.version_info]),
-sys.executable))
+print(
+"{} v{} @ {}".format(
+platform.python_implementation(),
+".".join([str(x) for x in sys.version_info]),
+sys.executable,
+)
+)


 try:
@@ -299,14 +302,14 @@ class Gateway(object):
 except:
 pass

-def sendreq(self, *args, headers={}, **kwargs):
+def sendreq(self, meth, path, headers, **kwargs):
 tid = get_tid()
 if self.password:
 headers["Cookie"] = "=".join(["cppwd", self.password])

 try:
 c = self.getconn(tid)
-c.request(*list(args), headers=headers, **kwargs)
+c.request(meth, path, headers=headers, **kwargs)
 return c.getresponse()
 except:
 dbg("bad conn")
@@ -314,7 +317,7 @@ class Gateway(object):
 self.closeconn(tid)
 try:
 c = self.getconn(tid)
-c.request(*list(args), headers=headers, **kwargs)
+c.request(meth, path, headers=headers, **kwargs)
 return c.getresponse()
 except:
 info("http connection failed:\n" + traceback.format_exc())
@@ -331,7 +334,7 @@ class Gateway(object):
 path = dewin(path)

 web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots&ls"
-r = self.sendreq("GET", web_path)
+r = self.sendreq("GET", web_path, {})
 if r.status != 200:
 self.closeconn()
 log(
@@ -368,7 +371,7 @@ class Gateway(object):
 )
 )

-r = self.sendreq("GET", web_path, headers={"Range": hdr_range})
+r = self.sendreq("GET", web_path, {"Range": hdr_range})
 if r.status != http.client.PARTIAL_CONTENT:
 self.closeconn()
 raise Exception(

(filename not captured)
@@ -2,10 +2,13 @@

 import os
 import sys
+import time
+import shutil
 import sqlite3
 import argparse

-DB_VER = 3
+DB_VER1 = 3
+DB_VER2 = 4


 def die(msg):
@@ -45,18 +48,21 @@ def compare(n1, d1, n2, d2, verbose):
 nt = next(d1.execute("select count(w) from up"))[0]
 n = 0
 miss = 0
-for w, rd, fn in d1.execute("select w, rd, fn from up"):
+for w1, rd, fn in d1.execute("select w, rd, fn from up"):
 n += 1
 if n % 25_000 == 0:
 m = f"\033[36mchecked {n:,} of {nt:,} files in {n1} against {n2}\033[0m"
 print(m)

-q = "select w from up where substr(w,1,16) = ?"
-hit = d2.execute(q, (w[:16],)).fetchone()
+if rd.split("/", 1)[0] == ".hist":
+continue
+
+q = "select w from up where rd = ? and fn = ?"
+hit = d2.execute(q, (rd, fn)).fetchone()
 if not hit:
 miss += 1
 if verbose:
-print(f"file in {n1} missing in {n2}: [{w}] {rd}/{fn}")
+print(f"file in {n1} missing in {n2}: [{w1}] {rd}/{fn}")

 print(f"  {miss} files in {n1} missing in {n2}\n")

@@ -64,15 +70,30 @@ def compare(n1, d1, n2, d2, verbose):
 n = 0
 miss = {}
 nmiss = 0
-for w, k, v in d1.execute("select * from mt"):
+for w1, k, v in d1.execute("select * from mt"):

 n += 1
 if n % 100_000 == 0:
 m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m"
 print(m)

-v2 = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
-if v2:
-v2 = v2[0]
+q = "select rd, fn from up where substr(w,1,16) = ?"
+rd, fn = d1.execute(q, (w1,)).fetchone()
+if rd.split("/", 1)[0] == ".hist":
+continue
+
+q = "select substr(w,1,16) from up where rd = ? and fn = ?"
+w2 = d2.execute(q, (rd, fn)).fetchone()
+if w2:
+w2 = w2[0]
+
+v2 = None
+if w2:
+v2 = d2.execute(
+"select v from mt where w = ? and +k = ?", (w2, k)
+).fetchone()
+if v2:
+v2 = v2[0]

 # if v != v2 and v2 and k in [".bpm", "key"] and n2 == "src":
 #     print(f"{w} [{rd}/{fn}] {k} = [{v}] / [{v2}]")
@@ -99,9 +120,7 @@ def compare(n1, d1, n2, d2, verbose):
 miss[k] = 1

 if verbose:
-q = "select rd, fn from up where substr(w,1,16) = ?"
-rd, fn = d1.execute(q, (w,)).fetchone()
-print(f"missing in {n2}: [{w}] [{rd}/{fn}] {k} = {v}")
+print(f"missing in {n2}: [{w1}] [{rd}/{fn}] {k} = {v}")

 for k, v in sorted(miss.items()):
 if v:
@@ -114,24 +133,35 @@ def copy_mtp(d1, d2, tag, rm):
 nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0]
 n = 0
 ndone = 0
-for w, k, v in d1.execute("select * from mt where k = ?", (tag,)):
+for w1, k, v in d1.execute("select * from mt where k = ?", (tag,)):
 n += 1
 if n % 25_000 == 0:
 m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ndone} copied\033[0m"
 print(m)

-hit = d2.execute("select v from mt where w = ? and +k = ?", (w, k)).fetchone()
+q = "select rd, fn from up where substr(w,1,16) = ?"
+rd, fn = d1.execute(q, (w1,)).fetchone()
+if rd.split("/", 1)[0] == ".hist":
+continue
+
+q = "select substr(w,1,16) from up where rd = ? and fn = ?"
+w2 = d2.execute(q, (rd, fn)).fetchone()
+if not w2:
+continue
+
+w2 = w2[0]
+hit = d2.execute("select v from mt where w = ? and +k = ?", (w2, k)).fetchone()
 if hit:
 hit = hit[0]

 if hit != v:
 ndone += 1
 if hit is not None:
-d2.execute("delete from mt where w = ? and +k = ?", (w, k))
+d2.execute("delete from mt where w = ? and +k = ?", (w2, k))

-d2.execute("insert into mt values (?,?,?)", (w, k, v))
+d2.execute("insert into mt values (?,?,?)", (w2, k, v))
 if rm:
-d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w,))
+d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w2,))

 d2.commit()
 print(f"copied {ndone} {tag} tags over")
@@ -168,6 +198,23 @@ def main():
 db = sqlite3.connect(ar.db)
 ds = sqlite3.connect(ar.src) if ar.src else None

+# revert journals
+for d, p in [[db, ar.db], [ds, ar.src]]:
+if not d:
+continue
+
+pj = "{}-journal".format(p)
+if not os.path.exists(pj):
+continue
+
+d.execute("create table foo (bar int)")
+d.execute("drop table foo")
+
+if ar.copy:
+db.close()
+shutil.copy2(ar.db, "{}.bak.dbtool.{:x}".format(ar.db, int(time.time())))
+db = sqlite3.connect(ar.db)
+
 for d, n in [[ds, "src"], [db, "dst"]]:
 if not d:
 continue
@@ -176,8 +223,8 @@ def main():
 if ver == "corrupt":
 die("{} database appears to be corrupt, sorry")

-if ver != DB_VER:
-m = f"{n} db is version {ver}, this tool only supports version {DB_VER}, please upgrade it with copyparty first"
+if ver < DB_VER1 or ver > DB_VER2:
+m = f"{n} db is version {ver}, this tool only supports versions between {DB_VER1} and {DB_VER2}, please upgrade it with copyparty first"
 die(m)

 if ar.ls:
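The `create table foo` / `drop table foo` pair added to `main()` looks odd but appears to be there so that a leftover `*-journal` file gets rolled back by sqlite before `shutil.copy2` snapshots the database; a standalone sketch of the same idea (the table and file names are placeholders):

```py
import os
import sqlite3

def settle_journal(db_path):
    # if a hot journal exists, one throwaway write makes sqlite roll it back,
    # so a file-level copy of the db afterwards should be consistent
    if not os.path.exists(db_path + "-journal"):
        return
    db = sqlite3.connect(db_path)
    db.execute("create table _dbtool_probe (x int)")
    db.execute("drop table _dbtool_probe")
    db.commit()
    db.close()

settle_journal("up2k.db")  # placeholder path
```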

(filename not captured)
@@ -60,7 +60,7 @@ def main():
 try:
 det(tf)
 except:
-pass
+pass # mute
 finally:
 os.unlink(tf)

bin/mtag/audio-key-slicing.py (new executable file, 123 lines)
@@ -0,0 +1,123 @@
+#!/usr/bin/env python
+
+import re
+import os
+import sys
+import tempfile
+import subprocess as sp
+
+import keyfinder
+
+from copyparty.util import fsenc
+
+"""
+dep: github/mixxxdj/libkeyfinder
+dep: pypi/keyfinder
+dep: ffmpeg
+
+note: this is a janky edition of the regular audio-key.py,
+slicing the files at 20sec intervals and keeping 5sec from each,
+surprisingly accurate but still garbage (446 ok, 69 bad, 13% miss)
+
+it is fast tho
+"""
+
+
+def get_duration():
+    # TODO provide ffprobe tags to mtp as json
+
+    # fmt: off
+    dur = sp.check_output([
+        "ffprobe",
+        "-hide_banner",
+        "-v", "fatal",
+        "-show_streams",
+        "-show_format",
+        fsenc(sys.argv[1])
+    ])
+    # fmt: on
+
+    dur = dur.decode("ascii", "replace").split("\n")
+    dur = [x.split("=")[1] for x in dur if x.startswith("duration=")]
+    dur = [float(x) for x in dur if re.match(r"^[0-9\.,]+$", x)]
+    return list(sorted(dur))[-1] if dur else None
+
+
+def get_segs(dur):
+    # keep first 5s of each 20s,
+    # keep entire last segment
+    ofs = 0
+    segs = []
+    while True:
+        seg = [ofs, 5]
+        segs.append(seg)
+        if dur - ofs < 20:
+            seg[-1] = int(dur - seg[0])
+            break
+
+        ofs += 20
+
+    return segs
+
+
+def slice(tf):
+    dur = get_duration()
+    dur = min(dur, 600)  # max 10min
+    segs = get_segs(dur)
+
+    # fmt: off
+    cmd = [
+        "ffmpeg",
+        "-nostdin",
+        "-hide_banner",
+        "-v", "fatal",
+        "-y"
+    ]
+
+    for seg in segs:
+        cmd.extend([
+            "-ss", str(seg[0]),
+            "-i", fsenc(sys.argv[1])
+        ])
+
+    filt = ""
+    for n, seg in enumerate(segs):
+        filt += "[{}:a:0]atrim=duration={}[a{}]; ".format(n, seg[1], n)
+
+    prev = "a0"
+    for n in range(1, len(segs)):
+        nxt = "b{}".format(n)
+        filt += "[{}][a{}]acrossfade=d=0.5[{}]; ".format(prev, n, nxt)
+        prev = nxt
+
+    cmd.extend([
+        "-filter_complex", filt[:-2],
+        "-map", "[{}]".format(nxt),
+        "-sample_fmt", "s16",
+        tf
+    ])
+    # fmt: on
+
+    # print(cmd)
+    sp.check_call(cmd)
+
+
+def det(tf):
+    slice(tf)
+    print(keyfinder.key(tf).camelot())
+
+
+def main():
+    with tempfile.NamedTemporaryFile(suffix=".flac", delete=False) as f:
+        f.write(b"h")
+        tf = f.name
+
+    try:
+        det(tf)
+    finally:
+        os.unlink(tf)
+        pass
+
+
+if __name__ == "__main__":
+    main()
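(a quick sanity-check of the slicing logic above, not part of the diff; the 50-second
input is hypothetical and the values are worked out by hand from get_segs() and slice())

    # for a 50s file, get_segs(50) keeps 5s of each 20s window plus the whole last window:
    #   [[0, 5], [20, 5], [40, 10]]
    # slice() then feeds ffmpeg one "-ss <ofs> -i <file>" pair per segment and builds
    #   [0:a:0]atrim=duration=5[a0]; [1:a:0]atrim=duration=5[a1]; [2:a:0]atrim=duration=10[a2]; [a0][a1]acrossfade=d=0.5[b1]; [b1][a2]acrossfade=d=0.5[b2]
    # mapping the final label [b2] into the temporary flac that keyfinder reads
    print(get_segs(50))  # -> [[0, 5], [20, 5], [40, 10]]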
@@ -1,18 +1,54 @@
 #!/usr/bin/env python

+import os
 import sys
+import tempfile
+import subprocess as sp
 import keyfinder

+from copyparty.util import fsenc
+
 """
 dep: github/mixxxdj/libkeyfinder
 dep: pypi/keyfinder
 dep: ffmpeg

-note: cannot fsenc
 """


-try:
-    print(keyfinder.key(sys.argv[1]).camelot())
-except:
-    pass
+# tried trimming the first/last 5th, bad idea,
+# misdetects 9a law field (Sphere Caliber) as 10b,
+# obvious when mixing 9a ghostly parapara ship
+
+
+def det(tf):
+    # fmt: off
+    sp.check_call([
+        "ffmpeg",
+        "-nostdin",
+        "-hide_banner",
+        "-v", "fatal",
+        "-y", "-i", fsenc(sys.argv[1]),
+        "-t", "300",
+        "-sample_fmt", "s16",
+        tf
+    ])
+    # fmt: on
+
+    print(keyfinder.key(tf).camelot())
+
+
+def main():
+    with tempfile.NamedTemporaryFile(suffix=".flac", delete=False) as f:
+        f.write(b"h")
+        tf = f.name
+
+    try:
+        det(tf)
+    except:
+        pass  # mute
+    finally:
+        os.unlink(tf)
+
+
+if __name__ == "__main__":
+    main()
@@ -1,7 +1,15 @@
-# when running copyparty behind a reverse-proxy,
-# make sure that copyparty allows at least as many clients as the proxy does,
-# so run copyparty with -nc 512 if your nginx has the default limits
-# (worker_processes 1, worker_connections 512)
+# when running copyparty behind a reverse proxy,
+# the following arguments are recommended:
+#
+# -nc 512          important, see next paragraph
+# --http-only      lower latency on initial connection
+# -i 127.0.0.1     only accept connections from nginx
+#
+# -nc must match or exceed the webserver's max number of concurrent clients;
+# nginx default is 512 (worker_processes 1, worker_connections 512)
+#
+# you may also consider adding -j0 for CPU-intensive configurations
+# (not that i can really think of any good examples)

 upstream cpp {
 	server 127.0.0.1:3923;
@@ -7,11 +7,19 @@
 # you may want to:
 #   change '/usr/bin/python' to another interpreter
 #   change '/mnt::a' to another location or permission-set
+#
+# with `Type=notify`, copyparty will signal systemd when it is ready to
+# accept connections; correctly delaying units depending on copyparty.
+# But note that journalctl will get the timestamps wrong due to
+# python disabling line-buffering, so messages are out-of-order:
+# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png

 [Unit]
 Description=copyparty file server

 [Service]
+Type=notify
+SyslogIdentifier=copyparty
 ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
 ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
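(the Type=notify handshake itself is not part of this diff; as a rough sketch, assuming
the standard NOTIFY_SOCKET protocol that systemd offers to notify-type services, the
ready-signal can be sent like this)

    import os
    import socket

    def sd_notify(msg=b"READY=1"):
        # systemd exports NOTIFY_SOCKET to the service; absent means not supervised
        addr = os.getenv("NOTIFY_SOCKET")
        if not addr:
            return

        if addr.startswith("@"):
            addr = "\0" + addr[1:]  # abstract-namespace unix socket

        s = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
        try:
            s.sendto(msg, addr)
        finally:
            s.close()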
@@ -9,6 +9,9 @@ import os
 PY2 = sys.version_info[0] == 2
 if PY2:
     sys.dont_write_bytecode = True
+    unicode = unicode
+else:
+    unicode = str

 WINDOWS = False
 if platform.system() == "Windows":
@@ -20,10 +20,10 @@ import threading
 import traceback
 from textwrap import dedent

-from .__init__ import E, WINDOWS, VT100, PY2
+from .__init__ import E, WINDOWS, VT100, PY2, unicode
 from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
 from .svchub import SvcHub
-from .util import py_desc, align_tab, IMPLICATIONS, alltrace
+from .util import py_desc, align_tab, IMPLICATIONS

 HAVE_SSL = True
 try:
@@ -31,6 +31,8 @@ try:
 except:
     HAVE_SSL = False

+printed = ""
+

 class RiceFormatter(argparse.HelpFormatter):
     def _get_help_string(self, action):
@@ -61,8 +63,15 @@ class Dodge11874(RiceFormatter):
         super(Dodge11874, self).__init__(*args, **kwargs)


+def lprint(*a, **ka):
+    global printed
+
+    printed += " ".join(unicode(x) for x in a) + ka.get("end", "\n")
+    print(*a, **ka)
+
+
 def warn(msg):
-    print("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))
+    lprint("\033[1mwarning:\033[0;33m {}\033[0m\n".format(msg))


 def ensure_locale():
@@ -73,7 +82,7 @@ def ensure_locale():
     ]:
         try:
             locale.setlocale(locale.LC_ALL, x)
-            print("Locale:", x)
+            lprint("Locale:", x)
             break
         except:
             continue
@@ -94,7 +103,7 @@ def ensure_cert():

     try:
         if filecmp.cmp(cert_cfg, cert_insec):
-            print(
+            lprint(
                 "\033[33m  using default TLS certificate; https will be insecure."
                 + "\033[36m\n  certificate location: {}\033[0m\n".format(cert_cfg)
             )
@@ -123,7 +132,7 @@ def configure_ssl_ver(al):
     if "help" in sslver:
         avail = [terse_sslver(x[6:]) for x in flags]
         avail = " ".join(sorted(avail) + ["all"])
-        print("\navailable ssl/tls versions:\n  " + avail)
+        lprint("\navailable ssl/tls versions:\n  " + avail)
         sys.exit(0)

     al.ssl_flags_en = 0
@@ -143,7 +152,7 @@ def configure_ssl_ver(al):

     for k in ["ssl_flags_en", "ssl_flags_de"]:
         num = getattr(al, k)
-        print("{}: {:8x} ({})".format(k, num, num))
+        lprint("{}: {:8x} ({})".format(k, num, num))

     # think i need that beer now

@@ -160,13 +169,13 @@ def configure_ssl_ciphers(al):
     try:
         ctx.set_ciphers(al.ciphers)
     except:
-        print("\n\033[1;31mfailed to set ciphers\033[0m\n")
+        lprint("\n\033[1;31mfailed to set ciphers\033[0m\n")

     if not hasattr(ctx, "get_ciphers"):
-        print("cannot read cipher list: openssl or python too old")
+        lprint("cannot read cipher list: openssl or python too old")
     else:
         ciphers = [x["description"] for x in ctx.get_ciphers()]
-        print("\n  ".join(["\nenabled ciphers:"] + align_tab(ciphers) + [""]))
+        lprint("\n  ".join(["\nenabled ciphers:"] + align_tab(ciphers) + [""]))

     if is_help:
         sys.exit(0)
@@ -182,16 +191,6 @@ def sighandler(sig=None, frame=None):
     print("\n".join(msg))


-def stackmon(fp, ival):
-    ctr = 0
-    while True:
-        ctr += 1
-        time.sleep(ival)
-        st = "{}, {}\n{}".format(ctr, time.time(), alltrace())
-        with open(fp, "wb") as f:
-            f.write(st.encode("utf-8", "replace"))
-
-
 def run_argparse(argv, formatter):
     ap = argparse.ArgumentParser(
         formatter_class=formatter,
@@ -249,30 +248,32 @@ def run_argparse(argv, formatter):
         ),
     )
     # fmt: off
-    ap.add_argument("-c", metavar="PATH", type=str, action="append", help="add config file")
-    ap.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
-    ap.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
-    ap.add_argument("-a", metavar="ACCT", type=str, action="append", help="add account, USER:PASS; example [ed:wark")
-    ap.add_argument("-v", metavar="VOL", type=str, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
-    ap.add_argument("-ed", action="store_true", help="enable ?dots")
-    ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
-    ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
-    ap.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
-    ap.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
-    ap.add_argument("--urlform", metavar="MODE", type=str, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
+    u = unicode
+    ap2 = ap.add_argument_group('general options')
+    ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file")
+    ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
+    ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
+    ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark")
+    ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
+    ap2.add_argument("-ed", action="store_true", help="enable ?dots")
+    ap2.add_argument("-emp", action="store_true", help="enable markdown plugins")
+    ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
+    ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
+    ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
+    ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")

     ap2 = ap.add_argument_group('network options')
-    ap2.add_argument("-i", metavar="IP", type=str, default="0.0.0.0", help="ip to bind (comma-sep.)")
-    ap2.add_argument("-p", metavar="PORT", type=str, default="3923", help="ports to bind (comma/range)")
+    ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
+    ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
     ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")

     ap2 = ap.add_argument_group('SSL/TLS options')
     ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
     ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
-    ap2.add_argument("--ssl-ver", metavar="LIST", type=str, help="set allowed ssl/tls versions; [help] shows available versions; default is what your python version considers safe")
-    ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ssl/tls ciphers; [help] shows available ciphers")
+    ap2.add_argument("--ssl-ver", metavar="LIST", type=u, help="set allowed ssl/tls versions; [help] shows available versions; default is what your python version considers safe")
+    ap2.add_argument("--ciphers", metavar="LIST", type=u, help="set allowed ssl/tls ciphers; [help] shows available ciphers")
     ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
-    ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
+    ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets")

     ap2 = ap.add_argument_group('opt-outs')
     ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
@@ -281,14 +282,16 @@ def run_argparse(argv, formatter):
     ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")

     ap2 = ap.add_argument_group('safety options')
-    ap2.add_argument("--ls", metavar="U[,V[,F]]", help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
-    ap2.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
+    ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
+    ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt")

     ap2 = ap.add_argument_group('logging options')
     ap2.add_argument("-q", action="store_true", help="quiet")
+    ap2.add_argument("-lo", metavar="PATH", type=u, help="logfile, example: cpp-%%Y-%%m%%d-%%H%%M%%S.txt.xz")
     ap2.add_argument("--log-conn", action="store_true", help="print tcp-server msgs")
-    ap2.add_argument("--ihead", metavar="HEADER", action='append', help="dump incoming header")
-    ap2.add_argument("--lf-url", metavar="RE", type=str, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")
+    ap2.add_argument("--log-htp", action="store_true", help="print http-server threadpool scaling")
+    ap2.add_argument("--ihead", metavar="HEADER", type=u, action='append', help="dump incoming header")
+    ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching")

     ap2 = ap.add_argument_group('admin panel options')
     ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)")
@@ -303,8 +306,9 @@ def run_argparse(argv, formatter):
     ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
     ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs")
     ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown")
-    ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval")
+    ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
     ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
+    ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for")

     ap2 = ap.add_argument_group('database options')
     ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
@@ -313,24 +317,27 @@ def run_argparse(argv, formatter):
     ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
     ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
     ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
-    ap2.add_argument("--hist", metavar="PATH", type=str, help="where to store volume state")
+    ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume state")
     ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
-    ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead")
+    ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead")
     ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
-    ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
-    ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
+    ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader")
+    ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
+    ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
         default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,ac,vc,res,.fps")
-    ap2.add_argument("-mtp", metavar="M=[f,]bin", action="append", type=str, help="read tag M using bin")
+    ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin")
     ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")

     ap2 = ap.add_argument_group('appearance options')
-    ap2.add_argument("--css-browser", metavar="L", help="URL to additional CSS to include")
+    ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")

     ap2 = ap.add_argument_group('debug options')
     ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
     ap2.add_argument("--no-scandir", action="store_true", help="disable scandir")
     ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing")
-    ap2.add_argument("--stackmon", metavar="P,S", help="write stacktrace to Path every S second")
+    ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
+    ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second")
+    ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")

     return ap.parse_args(args=argv[1:])
     # fmt: on
@@ -347,7 +354,7 @@ def main(argv=None):
     desc = py_desc().replace("[", "\033[1;30m[")

     f = '\033[36mcopyparty v{} "\033[35m{}\033[36m" ({})\n{}\033[0m\n'
-    print(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc))
+    lprint(f.format(S_VERSION, CODENAME, S_BUILD_DT, desc))

     ensure_locale()
     if HAVE_SSL:
@@ -361,7 +368,7 @@ def main(argv=None):
             continue

         msg = "\033[1;31mWARNING:\033[0;1m\n  {} \033[0;33mwas replaced with\033[0;1m {} \033[0;33mand will be removed\n\033[0m"
-        print(msg.format(dk, nk))
+        lprint(msg.format(dk, nk))
         argv[idx] = nk
         time.sleep(2)

@@ -370,16 +377,6 @@ def main(argv=None):
     except AssertionError:
         al = run_argparse(argv, Dodge11874)

-    if al.stackmon:
-        fp, f = al.stackmon.rsplit(",", 1)
-        f = int(f)
-        t = threading.Thread(
-            target=stackmon,
-            args=(fp, f),
-        )
-        t.daemon = True
-        t.start()
-
     # propagate implications
     for k1, k2 in IMPLICATIONS:
         if getattr(al, k1):
@@ -410,12 +407,12 @@ def main(argv=None):
             + " (if you crash with codec errors then that is why)"
         )

-    if WINDOWS and sys.version_info < (3, 6):
+    if sys.version_info < (3, 6):
         al.no_scandir = True

     # signal.signal(signal.SIGINT, sighandler)

-    SvcHub(al).run()
+    SvcHub(al, argv, printed).run()


 if __name__ == "__main__":
@@ -1,8 +1,8 @@
 # coding: utf-8

-VERSION = (0, 11, 18)
+VERSION = (0, 11, 43)
 CODENAME = "the grid"
-BUILD_DT = (2021, 6, 18)
+BUILD_DT = (2021, 7, 19)

 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
@@ -10,13 +10,14 @@ import hashlib
 import threading

 from .__init__ import WINDOWS
-from .util import IMPLICATIONS, undot, Pebkac, fsdec, fsenc, statdir, nuprint
+from .util import IMPLICATIONS, uncyg, undot, Pebkac, fsdec, fsenc, statdir


 class VFS(object):
     """single level in the virtual fs"""

-    def __init__(self, realpath, vpath, uread=[], uwrite=[], uadm=[], flags={}):
+    def __init__(self, log, realpath, vpath, uread, uwrite, uadm, flags):
+        self.log = log
         self.realpath = realpath  # absolute path on host filesystem
         self.vpath = vpath  # absolute path in the virtual filesystem
         self.uread = uread  # users who can read this
@@ -62,6 +63,7 @@ class VFS(object):
             return self.nodes[name].add(src, dst)

         vn = VFS(
+            self.log,
             os.path.join(self.realpath, name) if self.realpath else None,
             "{}/{}".format(self.vpath, name).lstrip("/"),
             self.uread,
@@ -79,7 +81,7 @@ class VFS(object):

         # leaf does not exist; create and keep permissions blank
         vp = "{}/{}".format(self.vpath, dst).lstrip("/")
-        vn = VFS(src, vp)
+        vn = VFS(self.log, src, vp, [], [], [], {})
         vn.dbv = self.dbv or self
         self.nodes[dst] = vn
         return vn
@@ -181,7 +183,7 @@ class VFS(object):
         """return user-readable [fsdir,real,virt] items at vpath"""
         virt_vis = {}  # nodes readable by user
         abspath = self.canonical(rem)
-        real = list(statdir(nuprint, scandir, lstat, abspath))
+        real = list(statdir(self.log, scandir, lstat, abspath))
         real.sort()
         if not rem:
             for name, vn2 in sorted(self.nodes.items()):
@@ -208,8 +210,13 @@ class VFS(object):
             rem, uname, scandir, incl_wo=False, lstat=lstat
         )

-        if seen and not fsroot.startswith(seen[-1]) and fsroot in seen:
-            print("bailing from symlink loop,\n  {}\n  {}".format(seen[-1], fsroot))
+        if (
+            seen
+            and (not fsroot.startswith(seen[-1]) or fsroot == seen[-1])
+            and fsroot in seen
+        ):
+            m = "bailing from symlink loop,\n  prev: {}\n  curr: {}\n  from: {}/{}"
+            self.log("vfs.walk", m.format(seen[-1], fsroot, self.vpath, rem), 3)
             return

         seen = seen[:] + [fsroot]
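(hand-rolled illustration, not part of the diff; the /srv/media path is made up -- when a
symlink leads straight back to the folder currently being walked, fsroot equals seen[-1],
so startswith() is true and the old guard never fired; the added "or fsroot == seen[-1]"
catches exactly that case)

    seen = ["/srv/media"]
    fsroot = "/srv/media"  # a symlink inside /srv/media resolved back to /srv/media

    old = bool(seen) and not fsroot.startswith(seen[-1]) and fsroot in seen
    new = bool(seen) and (not fsroot.startswith(seen[-1]) or fsroot == seen[-1]) and fsroot in seen
    print(old, new)  # False True -- only the new condition reports the loop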
@@ -242,6 +249,10 @@ class VFS(object):
         if flt:
             flt = {k: True for k in flt}

+        f1 = "{0}.hist{0}up2k.".format(os.sep)
+        f2a = os.sep + "dir.txt"
+        f2b = "{0}.hist{0}".format(os.sep)
+
         for vpath, apath, files, rd, vd in self.walk(
             "", vrem, [], uname, dots, scandir, False
         ):
@@ -275,7 +286,11 @@ class VFS(object):
                     del vd[x]

             # up2k filetring based on actual abspath
-            files = [x for x in files if "{0}.hist{0}up2k.".format(os.sep) not in x[1]]
+            files = [
+                x
+                for x in files
+                if f1 not in x[1] and (not x[1].endswith(f2a) or f2b not in x[1])
+            ]

             for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
                 yield f
@@ -439,8 +454,8 @@ class AuthSrv(object):
                 raise Exception("invalid -v argument: [{}]".format(v_str))

             src, dst, perms = m.groups()
-            if WINDOWS and src.startswith("/"):
-                src = "{}:\\{}".format(src[1], src[3:])
+            if WINDOWS:
+                src = uncyg(src)

             # print("\n".join([src, dst, perms]))
             src = fsdec(os.path.abspath(fsenc(src)))
@@ -466,15 +481,26 @@ class AuthSrv(object):
                 )
             except:
                 m = "\n\033[1;31m\nerror in config file {} on line {}:\n\033[0m"
-                print(m.format(cfg_fn, self.line_ctr))
+                self.log(m.format(cfg_fn, self.line_ctr), 1)
                 raise

+        # case-insensitive; normalize
+        if WINDOWS:
+            cased = {}
+            for k, v in mount.items():
+                try:
+                    cased[k] = fsdec(os.path.realpath(fsenc(v)))
+                except:
+                    cased[k] = v
+
+            mount = cased
+
         if not mount:
             # -h says our defaults are CWD at root and read/write for everyone
-            vfs = VFS(os.path.abspath("."), "", ["*"], ["*"])
+            vfs = VFS(self.log_func, os.path.abspath("."), "", ["*"], ["*"], ["*"], {})
         elif "" not in mount:
             # there's volumes but no root; make root inaccessible
-            vfs = VFS(None, "")
+            vfs = VFS(self.log_func, None, "", [], [], [], {})
             vfs.flags["d2d"] = True

         maxdepth = 0
@@ -486,7 +512,13 @@ class AuthSrv(object):
             if dst == "":
                 # rootfs was mapped; fully replaces the default CWD vfs
                 vfs = VFS(
-                    mount[dst], dst, mread[dst], mwrite[dst], madm[dst], mflags[dst]
+                    self.log_func,
+                    mount[dst],
+                    dst,
+                    mread[dst],
+                    mwrite[dst],
+                    madm[dst],
+                    mflags[dst],
                 )
                 continue

@@ -524,9 +556,7 @@ class AuthSrv(object):
             if vflag == "-":
                 pass
             elif vflag:
-                if WINDOWS and vflag.startswith("/"):
-                    vflag = "{}:\\{}".format(vflag[1], vflag[3:])
-                vol.histpath = vflag
+                vol.histpath = uncyg(vflag) if WINDOWS else vflag
             elif self.args.hist:
                 for nch in range(len(hid)):
                     hpath = os.path.join(self.args.hist, hid[: nch + 1])
@@ -684,6 +714,11 @@ class AuthSrv(object):
         self.user = user
         self.iuser = {v: k for k, v in user.items()}

+        self.re_pwd = None
+        pwds = [re.escape(x) for x in self.iuser.keys()]
+        if pwds:
+            self.re_pwd = re.compile("=(" + "|".join(pwds) + ")([]&; ]|$)")
+
         # import pprint
         # pprint.pprint({"usr": user, "rd": mread, "wr": mwrite, "mnt": mount})

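(small demo of the redaction pattern, not part of the diff; the ed:wark account is the
example from -a above -- httpcli.py's log(), changed later in this changeset, runs every
message through this regex and unpwd() swaps the password for the reverse-video username)

    import re

    iuser = {"wark": "ed"}  # password -> username, as AuthSrv builds it
    re_pwd = re.compile("=(" + "|".join(re.escape(x) for x in iuser) + ")([]&; ]|$)")

    def unpwd(m):
        a, b = m.groups()
        return "=\033[7m {} \033[27m{}".format(iuser[a], b)

    print(re_pwd.sub(unpwd, "GET /music/?pw=wark&ls HTTP/1.1"))
    # the password "wark" is replaced by the username "ed" in reverse-video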
@@ -766,7 +801,7 @@ class AuthSrv(object):
             msg = [x[1] for x in files]

             if msg:
-                nuprint("\n".join(msg))
+                self.log("\n" + "\n".join(msg))

         if n_bads and flag_p:
             raise Exception("found symlink leaving volume, and strict is set")
@@ -4,17 +4,11 @@ from __future__ import print_function, unicode_literals
 import time
 import threading

-from .__init__ import PY2, WINDOWS, VT100
 from .broker_util import try_exec
 from .broker_mpw import MpWorker
 from .util import mp


-if PY2 and not WINDOWS:
-    from multiprocessing.reduction import ForkingPickler
-    from StringIO import StringIO as MemesIO  # pylint: disable=import-error
-
-
 class BrokerMp(object):
     """external api; manages MpWorkers"""

@@ -33,19 +27,17 @@ class BrokerMp(object):
             cores = mp.cpu_count()

         self.log("broker", "booting {} subprocesses".format(cores))
-        for n in range(cores):
+        for n in range(1, cores + 1):
             q_pend = mp.Queue(1)
             q_yield = mp.Queue(64)

             proc = mp.Process(target=MpWorker, args=(q_pend, q_yield, self.args, n))
             proc.q_pend = q_pend
             proc.q_yield = q_yield
-            proc.nid = n
             proc.clients = {}
-            proc.workload = 0

             thr = threading.Thread(
-                target=self.collector, args=(proc,), name="mp-collector"
+                target=self.collector, args=(proc,), name="mp-sink-{}".format(n)
             )
             thr.daemon = True
             thr.start()
@@ -53,13 +45,6 @@ class BrokerMp(object):
             self.procs.append(proc)
             proc.start()

-        if not self.args.q:
-            thr = threading.Thread(
-                target=self.debug_load_balancer, name="mp-dbg-loadbalancer"
-            )
-            thr.daemon = True
-            thr.start()
-
     def shutdown(self):
         self.log("broker", "shutting down")
         for n, proc in enumerate(self.procs):
@@ -89,20 +74,6 @@ class BrokerMp(object):
             if dest == "log":
                 self.log(*args)

-            elif dest == "workload":
-                with self.mutex:
-                    proc.workload = args[0]
-
-            elif dest == "httpdrop":
-                addr = args[0]
-
-                with self.mutex:
-                    del proc.clients[addr]
-                    if not proc.clients:
-                        proc.workload = 0
-
-                self.hub.tcpsrv.num_clients.add(-1)
-
             elif dest == "retq":
                 # response from previous ipc call
                 with self.retpend_mutex:
@@ -128,38 +99,9 @@ class BrokerMp(object):
         returns a Queue object which eventually contains the response if want_retval
         (not-impl here since nothing uses it yet)
         """
-        if dest == "httpconn":
-            sck, addr = args
-            sck2 = sck
-            if PY2:
-                buf = MemesIO()
-                ForkingPickler(buf).dump(sck)
-                sck2 = buf.getvalue()
-
-            proc = sorted(self.procs, key=lambda x: x.workload)[0]
-            proc.q_pend.put([0, dest, [sck2, addr]])
-
-            with self.mutex:
-                proc.clients[addr] = 50
-                proc.workload += 50
-
+        if dest == "listen":
+            for p in self.procs:
+                p.q_pend.put([0, dest, [args[0], len(self.procs)]])
         else:
             raise Exception("what is " + str(dest))

-    def debug_load_balancer(self):
-        fmt = "\033[1m{}\033[0;36m{:4}\033[0m "
-        if not VT100:
-            fmt = "({}{:4})"
-
-        last = ""
-        while self.procs:
-            msg = ""
-            for proc in self.procs:
-                msg += fmt.format(len(proc.clients), proc.workload)
-
-            if msg != last:
-                last = msg
-                with self.hub.log_mutex:
-                    print(msg)
-
-            time.sleep(0.1)
@@ -3,18 +3,13 @@ from __future__ import print_function, unicode_literals
 from copyparty.authsrv import AuthSrv

 import sys
-import time
 import signal
 import threading

-from .__init__ import PY2, WINDOWS
 from .broker_util import ExceptionalQueue
 from .httpsrv import HttpSrv
 from .util import FAKE_MP

-if PY2 and not WINDOWS:
-    import pickle  # nosec
-
-
 class MpWorker(object):
     """one single mp instance"""
@@ -25,22 +20,23 @@ class MpWorker(object):
         self.args = args
         self.n = n

+        self.log = self._log_disabled if args.q and not args.lo else self._log_enabled
+
         self.retpend = {}
         self.retpend_mutex = threading.Lock()
         self.mutex = threading.Lock()
-        self.workload_thr_alive = False

         # we inherited signal_handler from parent,
         # replace it with something harmless
         if not FAKE_MP:
-            signal.signal(signal.SIGINT, self.signal_handler)
+            for sig in [signal.SIGINT, signal.SIGTERM]:
+                signal.signal(sig, self.signal_handler)

         # starting to look like a good idea
         self.asrv = AuthSrv(args, None, False)

         # instantiate all services here (TODO: inheritance?)
-        self.httpsrv = HttpSrv(self, True)
-        self.httpsrv.disconnect_func = self.httpdrop
+        self.httpsrv = HttpSrv(self, n)

         # on winxp and some other platforms,
         # use thr.join() to block all signals
@@ -49,19 +45,19 @@ class MpWorker(object):
         thr.start()
         thr.join()

-    def signal_handler(self, signal, frame):
+    def signal_handler(self, sig, frame):
         # print('k')
         pass

-    def log(self, src, msg, c=0):
+    def _log_enabled(self, src, msg, c=0):
         self.q_yield.put([0, "log", [src, msg, c]])

+    def _log_disabled(self, src, msg, c=0):
+        pass
+
     def logw(self, msg, c=0):
         self.log("mp{}".format(self.n), msg, c)

-    def httpdrop(self, addr):
-        self.q_yield.put([0, "httpdrop", [addr]])
-
     def main(self):
         while True:
             retq_id, dest, args = self.q_pend.get()
@@ -73,24 +69,8 @@ class MpWorker(object):
                 sys.exit(0)
                 return

-            elif dest == "httpconn":
-                sck, addr = args
-                if PY2:
-                    sck = pickle.loads(sck)  # nosec
-
-                if self.args.log_conn:
-                    self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
-
-                self.httpsrv.accept(sck, addr)
-
-                with self.mutex:
-                    if not self.workload_thr_alive:
-                        self.workload_thr_alive = True
-                        thr = threading.Thread(
-                            target=self.thr_workload, name="mpw-workload"
-                        )
-                        thr.daemon = True
-                        thr.start()
+            elif dest == "listen":
+                self.httpsrv.listen(args[0], args[1])

             elif dest == "retq":
                 # response from previous ipc call
@@ -114,16 +94,3 @@ class MpWorker(object):

                 self.q_yield.put([retq_id, dest, args])
         return retq
-
-    def thr_workload(self):
-        """announce workloads to MpSrv (the mp controller / loadbalancer)"""
-        # avoid locking in extract_filedata by tracking difference here
-        while True:
-            time.sleep(0.2)
-            with self.mutex:
-                if self.httpsrv.num_clients() == 0:
-                    # no clients rn, termiante thread
-                    self.workload_thr_alive = False
-                    return
-
-                self.q_yield.put([0, "workload", [self.httpsrv.workload]])
@@ -3,7 +3,6 @@ from __future__ import print_function, unicode_literals

 import threading

-from .authsrv import AuthSrv
 from .httpsrv import HttpSrv
 from .broker_util import ExceptionalQueue, try_exec

@@ -20,8 +19,7 @@ class BrokerThr(object):
         self.mutex = threading.Lock()

         # instantiate all services here (TODO: inheritance?)
-        self.httpsrv = HttpSrv(self)
-        self.httpsrv.disconnect_func = self.httpdrop
+        self.httpsrv = HttpSrv(self, None)

     def shutdown(self):
         # self.log("broker", "shutting down")
@@ -29,12 +27,8 @@ class BrokerThr(object):
         pass

     def put(self, want_retval, dest, *args):
-        if dest == "httpconn":
-            sck, addr = args
-            if self.args.log_conn:
-                self.log("%s %s" % addr, "|%sC-qpop" % ("-" * 4,), c="1;30")
-
-            self.httpsrv.accept(sck, addr)
-
+        if dest == "listen":
+            self.httpsrv.listen(args[0], 1)
         else:
             # new ipc invoking managed service in hub
@@ -51,6 +45,3 @@ class BrokerThr(object):
         retq = ExceptionalQueue(1)
         retq.put(rv)
         return retq
-
-    def httpdrop(self, addr):
-        self.hub.tcpsrv.num_clients.add(-1)
@@ -10,19 +10,15 @@ import json
 import string
 import socket
 import ctypes
-import traceback
 from datetime import datetime
 import calendar

-from .__init__ import E, PY2, WINDOWS, ANYWIN
+from .__init__ import E, PY2, WINDOWS, ANYWIN, unicode
 from .util import *  # noqa  # pylint: disable=unused-wildcard-import
 from .authsrv import AuthSrv
 from .szip import StreamZip
 from .star import StreamTar

-if not PY2:
-    unicode = str
-
-
 NO_CACHE = {"Cache-Control": "no-cache"}
 NO_STORE = {"Cache-Control": "no-store; max-age=0"}
@@ -41,7 +37,6 @@ class HttpCli(object):
         self.ip = conn.addr[0]
         self.addr = conn.addr  # type: tuple[str, int]
         self.args = conn.args
-        self.is_mp = conn.is_mp
         self.asrv = conn.asrv  # type: AuthSrv
         self.ico = conn.ico
         self.thumbcli = conn.thumbcli
@@ -50,12 +45,21 @@ class HttpCli(object):
         self.tls = hasattr(self.s, "cipher")

         self.bufsz = 1024 * 32
+        self.hint = None
         self.absolute_urls = False
         self.out_headers = {"Access-Control-Allow-Origin": "*"}

     def log(self, msg, c=0):
+        ptn = self.asrv.re_pwd
+        if ptn and ptn.search(msg):
+            msg = ptn.sub(self.unpwd, msg)
+
         self.log_func(self.log_src, msg, c)

+    def unpwd(self, m):
+        a, b = m.groups()
+        return "=\033[7m {} \033[27m{}".format(self.asrv.iuser[a], b)
+
     def _check_nonfatal(self, ex):
         return ex.code < 400 or ex.code in [404, 429]

@@ -64,14 +68,19 @@ class HttpCli(object):
         if rem.startswith("/") or rem.startswith("../") or "/../" in rem:
             raise Exception("that was close")

-    def j2(self, name, **kwargs):
+    def j2(self, name, **ka):
         tpl = self.conn.hsrv.j2[name]
-        return tpl.render(**kwargs) if kwargs else tpl
+        if ka:
+            ka["ts"] = self.conn.hsrv.cachebuster()
+            return tpl.render(**ka)
+
+        return tpl

     def run(self):
         """returns true if connection can be reused"""
         self.keepalive = False
         self.headers = {}
+        self.hint = None
         try:
             headerlines = read_header(self.sr)
             if not headerlines:
@@ -85,9 +94,13 @@ class HttpCli(object):
             try:
                 self.mode, self.req, self.http_ver = headerlines[0].split(" ")
             except:
-                raise Pebkac(400, "bad headers:\n" + "\n".join(headerlines))
+                msg = " ]\n#[ ".join(headerlines)
+                raise Pebkac(400, "bad headers:\n#[ " + msg + " ]")

         except Pebkac as ex:
+            self.mode = "GET"
+            self.req = "[junk]"
+            self.http_ver = "HTTP/1.1"
             # self.log("pebkac at httpcli.run #1: " + repr(ex))
             self.keepalive = self._check_nonfatal(ex)
             self.loud_reply(unicode(ex), status=ex.code)
@@ -115,7 +128,7 @@ class HttpCli(object):
             try:
                 self.ip = vs[n].strip()
             except:
-                self.ip = vs[-1].strip()
+                self.ip = vs[0].strip()
                 self.log("rproxy={} oob x-fwd {}".format(self.args.rproxy, v), c=3)

             self.log_src = self.conn.set_rproxy(self.ip)
@@ -130,6 +143,9 @@ class HttpCli(object):
             if v is not None:
                 self.log("[H] {}: \033[33m[{}]".format(k, v), 6)

+        if "&" in self.req and "?" not in self.req:
+            self.hint = "did you mean '?' instead of '&'"
+
         # split req into vpath + uparam
         uparam = {}
         if "?" not in self.req:
@@ -169,6 +185,9 @@ class HttpCli(object):
         self.rvol, self.wvol, self.avol = [[], [], []]
         self.asrv.vfs.user_tree(self.uname, self.rvol, self.wvol, self.avol)

+        if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"):
+            self.out_headers["Set-Cookie"] = self.get_pwd_cookie(pwd)[0]
+
         ua = self.headers.get("user-agent", "")
         self.is_rclone = ua.startswith("rclone/")
         if self.is_rclone:
@@ -199,12 +218,15 @@ class HttpCli(object):

             self.log("{}\033[0m, {}".format(str(ex), self.vpath), 3)
             msg = "<pre>{}\r\nURL: {}\r\n".format(str(ex), self.vpath)
+            if self.hint:
+                msg += "hint: {}\r\n".format(self.hint)
+
             self.reply(msg.encode("utf-8", "replace"), status=ex.code)
             return self.keepalive
         except Pebkac:
             return False

-    def send_headers(self, length, status=200, mime=None, headers={}):
+    def send_headers(self, length, status=200, mime=None, headers=None):
         response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])]

         if length is not None:
@@ -214,7 +236,8 @@ class HttpCli(object):
         response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close"))

         # headers{} overrides anything set previously
-        self.out_headers.update(headers)
+        if headers:
+            self.out_headers.update(headers)

         # default to utf8 html if no content-type is set
         if not mime:
@@ -231,7 +254,7 @@ class HttpCli(object):
         except:
             raise Pebkac(400, "client d/c while replying headers")

-    def reply(self, body, status=200, mime=None, headers={}):
+    def reply(self, body, status=200, mime=None, headers=None):
         # TODO something to reply with user-supplied values safely
         self.send_headers(len(body), status, mime, headers)

@@ -247,7 +270,7 @@ class HttpCli(object):
         self.log(body.rstrip())
         self.reply(b"<pre>" + body.encode("utf-8") + b"\r\n", *list(args), **kwargs)

-    def urlq(self, add={}, rm=[]):
+    def urlq(self, add, rm):
         """
         generates url query based on uparam (b, pw, all others)
         removing anything in rm, adding pairs in add
@@ -319,6 +342,9 @@ class HttpCli(object):
         if "tree" in self.uparam:
             return self.tx_tree()

+        if "stack" in self.uparam:
+            return self.tx_stack()
+
         # conditional redirect to single volumes
         if self.vpath == "" and not self.ouparam:
             nread = len(self.rvol)
@@ -348,9 +374,6 @@ class HttpCli(object):
         if "scan" in self.uparam:
             return self.scanvol()

-        if "stack" in self.uparam:
-            return self.tx_stack()
-
         return self.tx_browser()

     def handle_options(self):
@@ -456,15 +479,17 @@ class HttpCli(object):
         addr = self.ip.replace(":", ".")
         fn = "put-{:.6f}-{}.bin".format(time.time(), addr)
         path = os.path.join(fdir, fn)
+        if self.args.nw:
+            path = os.devnull
+
         with open(fsenc(path), "wb", 512 * 1024) as f:
-            post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
+            post_sz, _, sha_b64 = hashcopy(reader, f)

-        vfs, vrem = vfs.get_dbv(rem)
+        if not self.args.nw:
+            vfs, vrem = vfs.get_dbv(rem)
         self.conn.hsrv.broker.put(
             False, "up2k.hash_file", vfs.realpath, vfs.flags, vrem, fn
         )

         return post_sz, sha_b64, remains, path
@@ -481,7 +506,7 @@ class HttpCli(object):

         spd1 = get_spd(nbytes, self.t0)
         spd2 = get_spd(self.conn.nbyte, self.conn.t0)
-        return spd1 + " " + spd2
+        return "{} {} n{}".format(spd1, spd2, self.conn.nreq)

     def handle_post_multipart(self):
         self.parser = MultipartParser(self.log, self.sr, self.headers)
@@ -581,15 +606,18 @@ class HttpCli(object):
|
|||||||
if sub:
|
if sub:
|
||||||
try:
|
try:
|
||||||
dst = os.path.join(vfs.realpath, rem)
|
dst = os.path.join(vfs.realpath, rem)
|
||||||
os.makedirs(fsenc(dst))
|
if not os.path.isdir(fsenc(dst)):
|
||||||
|
os.makedirs(fsenc(dst))
|
||||||
except OSError as ex:
|
except OSError as ex:
|
||||||
if ex.errno == 13:
|
self.log("makedirs failed [{}]".format(dst))
|
||||||
raise Pebkac(500, "the server OS denied write-access")
|
if not os.path.isdir(fsenc(dst)):
|
||||||
|
if ex.errno == 13:
|
||||||
|
raise Pebkac(500, "the server OS denied write-access")
|
||||||
|
|
||||||
if ex.errno == 17:
|
if ex.errno == 17:
|
||||||
raise Pebkac(400, "some file got your folder name")
|
raise Pebkac(400, "some file got your folder name")
|
||||||
|
|
||||||
raise Pebkac(500, min_ex())
|
raise Pebkac(500, min_ex())
|
||||||
except:
|
except:
|
||||||
raise Pebkac(500, min_ex())
|
raise Pebkac(500, min_ex())
|
||||||
|
|
||||||
@@ -621,7 +649,7 @@ class HttpCli(object):
|
|||||||
penalty = 0.7
|
penalty = 0.7
|
||||||
t_idle = t0 - idx.p_end
|
t_idle = t0 - idx.p_end
|
||||||
if idx.p_dur > 0.7 and t_idle < penalty:
|
if idx.p_dur > 0.7 and t_idle < penalty:
|
||||||
m = "rate-limit ({:.1f} sec), cost {:.2f}, idle {:.2f}"
|
m = "rate-limit {:.1f} sec, cost {:.2f}, idle {:.2f}"
|
||||||
raise Pebkac(429, m.format(penalty, idx.p_dur, t_idle))
|
raise Pebkac(429, m.format(penalty, idx.p_dur, t_idle))
|
||||||
|
|
||||||
if "srch" in body:
|
if "srch" in body:
|
||||||
@@ -687,7 +715,7 @@ class HttpCli(object):
|
|||||||
|
|
||||||
with open(fsenc(path), "rb+", 512 * 1024) as f:
|
with open(fsenc(path), "rb+", 512 * 1024) as f:
|
||||||
f.seek(cstart[0])
|
f.seek(cstart[0])
|
||||||
post_sz, _, sha_b64 = hashcopy(self.conn, reader, f)
|
post_sz, _, sha_b64 = hashcopy(reader, f)
|
||||||
|
|
||||||
if sha_b64 != chash:
|
if sha_b64 != chash:
|
||||||
raise Pebkac(
|
raise Pebkac(
|
||||||
@@ -741,6 +769,12 @@ class HttpCli(object):
|
|||||||
pwd = self.parser.require("cppwd", 64)
|
pwd = self.parser.require("cppwd", 64)
|
||||||
self.parser.drop()
|
self.parser.drop()
|
||||||
|
|
||||||
|
ck, msg = self.get_pwd_cookie(pwd)
|
||||||
|
html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
|
||||||
|
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
|
||||||
|
return True
|
||||||
|
|
||||||
|
def get_pwd_cookie(self, pwd):
|
||||||
if pwd in self.asrv.iuser:
|
if pwd in self.asrv.iuser:
|
||||||
msg = "login ok"
|
msg = "login ok"
|
||||||
dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365)
|
dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365)
|
||||||
@@ -751,9 +785,7 @@ class HttpCli(object):
|
|||||||
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
|
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
|
||||||
|
|
||||||
ck = "cppwd={}; Path=/; Expires={}; SameSite=Lax".format(pwd, exp)
|
ck = "cppwd={}; Path=/; Expires={}; SameSite=Lax".format(pwd, exp)
|
||||||
html = self.j2("msg", h1=msg, h2='<a href="/">ack</a>', redir="/")
|
return [ck, msg]
|
||||||
self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
|
|
||||||
return True
|
|
||||||
|
|
||||||
def handle_mkdir(self):
|
def handle_mkdir(self):
|
||||||
new_dir = self.parser.require("name", 512)
|
new_dir = self.parser.require("name", 512)
|
||||||
@@ -763,7 +795,7 @@ class HttpCli(object):
|
|||||||
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
|
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
|
||||||
self._assert_safe_rem(rem)
|
self._assert_safe_rem(rem)
|
||||||
|
|
||||||
sanitized = sanitize_fn(new_dir)
|
sanitized = sanitize_fn(new_dir, "", [])
|
||||||
|
|
||||||
if not nullwrite:
|
if not nullwrite:
|
||||||
fdir = os.path.join(vfs.realpath, rem)
|
fdir = os.path.join(vfs.realpath, rem)
|
||||||
@@ -800,7 +832,7 @@ class HttpCli(object):
|
|||||||
if not new_file.endswith(".md"):
|
if not new_file.endswith(".md"):
|
||||||
new_file += ".md"
|
new_file += ".md"
|
||||||
|
|
||||||
sanitized = sanitize_fn(new_file)
|
sanitized = sanitize_fn(new_file, "", [])
|
||||||
|
|
||||||
if not nullwrite:
|
if not nullwrite:
|
||||||
fdir = os.path.join(vfs.realpath, rem)
|
fdir = os.path.join(vfs.realpath, rem)
|
||||||
@@ -833,7 +865,7 @@ class HttpCli(object):
|
|||||||
if p_file and not nullwrite:
|
if p_file and not nullwrite:
|
||||||
fdir = os.path.join(vfs.realpath, rem)
|
fdir = os.path.join(vfs.realpath, rem)
|
||||||
fname = sanitize_fn(
|
fname = sanitize_fn(
|
||||||
p_file, bad=[".prologue.html", ".epilogue.html"]
|
p_file, "", [".prologue.html", ".epilogue.html"]
|
||||||
)
|
)
|
||||||
|
|
||||||
if not os.path.isdir(fsenc(fdir)):
|
if not os.path.isdir(fsenc(fdir)):
|
||||||
@@ -850,7 +882,7 @@ class HttpCli(object):
|
|||||||
with ren_open(fname, "wb", 512 * 1024, **open_args) as f:
|
with ren_open(fname, "wb", 512 * 1024, **open_args) as f:
|
||||||
f, fname = f["orz"]
|
f, fname = f["orz"]
|
||||||
self.log("writing to {}/{}".format(fdir, fname))
|
self.log("writing to {}/{}".format(fdir, fname))
|
||||||
sz, sha512_hex, _ = hashcopy(self.conn, p_data, f)
|
sz, sha512_hex, _ = hashcopy(p_data, f)
|
||||||
if sz == 0:
|
if sz == 0:
|
||||||
raise Pebkac(400, "empty files in post")
|
raise Pebkac(400, "empty files in post")
|
||||||
|
|
||||||
@@ -1033,7 +1065,7 @@ class HttpCli(object):
|
|||||||
raise Pebkac(400, "expected body, got {}".format(p_field))
|
raise Pebkac(400, "expected body, got {}".format(p_field))
|
||||||
|
|
||||||
with open(fsenc(fp), "wb", 512 * 1024) as f:
|
with open(fsenc(fp), "wb", 512 * 1024) as f:
|
||||||
sz, sha512, _ = hashcopy(self.conn, p_data, f)
|
sz, sha512, _ = hashcopy(p_data, f)
|
||||||
|
|
||||||
new_lastmod = os.stat(fsenc(fp)).st_mtime
|
new_lastmod = os.stat(fsenc(fp)).st_mtime
|
||||||
new_lastmod3 = int(new_lastmod * 1000)
|
new_lastmod3 = int(new_lastmod * 1000)
|
||||||
@@ -1223,8 +1255,7 @@ class HttpCli(object):
|
|||||||
if use_sendfile:
|
if use_sendfile:
|
||||||
remains = sendfile_kern(lower, upper, f, self.s)
|
remains = sendfile_kern(lower, upper, f, self.s)
|
||||||
else:
|
else:
|
||||||
actor = self.conn if self.is_mp else None
|
remains = sendfile_py(lower, upper, f, self.s)
|
||||||
remains = sendfile_py(lower, upper, f, self.s, actor)
|
|
||||||
|
|
||||||
if remains > 0:
|
if remains > 0:
|
||||||
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
|
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
|
||||||
@@ -1281,7 +1312,7 @@ class HttpCli(object):
|
|||||||
|
|
||||||
fgen = vn.zipgen(rem, items, self.uname, dots, not self.args.no_scandir)
|
fgen = vn.zipgen(rem, items, self.uname, dots, not self.args.no_scandir)
|
||||||
# for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]}))
|
# for f in fgen: print(repr({k: f[k] for k in ["vp", "ap"]}))
|
||||||
bgen = packer(fgen, utf8="utf" in uarg, pre_crc="crc" in uarg)
|
bgen = packer(self.log, fgen, utf8="utf" in uarg, pre_crc="crc" in uarg)
|
||||||
bsent = 0
|
bsent = 0
|
||||||
for buf in bgen.gen():
|
for buf in bgen.gen():
|
||||||
if not buf:
|
if not buf:
|
||||||
@@ -1303,7 +1334,7 @@ class HttpCli(object):
|
|||||||
ext = "folder"
|
ext = "folder"
|
||||||
exact = True
|
exact = True
|
||||||
|
|
||||||
bad = re.compile(r"[](){}/[]|^[0-9_-]*$")
|
bad = re.compile(r"[](){}/ []|^[0-9_-]*$")
|
||||||
n = ext.split(".")[::-1]
|
n = ext.split(".")[::-1]
|
||||||
if not exact:
|
if not exact:
|
||||||
n = n[:-1]
|
n = n[:-1]
|
||||||
@@ -1345,7 +1376,7 @@ class HttpCli(object):
|
|||||||
for c, v in [[b"&", 4], [b"<", 3], [b">", 3]]:
|
for c, v in [[b"&", 4], [b"<", 3], [b">", 3]]:
|
||||||
sz_md += (len(buf) - len(buf.replace(c, b""))) * v
|
sz_md += (len(buf) - len(buf.replace(c, b""))) * v
|
||||||
|
|
||||||
file_ts = max(ts_md, ts_html)
|
file_ts = max(ts_md, ts_html, E.t0)
|
||||||
file_lastmod, do_send = self._chk_lastmod(file_ts)
|
file_lastmod, do_send = self._chk_lastmod(file_ts)
|
||||||
self.out_headers["Last-Modified"] = file_lastmod
|
self.out_headers["Last-Modified"] = file_lastmod
|
||||||
self.out_headers.update(NO_CACHE)
|
self.out_headers.update(NO_CACHE)
|
||||||
@@ -1359,6 +1390,7 @@ class HttpCli(object):
|
|||||||
"md_plug": "true" if self.args.emp else "false",
|
"md_plug": "true" if self.args.emp else "false",
|
||||||
"md_chk_rate": self.args.mcr,
|
"md_chk_rate": self.args.mcr,
|
||||||
"md": boundary,
|
"md": boundary,
|
||||||
|
"ts": self.conn.hsrv.cachebuster(),
|
||||||
}
|
}
|
||||||
html = template.render(**targs).encode("utf-8", "replace")
|
html = template.render(**targs).encode("utf-8", "replace")
|
||||||
html = html.split(boundary.encode("utf-8"))
|
html = html.split(boundary.encode("utf-8"))
|
||||||
@@ -1391,7 +1423,7 @@ class HttpCli(object):
|
|||||||
return True
|
return True
|
||||||
|
|
||||||
def tx_mounts(self):
|
def tx_mounts(self):
|
||||||
suf = self.urlq(rm=["h"])
|
suf = self.urlq({}, ["h"])
|
||||||
rvol, wvol, avol = [
|
rvol, wvol, avol = [
|
||||||
[("/" + x).rstrip("/") + "/" for x in y]
|
[("/" + x).rstrip("/") + "/" for x in y]
|
||||||
for y in [self.rvol, self.wvol, self.avol]
|
for y in [self.rvol, self.wvol, self.avol]
|
||||||
@@ -1441,7 +1473,7 @@ class HttpCli(object):
|
|||||||
raise Pebkac(500, x)
|
raise Pebkac(500, x)
|
||||||
|
|
||||||
def tx_stack(self):
|
def tx_stack(self):
|
||||||
if not self.readable or not self.writable:
|
if not self.avol:
|
||||||
raise Pebkac(403, "not admin")
|
raise Pebkac(403, "not admin")
|
||||||
|
|
||||||
if self.args.no_stack:
|
if self.args.no_stack:
|
||||||
@@ -1531,14 +1563,16 @@ class HttpCli(object):
|
|||||||
raise Pebkac(404)
|
raise Pebkac(404)
|
||||||
|
|
||||||
if self.readable:
|
if self.readable:
|
||||||
if rem.startswith(".hist/up2k."):
|
if rem.startswith(".hist/up2k.") or (
|
||||||
|
rem.endswith("/dir.txt") and rem.startswith(".hist/th/")
|
||||||
|
):
|
||||||
raise Pebkac(403)
|
raise Pebkac(403)
|
||||||
|
|
||||||
is_dir = stat.S_ISDIR(st.st_mode)
|
is_dir = stat.S_ISDIR(st.st_mode)
|
||||||
th_fmt = self.uparam.get("th")
|
th_fmt = self.uparam.get("th")
|
||||||
if th_fmt is not None:
|
if th_fmt is not None:
|
||||||
if is_dir:
|
if is_dir:
|
||||||
for fn in ["folder.png", "folder.jpg"]:
|
for fn in self.args.th_covers.split(","):
|
||||||
fp = os.path.join(abspath, fn)
|
fp = os.path.join(abspath, fn)
|
||||||
if os.path.exists(fp):
|
if os.path.exists(fp):
|
||||||
vrem = "{}/{}".format(vrem.rstrip("/"), fn)
|
vrem = "{}/{}".format(vrem.rstrip("/"), fn)
|
||||||
@@ -1600,9 +1634,8 @@ class HttpCli(object):
|
|||||||
if self.writable:
|
if self.writable:
|
||||||
perms.append("write")
|
perms.append("write")
|
||||||
|
|
||||||
url_suf = self.urlq()
|
url_suf = self.urlq({}, [])
|
||||||
is_ls = "ls" in self.uparam
|
is_ls = "ls" in self.uparam
|
||||||
ts = "" # "?{}".format(time.time())
|
|
||||||
|
|
||||||
tpl = "browser"
|
tpl = "browser"
|
||||||
if "b" in self.uparam:
|
if "b" in self.uparam:
|
||||||
@@ -1627,7 +1660,6 @@ class HttpCli(object):
|
|||||||
"vdir": quotep(self.vpath),
|
"vdir": quotep(self.vpath),
|
||||||
"vpnodes": vpnodes,
|
"vpnodes": vpnodes,
|
||||||
"files": [],
|
"files": [],
|
||||||
"ts": ts,
|
|
||||||
"perms": json.dumps(perms),
|
"perms": json.dumps(perms),
|
||||||
"taglist": [],
|
"taglist": [],
|
||||||
"tag_order": [],
|
"tag_order": [],
|
||||||
@@ -1763,28 +1795,44 @@ class HttpCli(object):
|
|||||||
fn = f["name"]
|
fn = f["name"]
|
||||||
rd = f["rd"]
|
rd = f["rd"]
|
||||||
del f["rd"]
|
del f["rd"]
|
||||||
if icur:
|
if not icur:
|
||||||
if vn != dbv:
|
break
|
||||||
_, rd = vn.get_dbv(rd)
|
|
||||||
|
if vn != dbv:
|
||||||
|
_, rd = vn.get_dbv(rd)
|
||||||
|
|
||||||
|
q = "select w from up where rd = ? and fn = ?"
|
||||||
|
r = None
|
||||||
|
try:
|
||||||
|
r = icur.execute(q, (rd, fn)).fetchone()
|
||||||
|
except Exception as ex:
|
||||||
|
if "database is locked" in str(ex):
|
||||||
|
break
|
||||||
|
|
||||||
q = "select w from up where rd = ? and fn = ?"
|
|
||||||
try:
|
try:
|
||||||
r = icur.execute(q, (rd, fn)).fetchone()
|
|
||||||
except:
|
|
||||||
args = s3enc(idx.mem_cur, rd, fn)
|
args = s3enc(idx.mem_cur, rd, fn)
|
||||||
r = icur.execute(q, args).fetchone()
|
r = icur.execute(q, args).fetchone()
|
||||||
|
except:
|
||||||
|
m = "tag list error, {}/{}\n{}"
|
||||||
|
self.log(m.format(rd, fn, min_ex()))
|
||||||
|
break
|
||||||
|
|
||||||
tags = {}
|
tags = {}
|
||||||
f["tags"] = tags
|
f["tags"] = tags
|
||||||
|
|
||||||
if not r:
|
if not r:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
w = r[0][:16]
|
w = r[0][:16]
|
||||||
q = "select k, v from mt where w = ? and k != 'x'"
|
q = "select k, v from mt where w = ? and k != 'x'"
|
||||||
|
try:
|
||||||
for k, v in icur.execute(q, (w,)):
|
for k, v in icur.execute(q, (w,)):
|
||||||
taglist[k] = True
|
taglist[k] = True
|
||||||
tags[k] = v
|
tags[k] = v
|
||||||
|
except:
|
||||||
|
m = "tag read error, {}/{} [{}]:\n{}"
|
||||||
|
self.log(m.format(rd, fn, w, min_ex()))
|
||||||
|
break
|
||||||
|
|
||||||
if icur:
|
if icur:
|
||||||
taglist = [k for k in vn.flags.get("mte", "").split(",") if k in taglist]
|
taglist = [k for k in vn.flags.get("mte", "").split(",") if k in taglist]
|
||||||
|
|||||||
@@ -3,7 +3,6 @@ from __future__ import print_function, unicode_literals

import re
import os
-import sys
import time
import socket

@@ -35,7 +34,6 @@ class HttpConn(object):

self.args = hsrv.args
self.asrv = hsrv.asrv
-self.is_mp = hsrv.is_mp
self.cert_path = hsrv.cert_path

enth = HAVE_PIL and not self.args.no_thumb

@@ -44,8 +42,8 @@ class HttpConn(object):

self.t0 = time.time()
self.stopping = False
+self.nreq = 0
self.nbyte = 0
-self.workload = 0
self.u2idx = None
self.log_func = hsrv.log
self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None

@@ -172,7 +170,7 @@ class HttpConn(object):
self.log("client rejected our certificate (nice)")

elif "ALERT_CERTIFICATE_UNKNOWN" in em:
-# chrome-android keeps doing this
+# android-chrome keeps doing this
pass

else:

@@ -184,11 +182,7 @@ class HttpConn(object):
self.sr = Unrecv(self.s)

while not self.stopping:
-if self.is_mp:
+self.nreq += 1
-self.workload += 50
-if self.workload >= 2 ** 31:
-self.workload = 100

cli = HttpCli(self)
if not cli.run():
return

@@ -4,6 +4,8 @@ from __future__ import print_function, unicode_literals
import os
import sys
import time
+import math
+import base64
import socket
import threading

@@ -24,10 +26,15 @@ except ImportError:
)
sys.exit(1)

-from .__init__ import E, MACOS
+from .__init__ import E, PY2, MACOS
-from .authsrv import AuthSrv
+from .util import spack, min_ex, start_stackmon, start_log_thrs
from .httpconn import HttpConn

+if PY2:
+import Queue as queue
+else:
+import queue
+

class HttpSrv(object):
"""

@@ -35,19 +42,28 @@ class HttpSrv(object):
relying on MpSrv for performance (HttpSrv is just plain threads)
"""

-def __init__(self, broker, is_mp=False):
+def __init__(self, broker, nid):
self.broker = broker
-self.is_mp = is_mp
+self.nid = nid
self.args = broker.args
self.log = broker.log
self.asrv = broker.asrv

-self.disconnect_func = None
+self.name = "httpsrv" + ("-n{}-i{:x}".format(nid, os.getpid()) if nid else "")
self.mutex = threading.Lock()
+self.stopping = False

-self.clients = {}
+self.tp_nthr = 0 # actual
-self.workload = 0
+self.tp_ncli = 0 # fading
-self.workload_thr_alive = False
+self.tp_time = None # latest worker collect
+self.tp_q = None if self.args.no_htp else queue.LifoQueue()
+
+self.srvs = []
+self.ncli = 0 # exact
+self.clients = {} # laggy
+self.nclimax = 0
+self.cb_ts = 0
+self.cb_v = 0

env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))

@@ -62,24 +78,155 @@ class HttpSrv(object):
else:
self.cert_path = None

+if self.tp_q:
+self.start_threads(4)
+
+name = "httpsrv-scaler" + ("-{}".format(nid) if nid else "")
+t = threading.Thread(target=self.thr_scaler, name=name)
+t.daemon = True
+t.start()
+
+if nid:
+if self.args.stackmon:
+start_stackmon(self.args.stackmon, nid)
+
+if self.args.log_thrs:
+start_log_thrs(self.log, self.args.log_thrs, nid)
+
+def start_threads(self, n):
+self.tp_nthr += n
+if self.args.log_htp:
+self.log(self.name, "workers += {} = {}".format(n, self.tp_nthr), 6)
+
+for _ in range(n):
+thr = threading.Thread(
+target=self.thr_poolw,
+name=self.name + "-poolw",
+)
+thr.daemon = True
+thr.start()
+
+def stop_threads(self, n):
+self.tp_nthr -= n
+if self.args.log_htp:
+self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6)
+
+for _ in range(n):
+self.tp_q.put(None)
+
+def thr_scaler(self):
+while True:
+time.sleep(2 if self.tp_ncli else 30)
+with self.mutex:
+self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
+if self.tp_nthr > self.tp_ncli + 8:
+self.stop_threads(4)
+
+def listen(self, sck, nlisteners):
+ip, port = sck.getsockname()
+self.srvs.append(sck)
+self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners)
+t = threading.Thread(
+target=self.thr_listen,
+args=(sck,),
+name="httpsrv-n{}-listen-{}-{}".format(self.nid or "0", ip, port),
+)
+t.daemon = True
+t.start()
+
+def thr_listen(self, srv_sck):
+"""listens on a shared tcp server"""
+ip, port = srv_sck.getsockname()
+fno = srv_sck.fileno()
+msg = "subscribed @ {}:{} f{}".format(ip, port, fno)
+self.log(self.name, msg)
+while not self.stopping:
+if self.args.log_conn:
+self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30")
+
+if self.ncli >= self.nclimax:
+self.log(self.name, "at connection limit; waiting", 3)
+while self.ncli >= self.nclimax:
+time.sleep(0.1)
+
+if self.args.log_conn:
+self.log(self.name, "|%sC-acc1" % ("-" * 2,), c="1;30")
+
+try:
+sck, addr = srv_sck.accept()
+except (OSError, socket.error) as ex:
+self.log(self.name, "accept({}): {}".format(fno, ex), c=6)
+time.sleep(0.02)
+continue
+
+if self.args.log_conn:
+m = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
+"-" * 3, ip, port % 8, port
+)
+self.log("%s %s" % addr, m, c="1;30")
+
+self.accept(sck, addr)
+
def accept(self, sck, addr):
"""takes an incoming tcp connection and creates a thread to handle it"""
-if self.args.log_conn:
+now = time.time()
-self.log("%s %s" % addr, "|%sC-cthr" % ("-" * 5,), c="1;30")
+if now - (self.tp_time or now) > 300:
+self.tp_q = None
+
+if self.tp_q:
+self.tp_q.put((sck, addr))
+with self.mutex:
+self.ncli += 1
+self.tp_time = self.tp_time or now
+self.tp_ncli = max(self.tp_ncli, self.ncli + 1)
+if self.tp_nthr < self.ncli + 4:
+self.start_threads(8)
+return
+
+if not self.args.no_htp:
+m = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n"
+self.log(self.name, m, 1)
+
+with self.mutex:
+self.ncli += 1
+
thr = threading.Thread(
target=self.thr_client,
args=(sck, addr),
-name="httpsrv-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]),
+name="httpconn-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]),
)
thr.daemon = True
thr.start()

-def num_clients(self):
+def thr_poolw(self):
-with self.mutex:
+while True:
-return len(self.clients)
+task = self.tp_q.get()
+if not task:
+break
+
+with self.mutex:
+self.tp_time = None
+
+try:
+sck, addr = task
+me = threading.current_thread()
+me.name = "httpconn-{}-{}".format(
+addr[0].split(".", 2)[-1][-6:], addr[1]
+)
+self.thr_client(sck, addr)
+me.name = self.name + "-poolw"
+except:
+self.log(self.name, "thr_client: " + min_ex(), 3)

def shutdown(self):
+self.stopping = True
+for srv in self.srvs:
+try:
+srv.close()
+except:
+pass
+
clients = list(self.clients.keys())
for cli in clients:
try:

@@ -87,7 +234,14 @@ class HttpSrv(object):
except:
pass

-self.log("httpsrv-n", "ok bye")
+if self.tp_q:
+self.stop_threads(self.tp_nthr)
+for _ in range(10):
+time.sleep(0.05)
+if self.tp_q.empty():
+break
+
+self.log(self.name, "ok bye")

def thr_client(self, sck, addr):
"""thread managing one tcp client"""

@@ -97,25 +251,15 @@ class HttpSrv(object):
with self.mutex:
self.clients[cli] = 0

-if self.is_mp:
-self.workload += 50
-if not self.workload_thr_alive:
-self.workload_thr_alive = True
-thr = threading.Thread(
-target=self.thr_workload, name="httpsrv-workload"
-)
-thr.daemon = True
-thr.start()
-
fno = sck.fileno()
try:
if self.args.log_conn:
-self.log("%s %s" % addr, "|%sC-crun" % ("-" * 6,), c="1;30")
+self.log("%s %s" % addr, "|%sC-crun" % ("-" * 4,), c="1;30")

cli.run()

except (OSError, socket.error) as ex:
-if ex.errno not in [10038, 10054, 107, 57, 9]:
+if ex.errno not in [10038, 10054, 107, 57, 49, 9]:
self.log(
"%s %s" % addr,
"run({}): {}".format(fno, ex),

@@ -125,7 +269,7 @@ class HttpSrv(object):
finally:
sck = cli.s
if self.args.log_conn:
-self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 7,), c="1;30")
+self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 5,), c="1;30")

try:
fno = sck.fileno()

@@ -138,42 +282,37 @@ class HttpSrv(object):
"shut({}): {}".format(fno, ex),
c="1;30",
)
-if ex.errno not in [10038, 10054, 107, 57, 9]:
+if ex.errno not in [10038, 10054, 107, 57, 49, 9]:
# 10038 No longer considered a socket
# 10054 Foribly closed by remote
# 107 Transport endpoint not connected
# 57 Socket is not connected
+# 49 Can't assign requested address (wifi down)
# 9 Bad file descriptor
raise
finally:
with self.mutex:
del self.clients[cli]
+self.ncli -= 1

-if self.disconnect_func:
+def cachebuster(self):
-self.disconnect_func(addr) # pylint: disable=not-callable
+if time.time() - self.cb_ts < 1:
+return self.cb_v

-def thr_workload(self):
+with self.mutex:
-"""indicates the python interpreter workload caused by this HttpSrv"""
+if time.time() - self.cb_ts < 1:
-# avoid locking in extract_filedata by tracking difference here
+return self.cb_v
-while True:
-time.sleep(0.2)
-with self.mutex:
-if not self.clients:
-# no clients rn, termiante thread
-self.workload_thr_alive = False
-self.workload = 0
-return

-total = 0
+v = E.t0
-with self.mutex:
+try:
-for cli in self.clients.keys():
+with os.scandir(os.path.join(E.mod, "web")) as dh:
-now = cli.workload
+for fh in dh:
-delta = now - self.clients[cli]
+inf = fh.stat(follow_symlinks=False)
-if delta < 0:
+v = max(v, inf.st_mtime)
-# was reset in HttpCli to prevent overflow
+except:
-delta = now
+pass

-total += delta
+v = base64.urlsafe_b64encode(spack(b">xxL", int(v)))
-self.clients[cli] = now
+self.cb_v = v.decode("ascii")[-4:]
+self.cb_ts = time.time()
-self.workload = total
+return self.cb_v

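For reference, a rough standalone sketch of the cache-buster idea added above: take the newest mtime among the static web assets (falling back to process start time), pack it, and keep a few urlsafe-base64 characters as a query-string token. The function name and the `web_root` / `t0` parameters are illustrative, not part of the patch:

```python
import os
import base64
import struct


def cachebuster(web_root, t0):
    # newest mtime among the web assets, falling back to startup time
    v = t0
    try:
        for name in os.listdir(web_root):
            st = os.stat(os.path.join(web_root, name))
            v = max(v, st.st_mtime)
    except OSError:
        pass

    # pack as two pad bytes + big-endian u32, then keep the last
    # few urlsafe-base64 chars as a short, stable token
    tok = base64.urlsafe_b64encode(struct.pack(">xxL", int(v)))
    return tok.decode("ascii")[-4:]
```

Appending such a token to asset URLs (for example `?_={token}`) forces browsers to refetch static files whenever they change on disk, while repeated calls within the same second can be served from a small cache as the patched code does.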
@@ -7,11 +7,8 @@ import json
import shutil
import subprocess as sp

-from .__init__ import PY2, WINDOWS
+from .__init__ import PY2, WINDOWS, unicode
-from .util import fsenc, fsdec, REKOBO_LKEY
+from .util import fsenc, fsdec, uncyg, REKOBO_LKEY

-if not PY2:
-unicode = str
-

def have_ff(cmd):

@@ -44,6 +41,9 @@ class MParser(object):
while True:
try:
bp = os.path.expanduser(args)
+if WINDOWS:
+bp = uncyg(bp)
+
if os.path.exists(bp):
self.bin = bp
return

@@ -112,6 +112,19 @@ def parse_ffprobe(txt):
ret = {} # processed
md = {} # raw tags

+is_audio = fmt.get("format_name") in ["mp3", "ogg", "flac", "wav"]
+if fmt.get("filename", "").split(".")[-1].lower() in ["m4a", "aac"]:
+is_audio = True
+
+# if audio file, ensure audio stream appears first
+if (
+is_audio
+and len(streams) > 2
+and streams[1].get("codec_type") != "audio"
+and streams[2].get("codec_type") == "audio"
+):
+streams = [fmt, streams[2], streams[1]] + streams[3:]
+
have = {}
for strm in streams:
typ = strm.get("codec_type")

@@ -131,9 +144,7 @@ def parse_ffprobe(txt):
]

if typ == "video":
-if strm.get("DISPOSITION:attached_pic") == "1" or fmt.get(
+if strm.get("DISPOSITION:attached_pic") == "1" or is_audio:
-"format_name"
-) in ["mp3", "ogg", "flac"]:
continue

kvm = [

@@ -177,7 +188,7 @@ def parse_ffprobe(txt):

k = k[4:].strip()
v = v.strip()
-if k and v:
+if k and v and k not in md:
md[k] = [v]

for k in [".q", ".vq", ".aq"]:

@@ -216,37 +227,47 @@ def parse_ffprobe(txt):
class MTag(object):
def __init__(self, log_func, args):
self.log_func = log_func
+self.args = args
self.usable = True
-self.prefer_mt = False
+self.prefer_mt = not args.no_mtag_ff
-mappings = args.mtm
self.backend = "ffprobe" if args.no_mutagen else "mutagen"
-or_ffprobe = " or ffprobe"
+self.can_ffprobe = (
+HAVE_FFPROBE
+and not args.no_mtag_ff
+and (not WINDOWS or sys.version_info >= (3, 8))
+)
+mappings = args.mtm
+or_ffprobe = " or FFprobe"

if self.backend == "mutagen":
self.get = self.get_mutagen
try:
import mutagen
except:
-self.log("could not load mutagen, trying ffprobe instead", c=3)
+self.log("could not load Mutagen, trying FFprobe instead", c=3)
self.backend = "ffprobe"

if self.backend == "ffprobe":
+self.usable = self.can_ffprobe
self.get = self.get_ffprobe
self.prefer_mt = True
-# about 20x slower
-self.usable = HAVE_FFPROBE

-if self.usable and WINDOWS and sys.version_info < (3, 8):
+if not HAVE_FFPROBE:
-self.usable = False
+pass

+elif args.no_mtag_ff:
+msg = "found FFprobe but it was disabled by --no-mtag-ff"
+self.log(msg, c=3)
+
+elif WINDOWS and sys.version_info < (3, 8):
or_ffprobe = " or python >= 3.8"
-msg = "found ffprobe but your python is too old; need 3.8 or newer"
+msg = "found FFprobe but your python is too old; need 3.8 or newer"
self.log(msg, c=1)

if not self.usable:
-msg = "need mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
+msg = "need Mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n"
-self.log(
+pybin = os.path.basename(sys.executable)
-msg.format(or_ffprobe, " " * 37, os.path.basename(sys.executable)), c=1
+self.log(msg.format(or_ffprobe, " " * 37, pybin), c=1)
-)
return

# https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html

@@ -376,7 +397,7 @@ class MTag(object):
v2 = r2.get(k)
if v1 == v2:
print(" ", k, v1)
-elif v1 != "0000": # ffprobe date=0
+elif v1 != "0000": # FFprobe date=0
diffs.append(k)
print(" 1", k, v1)
print(" 2", k, v2)

@@ -397,20 +418,33 @@ class MTag(object):
md = mutagen.File(fsenc(abspath), easy=True)
x = md.info.length
except Exception as ex:
-return {}
+return self.get_ffprobe(abspath) if self.can_ffprobe else {}

-ret = {}
+sz = os.path.getsize(fsenc(abspath))
-try:
+ret = {".q": [0, int((sz / md.info.length) / 128)]}
-dur = int(md.info.length)
+
+for attr, k, norm in [
+["codec", "ac", unicode],
+["channels", "chs", int],
+["sample_rate", ".hz", int],
+["bitrate", ".aq", int],
+["length", ".dur", int],
+]:
try:
-q = int(md.info.bitrate / 1024)
+v = getattr(md.info, attr)
except:
-q = int((os.path.getsize(fsenc(abspath)) / dur) / 128)
+continue

-ret[".dur"] = [0, dur]
+if not v:
-ret[".q"] = [0, q]
+continue
-except:
-pass
+if k == ".aq":
+v /= 1000
+
+if k == "ac" and v.startswith("mp4a.40."):
+v = "aac"
+
+ret[k] = [0, norm(v)]

return self.normalize_tags(ret, md)

@@ -33,10 +33,11 @@ class QFile(object):
class StreamTar(object):
"""construct in-memory tar file from the given path"""

-def __init__(self, fgen, **kwargs):
+def __init__(self, log, fgen, **kwargs):
self.ci = 0
self.co = 0
self.qfile = QFile()
+self.log = log
self.fgen = fgen
self.errf = None

@@ -91,7 +92,8 @@ class StreamTar(object):
errors.append([f["vp"], repr(ex)])

if errors:
-self.errf = errdesc(errors)
+self.errf, txt = errdesc(errors)
+self.log("\n".join(([repr(self.errf)] + txt[1:])))
self.ser(self.errf)

self.tar.close()

@@ -25,4 +25,4 @@ def errdesc(errors):
"vp": "archive-errors-{}.txt".format(dt),
"ap": tf_path,
"st": os.stat(tf_path),
-}
+}, report

@@ -5,12 +5,16 @@ import re
import os
import sys
import time
+import shlex
+import string
+import signal
+import socket
import threading
from datetime import datetime, timedelta
import calendar

-from .__init__ import PY2, WINDOWS, MACOS, VT100
+from .__init__ import E, PY2, WINDOWS, MACOS, VT100, unicode
-from .util import mp
+from .util import mp, start_log_thrs, start_stackmon, min_ex
from .authsrv import AuthSrv
from .tcpsrv import TcpSrv
from .up2k import Up2k

@@ -28,14 +32,27 @@ class SvcHub(object):
put() can return a queue (if want_reply=True) which has a blocking get() with the response.
"""

-def __init__(self, args):
+def __init__(self, args, argv, printed):
self.args = args
+self.argv = argv
+self.logf = None
+self.stop_req = False
+self.stopping = False
+self.stop_cond = threading.Condition()
+
self.ansi_re = re.compile("\033\\[[^m]*m")
self.log_mutex = threading.Lock()
self.next_day = 0

self.log = self._log_disabled if args.q else self._log_enabled
+if args.lo:
+self._setup_logfile(printed)
+
+if args.stackmon:
+start_stackmon(args.stackmon, 0)
+
+if args.log_thrs:
+start_log_thrs(self.log, args.log_thrs, 0)
+
# initiate all services to manage
self.asrv = AuthSrv(self.args, self.log, False)

@@ -69,22 +86,102 @@ class SvcHub(object):

self.broker = Broker(self)

+def _logname(self):
+dt = datetime.utcfromtimestamp(time.time())
+fn = self.args.lo
+for fs in "YmdHMS":
+fs = "%" + fs
+if fs in fn:
+fn = fn.replace(fs, dt.strftime(fs))
+
+return fn
+
+def _setup_logfile(self, printed):
+base_fn = fn = sel_fn = self._logname()
+if fn != self.args.lo:
+ctr = 0
+# yup this is a race; if started sufficiently concurrently, two
+# copyparties can grab the same logfile (considered and ignored)
+while os.path.exists(sel_fn):
+ctr += 1
+sel_fn = "{}.{}".format(fn, ctr)
+
+fn = sel_fn
+
+try:
+import lzma
+
+lh = lzma.open(fn, "wt", encoding="utf-8", errors="replace", preset=0)
+
+except:
+import codecs
+
+lh = codecs.open(fn, "w", encoding="utf-8", errors="replace")
+
+lh.base_fn = base_fn
+
+argv = [sys.executable] + self.argv
+if hasattr(shlex, "quote"):
+argv = [shlex.quote(x) for x in argv]
+else:
+argv = ['"{}"'.format(x) for x in argv]
+
+msg = "[+] opened logfile [{}]\n".format(fn)
+printed += msg
+lh.write("t0: {:.3f}\nargv: {}\n\n{}".format(E.t0, " ".join(argv), printed))
+self.logf = lh
+print(msg, end="")
+
def run(self):
-thr = threading.Thread(target=self.tcpsrv.run, name="svchub-main")
+self.tcpsrv.run()
+
+thr = threading.Thread(target=self.sd_notify, name="sd-notify")
thr.daemon = True
thr.start()

-# winxp/py2.7 support: thr.join() kills signals
+thr = threading.Thread(target=self.stop_thr, name="svchub-sig")
-try:
+thr.daemon = True
-while True:
+thr.start()
-time.sleep(9001)

-except KeyboardInterrupt:
+for sig in [signal.SIGINT, signal.SIGTERM]:
+signal.signal(sig, self.signal_handler)
+
+try:
+while not self.stop_req:
+time.sleep(9001)
+except:
+pass
+
+self.shutdown()
+
+def stop_thr(self):
+while not self.stop_req:
+with self.stop_cond:
+self.stop_cond.wait(9001)
+
+self.shutdown()
+
+def signal_handler(self):
+if self.stopping:
+return
+
+self.stop_req = True
+with self.stop_cond:
+self.stop_cond.notify_all()
+
+def shutdown(self):
+if self.stopping:
+return
+
+self.stopping = True
+self.stop_req = True
+try:
with self.log_mutex:
print("OPYTHAT")

self.tcpsrv.shutdown()
self.broker.shutdown()
+self.up2k.shutdown()
if self.thumbsrv:
self.thumbsrv.shutdown()

@@ -99,9 +196,36 @@ class SvcHub(object):
print("nailed it", end="")
finally:
print("\033[0m")
+if self.logf:
+self.logf.close()

def _log_disabled(self, src, msg, c=0):
-pass
+if not self.logf:
+return
+
+with self.log_mutex:
+ts = datetime.utcfromtimestamp(time.time())
+ts = ts.strftime("%Y-%m%d-%H%M%S.%f")[:-3]
+self.logf.write("@{} [{}] {}\n".format(ts, src, msg))
+
+now = time.time()
+if now >= self.next_day:
+self._set_next_day()
+
+def _set_next_day(self):
+if self.next_day and self.logf and self.logf.base_fn != self._logname():
+self.logf.close()
+self._setup_logfile("")
+
+dt = datetime.utcfromtimestamp(time.time())
+
+# unix timestamp of next 00:00:00 (leap-seconds safe)
+day_now = dt.day
+while dt.day == day_now:
+dt += timedelta(hours=12)
+
+dt = dt.replace(hour=0, minute=0, second=0)
+self.next_day = calendar.timegm(dt.utctimetuple())
+
def _log_enabled(self, src, msg, c=0):
"""handles logging from all components"""

@@ -110,14 +234,7 @@ class SvcHub(object):
if now >= self.next_day:
dt = datetime.utcfromtimestamp(now)
print("\033[36m{}\033[0m\n".format(dt.strftime("%Y-%m-%d")), end="")
+self._set_next_day()
-# unix timestamp of next 00:00:00 (leap-seconds safe)
-day_now = dt.day
-while dt.day == day_now:
-dt += timedelta(hours=12)
-
-dt = dt.replace(hour=0, minute=0, second=0)
-self.next_day = calendar.timegm(dt.utctimetuple())

fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n"
if not VT100:

@@ -144,20 +261,20 @@ class SvcHub(object):
except:
print(msg.encode("ascii", "replace").decode(), end="")

+if self.logf:
+self.logf.write(msg)
+
def check_mp_support(self):
vmin = sys.version_info[1]
if WINDOWS:
msg = "need python 3.3 or newer for multiprocessing;"
-if PY2:
+if PY2 or vmin < 3:
-# py2 pickler doesn't support winsock
-return msg
-elif vmin < 3:
return msg
elif MACOS:
return "multiprocessing is wonky on mac osx;"
else:
-msg = "need python 2.7 or 3.3+ for multiprocessing;"
+msg = "need python 3.3+ for multiprocessing;"
-if not PY2 and vmin < 3:
+if PY2 or vmin < 3:
return msg

try:

@@ -189,5 +306,24 @@ class SvcHub(object):
if not err:
return True
else:
-self.log("root", err)
+self.log("svchub", err)
return False

+def sd_notify(self):
+try:
+addr = os.getenv("NOTIFY_SOCKET")
+if not addr:
+return
+
+addr = unicode(addr)
+if addr.startswith("@"):
+addr = "\0" + addr[1:]
+
+m = "".join(x for x in addr if x in string.printable)
+self.log("sd_notify", m)
+
+sck = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
+sck.connect(addr)
+sck.sendall(b"READY=1")
+except:
+self.log("sd_notify", min_ex())

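For reference, a minimal sketch of the systemd readiness handshake that the `sd_notify` method added above performs, assuming the process runs under a `Type=notify` unit on Linux; the function name is illustrative:

```python
import os
import socket


def sd_notify_ready():
    # systemd passes the notification socket path in NOTIFY_SOCKET;
    # a leading "@" denotes an abstract unix socket (replace with NUL)
    addr = os.getenv("NOTIFY_SOCKET")
    if not addr:
        return False

    if addr.startswith("@"):
        addr = "\0" + addr[1:]

    sck = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
    try:
        sck.connect(addr)
        sck.sendall(b"READY=1")
    finally:
        sck.close()

    return True
```

Sending `READY=1` tells systemd the service has finished starting, so `systemctl start` only returns once the listeners are actually up.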
@@ -4,15 +4,14 @@ from __future__ import print_function, unicode_literals
import os
import time
import zlib
-import struct
from datetime import datetime

from .sutil import errdesc
-from .util import yieldfile, sanitize_fn
+from .util import yieldfile, sanitize_fn, spack, sunpack


def dostime2unix(buf):
-t, d = struct.unpack("<HH", buf)
+t, d = sunpack(b"<HH", buf)

ts = (t & 0x1F) * 2
tm = (t >> 5) & 0x3F

@@ -36,13 +35,13 @@ def unixtime2dos(ts):

bd = ((dy - 1980) << 9) + (dm << 5) + dd
bt = (th << 11) + (tm << 5) + ts // 2
-return struct.pack("<HH", bt, bd)
+return spack(b"<HH", bt, bd)


def gen_fdesc(sz, crc32, z64):
ret = b"\x50\x4b\x07\x08"
-fmt = "<LQQ" if z64 else "<LLL"
+fmt = b"<LQQ" if z64 else b"<LLL"
-ret += struct.pack(fmt, crc32, sz, sz)
+ret += spack(fmt, crc32, sz, sz)
return ret


@@ -66,7 +65,7 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
req_ver = b"\x2d\x00" if z64 else b"\x0a\x00"

if crc32:
-crc32 = struct.pack("<L", crc32)
+crc32 = spack(b"<L", crc32)
else:
crc32 = b"\x00" * 4

@@ -87,14 +86,14 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
# however infozip does actual sz and it even works on winxp
# (same reasning for z64 extradata later)
vsz = 0xFFFFFFFF if z64 else sz
-ret += struct.pack("<LL", vsz, vsz)
+ret += spack(b"<LL", vsz, vsz)

# windows support (the "?" replace below too)
-fn = sanitize_fn(fn, ok="/")
+fn = sanitize_fn(fn, "/", [])
bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")

z64_len = len(z64v) * 8 + 4 if z64v else 0
-ret += struct.pack("<HH", len(bfn), z64_len)
+ret += spack(b"<HH", len(bfn), z64_len)

if h_pos is not None:
# 2b comment, 2b diskno

@@ -106,12 +105,12 @@ def gen_hdr(h_pos, fn, sz, lastmod, utf8, crc32, pre_crc):
ret += b"\x01\x00\x00\x00\xa4\x81"

# 4b local-header-ofs
-ret += struct.pack("<L", min(h_pos, 0xFFFFFFFF))
+ret += spack(b"<L", min(h_pos, 0xFFFFFFFF))

ret += bfn

if z64v:
-ret += struct.pack("<HH" + "Q" * len(z64v), 1, len(z64v) * 8, *z64v)
+ret += spack(b"<HH" + b"Q" * len(z64v), 1, len(z64v) * 8, *z64v)

return ret

@@ -136,7 +135,7 @@ def gen_ecdr(items, cdir_pos, cdir_end):
need_64 = nitems == 0xFFFF or 0xFFFFFFFF in [csz, cpos]

# 2b tnfiles, 2b dnfiles, 4b dir sz, 4b dir pos
-ret += struct.pack("<HHLL", nitems, nitems, csz, cpos)
+ret += spack(b"<HHLL", nitems, nitems, csz, cpos)

# 2b comment length
ret += b"\x00\x00"

@@ -163,7 +162,7 @@ def gen_ecdr64(items, cdir_pos, cdir_end):

# 8b tnfiles, 8b dnfiles, 8b dir sz, 8b dir pos
cdir_sz = cdir_end - cdir_pos
-ret += struct.pack("<QQQQ", len(items), len(items), cdir_sz, cdir_pos)
+ret += spack(b"<QQQQ", len(items), len(items), cdir_sz, cdir_pos)

return ret

@@ -178,13 +177,14 @@ def gen_ecdr64_loc(ecdr64_pos):
ret = b"\x50\x4b\x06\x07"

# 4b cdisk, 8b start of ecdr64, 4b ndisks
-ret += struct.pack("<LQL", 0, ecdr64_pos, 1)
+ret += spack(b"<LQL", 0, ecdr64_pos, 1)

return ret


class StreamZip(object):
-def __init__(self, fgen, utf8=False, pre_crc=False):
+def __init__(self, log, fgen, utf8=False, pre_crc=False):
+self.log = log
self.fgen = fgen
self.utf8 = utf8
self.pre_crc = pre_crc

@@ -247,8 +247,8 @@ class StreamZip(object):
errors.append([f["vp"], repr(ex)])

if errors:
-errf = errdesc(errors)
+errf, txt = errdesc(errors)
-print(repr(errf))
+self.log("\n".join(([repr(errf)] + txt[1:])))
for x in self.ser(errf):
yield x

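For reference, a small sketch of the DOS timestamp packing used by `unixtime2dos` above; the field math matches the patched code, while the helper name and raw `struct.pack` call (instead of the project's `spack` wrapper) are illustrative:

```python
import struct


def dos_timestamp(th, tm, ts, dy, dm, dd):
    # 16 bits of time: 5 bits hours, 6 bits minutes, 5 bits seconds/2
    bt = (th << 11) + (tm << 5) + ts // 2
    # 16 bits of date: 7 bits years-since-1980, 4 bits month, 5 bits day
    bd = ((dy - 1980) << 9) + (dm << 5) + dd
    return struct.pack("<HH", bt, bd)
```

This two-second time resolution and 1980-based year offset are what the zip local-header and central-directory records expect.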
@@ -2,11 +2,9 @@
from __future__ import print_function, unicode_literals

import re
-import time
import socket
-import select

-from .util import chkcmd, Counter
+from .util import chkcmd


class TcpSrv(object):

@@ -20,7 +18,6 @@ class TcpSrv(object):
self.args = hub.args
self.log = hub.log

-self.num_clients = Counter()
self.stopping = False

ip = "127.0.0.1"

@@ -66,37 +63,13 @@ class TcpSrv(object):
for srv in self.srv:
srv.listen(self.args.nc)
ip, port = srv.getsockname()
-self.log("tcpsrv", "listening @ {0}:{1}".format(ip, port))
+fno = srv.fileno()
+msg = "listening @ {}:{} f{}".format(ip, port, fno)
+self.log("tcpsrv", msg)
+if self.args.q:
+print(msg)
+
-while not self.stopping:
+self.hub.broker.put(False, "listen", srv)
-if self.args.log_conn:
-self.log("tcpsrv", "|%sC-ncli" % ("-" * 1,), c="1;30")
-
-if self.num_clients.v >= self.args.nc:
-time.sleep(0.1)
-continue
-
-if self.args.log_conn:
-self.log("tcpsrv", "|%sC-acc1" % ("-" * 2,), c="1;30")
-
-ready, _, _ = select.select(self.srv, [], [])
-for srv in ready:
-if self.stopping:
-break
-
-sck, addr = srv.accept()
-sip, sport = srv.getsockname()
-if self.args.log_conn:
-self.log(
-"%s %s" % addr,
-"|{}C-acc2 \033[0;36m{} \033[3{}m{}".format(
-"-" * 3, sip, sport % 8, sport
-),
-c="1;30",
-)
-
-self.num_clients.add()
-self.hub.broker.put(False, "httpconn", sck, addr)

def shutdown(self):
self.stopping = True

@@ -9,15 +9,11 @@ import hashlib
import threading
import subprocess as sp

-from .__init__ import PY2
+from .__init__ import PY2, unicode
from .util import fsenc, runcmd, Queue, Cooldown, BytesIO, min_ex
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe

-
-if not PY2:
-unicode = str
-
HAVE_PIL = False
HAVE_HEIF = False
HAVE_AVIF = False

@@ -53,7 +49,7 @@ except:
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# ffmpeg -formats
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
-FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
+FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"

if HAVE_HEIF:
FMT_PIL += " heif heifs heic heics"

@@ -84,14 +80,14 @@ def thumb_path(histpath, rem, mtime, fmt):
fn = rem

if rd:
-h = hashlib.sha512(fsenc(rd)).digest()[:24]
+h = hashlib.sha512(fsenc(rd)).digest()
b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
rd = "{}/{}/".format(b64[:2], b64[2:4]).lower() + b64
else:
rd = "top"

# could keep original filenames but this is safer re pathlen
-h = hashlib.sha512(fsenc(fn)).digest()[:24]
+h = hashlib.sha512(fsenc(fn)).digest()
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]

return "{}/th/{}/{}.{:x}.{}".format(

@@ -125,18 +121,19 @@ class ThumbSrv(object):
if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE):
missing = []
if not HAVE_FFMPEG:
-missing.append("ffmpeg")
+missing.append("FFmpeg")

if not HAVE_FFPROBE:
-missing.append("ffprobe")
+missing.append("FFprobe")

msg = "cannot create video thumbnails because some of the required programs are not available: "
msg += ", ".join(missing)
self.log(msg, c=3)

-t = threading.Thread(target=self.cleaner, name="thumb-cleaner")
+if self.args.th_clean:
-t.daemon = True
+t = threading.Thread(target=self.cleaner, name="thumb-cleaner")
-t.start()
+t.daemon = True
+t.start()

def log(self, msg, c=0):
self.log_func("thumb", msg, c)

@@ -263,7 +260,7 @@ class ThumbSrv(object):
pass # default q = 75

if im.mode not in fmts:
-print("conv {}".format(im.mode))
+# print("conv {}".format(im.mode))
im = im.convert("RGB")

im.save(tpath, quality=40, method=6)

@@ -7,6 +7,7 @@ import time
 import threading
 from datetime import datetime

+from .__init__ import unicode
 from .util import s3dec, Pebkac, min_ex
 from .up2k import up2k_wark_from_hashlist

@@ -26,7 +27,7 @@ class U2idx(object):
 self.timeout = self.args.srch_time

 if not HAVE_SQLITE3:
-self.log("could not load sqlite3; searchign wqill be disabled")
+self.log("your python does not have sqlite3; searching will be disabled")
 return

 self.cur = {}

@@ -57,6 +58,9 @@ class U2idx(object):
 raise Pebkac(500, min_ex())

 def get_cur(self, ptop):
+if not HAVE_SQLITE3:
+return None

 cur = self.cur.get(ptop)
 if cur:
 return cur

@@ -66,7 +70,7 @@ class U2idx(object):
 if not os.path.exists(db_path):
 return None

-cur = sqlite3.connect(db_path).cursor()
+cur = sqlite3.connect(db_path, 2).cursor()
 self.cur[ptop] = cur
 return cur

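Side note on the `sqlite3.connect(db_path, 2)` change above: in the stdlib sqlite3 module the second positional argument is `timeout`, i.e. how many seconds a connection waits on a locked database before raising `OperationalError` (default 5.0). A minimal illustration, with a hypothetical db path:

```python
import sqlite3

# equivalent spellings; 2 is the lock-wait timeout in seconds
cur = sqlite3.connect("up2k.db", 2).cursor()
cur = sqlite3.connect("up2k.db", timeout=2).cursor()
```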
@@ -87,6 +91,8 @@ class U2idx(object):
 mt_ctr = 0
 mt_keycmp = "substr(up.w,1,16)"
 mt_keycmp2 = None
+ptn_lc = re.compile(r" (mt[0-9]+\.v) ([=<!>]+) \? $")
+ptn_lcv = re.compile(r"[a-zA-Z]")

 while True:
 uq = uq.strip()

@@ -179,6 +185,21 @@ class U2idx(object):
 va.append(v)
 is_key = True

+# lowercase tag searches
+m = ptn_lc.search(q)
+if not m or not ptn_lcv.search(unicode(v)):
+continue

+va.pop()
+va.append(v.lower())
+q = q[: m.start()]

+field, oper = m.groups()
+if oper in ["=", "=="]:
+q += " {} like ? ".format(field)
+else:
+q += " lower({}) {} ? ".format(field, oper)

 try:
 return self.run_query(vols, joins + "where " + q, va)
 except Exception as ex:
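The block added in the hunk above rewrites the tail of the search query when the value being bound is textual: the comparison against `mtN.v` becomes case-insensitive (`like` for equality, `lower(col)` otherwise) and the bound value is lowercased. A rough standalone rendition under the same regexes, where `q` is the SQL tail being built and `va` the bind values (the sample query string is made up):

```python
import re

ptn_lc = re.compile(r" (mt[0-9]+\.v) ([=<!>]+) \? $")
ptn_lcv = re.compile(r"[a-zA-Z]")

def lowercase_tag_cmp(q, v, va):
    m = ptn_lc.search(q)
    if not m or not ptn_lcv.search(str(v)):
        return q  # numeric value or not a tag comparison; leave as-is

    va[-1] = v.lower()  # bind the lowercased value instead
    q = q[: m.start()]
    field, oper = m.groups()
    if oper in ["=", "=="]:
        q += " {} like ? ".format(field)  # case-insensitive equality
    else:
        q += " lower({}) {} ? ".format(field, oper)
    return q

va = ["Nightcore"]
print(lowercase_tag_cmp("up.w = mt1.w and mt1.v = ? ", "Nightcore", va))
# up.w = mt1.w and mt1.v like ?     (va is now ["nightcore"])
```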
@@ -30,6 +30,7 @@ from .util import (
 s3dec,
 statdir,
 s2hms,
+min_ex,
 )
 from .mtag import MTag, MParser

@@ -39,6 +40,8 @@ try:
 except:
 HAVE_SQLITE3 = False

+DB_VER = 4


 class Up2k(object):
 """

@@ -91,7 +94,7 @@ class Up2k(object):
 thr.start()

 # static
-self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
+self.r_hash = re.compile("^[0-9a-zA-Z_-]{44}$")

 if not HAVE_SQLITE3:
 self.log("could not initialize sqlite3, will use in-memory registry only")

@@ -100,13 +103,15 @@ class Up2k(object):
 self.deferred_init()
 else:
 t = threading.Thread(
-target=self.deferred_init,
-name="up2k-deferred-init",
+target=self.deferred_init, name="up2k-deferred-init", args=(0.5,)
 )
 t.daemon = True
 t.start()

-def deferred_init(self):
+def deferred_init(self, wait=0):
+if wait:
+time.sleep(wait)

 all_vols = self.asrv.vfs.all_vols
 have_e2d = self.init_indexes(all_vols)

@@ -190,7 +195,7 @@ class Up2k(object):

 return True, ret

-def init_indexes(self, all_vols, scan_vols=[]):
+def init_indexes(self, all_vols, scan_vols=None):
 self.pp = ProgressPrinter()
 vols = all_vols.values()
 t0 = time.time()

@@ -299,7 +304,7 @@ class Up2k(object):
 self.log(msg.format(len(vols), time.time() - t0))

 if needed_mutagen:
-msg = "could not read tags because no backends are available (mutagen or ffprobe)"
+msg = "could not read tags because no backends are available (Mutagen or FFprobe)"
 self.log(msg, c=1)

 thr = None

@@ -339,7 +344,15 @@ class Up2k(object):
 for k, v in flags.items()
 ]
 if a:
-self.log(" ".join(sorted(a)) + "\033[0m")
+vpath = "?"
+for k, v in self.asrv.vfs.all_vols.items():
+if v.realpath == ptop:
+vpath = k

+if vpath:
+vpath += "/"

+self.log("/{} {}".format(vpath, " ".join(sorted(a))), "35")

 reg = {}
 path = os.path.join(histpath, "up2k.snap")

@@ -398,7 +411,7 @@ class Up2k(object):
 if WINDOWS:
 excl = [x.replace("/", "\\") for x in excl]

-n_add = self._build_dir(dbw, top, set(excl), top, nohash)
+n_add = self._build_dir(dbw, top, set(excl), top, nohash, [])
 n_rm = self._drop_lost(dbw[0], top)
 if dbw[1]:
 self.log("commit {} new files".format(dbw[1]))

@@ -406,11 +419,25 @@ class Up2k(object):

 return True, n_add or n_rm or do_vac

-def _build_dir(self, dbw, top, excl, cdir, nohash):
+def _build_dir(self, dbw, top, excl, cdir, nohash, seen):
+rcdir = cdir
+if not ANYWIN:
+try:
+# a bit expensive but worth
+rcdir = os.path.realpath(cdir)
+except:
+pass

+if rcdir in seen:
+m = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}"
+self.log(m.format(seen[-1], rcdir, cdir), 3)
+return 0

+seen = seen + [cdir]
 self.pp.msg = "a{} {}".format(self.pp.n, cdir)
 histpath = self.asrv.vfs.histtab[top]
 ret = 0
-g = statdir(self.log, not self.args.no_scandir, False, cdir)
+g = statdir(self.log_func, not self.args.no_scandir, False, cdir)
 for iname, inf in sorted(g):
 abspath = os.path.join(cdir, iname)
 lmod = int(inf.st_mtime)

@@ -419,10 +446,13 @@ class Up2k(object):
 if abspath in excl or abspath == histpath:
 continue
 # self.log(" dir: {}".format(abspath))
-ret += self._build_dir(dbw, top, excl, abspath, nohash)
+ret += self._build_dir(dbw, top, excl, abspath, nohash, seen)
 else:
 # self.log("file: {}".format(abspath))
-rp = abspath[len(top) :].replace("\\", "/").strip("/")
+rp = abspath[len(top) + 1 :]
+if WINDOWS:
+rp = rp.replace("\\", "/").strip("/")

 rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp]
 sql = "select w, mt, sz from up where rd = ? and fn = ?"
 try:
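The `seen` parameter added to `_build_dir` above guards the recursive scan against symlink loops: each directory's resolved path is compared with the chain of directories already being visited, and the walk bails out instead of descending forever. A simplified sketch of the same idea, not the actual indexer (it stores resolved paths in `seen` and prints files instead of writing them to the database):

```python
import os

def walk_dirs(cdir, seen=None):
    seen = seen or []
    rcdir = os.path.realpath(cdir)  # resolve symlinks before comparing
    if rcdir in seen:
        print("bailing from symlink loop: {} -> {}".format(cdir, rcdir))
        return

    seen = seen + [rcdir]
    for name in sorted(os.listdir(cdir)):
        path = os.path.join(cdir, name)
        if os.path.isdir(path):
            walk_dirs(path, seen)
        else:
            print(path)
```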
@@ -566,7 +596,7 @@ class Up2k(object):
 c2 = conn.cursor()
 c3 = conn.cursor()
 n_left = cur.execute("select count(w) from up").fetchone()[0]
-for w, rd, fn in cur.execute("select w, rd, fn from up"):
+for w, rd, fn in cur.execute("select w, rd, fn from up order by rd, fn"):
 n_left -= 1
 q = "select w from mt where w = ?"
 if c2.execute(q, (w[:16],)).fetchone():

@@ -647,7 +677,7 @@ class Up2k(object):
 try:
 parser = MParser(parser)
 except:
-self.log("invalid argument: " + parser, 1)
+self.log("invalid argument (could not find program): " + parser, 1)
 return

 for tag in entags:

@@ -887,59 +917,31 @@ class Up2k(object):
 if not existed and ver is None:
 return self._create_db(db_path, cur)

-orig_ver = ver
-if not ver or ver < 3:
-bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
-db = cur.connection
-cur.close()
-db.close()
-msg = "creating new DB (old is bad); backup: {}"
-if ver:
-msg = "creating backup before upgrade: {}"

-self.log(msg.format(bak))
-shutil.copy2(db_path, bak)
-cur = self._orz(db_path)

-if ver == 1:
-cur = self._upgrade_v1(cur, db_path)
-if cur:
-ver = 2

-if ver == 2:
-cur = self._create_v3(cur)
-ver = self._read_ver(cur) if cur else None

-if ver == 3:
-if orig_ver != ver:
-cur.connection.commit()
-cur.execute("vacuum")
-cur.connection.commit()

+if ver == DB_VER:
 try:
 nfiles = next(cur.execute("select count(w) from up"))[0]
 self.log("OK: {} |{}|".format(db_path, nfiles))
 return cur
-except Exception as ex:
-self.log("WARN: could not list files, DB corrupt?\n " + repr(ex))
+except:
+self.log("WARN: could not list files; DB corrupt?\n" + min_ex())

-if cur:
-db = cur.connection
-cur.close()
-db.close()
+if (ver or 0) > DB_VER:
+m = "database is version {}, this copyparty only supports versions <= {}"
+raise Exception(m.format(ver, DB_VER))

+bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
+db = cur.connection
+cur.close()
+db.close()
+msg = "creating new DB (old is bad); backup: {}"
+if ver:
+msg = "creating new DB (too old to upgrade); backup: {}"

+self.log(msg.format(bak))
+os.rename(fsenc(db_path), fsenc(bak))

 return self._create_db(db_path, None)

-def _create_db(self, db_path, cur):
-if not cur:
-cur = self._orz(db_path)

-self._create_v2(cur)
-self._create_v3(cur)
-cur.connection.commit()
-self.log("created DB at {}".format(db_path))
-return cur

 def _read_ver(self, cur):
 for tab in ["ki", "kv"]:
 try:
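The rewritten DB-open logic above boils down to a three-way gate on the stored schema version: equal to `DB_VER` means use the database as-is, newer than `DB_VER` is a hard error, and anything older gets renamed to a `.bak` file so a fresh database can be created. A compact sketch of that flow, where `read_ver` and `create_db` are stand-ins for the real helpers (which are only partially shown in this diff):

```python
import os, sqlite3, time

DB_VER = 4  # same constant the diff introduces

def open_or_recreate(db_path, read_ver, create_db, log):
    cur = sqlite3.connect(db_path).cursor()
    ver = read_ver(cur)

    if ver == DB_VER:
        return cur  # schema is current; use it (sanity checks omitted here)

    if (ver or 0) > DB_VER:
        m = "database is version {}, this copyparty only supports versions <= {}"
        raise Exception(m.format(ver, DB_VER))

    # too old to upgrade in-place: move it aside and start over
    cur.connection.close()
    bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
    log("creating new DB (too old to upgrade); backup: {}".format(bak))
    os.rename(db_path, bak)
    return create_db(db_path)
```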
@@ -951,72 +953,45 @@ class Up2k(object):
 if rows:
 return int(rows[0][0])

-def _create_v2(self, cur):
-for cmd in [
-r"create table up (w text, mt int, sz int, rd text, fn text)",
-r"create index up_rd on up(rd)",
-r"create index up_fn on up(fn)",
-]:
-cur.execute(cmd)
-return cur

-def _create_v3(self, cur):
+def _create_db(self, db_path, cur):
 """
 collision in 2^(n/2) files where n = bits (6 bits/ch)
 10*6/2 = 2^30 = 1'073'741'824, 24.1mb idx 1<<(3*10)
 12*6/2 = 2^36 = 68'719'476'736, 24.8mb idx
 16*6/2 = 2^48 = 281'474'976'710'656, 26.1mb idx
 """
-for c, ks in [["drop table k", "isv"], ["drop index up_", "w"]]:
-for k in ks:
-try:
-cur.execute(c + k)
-except:
-pass
+if not cur:
+cur = self._orz(db_path)

 idx = r"create index up_w on up(substr(w,1,16))"
 if self.no_expr_idx:
 idx = r"create index up_w on up(w)"

 for cmd in [
+r"create table up (w text, mt int, sz int, rd text, fn text)",
+r"create index up_rd on up(rd)",
+r"create index up_fn on up(fn)",
 idx,
 r"create table mt (w text, k text, v int)",
 r"create index mt_w on mt(w)",
 r"create index mt_k on mt(k)",
 r"create index mt_v on mt(v)",
 r"create table kv (k text, v int)",
-r"insert into kv values ('sver', 3)",
+r"insert into kv values ('sver', {})".format(DB_VER),
 ]:
 cur.execute(cmd)

+cur.connection.commit()
+self.log("created DB at {}".format(db_path))
 return cur

-def _upgrade_v1(self, odb, db_path):
-npath = db_path + ".next"
-if os.path.exists(npath):
-os.unlink(npath)

-ndb = self._orz(npath)
-self._create_v2(ndb)

-c = odb.execute("select * from up")
-for wark, ts, sz, rp in c:
-rd, fn = rp.rsplit("/", 1) if "/" in rp else ["", rp]
-v = (wark, ts, sz, rd, fn)
-ndb.execute("insert into up values (?,?,?,?,?)", v)

-ndb.connection.commit()
-ndb.connection.close()
-odb.connection.close()
-atomic_move(npath, db_path)
-return self._orz(db_path)

 def handle_json(self, cj):
 with self.mutex:
 if not self.register_vpath(cj["ptop"], cj["vcfg"]):
 if cj["ptop"] not in self.registry:
 raise Pebkac(410, "location unavailable")

-cj["name"] = sanitize_fn(cj["name"], bad=[".prologue.html", ".epilogue.html"])
+cj["name"] = sanitize_fn(cj["name"], "", [".prologue.html", ".epilogue.html"])
 cj["poke"] = time.time()
 wark = self._get_wark(cj)
 now = time.time()

@@ -1068,7 +1043,8 @@ class Up2k(object):
 break
 except:
 # missing; restart
-job = None
+if not self.args.nw:
+job = None
 break
 else:
 # file contents match, but not the path

@@ -1095,8 +1071,9 @@ class Up2k(object):
 pdir = os.path.join(cj["ptop"], cj["prel"])
 job["name"] = self._untaken(pdir, cj["name"], now, cj["addr"])
 dst = os.path.join(job["ptop"], job["prel"], job["name"])
-os.unlink(fsenc(dst)) # TODO ed pls
-self._symlink(src, dst)
+if not self.args.nw:
+os.unlink(fsenc(dst)) # TODO ed pls
+self._symlink(src, dst)

 if not job:
 job = {

@@ -1138,6 +1115,9 @@ class Up2k(object):
 }

 def _untaken(self, fdir, fname, ts, ip):
+if self.args.nw:
+return fname

 # TODO broker which avoid this race and
 # provides a new filename if taken (same as bup)
 suffix = ".{:.6f}-{}".format(ts, ip)

@@ -1147,6 +1127,9 @@ class Up2k(object):
 def _symlink(self, src, dst):
 # TODO store this in linktab so we never delete src if there are links to it
 self.log("linking dupe:\n {0}\n {1}".format(src, dst))
+if self.args.nw:
+return

 try:
 lsrc = src
 ldst = dst

@@ -1224,6 +1207,10 @@ class Up2k(object):
 if ret > 0:
 return ret, src

+if self.args.nw:
+# del self.registry[ptop][wark]
+return ret, dst

 atomic_move(src, dst)

 if ANYWIN:

@@ -1316,9 +1303,9 @@ class Up2k(object):
 hashobj.update(buf)
 rem -= len(buf)

-digest = hashobj.digest()[:32]
+digest = hashobj.digest()[:33]
 digest = base64.urlsafe_b64encode(digest)
-ret.append(digest.decode("utf-8").rstrip("="))
+ret.append(digest.decode("utf-8"))

 return ret

@@ -1333,6 +1320,10 @@ class Up2k(object):
 if self.args.dotpart:
 tnam = "." + tnam

+if self.args.nw:
+job["tnam"] = tnam
+return

 suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
 with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
 f, job["tnam"] = f["orz"]

@@ -1372,19 +1363,22 @@ class Up2k(object):
 self.log("could not unsparse [{}]".format(path), 3)

 def _snapshot(self):
-persist_interval = 30 # persist unfinished uploads index every 30 sec
-discard_interval = 21600 # drop unfinished uploads after 6 hours inactivity
-prev = {}
+self.snap_persist_interval = 300 # persist unfinished index every 5 min
+self.snap_discard_interval = 21600 # drop unfinished after 6 hours inactivity
+self.snap_prev = {}
 while True:
-time.sleep(persist_interval)
-with self.mutex:
-for k, reg in self.registry.items():
-self._snap_reg(prev, k, reg, discard_interval)
+time.sleep(self.snap_persist_interval)
+self.do_snapshot()

-def _snap_reg(self, prev, ptop, reg, discard_interval):
+def do_snapshot(self):
+with self.mutex:
+for k, reg in self.registry.items():
+self._snap_reg(k, reg)

+def _snap_reg(self, ptop, reg):
 now = time.time()
 histpath = self.asrv.vfs.histtab[ptop]
-rm = [x for x in reg.values() if now - x["poke"] > discard_interval]
+rm = [x for x in reg.values() if now - x["poke"] > self.snap_discard_interval]
 if rm:
 m = "dropping {} abandoned uploads in {}".format(len(rm), ptop)
 vis = [self._vis_job_progress(x) for x in rm]

@@ -1406,15 +1400,15 @@ class Up2k(object):

 path = os.path.join(histpath, "up2k.snap")
 if not reg:
-if ptop not in prev or prev[ptop] is not None:
-prev[ptop] = None
+if ptop not in self.snap_prev or self.snap_prev[ptop] is not None:
+self.snap_prev[ptop] = None
 if os.path.exists(fsenc(path)):
 os.unlink(fsenc(path))
 return

 newest = max(x["poke"] for _, x in reg.items()) if reg else 0
 etag = [len(reg), newest]
-if etag == prev.get(ptop):
+if etag == self.snap_prev.get(ptop):
 return

 try:

@@ -1430,7 +1424,7 @@ class Up2k(object):
 atomic_move(path2, path)

 self.log("snap: {} |{}|".format(path, len(reg.keys())))
-prev[ptop] = etag
+self.snap_prev[ptop] = etag

 def _tagger(self):
 with self.mutex:

@@ -1498,6 +1492,11 @@ class Up2k(object):
 self.n_hashq += 1
 # self.log("hashq {} push {}/{}/{}".format(self.n_hashq, ptop, rd, fn))

+def shutdown(self):
+if hasattr(self, "snap_prev"):
+self.log("writing snapshot")
+self.do_snapshot()


 def up2k_chunksize(filesize):
 chunksize = 1024 * 1024

@@ -1513,17 +1512,17 @@ def up2k_chunksize(filesize):


 def up2k_wark_from_hashlist(salt, filesize, hashes):
-""" server-reproducible file identifier, independent of name or location """
+"""server-reproducible file identifier, independent of name or location"""
 ident = [salt, str(filesize)]
 ident.extend(hashes)
 ident = "\n".join(ident)

-wark = hashlib.sha512(ident.encode("utf-8")).digest()
+wark = hashlib.sha512(ident.encode("utf-8")).digest()[:33]
 wark = base64.urlsafe_b64encode(wark)
-return wark.decode("ascii")[:43]
+return wark.decode("ascii")


 def up2k_wark_from_metadata(salt, sz, lastmod, rd, fn):
 ret = fsenc("{}\n{}\n{}\n{}\n{}".format(salt, lastmod, sz, rd, fn))
 ret = base64.urlsafe_b64encode(hashlib.sha512(ret).digest())
-return "#{}".format(ret[:42].decode("ascii"))
+return "#{}".format(ret.decode("ascii"))[:44]
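Several of the hunks above move from 32-byte to 33-byte sha512 truncation and drop the `.rstrip("=")` / `[:43]` trimming: 33 bytes is a multiple of 3, so urlsafe base64 yields exactly 44 characters with no padding, which is what the new `^[0-9a-zA-Z_-]{44}$` wark pattern expects (a 32-byte digest encodes to 44 chars including one `=`, hence the old stripping to 43). A quick check, not part of the diff:

```python
import base64, hashlib

digest = hashlib.sha512(b"example").digest()

old = base64.urlsafe_b64encode(digest[:32]).decode("utf-8").rstrip("=")
new = base64.urlsafe_b64encode(digest[:33]).decode("utf-8")

print(len(old), old.endswith("="))  # 43 False  (needed the rstrip)
print(len(new), new.endswith("="))  # 44 False  (33 bytes pad out evenly)
```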
@@ -16,6 +16,7 @@ import mimetypes
 import contextlib
 import subprocess as sp # nosec
 from datetime import datetime
+from collections import Counter

 from .__init__ import PY2, WINDOWS, ANYWIN
 from .stolen import surrogateescape

@@ -42,6 +43,20 @@ else:
 from Queue import Queue # pylint: disable=import-error,no-name-in-module
 from StringIO import StringIO as BytesIO


+try:
+struct.unpack(b">i", b"idgi")
+spack = struct.pack
+sunpack = struct.unpack
+except:

+def spack(f, *a, **ka):
+return struct.pack(f.decode("ascii"), *a, **ka)

+def sunpack(f, *a, **ka):
+return struct.unpack(f.decode("ascii"), *a, **ka)


 surrogateescape.register_surrogateescape()
 FS_ENCODING = sys.getfilesystemencoding()
 if WINDOWS and PY2:

@@ -123,20 +138,6 @@ REKOBO_KEY = {
 REKOBO_LKEY = {k.lower(): v for k, v in REKOBO_KEY.items()}


-class Counter(object):
-def __init__(self, v=0):
-self.v = v
-self.mutex = threading.Lock()

-def add(self, delta=1):
-with self.mutex:
-self.v += delta

-def set(self, absval):
-with self.mutex:
-self.v = absval


 class Cooldown(object):
 def __init__(self, maxage):
 self.maxage = maxage

@@ -231,7 +232,7 @@ def nuprint(msg):

 def rice_tid():
 tid = threading.current_thread().ident
-c = struct.unpack(b"B" * 5, struct.pack(b">Q", tid)[-5:])
+c = sunpack(b"B" * 5, spack(b">Q", tid)[-5:])
 return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m"


@@ -282,15 +283,69 @@ def alltrace():
 return "\n".join(rret + bret)


+def start_stackmon(arg_str, nid):
+suffix = "-{}".format(nid) if nid else ""
+fp, f = arg_str.rsplit(",", 1)
+f = int(f)
+t = threading.Thread(
+target=stackmon,
+args=(fp, f, suffix),
+name="stackmon" + suffix,
+)
+t.daemon = True
+t.start()


+def stackmon(fp, ival, suffix):
+ctr = 0
+while True:
+ctr += 1
+time.sleep(ival)
+st = "{}, {}\n{}".format(ctr, time.time(), alltrace())
+with open(fp + suffix, "wb") as f:
+f.write(st.encode("utf-8", "replace"))


+def start_log_thrs(logger, ival, nid):
+ival = int(ival)
+tname = lname = "log-thrs"
+if nid:
+tname = "logthr-n{}-i{:x}".format(nid, os.getpid())
+lname = tname[3:]

+t = threading.Thread(
+target=log_thrs,
+args=(logger, ival, lname),
+name=tname,
+)
+t.daemon = True
+t.start()


+def log_thrs(log, ival, name):
+while True:
+time.sleep(ival)
+tv = [x.name for x in threading.enumerate()]
+tv = [
+x.split("-")[0]
+if x.startswith("httpconn-") or x.startswith("thumb-")
+else "listen"
+if "-listen-" in x
+else x
+for x in tv
+if not x.startswith("pydevd.")
+]
+tv = ["{}\033[36m{}".format(v, k) for k, v in sorted(Counter(tv).items())]
+log(name, "\033[0m \033[33m".join(tv), 3)


 def min_ex():
 et, ev, tb = sys.exc_info()
-tb = traceback.extract_tb(tb, 2)
-ex = [
-"{} @ {} <{}>: {}".format(fp.split(os.sep)[-1], ln, fun, txt)
-for fp, ln, fun, txt in tb
-]
-ex.append("{}: {}".format(et.__name__, ev))
-return "\n".join(ex)
+tb = traceback.extract_tb(tb)
+fmt = "{} @ {} <{}>: {}"
+ex = [fmt.format(fp.split(os.sep)[-1], ln, fun, txt) for fp, ln, fun, txt in tb]
+ex.append("[{}] {}".format(et.__name__, ev))
+return "\n".join(ex[-8:])


 @contextlib.contextmanager
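Going by the added code, `start_stackmon` takes a single `"path,interval"` string and spawns a daemon thread that rewrites that file with a dump of every thread's stack (`alltrace()`) on the given interval; `start_log_thrs` similarly logs a running tally of thread names. How these get wired up to the CLI is not part of this diff, so the call below is only a hypothetical example:

```python
# hypothetical usage; the flag that supplies arg_str is not shown in this diff
from copyparty.util import start_stackmon

start_stackmon("/tmp/stacks.txt,30", 0)
# every 30 seconds, /tmp/stacks.txt is rewritten with all thread stacktraces
```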
@@ -351,7 +406,7 @@ def ren_open(fname, *args, **kwargs):
 if not b64:
 b64 = (bname + ext).encode("utf-8", "replace")
 b64 = hashlib.sha512(b64).digest()[:12]
-b64 = base64.urlsafe_b64encode(b64).decode("utf-8").rstrip("=")
+b64 = base64.urlsafe_b64encode(b64).decode("utf-8")

 badlen = len(fname)
 while len(fname) >= badlen:

@@ -648,6 +703,16 @@ def s2hms(s, optional_h=False):
 return "{}:{:02}:{:02}".format(h, m, s)


+def uncyg(path):
+if len(path) < 2 or not path.startswith("/"):
+return path

+if len(path) > 2 and path[2] != "/":
+return path

+return "{}:\\{}".format(path[1], path[3:])


 def undot(path):
 ret = []
 for node in path.split("/"):
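The new `uncyg` helper converts a cygwin/MSYS-style absolute path (`/c/...`) into a Windows drive path by swapping the first component for a drive letter; anything that does not look like `/<letter>/...` is returned unchanged, and the separators after the drive are left as-is. A small self-contained demo:

```python
def uncyg(path):  # copied from the hunk above
    if len(path) < 2 or not path.startswith("/"):
        return path

    if len(path) > 2 and path[2] != "/":
        return path

    return "{}:\\{}".format(path[1], path[3:])

print(uncyg("/c/Users/ed/music"))  # c:\Users/ed/music
print(uncyg("/tmp/foo"))           # /tmp/foo  ("tmp" is not a drive letter)
print(uncyg("song.mp3"))           # song.mp3  (not absolute)
```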
@@ -664,7 +729,7 @@ def undot(path):
 return "/".join(ret)


-def sanitize_fn(fn, ok="", bad=[]):
+def sanitize_fn(fn, ok, bad):
 if "/" not in ok:
 fn = fn.replace("\\", "/").split("/")[-1]

@@ -894,35 +959,24 @@ def yieldfile(fn):
 yield buf


-def hashcopy(actor, fin, fout):
-is_mp = actor.is_mp
+def hashcopy(fin, fout):
 hashobj = hashlib.sha512()
 tlen = 0
 for buf in fin:
-if is_mp:
-actor.workload += 1
-if actor.workload > 2 ** 31:
-actor.workload = 100

 tlen += len(buf)
 hashobj.update(buf)
 fout.write(buf)

-digest32 = hashobj.digest()[:32]
-digest_b64 = base64.urlsafe_b64encode(digest32).decode("utf-8").rstrip("=")
+digest = hashobj.digest()[:33]
+digest_b64 = base64.urlsafe_b64encode(digest).decode("utf-8")

 return tlen, hashobj.hexdigest(), digest_b64


-def sendfile_py(lower, upper, f, s, actor=None):
+def sendfile_py(lower, upper, f, s):
 remains = upper - lower
 f.seek(lower)
 while remains > 0:
-if actor:
-actor.workload += 1
-if actor.workload > 2 ** 31:
-actor.workload = 100

 # time.sleep(0.01)
 buf = f.read(min(1024 * 32, remains))
 if not buf:

@@ -969,8 +1023,7 @@ def statdir(logger, scandir, lstat, top):
 try:
 yield [fsdec(fh.name), fh.stat(follow_symlinks=not lstat)]
 except Exception as ex:
-msg = "scan-stat: \033[36m{} @ {}"
-logger(msg.format(repr(ex), fsdec(fh.path)))
+logger(src, "[s] {} @ {}".format(repr(ex), fsdec(fh.path)), 6)
 else:
 src = "listdir"
 fun = os.lstat if lstat else os.stat

@@ -979,11 +1032,10 @@ def statdir(logger, scandir, lstat, top):
 try:
 yield [fsdec(name), fun(abspath)]
 except Exception as ex:
-msg = "list-stat: \033[36m{} @ {}"
-logger(msg.format(repr(ex), fsdec(abspath)))
+logger(src, "[s] {} @ {}".format(repr(ex), fsdec(abspath)), 6)

 except Exception as ex:
-logger("{}: \033[31m{} @ {}".format(src, repr(ex), top))
+logger(src, "{} @ {}".format(repr(ex), top), 1)


 def unescape_cookie(orig):

@@ -1020,7 +1072,13 @@ def guess_mime(url, fallback="application/octet-stream"):
 except:
 return fallback

-return MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback
+ret = MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback

+if ";" not in ret:
+if ret.startswith("text/") or ret.endswith("/javascript"):
+ret += "; charset=UTF-8"

+return ret


 def runcmd(*argv):
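The `guess_mime` change above appends a UTF-8 charset to text-ish content types that do not already carry parameters, so plaintext and javascript responses declare their encoding. Assuming the usual stdlib mappings, the effect is roughly:

```python
import mimetypes

def with_charset(ret):  # same post-processing as the new guess_mime tail
    if ";" not in ret:
        if ret.startswith("text/") or ret.endswith("/javascript"):
            ret += "; charset=UTF-8"
    return ret

print(with_charset(mimetypes.guess_type("notes.txt")[0]))  # text/plain; charset=UTF-8
print(with_charset(mimetypes.guess_type("clip.mp4")[0]))   # video/mp4 (unchanged)
```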
@@ -1054,10 +1112,7 @@ def gzip_orig_sz(fn):
 with open(fsenc(fn), "rb") as f:
 f.seek(-4, 2)
 rv = f.read(4)
-try:
-return struct.unpack(b"I", rv)[0]
-except:
-return struct.unpack("I", rv)[0]
+return sunpack(b"I", rv)[0]


 def py_desc():
@@ -13,7 +13,7 @@ window.baguetteBox = (function () {
 captions: true,
 buttons: 'auto',
 noScrollbars: false,
-bodyClass: 'baguetteBox-open',
+bodyClass: 'bbox-open',
 titleTag: false,
 async: false,
 preload: 2,

@@ -22,37 +22,46 @@ window.baguetteBox = (function () {
 afterHide: null,
 onChange: null,
 },
-overlay, slider, previousButton, nextButton, closeButton,
+overlay, slider, btnPrev, btnNext, btnHelp, btnVmode, btnClose,
 currentGallery = [],
 currentIndex = 0,
 isOverlayVisible = false,
 touch = {}, // start-pos
 touchFlag = false, // busy
-regex = /.+\.(gif|jpe?g|png|webp)/i,
+re_i = /.+\.(gif|jpe?g|png|webp)(\?|$)/i,
+re_v = /.+\.(webm|mp4)(\?|$)/i,
 data = {}, // all galleries
 imagesElements = [],
-documentLastFocus = null;
+documentLastFocus = null,
+isFullscreen = false,
+vmute = false,
+vloop = false,
+vnext = false,
+resume_mp = false;

-var overlayClickHandler = function (event) {
-if (event.target.id.indexOf('baguette-img') !== -1) {
+var onFSC = function (e) {
+isFullscreen = !!document.fullscreenElement;
+};

+var overlayClickHandler = function (e) {
+if (e.target.id.indexOf('baguette-img') !== -1)
 hideOverlay();
-}
 };

-var touchstartHandler = function (event) {
+var touchstartHandler = function (e) {
 touch.count++;
-if (touch.count > 1) {
+if (touch.count > 1)
 touch.multitouch = true;
-}
-touch.startX = event.changedTouches[0].pageX;
-touch.startY = event.changedTouches[0].pageY;
+touch.startX = e.changedTouches[0].pageX;
+touch.startY = e.changedTouches[0].pageY;
 };
-var touchmoveHandler = function (event) {
-if (touchFlag || touch.multitouch) {
+var touchmoveHandler = function (e) {
+if (touchFlag || touch.multitouch)
 return;
-}
-event.preventDefault ? event.preventDefault() : event.returnValue = false;
-var touchEvent = event.touches[0] || event.changedTouches[0];
+e.preventDefault ? e.preventDefault() : e.returnValue = false;
+var touchEvent = e.touches[0] || e.changedTouches[0];
 if (touchEvent.pageX - touch.startX > 40) {
 touchFlag = true;
 showPreviousImage();

@@ -65,19 +74,19 @@ window.baguetteBox = (function () {
 };
 var touchendHandler = function () {
 touch.count--;
-if (touch.count <= 0) {
+if (touch.count <= 0)
 touch.multitouch = false;
-}
 touchFlag = false;
 };
 var contextmenuHandler = function () {
 touchendHandler();
 };

-var trapFocusInsideOverlay = function (event) {
-if (overlay.style.display === 'block' && (overlay.contains && !overlay.contains(event.target))) {
-event.stopPropagation();
-initFocus();
+var trapFocusInsideOverlay = function (e) {
+if (overlay.style.display === 'block' && (overlay.contains && !overlay.contains(e.target))) {
+e.stopPropagation();
+btnClose.focus();
 }
 };

@@ -88,7 +97,7 @@ window.baguetteBox = (function () {
 }

 function bindImageClickListeners(selector, userOptions) {
-var galleryNodeList = document.querySelectorAll(selector);
+var galleryNodeList = QSA(selector);
 var selectorData = {
 galleries: [],
 nodeList: galleryNodeList

@@ -96,33 +105,26 @@ window.baguetteBox = (function () {
 data[selector] = selectorData;

 [].forEach.call(galleryNodeList, function (galleryElement) {
-if (userOptions && userOptions.filter) {
-regex = userOptions.filter;
-}

 var tagsNodeList = [];
-if (galleryElement.tagName === 'A') {
+if (galleryElement.tagName === 'A')
 tagsNodeList = [galleryElement];
-} else {
+else
 tagsNodeList = galleryElement.getElementsByTagName('a');
-}

 tagsNodeList = [].filter.call(tagsNodeList, function (element) {
-if (element.className.indexOf(userOptions && userOptions.ignoreClass) === -1) {
-return regex.test(element.href);
-}
+if (element.className.indexOf(userOptions && userOptions.ignoreClass) === -1)
+return re_i.test(element.href) || re_v.test(element.href);
 });
-if (tagsNodeList.length === 0) {
+if (!tagsNodeList.length)
 return;
-}

 var gallery = [];
 [].forEach.call(tagsNodeList, function (imageElement, imageIndex) {
-var imageElementClickHandler = function (event) {
-if (event && event.ctrlKey)
+var imageElementClickHandler = function (e) {
+if (ctrl(e))
 return true;

-event.preventDefault ? event.preventDefault() : event.returnValue = false;
+e.preventDefault ? e.preventDefault() : e.returnValue = false;
 prepareOverlay(gallery, userOptions);
 showOverlay(imageIndex);
 };
@@ -140,93 +142,186 @@ window.baguetteBox = (function () {
 }

 function clearCachedData() {
-for (var selector in data) {
-if (data.hasOwnProperty(selector)) {
+for (var selector in data)
+if (data.hasOwnProperty(selector))
 removeFromCache(selector);
-}
-}
 }

 function removeFromCache(selector) {
-if (!data.hasOwnProperty(selector)) {
+if (!data.hasOwnProperty(selector))
 return;
-}
 var galleries = data[selector].galleries;
 [].forEach.call(galleries, function (gallery) {
 [].forEach.call(gallery, function (imageItem) {
 unbind(imageItem.imageElement, 'click', imageItem.eventHandler);
 });

-if (currentGallery === gallery) {
+if (currentGallery === gallery)
 currentGallery = [];
-}
 });

 delete data[selector];
 }

 function buildOverlay() {
-overlay = ebi('baguetteBox-overlay');
-if (overlay) {
-slider = ebi('baguetteBox-slider');
-previousButton = ebi('previous-button');
-nextButton = ebi('next-button');
-closeButton = ebi('close-button');
-return;
+overlay = ebi('bbox-overlay');
+if (!overlay) {
+var ctr = mknod('div');
+ctr.innerHTML = (
+'<div id="bbox-overlay" role="dialog">' +
+'<div id="bbox-slider"></div>' +
+'<button id="bbox-prev" class="bbox-btn" type="button" aria-label="Previous"><</button>' +
+'<button id="bbox-next" class="bbox-btn" type="button" aria-label="Next">></button>' +
+'<div id="bbox-btns">' +
+'<button id="bbox-help" type="button">?</button>' +
+'<button id="bbox-vmode" type="button" tt="a"></button>' +
+'<button id="bbox-close" type="button" aria-label="Close">X</button>' +
+'</div></div>'
+);
+overlay = ctr.firstChild;
+QS('body').appendChild(overlay);
+tt.att(overlay);
 }
-overlay = mknod('div');
-overlay.setAttribute('role', 'dialog');
-overlay.id = 'baguetteBox-overlay';
-document.getElementsByTagName('body')[0].appendChild(overlay);

-slider = mknod('div');
-slider.id = 'baguetteBox-slider';
-overlay.appendChild(slider);

-previousButton = mknod('button');
-previousButton.setAttribute('type', 'button');
-previousButton.id = 'previous-button';
-previousButton.setAttribute('aria-label', 'Previous');
-previousButton.innerHTML = '<';
-overlay.appendChild(previousButton);

-nextButton = mknod('button');
-nextButton.setAttribute('type', 'button');
-nextButton.id = 'next-button';
-nextButton.setAttribute('aria-label', 'Next');
-nextButton.innerHTML = '>';
-overlay.appendChild(nextButton);

-closeButton = mknod('button');
-closeButton.setAttribute('type', 'button');
-closeButton.id = 'close-button';
-closeButton.setAttribute('aria-label', 'Close');
-closeButton.innerHTML = '×';
-overlay.appendChild(closeButton);

-previousButton.className = nextButton.className = closeButton.className = 'baguetteBox-button';

+slider = ebi('bbox-slider');
+btnPrev = ebi('bbox-prev');
+btnNext = ebi('bbox-next');
+btnHelp = ebi('bbox-help');
+btnVmode = ebi('bbox-vmode');
+btnClose = ebi('bbox-close');

 bindEvents();
 }

-function keyDownHandler(event) {
-switch (event.keyCode) {
-case 37: // Left
-showPreviousImage();
-break;
-case 39: // Right
-showNextImage();
-break;
-case 27: // Esc
-hideOverlay();
-break;
-case 36: // Home
-showFirstImage(event);
-break;
-case 35: // End
-showLastImage(event);
-break;

+function halp() {
+if (ebi('bbox-halp'))
+return;

+var list = [
+['<b># hotkey</b>', '<b># operation</b>'],
+['escape', 'close'],
+['left, J', 'previous file'],
+['right, L', 'next file'],
+['home', 'first file'],
+['end', 'last file'],
+['space, P, K', 'video: play / pause'],
+['U', 'video: seek 10sec back'],
+['P', 'video: seek 10sec ahead'],
+['M', 'video: toggle mute'],
+['R', 'video: toggle loop'],
+['C', 'video: toggle auto-next'],
+['F', 'video: toggle fullscreen'],
+],
+d = mknod('table'),
+html = ['<tbody>'];

+for (var a = 0; a < list.length; a++)
+html.push('<tr><td>' + list[a][0] + '</td><td>' + list[a][1] + '</td></tr>');

+d.innerHTML = html.join('\n') + '</tbody>';
+d.setAttribute('id', 'bbox-halp');
+d.onclick = function () {
+overlay.removeChild(d);
+};
+overlay.appendChild(d);
+}

+function keyDownHandler(e) {
+if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
+return;

+var k = e.code + '', v = vid();

+if (k == "ArrowLeft" || k == "KeyJ")
+showPreviousImage();
+else if (k == "ArrowRight" || k == "KeyL")
+showNextImage();
+else if (k == "Escape")
+hideOverlay();
+else if (k == "Home")
+showFirstImage(e);
+else if (k == "End")
+showLastImage(e);
+else if (k == "Space" || k == "KeyP" || k == "KeyK")
+playpause();
+else if (k == "KeyU" || k == "KeyO")
+relseek(k == "KeyU" ? -10 : 10);
+else if (k == "KeyM" && v) {
+v.muted = vmute = !vmute;
+mp_ctl();
 }
+else if (k == "KeyR" && v) {
+vloop = !vloop;
+vnext = vnext && !vloop;
+setVmode();
+}
+else if (k == "KeyC" && v) {
+vnext = !vnext;
+vloop = vloop && !vnext;
+setVmode();
+}
+else if (k == "KeyF")
+try {
+if (isFullscreen)
+document.exitFullscreen();
+else
+v.requestFullscreen();
+}
+catch (ex) { }
+}

+function setVmode() {
+var v = vid();
+ebi('bbox-vmode').style.display = v ? '' : 'none';
+if (!v)
+return;

+var msg = 'When video ends, ', tts = '', lbl;
+if (vloop) {
+lbl = 'Loop';
+msg += 'repeat it';
+tts = '$NHotkey: R';
+}
+else if (vnext) {
+lbl = 'Cont';
+msg += 'continue to next';
+tts = '$NHotkey: C';
+}
+else {
+lbl = 'Stop';
+msg += 'just stop'
+}
+btnVmode.setAttribute('aria-label', msg);
+btnVmode.setAttribute('tt', msg + tts);
+btnVmode.textContent = lbl;

+v.loop = vloop
+if (vloop && v.paused)
+v.play();
+}

+function tglVmode() {
+if (vloop) {
+vnext = true;
+vloop = false;
+}
+else if (vnext)
+vnext = false;
+else
+vloop = true;

+setVmode();
+if (tt.en)
+tt.show.bind(this)();
+}

+function keyUpHandler(e) {
+if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
+return;

+var k = e.code + '';

+if (k == "Space")
+ev(e);
 }

 var passiveSupp = false;
@@ -248,9 +343,11 @@ window.baguetteBox = (function () {

 function bindEvents() {
 bind(overlay, 'click', overlayClickHandler);
-bind(previousButton, 'click', showPreviousImage);
-bind(nextButton, 'click', showNextImage);
-bind(closeButton, 'click', hideOverlay);
+bind(btnPrev, 'click', showPreviousImage);
+bind(btnNext, 'click', showNextImage);
+bind(btnClose, 'click', hideOverlay);
+bind(btnVmode, 'click', tglVmode);
+bind(btnHelp, 'click', halp);
 bind(slider, 'contextmenu', contextmenuHandler);
 bind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
 bind(overlay, 'touchmove', touchmoveHandler, passiveEvent);

@@ -260,9 +357,11 @@ window.baguetteBox = (function () {

 function unbindEvents() {
 unbind(overlay, 'click', overlayClickHandler);
-unbind(previousButton, 'click', showPreviousImage);
-unbind(nextButton, 'click', showNextImage);
-unbind(closeButton, 'click', hideOverlay);
+unbind(btnPrev, 'click', showPreviousImage);
+unbind(btnNext, 'click', showNextImage);
+unbind(btnClose, 'click', hideOverlay);
+unbind(btnVmode, 'click', tglVmode);
+unbind(btnHelp, 'click', halp);
 unbind(slider, 'contextmenu', contextmenuHandler);
 unbind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
 unbind(overlay, 'touchmove', touchmoveHandler, passiveEvent);

@@ -271,9 +370,9 @@ window.baguetteBox = (function () {
 }

 function prepareOverlay(gallery, userOptions) {
-if (currentGallery === gallery) {
+if (currentGallery === gallery)
 return;
-}
 currentGallery = gallery;
 setOptions(userOptions);
 slider.innerHTML = '';

@@ -287,8 +386,8 @@ window.baguetteBox = (function () {
 fullImage.id = 'baguette-img-' + i;
 imagesElements.push(fullImage);

-imagesFiguresIds.push('baguetteBox-figure-' + i);
-imagesCaptionsIds.push('baguetteBox-figcaption-' + i);
+imagesFiguresIds.push('bbox-figure-' + i);
+imagesCaptionsIds.push('bbox-figcaption-' + i);
 slider.appendChild(imagesElements[i]);
 }
 overlay.setAttribute('aria-labelledby', imagesFiguresIds.join(' '));

@@ -296,23 +395,21 @@ window.baguetteBox = (function () {
 }

 function setOptions(newOptions) {
-if (!newOptions) {
+if (!newOptions)
 newOptions = {};
-}
 for (var item in defaults) {
 options[item] = defaults[item];
-if (typeof newOptions[item] !== 'undefined') {
+if (typeof newOptions[item] !== 'undefined')
 options[item] = newOptions[item];
-}
 }
 slider.style.transition = (options.animation === 'fadeIn' ? 'opacity .4s ease' :
 options.animation === 'slideIn' ? '' : 'none');

-if (options.buttons === 'auto' && ('ontouchstart' in window || currentGallery.length === 1)) {
+if (options.buttons === 'auto' && ('ontouchstart' in window || currentGallery.length === 1))
 options.buttons = false;
-}

-previousButton.style.display = nextButton.style.display = (options.buttons ? '' : 'none');
+btnPrev.style.display = btnNext.style.display = (options.buttons ? '' : 'none');
 }

 function showOverlay(chosenImageIndex) {

@@ -320,11 +417,12 @@ window.baguetteBox = (function () {
 document.documentElement.style.overflowY = 'hidden';
 document.body.style.overflowY = 'scroll';
 }
-if (overlay.style.display === 'block') {
+if (overlay.style.display === 'block')
 return;
-}

 bind(document, 'keydown', keyDownHandler);
+bind(document, 'keyup', keyUpHandler);
+bind(document, 'fullscreenchange', onFSC);
 currentIndex = chosenImageIndex;
 touch = {
 count: 0,

@@ -341,50 +439,48 @@ window.baguetteBox = (function () {
 // Fade in overlay
 setTimeout(function () {
 overlay.className = 'visible';
-if (options.bodyClass && document.body.classList) {
+if (options.bodyClass && document.body.classList)
 document.body.classList.add(options.bodyClass);
-}
-if (options.afterShow) {
-options.afterShow();
-}
-}, 50);
-if (options.onChange) {
-options.onChange(currentIndex, imagesElements.length);
-}
-documentLastFocus = document.activeElement;
-initFocus();
-isOverlayVisible = true;
-}

-function initFocus() {
-if (options.buttons) {
-previousButton.focus();
-} else {
-closeButton.focus();
-}
+if (options.afterShow)
+options.afterShow();
+}, 50);

+if (options.onChange)
+options.onChange(currentIndex, imagesElements.length);

+documentLastFocus = document.activeElement;
+btnClose.focus();
+isOverlayVisible = true;
 }

 function hideOverlay(e) {
 ev(e);
+playvid(false);
 if (options.noScrollbars) {
 document.documentElement.style.overflowY = 'auto';
 document.body.style.overflowY = 'auto';
 }
-if (overlay.style.display === 'none') {
+if (overlay.style.display === 'none')
 return;
-}

 unbind(document, 'keydown', keyDownHandler);
+unbind(document, 'keyup', keyUpHandler);
+unbind(document, 'fullscreenchange', onFSC);
 // Fade out and hide the overlay
 overlay.className = '';
 setTimeout(function () {
 overlay.style.display = 'none';
-if (options.bodyClass && document.body.classList) {
+if (options.bodyClass && document.body.classList)
 document.body.classList.remove(options.bodyClass);
-}
-if (options.afterHide) {
+var h = ebi('bbox-halp');
+if (h)
+h.parentNode.removeChild(h);

+if (options.afterHide)
 options.afterHide();
-}
 documentLastFocus && documentLastFocus.focus();
 isOverlayVisible = false;
 }, 500);

@@ -394,59 +490,68 @@ window.baguetteBox = (function () {
var imageContainer = imagesElements[index];
|
var imageContainer = imagesElements[index];
|
||||||
var galleryItem = currentGallery[index];
|
var galleryItem = currentGallery[index];
|
||||||
|
|
||||||
if (typeof imageContainer === 'undefined' || typeof galleryItem === 'undefined') {
|
if (typeof imageContainer === 'undefined' || typeof galleryItem === 'undefined')
|
||||||
return; // out-of-bounds or gallery dirty
|
return; // out-of-bounds or gallery dirty
|
||||||
}
|
|
||||||
|
|
||||||
if (imageContainer.getElementsByTagName('img')[0]) {
|
if (imageContainer.querySelector('img, video'))
|
||||||
// image is loaded, cb and bail
|
// was loaded, cb and bail
|
||||||
if (callback) {
|
return callback ? callback() : null;
|
||||||
callback();
|
|
||||||
}
|
// maybe unloaded video
|
||||||
return;
|
while (imageContainer.firstChild)
|
||||||
}
|
imageContainer.removeChild(imageContainer.firstChild);
|
||||||
|
|
||||||
var imageElement = galleryItem.imageElement,
|
var imageElement = galleryItem.imageElement,
|
||||||
imageSrc = imageElement.href,
|
imageSrc = imageElement.href,
|
||||||
thumbnailElement = imageElement.getElementsByTagName('img')[0],
|
is_vid = re_v.test(imageSrc),
|
||||||
|
thumbnailElement = imageElement.querySelector('img, video'),
|
||||||
imageCaption = typeof options.captions === 'function' ?
|
imageCaption = typeof options.captions === 'function' ?
|
||||||
options.captions.call(currentGallery, imageElement) :
|
options.captions.call(currentGallery, imageElement) :
|
||||||
imageElement.getAttribute('data-caption') || imageElement.title;
|
imageElement.getAttribute('data-caption') || imageElement.title;
|
||||||
|
|
||||||
|
imageSrc += imageSrc.indexOf('?') < 0 ? '?cache' : '&cache';
|
||||||
|
|
||||||
|
if (is_vid && index != currentIndex)
|
||||||
|
return; // no preload
|
||||||
|
|
||||||
var figure = mknod('figure');
|
var figure = mknod('figure');
|
||||||
figure.id = 'baguetteBox-figure-' + index;
|
figure.id = 'bbox-figure-' + index;
|
||||||
figure.innerHTML = '<div class="baguetteBox-spinner">' +
|
figure.innerHTML = '<div class="bbox-spinner">' +
|
||||||
'<div class="baguetteBox-double-bounce1"></div>' +
|
'<div class="bbox-double-bounce1"></div>' +
|
||||||
'<div class="baguetteBox-double-bounce2"></div>' +
|
'<div class="bbox-double-bounce2"></div>' +
|
||||||
'</div>';
|
'</div>';
|
||||||
|
|
||||||
if (options.captions && imageCaption) {
|
if (options.captions && imageCaption) {
|
||||||
var figcaption = mknod('figcaption');
|
var figcaption = mknod('figcaption');
|
||||||
figcaption.id = 'baguetteBox-figcaption-' + index;
|
figcaption.id = 'bbox-figcaption-' + index;
|
||||||
figcaption.innerHTML = imageCaption;
|
figcaption.innerHTML = imageCaption;
|
||||||
figure.appendChild(figcaption);
|
figure.appendChild(figcaption);
|
||||||
}
|
}
|
||||||
imageContainer.appendChild(figure);
|
imageContainer.appendChild(figure);
|
||||||
|
|
||||||
var image = mknod('img');
|
var image = mknod(is_vid ? 'video' : 'img');
|
||||||
image.onload = function () {
|
clmod(imageContainer, 'vid', is_vid);
|
||||||
|
|
||||||
|
image.addEventListener(is_vid ? 'loadedmetadata' : 'load', function () {
|
||||||
// Remove loader element
|
// Remove loader element
|
||||||
var spinner = document.querySelector('#baguette-img-' + index + ' .baguetteBox-spinner');
|
var spinner = QS('#baguette-img-' + index + ' .bbox-spinner');
|
||||||
figure.removeChild(spinner);
|
figure.removeChild(spinner);
|
||||||
if (!options.async && callback) {
|
if (!options.async && callback)
|
||||||
callback();
|
callback();
|
||||||
}
|
});
|
||||||
};
|
|
||||||
image.setAttribute('src', imageSrc);
|
image.setAttribute('src', imageSrc);
|
||||||
image.alt = thumbnailElement ? thumbnailElement.alt || '' : '';
|
if (is_vid) {
|
||||||
if (options.titleTag && imageCaption) {
|
image.setAttribute('controls', 'controls');
|
||||||
image.title = imageCaption;
|
image.onended = vidEnd;
|
||||||
}
|
}
|
||||||
|
image.alt = thumbnailElement ? thumbnailElement.alt || '' : '';
|
||||||
|
if (options.titleTag && imageCaption)
|
||||||
|
image.title = imageCaption;
|
||||||
|
|
||||||
figure.appendChild(image);
|
figure.appendChild(image);
|
||||||
|
|
||||||
if (options.async && callback) {
|
if (options.async && callback)
|
||||||
callback();
|
callback();
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function showNextImage(e) {
|
function showNextImage(e) {
|
||||||
@@ -459,26 +564,20 @@ window.baguetteBox = (function () {
|
|||||||
return show(currentIndex - 1);
|
return show(currentIndex - 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
function showFirstImage(event) {
|
function showFirstImage(e) {
|
||||||
if (event) {
|
if (e)
|
||||||
event.preventDefault();
|
e.preventDefault();
|
||||||
}
|
|
||||||
return show(0);
|
return show(0);
|
||||||
}
|
}
|
||||||
|
|
||||||
function showLastImage(event) {
|
function showLastImage(e) {
|
||||||
if (event) {
|
if (e)
|
||||||
event.preventDefault();
|
e.preventDefault();
|
||||||
}
|
|
||||||
return show(currentGallery.length - 1);
|
return show(currentGallery.length - 1);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Move the gallery to a specific index
|
|
||||||
* @param `index` {number} - the position of the image
|
|
||||||
* @param `gallery` {array} - gallery which should be opened, if omitted assumes the currently opened one
|
|
||||||
* @return {boolean} - true on success or false if the index is invalid
|
|
||||||
*/
|
|
||||||
function show(index, gallery) {
|
function show(index, gallery) {
|
||||||
if (!isOverlayVisible && index >= 0 && index < gallery.length) {
|
if (!isOverlayVisible && index >= 0 && index < gallery.length) {
|
||||||
prepareOverlay(gallery, options);
|
prepareOverlay(gallery, options);
|
||||||
@@ -486,18 +585,25 @@ window.baguetteBox = (function () {
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
if (index < 0) {
|
if (index < 0) {
|
||||||
if (options.animation) {
|
if (options.animation)
|
||||||
bounceAnimation('left');
|
bounceAnimation('left');
|
||||||
}
|
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
if (index >= imagesElements.length) {
|
if (index >= imagesElements.length) {
|
||||||
if (options.animation) {
|
if (options.animation)
|
||||||
bounceAnimation('right');
|
bounceAnimation('right');
|
||||||
}
|
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var v = vid();
|
||||||
|
if (v) {
|
||||||
|
v.src = '';
|
||||||
|
v.load();
|
||||||
|
v.parentNode.removeChild(v);
|
||||||
|
}
|
||||||
|
|
||||||
currentIndex = index;
|
currentIndex = index;
|
||||||
loadImage(currentIndex, function () {
|
loadImage(currentIndex, function () {
|
||||||
preloadNext(currentIndex);
|
preloadNext(currentIndex);
|
||||||
@@ -505,17 +611,49 @@ window.baguetteBox = (function () {
|
|||||||
});
|
});
|
||||||
updateOffset();
|
updateOffset();
|
||||||
|
|
||||||
if (options.onChange) {
|
if (options.onChange)
|
||||||
options.onChange(currentIndex, imagesElements.length);
|
options.onChange(currentIndex, imagesElements.length);
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
function vid() {
|
||||||
* Triggers the bounce animation
|
return imagesElements[currentIndex].querySelector('video');
|
||||||
* @param {('left'|'right')} direction - Direction of the movement
|
}
|
||||||
*/
|
|
||||||
|
function playvid(play) {
|
||||||
|
if (vid())
|
||||||
|
vid()[play ? 'play' : 'pause']();
|
||||||
|
}
|
||||||
|
|
||||||
|
function playpause() {
|
||||||
|
var v = vid();
|
||||||
|
if (v)
|
||||||
|
v[v.paused ? "play" : "pause"]();
|
||||||
|
}
|
||||||
|
|
||||||
|
function relseek(sec) {
|
||||||
|
if (vid())
|
||||||
|
vid().currentTime += sec;
|
||||||
|
}
|
||||||
|
|
||||||
|
function vidEnd() {
|
||||||
|
if (this == vid() && vnext)
|
||||||
|
showNextImage();
|
||||||
|
}
|
||||||
|
|
||||||
|
function mp_ctl() {
|
||||||
|
var v = vid();
|
||||||
|
if (!vmute && v && mp.au && !mp.au.paused) {
|
||||||
|
mp.fade_out();
|
||||||
|
resume_mp = true;
|
||||||
|
}
|
||||||
|
else if (resume_mp && (vmute || !v) && mp.au && mp.au.paused) {
|
||||||
|
mp.fade_in();
|
||||||
|
resume_mp = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
function bounceAnimation(direction) {
|
function bounceAnimation(direction) {
|
||||||
slider.className = 'bounce-from-' + direction;
|
slider.className = 'bounce-from-' + direction;
|
||||||
setTimeout(function () {
|
setTimeout(function () {
|
||||||
@@ -534,21 +672,30 @@ window.baguetteBox = (function () {
|
|||||||
} else {
|
} else {
|
||||||
slider.style.transform = 'translate3d(' + offset + ',0,0)';
|
slider.style.transform = 'translate3d(' + offset + ',0,0)';
|
||||||
}
|
}
|
||||||
|
playvid(false);
|
||||||
|
var v = vid();
|
||||||
|
if (v) {
|
||||||
|
playvid(true);
|
||||||
|
v.muted = vmute;
|
||||||
|
v.loop = vloop;
|
||||||
|
}
|
||||||
|
mp_ctl();
|
||||||
|
setVmode();
|
||||||
}
|
}
|
||||||
|
|
||||||
function preloadNext(index) {
|
function preloadNext(index) {
|
||||||
if (index - currentIndex >= options.preload) {
|
if (index - currentIndex >= options.preload)
|
||||||
return;
|
return;
|
||||||
}
|
|
||||||
loadImage(index + 1, function () {
|
loadImage(index + 1, function () {
|
||||||
preloadNext(index + 1);
|
preloadNext(index + 1);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
function preloadPrev(index) {
|
function preloadPrev(index) {
|
||||||
if (currentIndex - index >= options.preload) {
|
if (currentIndex - index >= options.preload)
|
||||||
return;
|
return;
|
||||||
}
|
|
||||||
loadImage(index - 1, function () {
|
loadImage(index - 1, function () {
|
||||||
preloadPrev(index - 1);
|
preloadPrev(index - 1);
|
||||||
});
|
});
|
||||||
@@ -566,7 +713,8 @@ window.baguetteBox = (function () {
|
|||||||
unbindEvents();
|
unbindEvents();
|
||||||
clearCachedData();
|
clearCachedData();
|
||||||
unbind(document, 'keydown', keyDownHandler);
|
unbind(document, 'keydown', keyDownHandler);
|
||||||
document.getElementsByTagName('body')[0].removeChild(ebi('baguetteBox-overlay'));
|
unbind(document, 'keyup', keyUpHandler);
|
||||||
|
document.getElementsByTagName('body')[0].removeChild(ebi('bbox-overlay'));
|
||||||
data = {};
|
data = {};
|
||||||
currentGallery = [];
|
currentGallery = [];
|
||||||
currentIndex = 0;
|
currentIndex = 0;
|
||||||
@@ -577,6 +725,8 @@ window.baguetteBox = (function () {
|
|||||||
show: show,
|
show: show,
|
||||||
showNext: showNextImage,
|
showNext: showNextImage,
|
||||||
showPrevious: showPreviousImage,
|
showPrevious: showPreviousImage,
|
||||||
|
relseek: relseek,
|
||||||
|
playpause: playpause,
|
||||||
hide: hideOverlay,
|
hide: hideOverlay,
|
||||||
destroy: destroyPlugin
|
destroy: destroyPlugin
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -29,10 +29,10 @@ body {
|
|||||||
position: fixed;
|
position: fixed;
|
||||||
max-width: 34em;
|
max-width: 34em;
|
||||||
background: #222;
|
background: #222;
|
||||||
border: 0 solid #555;
|
border: 0 solid #777;
|
||||||
overflow: hidden;
|
overflow: hidden;
|
||||||
margin-top: 1em;
|
margin-top: 1em;
|
||||||
padding: 0 1em;
|
padding: 0 1.3em;
|
||||||
height: 0;
|
height: 0;
|
||||||
opacity: .1;
|
opacity: .1;
|
||||||
transition: opacity 0.14s, height 0.14s, padding 0.14s;
|
transition: opacity 0.14s, height 0.14s, padding 0.14s;
|
||||||
@@ -40,19 +40,31 @@ body {
|
|||||||
border-radius: .4em;
|
border-radius: .4em;
|
||||||
z-index: 9001;
|
z-index: 9001;
|
||||||
}
|
}
|
||||||
|
#tt.b {
|
||||||
|
padding: 0 2em;
|
||||||
|
border-radius: .5em;
|
||||||
|
box-shadow: 0 .2em 1em #000;
|
||||||
|
}
|
||||||
#tt.show {
|
#tt.show {
|
||||||
padding: 1em;
|
padding: 1em 1.3em;
|
||||||
|
border-width: .4em 0;
|
||||||
height: auto;
|
height: auto;
|
||||||
border-width: .2em 0;
|
|
||||||
opacity: 1;
|
opacity: 1;
|
||||||
}
|
}
|
||||||
|
#tt.show.b {
|
||||||
|
padding: 1.5em 2em;
|
||||||
|
border-width: .5em 0;
|
||||||
|
}
|
||||||
#tt code {
|
#tt code {
|
||||||
background: #3c3c3c;
|
background: #3c3c3c;
|
||||||
padding: .2em .3em;
|
padding: .1em .3em;
|
||||||
border-top: 1px solid #777;
|
border-top: 1px solid #777;
|
||||||
border-radius: .3em;
|
border-radius: .3em;
|
||||||
font-family: monospace, monospace;
|
font-family: monospace, monospace;
|
||||||
line-height: 2em;
|
line-height: 1.7em;
|
||||||
|
}
|
||||||
|
#tt em {
|
||||||
|
color: #f6a;
|
||||||
}
|
}
|
||||||
#path,
|
#path,
|
||||||
#path * {
|
#path * {
|
||||||
@@ -311,6 +323,7 @@ html.light #ggrid a.sel {
|
|||||||
height: 6em;
|
height: 6em;
|
||||||
width: 100%;
|
width: 100%;
|
||||||
z-index: 3;
|
z-index: 3;
|
||||||
|
touch-action: none;
|
||||||
transition: bottom 0.15s;
|
transition: bottom 0.15s;
|
||||||
}
|
}
|
||||||
#widget.open {
|
#widget.open {
|
||||||
@@ -324,10 +337,18 @@ html.light #ggrid a.sel {
|
|||||||
height: 100%;
|
height: 100%;
|
||||||
background: #3c3c3c;
|
background: #3c3c3c;
|
||||||
}
|
}
|
||||||
|
#wtgrid,
|
||||||
#wtico {
|
#wtico {
|
||||||
cursor: url(/.cpr/dd/4.png), pointer;
|
cursor: url(/.cpr/dd/4.png), pointer;
|
||||||
animation: cursor 500ms;
|
animation: cursor 500ms;
|
||||||
|
position: relative;
|
||||||
|
top: -.06em;
|
||||||
}
|
}
|
||||||
|
#wtgrid {
|
||||||
|
font-size: .8em;
|
||||||
|
top: -.12em;
|
||||||
|
}
|
||||||
|
#wtgrid:hover,
|
||||||
#wtico:hover {
|
#wtico:hover {
|
||||||
animation: cursor 500ms infinite;
|
animation: cursor 500ms infinite;
|
||||||
}
|
}
|
||||||
@@ -343,9 +364,10 @@ html.light #ggrid a.sel {
|
|||||||
}
|
}
|
||||||
#wtoggle {
|
#wtoggle {
|
||||||
position: absolute;
|
position: absolute;
|
||||||
|
white-space: nowrap;
|
||||||
top: -1.2em;
|
top: -1.2em;
|
||||||
right: 0;
|
right: 0;
|
||||||
width: 1.2em;
|
width: 2.5em;
|
||||||
height: 1em;
|
height: 1em;
|
||||||
font-size: 2em;
|
font-size: 2em;
|
||||||
line-height: 1em;
|
line-height: 1em;
|
||||||
@@ -377,10 +399,10 @@ html.light #ggrid a.sel {
|
|||||||
line-height: 1em;
|
line-height: 1em;
|
||||||
}
|
}
|
||||||
#wtoggle.np {
|
#wtoggle.np {
|
||||||
width: 5.5em;
|
width: 6.63em;
|
||||||
}
|
}
|
||||||
#wtoggle.sel {
|
#wtoggle.sel {
|
||||||
width: 6.4em;
|
width: 7.57em;
|
||||||
}
|
}
|
||||||
#wtoggle.sel #wzip,
|
#wtoggle.sel #wzip,
|
||||||
#wtoggle.np #wnp {
|
#wtoggle.np #wnp {
|
||||||
@@ -463,6 +485,17 @@ html.light #ggrid a.sel {
|
|||||||
max-width: 9em;
|
max-width: 9em;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@media (max-width: 35em) {
|
||||||
|
#ops>a[data-dest="new_md"],
|
||||||
|
#ops>a[data-dest="msg"] {
|
||||||
|
display: none;
|
||||||
|
}
|
||||||
|
#op_mkdir.act+div,
|
||||||
|
#op_mkdir.act+div+div {
|
||||||
|
display: block;
|
||||||
|
margin-top: 1em;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@@ -535,7 +568,9 @@ html.light #ggrid a.sel {
|
|||||||
border-radius: .2em;
|
border-radius: .2em;
|
||||||
padding: .2em .3em;
|
padding: .2em .3em;
|
||||||
}
|
}
|
||||||
.opview input.err {
|
.opview input.err,
|
||||||
|
html.light .opview input[type="text"].err {
|
||||||
|
color: #fff;
|
||||||
background: #a20;
|
background: #a20;
|
||||||
border-color: #f00;
|
border-color: #f00;
|
||||||
box-shadow: 0 0 .7em #f00;
|
box-shadow: 0 0 .7em #f00;
|
||||||
@@ -548,6 +583,12 @@ input[type="checkbox"]+label {
|
|||||||
input[type="checkbox"]:checked+label {
|
input[type="checkbox"]:checked+label {
|
||||||
color: #fc5;
|
color: #fc5;
|
||||||
}
|
}
|
||||||
|
input[type="radio"]:checked+label {
|
||||||
|
color: #fc0;
|
||||||
|
}
|
||||||
|
html.light input[type="radio"]:checked+label {
|
||||||
|
color: #07c;
|
||||||
|
}
|
||||||
input.eq_gain {
|
input.eq_gain {
|
||||||
width: 3em;
|
width: 3em;
|
||||||
text-align: center;
|
text-align: center;
|
||||||
@@ -598,7 +639,7 @@ input.eq_gain {
|
|||||||
#srch_q {
|
#srch_q {
|
||||||
white-space: pre;
|
white-space: pre;
|
||||||
color: #f80;
|
color: #f80;
|
||||||
height: 1em;
|
min-height: 1em;
|
||||||
margin: .2em 0 -1em 1.6em;
|
margin: .2em 0 -1em 1.6em;
|
||||||
}
|
}
|
||||||
#tq_raw {
|
#tq_raw {
|
||||||
@@ -656,6 +697,7 @@ input.eq_gain {
|
|||||||
}
|
}
|
||||||
#thx_ff {
|
#thx_ff {
|
||||||
padding: 5em 0;
|
padding: 5em 0;
|
||||||
|
/* widget */
|
||||||
}
|
}
|
||||||
#tree::-webkit-scrollbar-track,
|
#tree::-webkit-scrollbar-track,
|
||||||
#tree::-webkit-scrollbar {
|
#tree::-webkit-scrollbar {
|
||||||
@@ -716,10 +758,10 @@ input.eq_gain {
|
|||||||
#treeul a.hl {
|
#treeul a.hl {
|
||||||
color: #400;
|
color: #400;
|
||||||
background: #fc4;
|
background: #fc4;
|
||||||
border-radius: .3em;
|
|
||||||
text-shadow: none;
|
text-shadow: none;
|
||||||
}
|
}
|
||||||
#treeul a {
|
#treeul a {
|
||||||
|
border-radius: .3em;
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
}
|
}
|
||||||
#treeul a+a {
|
#treeul a+a {
|
||||||
@@ -751,9 +793,14 @@ input.eq_gain {
|
|||||||
display: block;
|
display: block;
|
||||||
width: 1em;
|
width: 1em;
|
||||||
border-radius: .2em;
|
border-radius: .2em;
|
||||||
margin: -1.3em auto 0 auto;
|
margin: -1.2em auto 0 auto;
|
||||||
|
top: 2em;
|
||||||
|
position: relative;
|
||||||
background: #444;
|
background: #444;
|
||||||
}
|
}
|
||||||
|
#files th span {
|
||||||
|
position: relative;
|
||||||
|
}
|
||||||
#files>thead>tr>th.min,
|
#files>thead>tr>th.min,
|
||||||
#files td.min {
|
#files td.min {
|
||||||
display: none;
|
display: none;
|
||||||
@@ -801,10 +848,14 @@ input.eq_gain {
|
|||||||
padding: 0;
|
padding: 0;
|
||||||
border-bottom: 1px solid #555;
|
border-bottom: 1px solid #555;
|
||||||
}
|
}
|
||||||
#thumbs {
|
#thumbs,
|
||||||
|
#au_osd_cv,
|
||||||
|
#u2tdate {
|
||||||
opacity: .3;
|
opacity: .3;
|
||||||
}
|
}
|
||||||
#griden.on+#thumbs {
|
#griden.on+#thumbs,
|
||||||
|
#au_os_ctl.on+#au_osd_cv,
|
||||||
|
#u2turbo.on+#u2tdate {
|
||||||
opacity: 1;
|
opacity: 1;
|
||||||
}
|
}
|
||||||
#ghead {
|
#ghead {
|
||||||
@@ -876,7 +927,8 @@ html.light #ggrid a:hover {
|
|||||||
#pvol,
|
#pvol,
|
||||||
#barbuf,
|
#barbuf,
|
||||||
#barpos,
|
#barpos,
|
||||||
#u2conf label {
|
#u2conf label,
|
||||||
|
#ops {
|
||||||
-webkit-user-select: none;
|
-webkit-user-select: none;
|
||||||
-moz-user-select: none;
|
-moz-user-select: none;
|
||||||
-ms-user-select: none;
|
-ms-user-select: none;
|
||||||
@@ -909,13 +961,16 @@ html.light {
|
|||||||
}
|
}
|
||||||
html.light #tt {
|
html.light #tt {
|
||||||
background: #fff;
|
background: #fff;
|
||||||
border-color: #888;
|
border-color: #888 #000 #777 #000;
|
||||||
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
|
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
|
||||||
}
|
}
|
||||||
html.light #tt code {
|
html.light #tt code {
|
||||||
background: #060;
|
background: #060;
|
||||||
color: #fff;
|
color: #fff;
|
||||||
}
|
}
|
||||||
|
html.light #tt em {
|
||||||
|
color: #d38;
|
||||||
|
}
|
||||||
html.light #ops,
|
html.light #ops,
|
||||||
html.light .opbox,
|
html.light .opbox,
|
||||||
html.light #srch_form {
|
html.light #srch_form {
|
||||||
@@ -959,8 +1014,14 @@ html.light #treeul a.hl {
|
|||||||
background: #07a;
|
background: #07a;
|
||||||
color: #fff;
|
color: #fff;
|
||||||
}
|
}
|
||||||
|
html.light #treeul a.hl:hover {
|
||||||
|
background: #059;
|
||||||
|
}
|
||||||
html.light #tree li {
|
html.light #tree li {
|
||||||
border-color: #ddd #fff #f7f7f7 #fff;
|
border-color: #f7f7f7 #fff #ddd #fff;
|
||||||
|
}
|
||||||
|
html.light #tree a:hover {
|
||||||
|
background: #fff;
|
||||||
}
|
}
|
||||||
html.light #tree ul {
|
html.light #tree ul {
|
||||||
border-color: #ccc;
|
border-color: #ccc;
|
||||||
@@ -978,14 +1039,14 @@ html.light #files {
|
|||||||
}
|
}
|
||||||
html.light #files thead th {
|
html.light #files thead th {
|
||||||
background: #eee;
|
background: #eee;
|
||||||
border-right: 1px solid #ccc;
|
border: 1px solid #ccc;
|
||||||
border-bottom: 1px solid #ccc;
|
border-top: none;
|
||||||
}
|
}
|
||||||
html.light #files thead th {
|
html.light #files thead th+th {
|
||||||
border-left: 1px solid #f7f7f7;
|
border-left: 1px solid #f7f7f7;
|
||||||
}
|
}
|
||||||
html.light #files td {
|
html.light #files td {
|
||||||
border-color: #ddd #fff #fff #ddd;
|
border-color: #fff #fff #ddd #ddd;
|
||||||
}
|
}
|
||||||
html.light #files tbody tr:last-child td {
|
html.light #files tbody tr:last-child td {
|
||||||
border-bottom: .2em solid #ccc;
|
border-bottom: .2em solid #ccc;
|
||||||
@@ -1040,6 +1101,9 @@ html.light #wzip,
|
|||||||
html.light #wnp {
|
html.light #wnp {
|
||||||
border-color: #ccc;
|
border-color: #ccc;
|
||||||
}
|
}
|
||||||
|
html.light #barbuf {
|
||||||
|
background: none;
|
||||||
|
}
|
||||||
html.light #files tr.sel:hover td {
|
html.light #files tr.sel:hover td {
|
||||||
background: #c37;
|
background: #c37;
|
||||||
}
|
}
|
||||||
@@ -1107,67 +1171,76 @@ html.light #tree::-webkit-scrollbar {
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
#baguetteBox-overlay {
|
#bbox-overlay {
|
||||||
display: none;
|
display: none;
|
||||||
opacity: 0;
|
opacity: 0;
|
||||||
position: fixed;
|
position: fixed;
|
||||||
overflow: hidden;
|
overflow: hidden;
|
||||||
|
touch-action: none;
|
||||||
top: 0;
|
top: 0;
|
||||||
left: 0;
|
left: 0;
|
||||||
width: 100%;
|
width: 100%;
|
||||||
height: 100%;
|
height: 100%;
|
||||||
z-index: 1000000;
|
z-index: 10;
|
||||||
background: rgba(0, 0, 0, 0.8);
|
background: rgba(0, 0, 0, 0.8);
|
||||||
transition: opacity .3s ease;
|
transition: opacity .3s ease;
|
||||||
}
|
}
|
||||||
#baguetteBox-overlay.visible {
|
#bbox-overlay.visible {
|
||||||
opacity: 1;
|
opacity: 1;
|
||||||
}
|
}
|
||||||
#baguetteBox-overlay .full-image {
|
.full-image {
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
position: relative;
|
position: relative;
|
||||||
width: 100%;
|
width: 100%;
|
||||||
height: 100%;
|
height: 100%;
|
||||||
text-align: center;
|
text-align: center;
|
||||||
}
|
}
|
||||||
#baguetteBox-overlay .full-image figure {
|
.full-image figure {
|
||||||
display: inline;
|
display: inline;
|
||||||
margin: 0;
|
margin: 0;
|
||||||
height: 100%;
|
height: 100%;
|
||||||
}
|
}
|
||||||
#baguetteBox-overlay .full-image img {
|
.full-image img,
|
||||||
|
.full-image video {
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
width: auto;
|
width: auto;
|
||||||
height: auto;
|
height: auto;
|
||||||
max-height: 100%;
|
|
||||||
max-width: 100%;
|
max-width: 100%;
|
||||||
|
max-height: 100%;
|
||||||
|
max-height: calc(100% - 1.4em);
|
||||||
|
margin-bottom: 1.4em;
|
||||||
vertical-align: middle;
|
vertical-align: middle;
|
||||||
box-shadow: 0 0 8px rgba(0, 0, 0, 0.6);
|
box-shadow: 0 0 8px rgba(0, 0, 0, 0.6);
|
||||||
}
|
}
|
||||||
#baguetteBox-overlay .full-image figcaption {
|
.full-image video {
|
||||||
|
background: #333;
|
||||||
|
}
|
||||||
|
.full-image figcaption {
|
||||||
display: block;
|
display: block;
|
||||||
position: absolute;
|
position: fixed;
|
||||||
bottom: 0;
|
bottom: .1em;
|
||||||
width: 100%;
|
width: 100%;
|
||||||
text-align: center;
|
text-align: center;
|
||||||
line-height: 1.8;
|
|
||||||
white-space: normal;
|
white-space: normal;
|
||||||
color: #ccc;
|
color: #ccc;
|
||||||
}
|
}
|
||||||
#baguetteBox-overlay figcaption a {
|
#bbox-overlay figcaption a {
|
||||||
background: rgba(0, 0, 0, 0.6);
|
background: rgba(0, 0, 0, 0.6);
|
||||||
border-radius: .4em;
|
border-radius: .4em;
|
||||||
padding: .3em .6em;
|
padding: .3em .6em;
|
||||||
}
|
}
|
||||||
#baguetteBox-overlay .full-image:before {
|
html.light #bbox-overlay figcaption a {
|
||||||
|
color: #0bf;
|
||||||
|
}
|
||||||
|
.full-image:before {
|
||||||
content: "";
|
content: "";
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
height: 50%;
|
height: 50%;
|
||||||
width: 1px;
|
width: 1px;
|
||||||
margin-right: -1px;
|
margin-right: -1px;
|
||||||
}
|
}
|
||||||
#baguetteBox-slider {
|
#bbox-slider {
|
||||||
position: absolute;
|
position: fixed;
|
||||||
left: 0;
|
left: 0;
|
||||||
top: 0;
|
top: 0;
|
||||||
height: 100%;
|
height: 100%;
|
||||||
@@ -1175,10 +1248,10 @@ html.light #tree::-webkit-scrollbar {
|
|||||||
white-space: nowrap;
|
white-space: nowrap;
|
||||||
transition: left .2s ease, transform .2s ease;
|
transition: left .2s ease, transform .2s ease;
|
||||||
}
|
}
|
||||||
#baguetteBox-slider.bounce-from-right {
|
.bounce-from-right {
|
||||||
animation: bounceFromRight .4s ease-out;
|
animation: bounceFromRight .4s ease-out;
|
||||||
}
|
}
|
||||||
#baguetteBox-slider.bounce-from-left {
|
.bounce-from-left {
|
||||||
animation: bounceFromLeft .4s ease-out;
|
animation: bounceFromLeft .4s ease-out;
|
||||||
}
|
}
|
||||||
@keyframes bounceFromRight {
|
@keyframes bounceFromRight {
|
||||||
@@ -1191,48 +1264,63 @@ html.light #tree::-webkit-scrollbar {
|
|||||||
50% {margin-left: 30px}
|
50% {margin-left: 30px}
|
||||||
100% {margin-left: 0}
|
100% {margin-left: 0}
|
||||||
}
|
}
|
||||||
.baguetteBox-button#next-button,
|
#bbox-next,
|
||||||
.baguetteBox-button#previous-button {
|
#bbox-prev {
|
||||||
top: 50%;
|
top: 50%;
|
||||||
top: calc(50% - 30px);
|
top: calc(50% - 30px);
|
||||||
width: 44px;
|
width: 44px;
|
||||||
height: 60px;
|
height: 60px;
|
||||||
}
|
}
|
||||||
.baguetteBox-button {
|
.bbox-btn {
|
||||||
position: absolute;
|
position: fixed;
|
||||||
|
}
|
||||||
|
#bbox-overlay button {
|
||||||
cursor: pointer;
|
cursor: pointer;
|
||||||
outline: none;
|
outline: none;
|
||||||
padding: 0;
|
padding: 0 .3em;
|
||||||
margin: 0;
|
margin: 0 .4em;
|
||||||
border: 0;
|
border: 0;
|
||||||
border-radius: 15%;
|
border-radius: 15%;
|
||||||
background: rgba(50, 50, 50, 0.5);
|
background: rgba(50, 50, 50, 0.5);
|
||||||
color: #ddd;
|
color: rgba(255,255,255,0.7);
|
||||||
font: 1.6em sans-serif;
|
|
||||||
transition: background-color .3s ease;
|
transition: background-color .3s ease;
|
||||||
|
transition: color .3s ease;
|
||||||
|
font-size: 1.4em;
|
||||||
|
line-height: 1.4em;
|
||||||
|
vertical-align: top;
|
||||||
}
|
}
|
||||||
.baguetteBox-button:focus,
|
#bbox-overlay button:focus,
|
||||||
.baguetteBox-button:hover {
|
#bbox-overlay button:hover {
|
||||||
|
color: rgba(255,255,255,0.9);
|
||||||
background: rgba(50, 50, 50, 0.9);
|
background: rgba(50, 50, 50, 0.9);
|
||||||
}
|
}
|
||||||
#next-button {
|
#bbox-next {
|
||||||
|
right: 1%;
|
||||||
|
}
|
||||||
|
#bbox-prev {
|
||||||
|
left: 1%;
|
||||||
|
}
|
||||||
|
#bbox-btns {
|
||||||
|
top: .5em;
|
||||||
right: 2%;
|
right: 2%;
|
||||||
|
position: fixed;
|
||||||
}
|
}
|
||||||
#previous-button {
|
#bbox-halp {
|
||||||
left: 2%;
|
color: #fff;
|
||||||
}
|
background: #333;
|
||||||
#close-button {
|
|
||||||
top: 20px;
|
|
||||||
right: 2%;
|
|
||||||
width: 30px;
|
|
||||||
height: 30px;
|
|
||||||
}
|
|
||||||
.baguetteBox-button svg {
|
|
||||||
position: absolute;
|
position: absolute;
|
||||||
left: 0;
|
|
||||||
top: 0;
|
top: 0;
|
||||||
|
left: 0;
|
||||||
|
z-index: 20;
|
||||||
|
padding: .4em;
|
||||||
}
|
}
|
||||||
.baguetteBox-spinner {
|
#bbox-halp td {
|
||||||
|
padding: .2em .5em;
|
||||||
|
}
|
||||||
|
#bbox-halp td:first-child {
|
||||||
|
text-align: right;
|
||||||
|
}
|
||||||
|
.bbox-spinner {
|
||||||
width: 40px;
|
width: 40px;
|
||||||
height: 40px;
|
height: 40px;
|
||||||
display: inline-block;
|
display: inline-block;
|
||||||
@@ -1242,8 +1330,8 @@ html.light #tree::-webkit-scrollbar {
|
|||||||
margin-top: -20px;
|
margin-top: -20px;
|
||||||
margin-left: -20px;
|
margin-left: -20px;
|
||||||
}
|
}
|
||||||
.baguetteBox-double-bounce1,
|
.bbox-double-bounce1,
|
||||||
.baguetteBox-double-bounce2 {
|
.bbox-double-bounce2 {
|
||||||
width: 100%;
|
width: 100%;
|
||||||
height: 100%;
|
height: 100%;
|
||||||
border-radius: 50%;
|
border-radius: 50%;
|
||||||
@@ -1254,7 +1342,7 @@ html.light #tree::-webkit-scrollbar {
|
|||||||
left: 0;
|
left: 0;
|
||||||
animation: bounce 2s infinite ease-in-out;
|
animation: bounce 2s infinite ease-in-out;
|
||||||
}
|
}
|
||||||
.baguetteBox-double-bounce2 {
|
.bbox-double-bounce2 {
|
||||||
animation-delay: -1s;
|
animation-delay: -1s;
|
||||||
}
|
}
|
||||||
@keyframes bounce {
|
@keyframes bounce {
|
||||||
|
|||||||
@@ -2,140 +2,134 @@
|
|||||||
<html lang="en">
|
<html lang="en">
|
||||||
|
|
||||||
<head>
|
<head>
|
||||||
<meta charset="utf-8">
|
<meta charset="utf-8">
|
||||||
<title>⇆🎉 {{ title }}</title>
|
<title>⇆🎉 {{ title }}</title>
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css{{ ts }}">
|
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css?_={{ ts }}">
|
||||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css{{ ts }}">
|
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css?_={{ ts }}">
|
||||||
{%- if css %}
|
{%- if css %}
|
||||||
<link rel="stylesheet" type="text/css" media="screen" href="{{ css }}{{ ts }}">
|
<link rel="stylesheet" type="text/css" media="screen" href="{{ css }}?_={{ ts }}">
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
<div id="ops">
|
<div id="ops"></div>
|
||||||
<a href="#" data-dest="" tt="close submenu">---</a>
|
|
||||||
{%- if have_up2k_idx %}
|
|
||||||
<a href="#" data-perm="read" data-dest="search" tt="search for files by attributes, path/name, music tags, or any combination of those.<br /><br /><code>foo bar</code> = must contain both foo and bar,<br /><code>foo -bar</code> = must contain foo but not bar,<br /><code>^yana .opus$</code> = must start with yana and have the opus extension">🔎</a>
|
|
||||||
<a href="#" data-dest="up2k" tt="up2k: upload files (if you have write-access) or toggle into the search-mode and drag files onto the search button to see if they exist somewhere on the server">🚀</a>
|
|
||||||
{%- else %}
|
|
||||||
<a href="#" data-perm="write" data-dest="up2k" tt="up2k: upload files with resume support (close your browser and drop the same files in later)">🚀</a>
|
|
||||||
{%- endif %}
|
|
||||||
<a href="#" data-perm="write" data-dest="bup" tt="bup: basic uploader, even supports netscape 4.0">🎈</a>
|
|
||||||
<a href="#" data-perm="write" data-dest="mkdir" tt="mkdir: create a new directory">📂</a>
|
|
||||||
<a href="#" data-perm="read write" data-dest="new_md" tt="new-md: create a new markdown document">📝</a>
|
|
||||||
<a href="#" data-perm="write" data-dest="msg" tt="msg: send a message to the server log">📟</a>
|
|
||||||
<a href="#" data-dest="player" tt="media player options">🎺</a>
|
|
||||||
<a href="#" data-dest="cfg" tt="configuration options">⚙️</a>
|
|
||||||
<div id="opdesc"></div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<div id="op_search" class="opview">
|
<div id="op_search" class="opview">
|
||||||
{%- if have_tags_idx %}
|
{%- if have_tags_idx %}
|
||||||
<div id="srch_form" class="tags"></div>
|
<div id="srch_form" class="tags"></div>
|
||||||
{%- else %}
|
{%- else %}
|
||||||
<div id="srch_form"></div>
|
<div id="srch_form"></div>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
<div id="srch_q"></div>
|
<div id="srch_q"></div>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div id="op_player" class="opview opbox opwide"></div>
|
<div id="op_player" class="opview opbox opwide"></div>
|
||||||
|
|
||||||
{%- include 'upload.html' %}
|
<div id="op_bup" class="opview opbox act">
|
||||||
|
<div id="u2err"></div>
|
||||||
|
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||||
|
<input type="hidden" name="act" value="bput" />
|
||||||
|
<input type="file" name="f" multiple><br />
|
||||||
|
<input type="submit" value="start upload">
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div id="op_cfg" class="opview opbox opwide">
|
<div id="op_mkdir" class="opview opbox act">
|
||||||
<div>
|
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||||
<h3>switches</h3>
|
<input type="hidden" name="act" value="mkdir" />
|
||||||
<div>
|
📂<input type="text" name="name" size="30">
|
||||||
<a id="tooltips" class="tgl btn" href="#" tt="◔ ◡ ◔">ℹ️ tooltips</a>
|
<input type="submit" value="make directory">
|
||||||
<a id="lightmode" class="tgl btn" href="#">☀️ lightmode</a>
|
</form>
|
||||||
<a id="griden" class="tgl btn" href="#" tt="toggle icons or list-view$NHotkey: G">田 the grid</a>
|
</div>
|
||||||
<a id="thumbs" class="tgl btn" href="#" tt="in icon view, toggle icons or thumbnails$NHotkey: T">🖼️ thumbs</a>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
{%- if have_zip %}
|
|
||||||
<div><h3>folder download</h3><div id="arc_fmt"></div></div>
|
|
||||||
{%- endif %}
|
|
||||||
<div><h3>key notation</h3><div id="key_notation"></div></div>
|
|
||||||
<div class="fill"><h3>hidden columns</h3><div id="hcols"></div></div>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<h1 id="path">
|
<div id="op_new_md" class="opview opbox">
|
||||||
<a href="#" id="entree" tt="show directory tree$NHotkey: B">🌲</a>
|
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||||
{%- for n in vpnodes %}
|
<input type="hidden" name="act" value="new_md" />
|
||||||
<a href="/{{ n[0] }}">{{ n[1] }}</a>
|
📝<input type="text" name="name" size="30">
|
||||||
{%- endfor %}
|
<input type="submit" value="new markdown doc">
|
||||||
</h1>
|
</form>
|
||||||
|
</div>
|
||||||
|
|
||||||
<div id="tree">
|
<div id="op_msg" class="opview opbox act">
|
||||||
<div id="treeh">
|
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
|
||||||
<a href="#" id="detree" tt="show breadcrumbs$NHotkey: B">🍞...</a>
|
📟<input type="text" name="msg" size="30">
|
||||||
<a href="#" class="btn" step="2" id="twobytwo">+</a>
|
<input type="submit" value="send msg to server log">
|
||||||
<a href="#" class="btn" step="-2" id="twig">–</a>
|
</form>
|
||||||
<a href="#" class="tgl btn" id="dyntree" tt="autogrow as tree expands">a</a>
|
</div>
|
||||||
</div>
|
|
||||||
<ul id="treeul"></ul>
|
<div id="op_up2k" class="opview"></div>
|
||||||
<div id="thx_ff"> </div>
|
|
||||||
</div>
|
<div id="op_cfg" class="opview opbox opwide"></div>
|
||||||
|
|
||||||
|
<h1 id="path">
|
||||||
|
<a href="#" id="entree" tt="show directory tree$NHotkey: B">🌲</a>
|
||||||
|
{%- for n in vpnodes %}
|
||||||
|
<a href="/{{ n[0] }}">{{ n[1] }}</a>
|
||||||
|
{%- endfor %}
|
||||||
|
</h1>
|
||||||
|
|
||||||
|
<div id="tree"></div>
|
||||||
|
|
||||||
<div id="wrap">
|
<div id="wrap">
|
||||||
|
|
||||||
<div id="pro" class="logue">{{ logues[0] }}</div>
|
<div id="pro" class="logue">{{ logues[0] }}</div>
|
||||||
|
|
||||||
<table id="files">
|
<table id="files">
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
<th name="lead"><span>c</span></th>
|
<th name="lead"><span>c</span></th>
|
||||||
<th name="href"><span>File Name</span></th>
|
<th name="href"><span>File Name</span></th>
|
||||||
<th name="sz" sort="int"><span>Size</span></th>
|
<th name="sz" sort="int"><span>Size</span></th>
|
||||||
{%- for k in taglist %}
|
{%- for k in taglist %}
|
||||||
{%- if k.startswith('.') %}
|
{%- if k.startswith('.') %}
|
||||||
<th name="tags/{{ k }}" sort="int"><span>{{ k[1:] }}</span></th>
|
<th name="tags/{{ k }}" sort="int"><span>{{ k[1:] }}</span></th>
|
||||||
{%- else %}
|
{%- else %}
|
||||||
<th name="tags/{{ k }}"><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
|
<th name="tags/{{ k }}"><span>{{ k[0]|upper }}{{ k[1:] }}</span></th>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
{%- endfor %}
|
{%- endfor %}
|
||||||
<th name="ext"><span>T</span></th>
|
<th name="ext"><span>T</span></th>
|
||||||
<th name="ts"><span>Date</span></th>
|
<th name="ts"><span>Date</span></th>
|
||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody>
|
<tbody>
|
||||||
|
|
||||||
{%- for f in files %}
|
{%- for f in files %}
|
||||||
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
|
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
|
||||||
{%- if f.tags is defined %}
|
{%- if f.tags is defined %}
|
||||||
{%- for k in taglist %}
|
{%- for k in taglist %}
|
||||||
<td>{{ f.tags[k] }}</td>
|
<td>{{ f.tags[k] }}</td>
|
||||||
{%- endfor %}
|
{%- endfor %}
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
|
<td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
|
||||||
{%- endfor %}
|
{%- endfor %}
|
||||||
|
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
<div id="epi" class="logue">{{ logues[1] }}</div>
|
<div id="epi" class="logue">{{ logues[1] }}</div>
|
||||||
|
|
||||||
<h2><a href="?h">control-panel</a></h2>
|
<h2><a href="/?h">control-panel</a></h2>
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
{%- if srv_info %}
|
{%- if srv_info %}
|
||||||
<div id="srv_info"><span>{{ srv_info }}</span></div>
|
<div id="srv_info"><span>{{ srv_info }}</span></div>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|
||||||
<div id="widget"></div>
|
<div id="widget"></div>
|
||||||
|
|
||||||
<script>
|
<script>
|
||||||
var tag_order_cfg = {{ tag_order }};
|
var perms = {{ perms }},
|
||||||
</script>
|
tag_order_cfg = {{ tag_order }},
|
||||||
<script src="/.cpr/util.js{{ ts }}"></script>
|
have_up2k_idx = {{ have_up2k_idx|tojson }},
|
||||||
<script src="/.cpr/browser.js{{ ts }}"></script>
|
have_tags_idx = {{ have_tags_idx|tojson }},
|
||||||
<script src="/.cpr/up2k.js{{ ts }}"></script>
|
have_zip = {{ have_zip|tojson }};
|
||||||
<script>
|
</script>
|
||||||
apply_perms({{ perms }});
|
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||||
</script>
|
<script src="/.cpr/browser.js?_={{ ts }}"></script>
|
||||||
|
<script src="/.cpr/up2k.js?_={{ ts }}"></script>
|
||||||
</body>
|
</body>
|
||||||
|
|
||||||
</html>
|
</html>
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -2,59 +2,59 @@
|
|||||||
<html lang="en">
|
<html lang="en">
|
||||||
|
|
||||||
<head>
|
<head>
|
||||||
<meta charset="utf-8">
|
<meta charset="utf-8">
|
||||||
<title>{{ title }}</title>
|
<title>{{ title }}</title>
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||||
<style>
|
<style>
|
||||||
html{font-family:sans-serif}
|
html{font-family:sans-serif}
|
||||||
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
|
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}
|
||||||
a{display:block}
|
a{display:block}
|
||||||
</style>
|
</style>
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
{%- if srv_info %}
|
{%- if srv_info %}
|
||||||
<p><span>{{ srv_info }}</span></p>
|
<p><span>{{ srv_info }}</span></p>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|
||||||
{%- if have_b_u %}
|
{%- if have_b_u %}
|
||||||
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
|
||||||
<input type="hidden" name="act" value="bput" />
|
<input type="hidden" name="act" value="bput" />
|
||||||
<input type="file" name="f" multiple /><br />
|
<input type="file" name="f" multiple /><br />
|
||||||
<input type="submit" value="start upload" />
|
<input type="submit" value="start upload" />
|
||||||
</form>
|
</form>
|
||||||
<br />
|
<br />
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|
||||||
{%- if logues[0] %}
|
{%- if logues[0] %}
|
||||||
<div>{{ logues[0] }}</div><br />
|
<div>{{ logues[0] }}</div><br />
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|
||||||
<table id="files">
|
<table id="files">
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
<th name="lead"><span>c</span></th>
|
<th name="lead"><span>c</span></th>
|
||||||
<th name="href"><span>File Name</span></th>
|
<th name="href"><span>File Name</span></th>
|
||||||
<th name="sz" sort="int"><span>Size</span></th>
|
<th name="sz" sort="int"><span>Size</span></th>
|
||||||
<th name="ts"><span>Date</span></th>
|
<th name="ts"><span>Date</span></th>
|
||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
<tbody>
|
<tbody>
|
||||||
<tr><td></td><td><a href="../{{ url_suf }}">parent folder</a></td><td>-</td><td>-</td></tr>
|
<tr><td></td><td><a href="../{{ url_suf }}">parent folder</a></td><td>-</td><td>-</td></tr>
|
||||||
|
|
||||||
{%- for f in files %}
|
{%- for f in files %}
|
||||||
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}{{ url_suf }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.dt }}</td></tr>
|
<tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}{{ url_suf }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.dt }}</td></tr>
|
||||||
{%- endfor %}
|
{%- endfor %}
|
||||||
|
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
{%- if logues[1] %}
|
{%- if logues[1] %}
|
||||||
<div>{{ logues[1] }}</div><br />
|
<div>{{ logues[1] }}</div><br />
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|
||||||
<h2><a href="{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
|
<h2><a href="/{{ url_suf }}{{ url_suf and '&' or '?' }}h">control-panel</a></h2>
|
||||||
|
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|||||||
@@ -8,6 +8,47 @@ html, body {
|
|||||||
font-family: sans-serif;
|
font-family: sans-serif;
|
||||||
line-height: 1.5em;
|
line-height: 1.5em;
|
||||||
}
|
}
|
||||||
|
#tt {
|
||||||
|
position: fixed;
|
||||||
|
max-width: 34em;
|
||||||
|
background: #222;
|
||||||
|
border: 0 solid #777;
|
||||||
|
overflow: hidden;
|
||||||
|
margin-top: 1em;
|
||||||
|
padding: 0 1.3em;
|
||||||
|
height: 0;
|
||||||
|
opacity: .1;
|
||||||
|
transition: opacity 0.14s, height 0.14s, padding 0.14s;
|
||||||
|
box-shadow: 0 .2em .5em #222;
|
||||||
|
border-radius: .4em;
|
||||||
|
z-index: 9001;
|
||||||
|
}
|
||||||
|
#tt.b {
|
||||||
|
padding: 0 2em;
|
||||||
|
border-radius: .5em;
|
||||||
|
box-shadow: 0 .2em 1em #000;
|
||||||
|
}
|
||||||
|
#tt.show {
|
||||||
|
padding: 1em 1.3em;
|
||||||
|
border-width: .4em 0;
|
||||||
|
height: auto;
|
||||||
|
opacity: 1;
|
||||||
|
}
|
||||||
|
#tt.show.b {
|
||||||
|
padding: 1.5em 2em;
|
||||||
|
border-width: .5em 0;
|
||||||
|
}
|
||||||
|
#tt code {
|
||||||
|
background: #3c3c3c;
|
||||||
|
padding: .1em .3em;
|
||||||
|
border-top: 1px solid #777;
|
||||||
|
border-radius: .3em;
|
||||||
|
font-family: monospace, monospace;
|
||||||
|
line-height: 1.7em;
|
||||||
|
}
|
||||||
|
#tt em {
|
||||||
|
color: #f6a;
|
||||||
|
}
|
||||||
#mtw {
|
#mtw {
|
||||||
display: none;
|
display: none;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,9 +3,9 @@
|
|||||||
<title>📝🎉 {{ title }}</title> <!-- 📜 -->
|
<title>📝🎉 {{ title }}</title> <!-- 📜 -->
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||||
<link href="/.cpr/md.css" rel="stylesheet">
|
<link href="/.cpr/md.css?_={{ ts }}" rel="stylesheet">
|
||||||
{%- if edit %}
|
{%- if edit %}
|
||||||
<link href="/.cpr/md2.css" rel="stylesheet">
|
<link href="/.cpr/md2.css?_={{ ts }}" rel="stylesheet">
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
@@ -14,9 +14,9 @@
|
|||||||
<a id="lightswitch" href="#">go dark</a>
|
<a id="lightswitch" href="#">go dark</a>
|
||||||
<a id="navtoggle" href="#">hide nav</a>
|
<a id="navtoggle" href="#">hide nav</a>
|
||||||
{%- if edit %}
|
{%- if edit %}
|
||||||
<a id="save" href="?edit">save</a>
|
<a id="save" href="?edit" tt="Hotkey: ctrl-s">save</a>
|
||||||
<a id="sbs" href="#">sbs</a>
|
<a id="sbs" href="#" tt="editor and preview side by side">sbs</a>
|
||||||
<a id="nsbs" href="#">editor</a>
|
<a id="nsbs" href="#" tt="switch between editor and preview$NHotkey: ctrl-e">editor</a>
|
||||||
<div id="toolsbox">
|
<div id="toolsbox">
|
||||||
<a id="tools" href="#">tools</a>
|
<a id="tools" href="#">tools</a>
|
||||||
<a id="fmt_table" href="#">prettify table (ctrl-k)</a>
|
<a id="fmt_table" href="#">prettify table (ctrl-k)</a>
|
||||||
@@ -26,8 +26,8 @@
|
|||||||
<a id="help" href="#">help</a>
|
<a id="help" href="#">help</a>
|
||||||
</div>
|
</div>
|
||||||
{%- else %}
|
{%- else %}
|
||||||
<a href="?edit">edit (basic)</a>
|
<a href="?edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
|
||||||
<a href="?edit2">edit (fancy)</a>
|
<a href="?edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
||||||
<a href="?raw">view raw</a>
|
<a href="?raw">view raw</a>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
</div>
|
</div>
|
||||||
@@ -146,10 +146,10 @@ var md_opt = {
|
|||||||
})();
|
})();
|
||||||
|
|
||||||
</script>
|
</script>
|
||||||
<script src="/.cpr/util.js"></script>
|
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||||
<script src="/.cpr/deps/marked.js"></script>
|
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||||
<script src="/.cpr/md.js"></script>
|
<script src="/.cpr/md.js?_={{ ts }}"></script>
|
||||||
{%- if edit %}
|
{%- if edit %}
|
||||||
<script src="/.cpr/md2.js"></script>
|
<script src="/.cpr/md2.js?_={{ ts }}"></script>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
</body></html>
|
</body></html>
|
||||||
|
|||||||
@@ -530,3 +530,6 @@ dom_navtgl.onclick = function () {
|
|||||||
|
|
||||||
if (sread('hidenav') == 1)
|
if (sread('hidenav') == 1)
|
||||||
dom_navtgl.onclick();
|
dom_navtgl.onclick();
|
||||||
|
|
||||||
|
if (window['tt'])
|
||||||
|
tt.init();
|
||||||
|
|||||||
@@ -924,10 +924,9 @@ function cfg_uni(e) {
|
|||||||
(function () {
|
(function () {
|
||||||
function keydown(ev) {
|
function keydown(ev) {
|
||||||
ev = ev || window.event;
|
ev = ev || window.event;
|
||||||
var kc = ev.keyCode || ev.which;
|
var kc = ev.code || ev.keyCode || ev.which;
|
||||||
var ctrl = ev.ctrlKey || ev.metaKey;
|
//console.log(ev.key, ev.code, ev.keyCode, ev.which);
|
||||||
//console.log(ev.code, kc);
|
if (ctrl(ev) && (ev.code == "KeyS" || kc == 83)) {
|
||||||
if (ctrl && (ev.code == "KeyS" || kc == 83)) {
|
|
||||||
save();
|
save();
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
@@ -936,23 +935,15 @@ function cfg_uni(e) {
|
|||||||
if (d)
|
if (d)
|
||||||
d.click();
|
d.click();
|
||||||
}
|
}
|
||||||
if (document.activeElement == dom_src) {
|
if (document.activeElement != dom_src)
|
||||||
if (ev.code == "Tab" || kc == 9) {
|
return true;
|
||||||
md_indent(ev.shiftKey);
|
|
||||||
return false;
|
if (ctrl(ev)) {
|
||||||
}
|
if (ev.code == "KeyH" || kc == 72) {
|
||||||
if (ctrl && (ev.code == "KeyH" || kc == 72)) {
|
|
||||||
md_header(ev.shiftKey);
|
md_header(ev.shiftKey);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
if (!ctrl && (ev.code == "Home" || kc == 36)) {
|
if (ev.code == "KeyZ" || kc == 90) {
|
||||||
md_home(ev.shiftKey);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
if (!ctrl && !ev.shiftKey && (ev.code == "Enter" || kc == 13)) {
|
|
||||||
return md_newline();
|
|
||||||
}
|
|
||||||
if (ctrl && (ev.code == "KeyZ" || kc == 90)) {
|
|
||||||
if (ev.shiftKey)
|
if (ev.shiftKey)
|
||||||
action_stack.redo();
|
action_stack.redo();
|
||||||
else
|
else
|
||||||
@@ -960,33 +951,45 @@ function cfg_uni(e) {
|
|||||||
|
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
if (ctrl && (ev.code == "KeyY" || kc == 89)) {
|
if (ev.code == "KeyY" || kc == 89) {
|
||||||
action_stack.redo();
|
action_stack.redo();
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
if (!ctrl && !ev.shiftKey && kc == 8) {
|
if (ev.code == "KeyK") {
|
||||||
return md_backspace();
|
|
||||||
}
|
|
||||||
if (ctrl && (ev.code == "KeyK")) {
|
|
||||||
fmt_table();
|
fmt_table();
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
if (ctrl && (ev.code == "KeyU")) {
|
if (ev.code == "KeyU") {
|
||||||
iter_uni();
|
iter_uni();
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
if (ctrl && (ev.code == "KeyE")) {
|
if (ev.code == "KeyE") {
|
||||||
dom_nsbs.click();
|
dom_nsbs.click();
|
||||||
//fmt_table();
|
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
var up = ev.code == "ArrowUp" || kc == 38;
|
var up = ev.code == "ArrowUp" || kc == 38;
|
||||||
var dn = ev.code == "ArrowDown" || kc == 40;
|
var dn = ev.code == "ArrowDown" || kc == 40;
|
||||||
if (ctrl && (up || dn)) {
|
if (up || dn) {
|
||||||
md_p_jump(dn);
|
md_p_jump(dn);
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
else {
|
||||||
|
if (ev.code == "Tab" || kc == 9) {
|
||||||
|
md_indent(ev.shiftKey);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
if (ev.code == "Home" || kc == 36) {
|
||||||
|
md_home(ev.shiftKey);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
if (!ev.shiftKey && (ev.code == "Enter" || kc == 13)) {
|
||||||
|
return md_newline();
|
||||||
|
}
|
||||||
|
if (!ev.shiftKey && kc == 8) {
|
||||||
|
return md_backspace();
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
document.onkeydown = keydown;
|
document.onkeydown = keydown;
|
||||||
ebi('save').onclick = save;
|
ebi('save').onclick = save;
|
||||||
|
|||||||
@@ -3,9 +3,9 @@
|
|||||||
<title>📝🎉 {{ title }}</title>
|
<title>📝🎉 {{ title }}</title>
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||||
<link href="/.cpr/mde.css" rel="stylesheet">
|
<link href="/.cpr/mde.css?_={{ ts }}" rel="stylesheet">
|
||||||
<link href="/.cpr/deps/mini-fa.css" rel="stylesheet">
|
<link href="/.cpr/deps/mini-fa.css?_={{ ts }}" rel="stylesheet">
|
||||||
<link href="/.cpr/deps/easymde.css" rel="stylesheet">
|
<link href="/.cpr/deps/easymde.css?_={{ ts }}" rel="stylesheet">
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<div id="mw">
|
<div id="mw">
|
||||||
@@ -43,7 +43,7 @@ var lightswitch = (function () {
|
|||||||
})();
|
})();
|
||||||
|
|
||||||
</script>
|
</script>
|
||||||
<script src="/.cpr/util.js"></script>
|
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||||
<script src="/.cpr/deps/easymde.js"></script>
|
<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
||||||
<script src="/.cpr/mde.js"></script>
|
<script src="/.cpr/mde.js?_={{ ts }}"></script>
|
||||||
</body></html>
|
</body></html>
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
<title>copyparty</title>
|
<title>copyparty</title>
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/msg.css">
|
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/msg.css?_={{ ts }}">
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
<title>copyparty</title>
|
<title>copyparty</title>
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/splash.css">
|
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/splash.css?_={{ ts }}">
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
<body>
|
<body>
|
||||||
@@ -35,7 +35,7 @@
|
|||||||
</table>
|
</table>
|
||||||
</td></tr></table>
|
</td></tr></table>
|
||||||
<div class="btns">
|
<div class="btns">
|
||||||
<a href="{{ avol[0] }}?stack">dump stack</a>
|
<a href="/?stack">dump stack</a>
|
||||||
</div>
|
</div>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -215,9 +215,31 @@
   color: #fff;
   font-style: italic;
 }
+#u2foot .warn {
+  font-size: 1.3em;
+  padding: .5em .8em;
+  margin: 1em -.6em;
+  color: #f74;
+  background: #322;
+  border: 1px solid #633;
+  border-width: .1em 0;
+  text-align: center;
+}
+#u2foot .warn span {
+  color: #f86;
+}
+html.light #u2foot .warn {
+  color: #b00;
+  background: #fca;
+  border-color: #f70;
+}
+html.light #u2foot .warn span {
+  color: #930;
+}
 #u2foot span {
   color: #999;
   font-size: .9em;
+  font-weight: normal;
 }
 #u2footfoot {
   margin-bottom: -1em;
@@ -235,6 +257,11 @@
   float: right;
   margin-bottom: -.3em;
 }
+.fsearch_explain {
+  padding-left: .7em;
+  font-size: 1.1em;
+  line-height: 0;
+}

@@ -1,101 +0,0 @@
-
-<div id="op_bup" class="opview opbox act">
-<div id="u2err"></div>
-<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
-<input type="hidden" name="act" value="bput" />
-<input type="file" name="f" multiple><br />
-<input type="submit" value="start upload">
-</form>
-</div>
-
-<div id="op_mkdir" class="opview opbox act">
-<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
-<input type="hidden" name="act" value="mkdir" />
-<input type="text" name="name" size="30">
-<input type="submit" value="mkdir">
-</form>
-</div>
-
-<div id="op_new_md" class="opview opbox">
-<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
-<input type="hidden" name="act" value="new_md" />
-<input type="text" name="name" size="30">
-<input type="submit" value="create doc">
-</form>
-</div>
-
-<div id="op_msg" class="opview opbox act">
-<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
-<input type="text" name="msg" size="30">
-<input type="submit" value="send msg">
-</form>
-</div>
-
-<div id="op_up2k" class="opview">
-<form id="u2form" method="post" enctype="multipart/form-data" onsubmit="return false;"></form>
-
-<table id="u2conf">
-<tr>
-<td><br />parallel uploads:</td>
-<td rowspan="2">
-<input type="checkbox" id="multitask" />
-<label for="multitask" tt="continue hashing other files while uploading">🏃</label>
-</td>
-<td rowspan="2">
-<input type="checkbox" id="ask_up" />
-<label for="ask_up" tt="ask for confirmation befofre upload starts">💭</label>
-</td>
-<td rowspan="2">
-<input type="checkbox" id="flag_en" />
-<label for="flag_en" tt="ensure only one tab is uploading at a time $N (other tabs must have this enabled too)">💤</label>
-</td>
-{%- if have_up2k_idx %}
-<td data-perm="read" rowspan="2">
-<input type="checkbox" id="fsearch" />
-<label for="fsearch" tt="don't actually upload, instead check if the files already $N exist on the server (will scan all folders you can read)">🔎</label>
-</td>
-{%- endif %}
-<td data-perm="read" rowspan="2" id="u2btn_cw"></td>
-</tr>
-<tr>
-<td>
-<a href="#" id="nthread_sub">–</a><input
-class="txtbox" id="nthread" value="2"/><a
-href="#" id="nthread_add">+</a><br />
-</td>
-</tr>
-</table>
-
-<div id="u2notbtn"></div>
-
-<div id="u2btn_ct">
-<div id="u2btn">
-<span id="u2bm"></span><br />
-drag/drop files<br />
-and folders here<br />
-(or click me)
-</div>
-</div>
-
-<div id="u2cards">
-<a href="#" act="ok" tt="completed successfully">ok <span>0</span></a><a
-href="#" act="ng" tt="failed / rejected / not-found">ng <span>0</span></a><a
-href="#" act="done" tt="ok and ng combined">done <span>0</span></a><a
-href="#" act="bz" tt="hashing or uploading" class="act">busy <span>0</span></a><a
-href="#" act="q" tt="idle, pending">que <span>0</span></a>
-</div>
-
-<table id="u2tab">
-<thead>
-<tr>
-<td>filename</td>
-<td>status</td>
-<td>progress<a href="#" id="u2cleanup" tt="remove completed uploads$N(makes it possible to upload a file after searching for it)">cleanup</a></td>
-</tr>
-</thead>
-<tbody></tbody>
-</table>
-
-<p id="u2foot"></p>
-<p id="u2footfoot" data-perm="write">( you can use the <a href="#" id="u2nope">basic uploader</a> if you don't need lastmod timestamps, resumable uploads, or progress bars )</p>
-</div>
@@ -6,21 +6,11 @@ if (!window['console'])
 };


-var clickev = window.Touch ? 'touchstart' : 'click',
+var is_touch = 'ontouchstart' in window,
     ANDROID = /(android)/i.test(navigator.userAgent);


 // error handler for mobile devices
-function hcroak(msg) {
-    document.body.innerHTML = msg;
-    window.onerror = undefined;
-    throw 'fatal_err';
-}
-function croak(msg) {
-    document.body.textContent = msg;
-    window.onerror = undefined;
-    throw msg;
-}
 function esc(txt) {
     return txt.replace(/[&"<>]/g, function (c) {
         return {
@@ -32,21 +22,28 @@ function esc(txt) {
     });
 }
 function vis_exh(msg, url, lineNo, columnNo, error) {
+    if (!window.onerror)
+        return;
+
     window.onerror = undefined;
     window['vis_exh'] = null;
-    var html = ['<h1>you hit a bug!</h1><p>please screenshot this error and send me a copy arigathanks gozaimuch (ed/irc.rizon.net or ed#2644)</p><p>',
-        esc(String(msg)), '</p><p>', esc(url + ' @' + lineNo + ':' + columnNo), '</p>'];
+    var html = ['<h1>you hit a bug!</h1><p style="font-size:1.3em;margin:0">try to <a href="#" onclick="localStorage.clear();location.reload();" style="text-decoration:underline;color:#fc0">reset copyparty settings</a> if you are stuck here</p><p>please send me a screenshot arigathanks gozaimuch: <code>ed/irc.rizon.net</code> or <code>ed#2644</code><br /> (and if you can, press F12 and include the "Console" tab in the screenshot too)</p><p>',
+        esc(url + ' @' + lineNo + ':' + columnNo), '<br />' + esc(String(msg)) + '</p>'];

     if (error) {
         var find = ['desc', 'stack', 'trace'];
         for (var a = 0; a < find.length; a++)
             if (String(error[find[a]]) !== 'undefined')
-                html.push('<h2>' + find[a] + '</h2>' +
+                html.push('<h3>' + find[a] + '</h3>' +
                     esc(String(error[find[a]])).replace(/\n/g, '<br />\n'));
     }
-    document.body.style.fontSize = '0.8em';
-    document.body.style.padding = '0 1em 1em 1em';
-    hcroak(html.join('\n'));
+    document.body.innerHTML = html.join('\n');
+    var s = mknod('style');
+    s.innerHTML = 'body{background:#333;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em} h1{margin:.5em 1em 0 0;padding:0} h3{border-top:1px solid #999;margin:0} code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} *{line-height:1.5em}';
+    document.head.appendChild(s);
+
+    throw 'fatal_err';
 }

@@ -56,6 +53,11 @@ var ebi = document.getElementById.bind(document),
     mknod = document.createElement.bind(document);


+function ctrl(e) {
+    return e && (e.ctrlKey || e.metaKey);
+}
+
+
 function ev(e) {
     e = e || window.event;
     if (!e)
@@ -67,6 +69,9 @@ function ev(e) {
     if (e.stopPropagation)
         e.stopPropagation();

+    if (e.stopImmediatePropagation)
+        e.stopImmediatePropagation();
+
     e.returnValue = false;
     return e;
 }
@@ -285,63 +290,6 @@ function makeSortable(table, cb) {
 }


-(function () {
-    var ops = QSA('#ops>a');
-    for (var a = 0; a < ops.length; a++) {
-        ops[a].onclick = opclick;
-    }
-})();
-
-
-function opclick(e) {
-    ev(e);
-
-    var dest = this.getAttribute('data-dest');
-    goto(dest);
-
-    swrite('opmode', dest || null);
-
-    var input = QS('.opview.act input:not([type="hidden"])')
-    if (input)
-        input.focus();
-}
-
-
-function goto(dest) {
-    var obj = QSA('.opview.act');
-    for (var a = obj.length - 1; a >= 0; a--)
-        clmod(obj[a], 'act');
-
-    obj = QSA('#ops>a');
-    for (var a = obj.length - 1; a >= 0; a--)
-        clmod(obj[a], 'act');
-
-    if (dest) {
-        var ui = ebi('op_' + dest);
-        clmod(ui, 'act', true);
-        QS('#ops>a[data-dest=' + dest + ']').className += " act";
-
-        var fn = window['goto_' + dest];
-        if (fn)
-            fn();
-    }
-
-    if (window['treectl'])
-        treectl.onscroll();
-}
-
-
-(function () {
-    goto();
-    var op = sread('opmode');
-    if (op !== null && op !== '.')
-        try {
-            goto(op);
-        }
-        catch (ex) { }
-})();
-
-
 function linksplit(rp) {
     var ret = [];
     var apath = '/';
@@ -416,6 +364,15 @@ function get_vpath() {
 }


+function get_pwd() {
+    var pwd = ('; ' + document.cookie).split('; cppwd=');
+    if (pwd.length < 2)
+        return null;
+
+    return pwd[1].split(';')[0];
+}
+
+
 function unix2iso(ts) {
     return new Date(ts * 1000).toISOString().replace("T", " ").slice(0, -5);
 }
@@ -437,6 +394,18 @@ function has(haystack, needle) {
 }


+function apop(arr, v) {
+    var ofs = arr.indexOf(v);
+    if (ofs !== -1)
+        arr.splice(ofs, 1);
+}
+
+
+function jcp(obj) {
+    return JSON.parse(JSON.stringify(obj));
+}
+
+
 function sread(key) {
     if (window.localStorage)
         return localStorage.getItem(key);
@@ -533,13 +502,20 @@ function hist_replace(url) {
 var tt = (function () {
     var r = {
         "tt": mknod("div"),
-        "en": bcfg_get('tooltips', true),
+        "en": true,
+        "el": null,
+        "skip": false
     };

     r.tt.setAttribute('id', 'tt');
     document.body.appendChild(r.tt);

-    function show() {
+    r.show = function () {
+        if (r.skip) {
+            r.skip = false;
+            return;
+        }
+
         var cfg = sread('tooltips');
         if (cfg !== null && cfg != '1')
             return;
@@ -548,42 +524,63 @@ var tt = (function () {
         if (!msg)
             return;

+        r.el = this;
         var pos = this.getBoundingClientRect(),
+            dir = this.getAttribute('ttd') || '',
             left = pos.left < window.innerWidth / 2,
-            top = pos.top < window.innerHeight / 2;
+            top = pos.top < window.innerHeight / 2,
+            big = this.className.indexOf(' ttb') !== -1;
+
+        if (dir.indexOf('u') + 1) top = false;
+        if (dir.indexOf('d') + 1) top = true;
+        if (dir.indexOf('l') + 1) left = false;
+        if (dir.indexOf('r') + 1) left = true;
+
+        clmod(r.tt, 'b', big);
         r.tt.style.top = top ? pos.bottom + 'px' : 'auto';
         r.tt.style.bottom = top ? 'auto' : (window.innerHeight - pos.top) + 'px';
         r.tt.style.left = left ? pos.left + 'px' : 'auto';
         r.tt.style.right = left ? 'auto' : (window.innerWidth - pos.right) + 'px';

         r.tt.innerHTML = msg.replace(/\$N/g, "<br />");
+        r.el.addEventListener('mouseleave', r.hide);
         clmod(r.tt, 'show', 1);
-    }
+    };

-    function hide() {
+    r.hide = function () {
         clmod(r.tt, 'show');
-    }
+        if (r.el)
+            r.el.removeEventListener('mouseleave', r.hide);
+    };

-    r.init = function () {
-        var _show = r.en ? show : null,
-            _hide = r.en ? hide : null;
+    r.tt.onclick = r.hide;

-        var o = QSA('*[tt]');
+    r.att = function (ctr) {
+        var _show = r.en ? r.show : null,
+            _hide = r.en ? r.hide : null,
+            o = ctr.querySelectorAll('*[tt]');
+
         for (var a = o.length - 1; a >= 0; a--) {
             o[a].onfocus = _show;
             o[a].onblur = _hide;
             o[a].onmouseenter = _show;
             o[a].onmouseleave = _hide;
         }
-        hide();
-    };
+        r.hide();
+    }

-    ebi('tooltips').onclick = function (e) {
-        ev(e);
-        r.en = !r.en;
-        bcfg_set('tooltips', r.en);
-        r.init();
+    r.init = function () {
+        var ttb = ebi('tooltips');
+        if (ttb) {
+            ttb.onclick = function (e) {
+                ev(e);
+                r.en = !r.en;
+                bcfg_set('tooltips', r.en);
+                r.init();
+            };
+            r.en = bcfg_get('tooltips', true)
+        }
+        r.att(document);
     };

     return r;
@@ -15,11 +15,6 @@
 }
 #ggrid>a[href$="/"]:before {
   content: '📂';
-  display: block;
-  position: absolute;
-  margin: -.1em -.4em;
-  text-shadow: 0 0 .1em #000;
-  font-size: 2em;
 }

@@ -27,8 +22,11 @@
 #ggrid>a:before {
   display: block;
   position: absolute;
-  margin: -.1em -.4em;
+  padding: .3em 0;
+  margin: -.4em;
   text-shadow: 0 0 .1em #000;
+  background: linear-gradient(135deg,rgba(255,255,255,0) 50%,rgba(255,255,255,0.2));
+  border-radius: .3em;
   font-size: 2em;
 }

docs/hls.html (new file, 51 lines)
@@ -0,0 +1,51 @@
+<!DOCTYPE html><html lang="en"><head>
+<meta charset="utf-8">
+<title>hls-test</title>
+<meta http-equiv="X-UA-Compatible" content="IE=edge">
+</head><body>
+
+<video id="vid" controls></video>
+<script src="hls.light.js"></script>
+<script>
+
+var video = document.getElementById('vid');
+var hls = new Hls({
+    debug: true,
+    autoStartLoad: false
+});
+hls.loadSource('live/v.m3u8');
+hls.attachMedia(video);
+hls.on(Hls.Events.MANIFEST_PARSED, function() {
+    hls.startLoad(0);
+});
+hls.on(Hls.Events.MEDIA_ATTACHED, function() {
+    video.muted = true;
+    video.play();
+});
+
+/*
+general good news:
+- doesn't need fixed-length segments; ok to let x264 pick optimal keyframes and slice on those
+- hls.js polls the m3u8 for new segments, scales the duration accordingly, seeking works great
+- the sfx will grow by 66 KiB since that's how small hls.js can get, wait thats not good
+
+# vod, creates m3u8 at the end, fixed keyframes, v bad
+ffmpeg -hide_banner -threads 0 -flags -global_header -i ..\CowboyBebopMovie-OP1.webm -vf scale=1280:-4,format=yuv420p -ac 2 -c:a libopus -b:a 128k -c:v libx264 -preset slow -crf 24 -maxrate:v 5M -bufsize:v 10M -g 120 -keyint_min 120 -sc_threshold 0 -hls_time 4 -hls_playlist_type vod -hls_segment_filename v%05d.ts v.m3u8
+
+# live, updates m3u8 as it goes, dynamic keyframes, streamable with hls.js
+ffmpeg -hide_banner -threads 0 -flags -global_header -i ..\..\CowboyBebopMovie-OP1.webm -vf scale=1280:-4,format=yuv420p -ac 2 -c:a libopus -b:a 128k -c:v libx264 -preset slow -crf 24 -maxrate:v 5M -bufsize:v 10M -f segment -segment_list v.m3u8 -segment_format mpegts -segment_list_flags live v%05d.ts
+
+# fmp4 (fragmented mp4), doesn't work with hls.js, gets duratoin 149:07:51 (536871s), probably the tkhd/mdhd 0xffffffff (timebase 8000? ok)
+ffmpeg -re -hide_banner -threads 0 -flags +cgop -i ..\..\CowboyBebopMovie-OP1.webm -vf scale=1280:-4,format=yuv420p -ac 2 -c:a libopus -b:a 128k -c:v libx264 -preset slow -crf 24 -maxrate:v 5M -bufsize:v 10M -f segment -segment_list v.m3u8 -segment_format fmp4 -segment_list_flags live v%05d.mp4
+
+# try 2, works, uses tempfiles for m3u8 updates, good, 6% smaller
+ffmpeg -re -hide_banner -threads 0 -flags +cgop -i ..\..\CowboyBebopMovie-OP1.webm -vf scale=1280:-4,format=yuv420p -ac 2 -c:a libopus -b:a 128k -c:v libx264 -preset slow -crf 24 -maxrate:v 5M -bufsize:v 10M -f hls -hls_segment_type fmp4 -hls_list_size 0 -hls_segment_filename v%05d.mp4 v.m3u8
+
+more notes
+- adding -hls_flags single_file makes duration wack during playback (for both fmp4 and ts), ok once finalized and refreshed, gives no size reduction anyways
+- bebop op has good keyframe spacing for testing hls.js, in particular it hops one seg back and immediately resumes if it hits eof with the explicit hls.startLoad(0); otherwise it jumps into the middle of a seg and becomes art
+- can probably -c:v copy most of the time, is there a way to check for cgop? todo
+
+*/
+</script>
+</body></html>
@@ -103,6 +103,15 @@ cat warks | while IFS= read -r x; do sqlite3 up2k.db "delete from mt where w = '
 # dump all dbs
 find -iname up2k.db | while IFS= read -r x; do sqlite3 "$x" 'select substr(w,1,12), rd, fn from up' | sed -r 's/\|/ \| /g' | while IFS= read -r y; do printf '%s | %s\n' "$x" "$y"; done; done
+
+# unschedule mtp scan for all files somewhere under "enc/"
+sqlite3 -readonly up2k.db 'select substr(up.w,1,16) from up inner join mt on mt.w = substr(up.w,1,16) where rd like "enc/%" and +mt.k = "t:mtp"' > keys; awk '{printf "delete from mt where w = \"%s\" and +k = \"t:mtp\";\n", $0}' <keys | tee /dev/stderr | sqlite3 up2k.db
+
+# compare metadata key "key" between two databases
+sqlite3 -readonly up2k.db.key-full 'select w, v from mt where k = "key" order by w' > k1; sqlite3 -readonly up2k.db 'select w, v from mt where k = "key" order by w' > k2; ok=0; ng=0; while IFS='|' read w k2; do k1="$(grep -E "^$w" k1 | sed -r 's/.*\|//')"; [ "$k1" = "$k2" ] && ok=$((ok+1)) || { ng=$((ng+1)); printf '%3s %3s %s\n' "$k1" "$k2" "$(sqlite3 -readonly up2k.db.key-full "select * from up where substr(w,1,16) = '$w'" | sed -r 's/\|/ | /g')"; }; done < <(cat k2); echo "match $ok diff $ng"
+
+# actually this is much better
+sqlite3 -readonly up2k.db.key-full 'select w, v from mt where k = "key" order by w' > k1; sqlite3 -readonly up2k.db 'select mt.w, mt.v, up.rd, up.fn from mt inner join up on mt.w = substr(up.w,1,16) where mt.k = "key" order by up.rd, up.fn' > k2; ok=0; ng=0; while IFS='|' read w k2 path; do k1="$(grep -E "^$w" k1 | sed -r 's/.*\|//')"; [ "$k1" = "$k2" ] && ok=$((ok+1)) || { ng=$((ng+1)); printf '%3s %3s %s\n' "$k1" "$k2" "$path"; }; done < <(cat k2); echo "match $ok diff $ng"


 ##
 ## media
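The last two one-liners above diff a single metadata key between two up2k databases; a rough Python equivalent, as a sketch only (the schema is assumed from the queries themselves: up(w, rd, fn) joined to mt(w, k, v) on the first 16 chars of w):

import sqlite3

def load(db_path, key):
    # map each wark prefix to (value, rd, fn) for the given metadata key
    db = sqlite3.connect(db_path)
    q = ("select mt.w, mt.v, up.rd, up.fn from mt "
         "inner join up on mt.w = substr(up.w,1,16) where mt.k = ?")
    return {w: (v, rd, fn) for w, v, rd, fn in db.execute(q, (key,))}

def compare(db1, db2, key="key"):
    a, b = load(db1, key), load(db2, key)
    ok = ng = 0
    for w, (v2, rd, fn) in b.items():
        v1 = a.get(w, (None, None, None))[0]
        if v1 == v2:
            ok += 1
        else:
            ng += 1
            print(v1, v2, rd + "/" + fn)
    print("match", ok, "diff", ng)

compare("up2k.db.key-full", "up2k.db")
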
@@ -157,7 +166,7 @@ dbg.asyncStore.pendingBreakpoints = {}
 about:config >> devtools.debugger.prefs-schema-version = -1

 # determine server version
-git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > /dev/shm/revs && cat /dev/shm/revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js 2>/dev/null | diff -wNarU0 - <(cat /mnt/Users/ed/Downloads/ref/{util,browser,up2k}.js) | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
+git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done


 ##
@@ -200,3 +209,4 @@ mk() { rm -rf /tmp/foo; sudo -u ed bash -c 'mkdir /tmp/foo; echo hi > /tmp/foo/b
 mk && t0="$(date)" && while true; do date -s "$(date '+ 1 hour')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
 mk && sudo -u ed flock /tmp/foo sleep 40 & sleep 1; ps aux | grep -E 'sleep 40$' && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; done; echo "$t0"
 mk && t0="$(date)" && for n in {1..40}; do date -s "$(date '+ 1 day')"; systemd-tmpfiles --clean; ls -1 /tmp | grep foo || break; tar -cf/dev/null /tmp/foo; done; echo "$t0"
+
@@ -1,6 +1,7 @@
 FROM alpine:3.13
 WORKDIR /z
 ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
+    ver_hashwasm=4.7.0 \
     ver_marked=1.1.0 \
     ver_ogvjs=1.8.0 \
     ver_mde=2.14.0 \
@@ -9,12 +10,6 @@ ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
     ver_zopfli=1.0.3


-# TODO
-# sha512.hw.js https://github.com/Daninet/hash-wasm
-# sha512.kc.js https://github.com/chm-diederichs/sha3-wasm
-# awk '/HMAC state/{o=1} /var HEAP/{o=0} /function hmac_reset/{o=1} /return \{/{o=0} /var __extends =/{o=1} /var Hash =/{o=0} /hmac_|pbkdf2_/{next} o{next} {gsub(/IllegalStateError/,"Exception")} {sub(/^ +/,"");sub(/^\/\/ .*/,"");sub(/;$/," ;")} 1' <sha512.ac.js.orig >sha512.ac.js; for fn in sha512.ac.js.orig sha512.ac.js; do wc -c <$fn; wc -c <$fn.gz ; for n in {1..9}; do printf '%8d %d bz\n' $(bzip2 -c$n <$fn | wc -c) $n; done; done
-
-
 # download;
 # the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
 RUN mkdir -p /z/dist/no-pk \
@@ -27,7 +22,11 @@ RUN mkdir -p /z/dist/no-pk \
     && wget https://github.com/codemirror/CodeMirror/archive/$ver_codemirror.tar.gz -O codemirror.tgz \
     && wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
     && wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
+    && wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \
     && unzip ogvjs.zip \
+    && (mkdir hash-wasm \
+        && cd hash-wasm \
+        && unzip ../hash-wasm.zip) \
     && (tar -xf asmcrypto.tgz \
         && cd asmcrypto.js-$ver_asmcrypto \
         && npm install ) \
@@ -64,7 +63,12 @@ RUN tar -xf zopfli.tgz \
 RUN cd asmcrypto.js-$ver_asmcrypto \
     && echo "export { Sha512 } from './hash/sha512/sha512';" > src/entry-export_all.ts \
     && node -r esm build.js \
-    && mv asmcrypto.all.es5.js /z/dist/sha512.js
+    && awk '/HMAC state/{o=1} /var HEAP/{o=0} /function hmac_reset/{o=1} /return \{/{o=0} /var __extends =/{o=1} /var Hash =/{o=0} /hmac_|pbkdf2_/{next} o{next} {gsub(/IllegalStateError/,"Exception")} {sub(/^ +/,"");sub(/^\/\/ .*/,"");sub(/;$/," ;")} 1' < asmcrypto.all.es5.js > /z/dist/sha512.ac.js


+# build hash-wasm
+RUN cd hash-wasm \
+    && mv sha512.umd.min.js /z/dist/sha512.hw.js
+
+
 # build ogvjs
@@ -6,10 +6,10 @@ import re, os, sys, time, shutil, signal, threading, tarfile, hashlib, platform,
 import subprocess as sp

 """
-run me with any version of python, i will unpack and run copyparty
+to edit this file, use HxD or "vim -b"
+(there is compressed stuff at the end)

-(but please don't edit this file with a text editor
-since that would probably corrupt the binary stuff at the end)
+run me with any version of python, i will unpack and run copyparty

 there's zero binaries! just plaintext python scripts all the way down
 so you can easily unpack the archive and inspect it for shady stuff
@@ -380,7 +380,7 @@ def run(tmp, j2):
         fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
     except Exception as ex:
         if not WINDOWS:
-            msg("\033[31mflock:", repr(ex))
+            msg("\033[31mflock:{!r}\033[0m".format(ex))

     t = threading.Thread(target=utime, args=(tmp,))
     t.daemon = True
@@ -23,13 +23,14 @@ def hdr(query):


 class Cfg(Namespace):
-    def __init__(self, a=[], v=[], c=None):
+    def __init__(self, a=None, v=None, c=None):
         super(Cfg, self).__init__(
-            a=a,
-            v=v,
+            a=a or [],
+            v=v or [],
             c=c,
             rproxy=0,
             ed=False,
+            nw=False,
             no_zip=False,
             no_scandir=False,
             no_sendfile=True,
@@ -16,8 +16,8 @@ from copyparty import util


 class Cfg(Namespace):
-    def __init__(self, a=[], v=[], c=None):
-        ex = {k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
+    def __init__(self, a=None, v=None, c=None):
+        ex = {k: False for k in "nw e2d e2ds e2dsa e2t e2ts e2tsr".split()}
         ex2 = {
             "mtp": [],
             "mte": "a",
@@ -27,7 +27,7 @@ class Cfg(Namespace):
             "rproxy": 0,
         }
         ex.update(ex2)
-        super(Cfg, self).__init__(a=a, v=v, c=c, **ex)
+        super(Cfg, self).__init__(a=a or [], v=v or [], c=c, **ex)


 class TestVFS(unittest.TestCase):
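Both Cfg test helpers replace the mutable default arguments (a=[], v=[]) with None plus an "or []" fallback; the reason, shown as a tiny standalone sketch (not project code), is that a default list is created once when the function is defined and then shared by every call:

def bad(x, acc=[]):
    acc.append(x)
    return acc

def good(x, acc=None):
    acc = acc or []
    acc.append(x)
    return acc

print(bad(1), bad(2))    # [1, 2] [1, 2]  <- both calls appended to the same list
print(good(1), good(2))  # [1] [2]
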
@@ -66,6 +66,14 @@ def get_ramdisk():
     for _ in range(10):
         try:
             _, _ = chkcmd("diskutil", "eraseVolume", "HFS+", "cptd", devname)
+            with open("/Volumes/cptd/.metadata_never_index", "w") as f:
+                f.write("orz")
+
+            try:
+                shutil.rmtree("/Volumes/cptd/.fseventsd")
+            except:
+                pass
+
             return subdir("/Volumes/cptd")
         except Exception as ex:
             print(repr(ex))
@@ -108,6 +116,9 @@ class VHttpSrv(object):
         aliases = ["splash", "browser", "browser2", "msg", "md", "mde"]
         self.j2 = {x: J2_FILES for x in aliases}

+    def cachebuster(self):
+        return "a"
+

 class VHttpConn(object):
     def __init__(self, args, asrv, log, buf):
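The cachebuster() stub keeps the test double in step with the new server-side method that presumably feeds the {{ ts }} value stamped into the templates earlier in this diff; a guess at what a real implementation could look like (purely illustrative, not copyparty's code):

import hashlib, os

def cachebuster(web_root="copyparty/web"):
    # derive a short token from the mtimes of the bundled web assets,
    # so the value changes whenever the shipped css/js changes
    mtimes = []
    for dirpath, _, files in os.walk(web_root):
        for fn in sorted(files):
            mtimes.append(str(os.path.getmtime(os.path.join(dirpath, fn))))
    return hashlib.md5("\n".join(mtimes).encode("utf-8")).hexdigest()[:8]
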
@@ -116,13 +127,13 @@ class VHttpConn(object):
         self.addr = ("127.0.0.1", "42069")
         self.args = args
         self.asrv = asrv
-        self.is_mp = False
+        self.nid = None
         self.log_func = log
         self.log_src = "a"
         self.lf_url = None
         self.hsrv = VHttpSrv()
+        self.nreq = 0
         self.nbyte = 0
-        self.workload = 0
         self.ico = None
         self.thumbcli = None
         self.t0 = time.time()