mirror of
				https://github.com/9001/copyparty.git
				synced 2025-10-30 19:43:37 +00:00 
			
		
		
		
	Compare commits
	
		
			277 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 3ba0cc20f1 | ||
|  | dd28de1796 | ||
|  | 9eecc9e19a | ||
|  | 6530cb6b05 | ||
|  | 41ce613379 | ||
|  | 5e2785caba | ||
|  | d7cc000976 | ||
|  | 50d8ff95ae | ||
|  | b2de1459b6 | ||
|  | f0ffbea0b2 | ||
|  | 199ccca0fe | ||
|  | 1d9b355743 | ||
|  | f0437fbb07 | ||
|  | abc404a5b7 | ||
|  | 04b9e21330 | ||
|  | 1044aa071b | ||
|  | 4c3192c8cc | ||
|  | 689e77a025 | ||
|  | 3bd89403d2 | ||
|  | b4800d9bcb | ||
|  | 05485e8539 | ||
|  | 0e03dc0868 | ||
|  | 352b1ed10a | ||
|  | 0db1244d04 | ||
|  | ece08b8179 | ||
|  | b8945ae233 | ||
|  | dcaf7b0a20 | ||
|  | f982cdc178 | ||
|  | b265e59834 | ||
|  | 4a843a6624 | ||
|  | 241ef5b99d | ||
|  | f39f575a9c | ||
|  | 1521307f1e | ||
|  | dd122111e6 | ||
|  | 00c177fa74 | ||
|  | f6c7e49eb8 | ||
|  | 1a8dc3d18a | ||
|  | 38a163a09a | ||
|  | 8f031246d2 | ||
|  | 8f3d97dde7 | ||
|  | 4acaf24d65 | ||
|  | 9a8dbbbcf8 | ||
|  | a3efc4c726 | ||
|  | 0278bf328f | ||
|  | 17ddd96cc6 | ||
|  | 0e82e79aea | ||
|  | 30f124c061 | ||
|  | e19d90fcfc | ||
|  | 184bbdd23d | ||
|  | 30b50aec95 | ||
|  | c3c3d81db1 | ||
|  | 49b7231283 | ||
|  | edbedcdad3 | ||
|  | e4ae5f74e6 | ||
|  | 2c7ffe08d7 | ||
|  | 3ca46bae46 | ||
|  | 7e82aaf843 | ||
|  | 315bd71adf | ||
|  | 2c612c9aeb | ||
|  | 36aee085f7 | ||
|  | d01bb69a9c | ||
|  | c9b1c48c72 | ||
|  | aea3843cf2 | ||
|  | 131b6f4b9a | ||
|  | 6efb8b735a | ||
|  | 223b7af2ce | ||
|  | e72c2a6982 | ||
|  | dd9b93970e | ||
|  | e4c7cd81a9 | ||
|  | 12b3a62586 | ||
|  | 2da3bdcd47 | ||
|  | c1dccbe0ba | ||
|  | 9629fcde68 | ||
|  | cae436b566 | ||
|  | 01714700ae | ||
|  | 51e6c4852b | ||
|  | b206c5d64e | ||
|  | 62c3272351 | ||
|  | c5d822c70a | ||
|  | 9c09b4061a | ||
|  | c26fb43ced | ||
|  | deb8f20db6 | ||
|  | 50e18ed8ff | ||
|  | 31f3895f40 | ||
|  | 615929268a | ||
|  | b8b15814cf | ||
|  | 7766fffe83 | ||
|  | 2a16c150d1 | ||
|  | 418c2166cc | ||
|  | a4dd44f648 | ||
|  | 5352f7cda7 | ||
|  | 5533b47099 | ||
|  | e9b14464ee | ||
|  | 4e986e5cd1 | ||
|  | 8a59b40c53 | ||
|  | 391caca043 | ||
|  | 171ce348d6 | ||
|  | c2cc729135 | ||
|  | e7e71b76f0 | ||
|  | a2af61cf6f | ||
|  | e111edd5e4 | ||
|  | 3375377371 | ||
|  | 0ced020c67 | ||
|  | c0d7aa9e4a | ||
|  | e5b3d2a312 | ||
|  | 7b4a794981 | ||
|  | 86a859de17 | ||
|  | b3aaa7bd0f | ||
|  | a90586e6a8 | ||
|  | 807f272895 | ||
|  | f050647b43 | ||
|  | 73baebbd16 | ||
|  | f327f698b9 | ||
|  | 8164910fe8 | ||
|  | 3498644055 | ||
|  | d31116b54c | ||
|  | aced110cdf | ||
|  | e9ab6aec77 | ||
|  | 15b261c861 | ||
|  | 970badce66 | ||
|  | 64304a9d65 | ||
|  | d1983553d2 | ||
|  | 6b15df3bcd | ||
|  | 730b1fff71 | ||
|  | c3add751e5 | ||
|  | 9da2dbdc1c | ||
|  | 977f09c470 | ||
|  | 4d0c6a8802 | ||
|  | 5345565037 | ||
|  | be38c27c64 | ||
|  | 82a0401099 | ||
|  | 33bea1b663 | ||
|  | f083acd46d | ||
|  | 5aacd15272 | ||
|  | cb7674b091 | ||
|  | 3899c7ad56 | ||
|  | d2debced09 | ||
|  | b86c0ddc48 | ||
|  | ba36f33bd8 | ||
|  | 49368a10ba | ||
|  | ac1568cacf | ||
|  | 862ca3439d | ||
|  | fdd4f9f2aa | ||
|  | aa2dc49ebe | ||
|  | cc23b7ee74 | ||
|  | f6f9fc5a45 | ||
|  | 26c8589399 | ||
|  | c2469935cb | ||
|  | 5e7c20955e | ||
|  | 967fa38108 | ||
|  | 280fe8e36b | ||
|  | 03ca96ccc3 | ||
|  | b5b8a2c9d5 | ||
|  | 0008832730 | ||
|  | c9b385db4b | ||
|  | c951b66ae0 | ||
|  | de735f3a45 | ||
|  | 19161425f3 | ||
|  | c69e8d5bf4 | ||
|  | 3d3bce2788 | ||
|  | 1cb0dc7f8e | ||
|  | cd5c56e601 | ||
|  | 8c979905e4 | ||
|  | 4d69f15f48 | ||
|  | 083f6572f7 | ||
|  | 4e7dd75266 | ||
|  | 3eb83f449b | ||
|  | d31f69117b | ||
|  | f5f9e3ac97 | ||
|  | 598d6c598c | ||
|  | 744727087a | ||
|  | f93212a665 | ||
|  | 6dade82d2c | ||
|  | 6b737bf1d7 | ||
|  | 94dbd70677 | ||
|  | 527ae0348e | ||
|  | 79629c430a | ||
|  | 908dd61be5 | ||
|  | 88f77b8cca | ||
|  | 1e846657d1 | ||
|  | ce70f62a88 | ||
|  | bca0cdbb62 | ||
|  | 1ee11e04e6 | ||
|  | 6eef44f212 | ||
|  | 8bd94f4a1c | ||
|  | 4bc4701372 | ||
|  | dfd89b503a | ||
|  | 060dc54832 | ||
|  | f7a4ea5793 | ||
|  | 71b478e6e2 | ||
|  | ed8fff8c52 | ||
|  | 95dc78db10 | ||
|  | addeac64c7 | ||
|  | d77ec22007 | ||
|  | 20030c91b7 | ||
|  | 8b366e255c | ||
|  | 6da366fcb0 | ||
|  | 2fa35f851e | ||
|  | e4ca4260bb | ||
|  | b69aace8d8 | ||
|  | 79097bb43c | ||
|  | 806fac1742 | ||
|  | 4f97d7cf8d | ||
|  | 42acc457af | ||
|  | c02920607f | ||
|  | 452885c271 | ||
|  | 5c242a07b6 | ||
|  | 088899d59f | ||
|  | 1faff2a37e | ||
|  | 23c8d3d045 | ||
|  | a033388d2b | ||
|  | 82fe45ac56 | ||
|  | bcb7fcda6b | ||
|  | 726a98100b | ||
|  | 2f021a0c2b | ||
|  | eb05cb6c6e | ||
|  | 7530af95da | ||
|  | 8399e95bda | ||
|  | 3b4dfe326f | ||
|  | 2e787a254e | ||
|  | f888bed1a6 | ||
|  | d865e9f35a | ||
|  | fc7fe70f66 | ||
|  | 5aff39d2b2 | ||
|  | d1be37a04a | ||
|  | b0fd8bf7d4 | ||
|  | b9cf8f3973 | ||
|  | 4588f11613 | ||
|  | 1a618c3c97 | ||
|  | d500a51d97 | ||
|  | 734e9d3874 | ||
|  | bd5cfc2f1b | ||
|  | 89f88ee78c | ||
|  | b2ae14695a | ||
|  | 19d86b44d9 | ||
|  | 85be62e38b | ||
|  | 80f3d90200 | ||
|  | 0249fa6e75 | ||
|  | 2d0696e048 | ||
|  | ff32ec515e | ||
|  | a6935b0293 | ||
|  | 63eb08ba9f | ||
|  | e5b67d2b3a | ||
|  | 9e10af6885 | ||
|  | 42bc9115d2 | ||
|  | 0a569ce413 | ||
|  | 9a16639a61 | ||
|  | 57953c68c6 | ||
|  | 088d08963f | ||
|  | 7bc8196821 | ||
|  | 7715299dd3 | ||
|  | b8ac9b7994 | ||
|  | 98e7d8f728 | ||
|  | e7fd871ffe | ||
|  | 14aab62f32 | ||
|  | cb81fe962c | ||
|  | fc970d2dea | ||
|  | b0e203d1f9 | ||
|  | 37cef05b19 | ||
|  | 5886a42901 | ||
|  | 2fd99f807d | ||
|  | 3d4cbd7d10 | ||
|  | f10d03c238 | ||
|  | f9a66ffb0e | ||
|  | 777a50063d | ||
|  | 0bb9154747 | ||
|  | 30c3f45072 | ||
|  | 0d5ca67f32 | ||
|  | 4a8bf6aebd | ||
|  | b11db090d8 | ||
|  | 189391fccd | ||
|  | 86d4c43909 | ||
|  | 5994f40982 | ||
|  | 076d32dee5 | ||
|  | 16c8e38ecd | ||
|  | eacbcda8e5 | ||
|  | 59be76cd44 | 
							
								
								
									
										5
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										5
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -9,6 +9,7 @@ buildenv/ | ||||
| build/ | ||||
| dist/ | ||||
| sfx/ | ||||
| py2/ | ||||
| .venv/ | ||||
|  | ||||
| # ide | ||||
| @@ -20,3 +21,7 @@ sfx/ | ||||
| # derived | ||||
| copyparty/web/deps/ | ||||
| srv/ | ||||
|  | ||||
| # state/logs | ||||
| up.*.txt | ||||
| .hist/ | ||||
							
								
								
									
										265
									
								
								README.md
									
									
									
									
									
								
							
							
						
						
									
										265
									
								
								README.md
									
									
									
									
									
								
							| @@ -19,7 +19,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down | ||||
| ## readme toc | ||||
|  | ||||
| * top | ||||
|     * **[quickstart](#quickstart)** - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set! | ||||
|     * [quickstart](#quickstart) - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set! | ||||
|         * [on servers](#on-servers) - you may also want these, especially on servers | ||||
|         * [on debian](#on-debian) - recommended additional steps on debian | ||||
|     * [notes](#notes) - general notes | ||||
| @@ -46,21 +46,31 @@ turn your phone or raspi into a portable file server with resumable uploads/down | ||||
|     * [markdown viewer](#markdown-viewer) - and there are *two* editors | ||||
|     * [other tricks](#other-tricks) | ||||
|     * [searching](#searching) - search by size, date, path/name, mp3-tags, ... | ||||
| * [server config](#server-config) | ||||
| * [server config](#server-config) - using arguments or config files, or a mix of both | ||||
|     * [file indexing](#file-indexing) | ||||
|     * [upload rules](#upload-rules) - set upload rules using volume flags | ||||
|     * [compress uploads](#compress-uploads) - files can be autocompressed on upload | ||||
|     * [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else | ||||
|     * [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload | ||||
|     * [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags | ||||
|     * [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags, also see [./bin/mtag/README.md](./bin/mtag/README.md) | ||||
|     * [upload events](#upload-events) - trigger a script/program on each upload | ||||
|     * [complete examples](#complete-examples) | ||||
| * [browser support](#browser-support) - TLDR: yes | ||||
| * [client examples](#client-examples) - interact with copyparty using non-browser clients | ||||
| * [up2k](#up2k) - quick outline of the up2k protocol, see [uploading](#uploading) for the web-client | ||||
|     * [why chunk-hashes](#why-chunk-hashes) - a single sha512 would be better, right? | ||||
| * [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload | ||||
|     * [client-side](#client-side) - when uploading files | ||||
| * [security](#security) - some notes on hardening | ||||
|     * [gotchas](#gotchas) - behavior that might be unexpected | ||||
| * [recovering from crashes](#recovering-from-crashes) | ||||
|     * [client crashes](#client-crashes) | ||||
|         * [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads | ||||
| * [HTTP API](#HTTP-API) | ||||
|     * [read](#read) | ||||
|     * [write](#write) | ||||
|     * [admin](#admin) | ||||
|     * [general](#general) | ||||
| * [dependencies](#dependencies) - mandatory deps | ||||
|     * [optional dependencies](#optional-dependencies) - install these to enable bonus features | ||||
|     * [install recommended deps](#install-recommended-deps) | ||||
| @@ -68,6 +78,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down | ||||
| * [sfx](#sfx) - there are two self-contained "binaries" | ||||
|     * [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features | ||||
| * [install on android](#install-on-android) | ||||
| * [reporting bugs](#reporting-bugs) - ideas for context to include in bug reports | ||||
| * [building](#building) | ||||
|     * [dev env setup](#dev-env-setup) | ||||
|     * [just the sfx](#just-the-sfx) | ||||
| @@ -149,14 +160,15 @@ feature summary | ||||
| * browser | ||||
|   * ☑ [navpane](#navpane) (directory tree sidebar) | ||||
|   * ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename)) | ||||
|   * ☑ audio player (with OS media controls) | ||||
|   * ☑ audio player (with OS media controls and opus transcoding) | ||||
|   * ☑ image gallery with webm player | ||||
|   * ☑ textfile browser with syntax highlighting | ||||
|   * ☑ [thumbnails](#thumbnails) | ||||
|     * ☑ ...of images using Pillow | ||||
|     * ☑ ...of videos using FFmpeg | ||||
|     * ☑ ...of audio (spectrograms) using FFmpeg | ||||
|     * ☑ cache eviction (max-age; maybe max-size eventually) | ||||
|   * ☑ SPA (browse while uploading) | ||||
|     * if you use the navpane to navigate, not folders in the file list | ||||
| * server indexing | ||||
|   * ☑ [locate files by contents](#file-search) | ||||
|   * ☑ search by name/path/date/size | ||||
| @@ -214,6 +226,7 @@ some improvement ideas | ||||
| * Windows: python 2.7 cannot index non-ascii filenames with `-e2d` | ||||
| * Windows: python 2.7 cannot handle filenames with mojibake | ||||
| * `--th-ff-jpg` may fix video thumbnails on some FFmpeg versions (macos, some linux) | ||||
| * `--th-ff-swr` may fix audio thumbnails on some FFmpeg versions | ||||
|  | ||||
| ## general bugs | ||||
|  | ||||
| @@ -222,6 +235,10 @@ some improvement ideas | ||||
|  | ||||
| ## not my bugs | ||||
|  | ||||
| * iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11) | ||||
|   * *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume | ||||
|     * "future" because `AudioContext` is broken in the current iOS version (15.1), maybe one day... | ||||
|  | ||||
| * Windows: folders cannot be accessed if the name ends with `.` | ||||
|   * python or windows bug | ||||
|  | ||||
| @@ -238,6 +255,7 @@ some improvement ideas | ||||
|  | ||||
| * is it possible to block read-access to folders unless you know the exact URL for a particular file inside? | ||||
|   * yes, using the [`g` permission](#accounts-and-volumes), see the examples there | ||||
|   * you can also do this with linux filesystem permissions; `chmod 111 music` will make it possible to access files and folders inside the `music` folder but not list the immediate contents -- also works with other software, not just copyparty | ||||
|  | ||||
| * can I make copyparty download a file to my server if I give it a URL? | ||||
|   * not officially, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible | ||||
| @@ -245,7 +263,10 @@ some improvement ideas | ||||
|  | ||||
| # accounts and volumes | ||||
|  | ||||
| per-folder, per-user permissions | ||||
| per-folder, per-user permissions  - if your setup is getting complex, consider making a [config file](./docs/example.conf) instead of using arguments | ||||
| * much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if logged in as admin) | ||||
|  | ||||
| configuring accounts/volumes with arguments: | ||||
| * `-a usr:pwd` adds account `usr` with password `pwd` | ||||
| * `-v .::r` adds current-folder `.` as the webroot, `r`eadable by anyone | ||||
|   * the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set | ||||
| @@ -300,8 +321,10 @@ the browser has the following hotkeys  (always qwerty) | ||||
| * `B` toggle breadcrumbs / [navpane](#navpane) | ||||
| * `I/K` prev/next folder | ||||
| * `M` parent folder (or unexpand current) | ||||
| * `V` toggle folders / textfiles in the navpane | ||||
| * `G` toggle list / [grid view](#thumbnails) | ||||
| * `T` toggle thumbnails / icons | ||||
| * `ESC` close various things | ||||
| * `ctrl-X` cut selected files/folders | ||||
| * `ctrl-V` paste | ||||
| * `F2` [rename](#batch-rename) selected file/folder | ||||
| @@ -311,6 +334,10 @@ the browser has the following hotkeys  (always qwerty) | ||||
|   * ctrl+`Up/Down` move cursor and scroll viewport | ||||
|   * `Space` toggle file selection | ||||
|   * `Ctrl-A` toggle select all | ||||
| * when a textfile is open: | ||||
|   * `I/K` prev/next textfile | ||||
|   * `S` toggle selection of open file | ||||
|   * `M` close textfile | ||||
| * when playing audio: | ||||
|   * `J/L` prev/next song | ||||
|   * `U/O` skip 10sec back/forward | ||||
| @@ -349,9 +376,13 @@ switching between breadcrumbs or navpane | ||||
|  | ||||
| click the `🌲` or pressing the `B` hotkey to toggle between breadcrumbs path (default), or a navpane (tree-browser sidebar thing) | ||||
|  | ||||
| * `[-]` and `[+]` (or hotkeys `A`/`D`) adjust the size | ||||
| * `[v]` jumps to the currently open folder | ||||
| * `[+]` and `[-]` (or hotkeys `A`/`D`) adjust the size | ||||
| * `[🎯]` jumps to the currently open folder | ||||
| * `[📃]` toggles between showing folders and textfiles | ||||
| * `[📌]` shows the name of all parent folders in a docked panel | ||||
| * `[a]` toggles automatic widening as you go deeper | ||||
| * `[↵]` toggles wordwrap | ||||
| * `[👀]` show full name on hover (if wordwrap is off) | ||||
|  | ||||
|  | ||||
| ## thumbnails | ||||
| @@ -362,9 +393,12 @@ press `g` to toggle grid-view instead of the file listing,  and `t` toggles icon | ||||
|  | ||||
| it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are | ||||
|  | ||||
| audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`) | ||||
|  | ||||
| images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg` | ||||
|  | ||||
| in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked | ||||
| * indicated by the audio files having the ▶ icon instead of 💾 | ||||
|  | ||||
|  | ||||
| ## zip downloads | ||||
| @@ -417,6 +451,8 @@ see [up2k](#up2k) for details on how it works | ||||
|  | ||||
| **protip:** you can avoid scaring away users with [docs/minimal-up2k.html](docs/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png) | ||||
|  | ||||
| **protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress | ||||
|  | ||||
| the up2k UI is the epitome of polished intuitive experiences: | ||||
| * "parallel uploads" specifies how many chunks to upload at the same time | ||||
| * `[🏃]` analysis of other files should continue while one is uploading | ||||
| @@ -433,7 +469,7 @@ and then theres the tabs below it, | ||||
|   * plus up to 3 entries each from `[done]` and `[que]` for context | ||||
| * `[que]` is all the files that are still queued | ||||
|  | ||||
| note that since up2k has to read each file twice, `[🎈 bup]` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD) | ||||
| note that since up2k has to read each file twice, `[🎈 bup]` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD, or if you're uploading from a cuo2duo) | ||||
|  | ||||
| if you are resuming a massive upload and want to skip hashing the files which already finished, you can enable `turbo` in the `[⚙️] config` tab, but please read the tooltip on that button | ||||
|  | ||||
| @@ -453,8 +489,6 @@ the files will be hashed on the client-side, and each hash is sent to the server | ||||
| files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]` | ||||
| * the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much | ||||
|  | ||||
| adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files (or just refresh the page) | ||||
|  | ||||
|  | ||||
| ### unpost | ||||
|  | ||||
| @@ -542,6 +576,10 @@ and there are *two* editors | ||||
|  | ||||
| * you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab` | ||||
|  | ||||
| * enabling the audio equalizer can help make gapless albums fully gapless in some browsers (chrome), so consider leaving it on with all the values at zero | ||||
|  | ||||
| * get a plaintext file listing by adding `?ls=t` to a URL, or a compact colored one with `?ls=v` (for unix terminals) | ||||
|  | ||||
| * if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1 | ||||
|  | ||||
| * click the bottom-left `π` to open a javascript prompt for debugging | ||||
| @@ -570,6 +608,12 @@ add the argument `-e2ts` to also scan/index tags from music files, which brings | ||||
|  | ||||
| # server config | ||||
|  | ||||
| using arguments or config files, or a mix of both: | ||||
| * config files (`-c some.conf`) can set additional commandline arguments; see [./docs/example.conf](docs/example.conf) | ||||
| * `kill -s USR1` (same as `systemctl reload copyparty`) to reload accounts and volumes from config files without restarting | ||||
|   * or click the `[reload cfg]` button in the control-panel when logged in as admin  | ||||
|  | ||||
|  | ||||
| ## file indexing | ||||
|  | ||||
| file indexing relies on two database tables, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`), stored in `.hist/up2k.db`. Configuration can be done through arguments, volume flags, or a mix of both. | ||||
| @@ -582,29 +626,33 @@ through arguments: | ||||
| * `-e2ts` also scans for tags in all files that don't have tags yet | ||||
| * `-e2tsr` also deletes all existing tags, doing a full reindex | ||||
|  | ||||
| the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling: | ||||
| the same arguments can be set as volume flags, in addition to `d2d`, `d2ds`, `d2t`, `d2ts` for disabling: | ||||
| * `-v ~/music::r:c,e2dsa,e2tsr` does a full reindex of everything on startup | ||||
| * `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on | ||||
| * `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*` | ||||
| * `-v ~/music::r:c,d2ds` disables on-boot scans; only index new uploads | ||||
| * `-v ~/music::r:c,d2ts` same except only affecting tags | ||||
|  | ||||
| note: | ||||
| * the parser can finally handle `c,e2dsa,e2tsr` so you no longer have to `c,e2dsa:c,e2tsr` | ||||
| * `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise | ||||
| * the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher | ||||
|  | ||||
| to save some time, you can choose to only index filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash` or the volume-flag `:c,dhash`, this has the following consequences: | ||||
| to save some time, you can provide a regex pattern for filepaths to only index by filename/path/size/last-modified (and not the hash of the file contents) by setting `--no-hash \.iso$` or the volume-flag `:c,nohash=\.iso$`, this has the following consequences: | ||||
| * initial indexing is way faster, especially when the volume is on a network disk | ||||
| * makes it impossible to [file-search](#file-search) | ||||
| * if someone uploads the same file contents, the upload will not be detected as a dupe, so it will not get symlinked or rejected | ||||
|  | ||||
| if you set `--no-hash`, you can enable hashing for specific volumes using flag `:c,ehash` | ||||
| similarly, you can fully ignore files/folders using `--no-idx [...]` and `:c,noidx=\.iso$` | ||||
|  | ||||
| if you set `--no-hash [...]` globally, you can enable hashing for specific volumes using flag `:c,nohash=` | ||||
|  | ||||
|  | ||||
| ## upload rules | ||||
|  | ||||
| set upload rules using volume flags,  some examples: | ||||
|  | ||||
| * `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: b, k, m, g) | ||||
| * `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: `b`, `k`, `m`, `g`) | ||||
| * `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`: | ||||
| * `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1) | ||||
| * `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format | ||||
| @@ -638,6 +686,12 @@ things to note, | ||||
| * the files will be indexed after compression, so dupe-detection and file-search will not work as expected | ||||
|  | ||||
| some examples, | ||||
| * `-v inc:inc:w:c,pk=xz,0`   | ||||
|   folder named inc, shared at inc, write-only for everyone, forces xz compression at level 0 | ||||
| * `-v inc:inc:w:c,pk`   | ||||
|   same write-only inc, but forces gz compression (default) instead of xz | ||||
| * `-v inc:inc:w:c,gz`   | ||||
|   allows (but does not force) gz compression if client uploads to `/inc?pk` or `/inc?gz` or `/inc?gz=4` | ||||
|  | ||||
|  | ||||
| ## database location | ||||
| @@ -682,7 +736,7 @@ see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copy | ||||
|  | ||||
| ## file parser plugins | ||||
|  | ||||
| provide custom parsers to index additional tags | ||||
| provide custom parsers to index additional tags, also see [./bin/mtag/README.md](./bin/mtag/README.md) | ||||
|  | ||||
| copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volume flag), there is a default timeout of 30sec | ||||
|  | ||||
| @@ -696,6 +750,25 @@ copyparty can invoke external programs to collect additional metadata for files | ||||
| * `-mtp arch,built,ver,orig=an,eexe,edll,~/bin/exe.py` runs `~/bin/exe.py` to get properties about windows-binaries only if file is not audio (`an`) and file extension is exe or dll | ||||
|  | ||||
|  | ||||
| ## upload events | ||||
|  | ||||
| trigger a script/program on each upload  like so: | ||||
|  | ||||
| ``` | ||||
| -v /mnt/inc:inc:w:c,mte=+a1:c,mtp=a1=ad,/usr/bin/notify-send | ||||
| ``` | ||||
|  | ||||
| so filesystem location `/mnt/inc` shared at `/inc`, write-only for everyone, appending `a1` to the list of tags to index, and using `/usr/bin/notify-send` to "provide" that tag | ||||
|  | ||||
| that'll run the command `notify-send` with the path to the uploaded file as the first and only argument (so on linux it'll show a notification on-screen) | ||||
|  | ||||
| note that it will only trigger on new unique files, not dupes | ||||
|  | ||||
| and it will occupy the parsing threads, so fork anything expensive, or if you want to intentionally queue/singlethread you can combine it with `--mtag-mt 1` | ||||
|  | ||||
| if this becomes popular maybe there should be a less janky way to do it actually | ||||
|  | ||||
|  | ||||
| ## complete examples | ||||
|  | ||||
| * read-only music server with bpm and key scanning   | ||||
| @@ -722,7 +795,7 @@ TLDR: yes | ||||
| | zip selection   |  -  | yep  | yep  | yep  |  yep  | yep  | yep | yep  | | ||||
| | file rename     |  -  | yep  | yep  | yep  |  yep  | yep  | yep | yep  | | ||||
| | file cut/paste  |  -  | yep  | yep  | yep  |  yep  | yep  | yep | yep  | | ||||
| | navpane         |  -  | `*2` | yep  | yep  |  yep  | yep  | yep | yep  | | ||||
| | navpane         |  -  | yep  | yep  | yep  |  yep  | yep  | yep | yep  | | ||||
| | image viewer    |  -  | yep  | yep  | yep  |  yep  | yep  | yep | yep  | | ||||
| | video player    |  -  | yep  | yep  | yep  |  yep  | yep  | yep | yep  | | ||||
| | markdown editor |  -  |  -   | yep  | yep  |  yep  | yep  | yep | yep  | | ||||
| @@ -734,8 +807,7 @@ TLDR: yes | ||||
| * internet explorer 6 to 8 behave the same | ||||
| * firefox 52 and chrome 49 are the final winxp versions | ||||
| * `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`) | ||||
| * `*2` causes a full-page refresh on each navigation | ||||
| * `*3` using a wasm decoder which consumes a bit more power | ||||
| * `*3` iOS 11 and newer, opus only, and requires FFmpeg on the server | ||||
|  | ||||
| quick summary of more eccentric web-browsers trying to view a directory index: | ||||
|  | ||||
| @@ -755,8 +827,8 @@ quick summary of more eccentric web-browsers trying to view a directory index: | ||||
| interact with copyparty using non-browser clients | ||||
|  | ||||
| * javascript: dump some state into a file (two separate examples) | ||||
|   * `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});` | ||||
|   * `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');` | ||||
|   * `await fetch('//127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});` | ||||
|   * `var xhr = new XMLHttpRequest(); xhr.open('POST', '//127.0.0.1:3923/msgs?raw'); xhr.send('foo');` | ||||
|  | ||||
| * curl/wget: upload some files (post=file, chunk=stdin) | ||||
|   * `post(){ curl -b cppwd=wark -F act=bput -F f=@"$1" http://127.0.0.1:3923/;}`   | ||||
| @@ -766,6 +838,14 @@ interact with copyparty using non-browser clients | ||||
|   * `chunk(){ curl -b cppwd=wark -T- http://127.0.0.1:3923/;}`   | ||||
|     `chunk <movie.mkv` | ||||
|  | ||||
| * bash: when curl and wget is not available or too boring | ||||
|   * `(printf 'PUT /junk?pw=wark HTTP/1.1\r\n\r\n'; cat movie.mkv) | nc 127.0.0.1 3923` | ||||
|   * `(printf 'PUT / HTTP/1.1\r\n\r\n'; cat movie.mkv) >/dev/tcp/127.0.0.1/3923` | ||||
|  | ||||
| * python: [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm) | ||||
|   * file uploads, file-search, autoresume of aborted/broken uploads | ||||
|   * see [./bin/README.md#up2kpy](bin/README.md#up2kpy) | ||||
|  | ||||
| * FUSE: mount a copyparty server as a local filesystem | ||||
|   * cross-platform python client available in [./bin/](bin/) | ||||
|   * [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md) | ||||
| @@ -777,7 +857,7 @@ copyparty returns a truncated sha512sum of your PUT/POST as base64; you can gene | ||||
|     b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;} | ||||
|     b512 <movie.mkv | ||||
|  | ||||
| you can provide passwords using cookie 'cppwd=hunter2', as a url query `?pw=hunter2`, or with basic-authentication (either as the username or password) | ||||
| you can provide passwords using cookie `cppwd=hunter2`, as a url query `?pw=hunter2`, or with basic-authentication (either as the username or password) | ||||
|  | ||||
|  | ||||
| # up2k | ||||
| @@ -813,14 +893,12 @@ hashwasm would solve the streaming issue but reduces hashing speed for sha512 (x | ||||
|  | ||||
| defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload | ||||
|  | ||||
| you can ignore the `cannot efficiently use multiple CPU cores` message, very unlikely to be a problem | ||||
|  | ||||
| below are some tweaks roughly ordered by usefulness: | ||||
|  | ||||
| * `-q` disables logging and can help a bunch, even when combined with `-lo` to redirect logs to file | ||||
| * `--http-only` or `--https-only` (unless you want to support both protocols) will reduce the delay before a new connection is established | ||||
| * `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set | ||||
| * `--no-hash` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable | ||||
| * `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable | ||||
| * `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example: | ||||
|   * huge amount of short-lived connections | ||||
|   * really heavy traffic (downloads/uploads) | ||||
| @@ -828,6 +906,21 @@ below are some tweaks roughly ordered by usefulness: | ||||
|   ...however it adds an overhead to internal communication so it might be a net loss, see if it works 4 u | ||||
|  | ||||
|  | ||||
| ## client-side | ||||
|  | ||||
| when uploading files, | ||||
|  | ||||
| * chrome is recommended, at least compared to firefox: | ||||
|   * up to 90% faster when hashing, especially on SSDs | ||||
|   * up to 40% faster when uploading over extremely fast internets | ||||
|   * but [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) can be 40% faster than chrome again | ||||
|  | ||||
| * if you're cpu-bottlenecked, or the browser is maxing a cpu core: | ||||
|   * up to 30% faster uploads if you hide the upload status list by switching away from the `[🚀]` up2k ui-tab (or closing it) | ||||
|     * switching to another browser-tab also works, the favicon will update every 10 seconds in that case | ||||
|   * unlikely to be a problem, but can happen when uploading many small files, or your internet is too fast, or PC too slow | ||||
|  | ||||
|  | ||||
| # security | ||||
|  | ||||
| some notes on hardening | ||||
| @@ -851,6 +944,104 @@ behavior that might be unexpected | ||||
| * users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example | ||||
|  | ||||
|  | ||||
| # recovering from crashes | ||||
|  | ||||
| ## client crashes | ||||
|  | ||||
| ### firefox wsod | ||||
|  | ||||
| firefox 87 can crash during uploads  -- the entire browser goes, including all other browser tabs, everything turns white | ||||
|  | ||||
| however you can hit `F12` in the up2k tab and use the devtools to see how far you got in the uploads: | ||||
|  | ||||
| * get a complete list of all uploads, organized by status (ok / no-good / busy / queued):   | ||||
|   `var tabs = { ok:[], ng:[], bz:[], q:[] }; for (var a of up2k.ui.tab) tabs[a.in].push(a); tabs` | ||||
|  | ||||
| * list of filenames which failed:   | ||||
|   `var ng = []; for (var a of up2k.ui.tab) if (a.in != 'ok') ng.push(a.hn.split('<a href=\"').slice(-1)[0].split('\">')[0]); ng` | ||||
|  | ||||
| * send the list of filenames to copyparty for safekeeping:   | ||||
|   `await fetch('/inc', {method:'PUT', body:JSON.stringify(ng,null,1)})` | ||||
|  | ||||
|  | ||||
| # HTTP API | ||||
|  | ||||
| * table-column `params` = URL parameters; `?foo=bar&qux=...` | ||||
| * table-column `body` = POST payload | ||||
| * method `jPOST` = json post | ||||
| * method `mPOST` = multipart post | ||||
| * method `uPOST` = url-encoded post | ||||
| * `FILE` = conventional HTTP file upload entry (rfc1867 et al, filename in `Content-Disposition`) | ||||
|  | ||||
| authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo` | ||||
|  | ||||
| ## read | ||||
|  | ||||
| | method | params | result | | ||||
| |--|--|--| | ||||
| | GET | `?ls` | list files/folders at URL as JSON | | ||||
| | GET | `?ls&dots` | list files/folders at URL as JSON, including dotfiles | | ||||
| | GET | `?ls=t` | list files/folders at URL as plaintext | | ||||
| | GET | `?ls=v` | list files/folders at URL, terminal-formatted | | ||||
| | GET | `?b` | list files/folders at URL as simplified HTML | | ||||
| | GET | `?tree=.` | list one level of subdirectories inside URL | | ||||
| | GET | `?tree` | list one level of subdirectories for each level until URL | | ||||
| | GET | `?tar` | download everything below URL as a tar file | | ||||
| | GET | `?zip=utf-8` | download everything below URL as a zip file | | ||||
| | GET | `?ups` | show recent uploads from your IP | | ||||
| | GET | `?ups&filter=f` | ...where URL contains `f` | | ||||
| | GET | `?mime=foo` | specify return mimetype `foo` | | ||||
| | GET | `?raw` | get markdown file at URL as plaintext | | ||||
| | GET | `?txt` | get file at URL as plaintext | | ||||
| | GET | `?txt=iso-8859-1` | ...with specific charset | | ||||
| | GET | `?th` | get image/video at URL as thumbnail | | ||||
| | GET | `?th=opus` | convert audio file to 128kbps opus | | ||||
| | GET | `?th=caf` | ...in the iOS-proprietary container | | ||||
|  | ||||
| | method | body | result | | ||||
| |--|--|--| | ||||
| | jPOST | `{"q":"foo"}` | do a server-wide search; see the `[🔎]` search tab `raw` field for syntax | | ||||
|  | ||||
| | method | params | body | result | | ||||
| |--|--|--|--| | ||||
| | jPOST | `?tar` | `["foo","bar"]` | download folders `foo` and `bar` inside URL as a tar file | | ||||
|  | ||||
| ## write | ||||
|  | ||||
| | method | params | result | | ||||
| |--|--|--| | ||||
| | GET | `?move=/foo/bar` | move/rename the file/folder at URL to /foo/bar | | ||||
|  | ||||
| | method | params | body | result | | ||||
| |--|--|--|--| | ||||
| | PUT | | (binary data) | upload into file at URL | | ||||
| | PUT | `?gz` | (binary data) | compress with gzip and write into file at URL | | ||||
| | PUT | `?xz` | (binary data) | compress with xz and write into file at URL | | ||||
| | mPOST | | `act=bput`, `f=FILE` | upload `FILE` into the folder at URL | | ||||
| | mPOST | `?j` | `act=bput`, `f=FILE` | ...and reply with json | | ||||
| | mPOST | | `act=mkdir`, `name=foo` | create directory `foo` at URL | | ||||
| | GET | `?delete` | | delete URL recursively | | ||||
| | jPOST | `?delete` | `["/foo","/bar"]` | delete `/foo` and `/bar` recursively | | ||||
| | uPOST | | `msg=foo` | send message `foo` into server log | | ||||
| | mPOST | | `act=tput`, `body=TEXT` | overwrite markdown document at URL | | ||||
|  | ||||
| server behavior of `msg` can be reconfigured with `--urlform` | ||||
|  | ||||
| ## admin | ||||
|  | ||||
| | method | params | result | | ||||
| |--|--|--| | ||||
| | GET | `?reload=cfg` | reload config files and rescan volumes | | ||||
| | GET | `?scan` | initiate a rescan of the volume which provides URL | | ||||
| | GET | `?stack` | show a stacktrace of all threads | | ||||
|  | ||||
| ## general | ||||
|  | ||||
| | method | params | result | | ||||
| |--|--|--| | ||||
| | GET | `?pw=x` | logout | | ||||
|  | ||||
|  | ||||
| # dependencies | ||||
|  | ||||
| mandatory deps: | ||||
| @@ -867,7 +1058,7 @@ enable music tags: | ||||
|  | ||||
| enable [thumbnails](#thumbnails) of... | ||||
| * **images:** `Pillow` (requires py2.7 or py3.5+) | ||||
| * **videos:** `ffmpeg` and `ffprobe` somewhere in `$PATH` | ||||
| * **videos/audio:** `ffmpeg` and `ffprobe` somewhere in `$PATH` | ||||
| * **HEIF pictures:** `pyheif-pillow-opener` (requires Linux or a C compiler) | ||||
| * **AVIF pictures:** `pillow-avif-plugin` | ||||
|  | ||||
| @@ -901,19 +1092,19 @@ pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to ` | ||||
| reduce the size of an sfx by removing features | ||||
|  | ||||
| if you don't need all the features, you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except if you're on windows then you need msys2 or WSL) | ||||
| * `525k` size of original sfx.py as of v0.11.30 | ||||
| * `315k` after `./scripts/make-sfx.sh re no-ogv` | ||||
| * `223k` after `./scripts/make-sfx.sh re no-ogv no-cm` | ||||
| * `393k` size of original sfx.py as of v1.1.3 | ||||
| * `310k` after `./scripts/make-sfx.sh re no-cm` | ||||
| * `269k` after `./scripts/make-sfx.sh re no-cm no-hl` | ||||
|  | ||||
| the features you can opt to drop are | ||||
| * `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files, saves ~192k | ||||
| * `cm`/easymde, the "fancy" markdown editor, saves ~92k | ||||
| * `cm`/easymde, the "fancy" markdown editor, saves ~82k | ||||
| * `hl`, prism, the syntax highlighter, saves ~41k | ||||
| * `fnt`, source-code-pro, the monospace font, saves ~9k | ||||
| * `dd`, the custom mouse cursor for the media player tray tab, saves ~2k | ||||
|  | ||||
| for the `re`pack to work, first run one of the sfx'es once to unpack it | ||||
|  | ||||
| **note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a `no-ogv no-cm` repack; works on linux/macos (and windows with msys2 or WSL) | ||||
| **note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a few repacks; works on linux/macos (and windows with msys2 or WSL) | ||||
|  | ||||
|  | ||||
| # install on android | ||||
| @@ -927,6 +1118,16 @@ echo $? | ||||
| after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux | ||||
|  | ||||
|  | ||||
| # reporting bugs | ||||
|  | ||||
| ideas for context to include in bug reports | ||||
|  | ||||
| if something broke during an upload (replacing FILENAME with a part of the filename that broke): | ||||
| ``` | ||||
| journalctl -aS '48 hour ago' -u copyparty | grep -C10 FILENAME | tee bug.log | ||||
| ``` | ||||
|  | ||||
|  | ||||
| # building | ||||
|  | ||||
| ## dev env setup | ||||
|   | ||||
| @@ -1,3 +1,11 @@ | ||||
| # [`up2k.py`](up2k.py) | ||||
| * command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm) | ||||
| * file uploads, file-search, autoresume of aborted/broken uploads | ||||
| * faster than browsers | ||||
| * if something breaks just restart it | ||||
|  | ||||
|  | ||||
|  | ||||
| # [`copyparty-fuse.py`](copyparty-fuse.py) | ||||
| * mount a copyparty server as a local filesystem (read-only) | ||||
| * **supports Windows!** -- expect `194 MiB/s` sequential read | ||||
| @@ -47,6 +55,7 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas | ||||
| * copyparty can Popen programs like these during file indexing to collect additional metadata | ||||
|  | ||||
|  | ||||
|  | ||||
| # [`dbtool.py`](dbtool.py) | ||||
| upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty is incompatible with the old DB and automatically rebuilds the DB from scratch, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db | ||||
|  | ||||
| @@ -63,6 +72,7 @@ cd /mnt/nas/music/.hist | ||||
| ``` | ||||
|  | ||||
|  | ||||
|  | ||||
| # [`prisonparty.sh`](prisonparty.sh) | ||||
| * run copyparty in a chroot, preventing any accidental file access | ||||
| * creates bindmounts for /bin, /lib, and so on, see `sysdirs=` | ||||
|   | ||||
| @@ -71,7 +71,7 @@ except: | ||||
|     elif MACOS: | ||||
|         libfuse = "install https://osxfuse.github.io/" | ||||
|     else: | ||||
|         libfuse = "apt install libfuse\n    modprobe fuse" | ||||
|         libfuse = "apt install libfuse3-3\n    modprobe fuse" | ||||
|  | ||||
|     print( | ||||
|         "\n  could not import fuse; these may help:" | ||||
| @@ -393,15 +393,16 @@ class Gateway(object): | ||||
|  | ||||
|         rsp = json.loads(rsp.decode("utf-8")) | ||||
|         ret = [] | ||||
|         for is_dir, nodes in [[True, rsp["dirs"]], [False, rsp["files"]]]: | ||||
|         for statfun, nodes in [ | ||||
|             [self.stat_dir, rsp["dirs"]], | ||||
|             [self.stat_file, rsp["files"]], | ||||
|         ]: | ||||
|             for n in nodes: | ||||
|                 fname = unquote(n["href"]).rstrip(b"/") | ||||
|                 fname = fname.decode("wtf-8") | ||||
|                 fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8") | ||||
|                 if bad_good: | ||||
|                     fname = enwin(fname) | ||||
|  | ||||
|                 fun = self.stat_dir if is_dir else self.stat_file | ||||
|                 ret.append([fname, fun(n["ts"], n["sz"]), 0]) | ||||
|                 ret.append([fname, statfun(n["ts"], n["sz"]), 0]) | ||||
|  | ||||
|         return ret | ||||
|  | ||||
|   | ||||
| @@ -11,14 +11,18 @@ import re | ||||
| import os | ||||
| import sys | ||||
| import time | ||||
| import json | ||||
| import stat | ||||
| import errno | ||||
| import struct | ||||
| import codecs | ||||
| import platform | ||||
| import threading | ||||
| import http.client  # py2: httplib | ||||
| import urllib.parse | ||||
| from datetime import datetime | ||||
| from urllib.parse import quote_from_bytes as quote | ||||
| from urllib.parse import unquote_to_bytes as unquote | ||||
|  | ||||
| try: | ||||
|     import fuse | ||||
| @@ -38,7 +42,7 @@ except: | ||||
| mount a copyparty server (local or remote) as a filesystem | ||||
|  | ||||
| usage: | ||||
|   python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas | ||||
|   python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas | ||||
|  | ||||
| dependencies: | ||||
|   sudo apk add fuse-dev python3-dev | ||||
| @@ -50,6 +54,10 @@ fork of copyparty-fuse.py based on fuse-python which | ||||
| """ | ||||
|  | ||||
|  | ||||
| WINDOWS = sys.platform == "win32" | ||||
| MACOS = platform.system() == "Darwin" | ||||
|  | ||||
|  | ||||
def threadless_log(msg):
    # one atomic print per message: the newline is baked into the payload
    # and end="" suppresses print's own, so concurrent threads cannot
    # interleave their output mid-line
    print("%s\n" % (msg,), end="")
|  | ||||
| @@ -93,6 +101,41 @@ def html_dec(txt): | ||||
|     ) | ||||
|  | ||||
|  | ||||
def register_wtf8():
    """Install a "wtf-8" codec: utf-8 with surrogateescape in both
    directions, so byte sequences that are not valid utf-8 survive a
    decode/encode roundtrip unchanged.

    NOTE(review): the search function ignores the requested encoding
    name and always answers with this codec, so any lookup of an
    otherwise-unknown encoding will resolve to wtf-8 as well.
    """

    def _encode(text):
        data = str(text).encode("utf-8", "surrogateescape")
        return data, len(text)

    def _decode(binary):
        text = bytes(binary).decode("utf-8", "surrogateescape")
        return text, len(binary)

    def _lookup(encoding_name):
        return codecs.CodecInfo(_encode, _decode, name="wtf-8")

    codecs.register(_lookup)
|  | ||||
|  | ||||
# per-character translation tables between windows-illegal filename
# characters and their private-use-area stand-ins; populated in main()
# on windows only, so both stay empty on other platforms
bad_good = {}
good_bad = {}


def enwin(txt):
    """Replace windows-illegal characters in *txt* with their
    private-use stand-ins (no-op while bad_good is empty)."""
    # NOTE: the original had an unreachable str.replace loop after this
    # return statement; that dead code has been removed
    return "".join([bad_good.get(x, x) for x in txt])


def dewin(txt):
    """Inverse of enwin: restore the original characters."""
    return "".join([good_bad.get(x, x) for x in txt])
|  | ||||
|  | ||||
| class CacheNode(object): | ||||
|     def __init__(self, tag, data): | ||||
|         self.tag = tag | ||||
| @@ -115,8 +158,9 @@ class Stat(fuse.Stat): | ||||
|  | ||||
|  | ||||
| class Gateway(object): | ||||
|     def __init__(self, base_url): | ||||
|     def __init__(self, base_url, pw): | ||||
|         self.base_url = base_url | ||||
|         self.pw = pw | ||||
|  | ||||
|         ui = urllib.parse.urlparse(base_url) | ||||
|         self.web_root = ui.path.strip("/") | ||||
| @@ -135,8 +179,7 @@ class Gateway(object): | ||||
|         self.conns = {} | ||||
|  | ||||
|     def quotep(self, path): | ||||
|         # TODO: mojibake support | ||||
|         path = path.encode("utf-8", "ignore") | ||||
|         path = path.encode("wtf-8") | ||||
|         return quote(path, safe="/") | ||||
|  | ||||
|     def getconn(self, tid=None): | ||||
| @@ -159,20 +202,29 @@ class Gateway(object): | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
|     def sendreq(self, *args, **kwargs): | ||||
|     def sendreq(self, *args, **ka): | ||||
|         tid = get_tid() | ||||
|         if self.pw: | ||||
|             ck = "cppwd=" + self.pw | ||||
|             try: | ||||
|                 ka["headers"]["Cookie"] = ck | ||||
|             except: | ||||
|                 ka["headers"] = {"Cookie": ck} | ||||
|         try: | ||||
|             c = self.getconn(tid) | ||||
|             c.request(*list(args), **kwargs) | ||||
|             c.request(*list(args), **ka) | ||||
|             return c.getresponse() | ||||
|         except: | ||||
|             self.closeconn(tid) | ||||
|             c = self.getconn(tid) | ||||
|             c.request(*list(args), **kwargs) | ||||
|             c.request(*list(args), **ka) | ||||
|             return c.getresponse() | ||||
|  | ||||
|     def listdir(self, path): | ||||
|         web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots" | ||||
|         if bad_good: | ||||
|             path = dewin(path) | ||||
|  | ||||
|         web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots&ls" | ||||
|         r = self.sendreq("GET", web_path) | ||||
|         if r.status != 200: | ||||
|             self.closeconn() | ||||
| @@ -182,9 +234,12 @@ class Gateway(object): | ||||
|                 ) | ||||
|             ) | ||||
|  | ||||
|         return self.parse_html(r) | ||||
|         return self.parse_jls(r) | ||||
|  | ||||
|     def download_file_range(self, path, ofs1, ofs2): | ||||
|         if bad_good: | ||||
|             path = dewin(path) | ||||
|  | ||||
|         web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw" | ||||
|         hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1) | ||||
|         log("downloading {}".format(hdr_range)) | ||||
| @@ -200,40 +255,27 @@ class Gateway(object): | ||||
|  | ||||
|         return r.read() | ||||
|  | ||||
|     def parse_html(self, datasrc): | ||||
|         ret = [] | ||||
|         remainder = b"" | ||||
|         ptn = re.compile( | ||||
|             r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$" | ||||
|         ) | ||||
|  | ||||
|     def parse_jls(self, datasrc): | ||||
|         rsp = b"" | ||||
|         while True: | ||||
|             buf = remainder + datasrc.read(4096) | ||||
|             # print('[{}]'.format(buf.decode('utf-8'))) | ||||
|             buf = datasrc.read(1024 * 32) | ||||
|             if not buf: | ||||
|                 break | ||||
|  | ||||
|             remainder = b"" | ||||
|             endpos = buf.rfind(b"\n") | ||||
|             if endpos >= 0: | ||||
|                 remainder = buf[endpos + 1 :] | ||||
|                 buf = buf[:endpos] | ||||
|             rsp += buf | ||||
|  | ||||
|             lines = buf.decode("utf-8").split("\n") | ||||
|             for line in lines: | ||||
|                 m = ptn.match(line) | ||||
|                 if not m: | ||||
|                     # print(line) | ||||
|                     continue | ||||
|         rsp = json.loads(rsp.decode("utf-8")) | ||||
|         ret = [] | ||||
|         for statfun, nodes in [ | ||||
|             [self.stat_dir, rsp["dirs"]], | ||||
|             [self.stat_file, rsp["files"]], | ||||
|         ]: | ||||
|             for n in nodes: | ||||
|                 fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8") | ||||
|                 if bad_good: | ||||
|                     fname = enwin(fname) | ||||
|  | ||||
|                 ftype, fname, fsize, fdate = m.groups() | ||||
|                 fname = html_dec(fname) | ||||
|                 ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp() | ||||
|                 sz = int(fsize) | ||||
|                 if ftype == "-": | ||||
|                     ret.append([fname, self.stat_file(ts, sz), 0]) | ||||
|                 else: | ||||
|                     ret.append([fname, self.stat_dir(ts, sz), 0]) | ||||
|                 ret.append([fname, statfun(n["ts"], n["sz"]), 0]) | ||||
|  | ||||
|         return ret | ||||
|  | ||||
| @@ -262,6 +304,7 @@ class CPPF(Fuse): | ||||
|         Fuse.__init__(self, *args, **kwargs) | ||||
|  | ||||
|         self.url = None | ||||
|         self.pw = None | ||||
|  | ||||
|         self.dircache = [] | ||||
|         self.dircache_mtx = threading.Lock() | ||||
| @@ -271,7 +314,7 @@ class CPPF(Fuse): | ||||
|  | ||||
|     def init2(self): | ||||
|         # TODO figure out how python-fuse wanted this to go | ||||
|         self.gw = Gateway(self.url)  # .decode('utf-8')) | ||||
|         self.gw = Gateway(self.url, self.pw)  # .decode('utf-8')) | ||||
|         info("up") | ||||
|  | ||||
|     def clean_dircache(self): | ||||
| @@ -536,6 +579,8 @@ class CPPF(Fuse): | ||||
|  | ||||
|     def getattr(self, path): | ||||
|         log("getattr [{}]".format(path)) | ||||
|         if WINDOWS: | ||||
|             path = enwin(path)  # windows occasionally decodes f0xx to xx | ||||
|  | ||||
|         path = path.strip("/") | ||||
|         try: | ||||
| @@ -568,9 +613,25 @@ class CPPF(Fuse): | ||||
|  | ||||
| def main(): | ||||
|     time.strptime("19970815", "%Y%m%d")  # python#7980 | ||||
|     register_wtf8() | ||||
|     if WINDOWS: | ||||
|         os.system("rem") | ||||
|  | ||||
|         for ch in '<>:"\\|?*': | ||||
|             # microsoft maps illegal characters to f0xx | ||||
|             # (e000 to f8ff is basic-plane private-use) | ||||
|             bad_good[ch] = chr(ord(ch) + 0xF000) | ||||
|  | ||||
|         for n in range(0, 0x100): | ||||
|             # map surrogateescape to another private-use area | ||||
|             bad_good[chr(n + 0xDC00)] = chr(n + 0xF100) | ||||
|  | ||||
|         for k, v in bad_good.items(): | ||||
|             good_bad[v] = k | ||||
|  | ||||
|     server = CPPF() | ||||
|     server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None) | ||||
|     server.parser.add_option(mountopt="pw", metavar="PASSWORD", default=None) | ||||
|     server.parse(values=server, errex=1) | ||||
|     if not server.url or not str(server.url).startswith("http"): | ||||
|         print("\nerror:") | ||||
| @@ -578,7 +639,7 @@ def main(): | ||||
|         print("  need argument: mount-path") | ||||
|         print("example:") | ||||
|         print( | ||||
|             "  ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas" | ||||
|             "  ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas" | ||||
|         ) | ||||
|         sys.exit(1) | ||||
|  | ||||
|   | ||||
| @@ -6,10 +6,15 @@ some of these rely on libraries which are not MIT-compatible | ||||
|  | ||||
| * [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2 | ||||
| * [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3 | ||||
|  | ||||
| these invoke standalone programs which are GPL or similar, so they are legally fine for most purposes: | ||||
|  | ||||
| * [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL) | ||||
| * [image-noexif.py](./image-noexif.py) removes exif tags from images; uses exiftool (GPLv1 or artistic-license) | ||||
|  | ||||
| these do not have any problematic dependencies: | ||||
| these do not have any problematic dependencies at all: | ||||
|  | ||||
| * [cksum.py](./cksum.py) computes various checksums | ||||
| * [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser) | ||||
| * [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty | ||||
|  | ||||
|   | ||||
| @@ -19,18 +19,18 @@ dep: ffmpeg | ||||
| def det(tf): | ||||
|     # fmt: off | ||||
|     sp.check_call([ | ||||
|         "ffmpeg", | ||||
|         "-nostdin", | ||||
|         "-hide_banner", | ||||
|         "-v", "fatal", | ||||
|         "-ss", "13", | ||||
|         "-y", "-i", fsenc(sys.argv[1]), | ||||
|         "-map", "0:a:0", | ||||
|         "-ac", "1", | ||||
|         "-ar", "22050", | ||||
|         "-t", "300", | ||||
|         "-f", "f32le", | ||||
|         tf | ||||
|         b"ffmpeg", | ||||
|         b"-nostdin", | ||||
|         b"-hide_banner", | ||||
|         b"-v", b"fatal", | ||||
|         b"-ss", b"13", | ||||
|         b"-y", b"-i", fsenc(sys.argv[1]), | ||||
|         b"-map", b"0:a:0", | ||||
|         b"-ac", b"1", | ||||
|         b"-ar", b"22050", | ||||
|         b"-t", b"300", | ||||
|         b"-f", b"f32le", | ||||
|         fsenc(tf) | ||||
|     ]) | ||||
|     # fmt: on | ||||
|  | ||||
|   | ||||
| @@ -23,15 +23,15 @@ dep: ffmpeg | ||||
| def det(tf): | ||||
|     # fmt: off | ||||
|     sp.check_call([ | ||||
|         "ffmpeg", | ||||
|         "-nostdin", | ||||
|         "-hide_banner", | ||||
|         "-v", "fatal", | ||||
|         "-y", "-i", fsenc(sys.argv[1]), | ||||
|         "-map", "0:a:0", | ||||
|         "-t", "300", | ||||
|         "-sample_fmt", "s16", | ||||
|         tf | ||||
|         b"ffmpeg", | ||||
|         b"-nostdin", | ||||
|         b"-hide_banner", | ||||
|         b"-v", b"fatal", | ||||
|         b"-y", b"-i", fsenc(sys.argv[1]), | ||||
|         b"-map", b"0:a:0", | ||||
|         b"-t", b"300", | ||||
|         b"-sample_fmt", b"s16", | ||||
|         fsenc(tf) | ||||
|     ]) | ||||
|     # fmt: on | ||||
|  | ||||
|   | ||||
							
								
								
									
										89
									
								
								bin/mtag/cksum.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										89
									
								
								bin/mtag/cksum.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,89 @@ | ||||
#!/usr/bin/env python3

import sys
import json
import zlib
import struct
import base64
import hashlib

try:
    from copyparty.util import fsenc
except:

    def fsenc(p):
        return p


"""
calculates various checksums for uploads,
usage: -mtp crc32,md5,sha1,sha256b=bin/mtag/cksum.py
"""


def main():
    """Hash the file named by sys.argv[1] in one streaming pass and
    print a JSON object mapping each configured checksum name to its
    hex (default) or base64 ("b" suffix) digest, optionally truncated
    to n bits ("/n" suffix)."""
    config = "crc32 md5 md5b sha1 sha1b sha256 sha256b sha512/240 sha512b/240"
    # b suffix = base64 encoded
    # slash = truncate to n bits

    known = {
        "md5": hashlib.md5,
        "sha1": hashlib.sha1,
        "sha256": hashlib.sha256,
        "sha512": hashlib.sha512,
    }
    config = config.split()

    # instantiate only the hashers that config actually asks for;
    # bugfix: the filter previously iterated `known` instead of `config`,
    # which made the condition always true and enabled every hasher
    wanted = set(x.split("/")[0].rstrip("b") for x in config)
    hashers = {k: v() for k, v in known.items() if k in wanted}
    crc32 = 0 if "crc32" in config else None

    with open(fsenc(sys.argv[1]), "rb", 512 * 1024) as f:
        while True:
            buf = f.read(64 * 1024)
            if not buf:
                break

            for x in hashers.values():
                x.update(buf)

            if crc32 is not None:
                crc32 = zlib.crc32(buf, crc32)

    ret = {}
    for s in config:
        alg = s.split("/")[0]
        b64 = alg.endswith("b")
        alg = alg.rstrip("b")
        if alg in hashers:
            v = hashers[alg].digest()
        elif alg == "crc32":
            v = crc32
            if v < 0:
                # zlib.crc32 can return negative on py2; normalize to u32
                v &= 2 ** 32 - 1
            v = struct.pack(">L", v)
        else:
            raise Exception("what is {}".format(s))

        if "/" in s:
            # truncate digest to the requested number of bits
            v = v[: int(int(s.split("/")[1]) / 8)]

        if b64:
            v = base64.b64encode(v).decode("ascii").rstrip("=")
        else:
            try:
                v = v.hex()
            except:
                # py2 bytes have no .hex()
                import binascii

                v = binascii.hexlify(v)

        ret[s] = v

    print(json.dumps(ret, indent=4))


if __name__ == "__main__":
    main()
							
								
								
									
										93
									
								
								bin/mtag/image-noexif.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										93
									
								
								bin/mtag/image-noexif.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,93 @@ | ||||
#!/usr/bin/env python3

"""
remove exif tags from uploaded images

dependencies:
  exiftool

about:
  writes an exif-stripped copy of each uploaded image into a "noexif"
  subfolder instead of modifying the original in-place; this keeps the
  up2k.db hashes valid so deduplication keeps working until the next
  volume rescan (modifying the original would leave a stale hash in the
  db, causing reuploads to be renamed and kept as dupes)

example copyparty config to use this:
  -v/mnt/nas/pics:pics:rwmd,ed:c,e2ts,mte=+noexif:c,mtp=noexif=ejpg,ejpeg,ad,bin/mtag/image-noexif.py

explained:
  for realpath /mnt/nas/pics (served at /pics) with read-write-modify-delete
  for ed, enable file analysis on upload (e2ts), append "noexif" to the
  list of known tags (mte), and use mtp plugin "bin/mtag/image-noexif.py"
  to provide that tag on uploads with extension "jpg" or "jpeg";
  ad = parse file regardless if FFmpeg thinks it is audio or not

PS: this requires e2ts to be functional, meaning you need at least one of
  ffmpeg / mutagen installed, and sqlite3 support compiled into python
"""


import os
import sys
import time
import filecmp
import subprocess as sp

try:
    from copyparty.util import fsenc
except:

    def fsenc(p):
        return p.encode("utf-8")


def main():
    dirpath, name = os.path.split(sys.argv[1])
    if os.path.basename(dirpath) == "noexif":
        # this file IS a stripped copy; nothing to do
        return

    os.chdir(dirpath)
    src = fsenc(name)
    dst = os.path.join(b"noexif", src)
    # -P preserves the file modification time,
    # -o writes the stripped copy into the noexif subfolder
    cmd = [
        b"exiftool",
        b"-exif:all=",
        b"-iptc:all=",
        b"-xmp:all=",
        b"-P",
        b"-o",
        b"noexif/",
        b"--",
        src,
    ]
    sp.check_output(cmd)
    if not os.path.exists(dst):
        print("failed")
        return

    # tag value: "clean" if the image had no metadata to strip
    if filecmp.cmp(src, dst, shallow=False):
        print("clean")
    else:
        print("exif")


if __name__ == "__main__":
    main()
| @@ -13,7 +13,7 @@ try: | ||||
| except: | ||||
|  | ||||
|     def fsenc(p): | ||||
|         return p | ||||
|         return p.encode("utf-8") | ||||
|  | ||||
|  | ||||
| """ | ||||
| @@ -24,13 +24,13 @@ dep: ffmpeg | ||||
| def det(): | ||||
|     # fmt: off | ||||
|     cmd = [ | ||||
|         "ffmpeg", | ||||
|         "-nostdin", | ||||
|         "-hide_banner", | ||||
|         "-v", "fatal", | ||||
|         "-i", fsenc(sys.argv[1]), | ||||
|         "-f", "framemd5", | ||||
|         "-" | ||||
|         b"ffmpeg", | ||||
|         b"-nostdin", | ||||
|         b"-hide_banner", | ||||
|         b"-v", b"fatal", | ||||
|         b"-i", fsenc(sys.argv[1]), | ||||
|         b"-f", b"framemd5", | ||||
|         b"-" | ||||
|     ] | ||||
|     # fmt: on | ||||
|  | ||||
|   | ||||
							
								
								
									
										830
									
								
								bin/up2k.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										830
									
								
								bin/up2k.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,830 @@ | ||||
| #!/usr/bin/env python3 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| """ | ||||
| up2k.py: upload to copyparty | ||||
| 2021-11-28, v0.13, ed <irc.rizon.net>, MIT-Licensed | ||||
| https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py | ||||
|  | ||||
| - dependencies: requests | ||||
| - supports python 2.6, 2.7, and 3.3 through 3.10 | ||||
|  | ||||
| - almost zero error-handling | ||||
| - but if something breaks just try again and it'll autoresume | ||||
| """ | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| import stat | ||||
| import math | ||||
| import time | ||||
| import atexit | ||||
| import signal | ||||
| import base64 | ||||
| import hashlib | ||||
| import argparse | ||||
| import platform | ||||
| import threading | ||||
| import requests | ||||
| import datetime | ||||
|  | ||||
|  | ||||
| # from copyparty/__init__.py | ||||
| PY2 = sys.version_info[0] == 2 | ||||
| if PY2: | ||||
|     from Queue import Queue | ||||
|     from urllib import unquote | ||||
|     from urllib import quote | ||||
|  | ||||
|     sys.dont_write_bytecode = True | ||||
|     bytes = str | ||||
| else: | ||||
|     from queue import Queue | ||||
|     from urllib.parse import unquote_to_bytes as unquote | ||||
|     from urllib.parse import quote_from_bytes as quote | ||||
|  | ||||
|     unicode = str | ||||
|  | ||||
| VT100 = platform.system() != "Windows" | ||||
|  | ||||
|  | ||||
| req_ses = requests.Session() | ||||
|  | ||||
|  | ||||
class File(object):
    """an up2k upload task; represents a single file"""

    def __init__(self, top, rel, size, lmod):
        self.top = top  # type: bytes  # base folder the path is relative to
        self.rel = rel.replace(b"\\", b"/")  # type: bytes  # normalized rel-path
        self.size = size  # type: int
        self.lmod = lmod  # type: float  # last-modified timestamp

        # NOTE: abs intentionally uses the raw (un-normalized) rel
        self.abs = os.path.join(top, rel)  # type: bytes
        self.name = self.rel.split(b"/")[-1].decode("utf-8", "replace")  # type: str

        # filled in by get_hashlist
        self.cids = []  # type: list[tuple[str, int, int]]  # [ hash, ofs, sz ]
        self.kchunks = {}  # type: dict[str, tuple[int, int]]  # hash: [ ofs, sz ]

        # filled in by handshake
        self.ucids = []  # type: list[str]  # chunks which need to be uploaded
        self.wark = None  # type: str
        self.url = None  # type: str

        # upload progress counters
        self.up_b = 0  # type: int
        self.up_c = 0  # type: int

        # m = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
        # eprint(m.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|  | ||||
|  | ||||
class FileSlice(object):
    """file-like object providing a fixed window into a file"""

    def __init__(self, file, cid):
        # type: (File, str) -> FileSlice

        self.car, self.len = file.kchunks[cid]  # window offset + size
        self.cdr = self.car + self.len  # first byte past the window
        self.ofs = 0  # type: int  # cursor, relative to the window
        self.f = open(file.abs, "rb", 512 * 1024)
        self.f.seek(self.car)

        # delegate the rest of the file-like api to the real handle;
        # https://stackoverflow.com/questions/4359495/what-is-exactly-a-file-like-object-in-python
        # IOBase, RawIOBase, BufferedIOBase
        names = "close closed __enter__ __exit__ __iter__ isatty __next__ readable seekable writable"
        try:
            for name in names.split():
                setattr(self, name, getattr(self.f, name))
        except:
            pass  # py27 probably

    def tell(self):
        # cursor position inside the window
        return self.ofs

    def seek(self, ofs, wh=0):
        if wh == 1:
            ofs += self.ofs
        elif wh == 2:
            ofs += self.len  # provided ofs is negative

        # clamp into the window (upper bound is len-1, matching upstream)
        if ofs < 0:
            ofs = 0
        elif ofs >= self.len:
            ofs = self.len - 1

        self.ofs = ofs
        self.f.seek(self.car + ofs)

    def read(self, sz):
        # never read past the end of the window
        sz = min(sz, self.len - self.ofs)
        buf = self.f.read(sz)
        self.ofs += len(buf)
        return buf
|  | ||||
|  | ||||
_print = print  # keep a handle on the builtin before any override below


def eprint(*a, **ka):
    """print to stderr, unbuffered, with no trailing newline"""
    opts = dict(ka)
    opts["file"] = sys.stderr
    opts["end"] = ""
    if not PY2:
        opts["flush"] = True

    _print(*a, **opts)
    if PY2 or not VT100:
        sys.stderr.flush()


def flushing_print(*a, **ka):
    """print() replacement for consoles that need explicit flushing"""
    _print(*a, **ka)
    if "flush" not in ka:
        sys.stdout.flush()


if not VT100:
    print = flushing_print
|  | ||||
|  | ||||
def termsize():
    """best-effort terminal size as (cols, rows), defaulting to (80, 25)"""
    import os

    env = os.environ

    def probe(fd):
        # returns (rows, cols) from the tty ioctl, or None on failure
        try:
            import fcntl, termios, struct, os

            return struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
        except:
            return None

    sz = probe(0) or probe(1) or probe(2)
    if not sz:
        # none of the stdio fds is a tty; try the controlling terminal
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            sz = probe(fd)
            os.close(fd)
        except:
            pass
    if not sz:
        # fall back to the environment, then to a sane default
        try:
            sz = (env["LINES"], env["COLUMNS"])
        except:
            sz = (25, 80)
    return int(sz[1]), int(sz[0])
|  | ||||
|  | ||||
class CTermsize(object):
    """tracks terminal dimensions, refreshing on SIGWINCH"""

    def __init__(self):
        self.ev = False  # set by the signal handler when a resize arrives
        self.margin = None  # active scroll-region height, if any
        self.g = None  # first row of the status area
        self.w, self.h = termsize()

        try:
            signal.signal(signal.SIGWINCH, self.ev_sig)
        except:
            return  # no SIGWINCH here; dimensions stay fixed

        thr = threading.Thread(target=self.worker)
        thr.daemon = True
        thr.start()

    def worker(self):
        # debounced resize handling; the signal handler only sets a flag
        while True:
            time.sleep(0.5)
            if not self.ev:
                continue

            self.ev = False
            self.w, self.h = termsize()
            if self.margin is not None:
                self.scroll_region(self.margin)

    def ev_sig(self, *a, **ka):
        self.ev = True

    def scroll_region(self, margin):
        self.margin = margin
        if margin is None:
            # restore the full-screen scroll region
            self.g = None
            eprint("\033[s\033[r\033[u")
            return

        # reserve the bottom `margin` rows for the status display
        self.g = 1 + self.h - margin
        m = "{0}\033[{1}A".format("\n" * margin, margin)
        eprint("{0}\033[s\033[1;{1}r\033[u".format(m, self.g - 1))


ss = CTermsize()
|  | ||||
|  | ||||
| def _scd(err, top): | ||||
|     """non-recursive listing of directory contents, along with stat() info""" | ||||
|     with os.scandir(top) as dh: | ||||
|         for fh in dh: | ||||
|             abspath = os.path.join(top, fh.name) | ||||
|             try: | ||||
|                 yield [abspath, fh.stat()] | ||||
|             except: | ||||
|                 err.append(abspath) | ||||
|  | ||||
|  | ||||
| def _lsd(err, top): | ||||
|     """non-recursive listing of directory contents, along with stat() info""" | ||||
|     for name in os.listdir(top): | ||||
|         abspath = os.path.join(top, name) | ||||
|         try: | ||||
|             yield [abspath, os.stat(abspath)] | ||||
|         except: | ||||
|             err.append(abspath) | ||||
|  | ||||
|  | ||||
# prefer the scandir-based listing when the interpreter provides it
# (py3.5+); fall back to listdir+stat on older pythons
if hasattr(os, "scandir"):
    statdir = _scd
else:
    statdir = _lsd
|  | ||||
|  | ||||
def walkdir(err, top):
    """recursive statdir"""
    for ap, inf in sorted(statdir(err, top)):
        if not stat.S_ISDIR(inf.st_mode):
            yield ap, inf
            continue

        try:
            # recurse into subdirectories, propagating their files upward
            for sub in walkdir(err, ap):
                yield sub
        except:
            err.append(ap)
|  | ||||
|  | ||||
def walkdirs(err, tops):
    """recursive statdir for a list of tops, yields [top, relpath, stat]"""
    sep = "{0}".format(os.sep).encode("ascii")
    for top in tops:
        if top[-1:] == sep:
            # trailing separator: upload the CONTENTS of the folder
            stop = top.rstrip(sep)
        else:
            stop = os.path.dirname(top)

        if not os.path.isdir(top):
            # a single file; yield it directly
            d, n = top.rsplit(sep, 1)
            yield d, n, os.stat(top)
            continue

        for ap, inf in walkdir(err, top):
            yield stop, ap[len(stop) :].lstrip(sep), inf
|  | ||||
|  | ||||
# mostly from copyparty/util.py
def quotep(btxt):
    """percent-encode a bytes url-path, form-style (+ for spaces)"""
    enc = quote(btxt, safe=b"/")
    if not PY2:
        enc = enc.encode("ascii")  # py3 quote returns str

    return enc.replace(b" ", b"+")
|  | ||||
|  | ||||
# from copyparty/util.py
def humansize(sz, terse=False):
    """picks a sensible unit for the given extent"""
    for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
        if sz < 1024:
            break

        sz /= 1024.0

    # at most 4 significant characters, dropping a trailing dot
    ret = str(sz)[:4].rstrip(".") + " " + unit
    if terse:
        # compact form: "1.0K" instead of "1.0 KiB"
        ret = ret.replace("iB", "").replace(" ", "")

    return ret
|  | ||||
|  | ||||
# from copyparty/up2k.py
def up2k_chunksize(filesize):
    """gives the correct chunksize for up2k hashing"""
    csz = 1024 * 1024
    step = 512 * 1024
    # grow the chunksize until the file fits in 256 chunks (or 32 MiB cap)
    while True:
        for growth in (1, 2):
            if math.ceil(filesize * 1.0 / csz) <= 256 or csz >= 32 * 1024 * 1024:
                return csz

            csz += step
            step *= growth
|  | ||||
|  | ||||
# mostly from copyparty/up2k.py
def get_hashlist(file, pcb):
    # type: (File, any) -> None
    """generates the up2k hashlist from file contents, inserts it into `file`"""

    chunk_sz = up2k_chunksize(file.size)
    rem = file.size
    ofs = 0
    chunks = []
    with open(file.abs, "rb", 512 * 1024) as f:
        while rem > 0:
            hasher = hashlib.sha512()
            # the final chunk may be shorter; rem only shrinks so this sticks
            chunk_sz = chunk_rem = min(chunk_sz, rem)
            while chunk_rem > 0:
                buf = f.read(min(chunk_rem, 64 * 1024))
                if not buf:
                    raise Exception("EOF at " + str(f.tell()))

                hasher.update(buf)
                chunk_rem -= len(buf)

            # truncated sha512, urlsafe-b64 (the up2k wire format)
            digest = base64.urlsafe_b64encode(hasher.digest()[:33]).decode("utf-8")

            chunks.append([digest, ofs, chunk_sz])
            ofs += chunk_sz
            rem -= chunk_sz

            if pcb:
                pcb(file, ofs)

    file.cids = chunks
    file.kchunks = {}
    for k, v1, v2 in chunks:
        file.kchunks[k] = [v1, v2]
|  | ||||
|  | ||||
def handshake(req_ses, url, file, pw, search):
    # type: (requests.Session, str, File, any, bool) -> List[str]
    """
    performs a handshake with the server; reply is:
      if search, a list of search results
      otherwise, a list of chunks to upload
    """

    req = {
        "hash": [x[0] for x in file.cids],
        "name": file.name,
        "lmod": file.lmod,
        "size": file.size,
    }
    if search:
        req["srch"] = 1

    headers = {"Content-Type": "text/plain"}  # wtf ed
    if pw:
        headers["Cookie"] = "=".join(["cppwd", pw])

    # a previous handshake pinned the exact target url;
    # otherwise append the file's parent directory to the base url
    if file.url:
        url = file.url
    elif b"/" in file.rel:
        url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")

    while True:
        try:
            sr = req_ses.post(url, headers=headers, json=req)
            break
        except:
            eprint("handshake failed, retrying: {0}\n".format(file.name))
            time.sleep(1)

    try:
        reply = sr.json()
    except:
        raise Exception(sr.text)

    if search:
        return reply["hits"]

    # rebuild the upload url from the scheme + host and the server's purl
    try:
        pre, url = url.split("://")
        pre += "://"
    except:
        pre = ""

    file.url = pre + url.split("/")[0] + reply["purl"]
    file.name = reply["name"]
    file.wark = reply["wark"]

    return reply["hash"]
|  | ||||
|  | ||||
def upload(req_ses, file, cid, pw):
    # type: (requests.Session, File, str, any) -> None
    """upload one specific chunk, `cid` (a chunk-hash)"""

    headers = {
        "X-Up2k-Hash": cid,
        "X-Up2k-Wark": file.wark,
        "Content-Type": "application/octet-stream",
    }
    if pw:
        headers["Cookie"] = "=".join(["cppwd", pw])

    sl = FileSlice(file, cid)
    try:
        reply = req_ses.post(file.url, headers=headers, data=sl)
        if not reply:
            raise Exception(repr(reply))

        _ = reply.content  # drain the response body
    finally:
        sl.f.close()
|  | ||||
|  | ||||
class Daemon(threading.Thread):
    """a thread which will not keep the process alive on exit"""

    def __init__(self, *a, **ka):
        super(Daemon, self).__init__(*a, **ka)
        self.daemon = True
|  | ||||
|  | ||||
class Ctl(object):
    """
    this will be the coordinator which runs everything in parallel
    (hashing, handshakes, uploads)  but right now it's p dumb
    """

    def __init__(self, ar):
        # ar: parsed argparse namespace (url, files, -a, -s, -j, ...)
        self.ar = ar
        # resolve each source to an absolute bytes-path, preserving a
        # trailing separator (rsync semantics: "foo/" uploads contents)
        ar.files = [
            os.path.abspath(os.path.realpath(x.encode("utf-8")))
            + (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
            for x in ar.files
        ]
        ar.url = ar.url.rstrip("/") + "/"
        if "://" not in ar.url:
            ar.url = "http://" + ar.url

        eprint("\nscanning {0} locations\n".format(len(ar.files)))

        # first pass: count files and bytes for progress display,
        # collecting unreadable paths into err
        nfiles = 0
        nbytes = 0
        err = []
        for _, _, inf in walkdirs(err, ar.files):
            nfiles += 1
            nbytes += inf.st_size

        if err:
            eprint("\n# failed to access {0} paths:\n".format(len(err)))
            for x in err:
                eprint(x.decode("utf-8", "replace") + "\n")

            eprint("^ failed to access those {0} paths ^\n\n".format(len(err)))
            if not ar.ok:
                eprint("aborting because --ok is not set\n")
                return

        eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
        self.nfiles = nfiles
        self.nbytes = nbytes

        # tls options: -td disables verification, -te pins a certificate
        if ar.td:
            requests.packages.urllib3.disable_warnings()
            req_ses.verify = False
        if ar.te:
            req_ses.verify = ar.te

        # second pass is a fresh generator so the workers see every file
        self.filegen = walkdirs([], ar.files)
        if ar.safe:
            self.safe()
        else:
            self.fancy()

    def safe(self):
        """minimal basic slow boring fallback codepath"""
        search = self.ar.s
        for nf, (top, rel, inf) in enumerate(self.filegen):
            file = File(top, rel, inf.st_size, inf.st_mtime)
            upath = file.abs.decode("utf-8", "replace")

            print("{0} {1}\n  hash...".format(self.nfiles - nf, upath))
            get_hashlist(file, None)

            # server origin (scheme + host) for printing search hits;
            # bugfix: was url[:12] which duplicated part of the hostname
            # ("http://host/ost/..."); now the same expression as in
            # handshaker()
            burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
            while True:
                print("  hs...")
                hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
                if search:
                    if hs:
                        for hit in hs:
                            print("  found: {0}{1}".format(burl, hit["rp"]))
                    else:
                        print("  NOT found")
                    break

                file.ucids = hs
                if not hs:
                    break

                print("{0} {1}".format(self.nfiles - nf, upath))
                ncs = len(hs)
                for nc, cid in enumerate(hs):
                    print("  {0} up {1}".format(ncs - nc, cid))
                    upload(req_ses, file, cid, self.ar.a)

            print("  ok!")

    def fancy(self):
        """parallel codepath: one hasher + j handshakers + j uploaders"""
        self.hash_f = 0
        self.hash_c = 0
        self.hash_b = 0
        self.up_f = 0
        self.up_c = 0
        self.up_b = 0
        self.up_br = 0
        self.hasher_busy = 1
        self.handshaker_busy = 0
        self.uploader_busy = 0

        self.t0 = time.time()
        self.t0_up = None
        self.spd = None

        self.mutex = threading.Lock()
        self.q_handshake = Queue()  # type: Queue[File]
        self.q_recheck = Queue()  # type: Queue[File]  # partial upload exists [...]
        self.q_upload = Queue()  # type: Queue[tuple[File, str]]

        self.st_hash = [None, "(idle, starting...)"]  # type: tuple[File, int]
        self.st_up = [None, "(idle, starting...)"]  # type: tuple[File, int]
        if VT100:
            atexit.register(self.cleanup_vt100)
            ss.scroll_region(3)

        Daemon(target=self.hasher).start()
        for _ in range(self.ar.j):
            Daemon(target=self.handshaker).start()
            Daemon(target=self.uploader).start()

        # main thread becomes the status display; exit after all
        # workers have been observed idle for 3 consecutive polls
        idles = 0
        while idles < 3:
            time.sleep(0.07)
            with self.mutex:
                if (
                    self.q_handshake.empty()
                    and self.q_upload.empty()
                    and not self.hasher_busy
                    and not self.handshaker_busy
                    and not self.uploader_busy
                ):
                    idles += 1
                else:
                    idles = 0

            if VT100:
                # paint the two status lines inside the scroll-region margin
                maxlen = ss.w - len(str(self.nfiles)) - 14
                txt = "\033[s\033[{0}H".format(ss.g)
                for y, k, st, f in [
                    [0, "hash", self.st_hash, self.hash_f],
                    [1, "send", self.st_up, self.up_f],
                ]:
                    txt += "\033[{0}H{1}:".format(ss.g + y, k)
                    file, arg = st
                    if not file:
                        txt += " {0}\033[K".format(arg)
                    else:
                        if y:
                            p = 100 * file.up_b / file.size
                        else:
                            p = 100 * arg / file.size

                        name = file.abs.decode("utf-8", "replace")[-maxlen:]
                        if "/" in name:
                            name = "\033[36m{0}\033[0m/{1}".format(*name.rsplit("/", 1))

                        m = "{0:6.1f}% {1} {2}\033[K"
                        txt += m.format(p, self.nfiles - f, name)

                txt += "\033[{0}H ".format(ss.g + 2)
            else:
                txt = " "

            # eta from hashing speed until uploads begin, then from a
            # smoothed upload speed
            if not self.up_br:
                spd = self.hash_b / (time.time() - self.t0)
                eta = (self.nbytes - self.hash_b) / (spd + 1)
            else:
                spd = self.up_br / (time.time() - self.t0_up)
                spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
                eta = (self.nbytes - self.up_b) / (spd + 1)

            spd = humansize(spd)
            eta = str(datetime.timedelta(seconds=int(eta)))
            left = humansize(self.nbytes - self.up_b)
            tail = "\033[K\033[u" if VT100 else "\r"

            m = "eta: {0} @ {1}/s, {2} left".format(eta, spd, left)
            eprint(txt + "\033]0;{0}\033\\\r{1}{2}".format(m, m, tail))

    def cleanup_vt100(self):
        # restore the terminal scroll region and title on exit
        ss.scroll_region(None)
        eprint("\033[J\033]0;\033\\")

    def cb_hasher(self, file, ofs):
        # progress callback from get_hashlist
        self.st_hash = [file, ofs]

    def hasher(self):
        """single worker: hash files, feeding the handshake queue"""
        prd = None
        ls = {}
        for top, rel, inf in self.filegen:
            if self.ar.z:
                # -z: list each remote folder once and skip files which
                # already exist with the same size and ~same mtime
                rd = os.path.dirname(rel)
                if prd != rd:
                    prd = rd
                    headers = {}
                    if self.ar.a:
                        headers["Cookie"] = "=".join(["cppwd", self.ar.a])

                    ls = {}
                    try:
                        print("      ls ~{0}".format(rd.decode("utf-8", "replace")))
                        r = req_ses.get(
                            self.ar.url.encode("utf-8") + quotep(rd) + b"?ls",
                            headers=headers,
                        )
                        for f in r.json()["files"]:
                            rfn = f["href"].split("?")[0].encode("utf-8", "replace")
                            ls[unquote(rfn)] = f
                    except:
                        print("   mkdir ~{0}".format(rd.decode("utf-8", "replace")))

                rf = ls.get(os.path.basename(rel), None)
                if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 1:
                    self.nfiles -= 1
                    self.nbytes -= inf.st_size
                    continue

            file = File(top, rel, inf.st_size, inf.st_mtime)
            # throttle: stay at most 128 MiB / 64 chunks ahead of the
            # uploaders (or fully serialize if -nh was given)
            while True:
                with self.mutex:
                    if (
                        self.hash_b - self.up_b < 1024 * 1024 * 128
                        and self.hash_c - self.up_c < 64
                        and (
                            not self.ar.nh
                            or (
                                self.q_upload.empty()
                                and self.q_handshake.empty()
                                and not self.uploader_busy
                            )
                        )
                    ):
                        break

                time.sleep(0.05)

            get_hashlist(file, self.cb_hasher)
            with self.mutex:
                self.hash_f += 1
                self.hash_c += len(file.cids)
                self.hash_b += file.size

            self.q_handshake.put(file)

        self.hasher_busy = 0
        self.st_hash = [None, "(finished)"]

    def handshaker(self):
        """worker: negotiate uploads, feeding the upload queue"""
        search = self.ar.s
        q = self.q_handshake
        burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
        while True:
            file = q.get()
            if not file:
                # sentinel: drain the recheck queue, then stop the uploaders
                if q == self.q_handshake:
                    q = self.q_recheck
                    q.put(None)
                    continue

                self.q_upload.put(None)
                break

            with self.mutex:
                self.handshaker_busy += 1

            upath = file.abs.decode("utf-8", "replace")

            try:
                hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
            except Exception as ex:
                if q == self.q_handshake and "<pre>partial upload exists" in str(ex):
                    # another client is mid-upload; retry via the recheck queue
                    self.q_recheck.put(file)
                    hs = []
                else:
                    raise

            if search:
                if hs:
                    for hit in hs:
                        m = "found: {0}\n  {1}{2}\n"
                        print(m.format(upath, burl, hit["rp"]), end="")
                else:
                    print("NOT found: {0}\n".format(upath), end="")

                with self.mutex:
                    self.up_f += 1
                    self.up_c += len(file.cids)
                    self.up_b += file.size
                    self.handshaker_busy -= 1

                continue

            with self.mutex:
                if not hs:
                    # all chunks done
                    self.up_f += 1
                    self.up_c += len(file.cids) - file.up_c
                    self.up_b += file.size - file.up_b

                if hs and file.up_c:
                    # some chunks failed
                    self.up_c -= len(hs)
                    file.up_c -= len(hs)
                    for cid in hs:
                        sz = file.kchunks[cid][1]
                        self.up_b -= sz
                        file.up_b -= sz

                file.ucids = hs
                self.handshaker_busy -= 1

            if not hs:
                kw = "uploaded" if file.up_b else "   found"
                print("{0} {1}".format(kw, upath))
            for cid in hs:
                self.q_upload.put([file, cid])

    def uploader(self):
        """worker: push chunks; a file goes back to handshake when complete"""
        while True:
            task = self.q_upload.get()
            if not task:
                self.st_up = [None, "(finished)"]
                break

            with self.mutex:
                self.uploader_busy += 1
                self.t0_up = self.t0_up or time.time()

            file, cid = task
            try:
                upload(req_ses, file, cid, self.ar.a)
            except:
                eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
                pass  # handshake will fix it

            with self.mutex:
                sz = file.kchunks[cid][1]
                file.ucids = [x for x in file.ucids if x != cid]
                if not file.ucids:
                    self.q_handshake.put(file)

                self.st_up = [file, cid]
                file.up_b += sz
                self.up_b += sz
                self.up_br += sz
                file.up_c += 1
                self.up_c += 1
                self.uploader_busy -= 1
|  | ||||
|  | ||||
# argparse formatter combining default-value display with a verbatim epilog
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
    pass
|  | ||||
|  | ||||
def main():
    """parse the command line and hand everything over to Ctl"""
    time.strptime("19970815", "%Y%m%d")  # python#7980
    if not VT100:
        os.system("rem")  # enables colors

    # fmt: off
    ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
NOTE:
source file/folder selection uses rsync syntax, meaning that:
  "foo" uploads the entire folder to URL/foo/
  "foo/" uploads the CONTENTS of the folder into URL/
""")

    ap.add_argument("url", type=unicode, help="server url, including destination folder")
    ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
    ap.add_argument("-a", metavar="PASSWORD", help="password")
    ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
    ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
    ap = app.add_argument_group("performance tweaks")
    ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
    ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
    ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
    ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
    ap = app.add_argument_group("tls")
    ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
    ap.add_argument("-td", action="store_true", help="disable certificate check")
    # fmt: on

    Ctl(app.parse_args())


if __name__ == "__main__":
    main()
							
								
								
									
										24
									
								
								bin/up2k.sh
									
									
									
									
									
										
										
										Executable file → Normal file
									
								
							
							
						
						
									
										24
									
								
								bin/up2k.sh
									
									
									
									
									
										
										
										Executable file → Normal file
									
								
							| @@ -8,7 +8,7 @@ set -e | ||||
| ## | ||||
| ## config | ||||
|  | ||||
| datalen=$((2*1024*1024*1024)) | ||||
| datalen=$((128*1024*1024)) | ||||
| target=127.0.0.1 | ||||
| posturl=/inc | ||||
| passwd=wark | ||||
| @@ -37,10 +37,10 @@ gendata() { | ||||
| # pipe a chunk, get the base64 checksum | ||||
| gethash() { | ||||
|     printf $( | ||||
|         sha512sum | cut -c-64 | | ||||
|         sha512sum | cut -c-66 | | ||||
|         sed -r 's/ .*//;s/(..)/\\x\1/g' | ||||
|     ) | | ||||
|     base64 -w0 | cut -c-43 | | ||||
|     base64 -w0 | cut -c-44 | | ||||
|     tr '+/' '-_' | ||||
| } | ||||
|  | ||||
| @@ -123,7 +123,7 @@ printf '\033[36m' | ||||
| { | ||||
|     { | ||||
|         cat <<EOF | ||||
| POST $posturl/handshake.php HTTP/1.1 | ||||
| POST $posturl/ HTTP/1.1 | ||||
| Connection: Close | ||||
| Cookie: cppwd=$passwd | ||||
| Content-Type: text/plain;charset=UTF-8 | ||||
| @@ -145,14 +145,16 @@ printf '\033[0m\nwark: %s\n' $wark | ||||
| ## | ||||
| ## wait for signal to continue | ||||
|  | ||||
| w8=/dev/shm/$salt.w8 | ||||
| touch $w8 | ||||
| true || { | ||||
|     w8=/dev/shm/$salt.w8 | ||||
|     touch $w8 | ||||
|  | ||||
| echo "ready;  rm -f $w8" | ||||
|     echo "ready;  rm -f $w8" | ||||
|  | ||||
| while [ -e $w8 ]; do | ||||
|     sleep 0.2 | ||||
| done | ||||
|     while [ -e $w8 ]; do | ||||
|         sleep 0.2 | ||||
|     done | ||||
| } | ||||
|  | ||||
|  | ||||
| ## | ||||
| @@ -175,7 +177,7 @@ while [ $remains -gt 0 ]; do | ||||
|      | ||||
|     { | ||||
|         cat <<EOF | ||||
| POST $posturl/chunkpit.php HTTP/1.1 | ||||
| POST $posturl/ HTTP/1.1 | ||||
| Connection: Keep-Alive | ||||
| Cookie: cppwd=$passwd | ||||
| Content-Type: application/octet-stream | ||||
|   | ||||
| @@ -30,6 +30,7 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share | ||||
| # OS integration | ||||
| init-scripts to start copyparty as a service | ||||
| * [`systemd/copyparty.service`](systemd/copyparty.service) runs the sfx normally | ||||
| * [`rc/copyparty`](rc/copyparty) runs sfx normally on freebsd, create a `copyparty` user | ||||
| * [`systemd/prisonparty.service`](systemd/prisonparty.service) runs the sfx in a chroot | ||||
| * [`openrc/copyparty`](openrc/copyparty) | ||||
|  | ||||
|   | ||||
| @@ -13,7 +13,7 @@ | ||||
|  | ||||
| upstream cpp { | ||||
| 	server 127.0.0.1:3923; | ||||
| 	keepalive 120; | ||||
| 	keepalive 1; | ||||
| } | ||||
| server { | ||||
| 	listen 443 ssl; | ||||
|   | ||||
							
								
								
									
										31
									
								
								contrib/rc/copyparty
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										31
									
								
								contrib/rc/copyparty
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,31 @@ | ||||
| #!/bin/sh | ||||
| # | ||||
| # PROVIDE: copyparty | ||||
| # REQUIRE: networking | ||||
| # KEYWORD: | ||||
|  | ||||
| . /etc/rc.subr | ||||
|  | ||||
| name="copyparty" | ||||
| rcvar="copyparty_enable" | ||||
| copyparty_user="copyparty" | ||||
| copyparty_args="-e2dsa -v /storage:/storage:r" # change as you see fit | ||||
| copyparty_command="/usr/local/bin/python3.8 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}" | ||||
| pidfile="/var/run/copyparty/${name}.pid" | ||||
| command="/usr/sbin/daemon" | ||||
| command_args="-P ${pidfile} -r -f ${copyparty_command}" | ||||
|  | ||||
| stop_postcmd="copyparty_shutdown" | ||||
|  | ||||
| copyparty_shutdown() | ||||
| { | ||||
|         if [ -e "${pidfile}" ]; then | ||||
|                 echo "Stopping supervising daemon." | ||||
|                 kill -s TERM `cat ${pidfile}` | ||||
|         fi | ||||
| } | ||||
|  | ||||
| load_rc_config $name | ||||
| : ${copyparty_enable:=no} | ||||
|  | ||||
| run_rc_command "$1" | ||||
| @@ -3,10 +3,15 @@ | ||||
| # | ||||
| # installation: | ||||
| #   cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty | ||||
| #   restorecon -vr /etc/systemd/system/copyparty.service | ||||
| #   firewall-cmd --permanent --add-port={80,443,3923}/tcp | ||||
| #   firewall-cmd --reload | ||||
| # | ||||
| # you may want to: | ||||
| #   change '/usr/bin/python' to another interpreter | ||||
| #   change '/usr/bin/python3' to another interpreter | ||||
| #   change '/mnt::rw' to another location or permission-set | ||||
| #   remove '-p 80,443,3923' to only listen on port 3923 | ||||
| #   add '-i 127.0.0.1' to only allow local connections | ||||
| # | ||||
| # with `Type=notify`, copyparty will signal systemd when it is ready to | ||||
| #   accept connections; correctly delaying units depending on copyparty. | ||||
| @@ -14,11 +19,8 @@ | ||||
| #   python disabling line-buffering, so messages are out-of-order: | ||||
| #   https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png | ||||
| # | ||||
| # enable line-buffering for realtime logging (slight performance cost): | ||||
| #   modify ExecStart and prefix it with `/usr/bin/stdbuf -oL` like so: | ||||
| #   ExecStart=/usr/bin/stdbuf -oL /usr/bin/python3 [...] | ||||
| # but some systemd versions require this instead (higher performance cost): | ||||
| #   inside the [Service] block, add the following line: | ||||
| # if you remove -q to enable logging, you may also want to remove the | ||||
| #   following line to enable buffering (slightly better performance): | ||||
| #   Environment=PYTHONUNBUFFERED=x | ||||
|  | ||||
| [Unit] | ||||
| @@ -27,8 +29,10 @@ Description=copyparty file server | ||||
| [Service] | ||||
| Type=notify | ||||
| SyslogIdentifier=copyparty | ||||
| ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw | ||||
| Environment=PYTHONUNBUFFERED=x | ||||
| ExecReload=/bin/kill -s USR1 $MAINPID | ||||
| ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' | ||||
| ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -p 80,443,3923 -v /mnt::rw | ||||
|  | ||||
| [Install] | ||||
| WantedBy=multi-user.target | ||||
|   | ||||
| @@ -25,26 +25,34 @@ ANYWIN = WINDOWS or sys.platform in ["msys"] | ||||
| MACOS = platform.system() == "Darwin" | ||||
|  | ||||
|  | ||||
| def get_unix_home(): | ||||
|     try: | ||||
|         v = os.environ["XDG_CONFIG_HOME"] | ||||
|         if not v: | ||||
|             raise Exception() | ||||
|         ret = os.path.normpath(v) | ||||
|         os.listdir(ret) | ||||
|         return ret | ||||
|     except: | ||||
|         pass | ||||
| def get_unixdir(): | ||||
|     paths = [ | ||||
|         (os.environ.get, "XDG_CONFIG_HOME"), | ||||
|         (os.path.expanduser, "~/.config"), | ||||
|         (os.environ.get, "TMPDIR"), | ||||
|         (os.environ.get, "TEMP"), | ||||
|         (os.environ.get, "TMP"), | ||||
|         (unicode, "/tmp"), | ||||
|     ] | ||||
|     for chk in [os.listdir, os.mkdir]: | ||||
|         for pf, pa in paths: | ||||
|             try: | ||||
|                 p = pf(pa) | ||||
|                 # print(chk.__name__, p, pa) | ||||
|                 if not p or p.startswith("~"): | ||||
|                     continue | ||||
|  | ||||
|     try: | ||||
|         v = os.path.expanduser("~/.config") | ||||
|         if v.startswith("~"): | ||||
|             raise Exception() | ||||
|         ret = os.path.normpath(v) | ||||
|         os.listdir(ret) | ||||
|         return ret | ||||
|     except: | ||||
|         return "/tmp" | ||||
|                 p = os.path.normpath(p) | ||||
|                 chk(p) | ||||
|                 p = os.path.join(p, "copyparty") | ||||
|                 if not os.path.isdir(p): | ||||
|                     os.mkdir(p) | ||||
|  | ||||
|                 return p | ||||
|             except: | ||||
|                 pass | ||||
|  | ||||
|     raise Exception("could not find a writable path for config") | ||||
|  | ||||
|  | ||||
| class EnvParams(object): | ||||
| @@ -59,7 +67,7 @@ class EnvParams(object): | ||||
|         elif sys.platform == "darwin": | ||||
|             self.cfg = os.path.expanduser("~/Library/Preferences/copyparty") | ||||
|         else: | ||||
|             self.cfg = get_unix_home() + "/copyparty" | ||||
|             self.cfg = get_unixdir() | ||||
|  | ||||
|         self.cfg = self.cfg.replace("\\", "/") | ||||
|         try: | ||||
|   | ||||
| @@ -20,10 +20,10 @@ import threading | ||||
| import traceback | ||||
| from textwrap import dedent | ||||
|  | ||||
| from .__init__ import E, WINDOWS, VT100, PY2, unicode | ||||
| from .__init__ import E, WINDOWS, ANYWIN, VT100, PY2, unicode | ||||
| from .__version__ import S_VERSION, S_BUILD_DT, CODENAME | ||||
| from .svchub import SvcHub | ||||
| from .util import py_desc, align_tab, IMPLICATIONS, ansi_re | ||||
| from .util import py_desc, align_tab, IMPLICATIONS, ansi_re, min_ex | ||||
| from .authsrv import re_vol | ||||
|  | ||||
| HAVE_SSL = True | ||||
| @@ -186,6 +186,32 @@ def configure_ssl_ciphers(al): | ||||
|         sys.exit(0) | ||||
|  | ||||
|  | ||||
| def args_from_cfg(cfg_path): | ||||
|     ret = [] | ||||
|     skip = False | ||||
|     with open(cfg_path, "rb") as f: | ||||
|         for ln in [x.decode("utf-8").strip() for x in f]: | ||||
|             if not ln: | ||||
|                 skip = False | ||||
|                 continue | ||||
|  | ||||
|             if ln.startswith("#"): | ||||
|                 continue | ||||
|  | ||||
|             if not ln.startswith("-"): | ||||
|                 continue | ||||
|  | ||||
|             if skip: | ||||
|                 continue | ||||
|  | ||||
|             try: | ||||
|                 ret.extend(ln.split(" ", 1)) | ||||
|             except: | ||||
|                 ret.append(ln) | ||||
|  | ||||
|     return ret | ||||
|  | ||||
|  | ||||
| def sighandler(sig=None, frame=None): | ||||
|     msg = [""] * 5 | ||||
|     for th in threading.enumerate(): | ||||
| @@ -196,6 +222,54 @@ def sighandler(sig=None, frame=None): | ||||
|     print("\n".join(msg)) | ||||
|  | ||||
|  | ||||
| def disable_quickedit(): | ||||
|     import ctypes | ||||
|     import atexit | ||||
|     from ctypes import wintypes | ||||
|  | ||||
|     def ecb(ok, fun, args): | ||||
|         if not ok: | ||||
|             err = ctypes.get_last_error() | ||||
|             if err: | ||||
|                 raise ctypes.WinError(err) | ||||
|         return args | ||||
|  | ||||
|     k32 = ctypes.WinDLL("kernel32", use_last_error=True) | ||||
|     if PY2: | ||||
|         wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD) | ||||
|  | ||||
|     k32.GetStdHandle.errcheck = ecb | ||||
|     k32.GetConsoleMode.errcheck = ecb | ||||
|     k32.SetConsoleMode.errcheck = ecb | ||||
|     k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD) | ||||
|     k32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD) | ||||
|  | ||||
|     def cmode(out, mode=None): | ||||
|         h = k32.GetStdHandle(-11 if out else -10) | ||||
|         if mode: | ||||
|             return k32.SetConsoleMode(h, mode) | ||||
|  | ||||
|         mode = wintypes.DWORD() | ||||
|         k32.GetConsoleMode(h, ctypes.byref(mode)) | ||||
|         return mode.value | ||||
|  | ||||
|     # disable quickedit | ||||
|     mode = orig_in = cmode(False) | ||||
|     quickedit = 0x40 | ||||
|     extended = 0x80 | ||||
|     mask = quickedit + extended | ||||
|     if mode & mask != extended: | ||||
|         atexit.register(cmode, False, orig_in) | ||||
|         cmode(False, mode & ~mask | extended) | ||||
|  | ||||
|     # enable colors in case the os.system("rem") trick ever stops working | ||||
|     if VT100: | ||||
|         mode = orig_out = cmode(True) | ||||
|         if mode & 4 != 4: | ||||
|             atexit.register(cmode, True, orig_out) | ||||
|             cmode(True, mode | 4) | ||||
|  | ||||
|  | ||||
| def run_argparse(argv, formatter): | ||||
|     ap = argparse.ArgumentParser( | ||||
|         formatter_class=formatter, | ||||
| @@ -208,6 +282,8 @@ def run_argparse(argv, formatter): | ||||
|     except: | ||||
|         fk_salt = "hunter2" | ||||
|  | ||||
|     cores = os.cpu_count() if hasattr(os, "cpu_count") else 4 | ||||
|  | ||||
|     sects = [ | ||||
|         [ | ||||
|             "accounts", | ||||
| @@ -274,9 +350,12 @@ def run_argparse(argv, formatter): | ||||
|              | ||||
|             \033[0mdatabase, general: | ||||
|               \033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags) | ||||
|               \033[36md2ts\033[35m disables metadata collection for existing files | ||||
|               \033[36md2ds\033[35m disables onboot indexing, overrides -e2ds* | ||||
|               \033[36md2t\033[35m disables metadata collection, overrides -e2t* | ||||
|               \033[36md2d\033[35m disables all database stuff, overrides -e2* | ||||
|               \033[36mdhash\033[35m disables file hashing on initial scans, also ehash | ||||
|               \033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso | ||||
|               \033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso | ||||
|               \033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location | ||||
|               \033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage | ||||
|              | ||||
| @@ -332,24 +411,32 @@ def run_argparse(argv, formatter): | ||||
|     ap2 = ap.add_argument_group('general options') | ||||
|     ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file") | ||||
|     ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients") | ||||
|     ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores") | ||||
|     ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all") | ||||
|     ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark") | ||||
|     ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed") | ||||
|     ap2.add_argument("-ed", action="store_true", help="enable ?dots") | ||||
|     ap2.add_argument("-emp", action="store_true", help="enable markdown plugins") | ||||
|     ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate") | ||||
|     ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]") | ||||
|     ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="window title, for example '$ip-10.1.2.' or '$ip-'") | ||||
|  | ||||
|     ap2 = ap.add_argument_group('upload options') | ||||
|     ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads") | ||||
|     ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)") | ||||
|     ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled") | ||||
|     ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload") | ||||
|     ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without") | ||||
|     ap2.add_argument("--no-symlink", action="store_true", help="duplicate file contents instead") | ||||
|     ap2.add_argument("--reg-cap", metavar="N", type=int, default=9000, help="max number of uploads to keep in memory when running without -e2d") | ||||
|  | ||||
|     ap2 = ap.add_argument_group('network options') | ||||
|     ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)") | ||||
|     ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)") | ||||
|     ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy") | ||||
|      | ||||
|     ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes") | ||||
|     ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="socket write delay in seconds") | ||||
|     ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="response delay in seconds") | ||||
|  | ||||
|     ap2 = ap.add_argument_group('SSL/TLS options') | ||||
|     ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls") | ||||
|     ap2.add_argument("--https-only", action="store_true", help="disable plaintext") | ||||
| @@ -360,6 +447,7 @@ def run_argparse(argv, formatter): | ||||
|  | ||||
|     ap2 = ap.add_argument_group('opt-outs') | ||||
|     ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)") | ||||
|     ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows") | ||||
|     ap2.add_argument("--no-del", action="store_true", help="disable delete operations") | ||||
|     ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations") | ||||
|     ap2.add_argument("-nih", action="store_true", help="no info hostname") | ||||
| @@ -375,6 +463,11 @@ def run_argparse(argv, formatter): | ||||
|     ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile") | ||||
|     ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings") | ||||
|     ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings") | ||||
|     ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)") | ||||
|  | ||||
|     ap2 = ap.add_argument_group('yolo options') | ||||
|     ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints") | ||||
|     ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all") | ||||
|  | ||||
|     ap2 = ap.add_argument_group('logging options') | ||||
|     ap2.add_argument("-q", action="store_true", help="quiet") | ||||
| @@ -386,30 +479,38 @@ def run_argparse(argv, formatter): | ||||
|     ap2.add_argument("--lf-url", metavar="RE", type=u, default=r"^/\.cpr/|\?th=[wj]$", help="dont log URLs matching") | ||||
|  | ||||
|     ap2 = ap.add_argument_group('admin panel options') | ||||
|     ap2.add_argument("--no-reload", action="store_true", help="disable ?reload=cfg (reload users/volumes/volflags from config file)") | ||||
|     ap2.add_argument("--no-rescan", action="store_true", help="disable ?scan (volume reindexing)") | ||||
|     ap2.add_argument("--no-stack", action="store_true", help="disable ?stack (list all stacks)") | ||||
|  | ||||
|     ap2 = ap.add_argument_group('thumbnail options') | ||||
|     ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails") | ||||
|     ap2.add_argument("--no-athumb", action="store_true", help="disable audio thumbnails (spectrograms)") | ||||
|     ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails") | ||||
|     ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res") | ||||
|     ap2.add_argument("--th-mt", metavar="CORES", type=int, default=0, help="max num cpu cores to use, 0=all") | ||||
|     ap2.add_argument("--th-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for generating thumbnails") | ||||
|     ap2.add_argument("--th-convt", metavar="SEC", type=int, default=60, help="conversion timeout in seconds") | ||||
|     ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image") | ||||
|     ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output") | ||||
|     ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output") | ||||
|     ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs") | ||||
|     ap2.add_argument("--th-ff-swr", action="store_true", help="use swresample instead of soxr for audio thumbs") | ||||
|     ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown") | ||||
|     ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled") | ||||
|     ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age") | ||||
|     ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for") | ||||
|  | ||||
|     ap2 = ap.add_argument_group('transcoding options') | ||||
|     ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding") | ||||
|     ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete transcode output after SEC seconds") | ||||
|  | ||||
|     ap2 = ap.add_argument_group('general db options') | ||||
|     ap2.add_argument("-e2d", action="store_true", help="enable up2k database") | ||||
|     ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d") | ||||
|     ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds") | ||||
|     ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)") | ||||
|     ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans") | ||||
|     ap2.add_argument("--re-int", metavar="SEC", type=int, default=30, help="disk rescan check interval") | ||||
|     ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans") | ||||
|     ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans") | ||||
|     ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag") | ||||
|     ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline") | ||||
|      | ||||
| @@ -418,8 +519,8 @@ def run_argparse(argv, formatter): | ||||
|     ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t") | ||||
|     ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts") | ||||
|     ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead") | ||||
|     ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism") | ||||
|     ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader") | ||||
|     ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for tag scanning") | ||||
|     ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping") | ||||
|     ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)", | ||||
|         default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash") | ||||
| @@ -427,8 +528,11 @@ def run_argparse(argv, formatter): | ||||
|         default=".vq,.aq,vc,ac,res,.fps") | ||||
|     ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin") | ||||
|  | ||||
|     ap2 = ap.add_argument_group('appearance options') | ||||
|     ap2 = ap.add_argument_group('ui options') | ||||
|     ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include") | ||||
|     ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include") | ||||
|     ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext") | ||||
|     ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents") | ||||
|  | ||||
|     ap2 = ap.add_argument_group('debug options') | ||||
|     ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile") | ||||
| @@ -471,7 +575,12 @@ def main(argv=None): | ||||
|     if HAVE_SSL: | ||||
|         ensure_cert() | ||||
|  | ||||
|     deprecated = [["-e2s", "-e2ds"]] | ||||
|     for k, v in zip(argv[1:], argv[2:]): | ||||
|         if k == "-c": | ||||
|             supp = args_from_cfg(v) | ||||
|             argv.extend(supp) | ||||
|  | ||||
|     deprecated = [] | ||||
|     for dk, nk in deprecated: | ||||
|         try: | ||||
|             idx = argv.index(dk) | ||||
| @@ -483,11 +592,26 @@ def main(argv=None): | ||||
|         argv[idx] = nk | ||||
|         time.sleep(2) | ||||
|  | ||||
|     try: | ||||
|         if len(argv) == 1 and (ANYWIN or not os.geteuid()): | ||||
|             argv.extend(["-p80,443,3923", "--ign-ebind"]) | ||||
|     except: | ||||
|         pass | ||||
|  | ||||
|     try: | ||||
|         al = run_argparse(argv, RiceFormatter) | ||||
|     except AssertionError: | ||||
|         al = run_argparse(argv, Dodge11874) | ||||
|  | ||||
|     if WINDOWS and not al.keep_qem: | ||||
|         try: | ||||
|             disable_quickedit() | ||||
|         except: | ||||
|             print("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n") | ||||
|  | ||||
|     if not VT100: | ||||
|         al.wintitle = "" | ||||
|  | ||||
|     nstrs = [] | ||||
|     anymod = False | ||||
|     for ostr in al.v or []: | ||||
|   | ||||
| @@ -1,8 +1,8 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| VERSION = (1, 0, 4) | ||||
| CODENAME = "sufficient" | ||||
| BUILD_DT = (2021, 9, 19) | ||||
| VERSION = (1, 1, 10) | ||||
| CODENAME = "opus" | ||||
| BUILD_DT = (2021, 12, 16) | ||||
|  | ||||
| S_VERSION = ".".join(map(str, VERSION)) | ||||
| S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT) | ||||
|   | ||||
| @@ -356,7 +356,7 @@ class VFS(object): | ||||
|         if not dbv: | ||||
|             return self, vrem | ||||
|  | ||||
|         vrem = [self.vpath[len(dbv.vpath) + 1 :], vrem] | ||||
|         vrem = [self.vpath[len(dbv.vpath) :].lstrip("/"), vrem] | ||||
|         vrem = "/".join([x for x in vrem if x]) | ||||
|         return dbv, vrem | ||||
|  | ||||
| @@ -526,8 +526,27 @@ class AuthSrv(object): | ||||
|  | ||||
|         yield prev, True | ||||
|  | ||||
|     def _map_volume(self, src, dst, mount, daxs, mflags): | ||||
|         if dst in mount: | ||||
|             m = "multiple filesystem-paths mounted at [/{}]:\n  [{}]\n  [{}]" | ||||
|             self.log(m.format(dst, mount[dst], src), c=1) | ||||
|             raise Exception("invalid config") | ||||
|  | ||||
|         if src in mount.values(): | ||||
|             m = "warning: filesystem-path [{}] mounted in multiple locations:" | ||||
|             m = m.format(src) | ||||
|             for v in [k for k, v in mount.items() if v == src] + [dst]: | ||||
|                 m += "\n  /{}".format(v) | ||||
|  | ||||
|             self.log(m, c=3) | ||||
|  | ||||
|         mount[dst] = src | ||||
|         daxs[dst] = AXS() | ||||
|         mflags[dst] = {} | ||||
|  | ||||
|     def _parse_config_file(self, fd, acct, daxs, mflags, mount): | ||||
|         # type: (any, str, dict[str, AXS], any, str) -> None | ||||
|         skip = False | ||||
|         vol_src = None | ||||
|         vol_dst = None | ||||
|         self.line_ctr = 0 | ||||
| @@ -537,6 +556,11 @@ class AuthSrv(object): | ||||
|                 vol_src = None | ||||
|                 vol_dst = None | ||||
|  | ||||
|             if skip: | ||||
|                 if not ln: | ||||
|                     skip = False | ||||
|                 continue | ||||
|  | ||||
|             if not ln or ln.startswith("#"): | ||||
|                 continue | ||||
|  | ||||
| @@ -544,6 +568,8 @@ class AuthSrv(object): | ||||
|                 if ln.startswith("u "): | ||||
|                     u, p = ln[2:].split(":", 1) | ||||
|                     acct[u] = p | ||||
|                 elif ln.startswith("-"): | ||||
|                     skip = True  # argv | ||||
|                 else: | ||||
|                     vol_src = ln | ||||
|                 continue | ||||
| @@ -556,9 +582,7 @@ class AuthSrv(object): | ||||
|                 # cfg files override arguments and previous files | ||||
|                 vol_src = bos.path.abspath(vol_src) | ||||
|                 vol_dst = vol_dst.strip("/") | ||||
|                 mount[vol_dst] = vol_src | ||||
|                 daxs[vol_dst] = AXS() | ||||
|                 mflags[vol_dst] = {} | ||||
|                 self._map_volume(vol_src, vol_dst, mount, daxs, mflags) | ||||
|                 continue | ||||
|  | ||||
|             try: | ||||
| @@ -597,7 +621,7 @@ class AuthSrv(object): | ||||
|         if uname == "": | ||||
|             uname = "*" | ||||
|  | ||||
|         for un in uname.split(","): | ||||
|         for un in uname.replace(",", " ").strip().split(): | ||||
|             if "r" in lvl: | ||||
|                 axs.uread[un] = 1 | ||||
|  | ||||
| @@ -663,9 +687,7 @@ class AuthSrv(object): | ||||
|                 # print("\n".join([src, dst, perms])) | ||||
|                 src = bos.path.abspath(src) | ||||
|                 dst = dst.strip("/") | ||||
|                 mount[dst] = src | ||||
|                 daxs[dst] = AXS() | ||||
|                 mflags[dst] = {} | ||||
|                 self._map_volume(src, dst, mount, daxs, mflags) | ||||
|  | ||||
|                 for x in perms.split(":"): | ||||
|                     lvl, uname = x.split(",", 1) if "," in x else [x, ""] | ||||
| @@ -726,6 +748,7 @@ class AuthSrv(object): | ||||
|                     axs = getattr(vol.axs, axs_key) | ||||
|                     if usr in axs or "*" in axs: | ||||
|                         umap[usr].append(mp) | ||||
|                 umap[usr].sort() | ||||
|             setattr(vfs, "a" + perm, umap) | ||||
|  | ||||
|         all_users = {} | ||||
| @@ -865,9 +888,14 @@ class AuthSrv(object): | ||||
|             if self.args.e2d or "e2ds" in vol.flags: | ||||
|                 vol.flags["e2d"] = True | ||||
|  | ||||
|             if self.args.no_hash: | ||||
|                 if "ehash" not in vol.flags: | ||||
|                     vol.flags["dhash"] = True | ||||
|             for ga, vf in [["no_hash", "nohash"], ["no_idx", "noidx"]]: | ||||
|                 if vf in vol.flags: | ||||
|                     ptn = vol.flags.pop(vf) | ||||
|                 else: | ||||
|                     ptn = getattr(self.args, ga) | ||||
|  | ||||
|                 if ptn: | ||||
|                     vol.flags[vf] = re.compile(ptn) | ||||
|  | ||||
|             for k in ["e2t", "e2ts", "e2tsr"]: | ||||
|                 if getattr(self.args, k): | ||||
| @@ -880,6 +908,10 @@ class AuthSrv(object): | ||||
|             # default tag cfgs if unset | ||||
|             if "mte" not in vol.flags: | ||||
|                 vol.flags["mte"] = self.args.mte | ||||
|             elif vol.flags["mte"].startswith("+"): | ||||
|                 vol.flags["mte"] = ",".join( | ||||
|                     x for x in [self.args.mte, vol.flags["mte"][1:]] if x | ||||
|                 ) | ||||
|             if "mth" not in vol.flags: | ||||
|                 vol.flags["mth"] = self.args.mth | ||||
|  | ||||
| @@ -894,6 +926,14 @@ class AuthSrv(object): | ||||
|                 vol.flags["d2t"] = True | ||||
|                 vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)} | ||||
|  | ||||
|             # d2ds drops all onboot scans for a volume | ||||
|             for grp, rm in [["d2ds", "e2ds"], ["d2ts", "e2ts"]]: | ||||
|                 if not vol.flags.get(grp, False): | ||||
|                     continue | ||||
|  | ||||
|                 vol.flags["d2ts"] = True | ||||
|                 vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)} | ||||
|  | ||||
|             # mt* needs e2t so drop those too | ||||
|             for grp, rm in [["e2t", "mt"]]: | ||||
|                 if vol.flags.get(grp, False): | ||||
| @@ -976,7 +1016,7 @@ class AuthSrv(object): | ||||
|             v, _ = vfs.get("/", "*", False, True) | ||||
|             if self.warn_anonwrite and os.getcwd() == v.realpath: | ||||
|                 self.warn_anonwrite = False | ||||
|                 msg = "anyone can read/write the current directory: {}" | ||||
|                 msg = "anyone can read/write the current directory: {}\n" | ||||
|                 self.log(msg.format(v.realpath), c=1) | ||||
|         except Pebkac: | ||||
|             self.warn_anonwrite = True | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import os | ||||
| from ..util import fsenc, fsdec | ||||
| from ..util import fsenc, fsdec, SYMTIME | ||||
| from . import path | ||||
|  | ||||
|  | ||||
| @@ -25,14 +25,14 @@ def lstat(p): | ||||
| def makedirs(name, mode=0o755, exist_ok=True): | ||||
|     bname = fsenc(name) | ||||
|     try: | ||||
|         os.makedirs(bname, mode=mode) | ||||
|         os.makedirs(bname, mode) | ||||
|     except: | ||||
|         if not exist_ok or not os.path.isdir(bname): | ||||
|             raise | ||||
|  | ||||
|  | ||||
| def mkdir(p, mode=0o755): | ||||
|     return os.mkdir(fsenc(p), mode=mode) | ||||
|     return os.mkdir(fsenc(p), mode) | ||||
|  | ||||
|  | ||||
| def rename(src, dst): | ||||
| @@ -55,5 +55,8 @@ def unlink(p): | ||||
|     return os.unlink(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def utime(p, times=None): | ||||
|     return os.utime(fsenc(p), times) | ||||
| def utime(p, times=None, follow_symlinks=True): | ||||
|     if SYMTIME: | ||||
|         return os.utime(fsenc(p), times, follow_symlinks=follow_symlinks) | ||||
|     else: | ||||
|         return os.utime(fsenc(p), times) | ||||
|   | ||||
| @@ -2,7 +2,7 @@ | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import os | ||||
| from ..util import fsenc, fsdec | ||||
| from ..util import fsenc, fsdec, SYMTIME | ||||
|  | ||||
|  | ||||
| def abspath(p): | ||||
| @@ -13,14 +13,21 @@ def exists(p): | ||||
|     return os.path.exists(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def getmtime(p): | ||||
|     return os.path.getmtime(fsenc(p)) | ||||
| def getmtime(p, follow_symlinks=True): | ||||
|     if not follow_symlinks and SYMTIME: | ||||
|         return os.lstat(fsenc(p)).st_mtime | ||||
|     else: | ||||
|         return os.path.getmtime(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def getsize(p): | ||||
|     return os.path.getsize(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def isfile(p): | ||||
|     return os.path.isfile(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def isdir(p): | ||||
|     return os.path.isdir(fsenc(p)) | ||||
|  | ||||
|   | ||||
| @@ -62,6 +62,11 @@ class BrokerMp(object): | ||||
|  | ||||
|             procs.pop() | ||||
|  | ||||
|     def reload(self): | ||||
|         self.log("broker", "reloading") | ||||
|         for _, proc in enumerate(self.procs): | ||||
|             proc.q_pend.put([0, "reload", []]) | ||||
|  | ||||
|     def collector(self, proc): | ||||
|         """receive message from hub in other process""" | ||||
|         while True: | ||||
|   | ||||
| @@ -29,7 +29,7 @@ class MpWorker(object): | ||||
|         # we inherited signal_handler from parent, | ||||
|         # replace it with something harmless | ||||
|         if not FAKE_MP: | ||||
|             for sig in [signal.SIGINT, signal.SIGTERM]: | ||||
|             for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGUSR1]: | ||||
|                 signal.signal(sig, self.signal_handler) | ||||
|  | ||||
|         # starting to look like a good idea | ||||
| @@ -69,6 +69,11 @@ class MpWorker(object): | ||||
|                 sys.exit(0) | ||||
|                 return | ||||
|  | ||||
|             elif dest == "reload": | ||||
|                 self.logw("mpw.asrv reloading") | ||||
|                 self.asrv.reload() | ||||
|                 self.logw("mpw.asrv reloaded") | ||||
|  | ||||
|             elif dest == "listen": | ||||
|                 self.httpsrv.listen(args[0], args[1]) | ||||
|  | ||||
|   | ||||
| @@ -21,10 +21,13 @@ class BrokerThr(object): | ||||
|  | ||||
|         # instantiate all services here (TODO: inheritance?) | ||||
|         self.httpsrv = HttpSrv(self, None) | ||||
|         self.reload = self.noop | ||||
|  | ||||
|     def shutdown(self): | ||||
|         # self.log("broker", "shutting down") | ||||
|         self.httpsrv.shutdown() | ||||
|  | ||||
|     def noop(self): | ||||
|         pass | ||||
|  | ||||
|     def put(self, want_retval, dest, *args): | ||||
|   | ||||
| @@ -10,7 +10,6 @@ import json | ||||
| import base64 | ||||
| import string | ||||
| import socket | ||||
| import ctypes | ||||
| from datetime import datetime | ||||
| from operator import itemgetter | ||||
| import calendar | ||||
| @@ -20,6 +19,11 @@ try: | ||||
| except: | ||||
|     pass | ||||
|  | ||||
| try: | ||||
|     import ctypes | ||||
| except: | ||||
|     pass | ||||
|  | ||||
| from .__init__ import E, PY2, WINDOWS, ANYWIN, unicode | ||||
| from .util import *  # noqa  # pylint: disable=unused-wildcard-import | ||||
| from .bos import bos | ||||
| @@ -39,6 +43,7 @@ class HttpCli(object): | ||||
|     def __init__(self, conn): | ||||
|         self.t0 = time.time() | ||||
|         self.conn = conn | ||||
|         self.mutex = conn.mutex | ||||
|         self.s = conn.s  # type: socket | ||||
|         self.sr = conn.sr  # type: Unrecv | ||||
|         self.ip = conn.addr[0] | ||||
| @@ -47,13 +52,15 @@ class HttpCli(object): | ||||
|         self.asrv = conn.asrv  # type: AuthSrv | ||||
|         self.ico = conn.ico | ||||
|         self.thumbcli = conn.thumbcli | ||||
|         self.u2fh = conn.u2fh | ||||
|         self.log_func = conn.log_func | ||||
|         self.log_src = conn.log_src | ||||
|         self.tls = hasattr(self.s, "cipher") | ||||
|  | ||||
|         self.bufsz = 1024 * 32 | ||||
|         self.hint = None | ||||
|         self.absolute_urls = False | ||||
|         self.trailing_slash = True | ||||
|         self.out_headerlist = [] | ||||
|         self.out_headers = { | ||||
|             "Access-Control-Allow-Origin": "*", | ||||
|             "Cache-Control": "no-store; max-age=0", | ||||
| @@ -85,6 +92,7 @@ class HttpCli(object): | ||||
|         tpl = self.conn.hsrv.j2[name] | ||||
|         if ka: | ||||
|             ka["ts"] = self.conn.hsrv.cachebuster() | ||||
|             ka["svcname"] = self.args.doctitle | ||||
|             return tpl.render(**ka) | ||||
|  | ||||
|         return tpl | ||||
| @@ -92,6 +100,7 @@ class HttpCli(object): | ||||
|     def run(self): | ||||
|         """returns true if connection can be reused""" | ||||
|         self.keepalive = False | ||||
|         self.is_https = False | ||||
|         self.headers = {} | ||||
|         self.hint = None | ||||
|         try: | ||||
| @@ -119,7 +128,8 @@ class HttpCli(object): | ||||
|             self.loud_reply(unicode(ex), status=ex.code, volsan=True) | ||||
|             return self.keepalive | ||||
|  | ||||
|         # time.sleep(0.4) | ||||
|         if self.args.rsp_slp: | ||||
|             time.sleep(self.args.rsp_slp) | ||||
|  | ||||
|         # normalize incoming headers to lowercase; | ||||
|         # outgoing headers however are Correct-Case | ||||
| @@ -129,6 +139,7 @@ class HttpCli(object): | ||||
|  | ||||
|         v = self.headers.get("connection", "").lower() | ||||
|         self.keepalive = not v.startswith("close") and self.http_ver != "HTTP/1.0" | ||||
|         self.is_https = (self.headers.get("x-forwarded-proto", "").lower() == "https" or self.tls) | ||||
|  | ||||
|         n = self.args.rproxy | ||||
|         if n: | ||||
| @@ -146,6 +157,8 @@ class HttpCli(object): | ||||
|  | ||||
|                 self.log_src = self.conn.set_rproxy(self.ip) | ||||
|  | ||||
|         self.dip = self.ip.replace(":", ".") | ||||
|  | ||||
|         if self.args.ihead: | ||||
|             keys = self.args.ihead | ||||
|             if "*" in keys: | ||||
| @@ -162,15 +175,11 @@ class HttpCli(object): | ||||
|         # split req into vpath + uparam | ||||
|         uparam = {} | ||||
|         if "?" not in self.req: | ||||
|             if not self.req.endswith("/"): | ||||
|                 self.absolute_urls = True | ||||
|  | ||||
|             self.trailing_slash = self.req.endswith("/") | ||||
|             vpath = undot(self.req) | ||||
|         else: | ||||
|             vpath, arglist = self.req.split("?", 1) | ||||
|             if not vpath.endswith("/"): | ||||
|                 self.absolute_urls = True | ||||
|  | ||||
|             self.trailing_slash = vpath.endswith("/") | ||||
|             vpath = undot(vpath) | ||||
|             for k in arglist.split("&"): | ||||
|                 if "=" in k: | ||||
| @@ -219,10 +228,10 @@ class HttpCli(object): | ||||
|         self.gvol = self.asrv.vfs.aget[self.uname] | ||||
|  | ||||
|         if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"): | ||||
|             self.out_headers["Set-Cookie"] = self.get_pwd_cookie(pwd)[0] | ||||
|             self.out_headerlist.append(("Set-Cookie", self.get_pwd_cookie(pwd)[0])) | ||||
|  | ||||
|         ua = self.headers.get("user-agent", "") | ||||
|         self.is_rclone = ua.startswith("rclone/") | ||||
|         self.ua = self.headers.get("user-agent", "") | ||||
|         self.is_rclone = self.ua.startswith("rclone/") | ||||
|         if self.is_rclone: | ||||
|             uparam["raw"] = False | ||||
|             uparam["dots"] = False | ||||
| @@ -268,12 +277,28 @@ class HttpCli(object): | ||||
|             except Pebkac: | ||||
|                 return False | ||||
|  | ||||
|     def permit_caching(self): | ||||
|         cache = self.uparam.get("cache") | ||||
|         if cache is None: | ||||
|             self.out_headers.update(NO_CACHE) | ||||
|             return | ||||
|  | ||||
|         n = "604800" if cache == "i" else cache or "69" | ||||
|         self.out_headers["Cache-Control"] = "max-age=" + n | ||||
|  | ||||
|     def k304(self): | ||||
|         k304 = self.cookies.get("k304") | ||||
|         return k304 == "y" or ("; Trident/" in self.ua and not k304) | ||||
|  | ||||
|     def send_headers(self, length, status=200, mime=None, headers=None): | ||||
|         response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])] | ||||
|  | ||||
|         if length is not None: | ||||
|             response.append("Content-Length: " + unicode(length)) | ||||
|  | ||||
|         if status == 304 and self.k304(): | ||||
|             self.keepalive = False | ||||
|  | ||||
|         # close if unknown length, otherwise take client's preference | ||||
|         response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close")) | ||||
|  | ||||
| @@ -283,11 +308,11 @@ class HttpCli(object): | ||||
|  | ||||
|         # default to utf8 html if no content-type is set | ||||
|         if not mime: | ||||
|             mime = self.out_headers.get("Content-Type", "text/html; charset=UTF-8") | ||||
|             mime = self.out_headers.get("Content-Type", "text/html; charset=utf-8") | ||||
|  | ||||
|         self.out_headers["Content-Type"] = mime | ||||
|  | ||||
|         for k, v in self.out_headers.items(): | ||||
|         for k, v in list(self.out_headers.items()) + self.out_headerlist: | ||||
|             response.append("{}: {}".format(k, v)) | ||||
|  | ||||
|         try: | ||||
| @@ -387,7 +412,7 @@ class HttpCli(object): | ||||
|         if not self.can_read and not self.can_write and not self.can_get: | ||||
|             if self.vpath: | ||||
|                 self.log("inaccessible: [{}]".format(self.vpath)) | ||||
|                 return self.tx_404() | ||||
|                 return self.tx_404(True) | ||||
|  | ||||
|             self.uparam["h"] = False | ||||
|  | ||||
| @@ -404,12 +429,24 @@ class HttpCli(object): | ||||
|             return self.scanvol() | ||||
|  | ||||
|         if not self.vpath: | ||||
|             if "reload" in self.uparam: | ||||
|                 return self.handle_reload() | ||||
|  | ||||
|             if "stack" in self.uparam: | ||||
|                 return self.tx_stack() | ||||
|  | ||||
|             if "ups" in self.uparam: | ||||
|                 return self.tx_ups() | ||||
|  | ||||
|             if "k304" in self.uparam: | ||||
|                 return self.set_k304() | ||||
|  | ||||
|             if "am_js" in self.uparam: | ||||
|                 return self.set_am_js() | ||||
|  | ||||
|             if "reset" in self.uparam: | ||||
|                 return self.set_cfg_reset() | ||||
|  | ||||
|             if "h" in self.uparam: | ||||
|                 return self.tx_mounts() | ||||
|  | ||||
| @@ -464,13 +501,13 @@ class HttpCli(object): | ||||
|             except: | ||||
|                 raise Pebkac(400, "client d/c before 100 continue") | ||||
|  | ||||
|         if "raw" in self.uparam: | ||||
|             return self.handle_stash() | ||||
|  | ||||
|         ctype = self.headers.get("content-type", "").lower() | ||||
|         if not ctype: | ||||
|             raise Pebkac(400, "you can't post without a content-type header") | ||||
|  | ||||
|         if "raw" in self.uparam: | ||||
|             return self.handle_stash() | ||||
|  | ||||
|         if "multipart/form-data" in ctype: | ||||
|             return self.handle_post_multipart() | ||||
|  | ||||
| @@ -487,7 +524,7 @@ class HttpCli(object): | ||||
|                 return self.handle_stash() | ||||
|  | ||||
|             if "save" in opt: | ||||
|                 post_sz, _, _, path = self.dump_to_file() | ||||
|                 post_sz, _, _, _, path = self.dump_to_file() | ||||
|                 self.log("urlform: {} bytes, {}".format(post_sz, path)) | ||||
|             elif "print" in opt: | ||||
|                 reader, _ = self.get_body_reader() | ||||
| @@ -514,11 +551,11 @@ class HttpCli(object): | ||||
|         raise Pebkac(405, "don't know how to handle POST({})".format(ctype)) | ||||
|  | ||||
|     def get_body_reader(self): | ||||
|         chunked = "chunked" in self.headers.get("transfer-encoding", "").lower() | ||||
|         if "chunked" in self.headers.get("transfer-encoding", "").lower(): | ||||
|             return read_socket_chunked(self.sr), -1 | ||||
|  | ||||
|         remains = int(self.headers.get("content-length", -1)) | ||||
|         if chunked: | ||||
|             return read_socket_chunked(self.sr), remains | ||||
|         elif remains == -1: | ||||
|         if remains == -1: | ||||
|             self.keepalive = False | ||||
|             return read_socket_unbounded(self.sr), remains | ||||
|         else: | ||||
| @@ -531,17 +568,16 @@ class HttpCli(object): | ||||
|         fdir = os.path.join(vfs.realpath, rem) | ||||
|         if lim: | ||||
|             fdir, rem = lim.all(self.ip, rem, remains, fdir) | ||||
|             bos.makedirs(fdir) | ||||
|  | ||||
|         addr = self.ip.replace(":", ".") | ||||
|         fn = "put-{:.6f}-{}.bin".format(time.time(), addr) | ||||
|         path = os.path.join(fdir, fn) | ||||
|         if self.args.nw: | ||||
|             path = os.devnull | ||||
|         fn = None | ||||
|         if rem and not self.trailing_slash and not bos.path.isdir(fdir): | ||||
|             fdir, fn = os.path.split(fdir) | ||||
|             rem, _ = vsplit(rem) | ||||
|  | ||||
|         open_f = open | ||||
|         open_a = [fsenc(path), "wb", 512 * 1024] | ||||
|         open_ka = {} | ||||
|         bos.makedirs(fdir) | ||||
|  | ||||
|         open_ka = {"fun": open} | ||||
|         open_a = ["wb", 512 * 1024] | ||||
|  | ||||
|         # user-request || config-force | ||||
|         if ("gz" in vfs.flags or "xz" in vfs.flags) and ( | ||||
| @@ -573,8 +609,8 @@ class HttpCli(object): | ||||
|                 alg = alg or "gz"  # def.pk | ||||
|                 try: | ||||
|                     # config-forced opts | ||||
|                     alg, lv = pk.split(",") | ||||
|                     lv[alg] = int(lv) | ||||
|                     alg, nlv = pk.split(",") | ||||
|                     lv[alg] = int(nlv) | ||||
|                 except: | ||||
|                     pass | ||||
|  | ||||
| @@ -582,17 +618,29 @@ class HttpCli(object): | ||||
|  | ||||
|             self.log("compressing with {} level {}".format(alg, lv.get(alg))) | ||||
|             if alg == "gz": | ||||
|                 open_f = gzip.GzipFile | ||||
|                 open_a = [fsenc(path), "wb", lv[alg], None, 0x5FEE6600]  # 2021-01-01 | ||||
|                 open_ka["fun"] = gzip.GzipFile | ||||
|                 open_a = ["wb", lv[alg], None, 0x5FEE6600]  # 2021-01-01 | ||||
|             elif alg == "xz": | ||||
|                 open_f = lzma.open | ||||
|                 open_a = [fsenc(path), "wb"] | ||||
|                 open_ka = {"preset": lv[alg]} | ||||
|                 open_ka = {"fun": lzma.open, "preset": lv[alg]} | ||||
|                 open_a = ["wb"] | ||||
|             else: | ||||
|                 self.log("fallthrough? thats a bug", 1) | ||||
|  | ||||
|         with open_f(*open_a, **open_ka) as f: | ||||
|             post_sz, _, sha_b64 = hashcopy(reader, f) | ||||
|         suffix = "-{:.6f}-{}".format(time.time(), self.dip) | ||||
|         params = {"suffix": suffix, "fdir": fdir} | ||||
|         if self.args.nw: | ||||
|             params = {} | ||||
|             fn = os.devnull | ||||
|  | ||||
|         params.update(open_ka) | ||||
|  | ||||
|         if not fn: | ||||
|             fn = "put" + suffix | ||||
|  | ||||
|         with ren_open(fn, *open_a, **params) as f: | ||||
|             f, fn = f["orz"] | ||||
|             path = os.path.join(fdir, fn) | ||||
|             post_sz, sha_hex, sha_b64 = hashcopy(reader, f) | ||||
|  | ||||
|         if lim: | ||||
|             lim.nup(self.ip) | ||||
| @@ -616,13 +664,14 @@ class HttpCli(object): | ||||
|                 time.time(), | ||||
|             ) | ||||
|  | ||||
|         return post_sz, sha_b64, remains, path | ||||
|         return post_sz, sha_hex, sha_b64, remains, path | ||||
|  | ||||
|     def handle_stash(self): | ||||
|         post_sz, sha_b64, remains, path = self.dump_to_file() | ||||
|         post_sz, sha_hex, sha_b64, remains, path = self.dump_to_file() | ||||
|         spd = self._spd(post_sz) | ||||
|         self.log("{} wrote {}/{} bytes to {}".format(spd, post_sz, remains, path)) | ||||
|         self.reply("{}\n{}\n".format(post_sz, sha_b64).encode("utf-8")) | ||||
|         m = "{}\n{}\n{}\n".format(post_sz, sha_b64, sha_hex[:56]) | ||||
|         self.reply(m.encode("utf-8")) | ||||
|         return True | ||||
|  | ||||
|     def _spd(self, nbytes, add=True): | ||||
| @@ -754,6 +803,10 @@ class HttpCli(object): | ||||
|         return True | ||||
|  | ||||
|     def handle_search(self, body): | ||||
|         idx = self.conn.get_u2idx() | ||||
|         if not hasattr(idx, "p_end"): | ||||
|             raise Pebkac(500, "sqlite3 is not available on the server; cannot search") | ||||
|  | ||||
|         vols = [] | ||||
|         seen = {} | ||||
|         for vtop in self.rvol: | ||||
| @@ -765,7 +818,6 @@ class HttpCli(object): | ||||
|             seen[vfs] = True | ||||
|             vols.append([vfs.vpath, vfs.realpath, vfs.flags]) | ||||
|  | ||||
|         idx = self.conn.get_u2idx() | ||||
|         t0 = time.time() | ||||
|         if idx.p_end: | ||||
|             penalty = 0.7 | ||||
| @@ -825,62 +877,87 @@ class HttpCli(object): | ||||
|         response = x.get() | ||||
|         chunksize, cstart, path, lastmod = response | ||||
|  | ||||
|         if self.args.nw: | ||||
|             path = os.devnull | ||||
|         try: | ||||
|             if self.args.nw: | ||||
|                 path = os.devnull | ||||
|  | ||||
|         if remains > chunksize: | ||||
|             raise Pebkac(400, "your chunk is too big to fit") | ||||
|             if remains > chunksize: | ||||
|                 raise Pebkac(400, "your chunk is too big to fit") | ||||
|  | ||||
|         self.log("writing {} #{} @{} len {}".format(path, chash, cstart, remains)) | ||||
|             self.log("writing {} #{} @{} len {}".format(path, chash, cstart, remains)) | ||||
|  | ||||
|         reader = read_socket(self.sr, remains) | ||||
|             reader = read_socket(self.sr, remains) | ||||
|  | ||||
|         with open(fsenc(path), "rb+", 512 * 1024) as f: | ||||
|             f.seek(cstart[0]) | ||||
|             post_sz, _, sha_b64 = hashcopy(reader, f) | ||||
|             f = None | ||||
|             fpool = not self.args.no_fpool | ||||
|             if fpool: | ||||
|                 with self.mutex: | ||||
|                     try: | ||||
|                         f = self.u2fh.pop(path) | ||||
|                     except: | ||||
|                         pass | ||||
|  | ||||
|             if sha_b64 != chash: | ||||
|                 raise Pebkac( | ||||
|                     400, | ||||
|                     "your chunk got corrupted somehow (received {} bytes); expected vs received hash:\n{}\n{}".format( | ||||
|                         post_sz, chash, sha_b64 | ||||
|                     ), | ||||
|                 ) | ||||
|             f = f or open(fsenc(path), "rb+", 512 * 1024) | ||||
|  | ||||
|             if len(cstart) > 1 and path != os.devnull: | ||||
|                 self.log( | ||||
|                     "clone {} to {}".format( | ||||
|                         cstart[0], " & ".join(unicode(x) for x in cstart[1:]) | ||||
|             try: | ||||
|                 f.seek(cstart[0]) | ||||
|                 post_sz, _, sha_b64 = hashcopy(reader, f) | ||||
|  | ||||
|                 if sha_b64 != chash: | ||||
|                     m = "your chunk got corrupted somehow (received {} bytes); expected vs received hash:\n{}\n{}" | ||||
|                     raise Pebkac(400, m.format(post_sz, chash, sha_b64)) | ||||
|  | ||||
|                 if len(cstart) > 1 and path != os.devnull: | ||||
|                     self.log( | ||||
|                         "clone {} to {}".format( | ||||
|                             cstart[0], " & ".join(unicode(x) for x in cstart[1:]) | ||||
|                         ) | ||||
|                     ) | ||||
|                 ) | ||||
|                 ofs = 0 | ||||
|                 while ofs < chunksize: | ||||
|                     bufsz = min(chunksize - ofs, 4 * 1024 * 1024) | ||||
|                     f.seek(cstart[0] + ofs) | ||||
|                     buf = f.read(bufsz) | ||||
|                     for wofs in cstart[1:]: | ||||
|                         f.seek(wofs + ofs) | ||||
|                         f.write(buf) | ||||
|                     ofs = 0 | ||||
|                     while ofs < chunksize: | ||||
|                         bufsz = min(chunksize - ofs, 4 * 1024 * 1024) | ||||
|                         f.seek(cstart[0] + ofs) | ||||
|                         buf = f.read(bufsz) | ||||
|                         for wofs in cstart[1:]: | ||||
|                             f.seek(wofs + ofs) | ||||
|                             f.write(buf) | ||||
|  | ||||
|                     ofs += len(buf) | ||||
|                         ofs += len(buf) | ||||
|  | ||||
|                 self.log("clone {} done".format(cstart[0])) | ||||
|                     self.log("clone {} done".format(cstart[0])) | ||||
|             finally: | ||||
|                 if not fpool: | ||||
|                     f.close() | ||||
|                 else: | ||||
|                     with self.mutex: | ||||
|                         self.u2fh.put(path, f) | ||||
|         finally: | ||||
|             x = self.conn.hsrv.broker.put(True, "up2k.release_chunk", ptop, wark, chash) | ||||
|             x.get()  # block client until released | ||||
|  | ||||
|         x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash) | ||||
|         x = x.get() | ||||
|         try: | ||||
|             num_left, path = x | ||||
|             num_left, fin_path = x | ||||
|         except: | ||||
|             self.loud_reply(x, status=500) | ||||
|             return False | ||||
|  | ||||
|         if not ANYWIN and num_left == 0: | ||||
|         if not num_left and fpool: | ||||
|             with self.mutex: | ||||
|                 self.u2fh.close(path) | ||||
|  | ||||
|         # windows cant rename open files | ||||
|         if ANYWIN and path != fin_path and not self.args.nw: | ||||
|             self.conn.hsrv.broker.put(True, "up2k.finish_upload", ptop, wark).get() | ||||
|  | ||||
|         if not ANYWIN and not num_left: | ||||
|             times = (int(time.time()), int(lastmod)) | ||||
|             self.log("no more chunks, setting times {}".format(times)) | ||||
|             try: | ||||
|                 bos.utime(path, times) | ||||
|                 bos.utime(fin_path, times) | ||||
|             except: | ||||
|                 self.log("failed to utime ({}, {})".format(path, times)) | ||||
|                 self.log("failed to utime ({}, {})".format(fin_path, times)) | ||||
|  | ||||
|         spd = self._spd(post_sz) | ||||
|         self.log("{} thank".format(spd)) | ||||
| @@ -903,15 +980,13 @@ class HttpCli(object): | ||||
|     def get_pwd_cookie(self, pwd): | ||||
|         if pwd in self.asrv.iacct: | ||||
|             msg = "login ok" | ||||
|             dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365) | ||||
|             exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT") | ||||
|             dur = 60 * 60 * 24 * 365 | ||||
|         else: | ||||
|             msg = "naw dude" | ||||
|             pwd = "x"  # nosec | ||||
|             exp = "Fri, 15 Aug 1997 01:00:00 GMT" | ||||
|             dur = None | ||||
|  | ||||
|         ck = "cppwd={}; Path=/; Expires={}; SameSite=Lax".format(pwd, exp) | ||||
|         return [ck, msg] | ||||
|         return [gencookie("cppwd", pwd, dur), msg] | ||||
|  | ||||
|     def handle_mkdir(self): | ||||
|         new_dir = self.parser.require("name", 512) | ||||
| @@ -1003,7 +1078,7 @@ class HttpCli(object): | ||||
|                     if not bos.path.isdir(fdir): | ||||
|                         raise Pebkac(404, "that folder does not exist") | ||||
|  | ||||
|                     suffix = ".{:.6f}-{}".format(time.time(), self.ip) | ||||
|                     suffix = "-{:.6f}-{}".format(time.time(), self.dip) | ||||
|                     open_args = {"fdir": fdir, "suffix": suffix} | ||||
|                 else: | ||||
|                     open_args = {} | ||||
| @@ -1019,7 +1094,7 @@ class HttpCli(object): | ||||
|                         f, fname = f["orz"] | ||||
|                         abspath = os.path.join(fdir, fname) | ||||
|                         self.log("writing to {}".format(abspath)) | ||||
|                         sz, sha512_hex, _ = hashcopy(p_data, f) | ||||
|                         sz, sha_hex, sha_b64 = hashcopy(p_data, f) | ||||
|                         if sz == 0: | ||||
|                             raise Pebkac(400, "empty files in post") | ||||
|  | ||||
| @@ -1032,7 +1107,7 @@ class HttpCli(object): | ||||
|                             bos.unlink(abspath) | ||||
|                             raise | ||||
|  | ||||
|                     files.append([sz, sha512_hex, p_file, fname]) | ||||
|                     files.append([sz, sha_hex, sha_b64, p_file, fname, abspath]) | ||||
|                     dbv, vrem = vfs.get_dbv(rem) | ||||
|                     self.conn.hsrv.broker.put( | ||||
|                         False, | ||||
| @@ -1084,29 +1159,35 @@ class HttpCli(object): | ||||
|             jmsg["error"] = errmsg | ||||
|             errmsg = "ERROR: " + errmsg | ||||
|  | ||||
|         for sz, sha512, ofn, lfn in files: | ||||
|         for sz, sha_hex, sha_b64, ofn, lfn, ap in files: | ||||
|             vsuf = "" | ||||
|             if self.can_read and "fk" in vfs.flags: | ||||
|                 vsuf = "?k=" + gen_filekey( | ||||
|                     self.args.fk_salt, | ||||
|                     abspath, | ||||
|                     sz, | ||||
|                     0 if ANYWIN else bos.stat(os.path.join(vfs.realpath, lfn)).st_ino, | ||||
|                     0 if ANYWIN or not ap else bos.stat(ap).st_ino, | ||||
|                 )[: vfs.flags["fk"]] | ||||
|  | ||||
|             vpath = "{}/{}".format(upload_vpath, lfn).strip("/") | ||||
|             msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a> {}\n'.format( | ||||
|                 sha512[:56], sz, quotep(vpath) + vsuf, html_escape(ofn, crlf=True), vsuf | ||||
|             msg += 'sha512: {} // {} // {} bytes // <a href="/{}">{}</a> {}\n'.format( | ||||
|                 sha_hex[:56], | ||||
|                 sha_b64, | ||||
|                 sz, | ||||
|                 quotep(vpath) + vsuf, | ||||
|                 html_escape(ofn, crlf=True), | ||||
|                 vsuf, | ||||
|             ) | ||||
|             # truncated SHA-512 prevents length extension attacks; | ||||
|             # using SHA-512/224, optionally SHA-512/256 = :64 | ||||
|             jpart = { | ||||
|                 "url": "{}://{}/{}".format( | ||||
|                     "https" if self.tls else "http", | ||||
|                     "https" if self.is_https else "http", | ||||
|                     self.headers.get("host", "copyparty"), | ||||
|                     vpath + vsuf, | ||||
|                 ), | ||||
|                 "sha512": sha512[:56], | ||||
|                 "sha512": sha_hex[:56], | ||||
|                 "sha_b64": sha_b64, | ||||
|                 "sz": sz, | ||||
|                 "fn": lfn, | ||||
|                 "fn_orig": ofn, | ||||
| @@ -1286,6 +1367,9 @@ class HttpCli(object): | ||||
|             try: | ||||
|                 fs_path = req_path + ext | ||||
|                 st = bos.stat(fs_path) | ||||
|                 if stat.S_ISDIR(st.st_mode): | ||||
|                     continue | ||||
|  | ||||
|                 file_ts = max(file_ts, st.st_mtime) | ||||
|                 editions[ext or "plain"] = [fs_path, st.st_size] | ||||
|             except: | ||||
| @@ -1324,8 +1408,7 @@ class HttpCli(object): | ||||
|             if "gzip" not in supported_editions: | ||||
|                 decompress = True | ||||
|             else: | ||||
|                 ua = self.headers.get("user-agent", "") | ||||
|                 if re.match(r"MSIE [4-6]\.", ua) and " SV1" not in ua: | ||||
|                 if re.match(r"MSIE [4-6]\.", self.ua) and " SV1" not in self.ua: | ||||
|                     decompress = True | ||||
|  | ||||
|             if not decompress: | ||||
| @@ -1407,17 +1490,18 @@ class HttpCli(object): | ||||
|  | ||||
|         if is_compressed: | ||||
|             self.out_headers["Cache-Control"] = "max-age=573" | ||||
|         elif "cache" in self.uparam: | ||||
|             self.out_headers["Cache-Control"] = "max-age=69" | ||||
|         else: | ||||
|             self.out_headers.update(NO_CACHE) | ||||
|             self.permit_caching() | ||||
|  | ||||
|         if "txt" in self.uparam: | ||||
|             mime = "text/plain; charset={}".format(self.uparam["txt"] or "utf-8") | ||||
|         elif "mime" in self.uparam: | ||||
|             mime = self.uparam.get("mime") | ||||
|         else: | ||||
|             mime = guess_mime(req_path) | ||||
|  | ||||
|         self.out_headers["Accept-Ranges"] = "bytes" | ||||
|         self.send_headers( | ||||
|             length=upper - lower, | ||||
|             status=status, | ||||
|             mime=guess_mime(req_path), | ||||
|         ) | ||||
|         self.send_headers(length=upper - lower, status=status, mime=mime) | ||||
|  | ||||
|         logmsg += unicode(status) + logtail | ||||
|  | ||||
| @@ -1429,13 +1513,14 @@ class HttpCli(object): | ||||
|  | ||||
|         ret = True | ||||
|         with open_func(*open_args) as f: | ||||
|             if use_sendfile: | ||||
|                 remains = sendfile_kern(lower, upper, f, self.s) | ||||
|             else: | ||||
|                 remains = sendfile_py(lower, upper, f, self.s) | ||||
|             sendfun = sendfile_kern if use_sendfile else sendfile_py | ||||
|             remains = sendfun( | ||||
|                 self.log, lower, upper, f, self.s, self.args.s_wr_sz, self.args.s_wr_slp | ||||
|             ) | ||||
|  | ||||
|         if remains > 0: | ||||
|             logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m" | ||||
|             self.keepalive = False | ||||
|  | ||||
|         spd = self._spd((upper - lower) - remains) | ||||
|         if self.do_log: | ||||
| @@ -1506,6 +1591,7 @@ class HttpCli(object): | ||||
|         return True | ||||
|  | ||||
|     def tx_ico(self, ext, exact=False): | ||||
|         self.permit_caching() | ||||
|         if ext.endswith("/"): | ||||
|             ext = "folder" | ||||
|             exact = True | ||||
| @@ -1538,7 +1624,7 @@ class HttpCli(object): | ||||
|  | ||||
|         if not self.can_write: | ||||
|             if "edit" in self.uparam or "edit2" in self.uparam: | ||||
|                 return self.tx_404() | ||||
|                 return self.tx_404(True) | ||||
|  | ||||
|         tpl = "mde" if "edit2" in self.uparam else "md" | ||||
|         html_path = os.path.join(E.mod, "web", "{}.html".format(tpl)) | ||||
| @@ -1636,12 +1722,36 @@ class HttpCli(object): | ||||
|             tagq=vs["tagq"], | ||||
|             mtpq=vs["mtpq"], | ||||
|             url_suf=suf, | ||||
|             k304=self.k304(), | ||||
|         ) | ||||
|         self.reply(html.encode("utf-8")) | ||||
|         return True | ||||
|  | ||||
|     def tx_404(self): | ||||
|         m = '<h1>404 not found  ┐( ´ -`)┌</h1><p>or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>' | ||||
|     def set_k304(self): | ||||
|         ck = gencookie("k304", self.uparam["k304"], 60 * 60 * 24 * 365) | ||||
|         self.out_headerlist.append(("Set-Cookie", ck)) | ||||
|         self.redirect("", "?h#cc") | ||||
|  | ||||
|     def set_am_js(self): | ||||
|         v = "n" if self.uparam["am_js"] == "n" else "y" | ||||
|         ck = gencookie("js", v, 60 * 60 * 24 * 365) | ||||
|         self.out_headerlist.append(("Set-Cookie", ck)) | ||||
|         self.reply(b"promoted\n") | ||||
|  | ||||
|     def set_cfg_reset(self): | ||||
|         for k in ("k304", "js", "cppwd"): | ||||
|             self.out_headerlist.append(("Set-Cookie", gencookie(k, "x", None))) | ||||
|  | ||||
|         self.redirect("", "?h#cc") | ||||
|  | ||||
|     def tx_404(self, is_403=False): | ||||
|         if self.args.vague_403: | ||||
|             m = '<h1>404 not found  ┐( ´ -`)┌</h1><p>or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>' | ||||
|         elif is_403: | ||||
|             m = '<h1>403 forbiddena  ~┻━┻</h1><p>you\'ll have to log in or <a href="/?h">go home</a></p>' | ||||
|         else: | ||||
|             m = '<h1>404 not found  ┐( ´ -`)┌</h1><p><a href="/?h">go home</a></p>' | ||||
|  | ||||
|         html = self.j2("splash", this=self, qvpath=quotep(self.vpath), msg=m) | ||||
|         self.reply(html.encode("utf-8"), status=404) | ||||
|         return True | ||||
| @@ -1655,7 +1765,7 @@ class HttpCli(object): | ||||
|  | ||||
|         vn, _ = self.asrv.vfs.get(self.vpath, self.uname, True, True) | ||||
|  | ||||
|         args = [self.asrv.vfs.all_vols, [vn.vpath]] | ||||
|         args = [self.asrv.vfs.all_vols, [vn.vpath], False] | ||||
|  | ||||
|         x = self.conn.hsrv.broker.put(True, "up2k.rescan", *args) | ||||
|         x = x.get() | ||||
| @@ -1665,6 +1775,20 @@ class HttpCli(object): | ||||
|  | ||||
|         raise Pebkac(500, x) | ||||
|  | ||||
|     def handle_reload(self): | ||||
|         act = self.uparam.get("reload") | ||||
|         if act != "cfg": | ||||
|             raise Pebkac(400, "only config files ('cfg') can be reloaded rn") | ||||
|  | ||||
|         if not [x for x in self.wvol if x in self.rvol]: | ||||
|             raise Pebkac(403, "not allowed for user " + self.uname) | ||||
|  | ||||
|         if self.args.no_reload: | ||||
|             raise Pebkac(403, "the reload feature is disabled in server config") | ||||
|  | ||||
|         x = self.conn.hsrv.broker.put(True, "reload") | ||||
|         return self.redirect("", "?h", x.get(), "return to", False) | ||||
|  | ||||
|     def tx_stack(self): | ||||
|         if not [x for x in self.wvol if x in self.rvol]: | ||||
|             raise Pebkac(403, "not allowed for user " + self.uname) | ||||
| @@ -1736,13 +1860,16 @@ class HttpCli(object): | ||||
|         if not self.args.unpost: | ||||
|             raise Pebkac(400, "the unpost feature is disabled in server config") | ||||
|  | ||||
|         idx = self.conn.get_u2idx() | ||||
|         if not hasattr(idx, "p_end"): | ||||
|             raise Pebkac(500, "sqlite3 is not available on the server; cannot unpost") | ||||
|  | ||||
|         filt = self.uparam.get("filter") | ||||
|         lm = "ups [{}]".format(filt) | ||||
|         self.log(lm) | ||||
|  | ||||
|         ret = [] | ||||
|         t0 = time.time() | ||||
|         idx = self.conn.get_u2idx() | ||||
|         lim = time.time() - self.args.unpost | ||||
|         for vol in self.asrv.vfs.all_vols.values(): | ||||
|             cur = idx.get_cur(vol.realpath) | ||||
| @@ -1755,7 +1882,7 @@ class HttpCli(object): | ||||
|                 if filt and filt not in vp: | ||||
|                     continue | ||||
|  | ||||
|                 ret.append({"vp": vp, "sz": sz, "at": at}) | ||||
|                 ret.append({"vp": quotep(vp), "sz": sz, "at": at}) | ||||
|                 if len(ret) > 3000: | ||||
|                     ret.sort(key=lambda x: x["at"], reverse=True) | ||||
|                     ret = ret[:2000] | ||||
| @@ -1800,6 +1927,71 @@ class HttpCli(object): | ||||
|         ) | ||||
|         self.loud_reply(x.get()) | ||||
|  | ||||
|     def tx_ls(self, ls): | ||||
|         dirs = ls["dirs"] | ||||
|         files = ls["files"] | ||||
|         arg = self.uparam["ls"] | ||||
|         if arg in ["v", "t", "txt"]: | ||||
|             try: | ||||
|                 biggest = max(ls["files"] + ls["dirs"], key=itemgetter("sz"))["sz"] | ||||
|             except: | ||||
|                 biggest = 0 | ||||
|  | ||||
|             if arg == "v": | ||||
|                 fmt = "\033[0;7;36m{{}} {{:>{}}}\033[0m {{}}" | ||||
|                 nfmt = "{}" | ||||
|                 biggest = 0 | ||||
|                 f2 = "".join( | ||||
|                     "{}{{}}".format(x) | ||||
|                     for x in [ | ||||
|                         "\033[7m", | ||||
|                         "\033[27m", | ||||
|                         "", | ||||
|                         "\033[0;1m", | ||||
|                         "\033[0;36m", | ||||
|                         "\033[0m", | ||||
|                     ] | ||||
|                 ) | ||||
|                 ctab = {"B": 6, "K": 5, "M": 1, "G": 3} | ||||
|                 for lst in [dirs, files]: | ||||
|                     for x in lst: | ||||
|                         a = x["dt"].replace("-", " ").replace(":", " ").split(" ") | ||||
|                         x["dt"] = f2.format(*list(a)) | ||||
|                         sz = humansize(x["sz"], True) | ||||
|                         x["sz"] = "\033[0;3{}m{:>5}".format(ctab.get(sz[-1:], 0), sz) | ||||
|             else: | ||||
|                 fmt = "{{}}  {{:{},}}  {{}}" | ||||
|                 nfmt = "{:,}" | ||||
|  | ||||
|             for x in dirs: | ||||
|                 n = x["name"] + "/" | ||||
|                 if arg == "v": | ||||
|                     n = "\033[94m" + n | ||||
|  | ||||
|                 x["name"] = n | ||||
|  | ||||
|             fmt = fmt.format(len(nfmt.format(biggest))) | ||||
|             ret = [ | ||||
|                 "# {}: {}".format(x, ls[x]) | ||||
|                 for x in ["acct", "perms", "srvinf"] | ||||
|                 if x in ls | ||||
|             ] | ||||
|             ret += [ | ||||
|                 fmt.format(x["dt"], x["sz"], x["name"]) | ||||
|                 for y in [dirs, files] | ||||
|                 for x in y | ||||
|             ] | ||||
|             ret = "\n".join(ret) | ||||
|             mime = "text/plain; charset=utf-8" | ||||
|         else: | ||||
|             [x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y] | ||||
|  | ||||
|             ret = json.dumps(ls) | ||||
|             mime = "application/json" | ||||
|  | ||||
|         self.reply(ret.encode("utf-8", "replace") + b"\n", mime=mime) | ||||
|         return True | ||||
|  | ||||
|     def tx_browser(self): | ||||
|         vpath = "" | ||||
|         vpnodes = [["", "/"]] | ||||
| @@ -1868,7 +2060,7 @@ class HttpCli(object): | ||||
|             return self.tx_file(abspath) | ||||
|  | ||||
|         elif is_dir and not self.can_read and not self.can_write: | ||||
|             return self.tx_404() | ||||
|             return self.tx_404(True) | ||||
|  | ||||
|         srv_info = [] | ||||
|  | ||||
| @@ -1882,11 +2074,14 @@ class HttpCli(object): | ||||
|             # some fuses misbehave | ||||
|             if not self.args.nid: | ||||
|                 if WINDOWS: | ||||
|                     bfree = ctypes.c_ulonglong(0) | ||||
|                     ctypes.windll.kernel32.GetDiskFreeSpaceExW( | ||||
|                         ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree) | ||||
|                     ) | ||||
|                     srv_info.append(humansize(bfree.value) + " free") | ||||
|                     try: | ||||
|                         bfree = ctypes.c_ulonglong(0) | ||||
|                         ctypes.windll.kernel32.GetDiskFreeSpaceExW( | ||||
|                             ctypes.c_wchar_p(abspath), None, None, ctypes.pointer(bfree) | ||||
|                         ) | ||||
|                         srv_info.append(humansize(bfree.value) + " free") | ||||
|                     except: | ||||
|                         pass | ||||
|                 else: | ||||
|                     sv = os.statvfs(fsenc(abspath)) | ||||
|                     free = humansize(sv.f_frsize * sv.f_bfree, True) | ||||
| @@ -1913,6 +2108,7 @@ class HttpCli(object): | ||||
|  | ||||
|         url_suf = self.urlq({}, []) | ||||
|         is_ls = "ls" in self.uparam | ||||
|         is_js = self.cookies.get("js") == "y" | ||||
|  | ||||
|         tpl = "browser" | ||||
|         if "b" in self.uparam: | ||||
| @@ -1941,6 +2137,7 @@ class HttpCli(object): | ||||
|             "taglist": [], | ||||
|             "srvinf": srv_info, | ||||
|             "acct": self.uname, | ||||
|             "idx": ("e2d" in vn.flags), | ||||
|             "perms": perms, | ||||
|             "logues": logues, | ||||
|             "readme": readme, | ||||
| @@ -1949,12 +2146,14 @@ class HttpCli(object): | ||||
|             "vdir": quotep(self.vpath), | ||||
|             "vpnodes": vpnodes, | ||||
|             "files": [], | ||||
|             "ls0": None, | ||||
|             "acct": self.uname, | ||||
|             "perms": json.dumps(perms), | ||||
|             "taglist": [], | ||||
|             "def_hcols": [], | ||||
|             "have_up2k_idx": ("e2d" in vn.flags), | ||||
|             "have_tags_idx": ("e2t" in vn.flags), | ||||
|             "have_acode": (not self.args.no_acode), | ||||
|             "have_mv": (not self.args.no_mv), | ||||
|             "have_del": (not self.args.no_del), | ||||
|             "have_zip": (not self.args.no_zip), | ||||
| @@ -1968,12 +2167,10 @@ class HttpCli(object): | ||||
|         } | ||||
|         if not self.can_read: | ||||
|             if is_ls: | ||||
|                 ret = json.dumps(ls_ret) | ||||
|                 self.reply(ret.encode("utf-8", "replace"), mime="application/json") | ||||
|                 return True | ||||
|                 return self.tx_ls(ls_ret) | ||||
|  | ||||
|             if not stat.S_ISDIR(st.st_mode): | ||||
|                 return self.tx_404() | ||||
|                 return self.tx_404(True) | ||||
|  | ||||
|             if "zip" in self.uparam or "tar" in self.uparam: | ||||
|                 raise Pebkac(403) | ||||
| @@ -2030,7 +2227,7 @@ class HttpCli(object): | ||||
|         for fn in vfs_ls: | ||||
|             base = "" | ||||
|             href = fn | ||||
|             if not is_ls and self.absolute_urls and vpath: | ||||
|             if not is_ls and not is_js and not self.trailing_slash and vpath: | ||||
|                 base = "/" + vpath + "/" | ||||
|                 href = base + fn | ||||
|  | ||||
| @@ -2067,6 +2264,8 @@ class HttpCli(object): | ||||
|  | ||||
|             try: | ||||
|                 ext = "---" if is_dir else fn.rsplit(".", 1)[1] | ||||
|                 if len(ext) > 16: | ||||
|                     ext = ext[:16] | ||||
|             except: | ||||
|                 ext = "%" | ||||
|  | ||||
| @@ -2145,26 +2344,48 @@ class HttpCli(object): | ||||
|                 f["tags"] = {} | ||||
|  | ||||
|         if is_ls: | ||||
|             [x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y] | ||||
|             ls_ret["dirs"] = dirs | ||||
|             ls_ret["files"] = files | ||||
|             ls_ret["taglist"] = taglist | ||||
|             ret = json.dumps(ls_ret) | ||||
|             self.reply(ret.encode("utf-8", "replace"), mime="application/json") | ||||
|             return True | ||||
|             return self.tx_ls(ls_ret) | ||||
|  | ||||
|         doc = self.uparam.get("doc") if self.can_read else None | ||||
|         if doc: | ||||
|             doc = unquotep(doc.replace("+", " ")) | ||||
|             j2a["docname"] = doc | ||||
|             if next((x for x in files if x["name"] == doc), None): | ||||
|                 with open(os.path.join(abspath, doc), "rb") as f: | ||||
|                     doc = f.read().decode("utf-8", "replace") | ||||
|             else: | ||||
|                 self.log("doc 404: [{}]".format(doc), c=6) | ||||
|                 doc = "( textfile not found )" | ||||
|  | ||||
|             j2a["doc"] = doc | ||||
|  | ||||
|         if not self.conn.hsrv.prism: | ||||
|             j2a["no_prism"] = True | ||||
|  | ||||
|         for d in dirs: | ||||
|             d["name"] += "/" | ||||
|  | ||||
|         dirs.sort(key=itemgetter("name")) | ||||
|  | ||||
|         j2a["files"] = dirs + files | ||||
|         if is_js: | ||||
|             j2a["ls0"] = {"dirs": dirs, "files": files, "taglist": taglist} | ||||
|             j2a["files"] = [] | ||||
|         else: | ||||
|             j2a["files"] = dirs + files | ||||
|  | ||||
|         j2a["logues"] = logues | ||||
|         j2a["taglist"] = taglist | ||||
|         j2a["txt_ext"] = self.args.textfiles.replace(",", " ") | ||||
|  | ||||
|         if "mth" in vn.flags: | ||||
|             j2a["def_hcols"] = vn.flags["mth"].split(",") | ||||
|  | ||||
|         if self.args.js_browser: | ||||
|             j2a["js"] = self.args.js_browser | ||||
|  | ||||
|         if self.args.css_browser: | ||||
|             j2a["css"] = self.args.css_browser | ||||
|  | ||||
|   | ||||
| @@ -32,12 +32,14 @@ class HttpConn(object): | ||||
|         self.addr = addr | ||||
|         self.hsrv = hsrv | ||||
|  | ||||
|         self.mutex = hsrv.mutex | ||||
|         self.args = hsrv.args | ||||
|         self.asrv = hsrv.asrv | ||||
|         self.cert_path = hsrv.cert_path | ||||
|         self.u2fh = hsrv.u2fh | ||||
|  | ||||
|         enth = HAVE_PIL and not self.args.no_thumb | ||||
|         self.thumbcli = ThumbCli(hsrv.broker) if enth else None | ||||
|         self.thumbcli = ThumbCli(hsrv) if enth else None | ||||
|         self.ico = Ico(self.args) | ||||
|  | ||||
|         self.t0 = time.time() | ||||
|   | ||||
| @@ -27,7 +27,7 @@ except ImportError: | ||||
|     sys.exit(1) | ||||
|  | ||||
| from .__init__ import E, PY2, MACOS | ||||
| from .util import spack, min_ex, start_stackmon, start_log_thrs | ||||
| from .util import FHC, spack, min_ex, start_stackmon, start_log_thrs | ||||
| from .bos import bos | ||||
| from .httpconn import HttpConn | ||||
|  | ||||
| @@ -50,7 +50,9 @@ class HttpSrv(object): | ||||
|         self.log = broker.log | ||||
|         self.asrv = broker.asrv | ||||
|  | ||||
|         self.name = "httpsrv" + ("-n{}-i{:x}".format(nid, os.getpid()) if nid else "") | ||||
|         nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else "" | ||||
|  | ||||
|         self.name = "hsrv" + nsuf | ||||
|         self.mutex = threading.Lock() | ||||
|         self.stopping = False | ||||
|  | ||||
| @@ -58,7 +60,9 @@ class HttpSrv(object): | ||||
|         self.tp_ncli = 0  # fading | ||||
|         self.tp_time = None  # latest worker collect | ||||
|         self.tp_q = None if self.args.no_htp else queue.LifoQueue() | ||||
|         self.t_periodic = None | ||||
|  | ||||
|         self.u2fh = FHC() | ||||
|         self.srvs = [] | ||||
|         self.ncli = 0  # exact | ||||
|         self.clients = {}  # laggy | ||||
| @@ -72,6 +76,7 @@ class HttpSrv(object): | ||||
|             x: env.get_template(x + ".html") | ||||
|             for x in ["splash", "browser", "browser2", "msg", "md", "mde"] | ||||
|         } | ||||
|         self.prism = os.path.exists(os.path.join(E.mod, "web", "deps", "prism.js.gz")) | ||||
|  | ||||
|         cert_path = os.path.join(E.cfg, "cert.pem") | ||||
|         if bos.path.exists(cert_path): | ||||
| @@ -82,11 +87,6 @@ class HttpSrv(object): | ||||
|         if self.tp_q: | ||||
|             self.start_threads(4) | ||||
|  | ||||
|             name = "httpsrv-scaler" + ("-{}".format(nid) if nid else "") | ||||
|             t = threading.Thread(target=self.thr_scaler, name=name) | ||||
|             t.daemon = True | ||||
|             t.start() | ||||
|  | ||||
|         if nid: | ||||
|             if self.args.stackmon: | ||||
|                 start_stackmon(self.args.stackmon, nid) | ||||
| @@ -115,13 +115,19 @@ class HttpSrv(object): | ||||
|         for _ in range(n): | ||||
|             self.tp_q.put(None) | ||||
|  | ||||
|     def thr_scaler(self): | ||||
|     def periodic(self): | ||||
|         while True: | ||||
|             time.sleep(2 if self.tp_ncli else 30) | ||||
|             time.sleep(2 if self.tp_ncli or self.ncli else 10) | ||||
|             with self.mutex: | ||||
|                 self.tp_ncli = max(self.ncli, self.tp_ncli - 2) | ||||
|                 if self.tp_nthr > self.tp_ncli + 8: | ||||
|                     self.stop_threads(4) | ||||
|                 self.u2fh.clean() | ||||
|                 if self.tp_q: | ||||
|                     self.tp_ncli = max(self.ncli, self.tp_ncli - 2) | ||||
|                     if self.tp_nthr > self.tp_ncli + 8: | ||||
|                         self.stop_threads(4) | ||||
|  | ||||
|                 if not self.ncli and not self.u2fh.cache and self.tp_nthr <= 8: | ||||
|                     self.t_periodic = None | ||||
|                     return | ||||
|  | ||||
|     def listen(self, sck, nlisteners): | ||||
|         ip, port = sck.getsockname() | ||||
| @@ -141,7 +147,12 @@ class HttpSrv(object): | ||||
|         fno = srv_sck.fileno() | ||||
|         msg = "subscribed @ {}:{}  f{}".format(ip, port, fno) | ||||
|         self.log(self.name, msg) | ||||
|         self.broker.put(False, "cb_httpsrv_up") | ||||
|  | ||||
|         def fun(): | ||||
|             self.broker.put(False, "cb_httpsrv_up") | ||||
|  | ||||
|         threading.Thread(target=fun).start() | ||||
|  | ||||
|         while not self.stopping: | ||||
|             if self.args.log_conn: | ||||
|                 self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30") | ||||
| @@ -181,6 +192,16 @@ class HttpSrv(object): | ||||
|  | ||||
|         with self.mutex: | ||||
|             self.ncli += 1 | ||||
|             if not self.t_periodic: | ||||
|                 name = "hsrv-pt" | ||||
|                 if self.nid: | ||||
|                     name += "-{}".format(self.nid) | ||||
|  | ||||
|                 t = threading.Thread(target=self.periodic, name=name) | ||||
|                 self.t_periodic = t | ||||
|                 t.daemon = True | ||||
|                 t.start() | ||||
|  | ||||
|             if self.tp_q: | ||||
|                 self.tp_time = self.tp_time or now | ||||
|                 self.tp_ncli = max(self.tp_ncli, self.ncli) | ||||
|   | ||||
| @@ -8,7 +8,7 @@ import shutil | ||||
| import subprocess as sp | ||||
|  | ||||
| from .__init__ import PY2, WINDOWS, unicode | ||||
| from .util import fsenc, fsdec, uncyg, REKOBO_LKEY | ||||
| from .util import fsenc, fsdec, uncyg, runcmd, REKOBO_LKEY | ||||
| from .bos import bos | ||||
|  | ||||
|  | ||||
| @@ -73,7 +73,7 @@ class MParser(object): | ||||
|             raise Exception() | ||||
|  | ||||
|  | ||||
| def ffprobe(abspath): | ||||
| def ffprobe(abspath, timeout=10): | ||||
|     cmd = [ | ||||
|         b"ffprobe", | ||||
|         b"-hide_banner", | ||||
| @@ -82,10 +82,8 @@ def ffprobe(abspath): | ||||
|         b"--", | ||||
|         fsenc(abspath), | ||||
|     ] | ||||
|     p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) | ||||
|     r = p.communicate() | ||||
|     txt = r[0].decode("utf-8", "replace") | ||||
|     return parse_ffprobe(txt) | ||||
|     rc = runcmd(cmd, timeout=timeout) | ||||
|     return parse_ffprobe(rc[1]) | ||||
|  | ||||
|  | ||||
| def parse_ffprobe(txt): | ||||
| @@ -413,11 +411,15 @@ class MTag(object): | ||||
|         return r1 | ||||
|  | ||||
|     def get_mutagen(self, abspath): | ||||
|         if not bos.path.isfile(abspath): | ||||
|             return {} | ||||
|  | ||||
|         import mutagen | ||||
|  | ||||
|         try: | ||||
|             md = mutagen.File(fsenc(abspath), easy=True) | ||||
|             x = md.info.length | ||||
|             if not md.info.length and not md.info.codec: | ||||
|                 raise Exception() | ||||
|         except Exception as ex: | ||||
|             return self.get_ffprobe(abspath) if self.can_ffprobe else {} | ||||
|  | ||||
| @@ -458,10 +460,16 @@ class MTag(object): | ||||
|         return self.normalize_tags(ret, md) | ||||
|  | ||||
|     def get_ffprobe(self, abspath): | ||||
|         if not bos.path.isfile(abspath): | ||||
|             return {} | ||||
|  | ||||
|         ret, md = ffprobe(abspath) | ||||
|         return self.normalize_tags(ret, md) | ||||
|  | ||||
|     def get_bin(self, parsers, abspath): | ||||
|         if not bos.path.isfile(abspath): | ||||
|             return {} | ||||
|  | ||||
|         pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) | ||||
|         pypath = [str(pypath)] + [str(x) for x in sys.path if x] | ||||
|         pypath = str(os.pathsep.join(pypath)) | ||||
| @@ -471,7 +479,10 @@ class MTag(object): | ||||
|         ret = {} | ||||
|         for tagname, mp in parsers.items(): | ||||
|             try: | ||||
|                 cmd = [sys.executable, mp.bin, abspath] | ||||
|                 cmd = [mp.bin, abspath] | ||||
|                 if mp.bin.endswith(".py"): | ||||
|                     cmd = [sys.executable] + cmd | ||||
|  | ||||
|                 args = {"env": env, "timeout": mp.timeout} | ||||
|  | ||||
|                 if WINDOWS: | ||||
|   | ||||
| @@ -1,7 +1,6 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import re | ||||
| import os | ||||
| import sys | ||||
| import time | ||||
| @@ -19,6 +18,7 @@ from .authsrv import AuthSrv | ||||
| from .tcpsrv import TcpSrv | ||||
| from .up2k import Up2k | ||||
| from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP | ||||
| from .mtag import HAVE_FFMPEG, HAVE_FFPROBE | ||||
|  | ||||
|  | ||||
| class SvcHub(object): | ||||
| @@ -37,8 +37,11 @@ class SvcHub(object): | ||||
|         self.argv = argv | ||||
|         self.logf = None | ||||
|         self.stop_req = False | ||||
|         self.reload_req = False | ||||
|         self.stopping = False | ||||
|         self.reloading = False | ||||
|         self.stop_cond = threading.Condition() | ||||
|         self.retcode = 0 | ||||
|         self.httpsrv_up = 0 | ||||
|  | ||||
|         self.log_mutex = threading.Lock() | ||||
| @@ -54,6 +57,19 @@ class SvcHub(object): | ||||
|         if args.log_thrs: | ||||
|             start_log_thrs(self.log, args.log_thrs, 0) | ||||
|  | ||||
|         if not args.use_fpool and args.j != 1: | ||||
|             args.no_fpool = True | ||||
|             m = "multithreading enabled with -j {}, so disabling fpool -- this can reduce upload performance on some filesystems" | ||||
|             self.log("root", m.format(args.j)) | ||||
|  | ||||
|         if not args.no_fpool and args.j != 1: | ||||
|             m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior" | ||||
|             if ANYWIN: | ||||
|                 m = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead' | ||||
|                 args.no_fpool = True | ||||
|  | ||||
|             self.log("root", m, c=3) | ||||
|  | ||||
|         # initiate all services to manage | ||||
|         self.asrv = AuthSrv(self.args, self.log) | ||||
|         if args.ls: | ||||
| @@ -77,31 +93,52 @@ class SvcHub(object): | ||||
|                     "thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3 | ||||
|                 ) | ||||
|  | ||||
|         if not args.no_acode and args.no_thumb: | ||||
|             msg = "setting --no-acode because --no-thumb (sorry)" | ||||
|             self.log("thumb", msg, c=6) | ||||
|             args.no_acode = True | ||||
|  | ||||
|         if not args.no_acode and (not HAVE_FFMPEG or not HAVE_FFPROBE): | ||||
|             msg = "setting --no-acode because either FFmpeg or FFprobe is not available" | ||||
|             self.log("thumb", msg, c=6) | ||||
|             args.no_acode = True | ||||
|  | ||||
|         args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage) | ||||
|  | ||||
|         # decide which worker impl to use | ||||
|         if self.check_mp_enable(): | ||||
|             from .broker_mp import BrokerMp as Broker | ||||
|         else: | ||||
|             self.log("root", "cannot efficiently use multiple CPU cores") | ||||
|             from .broker_thr import BrokerThr as Broker | ||||
|  | ||||
|         self.broker = Broker(self) | ||||
|  | ||||
|     def thr_httpsrv_up(self): | ||||
|         time.sleep(5) | ||||
|         failed = self.broker.num_workers - self.httpsrv_up | ||||
|         expected = self.broker.num_workers * self.tcpsrv.nsrv | ||||
|         failed = expected - self.httpsrv_up | ||||
|         if not failed: | ||||
|             return | ||||
|  | ||||
|         if self.args.ign_ebind_all: | ||||
|             return | ||||
|  | ||||
|         if self.args.ign_ebind and self.tcpsrv.srv: | ||||
|             return | ||||
|  | ||||
|         m = "{}/{} workers failed to start" | ||||
|         m = m.format(failed, self.broker.num_workers) | ||||
|         m = m.format(failed, expected) | ||||
|         self.log("root", m, 1) | ||||
|         os._exit(1) | ||||
|  | ||||
|         self.retcode = 1 | ||||
|         os.kill(os.getpid(), signal.SIGTERM) | ||||
|  | ||||
|     def cb_httpsrv_up(self): | ||||
|         self.httpsrv_up += 1 | ||||
|         if self.httpsrv_up != self.broker.num_workers: | ||||
|             return | ||||
|  | ||||
|         time.sleep(0.1)  # purely cosmetic dw | ||||
|         self.log("root", "workers OK\n") | ||||
|         self.up2k.init_vols() | ||||
|  | ||||
| @@ -162,7 +199,11 @@ class SvcHub(object): | ||||
|         thr.daemon = True | ||||
|         thr.start() | ||||
|  | ||||
|         for sig in [signal.SIGINT, signal.SIGTERM]: | ||||
|         sigs = [signal.SIGINT, signal.SIGTERM] | ||||
|         if not ANYWIN: | ||||
|             sigs.append(signal.SIGUSR1) | ||||
|  | ||||
|         for sig in sigs: | ||||
|             signal.signal(sig, self.signal_handler) | ||||
|  | ||||
|         # macos hangs after shutdown on sigterm with while-sleep, | ||||
| @@ -186,18 +227,45 @@ class SvcHub(object): | ||||
|         else: | ||||
|             self.stop_thr() | ||||
|  | ||||
|     def reload(self): | ||||
|         if self.reloading: | ||||
|             return "cannot reload; already in progress" | ||||
|  | ||||
|         self.reloading = True | ||||
|         t = threading.Thread(target=self._reload) | ||||
|         t.daemon = True | ||||
|         t.start() | ||||
|         return "reload initiated" | ||||
|  | ||||
|     def _reload(self): | ||||
|         self.log("root", "reload scheduled") | ||||
|         with self.up2k.mutex: | ||||
|             self.asrv.reload() | ||||
|             self.up2k.reload() | ||||
|             self.broker.reload() | ||||
|  | ||||
|         self.reloading = False | ||||
|  | ||||
|     def stop_thr(self): | ||||
|         while not self.stop_req: | ||||
|             with self.stop_cond: | ||||
|                 self.stop_cond.wait(9001) | ||||
|  | ||||
|             if self.reload_req: | ||||
|                 self.reload_req = False | ||||
|                 self.reload() | ||||
|  | ||||
|         self.shutdown() | ||||
|  | ||||
|     def signal_handler(self, sig, frame): | ||||
|         if self.stopping: | ||||
|             return | ||||
|  | ||||
|         self.stop_req = True | ||||
|         if sig == signal.SIGUSR1: | ||||
|             self.reload_req = True | ||||
|         else: | ||||
|             self.stop_req = True | ||||
|  | ||||
|         with self.stop_cond: | ||||
|             self.stop_cond.notify_all() | ||||
|  | ||||
| @@ -205,6 +273,8 @@ class SvcHub(object): | ||||
|         if self.stopping: | ||||
|             return | ||||
|  | ||||
|         # start_log_thrs(print, 0.1, 1) | ||||
|  | ||||
|         self.stopping = True | ||||
|         self.stop_req = True | ||||
|         with self.stop_cond: | ||||
| @@ -230,8 +300,12 @@ class SvcHub(object): | ||||
|                         print("waiting for thumbsrv (10sec)...") | ||||
|  | ||||
|             print("nailed it", end="") | ||||
|             ret = 0 | ||||
|             ret = self.retcode | ||||
|         finally: | ||||
|             if self.args.wintitle: | ||||
|                 print("\033]0;\033\\", file=sys.stderr, end="") | ||||
|                 sys.stderr.flush() | ||||
|  | ||||
|             print("\033[0m") | ||||
|             if self.logf: | ||||
|                 self.logf.close() | ||||
| @@ -327,10 +401,10 @@ class SvcHub(object): | ||||
|  | ||||
|     def check_mp_enable(self): | ||||
|         if self.args.j == 1: | ||||
|             self.log("root", "multiprocessing disabled by argument -j 1;") | ||||
|             return False | ||||
|  | ||||
|         if mp.cpu_count() <= 1: | ||||
|             self.log("svchub", "only one CPU detected; multiprocessing disabled") | ||||
|             return False | ||||
|  | ||||
|         try: | ||||
| @@ -345,6 +419,7 @@ class SvcHub(object): | ||||
|             return True | ||||
|         else: | ||||
|             self.log("svchub", err) | ||||
|             self.log("svchub", "cannot efficiently use multiple CPU cores") | ||||
|             return False | ||||
|  | ||||
|     def sd_notify(self): | ||||
|   | ||||
| @@ -2,9 +2,10 @@ | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import re | ||||
| import sys | ||||
| import socket | ||||
|  | ||||
| from .__init__ import MACOS, ANYWIN | ||||
| from .__init__ import MACOS, ANYWIN, unicode | ||||
| from .util import chkcmd | ||||
|  | ||||
|  | ||||
| @@ -21,6 +22,29 @@ class TcpSrv(object): | ||||
|  | ||||
|         self.stopping = False | ||||
|  | ||||
|         self.srv = [] | ||||
|         self.nsrv = 0 | ||||
|         ok = {} | ||||
|         for ip in self.args.i: | ||||
|             ok[ip] = [] | ||||
|             for port in self.args.p: | ||||
|                 self.nsrv += 1 | ||||
|                 try: | ||||
|                     self._listen(ip, port) | ||||
|                     ok[ip].append(port) | ||||
|                 except Exception as ex: | ||||
|                     if self.args.ign_ebind or self.args.ign_ebind_all: | ||||
|                         m = "could not listen on {}:{}: {}" | ||||
|                         self.log("tcpsrv", m.format(ip, port, ex), c=3) | ||||
|                     else: | ||||
|                         raise | ||||
|  | ||||
|         if not self.srv and not self.args.ign_ebind_all: | ||||
|             raise Exception("could not listen on any of the given interfaces") | ||||
|  | ||||
|         if self.nsrv != len(self.srv): | ||||
|             self.log("tcpsrv", "") | ||||
|  | ||||
|         ip = "127.0.0.1" | ||||
|         eps = {ip: "local only"} | ||||
|         nonlocals = [x for x in self.args.i if x != ip] | ||||
| @@ -31,20 +55,48 @@ class TcpSrv(object): | ||||
|                     eps[x] = "external" | ||||
|  | ||||
|         msgs = [] | ||||
|         title_tab = {} | ||||
|         title_vars = [x[1:] for x in self.args.wintitle.split(" ") if x.startswith("$")] | ||||
|         m = "available @ http://{}:{}/  (\033[33m{}\033[0m)" | ||||
|         for ip, desc in sorted(eps.items(), key=lambda x: x[1]): | ||||
|             for port in sorted(self.args.p): | ||||
|                 if port not in ok.get(ip, ok.get("0.0.0.0", [])): | ||||
|                     continue | ||||
|  | ||||
|                 msgs.append(m.format(ip, port, desc)) | ||||
|  | ||||
|                 if not self.args.wintitle: | ||||
|                     continue | ||||
|  | ||||
|                 if port in [80, 443]: | ||||
|                     ep = ip | ||||
|                 else: | ||||
|                     ep = "{}:{}".format(ip, port) | ||||
|  | ||||
|                 hits = [] | ||||
|                 if "pub" in title_vars and "external" in unicode(desc): | ||||
|                     hits.append(("pub", ep)) | ||||
|  | ||||
|                 if "pub" in title_vars or "all" in title_vars: | ||||
|                     hits.append(("all", ep)) | ||||
|  | ||||
|                 for var in title_vars: | ||||
|                     if var.startswith("ip-") and ep.startswith(var[3:]): | ||||
|                         hits.append((var, ep)) | ||||
|  | ||||
|                 for tk, tv in hits: | ||||
|                     try: | ||||
|                         title_tab[tk][tv] = 1 | ||||
|                     except: | ||||
|                         title_tab[tk] = {tv: 1} | ||||
|  | ||||
|         if msgs: | ||||
|             msgs[-1] += "\n" | ||||
|             for m in msgs: | ||||
|                 self.log("tcpsrv", m) | ||||
|  | ||||
|         self.srv = [] | ||||
|         for ip in self.args.i: | ||||
|             for port in self.args.p: | ||||
|                 self.srv.append(self._listen(ip, port)) | ||||
|         if self.args.wintitle: | ||||
|             self._set_wintitle(title_tab) | ||||
|  | ||||
|     def _listen(self, ip, port): | ||||
|         srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM) | ||||
| @@ -52,7 +104,7 @@ class TcpSrv(object): | ||||
|         srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) | ||||
|         try: | ||||
|             srv.bind((ip, port)) | ||||
|             return srv | ||||
|             self.srv.append(srv) | ||||
|         except (OSError, socket.error) as ex: | ||||
|             if ex.errno in [98, 48]: | ||||
|                 e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip) | ||||
| @@ -211,3 +263,26 @@ class TcpSrv(object): | ||||
|                     eps[default_route] = desc | ||||
|  | ||||
|         return eps | ||||
|  | ||||
|     def _set_wintitle(self, vars): | ||||
|         vars["all"] = vars.get("all", {"Local-Only": 1}) | ||||
|         vars["pub"] = vars.get("pub", vars["all"]) | ||||
|  | ||||
|         vars2 = {} | ||||
|         for k, eps in vars.items(): | ||||
|             vars2[k] = { | ||||
|                 ep: 1 | ||||
|                 for ep in eps.keys() | ||||
|                 if ":" not in ep or ep.split(":")[0] not in eps | ||||
|             } | ||||
|  | ||||
|         title = "" | ||||
|         vars = vars2 | ||||
|         for p in self.args.wintitle.split(" "): | ||||
|             if p.startswith("$"): | ||||
|                 p = " and ".join(sorted(vars.get(p[1:], {"(None)": 1}).keys())) | ||||
|  | ||||
|             title += "{} ".format(p) | ||||
|  | ||||
|         print("\033]0;{}\033\\".format(title), file=sys.stderr, end="") | ||||
|         sys.stderr.flush() | ||||
|   | ||||
| @@ -4,28 +4,44 @@ from __future__ import print_function, unicode_literals | ||||
| import os | ||||
|  | ||||
| from .util import Cooldown | ||||
| from .th_srv import thumb_path, THUMBABLE, FMT_FF | ||||
| from .th_srv import thumb_path, THUMBABLE, FMT_FFV, FMT_FFA | ||||
| from .bos import bos | ||||
|  | ||||
|  | ||||
| class ThumbCli(object): | ||||
|     def __init__(self, broker): | ||||
|         self.broker = broker | ||||
|         self.args = broker.args | ||||
|         self.asrv = broker.asrv | ||||
|     def __init__(self, hsrv): | ||||
|         self.broker = hsrv.broker | ||||
|         self.log_func = hsrv.log | ||||
|         self.args = hsrv.args | ||||
|         self.asrv = hsrv.asrv | ||||
|  | ||||
|         # cache on both sides for less broker spam | ||||
|         self.cooldown = Cooldown(self.args.th_poke) | ||||
|  | ||||
|     def log(self, msg, c=0): | ||||
|         self.log_func("thumbcli", msg, c) | ||||
|  | ||||
|     def get(self, ptop, rem, mtime, fmt): | ||||
|         ext = rem.rsplit(".")[-1].lower() | ||||
|         if ext not in THUMBABLE: | ||||
|             return None | ||||
|  | ||||
|         is_vid = ext in FMT_FF | ||||
|         is_vid = ext in FMT_FFV | ||||
|         if is_vid and self.args.no_vthumb: | ||||
|             return None | ||||
|  | ||||
|         want_opus = fmt in ("opus", "caf") | ||||
|         is_au = ext in FMT_FFA | ||||
|         if is_au: | ||||
|             if want_opus: | ||||
|                 if self.args.no_acode: | ||||
|                     return None | ||||
|             else: | ||||
|                 if self.args.no_athumb: | ||||
|                     return None | ||||
|         elif want_opus: | ||||
|             return None | ||||
|  | ||||
|         if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg"]: | ||||
|             return os.path.join(ptop, rem) | ||||
|  | ||||
| @@ -33,10 +49,14 @@ class ThumbCli(object): | ||||
|             fmt = "w" | ||||
|  | ||||
|         if fmt == "w": | ||||
|             if self.args.th_no_webp or (is_vid and self.args.th_ff_jpg): | ||||
|             if self.args.th_no_webp or ((is_vid or is_au) and self.args.th_ff_jpg): | ||||
|                 fmt = "j" | ||||
|  | ||||
|         histpath = self.asrv.vfs.histtab[ptop] | ||||
|         histpath = self.asrv.vfs.histtab.get(ptop) | ||||
|         if not histpath: | ||||
|             self.log("no histpath for [{}]".format(ptop)) | ||||
|             return None | ||||
|  | ||||
|         tpath = thumb_path(histpath, rem, mtime, fmt) | ||||
|         ret = None | ||||
|         try: | ||||
| @@ -53,6 +73,11 @@ class ThumbCli(object): | ||||
|             if self.cooldown.poke(tdir): | ||||
|                 self.broker.put(False, "thumbsrv.poke", tdir) | ||||
|  | ||||
|             if want_opus: | ||||
|                 # audio files expire individually | ||||
|                 if self.cooldown.poke(tpath): | ||||
|                     self.broker.put(False, "thumbsrv.poke", tpath) | ||||
|  | ||||
|             return ret | ||||
|  | ||||
|         x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt) | ||||
|   | ||||
| @@ -10,7 +10,7 @@ import threading | ||||
| import subprocess as sp | ||||
|  | ||||
| from .__init__ import PY2, unicode | ||||
| from .util import fsenc, vsplit, runcmd, Queue, Cooldown, BytesIO, min_ex | ||||
| from .util import fsenc, vsplit, statdir, runcmd, Queue, Cooldown, BytesIO, min_ex | ||||
| from .bos import bos | ||||
| from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, ffprobe | ||||
|  | ||||
| @@ -50,7 +50,8 @@ except: | ||||
| # https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html | ||||
| # ffmpeg -formats | ||||
| FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm" | ||||
| FMT_FF = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv" | ||||
| FMT_FFV = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv" | ||||
| FMT_FFA = "aac m4a ogg opus flac alac mp3 mp2 ac3 dts wma ra wav aif aiff au alaw ulaw mulaw amr gsm ape tak tta wv" | ||||
|  | ||||
| if HAVE_HEIF: | ||||
|     FMT_PIL += " heif heifs heic heics" | ||||
| @@ -58,7 +59,9 @@ if HAVE_HEIF: | ||||
| if HAVE_AVIF: | ||||
|     FMT_PIL += " avif avifs" | ||||
|  | ||||
| FMT_PIL, FMT_FF = [{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FF]] | ||||
| FMT_PIL, FMT_FFV, FMT_FFA = [ | ||||
|     {x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FFV, FMT_FFA] | ||||
| ] | ||||
|  | ||||
|  | ||||
| THUMBABLE = {} | ||||
| @@ -67,7 +70,8 @@ if HAVE_PIL: | ||||
|     THUMBABLE.update(FMT_PIL) | ||||
|  | ||||
| if HAVE_FFMPEG and HAVE_FFPROBE: | ||||
|     THUMBABLE.update(FMT_FF) | ||||
|     THUMBABLE.update(FMT_FFV) | ||||
|     THUMBABLE.update(FMT_FFA) | ||||
|  | ||||
|  | ||||
| def thumb_path(histpath, rem, mtime, fmt): | ||||
| @@ -86,9 +90,13 @@ def thumb_path(histpath, rem, mtime, fmt): | ||||
|     h = hashlib.sha512(fsenc(fn)).digest() | ||||
|     fn = base64.urlsafe_b64encode(h).decode("ascii")[:24] | ||||
|  | ||||
|     return "{}/th/{}/{}.{:x}.{}".format( | ||||
|         histpath, rd, fn, int(mtime), "webp" if fmt == "w" else "jpg" | ||||
|     ) | ||||
|     if fmt in ("opus", "caf"): | ||||
|         cat = "ac" | ||||
|     else: | ||||
|         fmt = "webp" if fmt == "w" else "jpg" | ||||
|         cat = "th" | ||||
|  | ||||
|     return "{}/{}/{}/{}.{:x}.{}".format(histpath, cat, rd, fn, int(mtime), fmt) | ||||
|  | ||||
|  | ||||
| class ThumbSrv(object): | ||||
| @@ -105,9 +113,7 @@ class ThumbSrv(object): | ||||
|         self.mutex = threading.Lock() | ||||
|         self.busy = {} | ||||
|         self.stopping = False | ||||
|         self.nthr = self.args.th_mt | ||||
|         if not self.nthr: | ||||
|             self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4 | ||||
|         self.nthr = max(1, self.args.th_mt) | ||||
|  | ||||
|         self.q = Queue(self.nthr * 4) | ||||
|         for n in range(self.nthr): | ||||
| @@ -117,7 +123,8 @@ class ThumbSrv(object): | ||||
|             t.daemon = True | ||||
|             t.start() | ||||
|  | ||||
|         if not self.args.no_vthumb and (not HAVE_FFMPEG or not HAVE_FFPROBE): | ||||
|         want_ff = not self.args.no_vthumb or not self.args.no_athumb | ||||
|         if want_ff and (not HAVE_FFMPEG or not HAVE_FFPROBE): | ||||
|             missing = [] | ||||
|             if not HAVE_FFMPEG: | ||||
|                 missing.append("FFmpeg") | ||||
| @@ -125,12 +132,12 @@ class ThumbSrv(object): | ||||
|             if not HAVE_FFPROBE: | ||||
|                 missing.append("FFprobe") | ||||
|  | ||||
|             msg = "cannot create video thumbnails because some of the required programs are not available: " | ||||
|             msg = "cannot create audio/video thumbnails because some of the required programs are not available: " | ||||
|             msg += ", ".join(missing) | ||||
|             self.log(msg, c=3) | ||||
|  | ||||
|         if self.args.th_clean: | ||||
|             t = threading.Thread(target=self.cleaner, name="thumb-cleaner") | ||||
|             t = threading.Thread(target=self.cleaner, name="thumb.cln") | ||||
|             t.daemon = True | ||||
|             t.start() | ||||
|  | ||||
| @@ -147,7 +154,11 @@ class ThumbSrv(object): | ||||
|             return not self.nthr | ||||
|  | ||||
|     def get(self, ptop, rem, mtime, fmt): | ||||
|         histpath = self.asrv.vfs.histtab[ptop] | ||||
|         histpath = self.asrv.vfs.histtab.get(ptop) | ||||
|         if not histpath: | ||||
|             self.log("no histpath for [{}]".format(ptop)) | ||||
|             return None | ||||
|  | ||||
|         tpath = thumb_path(histpath, rem, mtime, fmt) | ||||
|         abspath = os.path.join(ptop, rem) | ||||
|         cond = threading.Condition(self.mutex) | ||||
| @@ -183,6 +194,7 @@ class ThumbSrv(object): | ||||
|         try: | ||||
|             st = bos.stat(tpath) | ||||
|             if st.st_size: | ||||
|                 self.poke(tpath) | ||||
|                 return tpath | ||||
|         except: | ||||
|             pass | ||||
| @@ -201,8 +213,13 @@ class ThumbSrv(object): | ||||
|             if not bos.path.exists(tpath): | ||||
|                 if ext in FMT_PIL: | ||||
|                     fun = self.conv_pil | ||||
|                 elif ext in FMT_FF: | ||||
|                 elif ext in FMT_FFV: | ||||
|                     fun = self.conv_ffmpeg | ||||
|                 elif ext in FMT_FFA: | ||||
|                     if tpath.endswith(".opus") or tpath.endswith(".caf"): | ||||
|                         fun = self.conv_opus | ||||
|                     else: | ||||
|                         fun = self.conv_spec | ||||
|  | ||||
|             if fun: | ||||
|                 try: | ||||
| @@ -328,25 +345,116 @@ class ThumbSrv(object): | ||||
|             ] | ||||
|  | ||||
|         cmd += [fsenc(tpath)] | ||||
|         # self.log((b" ".join(cmd)).decode("utf-8")) | ||||
|         self._run_ff(cmd) | ||||
|  | ||||
|         ret, sout, serr = runcmd(cmd) | ||||
|     def _run_ff(self, cmd): | ||||
|         # self.log((b" ".join(cmd)).decode("utf-8")) | ||||
|         ret, sout, serr = runcmd(cmd, timeout=self.args.th_convt) | ||||
|         if ret != 0: | ||||
|             m = "FFmpeg failed (probably a corrupt video file):\n" | ||||
|             m += "\n".join(["ff: {}".format(x) for x in serr.split("\n")]) | ||||
|             self.log(m, c="1;30") | ||||
|             raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1])) | ||||
|  | ||||
|     def conv_spec(self, abspath, tpath): | ||||
|         ret, _ = ffprobe(abspath) | ||||
|         if "ac" not in ret: | ||||
|             raise Exception("not audio") | ||||
|  | ||||
|         fc = "[0:a:0]aresample=48000{},showspectrumpic=s=640x512,crop=780:544:70:50[o]" | ||||
|  | ||||
|         if self.args.th_ff_swr: | ||||
|             fco = ":filter_size=128:cutoff=0.877" | ||||
|         else: | ||||
|             fco = ":resampler=soxr" | ||||
|  | ||||
|         fc = fc.format(fco) | ||||
|  | ||||
|         # fmt: off | ||||
|         cmd = [ | ||||
|             b"ffmpeg", | ||||
|             b"-nostdin", | ||||
|             b"-v", b"error", | ||||
|             b"-hide_banner", | ||||
|             b"-i", fsenc(abspath), | ||||
|             b"-filter_complex", fc.encode("utf-8"), | ||||
|             b"-map", b"[o]" | ||||
|         ] | ||||
|         # fmt: on | ||||
|  | ||||
|         if tpath.endswith(".jpg"): | ||||
|             cmd += [ | ||||
|                 b"-q:v", | ||||
|                 b"6",  # default=?? | ||||
|             ] | ||||
|         else: | ||||
|             cmd += [ | ||||
|                 b"-q:v", | ||||
|                 b"50",  # default=75 | ||||
|                 b"-compression_level:v", | ||||
|                 b"6",  # default=4, 0=fast, 6=max | ||||
|             ] | ||||
|  | ||||
|         cmd += [fsenc(tpath)] | ||||
|         self._run_ff(cmd) | ||||
|  | ||||
|     def conv_opus(self, abspath, tpath): | ||||
|         if self.args.no_acode: | ||||
|             raise Exception("disabled in server config") | ||||
|  | ||||
|         ret, _ = ffprobe(abspath) | ||||
|         if "ac" not in ret: | ||||
|             raise Exception("not audio") | ||||
|  | ||||
|         src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus" | ||||
|         want_caf = tpath.endswith(".caf") | ||||
|         tmp_opus = tpath | ||||
|         if want_caf: | ||||
|             tmp_opus = tpath.rsplit(".", 1)[0] + ".opus" | ||||
|  | ||||
|         if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)): | ||||
|             # fmt: off | ||||
|             cmd = [ | ||||
|                 b"ffmpeg", | ||||
|                 b"-nostdin", | ||||
|                 b"-v", b"error", | ||||
|                 b"-hide_banner", | ||||
|                 b"-i", fsenc(abspath), | ||||
|                 b"-map_metadata", b"-1", | ||||
|                 b"-map", b"0:a:0", | ||||
|                 b"-c:a", b"libopus", | ||||
|                 b"-b:a", b"128k", | ||||
|                 fsenc(tmp_opus) | ||||
|             ] | ||||
|             # fmt: on | ||||
|             self._run_ff(cmd) | ||||
|  | ||||
|         if want_caf: | ||||
|             # fmt: off | ||||
|             cmd = [ | ||||
|                 b"ffmpeg", | ||||
|                 b"-nostdin", | ||||
|                 b"-v", b"error", | ||||
|                 b"-hide_banner", | ||||
|                 b"-i", fsenc(abspath if src_opus else tmp_opus), | ||||
|                 b"-map_metadata", b"-1", | ||||
|                 b"-map", b"0:a:0", | ||||
|                 b"-c:a", b"copy", | ||||
|                 b"-f", b"caf", | ||||
|                 fsenc(tpath) | ||||
|             ] | ||||
|             # fmt: on | ||||
|             self._run_ff(cmd) | ||||
|  | ||||
|     def poke(self, tdir): | ||||
|         if not self.poke_cd.poke(tdir): | ||||
|             return | ||||
|  | ||||
|         ts = int(time.time()) | ||||
|         try: | ||||
|             p1 = os.path.dirname(tdir) | ||||
|             p2 = os.path.dirname(p1) | ||||
|             for dp in [tdir, p1, p2]: | ||||
|                 bos.utime(dp, (ts, ts)) | ||||
|             for _ in range(4): | ||||
|                 bos.utime(tdir, (ts, ts)) | ||||
|                 tdir = os.path.dirname(tdir) | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
| @@ -366,25 +474,36 @@ class ThumbSrv(object): | ||||
|             self.log("\033[Jcln ok; rm {} dirs".format(ndirs)) | ||||
|  | ||||
|     def clean(self, histpath): | ||||
|         thumbpath = os.path.join(histpath, "th") | ||||
|         ret = 0 | ||||
|         for cat in ["th", "ac"]: | ||||
|             ret += self._clean(histpath, cat, None) | ||||
|  | ||||
|         return ret | ||||
|  | ||||
|     def _clean(self, histpath, cat, thumbpath): | ||||
|         if not thumbpath: | ||||
|             thumbpath = os.path.join(histpath, cat) | ||||
|  | ||||
|         # self.log("cln {}".format(thumbpath)) | ||||
|         maxage = self.args.th_maxage | ||||
|         exts = ["jpg", "webp"] if cat == "th" else ["opus", "caf"] | ||||
|         maxage = getattr(self.args, cat + "_maxage") | ||||
|         now = time.time() | ||||
|         prev_b64 = None | ||||
|         prev_fp = None | ||||
|         try: | ||||
|             ents = bos.listdir(thumbpath) | ||||
|             ents = statdir(self.log, not self.args.no_scandir, False, thumbpath) | ||||
|             ents = sorted(list(ents)) | ||||
|         except: | ||||
|             return 0 | ||||
|  | ||||
|         ndirs = 0 | ||||
|         for f in sorted(ents): | ||||
|         for f, inf in ents: | ||||
|             fp = os.path.join(thumbpath, f) | ||||
|             cmp = fp.lower().replace("\\", "/") | ||||
|  | ||||
|             # "top" or b64 prefix/full (a folder) | ||||
|             if len(f) <= 3 or len(f) == 24: | ||||
|                 age = now - bos.path.getmtime(fp) | ||||
|                 age = now - inf.st_mtime | ||||
|                 if age > maxage: | ||||
|                     with self.mutex: | ||||
|                         safe = True | ||||
| @@ -398,16 +517,15 @@ class ThumbSrv(object): | ||||
|                             self.log("rm -rf [{}]".format(fp)) | ||||
|                             shutil.rmtree(fp, ignore_errors=True) | ||||
|                 else: | ||||
|                     ndirs += self.clean(fp) | ||||
|                     self._clean(histpath, cat, fp) | ||||
|  | ||||
|                 continue | ||||
|  | ||||
|             # thumb file | ||||
|             try: | ||||
|                 b64, ts, ext = f.split(".") | ||||
|                 if len(b64) != 24 or len(ts) != 8 or ext not in ["jpg", "webp"]: | ||||
|                 if len(b64) != 24 or len(ts) != 8 or ext not in exts: | ||||
|                     raise Exception() | ||||
|  | ||||
|                 ts = int(ts, 16) | ||||
|             except: | ||||
|                 if f != "dir.txt": | ||||
|                     self.log("foreign file in thumbs dir: [{}]".format(fp), 1) | ||||
| @@ -418,6 +536,10 @@ class ThumbSrv(object): | ||||
|                 self.log("rm replaced [{}]".format(fp)) | ||||
|                 bos.unlink(prev_fp) | ||||
|  | ||||
|             if cat != "th" and inf.st_mtime + maxage < now: | ||||
|                 self.log("rm expired [{}]".format(fp)) | ||||
|                 bos.unlink(fp) | ||||
|  | ||||
|             prev_b64 = b64 | ||||
|             prev_fp = fp | ||||
|  | ||||
|   | ||||
| @@ -6,9 +6,10 @@ import os | ||||
| import time | ||||
| import threading | ||||
| from datetime import datetime | ||||
| from operator import itemgetter | ||||
|  | ||||
| from .__init__ import ANYWIN, unicode | ||||
| from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey | ||||
| from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey, quotep | ||||
| from .bos import bos | ||||
| from .up2k import up2k_wark_from_hashlist | ||||
|  | ||||
| @@ -66,7 +67,11 @@ class U2idx(object): | ||||
|         if cur: | ||||
|             return cur | ||||
|  | ||||
|         histpath = self.asrv.vfs.histtab[ptop] | ||||
|         histpath = self.asrv.vfs.histtab.get(ptop) | ||||
|         if not histpath: | ||||
|             self.log("no histpath for [{}]".format(ptop)) | ||||
|             return None | ||||
|  | ||||
|         db_path = os.path.join(histpath, "up2k.db") | ||||
|         if not bos.path.exists(db_path): | ||||
|             return None | ||||
| @@ -112,7 +117,16 @@ class U2idx(object): | ||||
|             if ok: | ||||
|                 continue | ||||
|  | ||||
|             v, uq = (uq + " ").split(" ", 1) | ||||
|             if uq.startswith('"'): | ||||
|                 v, uq = uq[1:].split('"', 1) | ||||
|                 while v.endswith("\\"): | ||||
|                     v2, uq = uq.split('"', 1) | ||||
|                     v = v[:-1] + '"' + v2 | ||||
|                 uq = uq.strip() | ||||
|             else: | ||||
|                 v, uq = (uq + " ").split(" ", 1) | ||||
|                 v = v.replace('\\"', '"') | ||||
|  | ||||
|             if is_key: | ||||
|                 is_key = False | ||||
|  | ||||
| @@ -253,21 +267,23 @@ class U2idx(object): | ||||
|                 if rd.startswith("//") or fn.startswith("//"): | ||||
|                     rd, fn = s3dec(rd, fn) | ||||
|  | ||||
|                 if fk: | ||||
|                 if not fk: | ||||
|                     suf = "" | ||||
|                 else: | ||||
|                     try: | ||||
|                         ap = absreal(os.path.join(ptop, rd, fn)) | ||||
|                         inf = bos.stat(ap) | ||||
|                     except: | ||||
|                         continue | ||||
|  | ||||
|                     fn += ( | ||||
|                     suf = ( | ||||
|                         "?k=" | ||||
|                         + gen_filekey( | ||||
|                             self.args.fk_salt, ap, sz, 0 if ANYWIN else inf.st_ino | ||||
|                         )[:fk] | ||||
|                     ) | ||||
|  | ||||
|                 rp = "/".join([x for x in [vtop, rd, fn] if x]) | ||||
|                 rp = quotep("/".join([x for x in [vtop, rd, fn] if x])) + suf | ||||
|                 sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]}) | ||||
|  | ||||
|             for hit in sret: | ||||
| @@ -290,9 +306,13 @@ class U2idx(object): | ||||
|         # undupe hits from multiple metadata keys | ||||
|         if len(ret) > 1: | ||||
|             ret = [ret[0]] + [ | ||||
|                 y for x, y in zip(ret[:-1], ret[1:]) if x["rp"] != y["rp"] | ||||
|                 y | ||||
|                 for x, y in zip(ret[:-1], ret[1:]) | ||||
|                 if x["rp"].split("?")[0] != y["rp"].split("?")[0] | ||||
|             ] | ||||
|  | ||||
|         ret.sort(key=itemgetter("rp")) | ||||
|  | ||||
|         return ret, list(taglist.keys()) | ||||
|  | ||||
|     def terminator(self, identifier, done_flag): | ||||
|   | ||||
| @@ -21,13 +21,17 @@ from .util import ( | ||||
|     Pebkac, | ||||
|     Queue, | ||||
|     ProgressPrinter, | ||||
|     SYMTIME, | ||||
|     fsdec, | ||||
|     fsenc, | ||||
|     absreal, | ||||
|     sanitize_fn, | ||||
|     ren_open, | ||||
|     atomic_move, | ||||
|     quotep, | ||||
|     vsplit, | ||||
|     w8b64enc, | ||||
|     w8b64dec, | ||||
|     s3enc, | ||||
|     s3dec, | ||||
|     rmdirs, | ||||
| @@ -60,13 +64,17 @@ class Up2k(object): | ||||
|  | ||||
|         # state | ||||
|         self.mutex = threading.Lock() | ||||
|         self.rescan_cond = threading.Condition() | ||||
|         self.hashq = Queue() | ||||
|         self.tagq = Queue() | ||||
|         self.n_hashq = 0 | ||||
|         self.n_tagq = 0 | ||||
|         self.gid = 0 | ||||
|         self.volstate = {} | ||||
|         self.need_rescan = {} | ||||
|         self.dupesched = {} | ||||
|         self.registry = {} | ||||
|         self.droppable = {} | ||||
|         self.entags = {} | ||||
|         self.flags = {} | ||||
|         self.cur = {} | ||||
| @@ -109,15 +117,21 @@ class Up2k(object): | ||||
|         t.daemon = True | ||||
|         t.start() | ||||
|  | ||||
|     def reload(self): | ||||
|         self.gid += 1 | ||||
|         self.log("reload #{} initiated".format(self.gid)) | ||||
|         all_vols = self.asrv.vfs.all_vols | ||||
|         self.rescan(all_vols, list(all_vols.keys()), True) | ||||
|  | ||||
|     def deferred_init(self): | ||||
|         all_vols = self.asrv.vfs.all_vols | ||||
|         have_e2d = self.init_indexes(all_vols) | ||||
|  | ||||
|         if have_e2d: | ||||
|             thr = threading.Thread(target=self._snapshot, name="up2k-snapshot") | ||||
|             thr.daemon = True | ||||
|             thr.start() | ||||
|         thr = threading.Thread(target=self._snapshot, name="up2k-snapshot") | ||||
|         thr.daemon = True | ||||
|         thr.start() | ||||
|  | ||||
|         if have_e2d: | ||||
|             thr = threading.Thread(target=self._hasher, name="up2k-hasher") | ||||
|             thr.daemon = True | ||||
|             thr.start() | ||||
| @@ -127,9 +141,11 @@ class Up2k(object): | ||||
|             thr.start() | ||||
|  | ||||
|             if self.mtag: | ||||
|                 thr = threading.Thread(target=self._tagger, name="up2k-tagger") | ||||
|                 thr.daemon = True | ||||
|                 thr.start() | ||||
|                 for n in range(max(1, self.args.mtag_mt)): | ||||
|                     name = "tagger-{}".format(n) | ||||
|                     thr = threading.Thread(target=self._tagger, name=name) | ||||
|                     thr.daemon = True | ||||
|                     thr.start() | ||||
|  | ||||
|                 thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-init") | ||||
|                 thr.daemon = True | ||||
| @@ -161,15 +177,15 @@ class Up2k(object): | ||||
|         } | ||||
|         return json.dumps(ret, indent=4) | ||||
|  | ||||
|     def rescan(self, all_vols, scan_vols): | ||||
|         if hasattr(self, "pp"): | ||||
|     def rescan(self, all_vols, scan_vols, wait): | ||||
|         if not wait and hasattr(self, "pp"): | ||||
|             return "cannot initiate; scan is already in progress" | ||||
|  | ||||
|         args = (all_vols, scan_vols) | ||||
|         t = threading.Thread( | ||||
|             target=self.init_indexes, | ||||
|             args=args, | ||||
|             name="up2k-rescan-{}".format(scan_vols[0]), | ||||
|             name="up2k-rescan-{}".format(scan_vols[0] if scan_vols else "all"), | ||||
|         ) | ||||
|         t.daemon = True | ||||
|         t.start() | ||||
| @@ -177,9 +193,23 @@ class Up2k(object): | ||||
|  | ||||
|     def _sched_rescan(self): | ||||
|         volage = {} | ||||
|         cooldown = 0 | ||||
|         timeout = time.time() + 3 | ||||
|         while True: | ||||
|             time.sleep(self.args.re_int) | ||||
|             timeout = max(timeout, cooldown) | ||||
|             wait = max(0.1, timeout + 0.1 - time.time()) | ||||
|             with self.rescan_cond: | ||||
|                 self.rescan_cond.wait(wait) | ||||
|  | ||||
|             now = time.time() | ||||
|             if now < cooldown: | ||||
|                 continue | ||||
|  | ||||
|             if hasattr(self, "pp"): | ||||
|                 cooldown = now + 5 | ||||
|                 continue | ||||
|  | ||||
|             timeout = now + 9001 | ||||
|             with self.mutex: | ||||
|                 for vp, vol in sorted(self.asrv.vfs.all_vols.items()): | ||||
|                     maxage = vol.flags.get("scan") | ||||
| @@ -189,14 +219,18 @@ class Up2k(object): | ||||
|                     if vp not in volage: | ||||
|                         volage[vp] = now | ||||
|  | ||||
|                     if now - volage[vp] >= maxage: | ||||
|                     deadline = volage[vp] + maxage | ||||
|                     if deadline <= now: | ||||
|                         self.need_rescan[vp] = 1 | ||||
|  | ||||
|                     timeout = min(timeout, deadline) | ||||
|  | ||||
|                 vols = list(sorted(self.need_rescan.keys())) | ||||
|                 self.need_rescan = {} | ||||
|  | ||||
|             if vols: | ||||
|                 err = self.rescan(self.asrv.vfs.all_vols, vols) | ||||
|                 cooldown = now + 10 | ||||
|                 err = self.rescan(self.asrv.vfs.all_vols, vols, False) | ||||
|                 if err: | ||||
|                     for v in vols: | ||||
|                         self.need_rescan[v] = True | ||||
| @@ -218,8 +252,11 @@ class Up2k(object): | ||||
|                 if not cur: | ||||
|                     continue | ||||
|  | ||||
|                 lifetime = int(lifetime) | ||||
|                 timeout = min(timeout, now + lifetime) | ||||
|  | ||||
|                 nrm = 0 | ||||
|                 deadline = time.time() - int(lifetime) | ||||
|                 deadline = time.time() - lifetime | ||||
|                 q = "select rd, fn from up where at > 0 and at < ? limit 100" | ||||
|                 while True: | ||||
|                     with self.mutex: | ||||
| @@ -236,12 +273,22 @@ class Up2k(object): | ||||
|                         if vp: | ||||
|                             fvp = "{}/{}".format(vp, fvp) | ||||
|  | ||||
|                         self._handle_rm(LEELOO_DALLAS, None, fvp, True) | ||||
|                         self._handle_rm(LEELOO_DALLAS, None, fvp) | ||||
|                         nrm += 1 | ||||
|  | ||||
|                 if nrm: | ||||
|                     self.log("{} files graduated in {}".format(nrm, vp)) | ||||
|  | ||||
|                 if timeout < 10: | ||||
|                     continue | ||||
|  | ||||
|                 q = "select at from up where at > 0 order by at limit 1" | ||||
|                 with self.mutex: | ||||
|                     hits = cur.execute(q).fetchone() | ||||
|  | ||||
|                 if hits: | ||||
|                     timeout = min(timeout, now + lifetime - (now - hits[0])) | ||||
|  | ||||
|     def _vis_job_progress(self, job): | ||||
|         perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"])) | ||||
|         path = os.path.join(job["ptop"], job["prel"], job["name"]) | ||||
| @@ -250,7 +297,8 @@ class Up2k(object): | ||||
|     def _vis_reg_progress(self, reg): | ||||
|         ret = [] | ||||
|         for _, job in reg.items(): | ||||
|             ret.append(self._vis_job_progress(job)) | ||||
|             if job["need"]: | ||||
|                 ret.append(self._vis_job_progress(job)) | ||||
|  | ||||
|         return ret | ||||
|  | ||||
| @@ -265,6 +313,16 @@ class Up2k(object): | ||||
|         return True, ret | ||||
|  | ||||
|     def init_indexes(self, all_vols, scan_vols=None): | ||||
|         gid = self.gid | ||||
|         while hasattr(self, "pp") and gid == self.gid: | ||||
|             time.sleep(0.1) | ||||
|  | ||||
|         if gid != self.gid: | ||||
|             return | ||||
|  | ||||
|         if gid: | ||||
|             self.log("reload #{} running".format(self.gid)) | ||||
|  | ||||
|         self.pp = ProgressPrinter() | ||||
|         vols = all_vols.values() | ||||
|         t0 = time.time() | ||||
| @@ -395,7 +453,11 @@ class Up2k(object): | ||||
|         return have_e2d | ||||
|  | ||||
|     def register_vpath(self, ptop, flags): | ||||
|         histpath = self.asrv.vfs.histtab[ptop] | ||||
|         histpath = self.asrv.vfs.histtab.get(ptop) | ||||
|         if not histpath: | ||||
|             self.log("no histpath for [{}]".format(ptop)) | ||||
|             return None | ||||
|  | ||||
|         db_path = os.path.join(histpath, "up2k.db") | ||||
|         if ptop in self.registry: | ||||
|             try: | ||||
| @@ -424,26 +486,41 @@ class Up2k(object): | ||||
|             self.log("/{} {}".format(vpath, " ".join(sorted(a))), "35") | ||||
|  | ||||
|         reg = {} | ||||
|         drp = None | ||||
|         path = os.path.join(histpath, "up2k.snap") | ||||
|         if "e2d" in flags and bos.path.exists(path): | ||||
|         if bos.path.exists(path): | ||||
|             with gzip.GzipFile(path, "rb") as f: | ||||
|                 j = f.read().decode("utf-8") | ||||
|  | ||||
|             reg2 = json.loads(j) | ||||
|             try: | ||||
|                 drp = reg2["droppable"] | ||||
|                 reg2 = reg2["registry"] | ||||
|             except: | ||||
|                 pass | ||||
|  | ||||
|             for k, job in reg2.items(): | ||||
|                 path = os.path.join(job["ptop"], job["prel"], job["name"]) | ||||
|                 if bos.path.exists(path): | ||||
|                     reg[k] = job | ||||
|                     job["poke"] = time.time() | ||||
|                     job["busy"] = {} | ||||
|                 else: | ||||
|                     self.log("ign deleted file in snap: [{}]".format(path)) | ||||
|  | ||||
|             m = "loaded snap {} |{}|".format(path, len(reg.keys())) | ||||
|             if drp is None: | ||||
|                 drp = [k for k, v in reg.items() if not v.get("need", [])] | ||||
|             else: | ||||
|                 drp = [x for x in drp if x in reg] | ||||
|  | ||||
|             m = "loaded snap {} |{}| ({})".format(path, len(reg.keys()), len(drp or [])) | ||||
|             m = [m] + self._vis_reg_progress(reg) | ||||
|             self.log("\n".join(m)) | ||||
|  | ||||
|         self.flags[ptop] = flags | ||||
|         self.registry[ptop] = reg | ||||
|         self.droppable[ptop] = drp or [] | ||||
|         self.regdrop(ptop, None) | ||||
|         if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags: | ||||
|             return None | ||||
|  | ||||
| @@ -462,7 +539,8 @@ class Up2k(object): | ||||
|     def _build_file_index(self, vol, all_vols): | ||||
|         do_vac = False | ||||
|         top = vol.realpath | ||||
|         nohash = "dhash" in vol.flags | ||||
|         rei = vol.flags.get("noidx") | ||||
|         reh = vol.flags.get("nohash") | ||||
|         with self.mutex: | ||||
|             cur, _ = self.register_vpath(top, vol.flags) | ||||
|  | ||||
| @@ -477,38 +555,55 @@ class Up2k(object): | ||||
|             if WINDOWS: | ||||
|                 excl = [x.replace("/", "\\") for x in excl] | ||||
|  | ||||
|             n_add = self._build_dir(dbw, top, set(excl), top, nohash, []) | ||||
|             n_rm = self._drop_lost(dbw[0], top) | ||||
|             n_add = n_rm = 0 | ||||
|             try: | ||||
|                 n_add = self._build_dir(dbw, top, set(excl), top, rei, reh, []) | ||||
|                 n_rm = self._drop_lost(dbw[0], top) | ||||
|             except: | ||||
|                 m = "failed to index volume [{}]:\n{}" | ||||
|                 self.log(m.format(top, min_ex()), c=1) | ||||
|  | ||||
|             if dbw[1]: | ||||
|                 self.log("commit {} new files".format(dbw[1])) | ||||
|                 dbw[0].connection.commit() | ||||
|  | ||||
|             dbw[0].connection.commit() | ||||
|  | ||||
|             return True, n_add or n_rm or do_vac | ||||
|  | ||||
|     def _build_dir(self, dbw, top, excl, cdir, nohash, seen): | ||||
|     def _build_dir(self, dbw, top, excl, cdir, rei, reh, seen): | ||||
|         rcdir = absreal(cdir)  # a bit expensive but worth | ||||
|         if rcdir in seen: | ||||
|             m = "bailing from symlink loop,\n  prev: {}\n  curr: {}\n  from: {}" | ||||
|             self.log(m.format(seen[-1], rcdir, cdir), 3) | ||||
|             return 0 | ||||
|  | ||||
|         seen = seen + [cdir] | ||||
|         seen = seen + [rcdir] | ||||
|         self.pp.msg = "a{} {}".format(self.pp.n, cdir) | ||||
|         histpath = self.asrv.vfs.histtab[top] | ||||
|         ret = 0 | ||||
|         seen_files = {} | ||||
|         g = statdir(self.log_func, not self.args.no_scandir, False, cdir) | ||||
|         for iname, inf in sorted(g): | ||||
|             abspath = os.path.join(cdir, iname) | ||||
|             if rei and rei.search(abspath): | ||||
|                 continue | ||||
|  | ||||
|             nohash = reh.search(abspath) if reh else False | ||||
|             lmod = int(inf.st_mtime) | ||||
|             sz = inf.st_size | ||||
|             if stat.S_ISDIR(inf.st_mode): | ||||
|                 if abspath in excl or abspath == histpath: | ||||
|                     continue | ||||
|                 # self.log(" dir: {}".format(abspath)) | ||||
|                 ret += self._build_dir(dbw, top, excl, abspath, nohash, seen) | ||||
|                 try: | ||||
|                     ret += self._build_dir(dbw, top, excl, abspath, rei, reh, seen) | ||||
|                 except: | ||||
|                     m = "failed to index subdir [{}]:\n{}" | ||||
|                     self.log(m.format(abspath, min_ex()), c=1) | ||||
|             else: | ||||
|                 # self.log("file: {}".format(abspath)) | ||||
|                 rp = abspath[len(top) + 1 :] | ||||
|                 seen_files[iname] = 1 | ||||
|                 rp = abspath[len(top) :].lstrip("/") | ||||
|                 if WINDOWS: | ||||
|                     rp = rp.replace("\\", "/").strip("/") | ||||
|  | ||||
| @@ -566,34 +661,65 @@ class Up2k(object): | ||||
|                     dbw[0].connection.commit() | ||||
|                     dbw[1] = 0 | ||||
|                     dbw[2] = time.time() | ||||
|  | ||||
|         # drop missing files | ||||
|         rd = cdir[len(top) + 1 :].strip("/") | ||||
|         if WINDOWS: | ||||
|             rd = rd.replace("\\", "/").strip("/") | ||||
|  | ||||
|         q = "select fn from up where rd = ?" | ||||
|         try: | ||||
|             c = dbw[0].execute(q, (rd,)) | ||||
|         except: | ||||
|             c = dbw[0].execute(q, ("//" + w8b64enc(rd),)) | ||||
|  | ||||
|         hits = [w8b64dec(x[2:]) if x.startswith("//") else x for (x,) in c] | ||||
|         rm_files = [x for x in hits if x not in seen_files] | ||||
|         n_rm = len(rm_files) | ||||
|         for fn in rm_files: | ||||
|             self.db_rm(dbw[0], rd, fn) | ||||
|  | ||||
|         if n_rm: | ||||
|             self.log("forgot {} deleted files".format(n_rm)) | ||||
|  | ||||
|         return ret | ||||
|  | ||||
|     def _drop_lost(self, cur, top): | ||||
|         rm = [] | ||||
|         n_rm = 0 | ||||
|         nchecked = 0 | ||||
|         nfiles = next(cur.execute("select count(w) from up"))[0] | ||||
|         c = cur.execute("select rd, fn from up") | ||||
|         for drd, dfn in c: | ||||
|         # `_build_dir` did all the files, now do dirs | ||||
|         ndirs = next(cur.execute("select count(distinct rd) from up"))[0] | ||||
|         c = cur.execute("select distinct rd from up order by rd desc") | ||||
|         for (drd,) in c: | ||||
|             nchecked += 1 | ||||
|             if drd.startswith("//") or dfn.startswith("//"): | ||||
|                 drd, dfn = s3dec(drd, dfn) | ||||
|             if drd.startswith("//"): | ||||
|                 rd = w8b64dec(drd[2:]) | ||||
|             else: | ||||
|                 rd = drd | ||||
|  | ||||
|             abspath = os.path.join(top, drd, dfn) | ||||
|             # almost zero overhead dw | ||||
|             self.pp.msg = "b{} {}".format(nfiles - nchecked, abspath) | ||||
|             abspath = os.path.join(top, rd) | ||||
|             self.pp.msg = "b{} {}".format(ndirs - nchecked, abspath) | ||||
|             try: | ||||
|                 if not bos.path.exists(abspath): | ||||
|                     rm.append([drd, dfn]) | ||||
|             except Exception as ex: | ||||
|                 self.log("stat-rm: {} @ [{}]".format(repr(ex), abspath)) | ||||
|                 if os.path.isdir(abspath): | ||||
|                     continue | ||||
|             except: | ||||
|                 pass | ||||
|  | ||||
|         if rm: | ||||
|             self.log("forgetting {} deleted files".format(len(rm))) | ||||
|             for rd, fn in rm: | ||||
|                 # self.log("{} / {}".format(rd, fn)) | ||||
|                 self.db_rm(cur, rd, fn) | ||||
|             rm.append(drd) | ||||
|  | ||||
|         return len(rm) | ||||
|         if not rm: | ||||
|             return 0 | ||||
|  | ||||
|         q = "select count(w) from up where rd = ?" | ||||
|         for rd in rm: | ||||
|             n_rm += next(cur.execute(q, (rd,)))[0] | ||||
|  | ||||
|         self.log("forgetting {} deleted dirs, {} files".format(len(rm), n_rm)) | ||||
|         for rd in rm: | ||||
|             cur.execute("delete from up where rd = ?", (rd,)) | ||||
|  | ||||
|         return n_rm | ||||
|  | ||||
|     def _build_tags_index(self, vol): | ||||
|         ptop = vol.realpath | ||||
| @@ -647,7 +773,7 @@ class Up2k(object): | ||||
|                 return n_add, n_rm, False | ||||
|  | ||||
|             mpool = False | ||||
|             if self.mtag.prefer_mt and not self.args.no_mtag_mt: | ||||
|             if self.mtag.prefer_mt and self.args.mtag_mt > 1: | ||||
|                 mpool = self._start_mpool() | ||||
|  | ||||
|             conn = sqlite3.connect(db_path, timeout=15) | ||||
| @@ -714,10 +840,11 @@ class Up2k(object): | ||||
|         return ret | ||||
|  | ||||
|     def _run_all_mtp(self): | ||||
|         gid = self.gid | ||||
|         t0 = time.time() | ||||
|         for ptop, flags in self.flags.items(): | ||||
|             if "mtp" in flags: | ||||
|                 self._run_one_mtp(ptop) | ||||
|                 self._run_one_mtp(ptop, gid) | ||||
|  | ||||
|         td = time.time() - t0 | ||||
|         msg = "mtp finished in {:.2f} sec ({})" | ||||
| @@ -728,7 +855,10 @@ class Up2k(object): | ||||
|             if "OFFLINE" not in self.volstate[k]: | ||||
|                 self.volstate[k] = "online, idle" | ||||
|  | ||||
|     def _run_one_mtp(self, ptop): | ||||
|     def _run_one_mtp(self, ptop, gid): | ||||
|         if gid != self.gid: | ||||
|             return | ||||
|  | ||||
|         entags = self.entags[ptop] | ||||
|  | ||||
|         parsers = {} | ||||
| @@ -761,6 +891,9 @@ class Up2k(object): | ||||
|         in_progress = {} | ||||
|         while True: | ||||
|             with self.mutex: | ||||
|                 if gid != self.gid: | ||||
|                     break | ||||
|  | ||||
|                 q = "select w from mt where k = 't:mtp' limit ?" | ||||
|                 warks = cur.execute(q, (batch_sz,)).fetchall() | ||||
|                 warks = [x[0] for x in warks] | ||||
| @@ -880,9 +1013,7 @@ class Up2k(object): | ||||
|     def _start_mpool(self): | ||||
|         # mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor | ||||
|         # both do crazy runahead so lets reinvent another wheel | ||||
|         nw = os.cpu_count() if hasattr(os, "cpu_count") else 4 | ||||
|         if self.args.no_mtag_mt: | ||||
|             nw = 1 | ||||
|         nw = max(1, self.args.mtag_mt) | ||||
|  | ||||
|         if self.pending_tags is None: | ||||
|             self.log("using {}x {}".format(nw, self.mtag.backend)) | ||||
| @@ -940,7 +1071,15 @@ class Up2k(object): | ||||
|  | ||||
|     def _tag_file(self, write_cur, entags, wark, abspath, tags=None): | ||||
|         if tags is None: | ||||
|             tags = self.mtag.get(abspath) | ||||
|             try: | ||||
|                 tags = self.mtag.get(abspath) | ||||
|             except Exception as ex: | ||||
|                 msg = "failed to read tags from {}:\n{}" | ||||
|                 self.log(msg.format(abspath, ex), c=3) | ||||
|                 return 0 | ||||
|  | ||||
|         if not bos.path.isfile(abspath): | ||||
|             return 0 | ||||
|  | ||||
|         if entags: | ||||
|             tags = {k: v for k, v in tags.items() if k in entags} | ||||
| @@ -1112,9 +1251,18 @@ class Up2k(object): | ||||
|                     if dp_dir.startswith("//") or dp_fn.startswith("//"): | ||||
|                         dp_dir, dp_fn = s3dec(dp_dir, dp_fn) | ||||
|  | ||||
|                     if job and (dp_dir != cj["prel"] or dp_fn != cj["name"]): | ||||
|                         continue | ||||
|  | ||||
|                     dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn]) | ||||
|                     # relying on path.exists to return false on broken symlinks | ||||
|                     if bos.path.exists(dp_abs): | ||||
|                     # relying on this to fail on broken symlinks | ||||
|                     try: | ||||
|                         sz = bos.path.getsize(dp_abs) | ||||
|                     except: | ||||
|                         sz = 0 | ||||
|  | ||||
|                     if sz: | ||||
|                         # self.log("--- " + wark + "  " + dp_abs + " found file", 4) | ||||
|                         job = { | ||||
|                             "name": dp_fn, | ||||
|                             "prel": dp_dir, | ||||
| @@ -1126,10 +1274,11 @@ class Up2k(object): | ||||
|                             "at": at, | ||||
|                             "hash": [], | ||||
|                             "need": [], | ||||
|                             "busy": {}, | ||||
|                         } | ||||
|                         break | ||||
|  | ||||
|                 if job and wark in reg: | ||||
|                     # self.log("pop " + wark + "  " + job["name"] + " handle_json db", 4) | ||||
|                     del reg[wark] | ||||
|  | ||||
|             if job or wark in reg: | ||||
| @@ -1157,11 +1306,23 @@ class Up2k(object): | ||||
|                     if job["need"]: | ||||
|                         self.log("unfinished:\n  {0}\n  {1}".format(src, dst)) | ||||
|                         err = "partial upload exists at a different location; please resume uploading here instead:\n" | ||||
|                         err += "/" + vsrc + " " | ||||
|                         err += "/" + quotep(vsrc) + " " | ||||
|  | ||||
|                         # registry is size-constrained + can only contain one unique wark; | ||||
|                         # let want_recheck trigger symlink (if still in reg) or reupload | ||||
|                         if cur: | ||||
|                             dupe = [cj["prel"], cj["name"], cj["lmod"]] | ||||
|                             try: | ||||
|                                 self.dupesched[src].append(dupe) | ||||
|                             except: | ||||
|                                 self.dupesched[src] = [dupe] | ||||
|  | ||||
|                         raise Pebkac(400, err) | ||||
|  | ||||
|                     elif "nodupe" in self.flags[job["ptop"]]: | ||||
|                         self.log("dupe-reject:\n  {0}\n  {1}".format(src, dst)) | ||||
|                         err = "upload rejected, file already exists:\n/" + vsrc + " " | ||||
|                         err = "upload rejected, file already exists:\n" | ||||
|                         err += "/" + quotep(vsrc) + " " | ||||
|                         raise Pebkac(400, err) | ||||
|                     else: | ||||
|                         # symlink to the client-provided name, | ||||
| @@ -1175,7 +1336,7 @@ class Up2k(object): | ||||
|                         dst = os.path.join(job["ptop"], job["prel"], job["name"]) | ||||
|                         if not self.args.nw: | ||||
|                             bos.unlink(dst)  # TODO ed pls | ||||
|                             self._symlink(src, dst) | ||||
|                             self._symlink(src, dst, lmod=cj["lmod"]) | ||||
|  | ||||
|                         if cur: | ||||
|                             a = [cj[x] for x in "prel name lmod size addr".split()] | ||||
| @@ -1199,6 +1360,7 @@ class Up2k(object): | ||||
|                     "t0": now, | ||||
|                     "hash": deepcopy(cj["hash"]), | ||||
|                     "need": [], | ||||
|                     "busy": {}, | ||||
|                 } | ||||
|                 # client-provided, sanitized by _get_wark: name, size, lmod | ||||
|                 for k in [ | ||||
| @@ -1242,18 +1404,22 @@ class Up2k(object): | ||||
|  | ||||
|         # TODO broker which avoid this race and | ||||
|         # provides a new filename if taken (same as bup) | ||||
|         suffix = ".{:.6f}-{}".format(ts, ip) | ||||
|         suffix = "-{:.6f}-{}".format(ts, ip.replace(":", ".")) | ||||
|         with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f: | ||||
|             return f["orz"][1] | ||||
|  | ||||
|     def _symlink(self, src, dst, verbose=True): | ||||
|     def _symlink(self, src, dst, verbose=True, lmod=None): | ||||
|         if verbose: | ||||
|             self.log("linking dupe:\n  {0}\n  {1}".format(src, dst)) | ||||
|  | ||||
|         if self.args.nw: | ||||
|             return | ||||
|  | ||||
|         linked = False | ||||
|         try: | ||||
|             if self.args.no_symlink: | ||||
|                 raise Exception("disabled in config") | ||||
|  | ||||
|             lsrc = src | ||||
|             ldst = dst | ||||
|             fs1 = bos.stat(os.path.dirname(src)).st_dev | ||||
| @@ -1280,10 +1446,18 @@ class Up2k(object): | ||||
|                     hops = len(ndst[nc:]) - 1 | ||||
|                     lsrc = "../" * hops + "/".join(lsrc) | ||||
|             os.symlink(fsenc(lsrc), fsenc(ldst)) | ||||
|             linked = True | ||||
|         except Exception as ex: | ||||
|             self.log("cannot symlink; creating copy: " + repr(ex)) | ||||
|             shutil.copy2(fsenc(src), fsenc(dst)) | ||||
|  | ||||
|         if lmod and (not linked or SYMTIME): | ||||
|             times = (int(time.time()), int(lmod)) | ||||
|             if ANYWIN: | ||||
|                 self.lastmod_q.put([dst, 0, times]) | ||||
|             else: | ||||
|                 bos.utime(dst, times, False) | ||||
|  | ||||
|     def handle_chunk(self, ptop, wark, chash): | ||||
|         with self.mutex: | ||||
|             job = self.registry[ptop].get(wark) | ||||
| @@ -1302,6 +1476,14 @@ class Up2k(object): | ||||
|             if not nchunk: | ||||
|                 raise Pebkac(400, "unknown chunk") | ||||
|  | ||||
|             if chash in job["busy"]: | ||||
|                 nh = len(job["hash"]) | ||||
|                 idx = job["hash"].index(chash) | ||||
|                 m = "that chunk is already being written to:\n  {}\n  {} {}/{}\n  {}" | ||||
|                 raise Pebkac(400, m.format(wark, chash, idx, nh, job["name"])) | ||||
|  | ||||
|             job["busy"][chash] = 1 | ||||
|  | ||||
|         job["poke"] = time.time() | ||||
|  | ||||
|         chunksize = up2k_chunksize(job["size"]) | ||||
| @@ -1311,6 +1493,14 @@ class Up2k(object): | ||||
|  | ||||
|         return [chunksize, ofs, path, job["lmod"]] | ||||
|  | ||||
|     def release_chunk(self, ptop, wark, chash): | ||||
|         with self.mutex: | ||||
|             job = self.registry[ptop].get(wark) | ||||
|             if job: | ||||
|                 job["busy"].pop(chash, None) | ||||
|  | ||||
|         return [True] | ||||
|  | ||||
|     def confirm_chunk(self, ptop, wark, chash): | ||||
|         with self.mutex: | ||||
|             try: | ||||
| @@ -1321,6 +1511,8 @@ class Up2k(object): | ||||
|             except Exception as ex: | ||||
|                 return "confirm_chunk, wark, " + repr(ex) | ||||
|  | ||||
|             job["busy"].pop(chash, None) | ||||
|  | ||||
|             try: | ||||
|                 job["need"].remove(chash) | ||||
|             except Exception as ex: | ||||
| @@ -1331,23 +1523,75 @@ class Up2k(object): | ||||
|                 return ret, src | ||||
|  | ||||
|             if self.args.nw: | ||||
|                 # del self.registry[ptop][wark] | ||||
|                 self.regdrop(ptop, wark) | ||||
|                 return ret, dst | ||||
|  | ||||
|             atomic_move(src, dst) | ||||
|  | ||||
|             if ANYWIN: | ||||
|                 a = [dst, job["size"], (int(time.time()), int(job["lmod"]))] | ||||
|                 self.lastmod_q.put(a) | ||||
|  | ||||
|             a = [job[x] for x in "ptop wark prel name lmod size addr".split()] | ||||
|             a += [job.get("at") or time.time()] | ||||
|             if self.idx_wark(*a): | ||||
|                 del self.registry[ptop][wark] | ||||
|                 # in-memory registry is reserved for unfinished uploads | ||||
|             # windows cant rename open files | ||||
|             if not ANYWIN or src == dst: | ||||
|                 self._finish_upload(ptop, wark) | ||||
|  | ||||
|         return ret, dst | ||||
|  | ||||
|     def finish_upload(self, ptop, wark): | ||||
|         with self.mutex: | ||||
|             self._finish_upload(ptop, wark) | ||||
|  | ||||
|     def _finish_upload(self, ptop, wark): | ||||
|         try: | ||||
|             job = self.registry[ptop][wark] | ||||
|             pdir = os.path.join(job["ptop"], job["prel"]) | ||||
|             src = os.path.join(pdir, job["tnam"]) | ||||
|             dst = os.path.join(pdir, job["name"]) | ||||
|         except Exception as ex: | ||||
|             return "finish_upload, wark, " + repr(ex) | ||||
|  | ||||
|         # self.log("--- " + wark + "  " + dst + " finish_upload atomic " + dst, 4) | ||||
|         atomic_move(src, dst) | ||||
|  | ||||
|         if ANYWIN: | ||||
|             a = [dst, job["size"], (int(time.time()), int(job["lmod"]))] | ||||
|             self.lastmod_q.put(a) | ||||
|  | ||||
|         a = [job[x] for x in "ptop wark prel name lmod size addr".split()] | ||||
|         a += [job.get("at") or time.time()] | ||||
|         if self.idx_wark(*a): | ||||
|             del self.registry[ptop][wark] | ||||
|         else: | ||||
|             self.regdrop(ptop, wark) | ||||
|  | ||||
|         dupes = self.dupesched.pop(dst, []) | ||||
|         if not dupes: | ||||
|             return | ||||
|  | ||||
|         cur = self.cur.get(ptop) | ||||
|         for rd, fn, lmod in dupes: | ||||
|             d2 = os.path.join(ptop, rd, fn) | ||||
|             if os.path.exists(d2): | ||||
|                 continue | ||||
|  | ||||
|             self._symlink(dst, d2, lmod=lmod) | ||||
|             if cur: | ||||
|                 self.db_rm(cur, rd, fn) | ||||
|                 self.db_add(cur, wark, rd, fn, *a[-4:]) | ||||
|  | ||||
|         if cur: | ||||
|             cur.connection.commit() | ||||
|  | ||||
|     def regdrop(self, ptop, wark): | ||||
|         t = self.droppable[ptop] | ||||
|         if wark: | ||||
|             t.append(wark) | ||||
|  | ||||
|         if len(t) <= self.args.reg_cap: | ||||
|             return | ||||
|  | ||||
|         n = len(t) - int(self.args.reg_cap / 2) | ||||
|         m = "up2k-registry [{}] has {} droppables; discarding {}" | ||||
|         self.log(m.format(ptop, len(t), n)) | ||||
|         for k in t[:n]: | ||||
|             self.registry[ptop].pop(k, None) | ||||
|         self.droppable[ptop] = t[n:] | ||||
|  | ||||
|     def idx_wark(self, ptop, wark, rd, fn, lmod, sz, ip, at): | ||||
|         cur = self.cur.get(ptop) | ||||
|         if not cur: | ||||
| @@ -1385,7 +1629,7 @@ class Up2k(object): | ||||
|         ok = {} | ||||
|         ng = {} | ||||
|         for vp in vpaths: | ||||
|             a, b, c = self._handle_rm(uname, ip, vp, False) | ||||
|             a, b, c = self._handle_rm(uname, ip, vp) | ||||
|             n_files += a | ||||
|             for k in b: | ||||
|                 ok[k] = 1 | ||||
| @@ -1398,10 +1642,11 @@ class Up2k(object): | ||||
|  | ||||
|         return "deleted {} files (and {}/{} folders)".format(n_files, ok, ok + ng) | ||||
|  | ||||
|     def _handle_rm(self, uname, ip, vpath, rm_topdir): | ||||
|     def _handle_rm(self, uname, ip, vpath): | ||||
|         try: | ||||
|             permsets = [[True, False, False, True]] | ||||
|             vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0]) | ||||
|             vn, rem = vn.get_dbv(rem) | ||||
|             unpost = False | ||||
|         except: | ||||
|             # unpost with missing permissions? try read+write and verify with db | ||||
| @@ -1411,6 +1656,7 @@ class Up2k(object): | ||||
|             unpost = True | ||||
|             permsets = [[True, True]] | ||||
|             vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0]) | ||||
|             vn, rem = vn.get_dbv(rem) | ||||
|             _, _, _, _, dip, dat = self._find_from_vpath(vn.realpath, rem) | ||||
|  | ||||
|             m = "you cannot delete this: " | ||||
| @@ -1467,7 +1713,7 @@ class Up2k(object): | ||||
|  | ||||
|                 bos.unlink(abspath) | ||||
|  | ||||
|         rm = rmdirs(self.log_func, scandir, True, atop, 1 if rm_topdir else 0) | ||||
|         rm = rmdirs(self.log_func, scandir, True, atop, 1) | ||||
|         return n_files, rm[0], rm[1] | ||||
|  | ||||
|     def handle_mv(self, uname, svp, dvp): | ||||
| @@ -1540,12 +1786,16 @@ class Up2k(object): | ||||
|             dlabs = absreal(sabs) | ||||
|             m = "moving symlink from [{}] to [{}], target [{}]" | ||||
|             self.log(m.format(sabs, dabs, dlabs)) | ||||
|             os.unlink(sabs) | ||||
|             self._symlink(dlabs, dabs, False) | ||||
|             mt = bos.path.getmtime(sabs, False) | ||||
|             bos.unlink(sabs) | ||||
|             self._symlink(dlabs, dabs, False, lmod=mt) | ||||
|  | ||||
|             # folders are too scary, schedule rescan of both vols | ||||
|             self.need_rescan[svn.vpath] = 1 | ||||
|             self.need_rescan[dvn.vpath] = 1 | ||||
|             with self.rescan_cond: | ||||
|                 self.rescan_cond.notify_all() | ||||
|  | ||||
|             return "k" | ||||
|  | ||||
|         c1, w, ftime, fsize, ip, at = self._find_from_vpath(svn.realpath, srem) | ||||
| @@ -1623,7 +1873,7 @@ class Up2k(object): | ||||
|                 wark = [ | ||||
|                     x | ||||
|                     for x, y in reg.items() | ||||
|                     if fn in [y["name"], y.get("tnam")] and y["prel"] == vrem | ||||
|                     if sfn in [y["name"], y.get("tnam")] and y["prel"] == vrem | ||||
|                 ] | ||||
|  | ||||
|             if wark and wark in reg: | ||||
| @@ -1668,25 +1918,30 @@ class Up2k(object): | ||||
|             slabs = list(sorted(links.keys()))[0] | ||||
|             ptop, rem = links.pop(slabs) | ||||
|             self.log("linkswap [{}] and [{}]".format(sabs, slabs)) | ||||
|             mt = bos.path.getmtime(slabs, False) | ||||
|             bos.unlink(slabs) | ||||
|             bos.rename(sabs, slabs) | ||||
|             bos.utime(slabs, (int(time.time()), int(mt)), False) | ||||
|             self._symlink(slabs, sabs, False) | ||||
|             full[slabs] = [ptop, rem] | ||||
|             sabs = slabs | ||||
|  | ||||
|         if not dabs: | ||||
|             dabs = list(sorted(full.keys()))[0] | ||||
|  | ||||
|         for alink in links.keys(): | ||||
|             lmod = None | ||||
|             try: | ||||
|                 if alink != sabs and absreal(alink) != sabs: | ||||
|                     continue | ||||
|  | ||||
|                 self.log("relinking [{}] to [{}]".format(alink, dabs)) | ||||
|                 lmod = bos.path.getmtime(alink, False) | ||||
|                 bos.unlink(alink) | ||||
|             except: | ||||
|                 pass | ||||
|  | ||||
|             self._symlink(dabs, alink, False) | ||||
|             self._symlink(dabs, alink, False, lmod=lmod) | ||||
|  | ||||
|         return len(full) + len(links) | ||||
|  | ||||
| @@ -1706,7 +1961,13 @@ class Up2k(object): | ||||
|         except: | ||||
|             cj["lmod"] = int(time.time()) | ||||
|  | ||||
|         wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"]) | ||||
|         if cj["hash"]: | ||||
|             wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"]) | ||||
|         else: | ||||
|             wark = up2k_wark_from_metadata( | ||||
|                 self.salt, cj["size"], cj["lmod"], cj["prel"], cj["name"] | ||||
|             ) | ||||
|  | ||||
|         return wark | ||||
|  | ||||
|     def _hashlist_from_file(self, path): | ||||
| @@ -1749,9 +2010,12 @@ class Up2k(object): | ||||
|  | ||||
|         if self.args.nw: | ||||
|             job["tnam"] = tnam | ||||
|             if not job["hash"]: | ||||
|                 del self.registry[job["ptop"]][job["wark"]] | ||||
|             return | ||||
|  | ||||
|         suffix = ".{:.6f}-{}".format(job["t0"], job["addr"]) | ||||
|         dip = job["addr"].replace(":", ".") | ||||
|         suffix = "-{:.6f}-{}".format(job["t0"], dip) | ||||
|         with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f: | ||||
|             f, job["tnam"] = f["orz"] | ||||
|             if ( | ||||
| @@ -1765,8 +2029,12 @@ class Up2k(object): | ||||
|                 except: | ||||
|                     self.log("could not sparse [{}]".format(fp), 3) | ||||
|  | ||||
|             f.seek(job["size"] - 1) | ||||
|             f.write(b"e") | ||||
|             if job["hash"]: | ||||
|                 f.seek(job["size"] - 1) | ||||
|                 f.write(b"e") | ||||
|  | ||||
|         if not job["hash"]: | ||||
|             self._finish_upload(job["ptop"], job["wark"]) | ||||
|  | ||||
|     def _lastmodder(self): | ||||
|         while True: | ||||
| @@ -1779,7 +2047,7 @@ class Up2k(object): | ||||
|             for path, sz, times in ready: | ||||
|                 self.log("lmod: setting times {} on {}".format(times, path)) | ||||
|                 try: | ||||
|                     bos.utime(path, times) | ||||
|                     bos.utime(path, times, False) | ||||
|                 except: | ||||
|                     self.log("lmod: failed to utime ({}, {})".format(path, times)) | ||||
|  | ||||
| @@ -1795,7 +2063,8 @@ class Up2k(object): | ||||
|         self.snap_prev = {} | ||||
|         while True: | ||||
|             time.sleep(self.snap_persist_interval) | ||||
|             self.do_snapshot() | ||||
|             if not hasattr(self, "pp"): | ||||
|                 self.do_snapshot() | ||||
|  | ||||
|     def do_snapshot(self): | ||||
|         with self.mutex: | ||||
| @@ -1804,7 +2073,10 @@ class Up2k(object): | ||||
|  | ||||
|     def _snap_reg(self, ptop, reg): | ||||
|         now = time.time() | ||||
|         histpath = self.asrv.vfs.histtab[ptop] | ||||
|         histpath = self.asrv.vfs.histtab.get(ptop) | ||||
|         if not histpath: | ||||
|             return | ||||
|  | ||||
|         rm = [x for x in reg.values() if now - x["poke"] > self.snap_discard_interval] | ||||
|         if rm: | ||||
|             m = "dropping {} abandoned uploads in {}".format(len(rm), ptop) | ||||
| @@ -1841,7 +2113,8 @@ class Up2k(object): | ||||
|         bos.makedirs(histpath) | ||||
|  | ||||
|         path2 = "{}.{}".format(path, os.getpid()) | ||||
|         j = json.dumps(reg, indent=2, sort_keys=True).encode("utf-8") | ||||
|         body = {"droppable": self.droppable[ptop], "registry": reg} | ||||
|         j = json.dumps(body, indent=2, sort_keys=True).encode("utf-8") | ||||
|         with gzip.GzipFile(path2, "wb") as f: | ||||
|             f.write(j) | ||||
|  | ||||
| @@ -1864,11 +2137,16 @@ class Up2k(object): | ||||
|  | ||||
|             # self.log("\n  " + repr([ptop, rd, fn])) | ||||
|             abspath = os.path.join(ptop, rd, fn) | ||||
|             tags = self.mtag.get(abspath) | ||||
|             ntags1 = len(tags) | ||||
|             parsers = self._get_parsers(ptop, tags, abspath) | ||||
|             if parsers: | ||||
|                 tags.update(self.mtag.get_bin(parsers, abspath)) | ||||
|             try: | ||||
|                 tags = self.mtag.get(abspath) | ||||
|                 ntags1 = len(tags) | ||||
|                 parsers = self._get_parsers(ptop, tags, abspath) | ||||
|                 if parsers: | ||||
|                     tags.update(self.mtag.get_bin(parsers, abspath)) | ||||
|             except Exception as ex: | ||||
|                 msg = "failed to read tags from {}:\n{}" | ||||
|                 self.log(msg.format(abspath, ex), c=3) | ||||
|                 continue | ||||
|  | ||||
|             with self.mutex: | ||||
|                 cur = self.cur[ptop] | ||||
|   | ||||
| @@ -67,8 +67,9 @@ if WINDOWS and PY2: | ||||
|     FS_ENCODING = "utf-8" | ||||
|  | ||||
|  | ||||
| HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT" | ||||
| SYMTIME = sys.version_info >= (3, 6) and os.supports_follow_symlinks | ||||
|  | ||||
| HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT" | ||||
|  | ||||
| HTTPCODE = { | ||||
|     200: "OK", | ||||
| @@ -100,10 +101,25 @@ IMPLICATIONS = [ | ||||
|  | ||||
|  | ||||
| MIMES = { | ||||
|     "md": "text/plain; charset=UTF-8", | ||||
|     "md": "text/plain", | ||||
|     "txt": "text/plain", | ||||
|     "js": "text/javascript", | ||||
|     "opus": "audio/ogg; codecs=opus", | ||||
|     "webp": "image/webp", | ||||
|     "caf": "audio/x-caf", | ||||
|     "mp3": "audio/mpeg", | ||||
|     "m4a": "audio/mp4", | ||||
|     "jpg": "image/jpeg", | ||||
| } | ||||
| for ln in """text css html csv | ||||
| application json wasm xml pdf rtf zip | ||||
| image webp jpeg png gif bmp | ||||
| audio aac ogg wav | ||||
| video webm mp4 mpeg | ||||
| font woff woff2 otf ttf | ||||
| """.splitlines(): | ||||
|     k, vs = ln.split(" ", 1) | ||||
|     for v in vs.strip().split(): | ||||
|         MIMES[v] = "{}/{}".format(k, v) | ||||
|  | ||||
|  | ||||
| REKOBO_KEY = { | ||||
| @@ -251,6 +267,55 @@ class _LUnrecv(object): | ||||
| Unrecv = _Unrecv | ||||
|  | ||||
|  | ||||
| class FHC(object): | ||||
|     class CE(object): | ||||
|         def __init__(self, fh): | ||||
|             self.ts = 0 | ||||
|             self.fhs = [fh] | ||||
|  | ||||
|     def __init__(self): | ||||
|         self.cache = {} | ||||
|  | ||||
|     def close(self, path): | ||||
|         try: | ||||
|             ce = self.cache[path] | ||||
|         except: | ||||
|             return | ||||
|  | ||||
|         for fh in ce.fhs: | ||||
|             fh.close() | ||||
|  | ||||
|         del self.cache[path] | ||||
|  | ||||
|     def clean(self): | ||||
|         if not self.cache: | ||||
|             return | ||||
|  | ||||
|         keep = {} | ||||
|         now = time.time() | ||||
|         for path, ce in self.cache.items(): | ||||
|             if now < ce.ts + 5: | ||||
|                 keep[path] = ce | ||||
|             else: | ||||
|                 for fh in ce.fhs: | ||||
|                     fh.close() | ||||
|  | ||||
|         self.cache = keep | ||||
|  | ||||
|     def pop(self, path): | ||||
|         return self.cache[path].fhs.pop() | ||||
|  | ||||
|     def put(self, path, fh): | ||||
|         try: | ||||
|             ce = self.cache[path] | ||||
|             ce.fhs.append(fh) | ||||
|         except: | ||||
|             ce = self.CE(fh) | ||||
|             self.cache[path] = ce | ||||
|  | ||||
|         ce.ts = time.time() | ||||
|  | ||||
|  | ||||
| class ProgressPrinter(threading.Thread): | ||||
|     """ | ||||
|     periodically print progress info without linefeeds | ||||
| @@ -375,7 +440,7 @@ def stackmon(fp, ival, suffix): | ||||
|  | ||||
|  | ||||
| def start_log_thrs(logger, ival, nid): | ||||
|     ival = int(ival) | ||||
|     ival = float(ival) | ||||
|     tname = lname = "log-thrs" | ||||
|     if nid: | ||||
|         tname = "logthr-n{}-i{:x}".format(nid, os.getpid()) | ||||
| @@ -396,7 +461,7 @@ def log_thrs(log, ival, name): | ||||
|         tv = [x.name for x in threading.enumerate()] | ||||
|         tv = [ | ||||
|             x.split("-")[0] | ||||
|             if x.startswith("httpconn-") or x.startswith("thumb-") | ||||
|             if x.split("-")[0] in ["httpconn", "thumb", "tagger"] | ||||
|             else "listen" | ||||
|             if "-listen-" in x | ||||
|             else x | ||||
| @@ -410,6 +475,10 @@ def log_thrs(log, ival, name): | ||||
| def vol_san(vols, txt): | ||||
|     for vol in vols: | ||||
|         txt = txt.replace(vol.realpath.encode("utf-8"), vol.vpath.encode("utf-8")) | ||||
|         txt = txt.replace( | ||||
|             vol.realpath.encode("utf-8").replace(b"\\", b"\\\\"), | ||||
|             vol.vpath.encode("utf-8"), | ||||
|         ) | ||||
|  | ||||
|     return txt | ||||
|  | ||||
| @@ -425,11 +494,12 @@ def min_ex(): | ||||
|  | ||||
| @contextlib.contextmanager | ||||
| def ren_open(fname, *args, **kwargs): | ||||
|     fun = kwargs.pop("fun", open) | ||||
|     fdir = kwargs.pop("fdir", None) | ||||
|     suffix = kwargs.pop("suffix", None) | ||||
|  | ||||
|     if fname == os.devnull: | ||||
|         with open(fname, *args, **kwargs) as f: | ||||
|         with fun(fname, *args, **kwargs) as f: | ||||
|             yield {"orz": [f, fname]} | ||||
|             return | ||||
|  | ||||
| @@ -463,7 +533,7 @@ def ren_open(fname, *args, **kwargs): | ||||
|                 fname += suffix | ||||
|                 ext += suffix | ||||
|  | ||||
|             with open(fsenc(fpath), *args, **kwargs) as f: | ||||
|             with fun(fsenc(fpath), *args, **kwargs) as f: | ||||
|                 if b64: | ||||
|                     fp2 = "fn-trunc.{}.txt".format(b64) | ||||
|                     fp2 = os.path.join(fdir, fp2) | ||||
| @@ -508,8 +578,8 @@ class MultipartParser(object): | ||||
|         self.log = log_func | ||||
|         self.headers = http_headers | ||||
|  | ||||
|         self.re_ctype = re.compile(r"^content-type: *([^;]+)", re.IGNORECASE) | ||||
|         self.re_cdisp = re.compile(r"^content-disposition: *([^;]+)", re.IGNORECASE) | ||||
|         self.re_ctype = re.compile(r"^content-type: *([^; ]+)", re.IGNORECASE) | ||||
|         self.re_cdisp = re.compile(r"^content-disposition: *([^; ]+)", re.IGNORECASE) | ||||
|         self.re_cdisp_field = re.compile( | ||||
|             r'^content-disposition:(?: *|.*; *)name="([^"]+)"', re.IGNORECASE | ||||
|         ) | ||||
| @@ -708,7 +778,7 @@ class MultipartParser(object): | ||||
| def get_boundary(headers): | ||||
|     # boundaries contain a-z A-Z 0-9 ' ( ) + _ , - . / : = ? | ||||
|     # (whitespace allowed except as the last char) | ||||
|     ptn = r"^multipart/form-data; *(.*; *)?boundary=([^;]+)" | ||||
|     ptn = r"^multipart/form-data *; *(.*; *)?boundary=([^;]+)" | ||||
|     ct = headers["content-type"] | ||||
|     m = re.match(ptn, ct, re.IGNORECASE) | ||||
|     if not m: | ||||
| @@ -753,6 +823,17 @@ def gen_filekey(salt, fspath, fsize, inode): | ||||
|     ).decode("ascii") | ||||
|  | ||||
|  | ||||
| def gencookie(k, v, dur): | ||||
|     v = v.replace(";", "") | ||||
|     if dur: | ||||
|         dt = datetime.utcfromtimestamp(time.time() + dur) | ||||
|         exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT") | ||||
|     else: | ||||
|         exp = "Fri, 15 Aug 1997 01:00:00 GMT" | ||||
|  | ||||
|     return "{}={}; Path=/; Expires={}; SameSite=Lax".format(k, v, exp) | ||||
|  | ||||
|  | ||||
| def humansize(sz, terse=False): | ||||
|     for unit in ["B", "KiB", "MiB", "GiB", "TiB"]: | ||||
|         if sz < 1024: | ||||
| @@ -1096,12 +1177,14 @@ def hashcopy(fin, fout): | ||||
|     return tlen, hashobj.hexdigest(), digest_b64 | ||||
|  | ||||
|  | ||||
| def sendfile_py(lower, upper, f, s): | ||||
| def sendfile_py(log, lower, upper, f, s, bufsz, slp): | ||||
|     remains = upper - lower | ||||
|     f.seek(lower) | ||||
|     while remains > 0: | ||||
|         # time.sleep(0.01) | ||||
|         buf = f.read(min(1024 * 32, remains)) | ||||
|         if slp: | ||||
|             time.sleep(slp) | ||||
|  | ||||
|         buf = f.read(min(bufsz, remains)) | ||||
|         if not buf: | ||||
|             return remains | ||||
|  | ||||
| @@ -1114,17 +1197,24 @@ def sendfile_py(lower, upper, f, s): | ||||
|     return 0 | ||||
|  | ||||
|  | ||||
| def sendfile_kern(lower, upper, f, s): | ||||
| def sendfile_kern(log, lower, upper, f, s, bufsz, slp): | ||||
|     out_fd = s.fileno() | ||||
|     in_fd = f.fileno() | ||||
|     ofs = lower | ||||
|     stuck = None | ||||
|     while ofs < upper: | ||||
|         stuck = stuck or time.time() | ||||
|         try: | ||||
|             req = min(2 ** 30, upper - ofs) | ||||
|             select.select([], [out_fd], [], 10) | ||||
|             n = os.sendfile(out_fd, in_fd, ofs, req) | ||||
|             stuck = None | ||||
|         except Exception as ex: | ||||
|             # print("sendfile: " + repr(ex)) | ||||
|             d = time.time() - stuck | ||||
|             log("sendfile stuck for {:.3f} sec: {!r}".format(d, ex)) | ||||
|             if d < 3600 and ex.errno == 11:  # eagain | ||||
|                 continue | ||||
|  | ||||
|             n = 0 | ||||
|  | ||||
|         if n <= 0: | ||||
| @@ -1137,6 +1227,9 @@ def sendfile_kern(lower, upper, f, s): | ||||
|  | ||||
|  | ||||
| def statdir(logger, scandir, lstat, top): | ||||
|     if lstat and ANYWIN: | ||||
|         lstat = False | ||||
|  | ||||
|     if lstat and not os.supports_follow_symlinks: | ||||
|         scandir = False | ||||
|  | ||||
| @@ -1167,6 +1260,7 @@ def statdir(logger, scandir, lstat, top): | ||||
| def rmdirs(logger, scandir, lstat, top, depth): | ||||
|     if not os.path.exists(fsenc(top)) or not os.path.isdir(fsenc(top)): | ||||
|         top = os.path.dirname(top) | ||||
|         depth -= 1 | ||||
|  | ||||
|     dirs = statdir(logger, scandir, lstat, top) | ||||
|     dirs = [x[0] for x in dirs if stat.S_ISDIR(x[1].st_mode)] | ||||
| @@ -1222,18 +1316,33 @@ def guess_mime(url, fallback="application/octet-stream"): | ||||
|     except: | ||||
|         return fallback | ||||
|  | ||||
|     ret = MIMES.get(ext) or mimetypes.guess_type(url)[0] or fallback | ||||
|     ret = MIMES.get(ext) | ||||
|  | ||||
|     if not ret: | ||||
|         x = mimetypes.guess_type(url) | ||||
|         ret = "application/{}".format(x[1]) if x[1] else x[0] | ||||
|  | ||||
|     if not ret: | ||||
|         ret = fallback | ||||
|  | ||||
|     if ";" not in ret: | ||||
|         if ret.startswith("text/") or ret.endswith("/javascript"): | ||||
|             ret += "; charset=UTF-8" | ||||
|             ret += "; charset=utf-8" | ||||
|  | ||||
|     return ret | ||||
|  | ||||
|  | ||||
| def runcmd(argv): | ||||
| def runcmd(argv, timeout=None): | ||||
|     p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE) | ||||
|     stdout, stderr = p.communicate() | ||||
|     if not timeout or PY2: | ||||
|         stdout, stderr = p.communicate() | ||||
|     else: | ||||
|         try: | ||||
|             stdout, stderr = p.communicate(timeout=timeout) | ||||
|         except sp.TimeoutExpired: | ||||
|             p.kill() | ||||
|             stdout, stderr = p.communicate() | ||||
|  | ||||
|     stdout = stdout.decode("utf-8", "replace") | ||||
|     stderr = stderr.decode("utf-8", "replace") | ||||
|     return [p.returncode, stdout, stderr] | ||||
|   | ||||
| @@ -237,7 +237,7 @@ window.baguetteBox = (function () { | ||||
|     } | ||||
|  | ||||
|     function keyDownHandler(e) { | ||||
|         if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing) | ||||
|         if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing || modal.busy) | ||||
|             return; | ||||
|  | ||||
|         var k = e.code + '', v = vid(); | ||||
| @@ -331,7 +331,7 @@ window.baguetteBox = (function () { | ||||
|  | ||||
|     function tglsel() { | ||||
|         var thumb = currentGallery[currentIndex].imageElement, | ||||
|             name = vsplit(thumb.href)[1], | ||||
|             name = vsplit(thumb.href)[1].split('?')[0], | ||||
|             files = msel.getall(); | ||||
|  | ||||
|         for (var a = 0; a < files.length; a++) | ||||
| @@ -345,7 +345,7 @@ window.baguetteBox = (function () { | ||||
|     function selbg() { | ||||
|         var img = vidimg(), | ||||
|             thumb = currentGallery[currentIndex].imageElement, | ||||
|             name = vsplit(thumb.href)[1], | ||||
|             name = vsplit(thumb.href)[1].split('?')[0], | ||||
|             files = msel.getsel(), | ||||
|             sel = false; | ||||
|  | ||||
| @@ -530,9 +530,7 @@ window.baguetteBox = (function () { | ||||
|             if (options.bodyClass && document.body.classList) | ||||
|                 document.body.classList.remove(options.bodyClass); | ||||
|  | ||||
|             var h = ebi('bbox-halp'); | ||||
|             if (h) | ||||
|                 h.parentNode.removeChild(h); | ||||
|             qsr('#bbox-halp'); | ||||
|  | ||||
|             if (options.afterHide) | ||||
|                 options.afterHide(); | ||||
| @@ -590,8 +588,7 @@ window.baguetteBox = (function () { | ||||
|  | ||||
|         image.addEventListener(is_vid ? 'loadedmetadata' : 'load', function () { | ||||
|             // Remove loader element | ||||
|             var spinner = QS('#baguette-img-' + index + ' .bbox-spinner'); | ||||
|             figure.removeChild(spinner); | ||||
|             qsr('#baguette-img-' + index + ' .bbox-spinner'); | ||||
|             if (!options.async && callback) | ||||
|                 callback(); | ||||
|         }); | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -18,9 +18,9 @@ | ||||
|  | ||||
| 	<div id="op_search" class="opview"> | ||||
| 		{%- if have_tags_idx %} | ||||
| 		<div id="srch_form" class="tags"></div> | ||||
| 		<div id="srch_form" class="tags opbox"></div> | ||||
| 		{%- else %} | ||||
| 		<div id="srch_form"></div> | ||||
| 		<div id="srch_form" class="opbox"></div> | ||||
| 		{%- endif %} | ||||
| 		<div id="srch_q"></div> | ||||
| 	</div> | ||||
| @@ -31,7 +31,7 @@ | ||||
| 		<div id="u2err"></div> | ||||
| 		<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}"> | ||||
| 			<input type="hidden" name="act" value="bput" /> | ||||
| 			<input type="file" name="f" multiple><br /> | ||||
| 			<input type="file" name="f" multiple /><br /> | ||||
| 			<input type="submit" value="start upload"> | ||||
| 		</form> | ||||
| 	</div> | ||||
| @@ -39,7 +39,7 @@ | ||||
| 	<div id="op_mkdir" class="opview opbox act"> | ||||
| 		<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}"> | ||||
| 			<input type="hidden" name="act" value="mkdir" /> | ||||
| 			📂<input type="text" name="name" size="30"> | ||||
| 			📂<input type="text" name="name" class="i"> | ||||
| 			<input type="submit" value="make directory"> | ||||
| 		</form> | ||||
| 	</div> | ||||
| @@ -47,15 +47,15 @@ | ||||
| 	<div id="op_new_md" class="opview opbox"> | ||||
| 		<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}"> | ||||
| 			<input type="hidden" name="act" value="new_md" /> | ||||
| 			📝<input type="text" name="name" size="30"> | ||||
| 			📝<input type="text" name="name" class="i"> | ||||
| 			<input type="submit" value="new markdown doc"> | ||||
| 		</form> | ||||
| 	</div> | ||||
|  | ||||
| 	<div id="op_msg" class="opview opbox act"> | ||||
| 		<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}"> | ||||
| 			📟<input type="text" name="msg" size="30"> | ||||
| 			<input type="submit" value="send msg to server log"> | ||||
| 			📟<input type="text" name="msg" class="i"> | ||||
| 			<input type="submit" value="send msg to srv log"> | ||||
| 		</form> | ||||
| 	</div> | ||||
|  | ||||
| @@ -76,6 +76,12 @@ | ||||
|  | ||||
| <div id="wrap"> | ||||
|  | ||||
| 	{%- if doc %} | ||||
| 	<div id="bdoc"><pre>{{ doc|e }}</pre></div> | ||||
| 	{%- else %} | ||||
| 	<div id="bdoc"></div> | ||||
| 	{%- endif %} | ||||
|  | ||||
| 	<div id="pro" class="logue">{{ logues[0] }}</div> | ||||
|  | ||||
| 	<table id="files"> | ||||
| @@ -130,15 +136,24 @@ | ||||
| 			def_hcols = {{ def_hcols|tojson }}, | ||||
| 			have_up2k_idx = {{ have_up2k_idx|tojson }}, | ||||
| 			have_tags_idx = {{ have_tags_idx|tojson }}, | ||||
| 			have_acode = {{ have_acode|tojson }}, | ||||
| 			have_mv = {{ have_mv|tojson }}, | ||||
| 			have_del = {{ have_del|tojson }}, | ||||
| 			have_unpost = {{ have_unpost|tojson }}, | ||||
| 			have_zip = {{ have_zip|tojson }}, | ||||
| 			readme = {{ readme|tojson }}; | ||||
| 			txt_ext = "{{ txt_ext }}", | ||||
| 			{% if no_prism %}no_prism = 1,{% endif %} | ||||
| 			readme = {{ readme|tojson }}, | ||||
| 			ls0 = {{ ls0|tojson }}; | ||||
|  | ||||
| 		document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark"); | ||||
| 	</script> | ||||
| 	<script src="/.cpr/util.js?_={{ ts }}"></script> | ||||
| 	<script src="/.cpr/browser.js?_={{ ts }}"></script> | ||||
| 	<script src="/.cpr/up2k.js?_={{ ts }}"></script> | ||||
| 	{%- if js %} | ||||
| 	<script src="{{ js }}?_={{ ts }}"></script> | ||||
| 	{%- endif %} | ||||
| </body> | ||||
|  | ||||
| </html> | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -10,7 +10,7 @@ | ||||
| 	{%- endif %} | ||||
| </head> | ||||
| <body> | ||||
| 	<div id="mn">navbar</div> | ||||
| 	<div id="mn"></div> | ||||
| 	<div id="mh"> | ||||
| 		<a id="lightswitch" href="#">go dark</a> | ||||
| 		<a id="navtoggle" href="#">hide nav</a> | ||||
| @@ -135,13 +135,13 @@ var md_opt = { | ||||
|  | ||||
| (function () { | ||||
|     var l = localStorage, | ||||
| 		drk = l.getItem('lightmode') != 1, | ||||
| 		drk = l.lightmode != 1, | ||||
| 		btn = document.getElementById("lightswitch"), | ||||
| 		f = function (e) { | ||||
| if (e) { e.preventDefault(); drk = !drk; } | ||||
| document.documentElement.setAttribute("class", drk? "dark":"light"); | ||||
| btn.innerHTML = "go " + (drk ? "light":"dark"); | ||||
| l.setItem('lightmode', drk? 0:1); | ||||
| l.lightmode = drk? 0:1; | ||||
|     	}; | ||||
| 	 | ||||
| 	btn.onclick = f; | ||||
|   | ||||
| @@ -39,20 +39,14 @@ var md_plug = {}; | ||||
|  | ||||
| // add navbar | ||||
| (function () { | ||||
|     var n = document.location + ''; | ||||
|     n = n.substr(n.indexOf('//') + 2).split('?')[0].split('/'); | ||||
|     n[0] = 'top'; | ||||
|     var loc = []; | ||||
|     var nav = []; | ||||
|     for (var a = 0; a < n.length; a++) { | ||||
|         if (a > 0) | ||||
|             loc.push(n[a]); | ||||
|  | ||||
|         var dec = esc(uricom_dec(n[a])[0]); | ||||
|  | ||||
|         nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>'); | ||||
|     var parts = get_evpath().split('/'), link = '', o; | ||||
|     for (var a = 0, aa = parts.length - 2; a <= aa; a++) { | ||||
|         link += parts[a] + (a < aa ? '/' : ''); | ||||
|         o = mknod('a'); | ||||
|         o.setAttribute('href', link); | ||||
|         o.textContent = uricom_dec(parts[a])[0] || 'top'; | ||||
|         dom_nav.appendChild(o); | ||||
|     } | ||||
|     dom_nav.innerHTML = nav.join(''); | ||||
| })(); | ||||
|  | ||||
|  | ||||
| @@ -164,10 +158,7 @@ function copydom(src, dst, lv) { | ||||
|  | ||||
|  | ||||
| function md_plug_err(ex, js) { | ||||
|     var errbox = ebi('md_errbox'); | ||||
|     if (errbox) | ||||
|         errbox.parentNode.removeChild(errbox); | ||||
|  | ||||
|     qsr('#md_errbox'); | ||||
|     if (!ex) | ||||
|         return; | ||||
|  | ||||
| @@ -183,7 +174,7 @@ function md_plug_err(ex, js) { | ||||
|             o.textContent = lns[ln - 1]; | ||||
|         } | ||||
|     } | ||||
|     errbox = mknod('div'); | ||||
|     var errbox = mknod('div'); | ||||
|     errbox.setAttribute('id', 'md_errbox'); | ||||
|     errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5' | ||||
|     errbox.textContent = msg; | ||||
| @@ -259,7 +250,7 @@ function convert_markdown(md_text, dest_dom) { | ||||
|         Object.assign(marked_opts, ext[0]); | ||||
|  | ||||
|     try { | ||||
|         var md_html = marked(md_text, marked_opts); | ||||
|         var md_html = marked.parse(md_text, marked_opts); | ||||
|     } | ||||
|     catch (ex) { | ||||
|         if (ext) | ||||
| @@ -381,8 +372,7 @@ function convert_markdown(md_text, dest_dom) { | ||||
|  | ||||
|  | ||||
| function init_toc() { | ||||
|     var loader = ebi('ml'); | ||||
|     loader.parentNode.removeChild(loader); | ||||
|     qsr('#ml'); | ||||
|  | ||||
|     var anchors = [];  // list of toc entries, complex objects | ||||
|     var anchor = null; // current toc node | ||||
|   | ||||
| @@ -33,11 +33,11 @@ var md_opt = { | ||||
|  | ||||
| var lightswitch = (function () { | ||||
| 	var l = localStorage, | ||||
| 		drk = l.getItem('lightmode') != 1, | ||||
| 		drk = l.lightmode != 1, | ||||
| 		f = function (e) { | ||||
| if (e) drk = !drk; | ||||
| document.documentElement.setAttribute("class", drk? "dark":"light"); | ||||
| l.setItem('lightmode', drk? 0:1); | ||||
| l.lightmode = drk? 0:1; | ||||
| 		}; | ||||
| 	f(); | ||||
| 	return f; | ||||
|   | ||||
| @@ -65,8 +65,7 @@ var mde = (function () { | ||||
|     mde.codemirror.on("change", function () { | ||||
|         md_changed(mde); | ||||
|     }); | ||||
|     var loader = ebi('ml'); | ||||
|     loader.parentNode.removeChild(loader); | ||||
|     qsr('#ml'); | ||||
|     return mde; | ||||
| })(); | ||||
|  | ||||
|   | ||||
| @@ -3,7 +3,7 @@ | ||||
|  | ||||
| <head> | ||||
|     <meta charset="utf-8"> | ||||
|     <title>copyparty</title> | ||||
|     <title>{{ svcname }}</title> | ||||
|     <meta http-equiv="X-UA-Compatible" content="IE=edge"> | ||||
|     <meta name="viewport" content="width=device-width, initial-scale=0.8"> | ||||
|     <link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}"> | ||||
|   | ||||
| @@ -25,10 +25,24 @@ a { | ||||
| 	color: #047; | ||||
| 	background: #fff; | ||||
| 	text-decoration: none; | ||||
| 	border-bottom: 1px solid #aaa; | ||||
| 	border-bottom: 1px solid #8ab; | ||||
| 	border-radius: .2em; | ||||
| 	padding: .2em .8em; | ||||
| } | ||||
| a+a { | ||||
| 	margin-left: .5em; | ||||
| } | ||||
| .refresh, | ||||
| .logout { | ||||
| 	float: right; | ||||
| 	margin: -.2em 0 0 .5em; | ||||
| } | ||||
| .logout, | ||||
| .btns a, | ||||
| a.r { | ||||
| 	color: #c04; | ||||
| 	border-color: #c7a; | ||||
| } | ||||
| #repl { | ||||
| 	border: none; | ||||
| 	background: none; | ||||
| @@ -42,6 +56,7 @@ table { | ||||
| .vols th { | ||||
| 	padding: .3em .6em; | ||||
| 	text-align: left; | ||||
| 	white-space: nowrap; | ||||
| } | ||||
| .num { | ||||
| 	border-right: 1px solid #bbb; | ||||
| @@ -65,6 +80,12 @@ table { | ||||
| 	margin-top: .3em; | ||||
| 	text-align: right; | ||||
| } | ||||
| blockquote { | ||||
| 	margin: 0 0 1.6em .6em; | ||||
| 	padding: .7em 1em 0 1em; | ||||
| 	border-left: .3em solid rgba(128,128,128,0.5); | ||||
| 	border-radius: 0 0 0 .25em; | ||||
| } | ||||
|  | ||||
|  | ||||
| html.dark, | ||||
| @@ -81,6 +102,12 @@ html.dark a { | ||||
| 	background: #057; | ||||
| 	border-color: #37a; | ||||
| } | ||||
| html.dark .logout, | ||||
| html.dark .btns a, | ||||
| html.dark a.r { | ||||
| 	background: #804; | ||||
| 	border-color: #c28; | ||||
| } | ||||
| html.dark input { | ||||
| 	color: #fff; | ||||
| 	background: #626; | ||||
|   | ||||
| @@ -3,7 +3,7 @@ | ||||
|  | ||||
| <head> | ||||
|     <meta charset="utf-8"> | ||||
|     <title>copyparty</title> | ||||
|     <title>{{ svcname }}</title> | ||||
|     <meta http-equiv="X-UA-Compatible" content="IE=edge"> | ||||
|     <meta name="viewport" content="width=device-width, initial-scale=0.8"> | ||||
|     <link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}"> | ||||
| @@ -12,9 +12,12 @@ | ||||
|  | ||||
| <body> | ||||
|     <div id="wrap"> | ||||
|         <a href="/?h" class="refresh">refresh</a> | ||||
|  | ||||
|         {%- if this.uname == '*' %} | ||||
|             <p>howdy stranger   <small>(you're not logged in)</small></p> | ||||
|         {%- else %} | ||||
|             <a href="/?pw=x" class="logout">logout</a> | ||||
|             <p>welcome back, <strong>{{ this.uname }}</strong></p> | ||||
|         {%- endif %} | ||||
|  | ||||
| @@ -46,7 +49,8 @@ | ||||
|             </table> | ||||
|         </td></tr></table> | ||||
|         <div class="btns"> | ||||
|             <a href="/?stack">dump stack</a> | ||||
|             <a href="/?stack" tt="shows the state of all active threads">dump stack</a> | ||||
|             <a href="/?reload=cfg" tt="reload config files (accounts/volumes/volflags),$Nand rescan all e2ds volumes">reload cfg</a> | ||||
|         </div> | ||||
|         {%- endif %} | ||||
|  | ||||
| @@ -68,6 +72,18 @@ | ||||
|         </ul> | ||||
|         {%- endif %} | ||||
|  | ||||
|         <h1 id="cc">client config:</h1> | ||||
|         <ul> | ||||
|             {% if k304 %} | ||||
|             <li><a href="/?k304=n">disable k304</a> (currently enabled) | ||||
|             {%- else %} | ||||
|             <li><a href="/?k304=y" class="r">enable k304</a> (currently disabled) | ||||
|             {% endif %} | ||||
|             <blockquote>enabling this will disconnect your client on every HTTP 304, which can prevent some buggy browsers/proxies from getting stuck (suddenly not being able to load pages), <em>but</em> it will also make things slower in general</blockquote></li> | ||||
|              | ||||
|             <li><a href="/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li> | ||||
|         </ul> | ||||
|  | ||||
|         <h1>login for more:</h1> | ||||
|         <ul> | ||||
|             <form method="post" enctype="multipart/form-data" action="/{{ qvpath }}"> | ||||
| @@ -80,10 +96,10 @@ | ||||
| 	<a href="#" id="repl">π</a> | ||||
|     <script> | ||||
|  | ||||
| if (localStorage.getItem('lightmode') != 1) | ||||
|     document.documentElement.setAttribute("class", "dark"); | ||||
| document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark"); | ||||
|  | ||||
| </script> | ||||
| <script src="/.cpr/util.js?_={{ ts }}"></script> | ||||
| <script>tt.init();</script> | ||||
| </body> | ||||
| </html> | ||||
|   | ||||
| @@ -11,9 +11,9 @@ html { | ||||
| 	max-width: 34em; | ||||
| 	max-width: min(34em, 90%); | ||||
| 	max-width: min(34em, calc(100% - 7em)); | ||||
| 	background: #222; | ||||
| 	background: #333; | ||||
| 	border: 0 solid #777; | ||||
| 	box-shadow: 0 .2em .5em #222; | ||||
| 	box-shadow: 0 .2em .5em #111; | ||||
| 	border-radius: .4em; | ||||
| 	z-index: 9001; | ||||
| } | ||||
| @@ -79,7 +79,8 @@ html { | ||||
| } | ||||
| #toast.vis { | ||||
| 	right: 1.3em; | ||||
| 	transform: unset; | ||||
| 	transform: inherit; | ||||
| 	transform: initial; | ||||
| } | ||||
| #toast.vis #toastc { | ||||
| 	left: -2em; | ||||
| @@ -115,6 +116,20 @@ html { | ||||
| #toast.err #toastc { | ||||
| 	background: #d06; | ||||
| } | ||||
| #tth { | ||||
| 	color: #fff; | ||||
| 	background: #111; | ||||
| 	font-size: .9em; | ||||
| 	padding: 0 .26em; | ||||
| 	line-height: .97em; | ||||
| 	border-radius: 1em; | ||||
| 	position: absolute; | ||||
| 	display: none; | ||||
| } | ||||
| #tth.act { | ||||
| 	display: block; | ||||
| 	z-index: 9001; | ||||
| } | ||||
| #tt.b { | ||||
| 	padding: 0 2em; | ||||
| 	border-radius: .5em; | ||||
| @@ -132,7 +147,8 @@ html { | ||||
| } | ||||
| #modalc code, | ||||
| #tt code { | ||||
| 	background: #3c3c3c; | ||||
| 	color: #eee; | ||||
| 	background: #444; | ||||
| 	padding: .1em .3em; | ||||
| 	border-top: 1px solid #777; | ||||
| 	border-radius: .3em; | ||||
| @@ -157,6 +173,10 @@ html.light #tt code { | ||||
| html.light #tt em { | ||||
| 	color: #d38; | ||||
| } | ||||
| html.light #tth { | ||||
| 	color: #000; | ||||
| 	background: #fff; | ||||
| } | ||||
| #modal { | ||||
| 	position: fixed; | ||||
|     overflow: auto; | ||||
| @@ -258,6 +278,16 @@ html.light #pctl *:focus, | ||||
| html.light .btn:focus { | ||||
| 	box-shadow: 0 .1em .2em #037 inset; | ||||
| } | ||||
| input[type="text"]:focus, | ||||
| input:not([type]):focus, | ||||
| textarea:focus { | ||||
| 	box-shadow: 0 .1em .3em #fc0, 0 -.1em .3em #fc0; | ||||
| } | ||||
| html.light input[type="text"]:focus, | ||||
| html.light input:not([type]):focus, | ||||
| html.light textarea:focus { | ||||
| 	box-shadow: 0 .1em .3em #037, 0 -.1em .3em #037; | ||||
| } | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -324,6 +354,13 @@ html.light .btn:focus { | ||||
| } | ||||
| .mdo ul, | ||||
| .mdo ol { | ||||
| 	padding-left: 1em; | ||||
| } | ||||
| .mdo ul ul, | ||||
| .mdo ul ol, | ||||
| .mdo ol ul, | ||||
| .mdo ol ol { | ||||
| 	padding-left: 2em; | ||||
| 	border-left: .3em solid #ddd; | ||||
| } | ||||
| .mdo ul>li, | ||||
|   | ||||
| @@ -30,7 +30,10 @@ catch (ex) { | ||||
|     try { | ||||
|         up2k = up2k_init(false); | ||||
|     } | ||||
|     catch (ex) { } | ||||
|     catch (ex) { | ||||
|         console.log('up2k init failed:', ex); | ||||
|         toast.err(10, 'could not initialze up2k\n\n' + basenames(ex)); | ||||
|     } | ||||
| } | ||||
| treectl.onscroll(); | ||||
|  | ||||
| @@ -210,14 +213,14 @@ function U2pvis(act, btns) { | ||||
|     }; | ||||
|  | ||||
|     r.setat = function (nfile, blocktab) { | ||||
|         r.tab[nfile].cb = blocktab; | ||||
|         var fo = r.tab[nfile], bd = 0; | ||||
|  | ||||
|         var bd = 0; | ||||
|         for (var a = 0; a < blocktab.length; a++) | ||||
|             bd += blocktab[a]; | ||||
|  | ||||
|         r.tab[nfile].bd = bd; | ||||
|         r.tab[nfile].bd0 = bd; | ||||
|         fo.bd = bd; | ||||
|         fo.bd0 = bd; | ||||
|         fo.cb = blocktab; | ||||
|     }; | ||||
|  | ||||
|     r.perc = function (bd, bd0, sz, t0) { | ||||
| @@ -246,7 +249,7 @@ function U2pvis(act, btns) { | ||||
|  | ||||
|         obj.innerHTML = fo.hp; | ||||
|         obj.style.color = '#fff'; | ||||
|         obj.style.background = 'linear-gradient(90deg, #025, #06a ' + o1 + '%, #09d ' + o2 + '%, #333 ' + o3 + '%, #333 99%, #777)'; | ||||
|         obj.style.background = 'linear-gradient(90deg, #025, #06a ' + o1 + '%, #09d ' + o2 + '%, #222 ' + o3 + '%, #222 99%, #555)'; | ||||
|     }; | ||||
|  | ||||
|     r.prog = function (fobj, nchunk, cbd) { | ||||
| @@ -303,7 +306,7 @@ function U2pvis(act, btns) { | ||||
|  | ||||
|         obj.innerHTML = fo.hp; | ||||
|         obj.style.color = '#fff'; | ||||
|         obj.style.background = 'linear-gradient(90deg, #050, #270 ' + o1 + '%, #4b0 ' + o2 + '%, #333 ' + o3 + '%, #333 99%, #777)'; | ||||
|         obj.style.background = 'linear-gradient(90deg, #050, #270 ' + o1 + '%, #4b0 ' + o2 + '%, #222 ' + o3 + '%, #222 99%, #555)'; | ||||
|     }; | ||||
|  | ||||
|     r.move = function (nfile, newcat) { | ||||
| @@ -329,8 +332,7 @@ function U2pvis(act, btns) { | ||||
|                 r.head++; | ||||
|  | ||||
|             if (!bz_act) { | ||||
|                 var tr = ebi("f" + nfile); | ||||
|                 tr.parentNode.removeChild(tr); | ||||
|                 qsr("#f" + nfile); | ||||
|             } | ||||
|         } | ||||
|         else return; | ||||
| @@ -349,9 +351,7 @@ function U2pvis(act, btns) { | ||||
|         last = parseInt(last.getAttribute('id').slice(1)); | ||||
|  | ||||
|         while (r.head - first > r.wsz) { | ||||
|             var obj = ebi('f' + (first++)); | ||||
|             if (obj) | ||||
|                 obj.parentNode.removeChild(obj); | ||||
|             qsr('#f' + (first++)); | ||||
|         } | ||||
|         while (last - r.tail < r.wsz && last < r.tab.length - 2) { | ||||
|             var obj = ebi('f' + (++last)); | ||||
| @@ -477,14 +477,101 @@ function U2pvis(act, btns) { | ||||
| } | ||||
|  | ||||
|  | ||||
| function Donut(uc, st) { | ||||
|     var r = this, | ||||
|         el = null, | ||||
|         psvg = null, | ||||
|         o = 20 * 2 * Math.PI, | ||||
|         optab = QS('#ops a[data-dest="up2k"]'); | ||||
|  | ||||
|     optab.setAttribute('ico', optab.textContent); | ||||
|  | ||||
|     function svg(v) { | ||||
|         var ico = v !== undefined, | ||||
|             bg = ico ? '#333' : 'transparent', | ||||
|             fg = '#fff', | ||||
|             fsz = 52, | ||||
|             rc = 32; | ||||
|  | ||||
|         if (r.eta && (r.eta > 99 || (uc.fsearch ? st.time.hashing : st.time.uploading) < 20)) | ||||
|             r.eta = null; | ||||
|  | ||||
|         if (r.eta) { | ||||
|             if (r.eta < 10) { | ||||
|                 fg = '#fa0'; | ||||
|                 fsz = 72; | ||||
|             } | ||||
|             rc = 8; | ||||
|         } | ||||
|  | ||||
|         return ( | ||||
|             '<svg version="1.1" viewBox="0 0 64 64" xmlns="http://www.w3.org/2000/svg">\n' + | ||||
|             (ico ? '<rect width="100%" height="100%" rx="' + rc + '" fill="#333" />\n' : | ||||
|                 '<circle stroke="white" stroke-width="6" r="3" cx="32" cy="32" />\n') + | ||||
|             (r.eta ? ( | ||||
|                 '<text x="55%" y="58%" dominant-baseline="middle" text-anchor="middle"' + | ||||
|                 ' font-family="sans-serif" font-weight="bold" font-size="' + fsz + 'px"' + | ||||
|                 ' fill="' + fg + '">' + r.eta + '</text></svg>' | ||||
|             ) : ( | ||||
|                 '<circle class="donut" stroke="white" fill="' + bg + | ||||
|                 '" stroke-dashoffset="' + (ico ? v : o) + '" stroke-dasharray="' + o + ' ' + o + | ||||
|                 '" transform="rotate(270 32 32)" stroke-width="12" r="20" cx="32" cy="32" /></svg>' | ||||
|             )) | ||||
|         ); | ||||
|     } | ||||
|  | ||||
|     function pos() { | ||||
|         return uc.fsearch ? Math.max(st.bytes.hashed, st.bytes.finished) : st.bytes.finished; | ||||
|     } | ||||
|  | ||||
|     r.on = function (ya) { | ||||
|         r.fc = r.tc = 99; | ||||
|         r.eta = null; | ||||
|         r.base = pos(); | ||||
|         optab.innerHTML = ya ? svg() : optab.getAttribute('ico'); | ||||
|         el = QS('#ops a .donut'); | ||||
|         if (!ya) { | ||||
|             favico.upd(); | ||||
|             wintitle(); | ||||
|         } | ||||
|     }; | ||||
|     r.do = function () { | ||||
|         if (!el) | ||||
|             return; | ||||
|  | ||||
|         var t = st.bytes.total - r.base, | ||||
|             v = pos() - r.base, | ||||
|             ofs = el.style.strokeDashoffset = o - o * v / t; | ||||
|  | ||||
|         if (++r.tc >= 10) { | ||||
|             wintitle(f2f(v * 100 / t, 1) + '%, ' + r.eta + 's, ', true); | ||||
|             r.tc = 0; | ||||
|         } | ||||
|  | ||||
|         if (favico.txt) { | ||||
|             if (++r.fc < 10 && r.eta && r.eta > 99) | ||||
|                 return; | ||||
|  | ||||
|             var s = svg(ofs); | ||||
|             if (s == psvg || (r.eta === null && r.fc < 10)) | ||||
|                 return; | ||||
|  | ||||
|             favico.upd('', s); | ||||
|             psvg = s; | ||||
|             r.fc = 0; | ||||
|         } | ||||
|     }; | ||||
| } | ||||
|  | ||||
|  | ||||
| function fsearch_explain(n) { | ||||
|     if (n) | ||||
|         return toast.inf(60, 'your access to this folder is Read-Only\n\n' + (acct == '*' ? 'you are currently not logged in' : 'you are currently logged in as "' + acct + '"')); | ||||
|  | ||||
|     if (bcfg_get('fsearch', false)) | ||||
|         return toast.inf(60, 'you are currently in file-search mode\n\nswitch to upload-mode by clicking the green magnifying glass (next to the big yellow search button), and then refresh\n\nsorry'); | ||||
|         return toast.inf(60, 'you are currently in file-search mode\n\nswitch to upload-mode by clicking the green magnifying glass (next to the big yellow search button), and try uploading again\n\nsorry'); | ||||
|  | ||||
|     return toast.inf(60, 'refresh the page and try again, it should work now'); | ||||
|     return toast.inf(60, 'try again, it should work now'); | ||||
| } | ||||
|  | ||||
|  | ||||
| @@ -512,9 +599,13 @@ function up2k_init(subtle) { | ||||
|         // chrome<37 firefox<34 edge<12 opera<24 safari<7 | ||||
|         shame = 'your browser is impressively ancient'; | ||||
|  | ||||
|     var got_deps = false; | ||||
|     function got_deps() { | ||||
|         return subtle || window.asmCrypto || window.hashwasm; | ||||
|     } | ||||
|  | ||||
|     var loading_deps = false; | ||||
|     function init_deps() { | ||||
|         if (!got_deps && !subtle && !window.asmCrypto) { | ||||
|         if (!loading_deps && !got_deps()) { | ||||
|             var fn = 'sha512.' + sha_js + '.js'; | ||||
|             showmodal('<h1>loading ' + fn + '</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>'); | ||||
|             import_js('/.cpr/deps/' + fn, unmodal); | ||||
| @@ -525,7 +616,7 @@ function up2k_init(subtle) { | ||||
|                 ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance <span style="color:#' + | ||||
|                     (sha_js == 'ac' ? 'c84">(expecting 20' : '8a5">(but dont worry too much, expect 100') + ' MiB/s)</span>'; | ||||
|         } | ||||
|         got_deps = true; | ||||
|         loading_deps = true; | ||||
|     } | ||||
|  | ||||
|     if (perms.length && !has(perms, 'read') && has(perms, 'write')) | ||||
| @@ -578,13 +669,14 @@ function up2k_init(subtle) { | ||||
|  | ||||
|     bcfg_bind(uc, 'multitask', 'multitask', true, null, false); | ||||
|     bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false); | ||||
|     bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg, false); | ||||
|     bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg); | ||||
|     bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false); | ||||
|     bcfg_bind(uc, 'turbo', 'u2turbo', false, draw_turbo, false); | ||||
|     bcfg_bind(uc, 'datechk', 'u2tdate', true, null, false); | ||||
|  | ||||
|     var st = { | ||||
|         "files": [], | ||||
|         "seen": {}, | ||||
|         "todo": { | ||||
|             "head": [], | ||||
|             "hash": [], | ||||
| @@ -619,7 +711,8 @@ function up2k_init(subtle) { | ||||
|             }); | ||||
|     } | ||||
|  | ||||
|     var pvis = new U2pvis("bz", '#u2cards'); | ||||
|     var pvis = new U2pvis("bz", '#u2cards'), | ||||
|         donut = new Donut(uc, st); | ||||
|  | ||||
|     var bobslice = null; | ||||
|     if (window.File) | ||||
| @@ -642,7 +735,6 @@ function up2k_init(subtle) { | ||||
|         if (++nenters <= 0) | ||||
|             nenters = 1; | ||||
|  | ||||
|         //console.log(nenters, Date.now(), 'enter', this, e.target); | ||||
|         if (onover.bind(this)(e)) | ||||
|             return true; | ||||
|  | ||||
| @@ -664,12 +756,19 @@ function up2k_init(subtle) { | ||||
|         ebi('up_dz').setAttribute('err', mup || ''); | ||||
|         ebi('srch_dz').setAttribute('err', msr || ''); | ||||
|     } | ||||
|     function onoverb(e) { | ||||
|         // zones are alive; disable cuo2duo branch | ||||
|         document.body.ondragover = document.body.ondrop = null; | ||||
|         return onover.bind(this)(e); | ||||
|     } | ||||
|     function onover(e) { | ||||
|         try { | ||||
|             var ok = false, dt = e.dataTransfer.types; | ||||
|             for (var a = 0; a < dt.length; a++) | ||||
|                 if (dt[a] == 'Files') | ||||
|                     ok = true; | ||||
|                 else if (dt[a] == 'text/uri-list') | ||||
|                     return true; | ||||
|  | ||||
|             if (!ok) | ||||
|                 return true; | ||||
| @@ -695,17 +794,20 @@ function up2k_init(subtle) { | ||||
|             clmod(ebi('drops'), 'vis'); | ||||
|             clmod(ebi('up_dz'), 'hl'); | ||||
|             clmod(ebi('srch_dz'), 'hl'); | ||||
|             // cuo2duo: | ||||
|             document.body.ondragover = onover; | ||||
|             document.body.ondrop = gotfile; | ||||
|         } | ||||
|  | ||||
|         //console.log(nenters, Date.now(), 'leave', this, e && e.target); | ||||
|     } | ||||
|     document.body.ondragenter = ondrag; | ||||
|     document.body.ondragleave = offdrag; | ||||
|     document.body.ondragover = onover; | ||||
|     document.body.ondrop = gotfile; | ||||
|  | ||||
|     var drops = [ebi('up_dz'), ebi('srch_dz')]; | ||||
|     for (var a = 0; a < 2; a++) { | ||||
|         drops[a].ondragenter = ondrag; | ||||
|         drops[a].ondragover = onover; | ||||
|         drops[a].ondragover = onoverb; | ||||
|         drops[a].ondragleave = offdrag; | ||||
|         drops[a].ondrop = gotfile; | ||||
|     } | ||||
| @@ -715,7 +817,10 @@ function up2k_init(subtle) { | ||||
|         ev(e); | ||||
|         nenters = 0; | ||||
|         offdrag.bind(this)(); | ||||
|         var dz = (this && this.getAttribute('id')); | ||||
|         var dz = this && this.getAttribute('id'); | ||||
|         if (!dz && e && e.clientY) | ||||
|             // cuo2duo fallback | ||||
|             dz = e.clientY < window.innerHeight / 2 ? 'up_dz' : 'srch_dz'; | ||||
|  | ||||
|         var err = this.getAttribute('err'); | ||||
|         if (err) | ||||
| @@ -744,11 +849,14 @@ function up2k_init(subtle) { | ||||
|  | ||||
|         more_one_file(); | ||||
|         var bad_files = [], | ||||
|             nil_files = [], | ||||
|             good_files = [], | ||||
|             dirs = []; | ||||
|  | ||||
|         for (var a = 0; a < files.length; a++) { | ||||
|             var fobj = files[a]; | ||||
|             var fobj = files[a], | ||||
|                 dst = good_files; | ||||
|  | ||||
|             if (is_itemlist) { | ||||
|                 if (fobj.kind !== 'file') | ||||
|                     continue; | ||||
| @@ -765,16 +873,15 @@ function up2k_init(subtle) { | ||||
|             } | ||||
|             try { | ||||
|                 if (fobj.size < 1) | ||||
|                     throw 1; | ||||
|                     dst = nil_files; | ||||
|             } | ||||
|             catch (ex) { | ||||
|                 bad_files.push(fobj.name); | ||||
|                 continue; | ||||
|                 dst = bad_files; | ||||
|             } | ||||
|             good_files.push([fobj, fobj.name]); | ||||
|             dst.push([fobj, fobj.name]); | ||||
|         } | ||||
|         if (dirs) { | ||||
|             return read_dirs(null, [], dirs, good_files, bad_files); | ||||
|             return read_dirs(null, [], dirs, good_files, nil_files, bad_files); | ||||
|         } | ||||
|     } | ||||
|  | ||||
| @@ -788,7 +895,7 @@ function up2k_init(subtle) { | ||||
|     } | ||||
|  | ||||
|     var rd_missing_ref = []; | ||||
|     function read_dirs(rd, pf, dirs, good, bad, spins) { | ||||
|     function read_dirs(rd, pf, dirs, good, nil, bad, spins) { | ||||
|         spins = spins || 0; | ||||
|         if (++spins == 5) | ||||
|             rd_missing_ref = rd_flatten(pf, dirs); | ||||
| @@ -809,7 +916,7 @@ function up2k_init(subtle) { | ||||
|                     msg.push('<li>' + esc(missing[a]) + '</li>'); | ||||
|  | ||||
|                 return modal.alert(msg.join('') + '</ul>', function () { | ||||
|                     read_dirs(rd, [], [], good, bad, spins); | ||||
|                     read_dirs(rd, [], [], good, nil, bad, spins); | ||||
|                 }); | ||||
|             } | ||||
|             spins = 0; | ||||
| @@ -817,11 +924,11 @@ function up2k_init(subtle) { | ||||
|  | ||||
|         if (!dirs.length) { | ||||
|             if (!pf.length) | ||||
|                 return gotallfiles(good, bad); | ||||
|                 return gotallfiles(good, nil, bad); | ||||
|  | ||||
|             console.log("retry pf, " + pf.length); | ||||
|             setTimeout(function () { | ||||
|                 read_dirs(rd, pf, dirs, good, bad, spins); | ||||
|                 read_dirs(rd, pf, dirs, good, nil, bad, spins); | ||||
|             }, 50); | ||||
|             return; | ||||
|         } | ||||
| @@ -843,14 +950,15 @@ function up2k_init(subtle) { | ||||
|                     pf.push(name); | ||||
|                     dn.file(function (fobj) { | ||||
|                         apop(pf, name); | ||||
|                         var dst = good; | ||||
|                         try { | ||||
|                             if (fobj.size > 0) { | ||||
|                                 good.push([fobj, name]); | ||||
|                                 return; | ||||
|                             } | ||||
|                             if (fobj.size < 1) | ||||
|                                 dst = nil; | ||||
|                         } | ||||
|                         catch (ex) { } | ||||
|                         bad.push(name); | ||||
|                         catch (ex) { | ||||
|                             dst = bad; | ||||
|                         } | ||||
|                         dst.push([fobj, name]); | ||||
|                     }); | ||||
|                 } | ||||
|                 ngot += 1; | ||||
| @@ -859,23 +967,33 @@ function up2k_init(subtle) { | ||||
|                 dirs.shift(); | ||||
|                 rd = null; | ||||
|             } | ||||
|             return read_dirs(rd, pf, dirs, good, bad, spins); | ||||
|             return read_dirs(rd, pf, dirs, good, nil, bad, spins); | ||||
|         }); | ||||
|     } | ||||
|  | ||||
|     function gotallfiles(good_files, bad_files) { | ||||
|     function gotallfiles(good_files, nil_files, bad_files) { | ||||
|         var ntot = good_files.concat(nil_files, bad_files).length; | ||||
|         if (bad_files.length) { | ||||
|             var ntot = bad_files.length + good_files.length, | ||||
|                 msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, ntot); | ||||
|  | ||||
|             var msg = 'These {0} files (of {1} total) were skipped, possibly due to filesystem permissions:\n'.format(bad_files.length, ntot); | ||||
|             for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++) | ||||
|                 msg += '-- ' + bad_files[a] + '\n'; | ||||
|  | ||||
|             if (good_files.length - bad_files.length <= 1 && ANDROID) | ||||
|                 msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557'; | ||||
|                 msg += '-- ' + bad_files[a][1] + '\n'; | ||||
|  | ||||
|             msg += '\nMaybe it works better if you select just one file'; | ||||
|             return modal.alert(msg, function () { | ||||
|                 gotallfiles(good_files, []); | ||||
|                 gotallfiles(good_files, nil_files, []); | ||||
|             }); | ||||
|         } | ||||
|  | ||||
|         if (nil_files.length) { | ||||
|             var msg = 'These {0} files (of {1} total) are blank/empty; upload them anyways?\n'.format(nil_files.length, ntot); | ||||
|             for (var a = 0, aa = Math.min(20, nil_files.length); a < aa; a++) | ||||
|                 msg += '-- ' + nil_files[a][1] + '\n'; | ||||
|  | ||||
|             msg += '\nMaybe it works better if you select just one file'; | ||||
|             return modal.confirm(msg, function () { | ||||
|                 gotallfiles(good_files.concat(nil_files), [], []); | ||||
|             }, function () { | ||||
|                 gotallfiles(good_files, [], []); | ||||
|             }); | ||||
|         } | ||||
|  | ||||
| @@ -896,13 +1014,9 @@ function up2k_init(subtle) { | ||||
|     } | ||||
|  | ||||
|     function up_them(good_files) { | ||||
|         var seen = {}, | ||||
|             evpath = get_evpath(), | ||||
|         var evpath = get_evpath(), | ||||
|             draw_each = good_files.length < 50; | ||||
|  | ||||
|         for (var a = 0; a < st.files.length; a++) | ||||
|             seen[st.files[a].name + '\n' + st.files[a].size] = 1; | ||||
|  | ||||
|         for (var a = 0; a < good_files.length; a++) { | ||||
|             var fobj = good_files[a][0], | ||||
|                 name = good_files[a][1], | ||||
| @@ -921,32 +1035,40 @@ function up2k_init(subtle) { | ||||
|                 "t0": now, | ||||
|                 "fobj": fobj, | ||||
|                 "name": name, | ||||
|                 "size": fobj.size, | ||||
|                 "size": fobj.size || 0, | ||||
|                 "lmod": lmod / 1000, | ||||
|                 "purl": fdir, | ||||
|                 "done": false, | ||||
|                 "bytes_uploaded": 0, | ||||
|                 "hash": [] | ||||
|             }, | ||||
|                 key = entry.name + '\n' + entry.size; | ||||
|                 key = name + '\n' + entry.size + '\n' + lmod + '\n' + uc.fsearch; | ||||
|  | ||||
|             if (uc.fsearch) | ||||
|                 entry.srch = 1; | ||||
|  | ||||
|             if (seen[key]) | ||||
|                 continue; | ||||
|             try { | ||||
|                 if (st.seen[fdir][key]) | ||||
|                     continue; | ||||
|             } | ||||
|             catch (ex) { | ||||
|                 st.seen[fdir] = {}; | ||||
|             } | ||||
|  | ||||
|             seen[key] = 1; | ||||
|             st.seen[fdir][key] = 1; | ||||
|  | ||||
|             pvis.addfile([ | ||||
|                 uc.fsearch ? esc(entry.name) : linksplit( | ||||
|                     uricom_dec(entry.purl)[0] + entry.name).join(' '), | ||||
|                     entry.purl + uricom_enc(entry.name)).join(' '), | ||||
|                 '📐 hash', | ||||
|                 '' | ||||
|             ], fobj.size, draw_each); | ||||
|  | ||||
|             st.bytes.total += fobj.size; | ||||
|             st.files.push(entry); | ||||
|             if (uc.turbo) | ||||
|             if (!entry.size) | ||||
|                 push_t(st.todo.handshake, entry); | ||||
|             else if (uc.turbo) | ||||
|                 push_t(st.todo.head, entry); | ||||
|             else | ||||
|                 push_t(st.todo.hash, entry); | ||||
| @@ -966,23 +1088,7 @@ function up2k_init(subtle) { | ||||
|     } | ||||
|     more_one_file(); | ||||
|  | ||||
|     function u2cleanup(e) { | ||||
|         ev(e); | ||||
|         for (var a = 0; a < st.files.length; a++) { | ||||
|             var t = st.files[a]; | ||||
|             if (t.done && t.name) { | ||||
|                 var tr = ebi('f' + t.n); | ||||
|                 if (!tr) | ||||
|                     continue; | ||||
|  | ||||
|                 tr.parentNode.removeChild(tr); | ||||
|                 t.name = undefined; | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|     ebi('u2cleanup').onclick = u2cleanup; | ||||
|  | ||||
|     var etaref = 0, etaskip = 0, op_minh = 0; | ||||
|     var etaref = 0, etaskip = 0, utw_minh = 0; | ||||
|     function etafun() { | ||||
|         var nhash = st.busy.head.length + st.busy.hash.length + st.todo.head.length + st.todo.hash.length, | ||||
|             nsend = st.busy.upload.length + st.todo.upload.length, | ||||
| @@ -995,13 +1101,10 @@ function up2k_init(subtle) { | ||||
|  | ||||
|         //ebi('acc_info').innerHTML = humantime(st.time.busy) + ' ' + f2f(now / 1000, 1); | ||||
|  | ||||
|         var op = ebi('op_up2k'), | ||||
|             uff = ebi('u2footfoot'), | ||||
|             minh = QS('#op_up2k.act') ? Math.max(op_minh, uff.offsetTop + uff.offsetHeight - op.offsetTop + 32) : 0; | ||||
|  | ||||
|         if (minh > op_minh || !op_minh) { | ||||
|             op_minh = minh; | ||||
|             op.style.minHeight = op_minh + 'px'; | ||||
|         var minh = QS('#op_up2k.act') && st.is_busy ? Math.max(utw_minh, ebi('u2tab').offsetHeight + 32) : 0; | ||||
|         if (utw_minh < minh || !utw_minh) { | ||||
|             utw_minh = minh; | ||||
|             ebi('u2tabw').style.minHeight = utw_minh + 'px'; | ||||
|         } | ||||
|  | ||||
|         if (!nhash) | ||||
| @@ -1047,6 +1150,7 @@ function up2k_init(subtle) { | ||||
|                 continue; | ||||
|             } | ||||
|  | ||||
|             donut.eta = eta; | ||||
|             if (etaskip) | ||||
|                 continue; | ||||
|  | ||||
| @@ -1081,11 +1185,6 @@ function up2k_init(subtle) { | ||||
|             st.busy.handshake.length) | ||||
|             return false; | ||||
|  | ||||
|         if (st.busy.handshake.length) | ||||
|             for (var n = t.n - 1; n >= t.n - parallel_uploads && n >= 0; n--) | ||||
|                 if (st.files[n].t_uploading) | ||||
|                     return false; | ||||
|  | ||||
|         if ((uc.multitask ? 1 : 0) < | ||||
|             st.todo.upload.length + | ||||
|             st.busy.upload.length) | ||||
| @@ -1122,28 +1221,48 @@ function up2k_init(subtle) { | ||||
|             if (running) | ||||
|                 return; | ||||
|  | ||||
|             if (crashed) | ||||
|             if (crashed || !got_deps()) | ||||
|                 return defer(); | ||||
|  | ||||
|             running = true; | ||||
|             while (true) { | ||||
|                 var now = Date.now(), | ||||
|                     is_busy = 0 != | ||||
|                         st.todo.head.length + | ||||
|                         st.todo.hash.length + | ||||
|                         st.todo.handshake.length + | ||||
|                         st.todo.upload.length + | ||||
|                         st.busy.head.length + | ||||
|                         st.busy.hash.length + | ||||
|                         st.busy.handshake.length + | ||||
|                         st.busy.upload.length; | ||||
|                     oldest_active = Math.min(  // gzip take the wheel | ||||
|                         st.todo.head.length ? st.todo.head[0].n : st.files.length, | ||||
|                         st.todo.hash.length ? st.todo.hash[0].n : st.files.length, | ||||
|                         st.todo.upload.length ? st.todo.upload[0].nfile : st.files.length, | ||||
|                         st.todo.handshake.length ? st.todo.handshake[0].n : st.files.length, | ||||
|                         st.busy.head.length ? st.busy.head[0].n : st.files.length, | ||||
|                         st.busy.hash.length ? st.busy.hash[0].n : st.files.length, | ||||
|                         st.busy.upload.length ? st.busy.upload[0].nfile : st.files.length, | ||||
|                         st.busy.handshake.length ? st.busy.handshake[0].n : st.files.length), | ||||
|                     is_busy = oldest_active < st.files.length; | ||||
|  | ||||
|                 if (was_busy && !is_busy) { | ||||
|                     for (var a = 0; a < st.files.length; a++) { | ||||
|                         var t = st.files[a]; | ||||
|                         if (t.want_recheck) { | ||||
|                             t.rechecks++; | ||||
|                             t.want_recheck = false; | ||||
|                             push_t(st.todo.handshake, t); | ||||
|                         } | ||||
|                     } | ||||
|                     is_busy = st.todo.handshake.length; | ||||
|                     try { | ||||
|                         if (!is_busy && !uc.fsearch && !msel.getsel().length && (!mp.au || mp.au.paused)) | ||||
|                             treectl.goto(get_evpath()); | ||||
|                     } | ||||
|                     catch (ex) { } | ||||
|                 } | ||||
|  | ||||
|                 if (was_busy != is_busy) { | ||||
|                     was_busy = is_busy; | ||||
|                     st.is_busy = was_busy = is_busy; | ||||
|  | ||||
|                     window[(is_busy ? "add" : "remove") + | ||||
|                         "EventListener"]("beforeunload", warn_uploader_busy); | ||||
|  | ||||
|                     donut.on(is_busy); | ||||
|  | ||||
|                     if (!is_busy) { | ||||
|                         var k = uc.fsearch ? 'searches' : 'uploads', | ||||
|                             ks = uc.fsearch ? 'Search' : 'Upload', | ||||
| @@ -1165,13 +1284,17 @@ function up2k_init(subtle) { | ||||
|                             toast.err(t, '{0} {1}'.format(ks, tng)); | ||||
|  | ||||
|                         timer.rm(etafun); | ||||
|                         op_minh = 0; | ||||
|                         timer.rm(donut.do); | ||||
|                         utw_minh = 0; | ||||
|                     } | ||||
|                     else { | ||||
|                         timer.add(donut.do); | ||||
|                         timer.add(etafun, false); | ||||
|                         ebi('u2etas').style.textAlign = 'left'; | ||||
|                     } | ||||
|                     etafun(); | ||||
|                     if (pvis.act == 'bz') | ||||
|                         pvis.changecard('bz'); | ||||
|                 } | ||||
|  | ||||
|                 if (flag) { | ||||
| @@ -1214,7 +1337,8 @@ function up2k_init(subtle) { | ||||
|                 } | ||||
|  | ||||
|                 if (st.todo.head.length && | ||||
|                     st.busy.head.length < parallel_uploads) { | ||||
|                     st.busy.head.length < parallel_uploads && | ||||
|                     (!is_busy || st.todo.head[0].n - oldest_active < parallel_uploads * 2)) { | ||||
|                     exec_head(); | ||||
|                     mou_ikkai = true; | ||||
|                 } | ||||
| @@ -1313,7 +1437,6 @@ function up2k_init(subtle) { | ||||
|     function exec_hash() { | ||||
|         var t = st.todo.hash.shift(); | ||||
|         st.busy.hash.push(t); | ||||
|         t.bytes_uploaded = 0; | ||||
|  | ||||
|         var bpend = 0, | ||||
|             nchunk = 0, | ||||
| @@ -1362,7 +1485,8 @@ function up2k_init(subtle) { | ||||
|                     err.indexOf('NotFoundError') !== -1  // macos-firefox permissions | ||||
|                 ) { | ||||
|                     pvis.seth(t.n, 1, 'OS-error'); | ||||
|                     pvis.seth(t.n, 2, err); | ||||
|                     pvis.seth(t.n, 2, err + ' @ ' + car); | ||||
|                     console.log('OS-error', reader.error, '@', car); | ||||
|                     handled = true; | ||||
|                 } | ||||
|  | ||||
| @@ -1370,7 +1494,7 @@ function up2k_init(subtle) { | ||||
|                     pvis.move(t.n, 'ng'); | ||||
|                     apop(st.busy.hash, t); | ||||
|                     st.bytes.finished += t.size; | ||||
|                     return tasker(); | ||||
|                     return; | ||||
|                 } | ||||
|  | ||||
|                 toast.err(0, 'y o u   b r o k e    i t\nfile: ' + esc(t.name + '') + '\nerror: ' + err); | ||||
| @@ -1446,7 +1570,6 @@ function up2k_init(subtle) { | ||||
|             console.log('head onerror, retrying', t); | ||||
|             apop(st.busy.head, t); | ||||
|             st.todo.head.unshift(t); | ||||
|             tasker(); | ||||
|         }; | ||||
|         function orz(e) { | ||||
|             var ok = false; | ||||
| @@ -1468,6 +1591,7 @@ function up2k_init(subtle) { | ||||
|             } | ||||
|  | ||||
|             t.done = true; | ||||
|             t.fobj = null; | ||||
|             st.bytes.hashed += t.size; | ||||
|             st.bytes.finished += t.size; | ||||
|             pvis.move(t.n, 'bz'); | ||||
| @@ -1511,7 +1635,6 @@ function up2k_init(subtle) { | ||||
|             apop(st.busy.handshake, t); | ||||
|             st.todo.handshake.unshift(t); | ||||
|             t.keepalive = keepalive; | ||||
|             tasker(); | ||||
|         }; | ||||
|         function orz(e) { | ||||
|             if (t.t_busied != me) { | ||||
| @@ -1537,15 +1660,18 @@ function up2k_init(subtle) { | ||||
|                     } | ||||
|                     else { | ||||
|                         smsg = 'found'; | ||||
|                         var hit = response.hits[0], | ||||
|                             msg = linksplit(hit.rp).join(''), | ||||
|                             tr = unix2iso(hit.ts), | ||||
|                             tu = unix2iso(t.lmod), | ||||
|                             diff = parseInt(t.lmod) - parseInt(hit.ts), | ||||
|                             cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b', | ||||
|                             sdiff = '<span style="color:#' + cdiff + '">diff ' + diff; | ||||
|                         var msg = []; | ||||
|                         for (var a = 0, aa = Math.min(20, response.hits.length); a < aa; a++) { | ||||
|                             var hit = response.hits[a], | ||||
|                                 tr = unix2iso(hit.ts), | ||||
|                                 tu = unix2iso(t.lmod), | ||||
|                                 diff = parseInt(t.lmod) - parseInt(hit.ts), | ||||
|                                 cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b', | ||||
|                                 sdiff = '<span style="color:#' + cdiff + '">diff ' + diff; | ||||
|  | ||||
|                         msg += '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</span></span>'; | ||||
|                             msg.push(linksplit(hit.rp).join('') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>'); | ||||
|                         } | ||||
|                         msg = msg.join('<br />\n'); | ||||
|                     } | ||||
|                     pvis.seth(t.n, 2, msg); | ||||
|                     pvis.seth(t.n, 1, smsg); | ||||
| @@ -1553,6 +1679,7 @@ function up2k_init(subtle) { | ||||
|                     apop(st.busy.handshake, t); | ||||
|                     st.bytes.finished += t.size; | ||||
|                     t.done = true; | ||||
|                     t.fobj = null; | ||||
|                     tasker(); | ||||
|                     return; | ||||
|                 } | ||||
| @@ -1563,7 +1690,7 @@ function up2k_init(subtle) { | ||||
|                     console.log("server-rename [" + t.purl + "] [" + t.name + "] to [" + rsp_purl + "] [" + response.name + "]"); | ||||
|                     t.purl = rsp_purl; | ||||
|                     t.name = response.name; | ||||
|                     pvis.seth(t.n, 0, linksplit(uricom_dec(t.purl)[0] + t.name).join(' ')); | ||||
|                     pvis.seth(t.n, 0, linksplit(t.purl + uricom_enc(t.name)).join(' ')); | ||||
|                 } | ||||
|  | ||||
|                 var chunksize = get_chunksize(t.size), | ||||
| @@ -1619,6 +1746,7 @@ function up2k_init(subtle) { | ||||
|  | ||||
|                 if (done) { | ||||
|                     t.done = true; | ||||
|                     t.fobj = null; | ||||
|                     st.bytes.finished += t.size - t.bytes_uploaded; | ||||
|                     var spd1 = (t.size / ((t.t_hashed - t.t_hashing) / 1000.)) / (1024 * 1024.), | ||||
|                         spd2 = (t.size / ((t.t_uploaded - t.t_uploading) / 1000.)) / (1024 * 1024.); | ||||
| @@ -1653,13 +1781,19 @@ function up2k_init(subtle) { | ||||
|                 } | ||||
|  | ||||
|                 st.bytes.finished += t.size; | ||||
|                 if (rsp.indexOf('partial upload exists') !== -1 || | ||||
|                     rsp.indexOf('file already exists') !== -1) { | ||||
|                 var err_pend = rsp.indexOf('partial upload exists') + 1, | ||||
|                     err_dupe = rsp.indexOf('file already exists') + 1; | ||||
|  | ||||
|                 if (err_pend || err_dupe) { | ||||
|                     err = rsp; | ||||
|                     ofs = err.indexOf('\n/'); | ||||
|                     if (ofs !== -1) { | ||||
|                         err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' '); | ||||
|                     } | ||||
|                     if (!t.rechecks && err_pend) { | ||||
|                         t.rechecks = 0; | ||||
|                         t.want_recheck = true; | ||||
|                     } | ||||
|                 } | ||||
|                 if (err != "") { | ||||
|                     pvis.seth(t.n, 1, "ERROR"); | ||||
| @@ -1705,7 +1839,8 @@ function up2k_init(subtle) { | ||||
|         st.busy.upload.push(upt); | ||||
|  | ||||
|         var npart = upt.npart, | ||||
|             t = st.files[upt.nfile]; | ||||
|             t = st.files[upt.nfile], | ||||
|             tries = 0; | ||||
|  | ||||
|         if (!t.t_uploading) | ||||
|             t.t_uploading = Date.now(); | ||||
| @@ -1727,7 +1862,8 @@ function up2k_init(subtle) { | ||||
|                 st.bytes.uploaded += cdr - car; | ||||
|                 t.bytes_uploaded += cdr - car; | ||||
|             } | ||||
|             else if (txt.indexOf('already got that') !== -1) { | ||||
|             else if (txt.indexOf('already got that') + 1 || | ||||
|                 txt.indexOf('already being written') + 1) { | ||||
|                 console.log("ignoring dupe-segment error", t); | ||||
|             } | ||||
|             else { | ||||
| @@ -1735,6 +1871,9 @@ function up2k_init(subtle) { | ||||
|                     xhr.status, t.name) + (txt || "no further information")); | ||||
|                 return; | ||||
|             } | ||||
|             orz2(xhr); | ||||
|         } | ||||
|         function orz2(xhr) { | ||||
|             apop(st.busy.upload, upt); | ||||
|             apop(t.postlist, npart); | ||||
|             if (!t.postlist.length) { | ||||
| @@ -1756,8 +1895,11 @@ function up2k_init(subtle) { | ||||
|                 if (crashed) | ||||
|                     return; | ||||
|  | ||||
|                 console.log('chunkpit onerror, retrying', t); | ||||
|                 do_send(); | ||||
|                 if (!toast.visible) | ||||
|                     toast.warn(9.98, "failed to upload a chunk;\nprobably harmless, continuing\n\n" + t.name); | ||||
|  | ||||
|                 console.log('chunkpit onerror,', ++tries, t); | ||||
|                 orz2(xhr); | ||||
|             }; | ||||
|             xhr.open('POST', t.purl, true); | ||||
|             xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]); | ||||
| @@ -1782,16 +1924,28 @@ function up2k_init(subtle) { | ||||
|             wpx = window.innerWidth, | ||||
|             fpx = parseInt(getComputedStyle(bar)['font-size']), | ||||
|             wem = wpx * 1.0 / fpx, | ||||
|             wide = wem > 54, | ||||
|             parent = ebi(wide && has(perms, 'write') ? 'u2btn_cw' : 'u2btn_ct'), | ||||
|             write = has(perms, 'write'), | ||||
|             wide = write && wem > 54 ? 'w' : '', | ||||
|             parent = ebi(wide && write ? 'u2btn_cw' : 'u2btn_ct'), | ||||
|             btn = ebi('u2btn'); | ||||
|  | ||||
|         //console.log([wpx, fpx, wem]); | ||||
|         if (btn.parentNode !== parent) { | ||||
|             parent.appendChild(btn); | ||||
|             ebi('u2conf').setAttribute('class', wide ? 'has_btn' : ''); | ||||
|             ebi('u2cards').setAttribute('class', wide ? 'w' : ''); | ||||
|             ebi('u2etaw').setAttribute('class', wide ? 'w' : ''); | ||||
|             ebi('u2conf').setAttribute('class', wide); | ||||
|             ebi('u2cards').setAttribute('class', wide); | ||||
|             ebi('u2etaw').setAttribute('class', wide); | ||||
|         } | ||||
|  | ||||
|         wide = write && wem > 78 ? 'ww' : wide; | ||||
|         parent = ebi(wide == 'ww' && write ? 'u2c3w' : 'u2c3t'); | ||||
|         var its = [ebi('u2etaw'), ebi('u2cards')]; | ||||
|         if (its[0].parentNode !== parent) { | ||||
|             ebi('u2conf').setAttribute('class', wide); | ||||
|             for (var a = 0; a < 2; a++) { | ||||
|                 parent.appendChild(its[a]); | ||||
|                 its[a].setAttribute('class', wide); | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|     window.addEventListener('resize', onresize); | ||||
| @@ -1804,7 +1958,7 @@ function up2k_init(subtle) { | ||||
|         setTimeout(onresize, 500); | ||||
|     } | ||||
|  | ||||
|     var o = QSA('#u2conf *[tt]'); | ||||
|     var o = QSA('#u2conf .c *[tt]'); | ||||
|     for (var a = o.length - 1; a >= 0; a--) { | ||||
|         o[a].parentNode.getElementsByTagName('input')[0].setAttribute('tt', o[a].getAttribute('tt')); | ||||
|     } | ||||
| @@ -1885,7 +2039,7 @@ function up2k_init(subtle) { | ||||
|                 new_state = true; | ||||
|                 fixed = true; | ||||
|             } | ||||
|             if (!has(perms, 'read')) { | ||||
|             if (!has(perms, 'read') || !have_up2k_idx) { | ||||
|                 new_state = false; | ||||
|                 fixed = true; | ||||
|             } | ||||
| @@ -1920,8 +2074,8 @@ function up2k_init(subtle) { | ||||
|                 flag = up2k_flagbus(); | ||||
|             } | ||||
|             catch (ex) { | ||||
|                 toast.err(5, "not supported on your browser:\n" + ex); | ||||
|                 tgl_flag_en(); | ||||
|                 toast.err(5, "not supported on your browser:\n" + esc(basenames(ex))); | ||||
|                 bcfg_set('flag_en', false); | ||||
|             } | ||||
|         } | ||||
|         else if (!uc.flag_en && flag) { | ||||
| @@ -1960,7 +2114,7 @@ function up2k_init(subtle) { | ||||
|     if (parallel_uploads < 1) | ||||
|         bumpthread(1); | ||||
|  | ||||
|     return { "init_deps": init_deps, "set_fsearch": set_fsearch, "ui": pvis } | ||||
|     return { "init_deps": init_deps, "set_fsearch": set_fsearch, "ui": pvis, "st": st, "uc": uc } | ||||
| } | ||||
|  | ||||
|  | ||||
| @@ -1972,6 +2126,15 @@ function warn_uploader_busy(e) { | ||||
|  | ||||
|  | ||||
| tt.init(); | ||||
| favico.init(); | ||||
| ebi('ico1').onclick = function () { | ||||
|     var a = favico.txt == this.textContent; | ||||
|     swrite('icot', a ? 'c' : this.textContent); | ||||
|     swrite('icof', a ? null : '000'); | ||||
|     swrite('icob', a ? null : ''); | ||||
|     favico.init(); | ||||
| }; | ||||
|  | ||||
|  | ||||
| if (QS('#op_up2k.act')) | ||||
|     goto_up2k(); | ||||
|   | ||||
| @@ -7,8 +7,7 @@ if (!window['console']) | ||||
|  | ||||
|  | ||||
| var is_touch = 'ontouchstart' in window, | ||||
|     IPHONE = /iPhone|iPad|iPod/i.test(navigator.userAgent), | ||||
|     ANDROID = /android/i.test(navigator.userAgent), | ||||
|     IPHONE = is_touch && /iPhone|iPad|iPod/i.test(navigator.userAgent), | ||||
|     WINDOWS = navigator.platform ? navigator.platform == 'Win32' : /Windows/.test(navigator.userAgent); | ||||
|  | ||||
|  | ||||
| @@ -18,6 +17,15 @@ var ebi = document.getElementById.bind(document), | ||||
|     mknod = document.createElement.bind(document); | ||||
|  | ||||
|  | ||||
| function qsr(sel) { | ||||
|     var el = QS(sel); | ||||
|     if (el) | ||||
|         el.parentNode.removeChild(el); | ||||
|  | ||||
|     return el; | ||||
| } | ||||
|  | ||||
|  | ||||
| // error handler for mobile devices | ||||
| function esc(txt) { | ||||
|     return txt.replace(/[&"<>]/g, function (c) { | ||||
| @@ -29,18 +37,24 @@ function esc(txt) { | ||||
|         }[c]; | ||||
|     }); | ||||
| } | ||||
| window.onunhandledrejection = function (e) { | ||||
|     var err = e.reason; | ||||
|     try { | ||||
|         err += '\n' + e.reason.stack; | ||||
|     } | ||||
|     catch (e) { } | ||||
|     console.log("REJ: " + err); | ||||
|     try { | ||||
|         toast.warn(30, err); | ||||
|     } | ||||
|     catch (e) { } | ||||
| }; | ||||
| function basenames(txt) { | ||||
|     return (txt + '').replace(/https?:\/\/[^ \/]+\//g, '/').replace(/js\?_=[a-zA-Z]{4}/g, 'js'); | ||||
| } | ||||
| if ((document.location + '').indexOf(',rej,') + 1) | ||||
|     window.onunhandledrejection = function (e) { | ||||
|         var err = e.reason; | ||||
|         try { | ||||
|             err += '\n' + e.reason.stack; | ||||
|         } | ||||
|         catch (e) { } | ||||
|         err = basenames(err); | ||||
|         console.log("REJ: " + err); | ||||
|         try { | ||||
|             toast.warn(30, err); | ||||
|         } | ||||
|         catch (e) { } | ||||
|     }; | ||||
|  | ||||
| try { | ||||
|     console.hist = []; | ||||
|     var hook = function (t) { | ||||
| @@ -65,7 +79,7 @@ try { | ||||
| catch (ex) { | ||||
|     if (console.stdlog) | ||||
|         console.log = console.stdlog; | ||||
|     console.log(ex); | ||||
|     console.log('console capture failed', ex); | ||||
| } | ||||
| var crashed = false, ignexd = {}; | ||||
| function vis_exh(msg, url, lineNo, columnNo, error) { | ||||
| @@ -140,7 +154,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) { | ||||
|  | ||||
|             var s = mknod('style'); | ||||
|             s.innerHTML = ( | ||||
|                 '#exbox{background:#333;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%;overflow:auto;width:calc(100% - 2em)} ' + | ||||
|                 '#exbox{background:#222;color:#ddd;font-family:sans-serif;font-size:0.8em;padding:0 1em 1em 1em;z-index:80386;position:fixed;top:0;left:0;right:0;bottom:0;width:100%;height:100%;overflow:auto;width:calc(100% - 2em)} ' + | ||||
|                 '#exbox,#exbox *{line-height:1.5em;overflow-wrap:break-word} ' + | ||||
|                 '#exbox code{color:#bf7;background:#222;padding:.1em;margin:.2em;font-size:1.1em;font-family:monospace,monospace} ' + | ||||
|                 '#exbox a{text-decoration:underline;color:#fc0} ' + | ||||
| @@ -151,13 +165,12 @@ function vis_exh(msg, url, lineNo, columnNo, error) { | ||||
|             ); | ||||
|             document.head.appendChild(s); | ||||
|         } | ||||
|         exbox.innerHTML = html.join('\n').replace(/https?:\/\/[^ \/]+\//g, '/').replace(/js\?_=[a-zA-Z]{4}/g, 'js').replace(/<ghi>/, 'https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md'); | ||||
|         exbox.innerHTML = basenames(html.join('\n')).replace(/<ghi>/, 'https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md'); | ||||
|         exbox.style.display = 'block'; | ||||
|     } | ||||
|     catch (e) { | ||||
|         document.body.innerHTML = html.join('\n'); | ||||
|     } | ||||
|     throw 'fatal_err'; | ||||
| } | ||||
| function ignex(all) { | ||||
|     var o = ebi('exbox'); | ||||
| @@ -167,6 +180,7 @@ function ignex(all) { | ||||
|     if (!all) | ||||
|         window.onerror = vis_exh; | ||||
| } | ||||
| window.onerror = vis_exh; | ||||
|  | ||||
|  | ||||
| function noop() { } | ||||
| @@ -241,7 +255,9 @@ function import_js(url, cb) { | ||||
|     script.src = url; | ||||
|     script.onload = cb; | ||||
|     script.onerror = function () { | ||||
|         toast.err(0, 'Failed to load module:\n' + url); | ||||
|         var m = 'Failed to load module:\n' + url; | ||||
|         console.log(m); | ||||
|         toast.err(0, m); | ||||
|     }; | ||||
|     head.appendChild(script); | ||||
| } | ||||
| @@ -270,15 +286,19 @@ function crc32(str) { | ||||
|  | ||||
|  | ||||
| function clmod(el, cls, add) { | ||||
|     if (!el) | ||||
|         return false; | ||||
|  | ||||
|     if (el.classList) { | ||||
|         var have = el.classList.contains(cls); | ||||
|         if (add == 't') | ||||
|             add = !have; | ||||
|  | ||||
|         if (add != have) | ||||
|             el.classList[add ? 'add' : 'remove'](cls); | ||||
|         if (!add == !have) | ||||
|             return false; | ||||
|  | ||||
|         return; | ||||
|         el.classList[add ? 'add' : 'remove'](cls); | ||||
|         return true; | ||||
|     } | ||||
|  | ||||
|     var re = new RegExp('\\s*\\b' + cls + '\\s*\\b', 'g'), | ||||
| @@ -289,12 +309,18 @@ function clmod(el, cls, add) { | ||||
|  | ||||
|     var n2 = n1.replace(re, ' ') + (add ? ' ' + cls : ''); | ||||
|  | ||||
|     if (n1 != n2) | ||||
|         el.className = n2; | ||||
|     if (!n1 == !n2) | ||||
|         return false; | ||||
|  | ||||
|     el.className = n2; | ||||
|     return true; | ||||
| } | ||||
|  | ||||
|  | ||||
| function clgot(el, cls) { | ||||
|     if (!el) | ||||
|         return; | ||||
|  | ||||
|     if (el.classList) | ||||
|         return el.classList.contains(cls); | ||||
|  | ||||
| @@ -303,14 +329,45 @@ function clgot(el, cls) { | ||||
| } | ||||
|  | ||||
|  | ||||
| function showsort(tab) { | ||||
|     var v, vn, v1, v2, th = tab.tHead, | ||||
|         sopts = jread('fsort', [["href", 1, ""]]); | ||||
|  | ||||
|     th && (th = th.rows[0]) && (th = th.cells); | ||||
|  | ||||
|     for (var a = sopts.length - 1; a >= 0; a--) { | ||||
|         if (!sopts[a][0]) | ||||
|             continue; | ||||
|  | ||||
|         v2 = v1; | ||||
|         v1 = sopts[a]; | ||||
|     } | ||||
|  | ||||
|     v = [v1, v2]; | ||||
|     vn = [v1 ? v1[0] : '', v2 ? v2[0] : '']; | ||||
|  | ||||
|     var ga = QSA('#ghead a[s]'); | ||||
|     for (var a = 0; a < ga.length; a++) | ||||
|         ga[a].className = ''; | ||||
|  | ||||
|     for (var a = 0; a < th.length; a++) { | ||||
|         var n = vn.indexOf(th[a].getAttribute('name')), | ||||
|             cl = n < 0 ? ' ' : ' s' + n + (v[n][1] > 0 ? ' ' : 'r '); | ||||
|  | ||||
|         th[a].className = th[a].className.replace(/ *s[01]r? */, ' ') + cl; | ||||
|         if (n + 1) { | ||||
|             ga = QS('#ghead a[s="' + vn[n] + '"]'); | ||||
|             if (ga) | ||||
|                 ga.className = cl; | ||||
|         } | ||||
|     } | ||||
| } | ||||
| function sortTable(table, col, cb) { | ||||
|     var tb = table.tBodies[0], | ||||
|         th = table.tHead.rows[0].cells, | ||||
|         tr = Array.prototype.slice.call(tb.rows, 0), | ||||
|         i, reverse = th[col].className.indexOf('sort1') !== -1 ? -1 : 1; | ||||
|     for (var a = 0, thl = th.length; a < thl; a++) | ||||
|         th[a].className = th[a].className.replace(/ *sort-?1 */, " "); | ||||
|     th[col].className += ' sort' + reverse; | ||||
|         i, reverse = /s0[^r]/.exec(th[col].className + ' ') ? -1 : 1; | ||||
|  | ||||
|     var stype = th[col].getAttribute('sort'); | ||||
|     try { | ||||
|         var nrules = [], rules = jread("fsort", []); | ||||
| @@ -328,6 +385,7 @@ function sortTable(table, col, cb) { | ||||
|                 break; | ||||
|         } | ||||
|         jwrite("fsort", nrules); | ||||
|         try { showsort(table); } catch (ex) { } | ||||
|     } | ||||
|     catch (ex) { | ||||
|         console.log("failed to persist sort rules, resetting: " + ex); | ||||
| @@ -376,7 +434,7 @@ function makeSortable(table, cb) { | ||||
| } | ||||
|  | ||||
|  | ||||
| function linksplit(rp) { | ||||
| function linksplit(rp, id) { | ||||
|     var ret = [], | ||||
|         apath = '/', | ||||
|         q = null; | ||||
| @@ -400,19 +458,22 @@ function linksplit(rp) { | ||||
|             link = rp.slice(0, ofs + 1); | ||||
|             rp = rp.slice(ofs + 1); | ||||
|         } | ||||
|         var vlink = esc(link), | ||||
|             elink = uricom_enc(link); | ||||
|         var vlink = esc(uricom_dec(link)[0]); | ||||
|  | ||||
|         if (link.indexOf('/') !== -1) { | ||||
|             vlink = vlink.slice(0, -1) + '<span>/</span>'; | ||||
|             elink = elink.slice(0, -3) + '/'; | ||||
|         } | ||||
|  | ||||
|         if (!rp && q) | ||||
|             elink += q; | ||||
|         if (!rp) { | ||||
|             if (q) | ||||
|                 link += q; | ||||
|  | ||||
|         ret.push('<a href="' + apath + elink + '">' + vlink + '</a>'); | ||||
|         apath += elink; | ||||
|             if (id) | ||||
|                 link += '" id="' + id; | ||||
|         } | ||||
|  | ||||
|         ret.push('<a href="' + apath + link + '">' + vlink + '</a>'); | ||||
|         apath += link; | ||||
|     } | ||||
|     return ret; | ||||
| } | ||||
| @@ -577,14 +638,22 @@ function jcp(obj) { | ||||
|  | ||||
|  | ||||
| function sread(key) { | ||||
|     return localStorage.getItem(key); | ||||
|     try { | ||||
|         return localStorage.getItem(key); | ||||
|     } | ||||
|     catch (e) { | ||||
|         return null; | ||||
|     } | ||||
| } | ||||
|  | ||||
| function swrite(key, val) { | ||||
|     if (val === undefined || val === null) | ||||
|         localStorage.removeItem(key); | ||||
|     else | ||||
|         localStorage.setItem(key, val); | ||||
|     try { | ||||
|         if (val === undefined || val === null) | ||||
|             localStorage.removeItem(key); | ||||
|         else | ||||
|             localStorage.setItem(key, val); | ||||
|     } | ||||
|     catch (e) { } | ||||
| } | ||||
|  | ||||
| function jread(key, fb) { | ||||
| @@ -607,9 +676,9 @@ function icfg_get(name, defval) { | ||||
| } | ||||
|  | ||||
| function fcfg_get(name, defval) { | ||||
|     var o = ebi(name); | ||||
|     var o = ebi(name), | ||||
|         val = parseFloat(sread(name)); | ||||
|  | ||||
|     var val = parseFloat(sread(name)); | ||||
|     if (isNaN(val)) | ||||
|         return parseFloat(o ? o.value : defval); | ||||
|  | ||||
| @@ -619,6 +688,19 @@ function fcfg_get(name, defval) { | ||||
|     return val; | ||||
| } | ||||
|  | ||||
| function scfg_get(name, defval) { | ||||
|     var o = ebi(name), | ||||
|         val = sread(name); | ||||
|  | ||||
|     if (val === null) | ||||
|         val = defval; | ||||
|  | ||||
|     if (o) | ||||
|         o.value = val; | ||||
|  | ||||
|     return val; | ||||
| } | ||||
|  | ||||
| function bcfg_get(name, defval) { | ||||
|     var o = ebi(name); | ||||
|     if (!o) | ||||
| @@ -670,15 +752,41 @@ function bcfg_bind(obj, oname, cname, defval, cb, un_ev) { | ||||
|     return v; | ||||
| } | ||||
|  | ||||
| function scfg_bind(obj, oname, cname, defval, cb) { | ||||
|     var v = scfg_get(cname, defval), | ||||
|         el = ebi(cname); | ||||
|  | ||||
|     obj[oname] = v; | ||||
|     if (el) | ||||
|         el.oninput = function (e) { | ||||
|             swrite(cname, obj[oname] = this.value); | ||||
|             if (cb) | ||||
|                 cb(obj[oname]); | ||||
|         }; | ||||
|  | ||||
|     return v; | ||||
| } | ||||
|  | ||||
|  | ||||
| function hist_push(url) { | ||||
|     console.log("h-push " + url); | ||||
|     history.pushState(url, url, url); | ||||
|     if (window.history && history.pushState) | ||||
|         history.pushState(url, url, url); | ||||
| } | ||||
|  | ||||
| function hist_replace(url) { | ||||
|     console.log("h-repl " + url); | ||||
|     history.replaceState(url, url, url); | ||||
|     if (window.history && history.replaceState) | ||||
|         history.replaceState(url, url, url); | ||||
| } | ||||
|  | ||||
| function sethash(hv) { | ||||
|     if (window.history && history.replaceState) { | ||||
|         hist_replace(document.location.pathname + document.location.search + '#' + hv); | ||||
|     } | ||||
|     else { | ||||
|         document.location.hash = hv; | ||||
|     } | ||||
| } | ||||
|  | ||||
|  | ||||
| @@ -721,13 +829,18 @@ var timer = (function () { | ||||
| var tt = (function () { | ||||
|     var r = { | ||||
|         "tt": mknod("div"), | ||||
|         "th": mknod("div"), | ||||
|         "en": true, | ||||
|         "el": null, | ||||
|         "skip": false | ||||
|         "skip": false, | ||||
|         "lvis": 0 | ||||
|     }; | ||||
|  | ||||
|     r.th.innerHTML = '?'; | ||||
|     r.tt.setAttribute('id', 'tt'); | ||||
|     r.th.setAttribute('id', 'tth'); | ||||
|     document.body.appendChild(r.tt); | ||||
|     document.body.appendChild(r.th); | ||||
|  | ||||
|     var prev = null; | ||||
|     r.cshow = function () { | ||||
| @@ -737,11 +850,25 @@ var tt = (function () { | ||||
|         prev = this; | ||||
|     }; | ||||
|  | ||||
|     r.show = function () { | ||||
|         if (r.skip) { | ||||
|             r.skip = false; | ||||
|     var tev; | ||||
|     r.dshow = function (e) { | ||||
|         clearTimeout(tev); | ||||
|         if (!r.getmsg(this)) | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         if (Date.now() - r.lvis < 400) | ||||
|             return r.show.bind(this)(); | ||||
|  | ||||
|         tev = setTimeout(r.show.bind(this), 800); | ||||
|         if (is_touch) | ||||
|             return; | ||||
|  | ||||
|         this.addEventListener('mousemove', r.move); | ||||
|         clmod(r.th, 'act', 1); | ||||
|         r.move(e); | ||||
|     }; | ||||
|  | ||||
|     r.getmsg = function (el) { | ||||
|         if (QS('body.bbox-open')) | ||||
|             return; | ||||
|  | ||||
| @@ -749,7 +876,16 @@ var tt = (function () { | ||||
|         if (cfg !== null && cfg != '1') | ||||
|             return; | ||||
|  | ||||
|         var msg = this.getAttribute('tt'); | ||||
|         return el.getAttribute('tt'); | ||||
|     }; | ||||
|  | ||||
|     r.show = function () { | ||||
|         clearTimeout(tev); | ||||
|         if (r.skip) { | ||||
|             r.skip = false; | ||||
|             return; | ||||
|         } | ||||
|         var msg = r.getmsg(this); | ||||
|         if (!msg) | ||||
|             return; | ||||
|  | ||||
| @@ -763,6 +899,7 @@ var tt = (function () { | ||||
|         if (dir.indexOf('u') + 1) top = false; | ||||
|         if (dir.indexOf('d') + 1) top = true; | ||||
|  | ||||
|         clmod(r.th, 'act'); | ||||
|         clmod(r.tt, 'b', big); | ||||
|         r.tt.style.left = '0'; | ||||
|         r.tt.style.top = '0'; | ||||
| @@ -788,14 +925,27 @@ var tt = (function () { | ||||
|  | ||||
|     r.hide = function (e) { | ||||
|         ev(e); | ||||
|         clearTimeout(tev); | ||||
|         window.removeEventListener('scroll', r.hide); | ||||
|         clmod(r.tt, 'show'); | ||||
|  | ||||
|         clmod(r.tt, 'b'); | ||||
|         clmod(r.th, 'act'); | ||||
|         if (clmod(r.tt, 'show')) | ||||
|             r.lvis = Date.now(); | ||||
|  | ||||
|         if (r.el) | ||||
|             r.el.removeEventListener('mouseleave', r.hide); | ||||
|  | ||||
|         if (e && e.target) | ||||
|             e.target.removeEventListener('mousemove', r.move); | ||||
|     }; | ||||
|  | ||||
|     if (is_touch && IPHONE) { | ||||
|     r.move = function (e) { | ||||
|         r.th.style.left = (e.pageX + 12) + 'px'; | ||||
|         r.th.style.top = (e.pageY + 12) + 'px'; | ||||
|     }; | ||||
|  | ||||
|     if (IPHONE) { | ||||
|         var f1 = r.show, | ||||
|             f2 = r.hide, | ||||
|             q = []; | ||||
| @@ -821,30 +971,21 @@ var tt = (function () { | ||||
|  | ||||
|     r.att = function (ctr) { | ||||
|         var _cshow = r.en ? r.cshow : null, | ||||
|             _show = r.en ? r.show : null, | ||||
|             _dshow = r.en ? r.dshow : null, | ||||
|             _hide = r.en ? r.hide : null, | ||||
|             o = ctr.querySelectorAll('*[tt]'); | ||||
|  | ||||
|         for (var a = o.length - 1; a >= 0; a--) { | ||||
|             o[a].onfocus = _cshow; | ||||
|             o[a].onblur = _hide; | ||||
|             o[a].onmouseenter = _show; | ||||
|             o[a].onmouseenter = _dshow; | ||||
|             o[a].onmouseleave = _hide; | ||||
|         } | ||||
|         r.hide(); | ||||
|     } | ||||
|  | ||||
|     r.init = function () { | ||||
|         var ttb = ebi('tooltips'); | ||||
|         if (ttb) { | ||||
|             ttb.onclick = function (e) { | ||||
|                 ev(e); | ||||
|                 r.en = !r.en; | ||||
|                 bcfg_set('tooltips', r.en); | ||||
|                 r.init(); | ||||
|             }; | ||||
|             r.en = bcfg_get('tooltips', true) | ||||
|         } | ||||
|         bcfg_bind(r, 'en', 'tooltips', r.en, r.init); | ||||
|         r.att(document); | ||||
|     }; | ||||
|  | ||||
| @@ -907,6 +1048,9 @@ var toast = (function () { | ||||
|         if (sec) | ||||
|             te = setTimeout(r.hide, sec * 1000); | ||||
|  | ||||
|         if (txt.indexOf('<body>') + 1) | ||||
|             txt = txt.slice(0, txt.indexOf('<')) + ' [...]'; | ||||
|  | ||||
|         obj.innerHTML = '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>'; | ||||
|         obj.className = cl; | ||||
|         sec += obj.offsetWidth; | ||||
| @@ -1010,15 +1154,22 @@ var modal = (function () { | ||||
|     } | ||||
|  | ||||
|     function onkey(e) { | ||||
|         if (e.code == 'Enter') { | ||||
|             var a = ebi('modal-ng'); | ||||
|             if (a && document.activeElement == a) | ||||
|         var k = e.code, | ||||
|             eok = ebi('modal-ok'), | ||||
|             eng = ebi('modal-ng'), | ||||
|             ae = document.activeElement; | ||||
|  | ||||
|         if (k == 'Space' && ae && (ae === eok || ae === eng)) | ||||
|             k = 'Enter'; | ||||
|  | ||||
|         if (k == 'Enter') { | ||||
|             if (ae && ae == eng) | ||||
|                 return ng(); | ||||
|  | ||||
|             return ok(); | ||||
|         } | ||||
|  | ||||
|         if (e.code == 'Escape') | ||||
|         if (k == 'Escape') | ||||
|             return ng(); | ||||
|     } | ||||
|  | ||||
| @@ -1048,7 +1199,7 @@ var modal = (function () { | ||||
|     } | ||||
|     function _confirm(html, cok, cng, fun) { | ||||
|         cb_ok = cok; | ||||
|         cb_ng = cng === undefined ? cok : null; | ||||
|         cb_ng = cng === undefined ? cok : cng; | ||||
|         cb_up = fun; | ||||
|         html += '<div id="modalb">' + ok_cancel + '</div>'; | ||||
|         r.show(html); | ||||
| @@ -1095,6 +1246,7 @@ function repl_load() { | ||||
|         if (!ret.length) | ||||
|             ret = [ | ||||
|                 'var v=Object.keys(localStorage); v.sort(); JSON.stringify(v)', | ||||
|                 "for (var a of QSA('#files a[id]')) a.setAttribute('download','')", | ||||
|                 'console.hist.slice(-10).join("\\n")' | ||||
|             ]; | ||||
|  | ||||
| @@ -1164,3 +1316,57 @@ function repl(e) { | ||||
| } | ||||
| if (ebi('repl')) | ||||
|     ebi('repl').onclick = repl; | ||||
|  | ||||
|  | ||||
| var svg_decl = '<?xml version="1.0" encoding="UTF-8"?>\n'; | ||||
|  | ||||
|  | ||||
| var favico = (function () { | ||||
|     var r = {}; | ||||
|     r.en = true; | ||||
|     r.tag = null; | ||||
|  | ||||
|     function gx(txt) { | ||||
|         return (svg_decl + | ||||
|             '<svg version="1.1" viewBox="0 0 64 64" xmlns="http://www.w3.org/2000/svg">\n' + | ||||
|             (r.bg ? '<rect width="100%" height="100%" rx="16" fill="#' + r.bg + '" />\n' : '') + | ||||
|             '<text x="50%" y="55%" dominant-baseline="middle" text-anchor="middle"' + | ||||
|             ' font-family="sans-serif" font-weight="bold" font-size="64px"' + | ||||
|             ' fill="#' + r.fg + '">' + txt + '</text></svg>' | ||||
|         ); | ||||
|     } | ||||
|  | ||||
|     r.upd = function (txt, svg) { | ||||
|         if (!r.txt) | ||||
|             return; | ||||
|  | ||||
|         var b64; | ||||
|         try { | ||||
|             b64 = btoa(svg ? svg_decl + svg : gx(r.txt)); | ||||
|         } | ||||
|         catch (ex) { | ||||
|             b64 = encodeURIComponent(r.txt).replace(/%([0-9A-F]{2})/g, | ||||
|                 function x(m, v) { return String.fromCharCode('0x' + v); }); | ||||
|  | ||||
|             b64 = btoa(gx(unescape(encodeURIComponent(r.txt)))); | ||||
|         } | ||||
|  | ||||
|         if (!r.tag) { | ||||
|             r.tag = mknod('link'); | ||||
|             r.tag.rel = 'icon'; | ||||
|             document.head.appendChild(r.tag); | ||||
|         } | ||||
|         r.tag.href = 'data:image/svg+xml;base64,' + b64; | ||||
|     }; | ||||
|  | ||||
|     r.init = function () { | ||||
|         clearTimeout(r.to); | ||||
|         scfg_bind(r, 'txt', 'icot', '', r.upd); | ||||
|         scfg_bind(r, 'fg', 'icof', 'fc5', r.upd); | ||||
|         scfg_bind(r, 'bg', 'icob', '222', r.upd); | ||||
|         r.upd(); | ||||
|     }; | ||||
|  | ||||
|     r.to = setTimeout(r.init, 100); | ||||
|     return r; | ||||
| })(); | ||||
|   | ||||
| @@ -23,13 +23,22 @@ point `--css-browser` to one of these by URL: | ||||
|  | ||||
|  | ||||
|  | ||||
| # utilities | ||||
|  | ||||
| ## [`multisearch.html`](multisearch.html) | ||||
| * takes a list of filenames of youtube rips, grabs the youtube-id of each file, and does a search on the server for those | ||||
| * use it by putting it somewhere on the server and opening it as an html page | ||||
| * also serves as an extendable template for other specific search behaviors | ||||
|  | ||||
|  | ||||
|  | ||||
| # other stuff | ||||
|  | ||||
| ## [`rclone.md`](rclone.md) | ||||
| * notes on using rclone as a fuse client/server | ||||
|  | ||||
| ## [`example.conf`](example.conf) | ||||
| * example config file for `-c` (supports accounts, volumes, and volume-flags) | ||||
| * example config file for `-c` | ||||
|  | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -1,11 +1,11 @@ | ||||
| html { | ||||
|     background: #333 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed; | ||||
|     background: #222 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed; | ||||
| } | ||||
| #files th { | ||||
|     background: rgba(32, 32, 32, 0.9) !important; | ||||
| } | ||||
| #ops, | ||||
| #treeul, | ||||
| #tree, | ||||
| #files td { | ||||
|     background: rgba(32, 32, 32, 0.3) !important; | ||||
| } | ||||
| @@ -19,7 +19,7 @@ html.light #files th { | ||||
| } | ||||
| html.light .logue, | ||||
| html.light #ops, | ||||
| html.light #treeul, | ||||
| html.light #tree, | ||||
| html.light #files td { | ||||
|     background: rgba(248, 248, 248, 0.8) !important; | ||||
| } | ||||
|   | ||||
| @@ -1,3 +1,10 @@ | ||||
| # append some arguments to the commandline; | ||||
| # the first space in a line counts as a separator, | ||||
| # any additional spaces are part of the value | ||||
| -e2dsa | ||||
| -e2ts | ||||
| -i 127.0.0.1 | ||||
|  | ||||
| # create users: | ||||
| # u username:password | ||||
| u ed:123 | ||||
| @@ -24,7 +31,8 @@ rw ed | ||||
| r k | ||||
| rw ed | ||||
|  | ||||
| # this does the same thing: | ||||
| # this does the same thing, | ||||
| # and will cause an error on startup since /priv is already taken: | ||||
| ./priv | ||||
| /priv | ||||
| r ed k | ||||
|   | ||||
| @@ -9,7 +9,7 @@ | ||||
|  | ||||
|     #ops, #tree, #path, #wrap>h2:last-child,  /* main tabs and navigators (tree/breadcrumbs) */ | ||||
|  | ||||
|     #u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw),  /* most of the config options */ | ||||
|     #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw),  /* most of the config options */ | ||||
|  | ||||
|     #srch_dz, #srch_zd,  /* the filesearch dropzone */ | ||||
|  | ||||
| @@ -27,7 +27,7 @@ | ||||
|     #u2conf #u2btn, #u2btn {padding:1.5em 0} | ||||
|  | ||||
|     /* adjust the button area a bit */ | ||||
|     #u2conf.has_btn {width: 35em !important; margin: 5em auto} | ||||
|     #u2conf.w, #u2conf.ww {width: 35em !important; margin: 5em auto} | ||||
|  | ||||
|     /* a */ | ||||
|     #op_up2k {min-height: 0} | ||||
|   | ||||
							
								
								
									
										124
									
								
								docs/multisearch.html
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										124
									
								
								docs/multisearch.html
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,124 @@ | ||||
| <!DOCTYPE html><html lang="en"><head> | ||||
| 	<meta charset="utf-8"> | ||||
| 	<title>multisearch</title> | ||||
| 	<meta http-equiv="X-UA-Compatible" content="IE=edge"> | ||||
|     <style> | ||||
|  | ||||
| html, body { | ||||
|     margin: 0; | ||||
|     padding: 0; | ||||
|     color: #ddd; | ||||
|     background: #222; | ||||
|     font-family: sans-serif; | ||||
| } | ||||
| body { | ||||
|     padding: 1em; | ||||
| } | ||||
| a { | ||||
|     color: #fc5; | ||||
| } | ||||
| ul { | ||||
|     line-height: 1.5em; | ||||
| } | ||||
| code { | ||||
|     color: #fc5; | ||||
|     border: 1px solid #444; | ||||
|     padding: .1em .2em; | ||||
|     font-family: sans-serif, sans-serif; | ||||
| } | ||||
| #src { | ||||
|     display: block; | ||||
|     width: calc(100% - 1em); | ||||
|     padding: .5em; | ||||
|     margin: 0; | ||||
| } | ||||
| td { | ||||
|     padding-left: 1em; | ||||
| } | ||||
| .hit, | ||||
| .miss { | ||||
|     font-weight: bold; | ||||
|     padding-left: 0; | ||||
|     padding-top: 1em; | ||||
| } | ||||
| .hit {color: #af0;} | ||||
| .miss {color: #f0c;} | ||||
| .hit:before {content: '✅';} | ||||
| .miss:before {content: '❌';} | ||||
|  | ||||
| </style></head><body> | ||||
|     <ul> | ||||
|         <li>paste a list of filenames (youtube rips) below and hit search</li> | ||||
|         <li>it will grab the youtube-id from the filenames and search for each id</li> | ||||
|         <li>filenames must be like <code>-YTID.webm</code> (youtube-dl style) or <code>[YTID].webm</code> (ytdlp style)</li> | ||||
|     </ul> | ||||
|     <textarea id="src"></textarea> | ||||
|     <button id="go">search</button> | ||||
|     <div id="res"></div> | ||||
|     <script> | ||||
|  | ||||
| var ebi = document.getElementById.bind(document); | ||||
| function esc(txt) { | ||||
|     return txt.replace(/[&"<>]/g, function (c) { | ||||
|         return { | ||||
|             '&': '&', | ||||
|             '"': '"', | ||||
|             '<': '<', | ||||
|             '>': '>' | ||||
|         }[c]; | ||||
|     }); | ||||
| } | ||||
|  | ||||
| ebi('go').onclick = async function() { | ||||
|     var queries = []; | ||||
|     for (var ln of ebi('src').value.split(/\n/g)) { | ||||
|         // filter the list of input files, | ||||
|         // only keeping youtube videos, | ||||
|         // meaning the filename ends with either | ||||
|         //   [YOUTUBEID].EXTENSION or | ||||
|         //   -YOUTUBEID.EXTENSION | ||||
|         var m = /[[-]([0-9a-zA-Z_-]{11})\]?\.(mp4|webm|mkv)$/.exec(ln); | ||||
|         if (!m || !(m = m[1])) | ||||
|             continue; | ||||
|  | ||||
|         // create a search query for each line: name like *youtubeid* | ||||
|         queries.push([ln, `name like *${m}*`]); | ||||
|     } | ||||
|  | ||||
|     var a = 0, html = ['<table>'], hits = [], misses = []; | ||||
|     for (var [fn, q] of queries) { | ||||
|         var r = await fetch('/?srch', { | ||||
|             method: 'POST', | ||||
|             body: JSON.stringify({'q': q}) | ||||
|         }); | ||||
|         r = await r.json(); | ||||
|          | ||||
|         var cl, tab2; | ||||
|         if (r.hits.length) { | ||||
|             tab2 = hits; | ||||
|             cl = 'hit'; | ||||
|         } | ||||
|         else { | ||||
|             tab2 = misses; | ||||
|             cl = 'miss'; | ||||
|         } | ||||
|         var h = `<tr><td class="${cl}" colspan="9">${esc(fn)}</td></tr>`; | ||||
|         tab2.push(h); | ||||
|         html.push(h); | ||||
|         for (var h of r.hits) { | ||||
|             var link = `<a href="/${h.rp}">${esc(decodeURIComponent(h.rp))}</a>`; | ||||
|             html.push(`<tr><td>${h.sz}</td><td>${link}</td></tr>`); | ||||
|         } | ||||
|         ebi('res').innerHTML = `searching, ${++a} / ${queries.length} done, ${hits.length} hits, ${misses.length} miss`; | ||||
|     } | ||||
|     html.push('<tr><td><h1>hits:</h1></td></tr>'); | ||||
|     html = html.concat(hits); | ||||
|  | ||||
|     html.push('<tr><td><h1>miss:</h1></td></tr>'); | ||||
|     html = html.concat(misses); | ||||
|  | ||||
|     html.push('</table>'); | ||||
|     ebi('res').innerHTML = html.join('\n'); | ||||
| }; | ||||
|  | ||||
| </script></body></html> | ||||
| @@ -38,6 +38,13 @@ para() { for s in 1 2 3 4 5 6 7 8 12 16 24 32 48 64; do echo $s; for r in {1..4} | ||||
| avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} csz=$1;sum=0;nsmp=0} {sub(/\r$/,"")} /^[0-9]+$/ {pr($1);next} / MiB/ {sub(/ MiB.*/,"");sub(/.* /,"");sum+=$1;nsmp++} END {pr(0)}' "$1"; } | ||||
|  | ||||
|  | ||||
| ## | ||||
| ## time between first and last upload | ||||
|  | ||||
| python3 -um copyparty -nw -v srv::rw -i 127.0.0.1 2>&1 | tee log  | ||||
| cat log | awk '!/"purl"/{next} {s=$1;sub(/[^m]+m/,"");gsub(/:/," ");t=60*(60*$1+$2)+$3} !a{a=t;sa=s} {b=t;sb=s} END {print b-a,sa,sb}' | ||||
|  | ||||
|  | ||||
| ## | ||||
| ## bad filenames | ||||
|  | ||||
| @@ -73,6 +80,12 @@ shab64() { sp=$1; f="$2"; v=0; sz=$(stat -c%s "$f"); while true; do w=$((v+sp*10 | ||||
| command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (time wget http://127.0.0.1:3923/?ls -qO- | jq -C '.files[]|{sz:.sz,ta:.tags.artist,tb:.tags.".bpm"}|del(.[]|select(.==null))' | awk -F\" '/"/{t[$2]++} END {for (k in t){v=t[k];p=sprintf("%" (v+1) "s",v);gsub(/ /,"#",p);printf "\033[36m%s\033[33m%s   ",k,p}}') 2>&1 | awk -v ts=$t 'NR==1{t1=$0} NR==2{sub(/.*0m/,"");sub(/s$/,"");t2=$0;c=2; if(t2>0.3){c=3} if(t2>0.8){c=1} } END{sub(/[0-9]{6}$/,"",ts);printf "%s   \033[3%dm%s   %s\033[0m\n",ts,c,t2,t1}'; sleep 0.1 || break; done | ||||
|  | ||||
|  | ||||
| ## | ||||
| ## track an up2k upload and print all chunks in file-order | ||||
|  | ||||
| grep '"name": "2021-07-18 02-17-59.mkv"' fug.log | head -n 1 | sed -r 's/.*"hash": \[//; s/\].*//' | tr '"' '\n' | grep -E '^[a-zA-Z0-9_-]{44}$' | while IFS= read -r cid; do cat -n fug.log | grep -vF '"purl": "' | grep -- "$cid"; echo; done | stdbuf -oL tr '\t' ' ' | while IFS=' ' read -r ln _ _ _ _ _ ts ip port msg; do [ -z "$msg" ] && echo && continue; printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; read -r ln _ _ _ _ _ ts ip port msg < <(cat -n fug.log | tail -n +$((ln+1)) | grep -F "$ip $port" | head -n 1); printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; done | ||||
|  | ||||
|  | ||||
| ## | ||||
| ## js oneliners | ||||
|  | ||||
| @@ -162,7 +175,7 @@ brew install python@2 | ||||
| pip install virtualenv | ||||
|  | ||||
| # readme toc | ||||
| cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; };    /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/  .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md | ||||
| cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#|]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; };    /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/  .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc | ||||
|  | ||||
| # fix firefox phantom breakpoints, | ||||
| # suggestions from bugtracker, doesnt work (debugger is not attachable) | ||||
| @@ -178,8 +191,13 @@ about:config >> devtools.debugger.prefs-schema-version = -1 | ||||
| git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done                 | ||||
|  | ||||
| # download all sfx versions | ||||
| curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="copyparty $v $t.py"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done | ||||
| curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="$(printf '%s\n' "copyparty $v $t.py" | tr / -)"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done | ||||
|  | ||||
| # push to multiple git remotes | ||||
| git config -l | grep '^remote' | ||||
| git remote add all git@github.com:9001/copyparty.git | ||||
| git remote set-url --add --push all git@gitlab.com:9001/copyparty.git | ||||
| git remote set-url --add --push all git@github.com:9001/copyparty.git | ||||
|  | ||||
| ## | ||||
| ## http 206 | ||||
|   | ||||
| @@ -10,14 +10,41 @@ set -e | ||||
| #  (and those are usually linux so bash is good inaff) | ||||
| #   (but that said this even has macos support) | ||||
| # | ||||
| # bundle will look like: | ||||
| # -rwxr-xr-x  0 ed ed  183808 Nov 19 00:43 copyparty | ||||
| # -rw-r--r--  0 ed ed  491318 Nov 19 00:40 copyparty-extras/copyparty-0.5.4.tar.gz | ||||
| # -rwxr-xr-x  0 ed ed   30254 Nov 17 23:58 copyparty-extras/copyparty-fuse.py | ||||
| # -rwxr-xr-x  0 ed ed  481403 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.sh | ||||
| # -rwxr-xr-x  0 ed ed  506043 Nov 19 00:40 copyparty-extras/sfx-full/copyparty-sfx.py | ||||
| # -rwxr-xr-x  0 ed ed  167699 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.sh | ||||
| # -rwxr-xr-x  0 ed ed  183808 Nov 19 00:43 copyparty-extras/sfx-lite/copyparty-sfx.py | ||||
| # output summary (filesizes and contents): | ||||
| # | ||||
| # 535672  copyparty-extras/sfx-full/copyparty-sfx.sh | ||||
| # 550760  copyparty-extras/sfx-full/copyparty-sfx.py | ||||
| #           `- original unmodified sfx from github | ||||
| # | ||||
| # 572923  copyparty-extras/sfx-full/copyparty-sfx-gz.py | ||||
| #           `- unmodified but recompressed from bzip2 to gzip | ||||
| # | ||||
| # 341792  copyparty-extras/sfx-ent/copyparty-sfx.sh | ||||
| # 353975  copyparty-extras/sfx-ent/copyparty-sfx.py | ||||
| # 376934  copyparty-extras/sfx-ent/copyparty-sfx-gz.py | ||||
| #           `- removed iOS ogg/opus/vorbis audio decoder, | ||||
| #              removed the audio tray mouse cursor, | ||||
| #              "enterprise edition" | ||||
| # | ||||
| # 259288  copyparty-extras/sfx-lite/copyparty-sfx.sh | ||||
| # 270004  copyparty-extras/sfx-lite/copyparty-sfx.py | ||||
| # 293159  copyparty-extras/sfx-lite/copyparty-sfx-gz.py | ||||
| #           `- also removed the codemirror markdown editor | ||||
| #              and the text-viewer syntax hilighting, | ||||
| #              only essential features remaining | ||||
| # | ||||
| # 646297  copyparty-extras/copyparty-1.0.14.tar.gz | ||||
| #   4823  copyparty-extras/copyparty-repack.sh | ||||
| #           `- source files from github | ||||
| # | ||||
| #  23663  copyparty-extras/up2k.py | ||||
| #           `- standalone utility to upload or search for files | ||||
| # | ||||
| #  32280  copyparty-extras/copyparty-fuse.py | ||||
| #           `- standalone to mount a URL as a local read-only filesystem | ||||
| # | ||||
| # 270004  copyparty | ||||
| #           `- minimal binary, same as sfx-lite/copyparty-sfx.py | ||||
|  | ||||
|  | ||||
| command -v gnutar && tar() { gnutar "$@"; } | ||||
| @@ -54,6 +81,7 @@ cache="$od/.copyparty-repack.cache" | ||||
| 		# fallback to awk (sorry) | ||||
| 		awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}' | ||||
| 	) | | ||||
| 	grep -E '(sfx\.(sh|py)|tar\.gz)$' | | ||||
| 	tee /dev/stderr | | ||||
| 	tr -d '\r' | tr '\n' '\0' | | ||||
| 	xargs -0 bash -c 'dl_files "$@"' _ | ||||
| @@ -64,7 +92,7 @@ cache="$od/.copyparty-repack.cache" | ||||
|  | ||||
| # move src into copyparty-extras/, | ||||
| # move sfx into copyparty-extras/sfx-full/ | ||||
| mkdir -p copyparty-extras/sfx-{full,lite} | ||||
| mkdir -p copyparty-extras/sfx-{full,ent,lite} | ||||
| mv copyparty-sfx.* copyparty-extras/sfx-full/ | ||||
| mv copyparty-*.tar.gz copyparty-extras/ | ||||
|  | ||||
| @@ -112,14 +140,17 @@ repack() { | ||||
| } | ||||
|  | ||||
| repack sfx-full "re gz no-sh" | ||||
| repack sfx-lite "re no-ogv no-cm" | ||||
| repack sfx-lite "re no-ogv no-cm gz no-sh" | ||||
| repack sfx-ent  "re no-dd" | ||||
| repack sfx-ent  "re no-dd gz no-sh" | ||||
| repack sfx-lite "re no-dd no-cm no-hl" | ||||
| repack sfx-lite "re no-dd no-cm no-hl gz no-sh" | ||||
|  | ||||
|  | ||||
| # move fuse client into copyparty-extras/, | ||||
| # move fuse and up2k clients into copyparty-extras/, | ||||
| # copy lite-sfx.py to ./copyparty, | ||||
| # delete extracted source code | ||||
| ( cd copyparty-extras/ | ||||
| mv copyparty-*/bin/up2k.py . | ||||
| mv copyparty-*/bin/copyparty-fuse.py . | ||||
| cp -pv sfx-lite/copyparty-sfx.py ../copyparty | ||||
| rm -rf copyparty-{0..9}*.*.*{0..9} | ||||
|   | ||||
| @@ -1,11 +1,10 @@ | ||||
| FROM    alpine:3.14 | ||||
| FROM    alpine:3.15 | ||||
| WORKDIR /z | ||||
| ENV     ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \ | ||||
|         ver_hashwasm=4.9.0 \ | ||||
|         ver_marked=3.0.4 \ | ||||
|         ver_ogvjs=1.8.4 \ | ||||
|         ver_marked=4.0.6 \ | ||||
|         ver_mde=2.15.0 \ | ||||
|         ver_codemirror=5.62.3 \ | ||||
|         ver_codemirror=5.64.0 \ | ||||
|         ver_fontawesome=5.13.0 \ | ||||
|         ver_zopfli=1.0.3 | ||||
|  | ||||
| @@ -15,7 +14,6 @@ ENV     ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \ | ||||
| RUN     mkdir -p /z/dist/no-pk \ | ||||
|         && wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \ | ||||
|         && apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \ | ||||
|         && wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \ | ||||
|         && wget https://github.com/openpgpjs/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \ | ||||
|         && wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \ | ||||
|         && wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \ | ||||
| @@ -23,7 +21,6 @@ RUN     mkdir -p /z/dist/no-pk \ | ||||
|         && wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \ | ||||
|         && wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \ | ||||
|         && wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \ | ||||
|         && unzip ogvjs.zip \ | ||||
|         && (mkdir hash-wasm \ | ||||
|             && cd hash-wasm \ | ||||
|             && unzip ../hash-wasm.zip) \ | ||||
| @@ -45,6 +42,12 @@ RUN     mkdir -p /z/dist/no-pk \ | ||||
|         && tar -xf zopfli.tgz | ||||
|  | ||||
|  | ||||
| # todo | ||||
| # https://cdn.jsdelivr.net/gh/highlightjs/cdn-release/build/highlight.min.js | ||||
| # https://cdn.jsdelivr.net/gh/highlightjs/cdn-release/build/styles/default.min.css | ||||
| # https://prismjs.com/download.html#themes=prism-funky&languages=markup+css+clike+javascript+autohotkey+bash+basic+batch+c+csharp+cpp+cmake+diff+docker+go+ini+java+json+kotlin+latex+less+lisp+lua+makefile+objectivec+perl+powershell+python+r+jsx+ruby+rust+sass+scss+sql+swift+systemd+toml+typescript+vbnet+verilog+vhdl+yaml&plugins=line-highlight+line-numbers+autolinker | ||||
|  | ||||
|  | ||||
| # build fonttools (which needs zopfli) | ||||
| RUN     tar -xf zopfli.tgz \ | ||||
|         && cd zopfli* \ | ||||
| @@ -71,21 +74,6 @@ RUN     cd hash-wasm \ | ||||
|         && mv sha512.umd.min.js /z/dist/sha512.hw.js | ||||
|  | ||||
|  | ||||
| # build ogvjs | ||||
| RUN     cd ogvjs-$ver_ogvjs \ | ||||
|         && cp -pv \ | ||||
|             ogv-worker-audio.js \ | ||||
|             ogv-demuxer-ogg-wasm.js \ | ||||
|             ogv-demuxer-ogg-wasm.wasm \ | ||||
|             ogv-decoder-audio-opus-wasm.js \ | ||||
|             ogv-decoder-audio-opus-wasm.wasm \ | ||||
|             ogv-decoder-audio-vorbis-wasm.js \ | ||||
|             ogv-decoder-audio-vorbis-wasm.wasm \ | ||||
|             /z/dist \ | ||||
|         && cp -pv \ | ||||
|             ogv-es2017.js /z/dist/ogv.js | ||||
|  | ||||
|  | ||||
| # build marked | ||||
| COPY    marked.patch /z/ | ||||
| COPY    marked-ln.patch /z/ | ||||
| @@ -94,7 +82,6 @@ RUN     cd marked-$ver_marked \ | ||||
|         && patch -p1 < /z/marked.patch \ | ||||
|         && npm run build \ | ||||
|         && cp -pv marked.min.js /z/dist/marked.js \ | ||||
|         && cp -pv lib/marked.js /z/dist/marked.full.js \ | ||||
|         && mkdir -p /z/nodepkgs \ | ||||
|         && ln -s $(pwd) /z/nodepkgs/marked | ||||
| #        && npm run test \ | ||||
| @@ -110,8 +97,10 @@ RUN     cd CodeMirror-$ver_codemirror \ | ||||
|  | ||||
|  | ||||
| # build easymde | ||||
| COPY    easymde-marked6.patch /z/ | ||||
| COPY    easymde.patch /z/ | ||||
| RUN     cd easy-markdown-editor-$ver_mde \ | ||||
|         && patch -p1 < /z/easymde-marked6.patch \ | ||||
|         && patch -p1 < /z/easymde.patch \ | ||||
|         && sed -ri 's`https://registry.npmjs.org/marked/-/marked-[0-9\.]+.tgz`file:/z/nodepkgs/marked`' package-lock.json \ | ||||
|         && sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \ | ||||
|   | ||||
							
								
								
									
										12
									
								
								scripts/deps-docker/easymde-marked6.patch
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								scripts/deps-docker/easymde-marked6.patch
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,12 @@ | ||||
| diff --git a/src/js/easymde.js b/src/js/easymde.js | ||||
| --- a/src/js/easymde.js | ||||
| +++ b/src/js/easymde.js | ||||
| @@ -1962,7 +1962,7 @@ EasyMDE.prototype.markdown = function (text) { | ||||
|          marked.setOptions(markedOptions); | ||||
|   | ||||
|          // Convert the markdown to HTML | ||||
| -        var htmlText = marked(text); | ||||
| +        var htmlText = marked.parse(text); | ||||
|   | ||||
|          // Sanitize HTML | ||||
|          if (this.options.renderingConfig && typeof this.options.renderingConfig.sanitizerFunction === 'function') { | ||||
| @@ -1,15 +1,15 @@ | ||||
| diff --git a/src/Lexer.js b/src/Lexer.js | ||||
| adds linetracking to marked.js v3.0.4; | ||||
| adds linetracking to marked.js v4.0.6; | ||||
| add data-ln="%d" to most tags, %d is the source markdown line | ||||
| --- a/src/Lexer.js | ||||
| +++ b/src/Lexer.js | ||||
| @@ -50,4 +50,5 @@ function mangle(text) { | ||||
|  module.exports = class Lexer { | ||||
|  export class Lexer { | ||||
|    constructor(options) { | ||||
| +    this.ln = 1;  // like most editors, start couting from 1 | ||||
|      this.tokens = []; | ||||
|      this.tokens.links = Object.create(null); | ||||
| @@ -127,4 +128,15 @@ module.exports = class Lexer { | ||||
| @@ -127,4 +128,15 @@ export class Lexer { | ||||
|    } | ||||
|   | ||||
| +  set_ln(token, ln = this.ln) { | ||||
| @@ -25,7 +25,7 @@ add data-ln="%d" to most tags, %d is the source markdown line | ||||
| + | ||||
|    /** | ||||
|     * Lexing | ||||
| @@ -134,7 +146,11 @@ module.exports = class Lexer { | ||||
| @@ -134,7 +146,11 @@ export class Lexer { | ||||
|        src = src.replace(/^ +$/gm, ''); | ||||
|      } | ||||
| -    let token, lastToken, cutSrc, lastParagraphClipped; | ||||
| @@ -38,105 +38,105 @@ add data-ln="%d" to most tags, %d is the source markdown line | ||||
| + | ||||
|        if (this.options.extensions | ||||
|          && this.options.extensions.block | ||||
| @@ -142,4 +158,5 @@ module.exports = class Lexer { | ||||
| @@ -142,4 +158,5 @@ export class Lexer { | ||||
|            if (token = extTokenizer.call({ lexer: this }, src, tokens)) { | ||||
|              src = src.substring(token.raw.length); | ||||
| +            this.set_ln(token, ln); | ||||
|              tokens.push(token); | ||||
|              return true; | ||||
| @@ -153,4 +170,5 @@ module.exports = class Lexer { | ||||
| @@ -153,4 +170,5 @@ export class Lexer { | ||||
|        if (token = this.tokenizer.space(src)) { | ||||
|          src = src.substring(token.raw.length); | ||||
| +        this.set_ln(token, ln); // is \n if not type | ||||
|          if (token.type) { | ||||
|            tokens.push(token); | ||||
| @@ -162,4 +180,5 @@ module.exports = class Lexer { | ||||
| @@ -162,4 +180,5 @@ export class Lexer { | ||||
|        if (token = this.tokenizer.code(src)) { | ||||
|          src = src.substring(token.raw.length); | ||||
| +        this.set_ln(token, ln); | ||||
|          lastToken = tokens[tokens.length - 1]; | ||||
|          // An indented code block cannot interrupt a paragraph. | ||||
| @@ -177,4 +196,5 @@ module.exports = class Lexer { | ||||
| @@ -177,4 +196,5 @@ export class Lexer { | ||||
|        if (token = this.tokenizer.fences(src)) { | ||||
|          src = src.substring(token.raw.length); | ||||
| +        this.set_ln(token, ln); | ||||
|          tokens.push(token); | ||||
|          continue; | ||||
| @@ -184,4 +204,5 @@ module.exports = class Lexer { | ||||
| @@ -184,4 +204,5 @@ export class Lexer { | ||||
|        if (token = this.tokenizer.heading(src)) { | ||||
|          src = src.substring(token.raw.length); | ||||
| +        this.set_ln(token, ln); | ||||
|          tokens.push(token); | ||||
|          continue; | ||||
| @@ -191,4 +212,5 @@ module.exports = class Lexer { | ||||
| @@ -191,4 +212,5 @@ export class Lexer { | ||||
|        if (token = this.tokenizer.hr(src)) { | ||||
|          src = src.substring(token.raw.length); | ||||
| +        this.set_ln(token, ln); | ||||
|          tokens.push(token); | ||||
|          continue; | ||||
| @@ -198,4 +220,5 @@ module.exports = class Lexer { | ||||
| @@ -198,4 +220,5 @@ export class Lexer { | ||||
|        if (token = this.tokenizer.blockquote(src)) { | ||||
|          src = src.substring(token.raw.length); | ||||
| +        this.set_ln(token, ln); | ||||
|          tokens.push(token); | ||||
|          continue; | ||||
| @@ -205,4 +228,5 @@ module.exports = class Lexer { | ||||
| @@ -205,4 +228,5 @@ export class Lexer { | ||||
|        if (token = this.tokenizer.list(src)) { | ||||
|          src = src.substring(token.raw.length); | ||||
| +        this.set_ln(token, ln); | ||||
|          tokens.push(token); | ||||
|          continue; | ||||
| @@ -212,4 +236,5 @@ module.exports = class Lexer { | ||||
| @@ -212,4 +236,5 @@ export class Lexer { | ||||
|        if (token = this.tokenizer.html(src)) { | ||||
|          src = src.substring(token.raw.length); | ||||
| +        this.set_ln(token, ln); | ||||
|          tokens.push(token); | ||||
|          continue; | ||||
| @@ -219,4 +244,5 @@ module.exports = class Lexer { | ||||
| @@ -219,4 +244,5 @@ export class Lexer { | ||||
|        if (token = this.tokenizer.def(src)) { | ||||
|          src = src.substring(token.raw.length); | ||||
| +        this.set_ln(token, ln); | ||||
|          lastToken = tokens[tokens.length - 1]; | ||||
|          if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) { | ||||
| @@ -236,4 +262,5 @@ module.exports = class Lexer { | ||||
| @@ -236,4 +262,5 @@ export class Lexer { | ||||
|        if (token = this.tokenizer.table(src)) { | ||||
|          src = src.substring(token.raw.length); | ||||
| +        this.set_ln(token, ln); | ||||
|          tokens.push(token); | ||||
|          continue; | ||||
| @@ -243,4 +270,5 @@ module.exports = class Lexer { | ||||
| @@ -243,4 +270,5 @@ export class Lexer { | ||||
|        if (token = this.tokenizer.lheading(src)) { | ||||
|          src = src.substring(token.raw.length); | ||||
| +        this.set_ln(token, ln); | ||||
|          tokens.push(token); | ||||
|          continue; | ||||
| @@ -263,4 +291,5 @@ module.exports = class Lexer { | ||||
| @@ -263,4 +291,5 @@ export class Lexer { | ||||
|        } | ||||
|        if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) { | ||||
| +        this.set_ln(token, ln); | ||||
|          lastToken = tokens[tokens.length - 1]; | ||||
|          if (lastParagraphClipped && lastToken.type === 'paragraph') { | ||||
| @@ -280,4 +309,6 @@ module.exports = class Lexer { | ||||
| @@ -280,4 +309,6 @@ export class Lexer { | ||||
|        if (token = this.tokenizer.text(src)) { | ||||
|          src = src.substring(token.raw.length); | ||||
| +        this.set_ln(token, ln); | ||||
| +        this.ln++; | ||||
|          lastToken = tokens[tokens.length - 1]; | ||||
|          if (lastToken && lastToken.type === 'text') { | ||||
| @@ -355,4 +386,5 @@ module.exports = class Lexer { | ||||
| @@ -355,4 +386,5 @@ export class Lexer { | ||||
|            if (token = extTokenizer.call({ lexer: this }, src, tokens)) { | ||||
|              src = src.substring(token.raw.length); | ||||
| +            this.ln = token.ln || this.ln; | ||||
|              tokens.push(token); | ||||
|              return true; | ||||
| @@ -420,4 +452,6 @@ module.exports = class Lexer { | ||||
| @@ -420,4 +452,6 @@ export class Lexer { | ||||
|        if (token = this.tokenizer.br(src)) { | ||||
|          src = src.substring(token.raw.length); | ||||
| +        // no need to reset (no more blockTokens anyways) | ||||
| +        token.ln = this.ln++; | ||||
|          tokens.push(token); | ||||
|          continue; | ||||
| @@ -462,4 +496,5 @@ module.exports = class Lexer { | ||||
| @@ -462,4 +496,5 @@ export class Lexer { | ||||
|        if (token = this.tokenizer.inlineText(cutSrc, smartypants)) { | ||||
|          src = src.substring(token.raw.length); | ||||
| +        this.ln = token.ln || this.ln; | ||||
| @@ -145,13 +145,13 @@ add data-ln="%d" to most tags, %d is the source markdown line | ||||
| diff --git a/src/Parser.js b/src/Parser.js | ||||
| --- a/src/Parser.js | ||||
| +++ b/src/Parser.js | ||||
| @@ -18,4 +18,5 @@ module.exports = class Parser { | ||||
| @@ -18,4 +18,5 @@ export class Parser { | ||||
|      this.textRenderer = new TextRenderer(); | ||||
|      this.slugger = new Slugger(); | ||||
| +    this.ln = 0; // error indicator; should always be set >=1 from tokens | ||||
|    } | ||||
|   | ||||
| @@ -64,4 +65,8 @@ module.exports = class Parser { | ||||
| @@ -64,4 +65,8 @@ export class Parser { | ||||
|      for (i = 0; i < l; i++) { | ||||
|        token = tokens[i]; | ||||
| +      // take line-numbers from tokens whenever possible | ||||
| @@ -160,7 +160,7 @@ diff --git a/src/Parser.js b/src/Parser.js | ||||
| +      this.renderer.tag_ln(this.ln); | ||||
|   | ||||
|        // Run any renderer extensions | ||||
| @@ -124,7 +129,10 @@ module.exports = class Parser { | ||||
| @@ -124,7 +129,10 @@ export class Parser { | ||||
|              } | ||||
|   | ||||
| -            body += this.renderer.tablerow(cell); | ||||
| @@ -173,7 +173,7 @@ diff --git a/src/Parser.js b/src/Parser.js | ||||
| +          out += this.renderer.tag_ln(token.ln).table(header, body); | ||||
|            continue; | ||||
|          } | ||||
| @@ -167,8 +175,12 @@ module.exports = class Parser { | ||||
| @@ -167,8 +175,12 @@ export class Parser { | ||||
|   | ||||
|              itemBody += this.parse(item.tokens, loose); | ||||
| -            body += this.renderer.listitem(itemBody, task, checked); | ||||
| @@ -188,7 +188,7 @@ diff --git a/src/Parser.js b/src/Parser.js | ||||
| +          out += this.renderer.tag_ln(token.ln).list(body, ordered, start); | ||||
|            continue; | ||||
|          } | ||||
| @@ -179,5 +191,6 @@ module.exports = class Parser { | ||||
| @@ -179,5 +191,6 @@ export class Parser { | ||||
|          } | ||||
|          case 'paragraph': { | ||||
| -          out += this.renderer.paragraph(this.parseInline(token.tokens)); | ||||
| @@ -196,7 +196,7 @@ diff --git a/src/Parser.js b/src/Parser.js | ||||
| +          out += this.renderer.tag_ln(token.ln).paragraph(t); | ||||
|            continue; | ||||
|          } | ||||
| @@ -221,4 +234,7 @@ module.exports = class Parser { | ||||
| @@ -221,4 +234,7 @@ export class Parser { | ||||
|        token = tokens[i]; | ||||
|   | ||||
| +      // another thing that only affects <br/> and other inlines | ||||
| @@ -207,7 +207,7 @@ diff --git a/src/Parser.js b/src/Parser.js | ||||
| diff --git a/src/Renderer.js b/src/Renderer.js | ||||
| --- a/src/Renderer.js | ||||
| +++ b/src/Renderer.js | ||||
| @@ -11,6 +11,12 @@ module.exports = class Renderer { | ||||
| @@ -11,6 +11,12 @@ export class Renderer { | ||||
|    constructor(options) { | ||||
|      this.options = options || defaults; | ||||
| +    this.ln = ""; | ||||
| @@ -220,7 +220,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js | ||||
| +   | ||||
|    code(code, infostring, escaped) { | ||||
|      const lang = (infostring || '').match(/\S*/)[0]; | ||||
| @@ -26,10 +32,10 @@ module.exports = class Renderer { | ||||
| @@ -26,10 +32,10 @@ export class Renderer { | ||||
|   | ||||
|      if (!lang) { | ||||
| -      return '<pre><code>' | ||||
| @@ -233,55 +233,55 @@ diff --git a/src/Renderer.js b/src/Renderer.js | ||||
| +    return '<pre' + this.ln + '><code class="' | ||||
|        + this.options.langPrefix | ||||
|        + escape(lang, true) | ||||
| @@ -40,5 +46,5 @@ module.exports = class Renderer { | ||||
| @@ -40,5 +46,5 @@ export class Renderer { | ||||
|   | ||||
|    blockquote(quote) { | ||||
| -    return '<blockquote>\n' + quote + '</blockquote>\n'; | ||||
| +    return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n'; | ||||
|    } | ||||
|   | ||||
| @@ -51,4 +57,5 @@ module.exports = class Renderer { | ||||
| @@ -51,4 +57,5 @@ export class Renderer { | ||||
|        return '<h' | ||||
|          + level | ||||
| +        + this.ln | ||||
|          + ' id="' | ||||
|          + this.options.headerPrefix | ||||
| @@ -61,5 +68,5 @@ module.exports = class Renderer { | ||||
| @@ -61,5 +68,5 @@ export class Renderer { | ||||
|      } | ||||
|      // ignore IDs | ||||
| -    return '<h' + level + '>' + text + '</h' + level + '>\n'; | ||||
| +    return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n'; | ||||
|    } | ||||
|   | ||||
| @@ -75,5 +82,5 @@ module.exports = class Renderer { | ||||
| @@ -75,5 +82,5 @@ export class Renderer { | ||||
|   | ||||
|    listitem(text) { | ||||
| -    return '<li>' + text + '</li>\n'; | ||||
| +    return '<li' + this.ln + '>' + text + '</li>\n'; | ||||
|    } | ||||
|   | ||||
| @@ -87,5 +94,5 @@ module.exports = class Renderer { | ||||
| @@ -87,5 +94,5 @@ export class Renderer { | ||||
|   | ||||
|    paragraph(text) { | ||||
| -    return '<p>' + text + '</p>\n'; | ||||
| +    return '<p' + this.ln + '>' + text + '</p>\n'; | ||||
|    } | ||||
|   | ||||
| @@ -102,5 +109,5 @@ module.exports = class Renderer { | ||||
| @@ -102,5 +109,5 @@ export class Renderer { | ||||
|   | ||||
|    tablerow(content) { | ||||
| -    return '<tr>\n' + content + '</tr>\n'; | ||||
| +    return '<tr' + this.ln + '>\n' + content + '</tr>\n'; | ||||
|    } | ||||
|   | ||||
| @@ -127,5 +134,5 @@ module.exports = class Renderer { | ||||
| @@ -127,5 +134,5 @@ export class Renderer { | ||||
|   | ||||
|    br() { | ||||
| -    return this.options.xhtml ? '<br/>' : '<br>'; | ||||
| +    return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>'; | ||||
|    } | ||||
|   | ||||
| @@ -153,5 +160,5 @@ module.exports = class Renderer { | ||||
| @@ -153,5 +160,5 @@ export class Renderer { | ||||
|      } | ||||
|   | ||||
| -    let out = '<img src="' + href + '" alt="' + text + '"'; | ||||
| @@ -291,7 +291,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js | ||||
| diff --git a/src/Tokenizer.js b/src/Tokenizer.js | ||||
| --- a/src/Tokenizer.js | ||||
| +++ b/src/Tokenizer.js | ||||
| @@ -301,4 +301,7 @@ module.exports = class Tokenizer { | ||||
| @@ -297,4 +297,7 @@ export class Tokenizer { | ||||
|        const l = list.items.length; | ||||
|   | ||||
| +      // each nested list gets +1 ahead; this hack makes every listgroup -1 but atleast it doesn't get infinitely bad | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| diff --git a/src/Lexer.js b/src/Lexer.js | ||||
| --- a/src/Lexer.js | ||||
| +++ b/src/Lexer.js | ||||
| @@ -6,5 +6,5 @@ const { repeatString } = require('./helpers.js'); | ||||
| @@ -6,5 +6,5 @@ import { repeatString } from './helpers.js'; | ||||
|  /** | ||||
|   * smartypants text replacement | ||||
| - */ | ||||
| @@ -15,21 +15,21 @@ diff --git a/src/Lexer.js b/src/Lexer.js | ||||
| + * | ||||
|  function mangle(text) { | ||||
|    let out = '', | ||||
| @@ -465,5 +465,5 @@ module.exports = class Lexer { | ||||
| @@ -466,5 +466,5 @@ export class Lexer { | ||||
|   | ||||
|        // autolink | ||||
| -      if (token = this.tokenizer.autolink(src, mangle)) { | ||||
| +      if (token = this.tokenizer.autolink(src)) { | ||||
|          src = src.substring(token.raw.length); | ||||
|          tokens.push(token); | ||||
| @@ -472,5 +472,5 @@ module.exports = class Lexer { | ||||
| @@ -473,5 +473,5 @@ export class Lexer { | ||||
|   | ||||
|        // url (gfm) | ||||
| -      if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) { | ||||
| +      if (!this.state.inLink && (token = this.tokenizer.url(src))) { | ||||
|          src = src.substring(token.raw.length); | ||||
|          tokens.push(token); | ||||
| @@ -493,5 +493,5 @@ module.exports = class Lexer { | ||||
| @@ -494,5 +494,5 @@ export class Lexer { | ||||
|          } | ||||
|        } | ||||
| -      if (token = this.tokenizer.inlineText(cutSrc, smartypants)) { | ||||
| @@ -39,14 +39,14 @@ diff --git a/src/Lexer.js b/src/Lexer.js | ||||
| diff --git a/src/Renderer.js b/src/Renderer.js | ||||
| --- a/src/Renderer.js | ||||
| +++ b/src/Renderer.js | ||||
| @@ -142,5 +142,5 @@ module.exports = class Renderer { | ||||
| @@ -142,5 +142,5 @@ export class Renderer { | ||||
|   | ||||
|    link(href, title, text) { | ||||
| -    href = cleanUrl(this.options.sanitize, this.options.baseUrl, href); | ||||
| +    href = cleanUrl(this.options.baseUrl, href); | ||||
|      if (href === null) { | ||||
|        return text; | ||||
| @@ -155,5 +155,5 @@ module.exports = class Renderer { | ||||
| @@ -155,5 +155,5 @@ export class Renderer { | ||||
|   | ||||
|    image(href, title, text) { | ||||
| -    href = cleanUrl(this.options.sanitize, this.options.baseUrl, href); | ||||
| @@ -56,7 +56,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js | ||||
| diff --git a/src/Tokenizer.js b/src/Tokenizer.js | ||||
| --- a/src/Tokenizer.js | ||||
| +++ b/src/Tokenizer.js | ||||
| @@ -321,14 +321,7 @@ module.exports = class Tokenizer { | ||||
| @@ -320,14 +320,7 @@ export class Tokenizer { | ||||
|          type: 'html', | ||||
|          raw: cap[0], | ||||
| -        pre: !this.options.sanitizer | ||||
| @@ -72,7 +72,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js | ||||
| -      } | ||||
|        return token; | ||||
|      } | ||||
| @@ -477,15 +470,9 @@ module.exports = class Tokenizer { | ||||
| @@ -476,15 +469,9 @@ export class Tokenizer { | ||||
|   | ||||
|        return { | ||||
| -        type: this.options.sanitize | ||||
| @@ -90,7 +90,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js | ||||
| +        text: cap[0] | ||||
|        }; | ||||
|      } | ||||
| @@ -672,10 +659,10 @@ module.exports = class Tokenizer { | ||||
| @@ -671,10 +658,10 @@ export class Tokenizer { | ||||
|    } | ||||
|   | ||||
| -  autolink(src, mangle) { | ||||
| @@ -103,7 +103,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js | ||||
| +        text = escape(cap[1]); | ||||
|          href = 'mailto:' + text; | ||||
|        } else { | ||||
| @@ -700,10 +687,10 @@ module.exports = class Tokenizer { | ||||
| @@ -699,10 +686,10 @@ export class Tokenizer { | ||||
|    } | ||||
|   | ||||
| -  url(src, mangle) { | ||||
| @@ -116,7 +116,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js | ||||
| +        text = escape(cap[0]); | ||||
|          href = 'mailto:' + text; | ||||
|        } else { | ||||
| @@ -737,12 +724,12 @@ module.exports = class Tokenizer { | ||||
| @@ -736,12 +723,12 @@ export class Tokenizer { | ||||
|    } | ||||
|   | ||||
| -  inlineText(src, smartypants) { | ||||
| @@ -135,7 +135,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js | ||||
| diff --git a/src/defaults.js b/src/defaults.js | ||||
| --- a/src/defaults.js | ||||
| +++ b/src/defaults.js | ||||
| @@ -9,12 +9,8 @@ function getDefaults() { | ||||
| @@ -9,12 +9,8 @@ export function getDefaults() { | ||||
|      highlight: null, | ||||
|      langPrefix: 'language-', | ||||
| -    mangle: true, | ||||
| @@ -151,10 +151,10 @@ diff --git a/src/defaults.js b/src/defaults.js | ||||
| diff --git a/src/helpers.js b/src/helpers.js | ||||
| --- a/src/helpers.js | ||||
| +++ b/src/helpers.js | ||||
| @@ -64,18 +64,5 @@ function edit(regex, opt) { | ||||
| @@ -64,18 +64,5 @@ export function edit(regex, opt) { | ||||
|  const nonWordAndColonTest = /[^\w:]/g; | ||||
|  const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i; | ||||
| -function cleanUrl(sanitize, base, href) { | ||||
| -export function cleanUrl(sanitize, base, href) { | ||||
| -  if (sanitize) { | ||||
| -    let prot; | ||||
| -    try { | ||||
| @@ -168,36 +168,30 @@ diff --git a/src/helpers.js b/src/helpers.js | ||||
| -      return null; | ||||
| -    } | ||||
| -  } | ||||
| +function cleanUrl(base, href) { | ||||
| +export function cleanUrl(base, href) { | ||||
|    if (base && !originIndependentUrl.test(href)) { | ||||
|      href = resolveUrl(base, href); | ||||
| @@ -227,10 +214,4 @@ function findClosingBracket(str, b) { | ||||
| @@ -227,10 +214,4 @@ export function findClosingBracket(str, b) { | ||||
|  } | ||||
|   | ||||
| -function checkSanitizeDeprecation(opt) { | ||||
| -export function checkSanitizeDeprecation(opt) { | ||||
| -  if (opt && opt.sanitize && !opt.silent) { | ||||
| -    console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options'); | ||||
| -  } | ||||
| -} | ||||
| - | ||||
|  // copied from https://stackoverflow.com/a/5450113/806777 | ||||
|  function repeatString(pattern, count) { | ||||
| @@ -260,5 +241,4 @@ module.exports = { | ||||
|    rtrim, | ||||
|    findClosingBracket, | ||||
| -  checkSanitizeDeprecation, | ||||
|    repeatString | ||||
|  }; | ||||
|  export function repeatString(pattern, count) { | ||||
| diff --git a/src/marked.js b/src/marked.js | ||||
| --- a/src/marked.js | ||||
| +++ b/src/marked.js | ||||
| @@ -7,5 +7,4 @@ const Slugger = require('./Slugger.js'); | ||||
|  const { | ||||
| @@ -7,5 +7,4 @@ import { Slugger } from './Slugger.js'; | ||||
|  import { | ||||
|    merge, | ||||
| -  checkSanitizeDeprecation, | ||||
|    escape | ||||
|  } = require('./helpers.js'); | ||||
| @@ -35,5 +34,4 @@ function marked(src, opt, callback) { | ||||
|  } from './helpers.js'; | ||||
| @@ -35,5 +34,4 @@ export function marked(src, opt, callback) { | ||||
|   | ||||
|    opt = merge({}, marked.defaults, opt || {}); | ||||
| -  checkSanitizeDeprecation(opt); | ||||
| @@ -219,37 +213,37 @@ diff --git a/src/marked.js b/src/marked.js | ||||
| diff --git a/test/bench.js b/test/bench.js | ||||
| --- a/test/bench.js | ||||
| +++ b/test/bench.js | ||||
| @@ -33,5 +33,4 @@ async function runBench(options) { | ||||
| @@ -37,5 +37,4 @@ export async function runBench(options) { | ||||
|      breaks: false, | ||||
|      pedantic: false, | ||||
| -    sanitize: false, | ||||
|      smartLists: false | ||||
|    }); | ||||
| @@ -45,5 +44,4 @@ async function runBench(options) { | ||||
| @@ -49,5 +48,4 @@ export async function runBench(options) { | ||||
|      breaks: false, | ||||
|      pedantic: false, | ||||
| -    sanitize: false, | ||||
|      smartLists: false | ||||
|    }); | ||||
| @@ -58,5 +56,4 @@ async function runBench(options) { | ||||
| @@ -62,5 +60,4 @@ export async function runBench(options) { | ||||
|      breaks: false, | ||||
|      pedantic: false, | ||||
| -    sanitize: false, | ||||
|      smartLists: false | ||||
|    }); | ||||
| @@ -70,5 +67,4 @@ async function runBench(options) { | ||||
| @@ -74,5 +71,4 @@ export async function runBench(options) { | ||||
|      breaks: false, | ||||
|      pedantic: false, | ||||
| -    sanitize: false, | ||||
|      smartLists: false | ||||
|    }); | ||||
| @@ -83,5 +79,4 @@ async function runBench(options) { | ||||
| @@ -87,5 +83,4 @@ export async function runBench(options) { | ||||
|      breaks: false, | ||||
|      pedantic: true, | ||||
| -    sanitize: false, | ||||
|      smartLists: false | ||||
|    }); | ||||
| @@ -95,5 +90,4 @@ async function runBench(options) { | ||||
| @@ -99,5 +94,4 @@ export async function runBench(options) { | ||||
|      breaks: false, | ||||
|      pedantic: true, | ||||
| -    sanitize: false, | ||||
| @@ -258,7 +252,7 @@ diff --git a/test/bench.js b/test/bench.js | ||||
| diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js | ||||
| --- a/test/specs/run-spec.js | ||||
| +++ b/test/specs/run-spec.js | ||||
| @@ -22,9 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) { | ||||
| @@ -25,9 +25,4 @@ function runSpecs(title, dir, showCompletionTable, options) { | ||||
|            } | ||||
|   | ||||
| -          if (spec.options.sanitizer) { | ||||
| @@ -268,77 +262,77 @@ diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js | ||||
| - | ||||
|            (spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => { | ||||
|              const before = process.hrtime(); | ||||
| @@ -53,3 +48,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true }); | ||||
| @@ -56,3 +51,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true }); | ||||
|  runSpecs('New', './new'); | ||||
|  runSpecs('ReDOS', './redos'); | ||||
| -runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning | ||||
| diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js | ||||
| --- a/test/unit/Lexer-spec.js | ||||
| +++ b/test/unit/Lexer-spec.js | ||||
| @@ -589,5 +589,5 @@ paragraph | ||||
| @@ -635,5 +635,5 @@ paragraph | ||||
|      }); | ||||
|   | ||||
| -    it('sanitize', () => { | ||||
| +    /*it('sanitize', () => { | ||||
|        expectTokens({ | ||||
|          md: '<div>html</div>', | ||||
| @@ -607,5 +607,5 @@ paragraph | ||||
| @@ -653,5 +653,5 @@ paragraph | ||||
|          ] | ||||
|        }); | ||||
| -    }); | ||||
| +    });*/ | ||||
|    }); | ||||
|   | ||||
| @@ -652,5 +652,5 @@ paragraph | ||||
| @@ -698,5 +698,5 @@ paragraph | ||||
|        }); | ||||
|   | ||||
| -      it('html sanitize', () => { | ||||
| +      /*it('html sanitize', () => { | ||||
|          expectInlineTokens({ | ||||
|            md: '<div>html</div>', | ||||
| @@ -660,5 +660,5 @@ paragraph | ||||
| @@ -706,5 +706,5 @@ paragraph | ||||
|            ] | ||||
|          }); | ||||
| -      }); | ||||
| +      });*/ | ||||
|   | ||||
|        it('link', () => { | ||||
| @@ -971,5 +971,5 @@ paragraph | ||||
| @@ -1017,5 +1017,5 @@ paragraph | ||||
|          }); | ||||
|   | ||||
| -        it('autolink mangle email', () => { | ||||
| +        /*it('autolink mangle email', () => { | ||||
|            expectInlineTokens({ | ||||
|              md: '<test@example.com>', | ||||
| @@ -991,5 +991,5 @@ paragraph | ||||
| @@ -1037,5 +1037,5 @@ paragraph | ||||
|              ] | ||||
|            }); | ||||
| -        }); | ||||
| +        });*/ | ||||
|   | ||||
|          it('url', () => { | ||||
| @@ -1028,5 +1028,5 @@ paragraph | ||||
| @@ -1074,5 +1074,5 @@ paragraph | ||||
|          }); | ||||
|   | ||||
| -        it('url mangle email', () => { | ||||
| +        /*it('url mangle email', () => { | ||||
|            expectInlineTokens({ | ||||
|              md: 'test@example.com', | ||||
| @@ -1048,5 +1048,5 @@ paragraph | ||||
| @@ -1094,5 +1094,5 @@ paragraph | ||||
|              ] | ||||
|            }); | ||||
| -        }); | ||||
| +        });*/ | ||||
|        }); | ||||
|   | ||||
| @@ -1064,5 +1064,5 @@ paragraph | ||||
| @@ -1110,5 +1110,5 @@ paragraph | ||||
|        }); | ||||
|   | ||||
| -      describe('smartypants', () => { | ||||
| +      /*describe('smartypants', () => { | ||||
|          it('single quotes', () => { | ||||
|            expectInlineTokens({ | ||||
| @@ -1134,5 +1134,5 @@ paragraph | ||||
| @@ -1180,5 +1180,5 @@ paragraph | ||||
|            }); | ||||
|          }); | ||||
| -      }); | ||||
|   | ||||
| @@ -86,8 +86,6 @@ function have() { | ||||
| 	python -c "import $1; $1; $1.__version__" | ||||
| } | ||||
|  | ||||
| mv copyparty/web/deps/marked.full.js.gz srv/ || true | ||||
|  | ||||
| . buildenv/bin/activate | ||||
| have setuptools | ||||
| have wheel | ||||
|   | ||||
| @@ -16,12 +16,11 @@ help() { exec cat <<'EOF' | ||||
| # | ||||
| # `no-sh` makes just the python sfx, skips the sh/unix sfx | ||||
| # | ||||
| # `no-ogv` saves ~192k by removing the opus/vorbis audio codecs | ||||
| #   (only affects apple devices; everything else has native support) | ||||
| # | ||||
| # `no-cm` saves ~92k by removing easymde/codemirror | ||||
| # `no-cm` saves ~82k by removing easymde/codemirror | ||||
| #   (the fancy markdown editor) | ||||
| # | ||||
| # `no-hl` saves ~41k by removing syntax hilighting in the text viewer | ||||
| # | ||||
| # `no-fnt` saves ~9k by removing the source-code-pro font | ||||
| #   (browsers will try to use 'Consolas' instead) | ||||
| # | ||||
| @@ -73,8 +72,8 @@ while [ ! -z "$1" ]; do | ||||
| 		clean)  clean=1  ; ;; | ||||
| 		re)     repack=1 ; ;; | ||||
| 		gz)     use_gz=1 ; ;; | ||||
| 		no-ogv) no_ogv=1 ; ;; | ||||
| 		no-fnt) no_fnt=1 ; ;; | ||||
| 		no-hl)  no_hl=1  ; ;; | ||||
| 		no-dd)  no_dd=1  ; ;; | ||||
| 		no-cm)  no_cm=1  ; ;; | ||||
| 		no-sh)  do_sh=   ; ;; | ||||
| @@ -215,9 +214,6 @@ cat have | while IFS= read -r x; do | ||||
| done | ||||
| rm have | ||||
|  | ||||
| [ $no_ogv ] && | ||||
| 	rm -rf copyparty/web/deps/{dynamicaudio,ogv}* | ||||
|  | ||||
| [ $no_cm ] && { | ||||
| 	rm -rf copyparty/web/mde.* copyparty/web/deps/easymde* | ||||
| 	echo h > copyparty/web/mde.html | ||||
| @@ -226,6 +222,9 @@ rm have | ||||
| 	tmv "$f" | ||||
| } | ||||
|  | ||||
| [ $no_hl ] && | ||||
| 	rm -rf copyparty/web/deps/prism* | ||||
|  | ||||
| [ $no_fnt ] && { | ||||
| 	rm -f copyparty/web/deps/scp.woff2 | ||||
| 	f=copyparty/web/ui.css | ||||
| @@ -238,7 +237,7 @@ rm have | ||||
| 	rm -rf copyparty/web/dd | ||||
| 	f=copyparty/web/browser.css | ||||
| 	gzip -d "$f.gz" || true | ||||
| 	sed -r 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; /[0-9]+% \{cursor:/d; /animation: ?cursor/d' <$f >t | ||||
| 	sed -r 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; s/[0-9]+% \{cursor:[^}]+\}//; s/animation: ?cursor[^};]+//' <$f >t | ||||
| 	tmv "$f" | ||||
| } | ||||
|  | ||||
| @@ -271,7 +270,7 @@ find | grep -E '\.css$' | while IFS= read -r f; do | ||||
| 	} | ||||
| 	!/\}$/ {printf "%s",$0;next} | ||||
| 	1 | ||||
| 	' <$f | sed 's/;\}$/}/' >t | ||||
| 	' <$f | sed -r 's/;\}$/}/; /\{\}$/d' >t | ||||
| 	tmv "$f" | ||||
| done | ||||
| unexpand -h 2>/dev/null && | ||||
|   | ||||
| @@ -35,8 +35,6 @@ ver="$1" | ||||
| 	exit 1 | ||||
| } | ||||
|  | ||||
| mv copyparty/web/deps/marked.full.js.gz srv/ || true | ||||
|  | ||||
| mkdir -p dist | ||||
| zip_path="$(pwd)/dist/copyparty-$ver.zip" | ||||
| tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz" | ||||
|   | ||||
| @@ -7,8 +7,9 @@ v=$1 | ||||
| printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1 | ||||
| grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1 | ||||
|  | ||||
| git push all | ||||
| git tag v$v | ||||
| git push origin --tags | ||||
| git push all --tags | ||||
|  | ||||
| rm -rf ../dist | ||||
|  | ||||
|   | ||||
| @@ -49,14 +49,9 @@ copyparty/web/deps/easymde.js, | ||||
| copyparty/web/deps/marked.js, | ||||
| copyparty/web/deps/mini-fa.css, | ||||
| copyparty/web/deps/mini-fa.woff, | ||||
| copyparty/web/deps/ogv-decoder-audio-opus-wasm.js, | ||||
| copyparty/web/deps/ogv-decoder-audio-opus-wasm.wasm, | ||||
| copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.js, | ||||
| copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.wasm, | ||||
| copyparty/web/deps/ogv-demuxer-ogg-wasm.js, | ||||
| copyparty/web/deps/ogv-demuxer-ogg-wasm.wasm, | ||||
| copyparty/web/deps/ogv-worker-audio.js, | ||||
| copyparty/web/deps/ogv.js, | ||||
| copyparty/web/deps/prism.js, | ||||
| copyparty/web/deps/prism.css, | ||||
| copyparty/web/deps/prismd.css, | ||||
| copyparty/web/deps/scp.woff2, | ||||
| copyparty/web/deps/sha512.ac.js, | ||||
| copyparty/web/deps/sha512.hw.js, | ||||
|   | ||||
| @@ -9,7 +9,7 @@ import subprocess as sp | ||||
| to edit this file, use HxD or "vim -b" | ||||
|   (there is compressed stuff at the end) | ||||
|  | ||||
| run me with any version of python, i will unpack and run copyparty | ||||
| run me with python 2.7 or 3.3+ to unpack and run copyparty | ||||
|  | ||||
| there's zero binaries! just plaintext python scripts all the way down | ||||
|   so you can easily unpack the archive and inspect it for shady stuff | ||||
|   | ||||
| @@ -60,7 +60,7 @@ class Cpp(object): | ||||
|                 pass | ||||
|  | ||||
|  | ||||
| def tc1(): | ||||
| def tc1(vflags): | ||||
|     ub = "http://127.0.0.1:4321/" | ||||
|     td = os.path.join("srv", "smoketest") | ||||
|     try: | ||||
| @@ -100,17 +100,17 @@ def tc1(): | ||||
|     for d1 in ["r", "w", "a"]: | ||||
|         pdirs.append("{}/{}".format(td, d1)) | ||||
|         pdirs.append("{}/{}/j".format(td, d1)) | ||||
|         for d2 in ["r", "w", "a"]: | ||||
|         for d2 in ["r", "w", "a", "c"]: | ||||
|             d = os.path.join(td, d1, "j", d2) | ||||
|             pdirs.append(d) | ||||
|             os.makedirs(d) | ||||
|  | ||||
|     pdirs = [x.replace("\\", "/") for x in pdirs] | ||||
|     udirs = [x.split("/", 2)[2] for x in pdirs] | ||||
|     perms = [x.rstrip("j/")[-1] for x in pdirs] | ||||
|     perms = [x.rstrip("cj/")[-1] for x in pdirs] | ||||
|     perms = ["rw" if x == "a" else x for x in perms] | ||||
|     for pd, ud, p in zip(pdirs, udirs, perms): | ||||
|         if ud[-1] == "j": | ||||
|         if ud[-1] == "j" or ud[-1] == "c": | ||||
|             continue | ||||
|  | ||||
|         hp = None | ||||
| @@ -123,29 +123,37 @@ def tc1(): | ||||
|             hp = "-" | ||||
|             hpaths[ud] = os.path.join(pd, ".hist") | ||||
|  | ||||
|         arg = "{}:{}:{}".format(pd, ud, p, hp) | ||||
|         arg = "{}:{}:{}".format(pd, ud, p) | ||||
|         if hp: | ||||
|             arg += ":c,hist=" + hp | ||||
|  | ||||
|         args += ["-v", arg] | ||||
|         args += ["-v", arg + vflags] | ||||
|  | ||||
|     # return | ||||
|     cpp = Cpp(args) | ||||
|     CPP.append(cpp) | ||||
|     cpp.await_idle(ub, 3) | ||||
|  | ||||
|     for d in udirs: | ||||
|     for d, p in zip(udirs, perms): | ||||
|         vid = ovid + "\n{}".format(d).encode("utf-8") | ||||
|         try: | ||||
|             requests.post(ub + d, data={"act": "bput"}, files={"f": ("a.h264", vid)}) | ||||
|         except: | ||||
|             pass | ||||
|         r = requests.post( | ||||
|             ub + d, | ||||
|             data={"act": "bput"}, | ||||
|             files={"f": (d.replace("/", "") + ".h264", vid)}, | ||||
|         ) | ||||
|         c = r.status_code | ||||
|         if c == 200 and p not in ["w", "rw"]: | ||||
|             raise Exception("post {} with perm {} at {}".format(c, p, d)) | ||||
|         elif c == 403 and p not in ["r"]: | ||||
|             raise Exception("post {} with perm {} at {}".format(c, p, d)) | ||||
|         elif c not in [200, 403]: | ||||
|             raise Exception("post {} with perm {} at {}".format(c, p, d)) | ||||
|  | ||||
|     cpp.clean() | ||||
|  | ||||
|     # GET permission | ||||
|     for d, p in zip(udirs, perms): | ||||
|         u = "{}{}/a.h264".format(ub, d) | ||||
|         u = "{}{}/{}.h264".format(ub, d, d.replace("/", "")) | ||||
|         r = requests.get(u) | ||||
|         ok = bool(r) | ||||
|         if ok != (p in ["rw"]): | ||||
| @@ -153,14 +161,14 @@ def tc1(): | ||||
|  | ||||
|     # stat filesystem | ||||
|     for d, p in zip(pdirs, perms): | ||||
|         u = "{}/a.h264".format(d) | ||||
|         u = "{}/{}.h264".format(d, d.split("test/")[-1].replace("/", "")) | ||||
|         ok = os.path.exists(u) | ||||
|         if ok != (p in ["rw", "w"]): | ||||
|             raise Exception("stat {} with perm {} at {}".format(ok, p, u)) | ||||
|  | ||||
|     # GET thumbnail, vreify contents | ||||
|     for d, p in zip(udirs, perms): | ||||
|         u = "{}{}/a.h264?th=j".format(ub, d) | ||||
|         u = "{}{}/{}.h264?th=j".format(ub, d, d.replace("/", "")) | ||||
|         r = requests.get(u) | ||||
|         ok = bool(r and r.content[:3] == b"\xff\xd8\xff") | ||||
|         if ok != (p in ["rw"]): | ||||
| @@ -192,9 +200,9 @@ def tc1(): | ||||
|     cpp.stop(True) | ||||
|  | ||||
|  | ||||
| def run(tc): | ||||
| def run(tc, *a): | ||||
|     try: | ||||
|         tc() | ||||
|         tc(*a) | ||||
|     finally: | ||||
|         try: | ||||
|             CPP[0].stop(False) | ||||
| @@ -203,7 +211,8 @@ def run(tc): | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|     run(tc1) | ||||
|     run(tc1, "") | ||||
|     run(tc1, ":c,fk") | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|   | ||||
							
								
								
									
										2
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										2
									
								
								setup.py
									
									
									
									
									
								
							| @@ -114,7 +114,7 @@ args = { | ||||
|     "install_requires": ["jinja2"], | ||||
|     "extras_require": {"thumbnails": ["Pillow"], "audiotags": ["mutagen"]}, | ||||
|     "entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]}, | ||||
|     "scripts": ["bin/copyparty-fuse.py"], | ||||
|     "scripts": ["bin/copyparty-fuse.py", "bin/up2k.py"], | ||||
|     "cmdclass": {"clean2": clean2}, | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -29,6 +29,9 @@ class Cfg(Namespace): | ||||
|             v=v or [], | ||||
|             c=c, | ||||
|             rproxy=0, | ||||
|             rsp_slp=0, | ||||
|             s_wr_slp=0, | ||||
|             s_wr_sz=512 * 1024, | ||||
|             ed=False, | ||||
|             nw=False, | ||||
|             unpost=600, | ||||
| @@ -47,10 +50,14 @@ class Cfg(Namespace): | ||||
|             mtp=[], | ||||
|             mte="a", | ||||
|             mth="", | ||||
|             textfiles="", | ||||
|             doctitle="", | ||||
|             hist=None, | ||||
|             no_hash=False, | ||||
|             no_idx=None, | ||||
|             no_hash=None, | ||||
|             js_browser=None, | ||||
|             css_browser=None, | ||||
|             **{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()} | ||||
|             **{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr no_acode".split()} | ||||
|         ) | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -17,19 +17,24 @@ from copyparty import util | ||||
|  | ||||
| class Cfg(Namespace): | ||||
|     def __init__(self, a=None, v=None, c=None): | ||||
|         ex = {k: False for k in "nw e2d e2ds e2dsa e2t e2ts e2tsr".split()} | ||||
|         ex = "nw e2d e2ds e2dsa e2t e2ts e2tsr no_logues no_readme no_acode" | ||||
|         ex = {k: False for k in ex.split()} | ||||
|         ex2 = { | ||||
|             "mtp": [], | ||||
|             "mte": "a", | ||||
|             "mth": "", | ||||
|             "doctitle": "", | ||||
|             "hist": None, | ||||
|             "no_hash": False, | ||||
|             "no_idx": None, | ||||
|             "no_hash": None, | ||||
|             "js_browser": None, | ||||
|             "css_browser": None, | ||||
|             "no_voldump": True, | ||||
|             "no_logues": False, | ||||
|             "no_readme": False, | ||||
|             "re_maxage": 0, | ||||
|             "rproxy": 0, | ||||
|             "rsp_slp": 0, | ||||
|             "s_wr_slp": 0, | ||||
|             "s_wr_sz": 512 * 1024, | ||||
|         } | ||||
|         ex.update(ex2) | ||||
|         super(Cfg, self).__init__(a=a or [], v=v or [], c=c, **ex) | ||||
|   | ||||
| @@ -3,6 +3,7 @@ import sys | ||||
| import time | ||||
| import shutil | ||||
| import jinja2 | ||||
| import threading | ||||
| import tempfile | ||||
| import platform | ||||
| import subprocess as sp | ||||
| @@ -28,7 +29,7 @@ if MACOS: | ||||
|     # 25% faster; until any tests do symlink stuff | ||||
|  | ||||
|  | ||||
| from copyparty.util import Unrecv | ||||
| from copyparty.util import Unrecv, FHC | ||||
|  | ||||
|  | ||||
| def runcmd(argv): | ||||
| @@ -112,6 +113,7 @@ class VSock(object): | ||||
| class VHttpSrv(object): | ||||
|     def __init__(self): | ||||
|         self.broker = NullBroker() | ||||
|         self.prism = None | ||||
|  | ||||
|         aliases = ["splash", "browser", "browser2", "msg", "md", "mde"] | ||||
|         self.j2 = {x: J2_FILES for x in aliases} | ||||
| @@ -132,8 +134,10 @@ class VHttpConn(object): | ||||
|         self.log_src = "a" | ||||
|         self.lf_url = None | ||||
|         self.hsrv = VHttpSrv() | ||||
|         self.u2fh = FHC() | ||||
|         self.mutex = threading.Lock() | ||||
|         self.nreq = 0 | ||||
|         self.nbyte = 0 | ||||
|         self.ico = None | ||||
|         self.thumbcli = None | ||||
|         self.t0 = time.time() | ||||
|         self.t0 = time.time() | ||||
|   | ||||
		Reference in New Issue
	
	Block a user