Compare commits

..

172 Commits

Author SHA1 Message Date
ed  c4083a2942 v1.2.6 2022-04-15 20:09:50 +02:00
ed  36c20bbe53 fix setting mtime on windows 2022-04-15 20:08:55 +02:00
ed  e34634f5af v1.2.5 2022-04-15 19:42:40 +02:00
ed  cba9e5b669 add hardlinks (symlink alternative) for up2k dedup 2022-04-15 19:13:53 +02:00
ed  1f3c46a6b0 forgot some css files 2022-04-15 17:11:46 +02:00
ed  799a5ffa47 v1.2.4 2022-04-14 21:45:22 +02:00
ed  b000707c10 detect poor ffmpeg builds 2022-04-14 18:20:48 +02:00
ed  feba4de1d6 make gallery linkable 2022-04-14 17:12:56 +02:00
ed  951fdb27ca dont scan orphaned volumes 2022-04-14 17:11:51 +02:00
ed  9697fb3d84 option to disable thumbnails per volume 2022-04-14 17:11:26 +02:00
ed  2dbed4500a add flat theme 2022-04-14 16:57:51 +02:00
ed  fd9d0e433d thumbnails: try FFmpeg for images too 2022-04-11 10:38:57 +02:00
ed  f096f3ef81 thumbnails: disable pdf because too scary 2022-04-10 23:02:09 +02:00
ed  cc4a063695 thumbnails: per-decoder filetype config 2022-04-10 22:59:45 +02:00
ed  b64cabc3c9 thumbnails: add pyvips as alt/supp. to pillow 2022-04-10 14:16:09 +02:00
ed  3dd460717c add flat theme 2022-04-09 23:05:54 +02:00
ed  bf658a522b naming 2022-04-09 20:41:08 +02:00
ed  e9be7e712d futureproof clipboard function 2022-04-09 19:38:05 +02:00
ed  e40cd2a809 optimize window resizing 2022-04-09 19:20:09 +02:00
ed  dbabeb9692 gallery: add animation preferences 2022-04-09 17:23:54 +02:00
ed  8dd37d76b0 fix drifting resize 2022-04-09 14:37:25 +02:00
ed  fd475aa358 textviewer: translate basic ansi/sgr colors 2022-04-09 00:50:54 +02:00
ed  f0988c0e32 filter some volflags from up2k dump 2022-04-08 21:56:24 +02:00
ed  0632f09bff rhel8 ignores flock and kills us anyways 2022-04-08 21:29:31 +02:00
ed  ba599aaca0 explain systemd jank 2022-04-08 20:39:22 +02:00
ed  ff05919e89 support mpc/musepack audio (streaming + thumbnailing) 2022-04-02 22:17:16 +02:00
ed  52e63fa101 dont crash when mediaplayer config is changed while music isnt playing 2022-03-28 23:17:02 +02:00
ed  96ceccd12a v1.2.3 2022-03-24 02:35:53 +01:00
ed  87994fe006 retry failed uploads with backoff 2022-03-24 02:29:59 +01:00
ed  fa12c81a03 zip-download files older than 1980-01-01 2022-03-24 01:31:50 +01:00
ed  344ce63455 basic-browser is implicitly not js 2022-03-21 01:20:47 +01:00
ed  ec4daacf9e v1.2.2 2022-03-20 06:15:57 +01:00
ed  f3e8308718 eh, better as volflags 2022-03-20 05:45:07 +01:00
ed  515ac5d941 show textfile name in document title 2022-03-20 03:40:21 +01:00
ed  954c7e7e50 add option to request noindex from crawlers 2022-03-20 03:23:42 +01:00
ed  67ff57f3a3 add option to disable html folder listings 2022-03-20 02:45:53 +01:00
ed  c10c70c1e5 misc 2022-03-04 21:30:31 +01:00
ed  04592a98d2 include all IPs + link status in server url listing 2022-03-04 21:29:28 +01:00
ed  c9c4aac6cf v1.2.1 2022-03-03 01:26:29 +01:00
ed  8b2c7586ce minimal py2 support for ftpd 2022-03-03 01:18:01 +01:00
ed  32e22dfe84 vendor asynchat for pyftpdlib 2022-03-03 01:16:52 +01:00
ed  d70b885722 failed attempt at upgrading scp 2022-03-03 00:17:03 +01:00
ed  ac6c4b13f5 add plaintext volume listing 2022-03-02 21:20:19 +01:00
ed  ececdad22d and increase debounce a bit 2022-03-02 01:56:05 +01:00
ed  bf659781b0 try some more spacing 2022-03-02 01:49:15 +01:00
ed  2c6bb195a4 search: get rid of inner-joins to fix -tags 2022-03-02 00:35:04 +01:00
ed  c032cd08b3 prisonparty: clean exit on sigterm/int 2022-02-27 20:07:28 +01:00
ed  39e7a7a231 sfx: prefer system pyftpdlib if available 2022-02-13 21:00:13 +01:00
ed  6e14cd2c39 graduate copyparty-sfx.sh 2022-02-13 20:44:03 +01:00
ed  aab3baaea7 v1.2.0 2022-02-13 16:58:54 +01:00
ed  b8453c3b4f ftpd: support rootless filesystems 2022-02-13 16:38:24 +01:00
ed  6ce0e2cd5b ftpd: add ftps 2022-02-13 15:46:33 +01:00
ed  76beaae7f2 ftpd: add move/rename 2022-02-13 14:26:16 +01:00
ed  c1a7f9edbe ftpd: add indexing, delete, windows support 2022-02-13 13:58:16 +01:00
ed  b5f2fe2f0a add ftpd 2022-02-13 03:10:53 +01:00
ed  98a90d49cb ctrl-click document links to open in new tab 2022-02-12 20:26:44 +01:00
ed  f55e982cb5 configurable max-hits 2022-02-12 16:22:35 +01:00
ed  686c7defeb fix path-search in nontop volumes 2022-02-12 16:00:14 +01:00
ed  0b1e483c53 bump webdeps 2022-02-09 23:45:09 +01:00
ed  457d7df129 fix ie11 hotkey crash 2022-02-06 02:08:18 +01:00
ed  ce776a547c add rate throttling to uploads too 2022-02-06 02:06:59 +01:00
ed  ded0567cbf v1.1.12 2022-01-18 22:28:33 +01:00
ed  c9cac83d09 fix PUT response in write-only folders 2022-01-18 21:37:11 +01:00
ed  4fbe6b01a8 clarify what the app does 2022-01-17 00:31:23 +00:00
ed  ee9585264e deal with github api change + build vamp if necessary 2022-01-17 00:27:23 +00:00
ed  c9ffead7bf prisonparty: support running from src 2022-01-17 00:24:40 +00:00
ed  ed69d42005 v1.1.11 2022-01-14 22:25:06 +01:00
ed  0b47ee306b bump marked.js to 4.0.10 2022-01-14 20:42:23 +01:00
ed  e4e63619d4 linkable maintabs 2022-01-14 19:26:07 +01:00
ed  f32cca292a propagate sort-order to thegrid 2022-01-14 18:28:49 +01:00
ed  e87ea19ff1 return file URL in PUT response 2022-01-11 22:59:19 +01:00
ed  0214793740 fix garbage in markdown output 2022-01-05 18:57:05 +01:00
ed  fc9dd5d743 meadup changes 2022-01-03 01:16:27 +01:00
ed  9e6d5dd2b9 vbi: add onscreen qrcode 2021-12-28 20:57:11 +01:00
ed  bdad197e2c make it even worse 2021-12-27 00:04:38 +01:00
ed  7e139288a6 add very bad idea 2021-12-26 23:32:46 +01:00
ed  6e7935abaf repaint cut/paste buttons when permissions change 2021-12-24 00:50:52 +01:00
ed  3ba0cc20f1 v1.1.10 2021-12-17 00:05:17 +01:00
ed  dd28de1796 sendfile: handle eagain 2021-12-17 00:04:19 +01:00
ed  9eecc9e19a v1.1.9 2021-12-16 22:54:44 +01:00
ed  6530cb6b05 shut socket on tx error 2021-12-16 22:51:24 +01:00
ed  41ce613379 add multisearch 2021-12-12 20:11:07 +01:00
ed  5e2785caba more aggressively try ffmpeg when mutagen fails 2021-12-11 20:31:04 +01:00
ed  d7cc000976 v1.1.8 2021-12-10 02:44:48 +01:00
ed  50d8ff95ae good stuff 2021-12-10 02:21:56 +01:00
ed  b2de1459b6 quick backports to the alternative fuse client 2021-12-10 01:59:45 +01:00
ed  f0ffbea0b2 add breadcrumbs to the textfile tree 2021-12-10 00:44:47 +01:00
ed  199ccca0fe v1.1.7 2021-12-07 19:19:35 +01:00
ed  1d9b355743 fix search ui after b265e59 broke it 2021-12-07 19:12:36 +01:00
ed  f0437fbb07 cleanup the windowtitle a bit 2021-12-07 19:09:24 +01:00
ed  abc404a5b7 v1.1.6 2021-12-07 01:17:56 +01:00
ed  04b9e21330 update web-deps 2021-12-07 01:12:32 +01:00
ed  1044aa071b deal with consecutive dupes even without sqlite 2021-12-06 23:51:44 +01:00
ed  4c3192c8cc set window-title to listening ip 2021-12-06 23:08:04 +01:00
ed  689e77a025 option to set a custom servicename 2021-12-06 22:24:25 +01:00
ed  3bd89403d2 apply per-volume index config to ui 2021-12-06 22:04:24 +01:00
ed  b4800d9bcb option to disable onboot-scans per-volume 2021-12-06 20:54:13 +01:00
ed  05485e8539 md: smaller indent on outermost list 2021-12-06 20:17:12 +01:00
ed  0e03dc0868 and fix the markdown breadcrumbs too 2021-12-06 19:51:47 +01:00
ed  352b1ed10a generate correct links when trailing slash missing 2021-12-06 19:49:14 +01:00
ed  0db1244d04 also consider TMPDIR and friends 2021-12-06 09:47:39 +01:00
ed  ece08b8179 create ~/.config if /tmp is readonly 2021-12-06 02:02:44 +01:00
ed  b8945ae233 fix tests and readme 2021-12-04 18:52:14 +01:00
ed  dcaf7b0a20 v1.1.5 2021-12-04 03:33:57 +01:00
ed  f982cdc178 spa gridview 2021-12-04 03:31:12 +01:00
ed  b265e59834 spa filetab 2021-12-04 03:25:28 +01:00
ed  4a843a6624 unflicker navpane + add client state escape hatch 2021-12-04 02:46:00 +01:00
ed  241ef5b99d preserve mtimes when juggling symlinks 2021-12-04 01:58:04 +01:00
ed  f39f575a9c sort-order indicators 2021-12-03 23:53:41 +01:00
ed  1521307f1e use preferred sort on initial render, fixes #8 2021-12-03 02:07:08 +01:00
ed  dd122111e6 v1.1.4 2021-11-28 04:22:05 +01:00
ed  00c177fa74 show upload eta in window title 2021-11-28 04:05:16 +01:00
ed  f6c7e49eb8 u2cli: better error messages 2021-11-28 03:38:57 +01:00
ed  1a8dc3d18a add workaround for #7 after all since it was trivial 2021-11-28 00:12:19 +01:00
ed  38a163a09a better dropzone for extremely slow browsers 2021-11-28 00:11:21 +01:00
ed  8f031246d2 disable windows quickedit to avoid accidental lockups 2021-11-27 21:43:19 +01:00
ed  8f3d97dde7 indicate onclick action for audio files in grid view 2021-11-24 22:10:59 +01:00
ed  4acaf24d65 remember if media controls were open or not 2021-11-24 21:49:41 +01:00
ed  9a8dbbbcf8 another accesskey fix 2021-11-22 21:57:29 +01:00
ed  a3efc4c726 encode quoted queries into raw 2021-11-22 21:53:23 +01:00
ed  0278bf328f support raw-queries with quotes 2021-11-22 20:59:07 +01:00
ed  17ddd96cc6 up2k list wasnt centered anymore 2021-11-21 22:44:11 +01:00
ed  0e82e79aea mention the eq fixing gapless albums 2021-11-20 19:33:56 +01:00
ed  30f124c061 fix forcing compression levels 2021-11-20 18:51:15 +01:00
ed  e19d90fcfc add missing examples 2021-11-20 18:50:55 +01:00
ed  184bbdd23d legalese rephrasing 2021-11-20 17:58:37 +01:00
ed  30b50aec95 mention mtp readme 2021-11-20 17:51:49 +01:00
ed  c3c3d81db1 add mtp plugin for exif stripping 2021-11-20 17:45:56 +01:00
ed  49b7231283 fix mojibake support in misc mtp plugins 2021-11-20 17:33:24 +01:00
ed  edbedcdad3 v1.1.3 2021-11-20 02:27:09 +01:00
ed  e4ae5f74e6 add tooltip indicator 2021-11-20 01:47:16 +01:00
ed  2c7ffe08d7 include sha512 as both hex and b64 in responses 2021-11-20 01:03:32 +01:00
ed  3ca46bae46 good oneliner 2021-11-20 00:20:34 +01:00
ed  7e82aaf843 simplify/improve up2k ui debounce 2021-11-20 00:03:15 +01:00
ed  315bd71adf limit turbo runahead 2021-11-20 00:01:14 +01:00
ed  2c612c9aeb ux 2021-11-19 21:31:05 +01:00
ed  36aee085f7 add timeouts to FFmpeg things 2021-11-16 22:22:09 +01:00
ed  d01bb69a9c u2cli: option to ignore inaccessible files 2021-11-16 21:53:00 +01:00
ed  c9b1c48c72 sizelimit registry + persist without e2d 2021-11-16 21:31:24 +01:00
ed  aea3843cf2 this is just noise 2021-11-16 21:28:50 +01:00
ed  131b6f4b9a workaround chrome rendering bug 2021-11-16 21:28:36 +01:00
ed  6efb8b735a better handling of python builds without sqlite3 2021-11-16 01:13:04 +01:00
ed  223b7af2ce more iOS jank 2021-11-16 00:05:35 +01:00
ed  e72c2a6982 add fastpath for using the eq as a pure gain control 2021-11-15 23:19:43 +01:00
ed  dd9b93970e autoenable aac transcoding when codec missing 2021-11-15 23:18:52 +01:00
ed  e4c7cd81a9 update readme 2021-11-15 20:28:53 +01:00
ed  12b3a62586 fix dumb mistakes 2021-11-15 20:13:16 +01:00
ed  2da3bdcd47 delay tooltips, fix #6 2021-11-15 03:56:17 +01:00
ed  c1dccbe0ba trick iphones into preloading natively 2021-11-15 03:01:11 +01:00
ed  9629fcde68 optionally enable seeking through os controls 2021-11-15 02:47:42 +01:00
ed  cae436b566 add client-option to disconnect on HTTP 304 2021-11-15 02:45:18 +01:00
ed  01714700ae more gapless fixes 2021-11-14 20:25:28 +01:00
ed  51e6c4852b retire ogvjs 2021-11-14 19:28:44 +01:00
ed  b206c5d64e handle multiple simultaneous uploads of the same file 2021-11-14 15:03:11 +01:00
ed  62c3272351 add option to simulate latency 2021-11-14 15:01:20 +01:00
ed  c5d822c70a v1.1.2 2021-11-12 23:08:24 +01:00
ed  9c09b4061a prefer fpool on linux as well 2021-11-12 22:57:36 +01:00
ed  c26fb43ced more cleanup 2021-11-12 22:30:23 +01:00
ed  deb8f20db6 misc cleanup/unjank 2021-11-12 20:48:26 +01:00
ed  50e18ed8ff fix up2k layout in readonly folders 2021-11-12 19:18:52 +01:00
ed  31f3895f40 close misc views on escape 2021-11-12 19:18:29 +01:00
ed  615929268a cache monet 2021-11-12 02:00:44 +01:00
ed  b8b15814cf add traffic shaping, bump speeds on https/windows 2021-11-12 01:34:56 +01:00
ed  7766fffe83 mostly fix ogvjs preloading 2021-11-12 01:09:01 +01:00
ed  2a16c150d1 general preload improvements 2021-11-12 01:04:31 +01:00
ed  418c2166cc add cursed doubleclick-handler in gridsel mode 2021-11-11 01:03:14 +01:00
ed  a4dd44f648 textviewer initiable through hotkeys 2021-11-11 00:18:34 +01:00
ed  5352f7cda7 fix ctrl-a fencing in codeblocks 2021-11-11 00:11:29 +01:00
ed  5533b47099 handle crc collisions 2021-11-10 23:59:07 +01:00
ed  e9b14464ee terminate preloader if it can't finish in time 2021-11-10 22:53:02 +01:00
ed  4e986e5cd1 xhr preload is not gapless 2021-11-10 22:00:24 +01:00
ed  8a59b40c53 better clientside upload dedup 2021-11-10 20:57:45 +01:00
81 changed files with 5096 additions and 1825 deletions

README.md

@@ -16,6 +16,13 @@ turn your phone or raspi into a portable file server with resumable uploads/down
📷 **screenshots:** [browser](#the-browser) // [upload](#uploading) // [unpost](#unpost) // [thumbnails](#thumbnails) // [search](#searching) // [fsearch](#file-search) // [zip-DL](#zip-downloads) // [md-viewer](#markdown-viewer) // [ie4](#browser-support)
## get the app
<a href="https://f-droid.org/packages/me.ocv.partyup/"><img src="https://ocv.me/fdroid.png" alt="Get it on F-Droid" height="50" /> '' <img src="https://img.shields.io/f-droid/v/me.ocv.partyup.svg" alt="f-droid version info" /></a> '' <a href="https://github.com/9001/party-up"><img src="https://img.shields.io/github/release/9001/party-up.svg?logo=github" alt="github version info" /></a>
(the app is **NOT** the full copyparty server! just a basic upload client, nothing fancy yet)
## readme toc
* top
@@ -47,13 +54,15 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [other tricks](#other-tricks)
* [searching](#searching) - search by size, date, path/name, mp3-tags, ...
* [server config](#server-config) - using arguments or config files, or a mix of both
* [ftp-server](#ftp-server) - an FTP server can be started using `--ftp 3921`
* [file indexing](#file-indexing)
* [upload rules](#upload-rules) - set upload rules using volume flags
* [compress uploads](#compress-uploads) - files can be autocompressed on upload
* [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
* [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags
* [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags, also see [./bin/mtag/README.md](./bin/mtag/README.md)
* [upload events](#upload-events) - trigger a script/program on each upload
* [hiding from google](#hiding-from-google) - tell search engines you dont wanna be indexed
* [complete examples](#complete-examples)
* [browser support](#browser-support) - TLDR: yes
* [client examples](#client-examples) - interact with copyparty using non-browser clients
@@ -75,9 +84,10 @@ turn your phone or raspi into a portable file server with resumable uploads/down
* [optional dependencies](#optional-dependencies) - install these to enable bonus features
* [install recommended deps](#install-recommended-deps)
* [optional gpl stuff](#optional-gpl-stuff)
* [sfx](#sfx) - there are two self-contained "binaries"
* [sfx](#sfx) - the self-contained "binary"
* [sfx repack](#sfx-repack) - reduce the size of an sfx by removing features
* [install on android](#install-on-android)
* [reporting bugs](#reporting-bugs) - ideas for context to include in bug reports
* [building](#building)
* [dev env setup](#dev-env-setup)
* [just the sfx](#just-the-sfx)
@@ -146,6 +156,7 @@ feature summary
* ☑ multiprocessing (actual multithreading)
* ☑ volumes (mountpoints)
* ☑ [accounts](#accounts-and-volumes)
* ☑ [ftp-server](#ftp-server)
* upload
* ☑ basic: plain multipart, ie6 support
* ☑ [up2k](#uploading): js, resumable, multithreaded
@@ -161,12 +172,13 @@ feature summary
* ☑ file manager (cut/paste, delete, [batch-rename](#batch-rename))
* ☑ audio player (with OS media controls and opus transcoding)
* ☑ image gallery with webm player
* ☑ textfile browser with syntax hilighting
* ☑ [thumbnails](#thumbnails)
* ☑ ...of images using Pillow
* ☑ ...of images using Pillow, pyvips, or FFmpeg
* ☑ ...of videos using FFmpeg
* ☑ ...of audio (spectrograms) using FFmpeg
* ☑ cache eviction (max-age; maybe max-size eventually)
* ☑ SPA (browse while uploading)
* if you use the navpane to navigate, not folders in the file list
* server indexing
* ☑ [locate files by contents](#file-search)
* ☑ search by name/path/date/size
@@ -228,11 +240,17 @@ some improvement ideas
## general bugs
* Windows: if the up2k db is on a samba-share or network disk, you'll get unpredictable behavior if the share is disconnected for a bit
* use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db on a local disk instead
* all volumes must exist / be available on startup; up2k (mtp especially) gets funky otherwise
* probably more, pls let me know
## not my bugs
* iPhones: the volume control doesn't work because [apple doesn't want it to](https://developer.apple.com/library/archive/documentation/AudioVideo/Conceptual/Using_HTML5_Audio_Video/Device-SpecificConsiderations/Device-SpecificConsiderations.html#//apple_ref/doc/uid/TP40009523-CH5-SW11)
* *future workaround:* enable the equalizer, make it all-zero, and set a negative boost to reduce the volume
* "future" because `AudioContext` is broken in the current iOS version (15.1), maybe one day...
* Windows: folders cannot be accessed if the name ends with `.`
* python or windows bug
@@ -249,6 +267,7 @@ some improvement ideas
* is it possible to block read-access to folders unless you know the exact URL for a particular file inside?
* yes, using the [`g` permission](#accounts-and-volumes), see the examples there
* you can also do this with linux filesystem permissions; `chmod 111 music` will make it possible to access files and folders inside the `music` folder but not list the immediate contents -- also works with other software, not just copyparty
* can I make copyparty download a file to my server if I give it a URL?
* not officially, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible
@@ -256,7 +275,10 @@ some improvement ideas
# accounts and volumes
per-folder, per-user permissions
per-folder, per-user permissions - if your setup is getting complex, consider making a [config file](./docs/example.conf) instead of using arguments
* much easier to manage, and you can modify the config at runtime with `systemctl reload copyparty` or more conveniently using the `[reload cfg]` button in the control-panel (if logged in as admin)
configuring accounts/volumes with arguments:
* `-a usr:pwd` adds account `usr` with password `pwd`
* `-v .::r` adds current-folder `.` as the webroot, `r`eadable by anyone
* the syntax is `-v src:dst:perm:perm:...` so local-path, url-path, and one or more permissions to set
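putting those pieces together, a minimal sketch (the account name, password, and paths are placeholders; the `rwmd,ed` permission string follows the pattern used in the image-noexif example elsewhere in this diff):

```sh
# account "ed" with password "wark";
# /mnt/nas/pics served at /pics, readable by anyone,
# read-write-modify-delete for ed
python3 copyparty-sfx.py -a ed:wark -v /mnt/nas/pics:pics:r:rwmd,ed
```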
@@ -314,6 +336,7 @@ the browser has the following hotkeys (always qwerty)
* `V` toggle folders / textfiles in the navpane
* `G` toggle list / [grid view](#thumbnails)
* `T` toggle thumbnails / icons
* `ESC` close various things
* `ctrl-X` cut selected files/folders
* `ctrl-V` paste
* `F2` [rename](#batch-rename) selected file/folder
@@ -365,9 +388,13 @@ switching between breadcrumbs or navpane
click the `🌲` or press the `B` hotkey to toggle between the breadcrumbs path (default) and a navpane (tree-browser sidebar thing)
* `[-]` and `[+]` (or hotkeys `A`/`D`) adjust the size
* `[v]` jumps to the currently open folder
* `[+]` and `[-]` (or hotkeys `A`/`D`) adjust the size
* `[🎯]` jumps to the currently open folder
* `[📃]` toggles between showing folders and textfiles
* `[📌]` shows the name of all parent folders in a docked panel
* `[a]` toggles automatic widening as you go deeper
* `[↵]` toggles wordwrap
* `[👀]` show full name on hover (if wordwrap is off)
## thumbnails
@@ -376,13 +403,16 @@ press `g` to toggle grid-view instead of the file listing, and `t` toggles icon
![copyparty-thumbs-fs8](https://user-images.githubusercontent.com/241032/129636211-abd20fa2-a953-4366-9423-1c88ebb96ba9.png)
it does static images with Pillow and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
it does static images with Pillow / pyvips / FFmpeg, and uses FFmpeg for video files, so you may want to `--no-thumb` or maybe just `--no-vthumb` depending on how dangerous your users are
* pyvips is 3x faster than Pillow, Pillow is 3x faster than FFmpeg
* disable thumbnails for specific volumes with volflag `dthumb` for all, or `dvthumb` / `dathumb` / `dithumb` for video/audio/images only
audio files are converted into spectrograms using FFmpeg unless you `--no-athumb` (and some FFmpeg builds may need `--th-ff-swr`)
images with the following names (see `--th-covers`) become the thumbnail of the folder they're in: `folder.png`, `folder.jpg`, `cover.png`, `cover.jpg`
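a hedged example of the thumbnail switches mentioned above (the volume path and name are made up):

```sh
# no audio spectrograms anywhere, and no video thumbnails
# in the "camera" volume specifically (dvthumb volflag)
python3 copyparty-sfx.py --no-athumb -v /mnt/cam:camera:r:c,dvthumb
```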
in the grid/thumbnail view, if the audio player panel is open, songs will start playing when clicked
* indicated by the audio files having the ▶ icon instead of 💾
## zip downloads
@@ -433,7 +463,7 @@ see [up2k](#up2k) for details on how it works
![copyparty-upload-fs8](https://user-images.githubusercontent.com/241032/129635371-48fc54ca-fa91-48e3-9b1d-ba413e4b68cb.png)
**protip:** you can avoid scaring away users with [docs/minimal-up2k.html](docs/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
**protip:** you can avoid scaring away users with [contrib/plugins/minimal-up2k.html](contrib/plugins/minimal-up2k.html) which makes it look [much simpler](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
**protip:** if you enable `favicon` in the `[⚙️] settings` tab (by typing something into the textbox), the icon in the browser tab will indicate upload progress
@@ -473,8 +503,6 @@ the files will be hashed on the client-side, and each hash is sent to the server
files go into `[ok]` if they exist (and you get a link to where it is), otherwise they land in `[ng]`
* the main reason filesearch is combined with the uploader is cause the code was too spaghetti to separate it out somewhere else, this is no longer the case but now i've warmed up to the idea too much
adding the same file multiple times is blocked, so if you first search for a file and then decide to upload it, you have to click the `[cleanup]` button to discard `[done]` files (or just refresh the page)
### unpost
@@ -562,6 +590,8 @@ and there are *two* editors
* you can link a particular timestamp in an audio file by adding it to the URL, such as `&20` / `&20s` / `&1m20` / `&t=1:20` after the `.../#af-c8960dab`
* enabling the audio equalizer can help make gapless albums fully gapless in some browsers (chrome), so consider leaving it on with all the values at zero
* get a plaintext file listing by adding `?ls=t` to a URL, or a compact colored one with `?ls=v` (for unix terminals)
* if you are using media hotkeys to switch songs and are getting tired of seeing the OSD popup which Windows doesn't let you disable, consider https://ocv.me/dev/?media-osd-bgone.ps1
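for the `?ls` tip above, a quick sketch with curl (host, folder, and password are placeholders):

```sh
# plaintext file listing
curl 'http://127.0.0.1:3923/music/?ls=t&pw=hunter2'
# compact colored listing for unix terminals
curl 'http://127.0.0.1:3923/music/?ls=v&pw=hunter2'
```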
@@ -594,6 +624,20 @@ add the argument `-e2ts` to also scan/index tags from music files, which brings
using arguments or config files, or a mix of both:
* config files (`-c some.conf`) can set additional commandline arguments; see [./docs/example.conf](docs/example.conf)
* `kill -s USR1` (same as `systemctl reload copyparty`) to reload accounts and volumes from config files without restarting
* or click the `[reload cfg]` button in the control-panel when logged in as admin
## ftp-server
an FTP server can be started using `--ftp 3921`, and/or `--ftps` for explicit TLS (ftpes)
* based on [pyftpdlib](https://github.com/giampaolo/pyftpdlib)
* needs a dedicated port (cannot share with the HTTP/HTTPS API)
* uploads are not resumable -- delete and restart if necessary
* runs in active mode by default, you probably want `--ftp-pr 12000-13000`
* if you enable both `ftp` and `ftps`, the port-range will be divided in half
* some older software (filezilla on debian-stable) cannot passive-mode with TLS
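a rough launch sketch based on the flags above (port numbers are illustrative, and `--ftps` is assumed to take a port the same way `--ftp` does):

```sh
# plaintext FTP on 3921, explicit-TLS FTPS on 3990,
# plus a passive-mode port range (split between them)
python3 copyparty-sfx.py --ftp 3921 --ftps 3990 --ftp-pr 12000-13000
```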
## file indexing
@@ -608,10 +652,12 @@ through arguments:
* `-e2ts` also scans for tags in all files that don't have tags yet
* `-e2tsr` also deletes all existing tags, doing a full reindex
the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
the same arguments can be set as volume flags, in addition to `d2d`, `d2ds`, `d2t`, `d2ts` for disabling:
* `-v ~/music::r:c,e2dsa,e2tsr` does a full reindex of everything on startup
* `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*`
* `-v ~/music::r:c,d2ds` disables on-boot scans; only index new uploads
* `-v ~/music::r:c,d2ts` same except only affecting tags
note:
* the parser can finally handle `c,e2dsa,e2tsr` so you no longer have to `c,e2dsa:c,e2tsr`
@@ -632,7 +678,7 @@ if you set `--no-hash [...]` globally, you can enable hashing for specific volum
set upload rules using volume flags, some examples:
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: b, k, m, g)
* `:c,sz=1k-3m` sets allowed filesize between 1 KiB and 3 MiB inclusive (suffixes: `b`, `k`, `m`, `g`)
* `:c,nosub` disallow uploading into subdirectories; goes well with `rotn` and `rotf`:
* `:c,rotn=1000,2` moves uploads into subfolders, up to 1000 files in each folder before making a new one, two levels deep (must be at least 1)
* `:c,rotf=%Y/%m/%d/%H` enforces files to be uploaded into a structure of subfolders according to that date format
@@ -666,6 +712,12 @@ things to note,
* the files will be indexed after compression, so dupe-detection and file-search will not work as expected
some examples,
* `-v inc:inc:w:c,pk=xz,0`
folder named inc, shared at inc, write-only for everyone, forces xz compression at level 0
* `-v inc:inc:w:c,pk`
same write-only inc, but forces gz compression (default) instead of xz
* `-v inc:inc:w:c,gz`
allows (but does not force) gz compression if client uploads to `/inc?pk` or `/inc?gz` or `/inc?gz=4`
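to illustrate that last example from the client side, a hypothetical upload that opts into gz level 4 (this assumes the `?gz` query can be combined with the `bput` multipart upload shown under client-examples further down):

```sh
# upload file.bin into the write-only "inc" volume, requesting gz level 4
curl -F act=bput -F f=@file.bin 'http://127.0.0.1:3923/inc?gz=4'
```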
## database location
@@ -710,7 +762,7 @@ see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copy
## file parser plugins
provide custom parsers to index additional tags
provide custom parsers to index additional tags, also see [./bin/mtag/README.md](./bin/mtag/README.md)
copyparty can invoke external programs to collect additional metadata for files using `mtp` (either as argument or volume flag), there is a default timeout of 30sec
@@ -743,6 +795,17 @@ and it will occupy the parsing threads, so fork anything expensive, or if you wa
if this becomes popular maybe there should be a less janky way to do it actually
## hiding from google
tell search engines you dont wanna be indexed, either using the good old [robots.txt](https://www.robotstxt.org/robotstxt.html) or through copyparty settings:
* `--no-robots` adds HTTP (`X-Robots-Tag`) and HTML (`<meta>`) headers with `noindex, nofollow` globally
* volume-flag `[...]:c,norobots` does the same thing for that single volume
* volume-flag `[...]:c,robots` ALLOWS search-engine crawling for that volume, even if `--no-robots` is set globally
also, `--force-js` disables the plain HTML folder listing, making things harder to parse for search engines
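combining those switches, a hedged example (paths and volume names are made up):

```sh
# send noindex headers everywhere, but explicitly allow crawling of /pub
python3 copyparty-sfx.py --no-robots -v /mnt/pub:pub:r:c,robots
```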
## complete examples
* read-only music server with bpm and key scanning
@@ -781,7 +844,7 @@ TLDR: yes
* internet explorer 6 to 8 behave the same
* firefox 52 and chrome 49 are the final winxp versions
* `*1` yes, but extremely slow (ie10: `1 MiB/s`, ie11: `270 KiB/s`)
* `*3` using a wasm decoder which consumes a bit more power
* `*3` iOS 11 and newer, opus only, and requires FFmpeg on the server
quick summary of more eccentric web-browsers trying to view a directory index:
@@ -801,8 +864,8 @@ quick summary of more eccentric web-browsers trying to view a directory index:
interact with copyparty using non-browser clients
* javascript: dump some state into a file (two separate examples)
* `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
* `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
* `await fetch('//127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});`
* `var xhr = new XMLHttpRequest(); xhr.open('POST', '//127.0.0.1:3923/msgs?raw'); xhr.send('foo');`
* curl/wget: upload some files (post=file, chunk=stdin)
* `post(){ curl -b cppwd=wark -F act=bput -F f=@"$1" http://127.0.0.1:3923/;}`
@@ -831,7 +894,7 @@ copyparty returns a truncated sha512sum of your PUT/POST as base64; you can gene
b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|tr '+/' '-_'|head -c44;}
b512 <movie.mkv
you can provide passwords using cookie 'cppwd=hunter2', as a url query `?pw=hunter2`, or with basic-authentication (either as the username or password)
you can provide passwords using cookie `cppwd=hunter2`, as a url query `?pw=hunter2`, or with basic-authentication (either as the username or password)
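the three password mechanisms above, sketched with curl (password and URL are placeholders):

```sh
curl -b cppwd=hunter2 http://127.0.0.1:3923/     # cookie
curl 'http://127.0.0.1:3923/?pw=hunter2'         # url query
curl -u :hunter2 http://127.0.0.1:3923/          # basic-auth, password field
```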
# up2k
@@ -970,6 +1033,7 @@ authenticate using header `Cookie: cppwd=foo` or url param `&pw=foo`
| GET | `?txt=iso-8859-1` | ...with specific charset |
| GET | `?th` | get image/video at URL as thumbnail |
| GET | `?th=opus` | convert audio file to 128kbps opus |
| GET | `?th=caf` | ...in the iOS-proprietary container |
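an informal sketch of the `?th` endpoints (paths, output filenames, and the `&pw` auth value are placeholders; the image format returned by `?th` is whatever thumbnail type the server picked):

```sh
# thumbnail of an image or video
curl -o thumb.img 'http://127.0.0.1:3923/pics/cat.jpg?th&pw=hunter2'
# transcode an audio file to 128kbps opus
curl -o song.opus 'http://127.0.0.1:3923/music/song.flac?th=opus&pw=hunter2'
```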
| method | body | result |
|--|--|--|
@@ -1025,15 +1089,22 @@ mandatory deps:
install these to enable bonus features
enable ftp-server:
* for just plaintext FTP, `pyftpdlib` (is built into the SFX)
* with TLS encryption, `pyftpdlib pyopenssl`
enable music tags:
* either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk)
* or `ffprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users)
enable [thumbnails](#thumbnails) of...
* **images:** `Pillow` (requires py2.7 or py3.5+)
* **images:** `Pillow` and/or `pyvips` and/or `ffmpeg` (requires py2.7 or py3.5+)
* **videos/audio:** `ffmpeg` and `ffprobe` somewhere in `$PATH`
* **HEIF pictures:** `pyheif-pillow-opener` (requires Linux or a C compiler)
* **AVIF pictures:** `pillow-avif-plugin`
* **HEIF pictures:** `pyvips` or `ffmpeg` or `pyheif-pillow-opener` (requires Linux or a C compiler)
* **AVIF pictures:** `pyvips` or `ffmpeg` or `pillow-avif-plugin`
* **JPEG XL pictures:** `pyvips` or `ffmpeg`
`pyvips` gives higher quality thumbnails than `Pillow` and is 320% faster, using 270% more ram: `sudo apt install libvips42 && python3 -m pip install --user -U pyvips`
## install recommended deps
@@ -1051,13 +1122,7 @@ these are standalone programs and will never be imported / evaluated by copypart
# sfx
there are two self-contained "binaries":
* [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) -- pure python, works everywhere, **recommended**
* [copyparty-sfx.sh](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.sh) -- smaller, but only for linux and macos, kinda deprecated
launch either of them (**use sfx.py on systemd**) and it'll unpack and run copyparty, assuming you have python installed of course
pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `copyparty.py` and keep it in the same folder because `sys.path` is funky
the self-contained "binary" [copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) will unpack itself and run copyparty, assuming you have python installed of course
## sfx repack
@@ -1065,13 +1130,11 @@ pls note that `copyparty-sfx.sh` will fail if you rename `copyparty-sfx.py` to `
reduce the size of an sfx by removing features
if you don't need all the features, you can repack the sfx and save a bunch of space; all you need is an sfx and a copy of this repo (nothing else to download or build, except if you're on windows then you need msys2 or WSL)
* `584k` size of original sfx.py as of v1.1.0
* `392k` after `./scripts/make-sfx.sh re no-ogv`
* `310k` after `./scripts/make-sfx.sh re no-ogv no-cm`
* `269k` after `./scripts/make-sfx.sh re no-ogv no-cm no-hl`
* `393k` size of original sfx.py as of v1.1.3
* `310k` after `./scripts/make-sfx.sh re no-cm`
* `269k` after `./scripts/make-sfx.sh re no-cm no-hl`
the features you can opt to drop are
* `ogv`.js, the opus/vorbis decoder which is needed by apple devices to play foss audio files, saves ~192k
* `cm`/easymde, the "fancy" markdown editor, saves ~82k
* `hl`, prism, the syntax hilighter, saves ~41k
* `fnt`, source-code-pro, the monospace font, saves ~9k
@@ -1079,7 +1142,7 @@ the features you can opt to drop are
for the `re`pack to work, first run one of the sfx'es once to unpack it
**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a `no-ogv no-cm` repack; works on linux/macos (and windows with msys2 or WSL)
**note:** you can also just download and run [scripts/copyparty-repack.sh](scripts/copyparty-repack.sh) -- this will grab the latest copyparty release from github and do a few repacks; works on linux/macos (and windows with msys2 or WSL)
# install on android
@@ -1093,6 +1156,16 @@ echo $?
after the initial setup, you can launch copyparty at any time by running `copyparty` anywhere in Termux
# reporting bugs
ideas for context to include in bug reports
if something broke during an upload (replacing FILENAME with a part of the filename that broke):
```
journalctl -aS '48 hour ago' -u copyparty | grep -C10 FILENAME | tee bug.log
```
# building
## dev env setup
@@ -1124,8 +1197,8 @@ mv /tmp/pe-copyparty/copyparty/web/deps/ copyparty/web/deps/
then build the sfx using any of the following examples:
```sh
./scripts/make-sfx.sh # both python and sh editions
./scripts/make-sfx.sh no-sh gz # just python with gzip
./scripts/make-sfx.sh # regular edition
./scripts/make-sfx.sh gz no-cm # gzip-compressed + no fancy markdown editor
```

bin/README.md

@@ -2,9 +2,14 @@
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
* file uploads, file-search, autoresume of aborted/broken uploads
* faster than browsers
* early beta, if something breaks just restart it
* if something breaks just restart it
# [`partyjournal.py`](partyjournal.py)
produces a chronological list of all uploads by collecting info from up2k databases and the filesystem
* outputs a standalone html file
* optional mapping from IP-addresses to nicknames
# [`copyparty-fuse.py`](copyparty-fuse.py)
* mount a copyparty server as a local filesystem (read-only)

bin/copyparty-fuseb.py

@@ -11,14 +11,18 @@ import re
import os
import sys
import time
import json
import stat
import errno
import struct
import codecs
import platform
import threading
import http.client # py2: httplib
import urllib.parse
from datetime import datetime
from urllib.parse import quote_from_bytes as quote
from urllib.parse import unquote_to_bytes as unquote
try:
import fuse
@@ -38,7 +42,7 @@ except:
mount a copyparty server (local or remote) as a filesystem
usage:
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas
dependencies:
sudo apk add fuse-dev python3-dev
@@ -50,6 +54,10 @@ fork of copyparty-fuse.py based on fuse-python which
"""
WINDOWS = sys.platform == "win32"
MACOS = platform.system() == "Darwin"
def threadless_log(msg):
print(msg + "\n", end="")
@@ -93,6 +101,41 @@ def html_dec(txt):
)
def register_wtf8():
def wtf8_enc(text):
return str(text).encode("utf-8", "surrogateescape"), len(text)
def wtf8_dec(binary):
return bytes(binary).decode("utf-8", "surrogateescape"), len(binary)
def wtf8_search(encoding_name):
return codecs.CodecInfo(wtf8_enc, wtf8_dec, name="wtf-8")
codecs.register(wtf8_search)
bad_good = {}
good_bad = {}
def enwin(txt):
return "".join([bad_good.get(x, x) for x in txt])
for bad, good in bad_good.items():
txt = txt.replace(bad, good)
return txt
def dewin(txt):
return "".join([good_bad.get(x, x) for x in txt])
for bad, good in bad_good.items():
txt = txt.replace(good, bad)
return txt
class CacheNode(object):
def __init__(self, tag, data):
self.tag = tag
@@ -115,8 +158,9 @@ class Stat(fuse.Stat):
class Gateway(object):
def __init__(self, base_url):
def __init__(self, base_url, pw):
self.base_url = base_url
self.pw = pw
ui = urllib.parse.urlparse(base_url)
self.web_root = ui.path.strip("/")
@@ -135,8 +179,7 @@ class Gateway(object):
self.conns = {}
def quotep(self, path):
# TODO: mojibake support
path = path.encode("utf-8", "ignore")
path = path.encode("wtf-8")
return quote(path, safe="/")
def getconn(self, tid=None):
@@ -159,20 +202,29 @@ class Gateway(object):
except:
pass
def sendreq(self, *args, **kwargs):
def sendreq(self, *args, **ka):
tid = get_tid()
if self.pw:
ck = "cppwd=" + self.pw
try:
ka["headers"]["Cookie"] = ck
except:
ka["headers"] = {"Cookie": ck}
try:
c = self.getconn(tid)
c.request(*list(args), **kwargs)
c.request(*list(args), **ka)
return c.getresponse()
except:
self.closeconn(tid)
c = self.getconn(tid)
c.request(*list(args), **kwargs)
c.request(*list(args), **ka)
return c.getresponse()
def listdir(self, path):
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
if bad_good:
path = dewin(path)
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots&ls"
r = self.sendreq("GET", web_path)
if r.status != 200:
self.closeconn()
@@ -182,9 +234,12 @@ class Gateway(object):
)
)
return self.parse_html(r)
return self.parse_jls(r)
def download_file_range(self, path, ofs1, ofs2):
if bad_good:
path = dewin(path)
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
log("downloading {}".format(hdr_range))
@@ -200,40 +255,27 @@ class Gateway(object):
return r.read()
def parse_html(self, datasrc):
ret = []
remainder = b""
ptn = re.compile(
r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
)
def parse_jls(self, datasrc):
rsp = b""
while True:
buf = remainder + datasrc.read(4096)
# print('[{}]'.format(buf.decode('utf-8')))
buf = datasrc.read(1024 * 32)
if not buf:
break
remainder = b""
endpos = buf.rfind(b"\n")
if endpos >= 0:
remainder = buf[endpos + 1 :]
buf = buf[:endpos]
rsp += buf
lines = buf.decode("utf-8").split("\n")
for line in lines:
m = ptn.match(line)
if not m:
# print(line)
continue
rsp = json.loads(rsp.decode("utf-8"))
ret = []
for statfun, nodes in [
[self.stat_dir, rsp["dirs"]],
[self.stat_file, rsp["files"]],
]:
for n in nodes:
fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8")
if bad_good:
fname = enwin(fname)
ftype, fname, fsize, fdate = m.groups()
fname = html_dec(fname)
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
sz = int(fsize)
if ftype == "-":
ret.append([fname, self.stat_file(ts, sz), 0])
else:
ret.append([fname, self.stat_dir(ts, sz), 0])
ret.append([fname, statfun(n["ts"], n["sz"]), 0])
return ret
@@ -262,6 +304,7 @@ class CPPF(Fuse):
Fuse.__init__(self, *args, **kwargs)
self.url = None
self.pw = None
self.dircache = []
self.dircache_mtx = threading.Lock()
@@ -271,7 +314,7 @@ class CPPF(Fuse):
def init2(self):
# TODO figure out how python-fuse wanted this to go
self.gw = Gateway(self.url) # .decode('utf-8'))
self.gw = Gateway(self.url, self.pw) # .decode('utf-8'))
info("up")
def clean_dircache(self):
@@ -536,6 +579,8 @@ class CPPF(Fuse):
def getattr(self, path):
log("getattr [{}]".format(path))
if WINDOWS:
path = enwin(path) # windows occasionally decodes f0xx to xx
path = path.strip("/")
try:
@@ -568,9 +613,25 @@ class CPPF(Fuse):
def main():
time.strptime("19970815", "%Y%m%d") # python#7980
register_wtf8()
if WINDOWS:
os.system("rem")
for ch in '<>:"\\|?*':
# microsoft maps illegal characters to f0xx
# (e000 to f8ff is basic-plane private-use)
bad_good[ch] = chr(ord(ch) + 0xF000)
for n in range(0, 0x100):
# map surrogateescape to another private-use area
bad_good[chr(n + 0xDC00)] = chr(n + 0xF100)
for k, v in bad_good.items():
good_bad[v] = k
server = CPPF()
server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
server.parser.add_option(mountopt="pw", metavar="PASSWORD", default=None)
server.parse(values=server, errex=1)
if not server.url or not str(server.url).startswith("http"):
print("\nerror:")
@@ -578,7 +639,7 @@ def main():
print(" need argument: mount-path")
print("example:")
print(
" ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas"
" ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas"
)
sys.exit(1)

bin/mtag/README.md

@@ -6,9 +6,13 @@ some of these rely on libraries which are not MIT-compatible
* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3
* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
these do not have any problematic dependencies:
these invoke standalone programs which are GPL or similar, which is legally fine for most purposes:
* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
* [image-noexif.py](./image-noexif.py) removes exif tags from images; uses exiftool (GPLv1 or artistic-license)
these do not have any problematic dependencies at all:
* [cksum.py](./cksum.py) computes various checksums
* [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)

bin/mtag/audio-bpm.py

@@ -19,18 +19,18 @@ dep: ffmpeg
def det(tf):
# fmt: off
sp.check_call([
"ffmpeg",
"-nostdin",
"-hide_banner",
"-v", "fatal",
"-ss", "13",
"-y", "-i", fsenc(sys.argv[1]),
"-map", "0:a:0",
"-ac", "1",
"-ar", "22050",
"-t", "300",
"-f", "f32le",
tf
b"ffmpeg",
b"-nostdin",
b"-hide_banner",
b"-v", b"fatal",
b"-ss", b"13",
b"-y", b"-i", fsenc(sys.argv[1]),
b"-map", b"0:a:0",
b"-ac", b"1",
b"-ar", b"22050",
b"-t", b"300",
b"-f", b"f32le",
fsenc(tf)
])
# fmt: on

bin/mtag/audio-key.py

@@ -23,15 +23,15 @@ dep: ffmpeg
def det(tf):
# fmt: off
sp.check_call([
"ffmpeg",
"-nostdin",
"-hide_banner",
"-v", "fatal",
"-y", "-i", fsenc(sys.argv[1]),
"-map", "0:a:0",
"-t", "300",
"-sample_fmt", "s16",
tf
b"ffmpeg",
b"-nostdin",
b"-hide_banner",
b"-v", b"fatal",
b"-y", b"-i", fsenc(sys.argv[1]),
b"-map", b"0:a:0",
b"-t", b"300",
b"-sample_fmt", b"s16",
fsenc(tf)
])
# fmt: on

bin/mtag/image-noexif.py (new file)

@@ -0,0 +1,93 @@
#!/usr/bin/env python3
"""
remove exif tags from uploaded images
dependencies:
exiftool
about:
creates a "noexif" subfolder and puts exif-stripped copies of each image there,
the reason for the subfolder is to avoid issues with the up2k.db / deduplication:
if the original image is modified in-place, then copyparty will keep the original
hash in up2k.db for a while (until the next volume rescan), so if the image is
reuploaded after a rescan then the upload will be renamed and kept as a dupe
alternatively you could switch the logic around, making a copy of the original
image into a subfolder named "exif" and modify the original in-place, but then
up2k.db will be out of sync until the next rescan, so any additional uploads
of the same image will get symlinked (deduplicated) to the modified copy
instead of the original in "exif"
or maybe delete the original image after processing, that would kinda work too
example copyparty config to use this:
-v/mnt/nas/pics:pics:rwmd,ed:c,e2ts,mte=+noexif:c,mtp=noexif=ejpg,ejpeg,ad,bin/mtag/image-noexif.py
explained:
for realpath /mnt/nas/pics (served at /pics) with read-write-modify-delete for ed,
enable file analysis on upload (e2ts),
append "noexif" to the list of known tags (mtp),
and use mtp plugin "bin/mtag/image-noexif.py" to provide that tag,
do this on all uploads with the file extension "jpg" or "jpeg",
ad = parse file regardless if FFmpeg thinks it is audio or not
PS: this requires e2ts to be functional,
meaning you need to do at least one of these:
* apt install ffmpeg
* pip3 install mutagen
and your python must have sqlite3 support compiled in
"""
import os
import sys
import time
import filecmp
import subprocess as sp
try:
from copyparty.util import fsenc
except:
def fsenc(p):
return p.encode("utf-8")
def main():
cwd, fn = os.path.split(sys.argv[1])
if os.path.basename(cwd) == "noexif":
return
os.chdir(cwd)
f1 = fsenc(fn)
f2 = os.path.join(b"noexif", f1)
cmd = [
b"exiftool",
b"-exif:all=",
b"-iptc:all=",
b"-xmp:all=",
b"-P",
b"-o",
b"noexif/",
b"--",
f1,
]
sp.check_output(cmd)
if not os.path.exists(f2):
print("failed")
return
if filecmp.cmp(f1, f2, shallow=False):
print("clean")
else:
print("exif")
# lastmod = os.path.getmtime(f1)
# times = (int(time.time()), int(lastmod))
# os.utime(f2, times)
if __name__ == "__main__":
main()

bin/mtag/install-deps.sh

@@ -4,8 +4,8 @@ set -e
# install dependencies for audio-*.py
#
# linux/alpine: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf cmake
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
# linux/alpine: requires gcc g++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-dev py3-{wheel,pip} py3-numpy{,-dev}
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3,libsndfile1}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
# win64: requires msys2-mingw64 environment
# macos: requires macports
#
@@ -101,8 +101,11 @@ export -f dl_files
github_tarball() {
rm -rf g
mkdir g
cd g
dl_text "$1" |
tee json |
tee ../json |
(
# prefer jq if available
jq -r '.tarball_url' ||
@@ -111,8 +114,11 @@ github_tarball() {
awk -F\" '/"tarball_url": "/ {print$4}'
) |
tee /dev/stderr |
head -n 1 |
tr -d '\r' | tr '\n' '\0' |
xargs -0 bash -c 'dl_files "$@"' _
mv * ../tgz
cd ..
}
@@ -127,6 +133,7 @@ gitlab_tarball() {
tr \" '\n' | grep -E '\.tar\.gz$' | head -n 1
) |
tee /dev/stderr |
head -n 1 |
tr -d '\r' | tr '\n' '\0' |
tee links |
xargs -0 bash -c 'dl_files "$@"' _
@@ -138,10 +145,17 @@ install_keyfinder() {
# use msys2 in mingw-w64 mode
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python}
github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest
[ -e $HOME/pe/keyfinder ] && {
echo found a keyfinder build in ~/pe, skipping
return
}
tar -xf mixxxdj-libkeyfinder-*
rm -- *.tar.gz
cd "$td"
github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest
ls -al
tar -xf tgz
rm tgz
cd mixxxdj-libkeyfinder*
h="$HOME"
@@ -208,6 +222,22 @@ install_vamp() {
$pybin -m pip install --user vamp
cd "$td"
echo '#include <vamp-sdk/Plugin.h>' | gcc -x c -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2588/vamp-plugin-sdk-2.9.0.tar.gz)
sha512sum -c <(
echo "7ef7f837d19a08048b059e0da408373a7964ced452b290fae40b85d6d70ca9000bcfb3302cd0b4dc76cf2a848528456f78c1ce1ee0c402228d812bd347b6983b -"
) <vamp-plugin-sdk-2.9.0.tar.gz
tar -xf vamp-plugin-sdk-2.9.0.tar.gz
rm -- *.tar.gz
ls -al
cd vamp-plugin-sdk-*
./configure --prefix=$HOME/pe/vamp-sdk
make -j1 install
}
cd "$td"
have_beatroot || {
printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n'
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/885/beatroot-vamp-v1.0.tar.gz)
@@ -215,8 +245,11 @@ install_vamp() {
echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -"
) <beatroot-vamp-v1.0.tar.gz
tar -xf beatroot-vamp-v1.0.tar.gz
rm -- *.tar.gz
cd beatroot-vamp-v1.0
make -f Makefile.linux -j4
[ -e ~/pe/vamp-sdk ] &&
sed -ri 's`^(CFLAGS :=.*)`\1 -I'$HOME'/pe/vamp-sdk/include`' Makefile.linux
make -f Makefile.linux -j4 LDFLAGS=-L$HOME/pe/vamp-sdk/lib
# /home/ed/vamp /home/ed/.vamp /usr/local/lib/vamp
mkdir ~/vamp
cp -pv beatroot-vamp.* ~/vamp/
@@ -230,6 +263,7 @@ install_vamp() {
# not in use because it kinda segfaults, also no windows support
install_soundtouch() {
cd "$td"
gitlab_tarball https://gitlab.com/api/v4/projects/soundtouch%2Fsoundtouch/releases
tar -xvf soundtouch-*

bin/mtag/media-hash.py

@@ -13,7 +13,7 @@ try:
except:
def fsenc(p):
return p
return p.encode("utf-8")
"""
@@ -24,13 +24,13 @@ dep: ffmpeg
def det():
# fmt: off
cmd = [
"ffmpeg",
"-nostdin",
"-hide_banner",
"-v", "fatal",
"-i", fsenc(sys.argv[1]),
"-f", "framemd5",
"-"
b"ffmpeg",
b"-nostdin",
b"-hide_banner",
b"-v", b"fatal",
b"-i", fsenc(sys.argv[1]),
b"-f", b"framemd5",
b"-"
]
# fmt: on

twitter-unmute.user.js (new file)

@@ -0,0 +1,21 @@
// ==UserScript==
// @name twitter-unmute
// @namespace http://ocv.me/
// @version 0.1
// @description memes
// @author ed <irc.rizon.net>
// @match https://twitter.com/*
// @icon https://www.google.com/s2/favicons?domain=twitter.com
// @grant GM_addStyle
// ==/UserScript==
function grunnur() {
setInterval(function () {
//document.querySelector('div[aria-label="Unmute"]').click();
document.querySelector('video').muted = false;
}, 200);
}
var scr = document.createElement('script');
scr.textContent = '(' + grunnur.toString() + ')();';
(document.head || document.getElementsByTagName('head')[0]).appendChild(scr);

bin/mtag/very-bad-idea.py (new file, executable)

@@ -0,0 +1,139 @@
#!/usr/bin/env python3
"""
use copyparty as a chromecast replacement:
* post a URL and it will open in the default browser
* upload a file and it will open in the default application
* the `key` command simulates keyboard input
* the `x` command executes other xdotool commands
* the `c` command executes arbitrary unix commands
the android app makes it a breeze to post pics and links:
https://github.com/9001/party-up/releases
(iOS devices have to rely on the web-UI)
goes without saying, but this is HELLA DANGEROUS,
GIVES RCE TO ANYONE WHO HAS UPLOAD PERMISSIONS
example copyparty config to use this:
--urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,bin/mtag/very-bad-idea.py
recommended deps:
apt install xdotool libnotify-bin
https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js
and you probably want `twitter-unmute.user.js` from the res folder
-----------------------------------------------------------------------
-- startup script:
-----------------------------------------------------------------------
#!/bin/bash
set -e
# create qr code
ip=$(ip r | awk '/^default/{print$(NF-2)}'); echo http://$ip:3923/ | qrencode -o - -s 4 >/dev/shm/cpp-qr.png
/usr/bin/feh -x /dev/shm/cpp-qr.png &
# reposition and make topmost (with janky raspbian support)
( sleep 0.5
xdotool search --name cpp-qr.png windowactivate --sync windowmove 1780 0
wmctrl -r :ACTIVE: -b toggle,above || true
ps aux | grep -E 'sleep[ ]7\.27' ||
while true; do
w=$(xdotool getactivewindow)
xdotool search --name cpp-qr.png windowactivate windowraise windowfocus
xdotool windowactivate $w
xdotool windowfocus $w
sleep 7.27 || break
done &
xeyes # distraction window to prevent ^w from closing the qr-code
) &
# bail if copyparty is already running
ps aux | grep -E '[3] copy[p]arty' && exit 0
# dumb chrome wrapper to allow autoplay
cat >/usr/local/bin/chromium-browser <<'EOF'
#!/bin/bash
set -e
/usr/bin/chromium-browser --autoplay-policy=no-user-gesture-required "$@"
EOF
chmod 755 /usr/local/bin/chromium-browser
# start the server (note: replace `-v.::rw:` with `-v.::r:` to disallow retrieving uploaded stuff)
cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,very-bad-idea.py
"""
import os
import sys
import time
import subprocess as sp
from urllib.parse import unquote_to_bytes as unquote
def main():
fp = os.path.abspath(sys.argv[1])
with open(fp, "rb") as f:
txt = f.read(4096)
if txt.startswith(b"msg="):
open_post(txt)
else:
open_url(fp)
def open_post(txt):
txt = unquote(txt.replace(b"+", b" ")).decode("utf-8")[4:]
try:
k, v = txt.split(" ", 1)
except:
open_url(txt)
if k == "key":
sp.call(["xdotool", "key"] + v.split(" "))
elif k == "x":
sp.call(["xdotool"] + v.split(" "))
elif k == "c":
env = os.environ.copy()
while " " in v:
v1, v2 = v.split(" ", 1)
if "=" not in v1:
break
ek, ev = v1.split("=", 1)
env[ek] = ev
v = v2
sp.call(v.split(" "), env=env)
else:
open_url(txt)
def open_url(txt):
ext = txt.rsplit(".")[-1].lower()
sp.call(["notify-send", "--", txt])
if ext not in ["jpg", "jpeg", "png", "gif", "webp"]:
# sp.call(["wmctrl", "-c", ":ACTIVE:"]) # closes the active window correctly
sp.call(["killall", "vlc"])
sp.call(["killall", "mpv"])
sp.call(["killall", "feh"])
time.sleep(0.5)
for _ in range(20):
sp.call(["xdotool", "key", "ctrl+w"]) # closes the open tab correctly
# else:
# sp.call(["xdotool", "getactivewindow", "windowminimize"]) # minimizes the focused windo
# close any error messages:
sp.call(["xdotool", "search", "--name", "Error", "windowclose"])
# sp.call(["xdotool", "key", "ctrl+alt+d"]) # doesnt work at all
# sp.call(["xdotool", "keydown", "--delay", "100", "ctrl+alt+d"])
# sp.call(["xdotool", "keyup", "ctrl+alt+d"])
sp.call(["xdg-open", txt])
main()

bin/partyjournal.py (new file, executable)

@@ -0,0 +1,177 @@
#!/usr/bin/env python3
"""
partyjournal.py: chronological history of uploads
2021-12-31, v0.1, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/partyjournal.py
produces a chronological list of all uploads,
by collecting info from up2k databases and the filesystem
specify subnet `192.168.1.*` with argument `.=192.168.1.`,
affecting all successive mappings
usage:
./partyjournal.py > partyjournal.html .=192.168.1. cart=125 steen=114 steen=131 sleepy=121 fscarlet=144 ed=101 ed=123
"""
import sys
import base64
import sqlite3
import argparse
from datetime import datetime
from urllib.parse import quote_from_bytes as quote
from urllib.parse import unquote_to_bytes as unquote
FS_ENCODING = sys.getfilesystemencoding()
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
pass
##
## snibbed from copyparty
def s3dec(v):
if not v.startswith("//"):
return v
v = base64.urlsafe_b64decode(v.encode("ascii")[2:])
return v.decode(FS_ENCODING, "replace")
def quotep(txt):
btxt = txt.encode("utf-8", "replace")
quot1 = quote(btxt, safe=b"/")
quot1 = quot1.encode("ascii")
quot2 = quot1.replace(b" ", b"+")
return quot2.decode("utf-8", "replace")
def html_escape(s, quote=False, crlf=False):
"""html.escape but also newlines"""
s = s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
if quote:
s = s.replace('"', "&quot;").replace("'", "&#x27;")
if crlf:
s = s.replace("\r", "&#13;").replace("\n", "&#10;")
return s
## end snibs
##
def main():
ap = argparse.ArgumentParser(formatter_class=APF)
ap.add_argument("who", nargs="*")
ar = ap.parse_args()
imap = {}
subnet = ""
for v in ar.who:
if "=" not in v:
raise Exception("bad who: " + v)
k, v = v.split("=")
if k == ".":
subnet = v
continue
imap["{}{}".format(subnet, v)] = k
print(repr(imap), file=sys.stderr)
print(
"""\
<!DOCTYPE html>
<html lang="en">
<head><meta charset="utf-8"><style>
html, body {
color: #ccc;
background: #222;
font-family: sans-serif;
}
a {
color: #fc5;
}
td, th {
padding: .2em .5em;
border: 1px solid #999;
border-width: 0 1px 1px 0;
white-space: nowrap;
}
td:nth-child(1),
td:nth-child(2),
td:nth-child(3) {
font-family: monospace, monospace;
text-align: right;
}
tr:first-child {
position: sticky;
top: -1px;
}
th {
background: #222;
text-align: left;
}
</style></head><body><table><tr>
<th>wark</th>
<th>time</th>
<th>size</th>
<th>who</th>
<th>link</th>
</tr>"""
)
db_path = ".hist/up2k.db"
conn = sqlite3.connect(db_path)
q = r"pragma table_info(up)"
inf = conn.execute(q).fetchall()
cols = [x[1] for x in inf]
print("<!-- " + str(cols) + " -->")
# ['w', 'mt', 'sz', 'rd', 'fn', 'ip', 'at']
q = r"select * from up order by case when at > 0 then at else mt end"
for w, mt, sz, rd, fn, ip, at in conn.execute(q):
link = "/".join([s3dec(x) for x in [rd, fn] if x])
if fn.startswith("put-") and sz < 4096:
try:
with open(link, "rb") as f:
txt = f.read().decode("utf-8", "replace")
except:
continue
if txt.startswith("msg="):
txt = txt.encode("utf-8", "replace")
txt = unquote(txt.replace(b"+", b" "))
link = txt.decode("utf-8")[4:]
sz = "{:,}".format(sz)
v = [
w[:16],
datetime.utcfromtimestamp(at if at > 0 else mt).strftime(
"%Y-%m-%d %H:%M:%S"
),
sz,
imap.get(ip, ip),
]
row = "<tr>\n "
row += "\n ".join(["<td>{}</th>".format(x) for x in v])
row += '\n <td><a href="{}">{}</a></td>'.format(link, html_escape(link))
row += "\n</tr>"
print(row)
print("</table></body></html>")
if __name__ == "__main__":
main()

bin/prisonparty.sh (mode changed: normal → executable)

@@ -11,10 +11,16 @@ sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr )
help() { cat <<'EOF'
usage:
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- copyparty-sfx.py [...]"
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]"
example:
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- copyparty-sfx.py -v /mnt/nas/music::rwmd"
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd"
example for running straight from source (instead of using an sfx):
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd"
note that if you have python modules installed with --user (such as bpm/key detectors),
you should add /home/foo/.local as a VOLDIR
EOF
exit 1
@@ -35,10 +41,20 @@ while true; do
vols+=( "$(realpath "$v")" )
done
pybin="$1"; shift
pybin="$(realpath "$pybin")"
pybin="$(command -v "$pybin")"
pyarg=
while true; do
v="$1"
[ "${v:0:1}" = - ] || break
pyarg="$pyarg $v"
shift
done
cpp="$1"; shift
cpp="$(realpath "$cpp")"
cppdir="$(dirname "$cpp")"
[ -d "$cpp" ] && cppdir="$PWD" || {
# sfx, not module
cpp="$(realpath "$cpp")"
cppdir="$(dirname "$cpp")"
}
trap - EXIT
@@ -60,11 +76,10 @@ echo
# remove any trailing slashes
jail="${jail%/}"
cppdir="${cppdir%/}"
# bind-mount system directories and volumes
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | LC_ALL=C sort |
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
while IFS= read -r v; do
[ -e "$v" ] || {
# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v"
@@ -72,6 +87,7 @@ while IFS= read -r v; do
}
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b)
# echo "v [$v] i1 [$i1] i2 [$i2]"
[ $i1 = $i2 ] && continue
mkdir -p "$jail$v"
@@ -79,21 +95,34 @@ while IFS= read -r v; do
done
cln() {
rv=$?
# cleanup if not in use
lsof "$jail" | grep -qF "$jail" &&
echo "chroot is in use, will not cleanup" ||
{
mount | grep -F " on $jail" |
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
}
exit $rv
}
trap cln EXIT
# create a tmp
mkdir -p "$jail/tmp"
chmod 777 "$jail/tmp"
# run copyparty
/sbin/chroot --userspec=$uid:$gid "$jail" "$pybin" "$cpp" "$@" && rv=0 || rv=$?
# cleanup if not in use
lsof "$jail" | grep -qF "$jail" &&
echo "chroot is in use, will not cleanup" ||
{
mount | grep -qF " on $jail" |
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
}
exit $rv
export HOME=$(getent passwd $uid | cut -d: -f6)
export USER=$(getent passwd $uid | cut -d: -f1)
export LOGNAME="$USER"
#echo "pybin [$pybin]"
#echo "pyarg [$pyarg]"
#echo "cpp [$cpp]"
chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
p=$!
trap 'kill $p' INT TERM
wait

View File

@@ -3,7 +3,7 @@ from __future__ import print_function, unicode_literals
"""
up2k.py: upload to copyparty
2021-10-31, v0.11, ed <irc.rizon.net>, MIT-Licensed
2021-11-28, v0.13, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
- dependencies: requests
@@ -224,29 +224,47 @@ class CTermsize(object):
ss = CTermsize()
def statdir(top):
def _scd(err, top):
"""non-recursive listing of directory contents, along with stat() info"""
if hasattr(os, "scandir"):
with os.scandir(top) as dh:
for fh in dh:
yield [os.path.join(top, fh.name), fh.stat()]
else:
for name in os.listdir(top):
abspath = os.path.join(top, name)
with os.scandir(top) as dh:
for fh in dh:
abspath = os.path.join(top, fh.name)
try:
yield [abspath, fh.stat()]
except:
err.append(abspath)
def _lsd(err, top):
"""non-recursive listing of directory contents, along with stat() info"""
for name in os.listdir(top):
abspath = os.path.join(top, name)
try:
yield [abspath, os.stat(abspath)]
except:
err.append(abspath)
def walkdir(top):
if hasattr(os, "scandir"):
statdir = _scd
else:
statdir = _lsd
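# prefer os.scandir when available (python 3.5+), otherwise fall back to listdir + per-file stat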
def walkdir(err, top):
"""recursive statdir"""
for ap, inf in sorted(statdir(top)):
for ap, inf in sorted(statdir(err, top)):
if stat.S_ISDIR(inf.st_mode):
for x in walkdir(ap):
yield x
try:
for x in walkdir(err, ap):
yield x
except:
err.append(ap)
else:
yield ap, inf
def walkdirs(tops):
def walkdirs(err, tops):
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
sep = "{0}".format(os.sep).encode("ascii")
for top in tops:
@@ -256,7 +274,7 @@ def walkdirs(tops):
stop = os.path.dirname(top)
if os.path.isdir(top):
for ap, inf in walkdir(top):
for ap, inf in walkdir(err, top):
yield stop, ap[len(stop) :].lstrip(sep), inf
else:
d, n = top.rsplit(sep, 1)
@@ -372,7 +390,7 @@ def handshake(req_ses, url, file, pw, search):
r = req_ses.post(url, headers=headers, json=req)
break
except:
eprint("handshake failed, retry...\n")
eprint("handshake failed, retrying: {0}\n".format(file.name))
time.sleep(1)
try:
@@ -446,10 +464,21 @@ class Ctl(object):
nfiles = 0
nbytes = 0
for _, _, inf in walkdirs(ar.files):
err = []
for _, _, inf in walkdirs(err, ar.files):
nfiles += 1
nbytes += inf.st_size
if err:
eprint("\n# failed to access {0} paths:\n".format(len(err)))
for x in err:
eprint(x.decode("utf-8", "replace") + "\n")
eprint("^ failed to access those {0} paths ^\n\n".format(len(err)))
if not ar.ok:
eprint("aborting because --ok is not set\n")
return
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
self.nfiles = nfiles
self.nbytes = nbytes
@@ -460,7 +489,7 @@ class Ctl(object):
if ar.te:
req_ses.verify = ar.te
self.filegen = walkdirs(ar.files)
self.filegen = walkdirs([], ar.files)
if ar.safe:
self.safe()
else:
@@ -476,7 +505,7 @@ class Ctl(object):
print("{0} {1}\n hash...".format(self.nfiles - nf, upath))
get_hashlist(file, None)
burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
burl = self.ar.url[:12] + self.ar.url[8:].split("/")[0] + "/"
while True:
print(" hs...")
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
@@ -744,7 +773,7 @@ class Ctl(object):
try:
upload(req_ses, file, cid, self.ar.a)
except:
eprint("upload failed, retry...\n")
eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
pass # handshake will fix it
with self.mutex:
@@ -783,6 +812,7 @@ source file/folder selection uses rsync syntax, meaning that:
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
ap.add_argument("-a", metavar="PASSWORD", help="password")
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
ap = app.add_argument_group("performance tweaks")
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")

View File

@@ -1,3 +1,6 @@
### [`plugins/`](plugins/)
* example extensions
### [`copyparty.bat`](copyparty.bat)
* launches copyparty with no arguments (anon read+write within same folder)
* intended for windows machines with no python.exe in PATH

View File

@@ -13,7 +13,7 @@
upstream cpp {
server 127.0.0.1:3923;
keepalive 120;
keepalive 1;
}
server {
listen 443 ssl;

24
contrib/plugins/README.md Normal file

@@ -0,0 +1,24 @@
# example resource files
can be provided to copyparty to tweak things
## example `.epilogue.html`
save one of these as `.epilogue.html` inside a folder to customize it:
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
## example browser-css
point `--css-browser` to one of these by URL:
* [`browser-icons.css`](browser-icons.css) adds filetype icons
## meadup.js
* turns copyparty into a chromecast, just more flexible (and probably way more buggy)
* usage: put the js somewhere in the webroot and `--js-browser /memes/meadup.js`

506
contrib/plugins/meadup.js Normal file

@@ -0,0 +1,506 @@
// USAGE:
// place this file somewhere in the webroot and then
// python3 -m copyparty --js-browser /memes/meadup.js
//
// FEATURES:
// * adds an onscreen keyboard for operating a media center remotely,
// relies on https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/very-bad-idea.py
// * adds an interactive anime girl (if you can find the dependencies)
var hambagas = [
"https://www.youtube.com/watch?v=pFA3KGp4GuU"
];
// keybaord,
// onscreen keyboard by @steinuil
function initKeybaord(BASE_URL, HAMBAGA, consoleLog, consoleError) {
document.querySelector('.keybaord-container').innerHTML = `
<div class="keybaord-body">
<div class="keybaord-row keybaord-row-1">
<div class="keybaord-key" data-keybaord-key="Escape">
esc
</div>
<div class="keybaord-key" data-keybaord-key="F1">
F1
</div>
<div class="keybaord-key" data-keybaord-key="F2">
F2
</div>
<div class="keybaord-key" data-keybaord-key="F3">
F3
</div>
<div class="keybaord-key" data-keybaord-key="F4">
F4
</div>
<div class="keybaord-key" data-keybaord-key="F5">
F5
</div>
<div class="keybaord-key" data-keybaord-key="F6">
F6
</div>
<div class="keybaord-key" data-keybaord-key="F7">
F7
</div>
<div class="keybaord-key" data-keybaord-key="F8">
F8
</div>
<div class="keybaord-key" data-keybaord-key="F9">
F9
</div>
<div class="keybaord-key" data-keybaord-key="F10">
F10
</div>
<div class="keybaord-key" data-keybaord-key="F11">
F11
</div>
<div class="keybaord-key" data-keybaord-key="F12">
F12
</div>
<div class="keybaord-key" data-keybaord-key="Insert">
ins
</div>
<div class="keybaord-key" data-keybaord-key="Delete">
del
</div>
</div>
<div class="keybaord-row keybaord-row-2">
<div class="keybaord-key" data-keybaord-key="\`">
\`
</div>
<div class="keybaord-key" data-keybaord-key="1">
1
</div>
<div class="keybaord-key" data-keybaord-key="2">
2
</div>
<div class="keybaord-key" data-keybaord-key="3">
3
</div>
<div class="keybaord-key" data-keybaord-key="4">
4
</div>
<div class="keybaord-key" data-keybaord-key="5">
5
</div>
<div class="keybaord-key" data-keybaord-key="6">
6
</div>
<div class="keybaord-key" data-keybaord-key="7">
7
</div>
<div class="keybaord-key" data-keybaord-key="8">
8
</div>
<div class="keybaord-key" data-keybaord-key="9">
9
</div>
<div class="keybaord-key" data-keybaord-key="0">
0
</div>
<div class="keybaord-key" data-keybaord-key="-">
-
</div>
<div class="keybaord-key" data-keybaord-key="=">
=
</div>
<div class="keybaord-key keybaord-backspace" data-keybaord-key="BackSpace">
backspace
</div>
</div>
<div class="keybaord-row keybaord-row-3">
<div class="keybaord-key keybaord-tab" data-keybaord-key="Tab">
tab
</div>
<div class="keybaord-key" data-keybaord-key="q">
q
</div>
<div class="keybaord-key" data-keybaord-key="w">
w
</div>
<div class="keybaord-key" data-keybaord-key="e">
e
</div>
<div class="keybaord-key" data-keybaord-key="r">
r
</div>
<div class="keybaord-key" data-keybaord-key="t">
t
</div>
<div class="keybaord-key" data-keybaord-key="y">
y
</div>
<div class="keybaord-key" data-keybaord-key="u">
u
</div>
<div class="keybaord-key" data-keybaord-key="i">
i
</div>
<div class="keybaord-key" data-keybaord-key="o">
o
</div>
<div class="keybaord-key" data-keybaord-key="p">
p
</div>
<div class="keybaord-key" data-keybaord-key="[">
[
</div>
<div class="keybaord-key" data-keybaord-key="]">
]
</div>
<div class="keybaord-key keybaord-enter" data-keybaord-key="Return">
enter
</div>
</div>
<div class="keybaord-row keybaord-row-4">
<div class="keybaord-key keybaord-capslock" data-keybaord-key="HAMBAGA">
🍔
</div>
<div class="keybaord-key" data-keybaord-key="a">
a
</div>
<div class="keybaord-key" data-keybaord-key="s">
s
</div>
<div class="keybaord-key" data-keybaord-key="d">
d
</div>
<div class="keybaord-key" data-keybaord-key="f">
f
</div>
<div class="keybaord-key" data-keybaord-key="g">
g
</div>
<div class="keybaord-key" data-keybaord-key="h">
h
</div>
<div class="keybaord-key" data-keybaord-key="j">
j
</div>
<div class="keybaord-key" data-keybaord-key="k">
k
</div>
<div class="keybaord-key" data-keybaord-key="l">
l
</div>
<div class="keybaord-key" data-keybaord-key=";">
;
</div>
<div class="keybaord-key" data-keybaord-key="'">
'
</div>
<div class="keybaord-key keybaord-backslash" data-keybaord-key="\\">
\\
</div>
</div>
<div class="keybaord-row keybaord-row-5">
<div class="keybaord-key keybaord-lshift" data-keybaord-key="Shift_L">
shift
</div>
<div class="keybaord-key" data-keybaord-key="\\">
\\
</div>
<div class="keybaord-key" data-keybaord-key="z">
z
</div>
<div class="keybaord-key" data-keybaord-key="x">
x
</div>
<div class="keybaord-key" data-keybaord-key="c">
c
</div>
<div class="keybaord-key" data-keybaord-key="v">
v
</div>
<div class="keybaord-key" data-keybaord-key="b">
b
</div>
<div class="keybaord-key" data-keybaord-key="n">
n
</div>
<div class="keybaord-key" data-keybaord-key="m">
m
</div>
<div class="keybaord-key" data-keybaord-key=",">
,
</div>
<div class="keybaord-key" data-keybaord-key=".">
.
</div>
<div class="keybaord-key" data-keybaord-key="/">
/
</div>
<div class="keybaord-key keybaord-rshift" data-keybaord-key="Shift_R">
shift
</div>
</div>
<div class="keybaord-row keybaord-row-6">
<div class="keybaord-key keybaord-lctrl" data-keybaord-key="Control_L">
ctrl
</div>
<div class="keybaord-key keybaord-super" data-keybaord-key="Meta_L">
win
</div>
<div class="keybaord-key keybaord-alt" data-keybaord-key="Alt_L">
alt
</div>
<div class="keybaord-key keybaord-spacebar" data-keybaord-key="space">
space
</div>
<div class="keybaord-key keybaord-altgr" data-keybaord-key="Alt_R">
altgr
</div>
<div class="keybaord-key keybaord-what" data-keybaord-key="Menu">
menu
</div>
<div class="keybaord-key keybaord-rctrl" data-keybaord-key="Control_R">
ctrl
</div>
</div>
<div class="keybaord-row">
<div class="keybaord-key" data-keybaord-key="XF86AudioLowerVolume">
🔉
</div>
<div class="keybaord-key" data-keybaord-key="XF86AudioRaiseVolume">
🔊
</div>
<div class="keybaord-key" data-keybaord-key="Left">
⬅️
</div>
<div class="keybaord-key" data-keybaord-key="Down">
⬇️
</div>
<div class="keybaord-key" data-keybaord-key="Up">
⬆️
</div>
<div class="keybaord-key" data-keybaord-key="Right">
➡️
</div>
<div class="keybaord-key" data-keybaord-key="Page_Up">
PgUp
</div>
<div class="keybaord-key" data-keybaord-key="Page_Down">
PgDn
</div>
<div class="keybaord-key" data-keybaord-key="Home">
🏠
</div>
<div class="keybaord-key" data-keybaord-key="End">
End
</div>
</div>
</div>
`;
function arraySample(array) {
return array[Math.floor(Math.random() * array.length)];
}
function sendMessage(msg) {
return fetch(BASE_URL, {
method: "POST",
headers: {
"Content-Type": "application/x-www-form-urlencoded;charset=UTF-8",
},
body: "msg=" + encodeURIComponent(msg),
}).then(
(r) => r.text(), // so the response body shows up in network tab
(err) => consoleError(err)
);
}
const MODIFIER_ON_CLASS = "keybaord-modifier-on";
const KEY_DATASET = "data-keybaord-key";
const KEY_CLASS = "keybaord-key";
const modifiers = new Set()
function toggleModifier(button, key) {
button.classList.toggle(MODIFIER_ON_CLASS);
if (modifiers.has(key)) {
modifiers.delete(key);
} else {
modifiers.add(key);
}
}
function popModifiers() {
let modifierString = "";
modifiers.forEach((mod) => {
document.querySelector("[" + KEY_DATASET + "='" + mod + "']")
.classList.remove(MODIFIER_ON_CLASS);
modifierString += mod + "+";
});
modifiers.clear();
return modifierString;
}
Array.from(document.querySelectorAll("." + KEY_CLASS)).forEach((button) => {
const key = button.dataset.keybaordKey;
button.addEventListener("click", (ev) => {
switch (key) {
case "HAMBAGA":
sendMessage(arraySample(HAMBAGA));
break;
case "Shift_L":
case "Shift_R":
case "Control_L":
case "Control_R":
case "Meta_L":
case "Alt_L":
case "Alt_R":
toggleModifier(button, key);
break;
default: {
const keyWithModifiers = popModifiers() + key;
consoleLog(keyWithModifiers);
sendMessage("key " + keyWithModifiers)
.then(() => consoleLog(keyWithModifiers + " OK"));
}
}
});
});
}
// keybaord integration
(function () {
var o = mknod('div');
clmod(o, 'keybaord-container', 1);
ebi('op_msg').appendChild(o);
o = mknod('style');
o.innerHTML = `
.keybaord-body {
display: flex;
flex-flow: column nowrap;
margin: .6em 0;
}
.keybaord-row {
display: flex;
}
.keybaord-key {
border: 1px solid rgba(128,128,128,0.2);
width: 41px;
height: 40px;
display: flex;
justify-content: center;
align-items: center;
}
.keybaord-key:active {
background-color: lightgrey;
}
.keybaord-key.keybaord-modifier-on {
background-color: lightblue;
}
.keybaord-key.keybaord-backspace {
width: 82px;
}
.keybaord-key.keybaord-tab {
width: 55px;
}
.keybaord-key.keybaord-enter {
width: 69px;
}
.keybaord-key.keybaord-capslock {
width: 80px;
}
.keybaord-key.keybaord-backslash {
width: 88px;
}
.keybaord-key.keybaord-lshift {
width: 65px;
}
.keybaord-key.keybaord-rshift {
width: 103px;
}
.keybaord-key.keybaord-lctrl {
width: 55px;
}
.keybaord-key.keybaord-super {
width: 55px;
}
.keybaord-key.keybaord-alt {
width: 55px;
}
.keybaord-key.keybaord-altgr {
width: 55px;
}
.keybaord-key.keybaord-what {
width: 55px;
}
.keybaord-key.keybaord-rctrl {
width: 55px;
}
.keybaord-key.keybaord-spacebar {
width: 302px;
}
`;
document.head.appendChild(o);
initKeybaord('/', hambagas,
(msg) => { toast.inf(2, msg.toString()) },
(msg) => { toast.err(30, msg.toString()) });
})();
// live2d (dumb pointless meme)
// dependencies for this part are not tracked in git
// so delete this section if you wanna use this file
// (or supply your own l2d model and js)
(function () {
var o = mknod('link');
o.setAttribute('rel', 'stylesheet');
o.setAttribute('href', "/bad-memes/pio.css");
document.head.appendChild(o);
o = mknod('style');
o.innerHTML = '.pio-container{text-shadow:none;z-index:1}';
document.head.appendChild(o);
o = mknod('div');
clmod(o, 'pio-container', 1);
o.innerHTML = '<div class="pio-action"></div><canvas id="pio" width="280" height="500"></canvas>';
document.body.appendChild(o);
var remaining = 3;
for (var a of ['pio', 'l2d', 'fireworks']) {
import_js(`/bad-memes/${a}.js`, function () {
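// "remaining --> 1" is just "remaining-- > 1"; only the callback of the last script to load falls through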
if (remaining --> 1)
return;
o = mknod('script');
o.innerHTML = 'var pio = new Paul_Pio({"selector":[],"mode":"fixed","hidden":false,"content":{"close":"ok bye"},"model":["/bad-memes/sagiri/model.json"]});';
document.body.appendChild(o);
});
}
})();

View File

@@ -9,7 +9,7 @@
#ops, #tree, #path, #wrap>h2:last-child, /* main tabs and navigators (tree/breadcrumbs) */
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
#u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
#srch_dz, #srch_zd, /* the filesearch dropzone */

View File

@@ -12,7 +12,6 @@
# change '/mnt::rw' to another location or permission-set
# remove '-p 80,443,3923' to only listen on port 3923
# add '-i 127.0.0.1' to only allow local connections
# add '--use-fpool' if uploading into nfs locations
#
# with `Type=notify`, copyparty will signal systemd when it is ready to
# accept connections; correctly delaying units depending on copyparty.
@@ -23,6 +22,8 @@
# if you remove -q to enable logging, you may also want to remove the
# following line to enable buffering (slightly better performance):
# Environment=PYTHONUNBUFFERED=x
#
# keep ExecStartPre before ExecStart, at least on rhel8
[Unit]
Description=copyparty file server

View File

@@ -25,26 +25,34 @@ ANYWIN = WINDOWS or sys.platform in ["msys"]
MACOS = platform.system() == "Darwin"
def get_unix_home():
try:
v = os.environ["XDG_CONFIG_HOME"]
if not v:
raise Exception()
ret = os.path.normpath(v)
os.listdir(ret)
return ret
except:
pass
def get_unixdir():
paths = [
(os.environ.get, "XDG_CONFIG_HOME"),
(os.path.expanduser, "~/.config"),
(os.environ.get, "TMPDIR"),
(os.environ.get, "TEMP"),
(os.environ.get, "TMP"),
(unicode, "/tmp"),
]
for chk in [os.listdir, os.mkdir]:
for pf, pa in paths:
try:
p = pf(pa)
# print(chk.__name__, p, pa)
if not p or p.startswith("~"):
continue
try:
v = os.path.expanduser("~/.config")
if v.startswith("~"):
raise Exception()
ret = os.path.normpath(v)
os.listdir(ret)
return ret
except:
return "/tmp"
p = os.path.normpath(p)
chk(p)
p = os.path.join(p, "copyparty")
if not os.path.isdir(p):
os.mkdir(p)
return p
except:
pass
raise Exception("could not find a writable path for config")
class EnvParams(object):
@@ -59,7 +67,7 @@ class EnvParams(object):
elif sys.platform == "darwin":
self.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
else:
self.cfg = get_unix_home() + "/copyparty"
self.cfg = get_unixdir()
self.cfg = self.cfg.replace("\\", "/")
try:

View File

@@ -23,7 +23,7 @@ from textwrap import dedent
from .__init__ import E, WINDOWS, ANYWIN, VT100, PY2, unicode
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
from .svchub import SvcHub
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re, min_ex
from .authsrv import re_vol
HAVE_SSL = True
@@ -222,6 +222,54 @@ def sighandler(sig=None, frame=None):
print("\n".join(msg))
def disable_quickedit():
import ctypes
import atexit
from ctypes import wintypes
def ecb(ok, fun, args):
if not ok:
err = ctypes.get_last_error()
if err:
raise ctypes.WinError(err)
return args
k32 = ctypes.WinDLL("kernel32", use_last_error=True)
if PY2:
wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)
k32.GetStdHandle.errcheck = ecb
k32.GetConsoleMode.errcheck = ecb
k32.SetConsoleMode.errcheck = ecb
k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD)
k32.SetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.DWORD)
def cmode(out, mode=None):
h = k32.GetStdHandle(-11 if out else -10)
if mode:
return k32.SetConsoleMode(h, mode)
mode = wintypes.DWORD()
k32.GetConsoleMode(h, ctypes.byref(mode))
return mode.value
# disable quickedit
mode = orig_in = cmode(False)
quickedit = 0x40
extended = 0x80
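# 0x40 = ENABLE_QUICK_EDIT_MODE, 0x80 = ENABLE_EXTENDED_FLAGS (console input mode flags)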
mask = quickedit + extended
if mode & mask != extended:
atexit.register(cmode, False, orig_in)
cmode(False, mode & ~mask | extended)
# enable colors in case the os.system("rem") trick ever stops working
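# (0x4 = ENABLE_VIRTUAL_TERMINAL_PROCESSING on the output handle)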
if VT100:
mode = orig_out = cmode(True)
if mode & 4 != 4:
atexit.register(cmode, True, orig_out)
cmode(True, mode | 4)
def run_argparse(argv, formatter):
ap = argparse.ArgumentParser(
formatter_class=formatter,
@@ -302,6 +350,8 @@ def run_argparse(argv, formatter):
\033[0mdatabase, general:
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
\033[36md2ts\033[35m disables metadata collection for existing files
\033[36md2ds\033[35m disables onboot indexing, overrides -e2ds*
\033[36md2t\033[35m disables metadata collection, overrides -e2t*
\033[36md2d\033[35m disables all database stuff, overrides -e2*
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
@@ -368,6 +418,7 @@ def run_argparse(argv, formatter):
ap2.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap2.add_argument("--urlform", metavar="MODE", type=u, default="print,get", help="how to handle url-forms; examples: [stash], [save,get]")
ap2.add_argument("--wintitle", metavar="TXT", type=u, default="cpp @ $pub", help="window title, for example '$ip-10.1.2.' or '$ip-'")
ap2 = ap.add_argument_group('upload options')
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
@@ -375,13 +426,19 @@ def run_argparse(argv, formatter):
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload")
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without")
ap2.add_argument("--no-symlink", action="store_true", help="duplicate file contents instead")
ap2.add_argument("--hardlink", action="store_true", help="prefer hardlinks instead of symlinks when possible (same filesystem)")
ap2.add_argument("--never-symlink", action="store_true", help="do not fallback to symlinks when a hardlink cannot be made")
ap2.add_argument("--no-dedup", action="store_true", help="disable symlink/hardlink creation; copy file contents instead")
ap2.add_argument("--reg-cap", metavar="N", type=int, default=9000, help="max number of uploads to keep in memory when running without -e2d")
ap2 = ap.add_argument_group('network options')
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
ap2.add_argument("-p", metavar="PORT", type=u, default="3923", help="ports to bind (comma/range)")
ap2.add_argument("--rproxy", metavar="DEPTH", type=int, default=1, help="which ip to keep; 0 = tcp, 1 = origin (first x-fwd), 2 = cloudflare, 3 = nginx, -1 = closest proxy")
ap2.add_argument("--s-wr-sz", metavar="B", type=int, default=256*1024, help="socket write size in bytes")
ap2.add_argument("--s-wr-slp", metavar="SEC", type=float, default=0, help="socket write delay in seconds")
ap2.add_argument("--rsp-slp", metavar="SEC", type=float, default=0, help="response delay in seconds")
ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
@@ -390,8 +447,16 @@ def run_argparse(argv, formatter):
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", type=u, help="log master secrets")
ap2 = ap.add_argument_group('FTP options')
ap2.add_argument("--ftp", metavar="PORT", type=int, help="enable FTP server on PORT, for example 3921")
ap2.add_argument("--ftps", metavar="PORT", type=int, help="enable FTPS server on PORT, for example 3990")
ap2.add_argument("--ftp-dbg", action="store_true", help="enable debug logging")
ap2.add_argument("--ftp-nat", metavar="ADDR", type=u, help="the NAT address to use for passive connections")
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example 12000-13000")
ap2 = ap.add_argument_group('opt-outs')
ap2.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap2.add_argument("--keep-qem", action="store_true", help="do not disable quick-edit-mode on windows")
ap2.add_argument("--no-del", action="store_true", help="disable delete operations")
ap2.add_argument("--no-mv", action="store_true", help="disable move/rename operations")
ap2.add_argument("-nih", action="store_true", help="no info hostname")
@@ -408,6 +473,8 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
ap2.add_argument("--force-js", action="store_true", help="don't send HTML folder listings, force clients to use the embedded json instead")
ap2.add_argument("--no-robots", action="store_true", help="adds http and html headers asking search engines to not index anything")
ap2 = ap.add_argument_group('yolo options')
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
@@ -433,7 +500,9 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for generating thumbnails")
ap2.add_argument("--th-convt", metavar="SEC", type=int, default=60, help="conversion timeout in seconds")
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
ap2.add_argument("--th-dec", metavar="LIBS", default="vips,pil,ff", help="decoders, in order of preference")
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
ap2.add_argument("--th-ff-jpg", action="store_true", help="force jpg for video thumbs")
@@ -442,6 +511,14 @@ def run_argparse(argv, formatter):
ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age")
ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat for")
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# https://github.com/libvips/libvips
# ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'
ap2.add_argument("--th-r-pil", metavar="T,T", type=u, default="bmp,dib,gif,icns,ico,jpg,jpeg,jp2,jpx,pcx,png,pbm,pgm,ppm,pnm,sgi,tga,tif,tiff,webp,xbm,dds,xpm,heif,heifs,heic,heics,avif,avifs", help="image formats to decode using pillow")
ap2.add_argument("--th-r-vips", metavar="T,T", type=u, default="jpg,jpeg,jp2,jpx,jxl,tif,tiff,png,webp,heic,avif,fit,fits,fts,exr,svg,hdr,ppm,pgm,pfm,gif,nii", help="image formats to decode using pyvips")
ap2.add_argument("--th-r-ffi", metavar="T,T", type=u, default="apng,avif,avifs,bmp,dds,dib,fit,fits,fts,gif,heic,heics,heif,heifs,icns,ico,jp2,jpeg,jpg,jpx,jxl,pbm,pcx,pfm,pgm,png,pnm,ppm,psd,sgi,tga,tif,tiff,webp,xbm,xpm", help="image formats to decode using ffmpeg")
ap2.add_argument("--th-r-ffv", metavar="T,T", type=u, default="av1,asf,avi,flv,m4v,mkv,mjpeg,mjpg,mpg,mpeg,mpg2,mpeg2,h264,avc,mts,h265,hevc,mov,3gp,mp4,ts,mpegts,nut,ogv,ogm,rm,vob,webm,wmv", help="video formats to decode using ffmpeg")
ap2.add_argument("--th-r-ffa", metavar="T,T", type=u, default="aac,m4a,ogg,opus,flac,alac,mp3,mp2,ac3,dts,wma,ra,wav,aif,aiff,au,alaw,ulaw,mulaw,amr,gsm,ape,tak,tta,wv,mpc", help="audio formats to decode using ffmpeg")
ap2 = ap.add_argument_group('transcoding options')
ap2.add_argument("--no-acode", action="store_true", help="disable audio transcoding")
@@ -456,6 +533,7 @@ def run_argparse(argv, formatter):
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans")
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
ap2.add_argument("--srch-hits", metavar="N", type=int, default=1000, help="max search results")
ap2 = ap.add_argument_group('metadata db options')
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
@@ -472,9 +550,13 @@ def run_argparse(argv, formatter):
ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin")
ap2 = ap.add_argument_group('ui options')
ap2.add_argument("--theme", metavar="NUM", type=int, default=0, help="default theme to use")
ap2.add_argument("--themes", metavar="NUM", type=int, default=4, help="number of themes installed")
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
ap2.add_argument("--html-head", metavar="TXT", type=u, default="", help="text to append to the <head> of all HTML pages")
ap2.add_argument("--textfiles", metavar="CSV", type=u, default="txt,nfo,diz,cue,readme", help="file extensions to present as plaintext")
ap2.add_argument("--doctitle", metavar="TXT", type=u, default="copyparty", help="title / service-name to show in html documents")
ap2 = ap.add_argument_group('debug options')
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile")
@@ -517,7 +599,7 @@ def main(argv=None):
if HAVE_SSL:
ensure_cert()
for k, v in zip(argv, argv[1:]):
for k, v in zip(argv[1:], argv[2:]):
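# pair each argument with the one following it (skipping argv[0]) to find every "-c FILE"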
if k == "-c":
supp = args_from_cfg(v)
argv.extend(supp)
@@ -545,6 +627,15 @@ def main(argv=None):
except AssertionError:
al = run_argparse(argv, Dodge11874)
if WINDOWS and not al.keep_qem:
try:
disable_quickedit()
except:
print("\nfailed to disable quick-edit-mode:\n" + min_ex() + "\n")
if not VT100:
al.wintitle = ""
nstrs = []
anymod = False
for ostr in al.v or []:

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (1, 1, 1)
CODENAME = "opus"
BUILD_DT = (2021, 11, 8)
VERSION = (1, 2, 6)
CODENAME = "ftp btw"
BUILD_DT = (2022, 4, 15)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -14,6 +14,7 @@ from datetime import datetime
from .__init__ import WINDOWS
from .util import (
IMPLICATIONS,
META_NOBOTS,
uncyg,
undot,
unhumanize,
@@ -394,6 +395,13 @@ class VFS(object):
if ok:
virt_vis[name] = vn2
if ".hist" in abspath:
p = abspath.replace("\\", "/") if WINDOWS else abspath
if p.endswith("/.hist"):
real = [x for x in real if not x[0].startswith("up2k.")]
elif "/.hist/th/" in p:
real = [x for x in real if not x[0].endswith("dir.txt")]
return [abspath, real, virt_vis]
def walk(self, rel, rem, seen, uname, permsets, dots, scandir, lstat):
@@ -444,10 +452,6 @@ class VFS(object):
if flt:
flt = {k: True for k in flt}
f1 = "{0}.hist{0}up2k.".format(os.sep)
f2a = os.sep + "dir.txt"
f2b = "{0}.hist{0}".format(os.sep)
# if multiselect: add all items to archive root
# if single folder: the folder itself is the top-level item
folder = "" if flt else (vrem.split("/")[-1] or "top")
@@ -483,13 +487,6 @@ class VFS(object):
for x in rm:
del vd[x]
# up2k filetring based on actual abspath
files = [
x
for x in files
if f1 not in x[1] and (not x[1].endswith(f2a) or f2b not in x[1])
]
for f in [{"vp": v, "ap": a, "st": n[1]} for v, a, n in files]:
yield f
@@ -744,10 +741,10 @@ class AuthSrv(object):
unames = ["*"] + list(acct.keys())
umap = {x: [] for x in unames}
for usr in unames:
for mp, vol in vfs.all_vols.items():
for vp, vol in vfs.all_vols.items():
axs = getattr(vol.axs, axs_key)
if usr in axs or "*" in axs:
umap[usr].append(mp)
umap[usr].append(vp)
umap[usr].sort()
setattr(vfs, "a" + perm, umap)
@@ -865,6 +862,30 @@ class AuthSrv(object):
if use:
vol.lim = lim
if self.args.no_robots:
for vol in vfs.all_vols.values():
# volflag "robots" overrides global "norobots", allowing indexing by search engines for this vol
if not vol.flags.get("robots"):
vol.flags["norobots"] = True
for vol in vfs.all_vols.values():
h = [vol.flags.get("html_head", self.args.html_head)]
if vol.flags.get("norobots"):
h.insert(0, META_NOBOTS)
vol.flags["html_head"] = "\n".join([x for x in h if x])
for vol in vfs.all_vols.values():
if self.args.no_vthumb:
vol.flags["dvthumb"] = True
if self.args.no_athumb:
vol.flags["dathumb"] = True
if self.args.no_thumb or vol.flags.get("dthumb", False):
vol.flags["dthumb"] = True
vol.flags["dvthumb"] = True
vol.flags["dathumb"] = True
vol.flags["dithumb"] = True
for vol in vfs.all_vols.values():
fk = vol.flags.get("fk")
if fk:
@@ -926,6 +947,14 @@ class AuthSrv(object):
vol.flags["d2t"] = True
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
# d2ds drops all onboot scans for a volume
for grp, rm in [["d2ds", "e2ds"], ["d2ts", "e2ts"]]:
if not vol.flags.get(grp, False):
continue
vol.flags["d2ts"] = True
vol.flags = {k: v for k, v in vol.flags.items() if not k.startswith(rm)}
# mt* needs e2t so drop those too
for grp, rm in [["e2t", "mt"]]:
if vol.flags.get(grp, False):

View File

@@ -2,7 +2,7 @@
from __future__ import print_function, unicode_literals
import os
from ..util import fsenc, fsdec
from ..util import fsenc, fsdec, SYMTIME
from . import path
@@ -18,10 +18,6 @@ def listdir(p="."):
return [fsdec(x) for x in os.listdir(fsenc(p))]
def lstat(p):
return os.lstat(fsenc(p))
def makedirs(name, mode=0o755, exist_ok=True):
bname = fsenc(name)
try:
@@ -55,5 +51,17 @@ def unlink(p):
return os.unlink(fsenc(p))
def utime(p, times=None):
return os.utime(fsenc(p), times)
def utime(p, times=None, follow_symlinks=True):
if SYMTIME:
return os.utime(fsenc(p), times, follow_symlinks=follow_symlinks)
else:
return os.utime(fsenc(p), times)
if hasattr(os, "lstat"):
def lstat(p):
return os.lstat(fsenc(p))
else:
lstat = stat

View File

@@ -2,7 +2,7 @@
from __future__ import print_function, unicode_literals
import os
from ..util import fsenc, fsdec
from ..util import fsenc, fsdec, SYMTIME
def abspath(p):
@@ -13,8 +13,11 @@ def exists(p):
return os.path.exists(fsenc(p))
def getmtime(p):
return os.path.getmtime(fsenc(p))
def getmtime(p, follow_symlinks=True):
if not follow_symlinks and SYMTIME:
return os.lstat(fsenc(p)).st_mtime
else:
return os.path.getmtime(fsenc(p))
def getsize(p):
@@ -33,5 +36,9 @@ def islink(p):
return os.path.islink(fsenc(p))
def lexists(p):
return os.path.lexists(fsenc(p))
def realpath(p):
return fsdec(os.path.realpath(fsenc(p)))

374
copyparty/ftpd.py Normal file

@@ -0,0 +1,374 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import sys
import stat
import time
import logging
import threading
from .__init__ import E, PY2
from .util import Pebkac, fsenc, exclude_dotfiles
from .bos import bos
try:
from pyftpdlib.ioloop import IOLoop
except ImportError:
p = os.path.join(E.mod, "vend")
print("loading asynchat from " + p)
sys.path.append(p)
from pyftpdlib.ioloop import IOLoop
from pyftpdlib.authorizers import DummyAuthorizer, AuthenticationFailed
from pyftpdlib.filesystems import AbstractedFS, FilesystemError
from pyftpdlib.handlers import FTPHandler
from pyftpdlib.servers import FTPServer
from pyftpdlib.log import config_logging
try:
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from .svchub import SvcHub
except ImportError:
pass
class FtpAuth(DummyAuthorizer):
def __init__(self):
super(FtpAuth, self).__init__()
self.hub = None # type: SvcHub
def validate_authentication(self, username, password, handler):
asrv = self.hub.asrv
if username == "anonymous":
password = ""
uname = "*"
if password:
uname = asrv.iacct.get(password, None)
handler.username = uname
if password and not uname:
raise AuthenticationFailed("Authentication failed.")
def get_home_dir(self, username):
return "/"
def has_user(self, username):
asrv = self.hub.asrv
return username in asrv.acct
def has_perm(self, username, perm, path=None):
return True # handled at filesystem layer
def get_perms(self, username):
return "elradfmwMT"
def get_msg_login(self, username):
return "sup {}".format(username)
def get_msg_quit(self, username):
return "cya"
class FtpFs(AbstractedFS):
def __init__(self, root, cmd_channel):
self.h = self.cmd_channel = cmd_channel # type: FTPHandler
self.hub = cmd_channel.hub # type: SvcHub
self.args = cmd_channel.args
self.uname = self.hub.asrv.iacct.get(cmd_channel.password, "*")
self.cwd = "/" # pyftpdlib convention of leading slash
self.root = "/var/lib/empty"
self.listdirinfo = self.listdir
self.chdir(".")
def v2a(self, vpath, r=False, w=False, m=False, d=False):
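# map a virtual path to an absolute filesystem path, enforcing the requested r/w/m/d permissions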
try:
vpath = vpath.replace("\\", "/").lstrip("/")
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
if not vfs.realpath:
raise FilesystemError("no filesystem mounted at this path")
return os.path.join(vfs.realpath, rem)
except Pebkac as ex:
raise FilesystemError(str(ex))
def rv2a(self, vpath, r=False, w=False, m=False, d=False):
return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d)
def ftp2fs(self, ftppath):
# return self.v2a(ftppath)
return ftppath # self.cwd must be vpath
def fs2ftp(self, fspath):
# raise NotImplementedError()
return fspath
def validpath(self, path):
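# refuse access to up2k database files and dir.txt listings inside .hist folders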
if "/.hist/" in path:
if "/up2k." in path or path.endswith("/dir.txt"):
raise FilesystemError("access to this file is forbidden")
return True
def open(self, filename, mode):
r = "r" in mode
w = "w" in mode or "a" in mode or "+" in mode
ap = self.rv2a(filename, r, w)
if w and bos.path.exists(ap):
raise FilesystemError("cannot open existing file for writing")
self.validpath(ap)
return open(fsenc(ap), mode)
def chdir(self, path):
self.cwd = join(self.cwd, path)
x = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username)
self.can_read, self.can_write, self.can_move, self.can_delete, self.can_get = x
def mkdir(self, path):
ap = self.rv2a(path, w=True)
bos.mkdir(ap)
def listdir(self, path):
vpath = join(self.cwd, path).lstrip("/")
try:
vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, True, False)
fsroot, vfs_ls, vfs_virt = vfs.ls(
rem, self.uname, not self.args.no_scandir, [[True], [False, True]]
)
vfs_ls = [x[0] for x in vfs_ls]
vfs_ls.extend(vfs_virt.keys())
if not self.args.ed:
vfs_ls = exclude_dotfiles(vfs_ls)
vfs_ls.sort()
return vfs_ls
except Exception as ex:
if vpath:
# display write-only folders as empty
return []
# return list of volumes
r = {x.split("/")[0]: 1 for x in self.hub.asrv.vfs.all_vols.keys()}
return list(sorted(list(r.keys())))
def rmdir(self, path):
ap = self.rv2a(path, d=True)
bos.rmdir(ap)
def remove(self, path):
if self.args.no_del:
raise FilesystemError("the delete feature is disabled in server config")
vp = join(self.cwd, path).lstrip("/")
x = self.hub.broker.put(
True, "up2k.handle_rm", self.uname, self.h.remote_ip, [vp]
)
try:
x.get()
except Exception as ex:
raise FilesystemError(str(ex))
def rename(self, src, dst):
if not self.can_move:
raise FilesystemError("not allowed for user " + self.h.username)
if self.args.no_mv:
m = "the rename/move feature is disabled in server config"
raise FilesystemError(m)
svp = join(self.cwd, src).lstrip("/")
dvp = join(self.cwd, dst).lstrip("/")
x = self.hub.broker.put(True, "up2k.handle_mv", self.uname, svp, dvp)
try:
x.get()
except Exception as ex:
raise FilesystemError(str(ex))
def chmod(self, path, mode):
pass
def stat(self, path):
try:
ap = self.rv2a(path, r=True)
return bos.stat(ap)
except:
ap = self.rv2a(path)
st = bos.stat(ap)
if not stat.S_ISDIR(st.st_mode):
raise
return st
def utime(self, path, timeval):
ap = self.rv2a(path, w=True)
return bos.utime(ap, (timeval, timeval))
def lstat(self, path):
ap = self.rv2a(path)
return bos.lstat(ap)
def isfile(self, path):
st = self.stat(path)
return stat.S_ISREG(st.st_mode)
def islink(self, path):
ap = self.rv2a(path)
return bos.path.islink(ap)
def isdir(self, path):
try:
st = self.stat(path)
return stat.S_ISDIR(st.st_mode)
except:
return True
def getsize(self, path):
ap = self.rv2a(path)
return bos.path.getsize(ap)
def getmtime(self, path):
ap = self.rv2a(path)
return bos.path.getmtime(ap)
def realpath(self, path):
return path
def lexists(self, path):
ap = self.rv2a(path)
return bos.path.lexists(ap)
def get_user_by_uid(self, uid):
return "root"
def get_group_by_uid(self, gid):
return "root"
class FtpHandler(FTPHandler):
abstracted_fs = FtpFs
def __init__(self, conn, server, ioloop=None):
if PY2:
FTPHandler.__init__(self, conn, server, ioloop)
else:
super(FtpHandler, self).__init__(conn, server, ioloop)
# abspath->vpath mapping to resolve log_transfer paths
self.vfs_map = {}
def ftp_STOR(self, file, mode="w"):
vp = join(self.fs.cwd, file).lstrip("/")
ap = self.fs.v2a(vp)
self.vfs_map[ap] = vp
# print("ftp_STOR: {} {} => {}".format(vp, mode, ap))
ret = FTPHandler.ftp_STOR(self, file, mode)
# print("ftp_STOR: {} {} OK".format(vp, mode))
return ret
def log_transfer(self, cmd, filename, receive, completed, elapsed, bytes):
ap = filename.decode("utf-8", "replace")
vp = self.vfs_map.pop(ap, None)
# print("xfer_end: {} => {}".format(ap, vp))
if vp:
vp, fn = os.path.split(vp)
vfs, rem = self.hub.asrv.vfs.get(vp, self.username, False, True)
vfs, rem = vfs.get_dbv(rem)
self.hub.broker.put(
False,
"up2k.hash_file",
vfs.realpath,
vfs.flags,
rem,
fn,
self.remote_ip,
time.time(),
)
return FTPHandler.log_transfer(
self, cmd, filename, receive, completed, elapsed, bytes
)
try:
from pyftpdlib.handlers import TLS_FTPHandler
class SftpHandler(FtpHandler, TLS_FTPHandler):
pass
except:
pass
class Ftpd(object):
def __init__(self, hub):
self.hub = hub
self.args = hub.args
hs = []
if self.args.ftp:
hs.append([FtpHandler, self.args.ftp])
if self.args.ftps:
try:
h = SftpHandler
except:
m = "\nftps requires pyopenssl;\nplease run the following:\n\n {} -m pip install --user pyopenssl\n"
print(m.format(sys.executable))
sys.exit(1)
h.certfile = os.path.join(E.cfg, "cert.pem")
h.tls_control_required = True
h.tls_data_required = True
hs.append([h, self.args.ftps])
for h in hs:
h, lp = h
h.hub = hub
h.args = hub.args
h.authorizer = FtpAuth()
h.authorizer.hub = hub
if self.args.ftp_pr:
p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")]
if self.args.ftp and self.args.ftps:
# divide port range in half
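# e.g. --ftp-pr 12000-13000 gives ftp 12000-12500 and ftps 12501-13000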
d = int((p2 - p1) / 2)
if lp == self.args.ftp:
p2 = p1 + d
else:
p1 += d + 1
h.passive_ports = list(range(p1, p2 + 1))
if self.args.ftp_nat:
h.masquerade_address = self.args.ftp_nat
if self.args.ftp_dbg:
config_logging(level=logging.DEBUG)
ioloop = IOLoop()
for ip in self.args.i:
for h, lp in hs:
FTPServer((ip, int(lp)), h, ioloop)
t = threading.Thread(target=ioloop.loop)
t.daemon = True
t.start()
def join(p1, p2):
w = os.path.join(p1, p2.replace("\\", "/"))
return os.path.normpath(w).replace("\\", "/")

View File

@@ -60,10 +60,16 @@ class HttpCli(object):
self.bufsz = 1024 * 32
self.hint = None
self.trailing_slash = True
self.out_headerlist = []
self.out_headers = {
"Access-Control-Allow-Origin": "*",
"Cache-Control": "no-store; max-age=0",
}
h = self.args.html_head
if self.args.no_robots:
h = META_NOBOTS + (("\n" + h) if h else "")
self.out_headers["X-Robots-Tag"] = "noindex, nofollow"
self.html_head = h
def log(self, msg, c=0):
ptn = self.asrv.re_pwd
@@ -91,6 +97,8 @@ class HttpCli(object):
tpl = self.conn.hsrv.j2[name]
if ka:
ka["ts"] = self.conn.hsrv.cachebuster()
ka["svcname"] = self.args.doctitle
ka["html_head"] = self.html_head
return tpl.render(**ka)
return tpl
@@ -126,7 +134,8 @@ class HttpCli(object):
self.loud_reply(unicode(ex), status=ex.code, volsan=True)
return self.keepalive
# time.sleep(0.4)
if self.args.rsp_slp:
time.sleep(self.args.rsp_slp)
# normalize incoming headers to lowercase;
# outgoing headers however are Correct-Case
@@ -225,10 +234,10 @@ class HttpCli(object):
self.gvol = self.asrv.vfs.aget[self.uname]
if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"):
self.out_headers["Set-Cookie"] = self.get_pwd_cookie(pwd)[0]
self.out_headerlist.append(("Set-Cookie", self.get_pwd_cookie(pwd)[0]))
ua = self.headers.get("user-agent", "")
self.is_rclone = ua.startswith("rclone/")
self.ua = self.headers.get("user-agent", "")
self.is_rclone = self.ua.startswith("rclone/")
if self.is_rclone:
uparam["raw"] = False
uparam["dots"] = False
@@ -283,12 +292,19 @@ class HttpCli(object):
n = "604800" if cache == "i" else cache or "69"
self.out_headers["Cache-Control"] = "max-age=" + n
def k304(self):
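# 304 replies close the connection if the k304 cookie is "y",
# or by default for IE (Trident) clients unless the cookie disables it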
k304 = self.cookies.get("k304")
return k304 == "y" or ("; Trident/" in self.ua and not k304)
def send_headers(self, length, status=200, mime=None, headers=None):
response = ["{} {} {}".format(self.http_ver, status, HTTPCODE[status])]
if length is not None:
response.append("Content-Length: " + unicode(length))
if status == 304 and self.k304():
self.keepalive = False
# close if unknown length, otherwise take client's preference
response.append("Connection: " + ("Keep-Alive" if self.keepalive else "Close"))
@@ -302,7 +318,7 @@ class HttpCli(object):
self.out_headers["Content-Type"] = mime
for k, v in self.out_headers.items():
for k, v in list(self.out_headers.items()) + self.out_headerlist:
response.append("{}: {}".format(k, v))
try:
@@ -428,6 +444,15 @@ class HttpCli(object):
if "ups" in self.uparam:
return self.tx_ups()
if "k304" in self.uparam:
return self.set_k304()
if "am_js" in self.uparam:
return self.set_am_js()
if "reset" in self.uparam:
return self.set_cfg_reset()
if "h" in self.uparam:
return self.tx_mounts()
@@ -505,7 +530,7 @@ class HttpCli(object):
return self.handle_stash()
if "save" in opt:
post_sz, _, _, path = self.dump_to_file()
post_sz, _, _, _, path, _ = self.dump_to_file()
self.log("urlform: {} bytes, {}".format(post_sz, path))
elif "print" in opt:
reader, _ = self.get_body_reader()
@@ -532,11 +557,11 @@ class HttpCli(object):
raise Pebkac(405, "don't know how to handle POST({})".format(ctype))
def get_body_reader(self):
chunked = "chunked" in self.headers.get("transfer-encoding", "").lower()
if "chunked" in self.headers.get("transfer-encoding", "").lower():
return read_socket_chunked(self.sr), -1
remains = int(self.headers.get("content-length", -1))
if chunked:
return read_socket_chunked(self.sr), remains
elif remains == -1:
if remains == -1:
self.keepalive = False
return read_socket_unbounded(self.sr), remains
else:
@@ -590,8 +615,8 @@ class HttpCli(object):
alg = alg or "gz" # def.pk
try:
# config-forced opts
alg, lv = pk.split(",")
lv[alg] = int(lv)
alg, nlv = pk.split(",")
lv[alg] = int(nlv)
except:
pass
@@ -621,7 +646,7 @@ class HttpCli(object):
with ren_open(fn, *open_a, **params) as f:
f, fn = f["orz"]
path = os.path.join(fdir, fn)
post_sz, _, sha_b64 = hashcopy(reader, f)
post_sz, sha_hex, sha_b64 = hashcopy(reader, f, self.args.s_wr_slp)
if lim:
lim.nup(self.ip)
@@ -632,26 +657,48 @@ class HttpCli(object):
bos.unlink(path)
raise
if not self.args.nw:
vfs, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put(
False,
"up2k.hash_file",
vfs.realpath,
vfs.flags,
vrem,
fn,
self.ip,
time.time(),
)
if self.args.nw:
return post_sz, sha_hex, sha_b64, remains, path, ""
return post_sz, sha_b64, remains, path
vfs, rem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put(
False,
"up2k.hash_file",
vfs.realpath,
vfs.flags,
rem,
fn,
self.ip,
time.time(),
)
vsuf = ""
if self.can_read and "fk" in vfs.flags:
vsuf = "?k=" + gen_filekey(
self.args.fk_salt,
path,
post_sz,
0 if ANYWIN else bos.stat(path).st_ino,
)[: vfs.flags["fk"]]
vpath = "/".join([x for x in [vfs.vpath, rem, fn] if x])
vpath = quotep(vpath)
url = "{}://{}/{}".format(
"https" if self.is_https else "http",
self.headers.get("host") or "{}:{}".format(*list(self.s.getsockname())),
vpath + vsuf,
)
return post_sz, sha_hex, sha_b64, remains, path, url
def handle_stash(self):
post_sz, sha_b64, remains, path = self.dump_to_file()
post_sz, sha_hex, sha_b64, remains, path, url = self.dump_to_file()
spd = self._spd(post_sz)
self.log("{} wrote {}/{} bytes to {}".format(spd, post_sz, remains, path))
self.reply("{}\n{}\n".format(post_sz, sha_b64).encode("utf-8"))
m = "{} wrote {}/{} bytes to {} # {}"
self.log(m.format(spd, post_sz, remains, path, sha_b64[:28])) # 21
m = "{}\n{}\n{}\n{}\n".format(post_sz, sha_b64, sha_hex[:56], url)
self.reply(m.encode("utf-8"))
return True
def _spd(self, nbytes, add=True):
@@ -783,6 +830,10 @@ class HttpCli(object):
return True
def handle_search(self, body):
idx = self.conn.get_u2idx()
if not hasattr(idx, "p_end"):
raise Pebkac(500, "sqlite3 is not available on the server; cannot search")
vols = []
seen = {}
for vtop in self.rvol:
@@ -794,7 +845,6 @@ class HttpCli(object):
seen[vfs] = True
vols.append([vfs.vpath, vfs.realpath, vfs.flags])
idx = self.conn.get_u2idx()
t0 = time.time()
if idx.p_end:
penalty = 0.7
@@ -854,63 +904,63 @@ class HttpCli(object):
response = x.get()
chunksize, cstart, path, lastmod = response
if self.args.nw:
path = os.devnull
if remains > chunksize:
raise Pebkac(400, "your chunk is too big to fit")
self.log("writing {} #{} @{} len {}".format(path, chash, cstart, remains))
reader = read_socket(self.sr, remains)
f = None
fpool = not self.args.no_fpool
if fpool:
with self.mutex:
try:
f = self.u2fh.pop(path)
except:
pass
f = f or open(fsenc(path), "rb+", 512 * 1024)
try:
f.seek(cstart[0])
post_sz, _, sha_b64 = hashcopy(reader, f)
if self.args.nw:
path = os.devnull
if sha_b64 != chash:
raise Pebkac(
400,
"your chunk got corrupted somehow (received {} bytes); expected vs received hash:\n{}\n{}".format(
post_sz, chash, sha_b64
),
)
if remains > chunksize:
raise Pebkac(400, "your chunk is too big to fit")
if len(cstart) > 1 and path != os.devnull:
self.log(
"clone {} to {}".format(
cstart[0], " & ".join(unicode(x) for x in cstart[1:])
)
)
ofs = 0
while ofs < chunksize:
bufsz = min(chunksize - ofs, 4 * 1024 * 1024)
f.seek(cstart[0] + ofs)
buf = f.read(bufsz)
for wofs in cstart[1:]:
f.seek(wofs + ofs)
f.write(buf)
self.log("writing {} #{} @{} len {}".format(path, chash, cstart, remains))
ofs += len(buf)
reader = read_socket(self.sr, remains)
self.log("clone {} done".format(cstart[0]))
finally:
if not fpool:
f.close()
else:
f = None
fpool = not self.args.no_fpool
if fpool:
with self.mutex:
self.u2fh.put(path, f)
try:
f = self.u2fh.pop(path)
except:
pass
f = f or open(fsenc(path), "rb+", 512 * 1024)
try:
f.seek(cstart[0])
post_sz, _, sha_b64 = hashcopy(reader, f, self.args.s_wr_slp)
if sha_b64 != chash:
m = "your chunk got corrupted somehow (received {} bytes); expected vs received hash:\n{}\n{}"
raise Pebkac(400, m.format(post_sz, chash, sha_b64))
if len(cstart) > 1 and path != os.devnull:
self.log(
"clone {} to {}".format(
cstart[0], " & ".join(unicode(x) for x in cstart[1:])
)
)
ofs = 0
while ofs < chunksize:
bufsz = min(chunksize - ofs, 4 * 1024 * 1024)
f.seek(cstart[0] + ofs)
buf = f.read(bufsz)
for wofs in cstart[1:]:
f.seek(wofs + ofs)
f.write(buf)
ofs += len(buf)
self.log("clone {} done".format(cstart[0]))
finally:
if not fpool:
f.close()
else:
with self.mutex:
self.u2fh.put(path, f)
finally:
x = self.conn.hsrv.broker.put(True, "up2k.release_chunk", ptop, wark, chash)
x.get() # block client until released
x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
x = x.get()
@@ -957,15 +1007,13 @@ class HttpCli(object):
def get_pwd_cookie(self, pwd):
if pwd in self.asrv.iacct:
msg = "login ok"
dt = datetime.utcfromtimestamp(time.time() + 60 * 60 * 24 * 365)
exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
dur = 60 * 60 * 24 * 365
else:
msg = "naw dude"
pwd = "x" # nosec
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
dur = None
ck = "cppwd={}; Path=/; Expires={}; SameSite=Lax".format(pwd, exp)
return [ck, msg]
return [gencookie("cppwd", pwd, dur), msg]
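# (not part of this diff) a rough sketch of what the new gencookie helper likely does,
# reconstructed from the inline cookie code it replaces above; the real implementation
# in util.py may differ:
#
# def gencookie(k, v, dur):
#     if dur:
#         dt = datetime.utcfromtimestamp(time.time() + dur)
#         exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
#     else:
#         exp = "Fri, 15 Aug 1997 01:00:00 GMT"  # already expired; clears the cookie
#     return "{}={}; Path=/; Expires={}; SameSite=Lax".format(k, v, exp)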
def handle_mkdir(self):
new_dir = self.parser.require("name", 512)
@@ -1073,7 +1121,7 @@ class HttpCli(object):
f, fname = f["orz"]
abspath = os.path.join(fdir, fname)
self.log("writing to {}".format(abspath))
sz, sha512_hex, _ = hashcopy(p_data, f)
sz, sha_hex, sha_b64 = hashcopy(p_data, f, self.args.s_wr_slp)
if sz == 0:
raise Pebkac(400, "empty files in post")
@@ -1086,7 +1134,7 @@ class HttpCli(object):
bos.unlink(abspath)
raise
files.append([sz, sha512_hex, p_file, fname, abspath])
files.append([sz, sha_hex, sha_b64, p_file, fname, abspath])
dbv, vrem = vfs.get_dbv(rem)
self.conn.hsrv.broker.put(
False,
@@ -1138,7 +1186,7 @@ class HttpCli(object):
jmsg["error"] = errmsg
errmsg = "ERROR: " + errmsg
for sz, sha512, ofn, lfn, ap in files:
for sz, sha_hex, sha_b64, ofn, lfn, ap in files:
vsuf = ""
if self.can_read and "fk" in vfs.flags:
vsuf = "?k=" + gen_filekey(
@@ -1149,22 +1197,30 @@ class HttpCli(object):
)[: vfs.flags["fk"]]
vpath = "{}/{}".format(upload_vpath, lfn).strip("/")
msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a> {}\n'.format(
sha512[:56], sz, quotep(vpath) + vsuf, html_escape(ofn, crlf=True), vsuf
rel_url = quotep(vpath) + vsuf
msg += 'sha512: {} // {} // {} bytes // <a href="/{}">{}</a> {}\n'.format(
sha_hex[:56],
sha_b64,
sz,
rel_url,
html_escape(ofn, crlf=True),
vsuf,
)
# truncated SHA-512 prevents length extension attacks;
# using SHA-512/224, optionally SHA-512/256 = :64
jpart = {
"url": "{}://{}/{}".format(
"https" if self.is_https else "http",
self.headers.get("host", "copyparty"),
vpath + vsuf,
self.headers.get("host")
or "{}:{}".format(*list(self.s.getsockname())),
rel_url,
),
"sha512": sha512[:56],
"sha512": sha_hex[:56],
"sha_b64": sha_b64,
"sz": sz,
"fn": lfn,
"fn_orig": ofn,
"path": vpath + vsuf,
"path": rel_url,
}
jmsg["files"].append(jpart)
@@ -1283,7 +1339,7 @@ class HttpCli(object):
raise Pebkac(400, "expected body, got {}".format(p_field))
with open(fsenc(fp), "wb", 512 * 1024) as f:
sz, sha512, _ = hashcopy(p_data, f)
sz, sha512, _ = hashcopy(p_data, f, self.args.s_wr_slp)
if lim:
lim.nup(self.ip)
@@ -1340,6 +1396,9 @@ class HttpCli(object):
try:
fs_path = req_path + ext
st = bos.stat(fs_path)
if stat.S_ISDIR(st.st_mode):
continue
file_ts = max(file_ts, st.st_mtime)
editions[ext or "plain"] = [fs_path, st.st_size]
except:
@@ -1378,8 +1437,7 @@ class HttpCli(object):
if "gzip" not in supported_editions:
decompress = True
else:
ua = self.headers.get("user-agent", "")
if re.match(r"MSIE [4-6]\.", ua) and " SV1" not in ua:
if re.match(r"MSIE [4-6]\.", self.ua) and " SV1" not in self.ua:
decompress = True
if not decompress:
@@ -1484,13 +1542,14 @@ class HttpCli(object):
ret = True
with open_func(*open_args) as f:
if use_sendfile:
remains = sendfile_kern(lower, upper, f, self.s)
else:
remains = sendfile_py(lower, upper, f, self.s)
sendfun = sendfile_kern if use_sendfile else sendfile_py
remains = sendfun(
self.log, lower, upper, f, self.s, self.args.s_wr_sz, self.args.s_wr_slp
)
if remains > 0:
logmsg += " \033[31m" + unicode(upper - remains) + "\033[0m"
self.keepalive = False
spd = self._spd((upper - lower) - remains)
if self.do_log:
@@ -1624,13 +1683,15 @@ class HttpCli(object):
boundary = "\roll\tide"
targs = {
"ts": self.conn.hsrv.cachebuster(),
"svcname": self.args.doctitle,
"html_head": self.html_head,
"edit": "edit" in self.uparam,
"title": html_escape(self.vpath, crlf=True),
"lastmod": int(ts_md * 1000),
"md_plug": "true" if self.args.emp else "false",
"md_chk_rate": self.args.mcr,
"md": boundary,
"ts": self.conn.hsrv.cachebuster(),
"arg_base": arg_base,
}
html = template.render(**targs).encode("utf-8", "replace")
@@ -1679,6 +1740,31 @@ class HttpCli(object):
vstate = {}
vs = {"scanning": None, "hashq": None, "tagq": None, "mtpq": None}
if self.uparam.get("ls") in ["v", "t", "txt"]:
if self.uname == "*":
txt = "howdy stranger (you're not logged in)"
else:
txt = "welcome back {}".format(self.uname)
if vstate:
txt += "\nstatus:"
for k in ["scanning", "hashq", "tagq", "mtpq"]:
txt += " {}({})".format(k, vs[k])
if rvol:
txt += "\nyou can browse:"
for v in rvol:
txt += "\n " + v
if wvol:
txt += "\nyou can upload to:"
for v in wvol:
txt += "\n " + v
txt = txt.encode("utf-8", "replace") + b"\n"
self.reply(txt, mime="text/plain; charset=utf-8")
return True
html = self.j2(
"splash",
this=self,
@@ -1692,10 +1778,28 @@ class HttpCli(object):
tagq=vs["tagq"],
mtpq=vs["mtpq"],
url_suf=suf,
k304=self.k304(),
)
self.reply(html.encode("utf-8"))
return True
def set_k304(self):
ck = gencookie("k304", self.uparam["k304"], 60 * 60 * 24 * 365)
self.out_headerlist.append(("Set-Cookie", ck))
self.redirect("", "?h#cc")
def set_am_js(self):
v = "n" if self.uparam["am_js"] == "n" else "y"
ck = gencookie("js", v, 60 * 60 * 24 * 365)
self.out_headerlist.append(("Set-Cookie", ck))
self.reply(b"promoted\n")
def set_cfg_reset(self):
for k in ("k304", "js", "cppwd"):
self.out_headerlist.append(("Set-Cookie", gencookie(k, "x", None)))
self.redirect("", "?h#cc")
def tx_404(self, is_403=False):
if self.args.vague_403:
m = '<h1>404 not found &nbsp;┐( ´ -`)┌</h1><p>or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>'
@@ -1812,13 +1916,16 @@ class HttpCli(object):
if not self.args.unpost:
raise Pebkac(400, "the unpost feature is disabled in server config")
idx = self.conn.get_u2idx()
if not hasattr(idx, "p_end"):
raise Pebkac(500, "sqlite3 is not available on the server; cannot unpost")
filt = self.uparam.get("filter")
lm = "ups [{}]".format(filt)
self.log(lm)
ret = []
t0 = time.time()
idx = self.conn.get_u2idx()
lim = time.time() - self.args.unpost
for vol in self.asrv.vfs.all_vols.values():
cur = idx.get_cur(vol.realpath)
@@ -1912,6 +2019,13 @@ class HttpCli(object):
fmt = "{{}} {{:{},}} {{}}"
nfmt = "{:,}"
for x in dirs:
n = x["name"] + "/"
if arg == "v":
n = "\033[94m" + n
x["name"] = n
fmt = fmt.format(len(nfmt.format(biggest)))
ret = [
"# {}: {}".format(x, ls[x])
@@ -1960,6 +2074,12 @@ class HttpCli(object):
):
raise Pebkac(403)
self.html_head = vn.flags.get("html_head", "")
if vn.flags.get("norobots"):
self.out_headers["X-Robots-Tag"] = "noindex, nofollow"
else:
self.out_headers.pop("X-Robots-Tag", None)
is_dir = stat.S_ISDIR(st.st_mode)
if self.can_read:
th_fmt = self.uparam.get("th")
@@ -1977,9 +2097,7 @@ class HttpCli(object):
thp = None
if self.thumbcli:
thp = self.thumbcli.get(
dbv.realpath, vrem, int(st.st_mtime), th_fmt
)
thp = self.thumbcli.get(dbv, vrem, int(st.st_mtime), th_fmt)
if thp:
return self.tx_file(thp)
@@ -2029,12 +2147,11 @@ class HttpCli(object):
free = humansize(sv.f_frsize * sv.f_bfree, True)
total = humansize(sv.f_frsize * sv.f_blocks, True)
srv_info.append(free + " free")
srv_info.append(total)
srv_info.append("{} free of {}".format(free, total))
except:
pass
srv_info = "</span> /// <span>".join(srv_info)
srv_info = "</span> // <span>".join(srv_info)
perms = []
if self.can_read:
@@ -2050,10 +2167,12 @@ class HttpCli(object):
url_suf = self.urlq({}, [])
is_ls = "ls" in self.uparam
is_js = self.args.force_js or self.cookies.get("js") == "y"
tpl = "browser"
if "b" in self.uparam:
tpl = "browser2"
is_js = False
logues = ["", ""]
if not self.args.no_logues:
@@ -2078,6 +2197,7 @@ class HttpCli(object):
"taglist": [],
"srvinf": srv_info,
"acct": self.uname,
"idx": ("e2d" in vn.flags),
"perms": perms,
"logues": logues,
"readme": readme,
@@ -2086,6 +2206,7 @@ class HttpCli(object):
"vdir": quotep(self.vpath),
"vpnodes": vpnodes,
"files": [],
"ls0": None,
"acct": self.uname,
"perms": json.dumps(perms),
"taglist": [],
@@ -2103,6 +2224,8 @@ class HttpCli(object):
"readme": readme,
"title": html_escape(self.vpath, crlf=True),
"srv_info": srv_info,
"dtheme": self.args.theme,
"themes": self.args.themes,
}
if not self.can_read:
if is_ls:
@@ -2150,10 +2273,6 @@ class HttpCli(object):
if not self.args.ed or "dots" not in self.uparam:
vfs_ls = exclude_dotfiles(vfs_ls)
hidden = []
if rem == ".hist":
hidden = ["up2k."]
icur = None
if "e2t" in vn.flags:
idx = self.conn.get_u2idx()
@@ -2166,14 +2285,12 @@ class HttpCli(object):
for fn in vfs_ls:
base = ""
href = fn
if not is_ls and not self.trailing_slash and vpath:
if not is_ls and not is_js and not self.trailing_slash and vpath:
base = "/" + vpath + "/"
href = base + fn
if fn in vfs_virt:
fspath = vfs_virt[fn].realpath
elif hidden and any(fn.startswith(x) for x in hidden):
continue
else:
fspath = fsroot + "/" + fn
@@ -2290,7 +2407,7 @@ class HttpCli(object):
doc = self.uparam.get("doc") if self.can_read else None
if doc:
doc = unquotep(doc.replace("+", " "))
doc = unquotep(doc.replace("+", " ").split("?")[0])
j2a["docname"] = doc
if next((x for x in files if x["name"] == doc), None):
with open(os.path.join(abspath, doc), "rb") as f:
@@ -2309,7 +2426,12 @@ class HttpCli(object):
dirs.sort(key=itemgetter("name"))
j2a["files"] = dirs + files
if is_js:
j2a["ls0"] = {"dirs": dirs, "files": files, "taglist": taglist}
j2a["files"] = []
else:
j2a["files"] = dirs + files
j2a["logues"] = logues
j2a["taglist"] = taglist
j2a["txt_ext"] = self.args.textfiles.replace(",", " ")

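The upload-handler hunks above start reporting each file with both a truncated hex digest (sha_hex[:56]) and a urlsafe-base64 digest, and the inline comment notes that truncating SHA-512 output is what blocks length-extension attacks. A minimal sketch of how those two strings could be derived from a single SHA-512 pass; the function name and the sample input are illustrative, not the project's API:

import base64
import hashlib

def digest_pair(data):
    # one SHA-512 pass, reported two ways: 56 hex chars (224 bits, so the full
    # internal state is hidden and length extension is off the table) and a
    # urlsafe-base64 encoding of the first 33 digest bytes
    h = hashlib.sha512(data)
    hex56 = h.hexdigest()[:56]
    b64 = base64.urlsafe_b64encode(h.digest()[:33]).decode("ascii")
    return hex56, b64

print(digest_pair(b"example upload"))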

@@ -17,7 +17,7 @@ from .util import Unrecv
from .httpcli import HttpCli
from .u2idx import U2idx
from .th_cli import ThumbCli
from .th_srv import HAVE_PIL
from .th_srv import HAVE_PIL, HAVE_VIPS
from .ico import Ico
@@ -38,7 +38,7 @@ class HttpConn(object):
self.cert_path = hsrv.cert_path
self.u2fh = hsrv.u2fh
enth = HAVE_PIL and not self.args.no_thumb
enth = (HAVE_PIL or HAVE_VIPS) and not self.args.no_thumb
self.thumbcli = ThumbCli(hsrv) if enth else None
self.ico = Ico(self.args)


@@ -70,6 +70,12 @@ class HttpSrv(object):
self.cb_ts = 0
self.cb_v = 0
try:
x = self.broker.put(True, "thumbsrv.getcfg")
self.th_cfg = x.get()
except:
pass
env = jinja2.Environment()
env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web"))
self.j2 = {


@@ -8,7 +8,7 @@ import shutil
import subprocess as sp
from .__init__ import PY2, WINDOWS, unicode
from .util import fsenc, fsdec, uncyg, REKOBO_LKEY
from .util import fsenc, fsdec, uncyg, runcmd, REKOBO_LKEY
from .bos import bos
@@ -73,7 +73,7 @@ class MParser(object):
raise Exception()
def ffprobe(abspath):
def ffprobe(abspath, timeout=10):
cmd = [
b"ffprobe",
b"-hide_banner",
@@ -82,10 +82,8 @@ def ffprobe(abspath):
b"--",
fsenc(abspath),
]
p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
r = p.communicate()
txt = r[0].decode("utf-8", "replace")
return parse_ffprobe(txt)
rc = runcmd(cmd, timeout=timeout)
return parse_ffprobe(rc[1])
def parse_ffprobe(txt):
@@ -420,7 +418,8 @@ class MTag(object):
try:
md = mutagen.File(fsenc(abspath), easy=True)
x = md.info.length
if not md.info.length and not md.info.codec:
raise Exception()
except Exception as ex:
return self.get_ffprobe(abspath) if self.can_ffprobe else {}
@@ -478,13 +477,13 @@ class MTag(object):
env["PYTHONPATH"] = pypath
ret = {}
for tagname, mp in parsers.items():
for tagname, parser in parsers.items():
try:
cmd = [mp.bin, abspath]
if mp.bin.endswith(".py"):
cmd = [parser.bin, abspath]
if parser.bin.endswith(".py"):
cmd = [sys.executable] + cmd
args = {"env": env, "timeout": mp.timeout}
args = {"env": env, "timeout": parser.timeout}
if WINDOWS:
args["creationflags"] = 0x4000


@@ -5,7 +5,7 @@ import tarfile
import threading
from .sutil import errdesc
from .util import Queue, fsenc
from .util import Queue, fsenc, min_ex
from .bos import bos
@@ -88,8 +88,9 @@ class StreamTar(object):
try:
self.ser(f)
except Exception as ex:
errors.append([f["vp"], repr(ex)])
except Exception:
ex = min_ex(5, True).replace("\n", "\n-- ")
errors.append([f["vp"], ex])
if errors:
self.errf, txt = errdesc(errors)


@@ -17,7 +17,7 @@ from .util import mp, start_log_thrs, start_stackmon, min_ex, ansi_re
from .authsrv import AuthSrv
from .tcpsrv import TcpSrv
from .up2k import Up2k
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_WEBP
from .th_srv import ThumbSrv, HAVE_PIL, HAVE_VIPS, HAVE_WEBP
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
@@ -57,8 +57,10 @@ class SvcHub(object):
if args.log_thrs:
start_log_thrs(self.log, args.log_thrs, 0)
if not ANYWIN and not args.use_fpool:
if not args.use_fpool and args.j != 1:
args.no_fpool = True
m = "multithreading enabled with -j {}, so disabling fpool -- this can reduce upload performance on some filesystems"
self.log("root", m.format(args.j))
if not args.no_fpool and args.j != 1:
m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
@@ -68,6 +70,13 @@ class SvcHub(object):
self.log("root", m, c=3)
bri = "zy"[args.theme % 2 :][:1]
ch = "abcdefghijklmnopqrstuvwx"[int(args.theme / 2)]
args.theme = "{0}{1} {0} {1}".format(ch, bri)
if not args.hardlink and args.never_symlink:
args.no_dedup = True
# initiate all services to manage
self.asrv = AuthSrv(self.args, self.log)
if args.ls:
@@ -76,20 +85,30 @@ class SvcHub(object):
self.tcpsrv = TcpSrv(self)
self.up2k = Up2k(self)
decs = {k: 1 for k in self.args.th_dec.split(",")}
if not HAVE_VIPS:
decs.pop("vips", None)
if not HAVE_PIL:
decs.pop("pil", None)
if not HAVE_FFMPEG or not HAVE_FFPROBE:
decs.pop("ff", None)
self.args.th_dec = list(decs.keys())
self.thumbsrv = None
if not args.no_thumb:
if HAVE_PIL:
if not HAVE_WEBP:
args.th_no_webp = True
msg = "setting --th-no-webp because either libwebp is not available or your Pillow is too old"
self.log("thumb", msg, c=3)
m = "decoder preference: {}".format(", ".join(self.args.th_dec))
self.log("thumb", m)
if "pil" in self.args.th_dec and not HAVE_WEBP:
msg = "disabling webp thumbnails because either libwebp is not available or your Pillow is too old"
self.log("thumb", msg, c=3)
if self.args.th_dec:
self.thumbsrv = ThumbSrv(self)
else:
msg = "need Pillow to create thumbnails; for example:\n{}{} -m pip install --user Pillow\n"
self.log(
"thumb", msg.format(" " * 37, os.path.basename(sys.executable)), c=3
)
msg = "need either Pillow, pyvips, or FFmpeg to create thumbnails; for example:\n{0}{1} -m pip install --user Pillow\n{0}{1} -m pip install --user pyvips\n{0}apt install ffmpeg"
msg = msg.format(" " * 37, os.path.basename(sys.executable))
self.log("thumb", msg, c=3)
if not args.no_acode and args.no_thumb:
msg = "setting --no-acode because --no-thumb (sorry)"
@@ -103,6 +122,11 @@ class SvcHub(object):
args.th_poke = min(args.th_poke, args.th_maxage, args.ac_maxage)
if args.ftp or args.ftps:
from .ftpd import Ftpd
self.ftpd = Ftpd(self)
# decide which worker impl to use
if self.check_mp_enable():
from .broker_mp import BrokerMp as Broker
@@ -300,6 +324,10 @@ class SvcHub(object):
print("nailed it", end="")
ret = self.retcode
finally:
if self.args.wintitle:
print("\033]0;\033\\", file=sys.stderr, end="")
sys.stderr.flush()
print("\033[0m")
if self.logf:
self.logf.close()
@@ -351,7 +379,7 @@ class SvcHub(object):
src = ansi_re.sub("", src)
elif c:
if isinstance(c, int):
msg = "\033[3{}m{}".format(c, msg)
msg = "\033[3{}m{}\033[0m".format(c, msg)
elif "\033" not in c:
msg = "\033[{}m{}\033[0m".format(c, msg)
else:
@@ -395,7 +423,6 @@ class SvcHub(object):
def check_mp_enable(self):
if self.args.j == 1:
self.log("svchub", "multiprocessing disabled by argument -j 1")
return False
if mp.cpu_count() <= 1:

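The svchub hunks above trim the thumbnail decoder preference list down to whatever is actually available (pyvips, Pillow, FFmpeg/ffprobe) before deciding whether a ThumbSrv can be started at all. A small standalone sketch of that probe-and-filter pattern, assuming nothing beyond the standard library; the helper names and the default preference string are illustrative:

def probe(modname):
    # True if an optional dependency can be imported on this machine
    try:
        __import__(modname)
        return True
    except Exception:
        return False

HAVE_PIL = probe("PIL")
HAVE_VIPS = probe("pyvips")

def filter_decoders(pref="vips,pil,ff", have_ffmpeg=False):
    decs = {k: 1 for k in pref.split(",")}
    if not HAVE_VIPS:
        decs.pop("vips", None)
    if not HAVE_PIL:
        decs.pop("pil", None)
    if not have_ffmpeg:
        decs.pop("ff", None)
    return list(decs.keys())

print(filter_decoders())  # empty list means thumbnails get disabled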

@@ -1,13 +1,12 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
import os
import time
import zlib
from datetime import datetime
from .sutil import errdesc
from .util import yieldfile, sanitize_fn, spack, sunpack
from .util import yieldfile, sanitize_fn, spack, sunpack, min_ex
from .bos import bos
@@ -36,7 +35,10 @@ def unixtime2dos(ts):
bd = ((dy - 1980) << 9) + (dm << 5) + dd
bt = (th << 11) + (tm << 5) + ts // 2
return spack(b"<HH", bt, bd)
try:
return spack(b"<HH", bt, bd)
except:
return b"\x00\x00\x21\x00"
def gen_fdesc(sz, crc32, z64):
@@ -244,8 +246,9 @@ class StreamZip(object):
try:
for x in self.ser(f):
yield x
except Exception as ex:
errors.append([f["vp"], repr(ex)])
except Exception:
ex = min_ex(5, True).replace("\n", "\n-- ")
errors.append([f["vp"], ex])
if errors:
errf, txt = errdesc(errors)

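The zip-streamer hunk above wraps the DOS-timestamp packing in a try/except so that files dated before 1980-01-01 (which the zip format cannot express) get clamped instead of aborting the download. A self-contained sketch of that behaviour using only the standard library; field extraction differs slightly from the original but the packing and the fallback constant are the same:

import struct
from datetime import datetime, timezone

def unixtime2dos(ts):
    # DOS date: years since 1980 / month / day; DOS time: h / m / s*2
    dt = datetime.fromtimestamp(ts, timezone.utc)
    bd = ((dt.year - 1980) << 9) + (dt.month << 5) + dt.day
    bt = (dt.hour << 11) + (dt.minute << 5) + dt.second // 2
    try:
        return struct.pack("<HH", bt, bd)
    except struct.error:
        return b"\x00\x00\x21\x00"  # 1980-01-01 00:00:00

print(unixtime2dos(0).hex())  # unix epoch (1970) -> clamped to 1980-01-01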

@@ -2,9 +2,10 @@
from __future__ import print_function, unicode_literals
import re
import sys
import socket
from .__init__ import MACOS, ANYWIN
from .__init__ import MACOS, ANYWIN, unicode
from .util import chkcmd
@@ -54,19 +55,55 @@ class TcpSrv(object):
eps[x] = "external"
msgs = []
m = "available @ http://{}:{}/ (\033[33m{}\033[0m)"
title_tab = {}
title_vars = [x[1:] for x in self.args.wintitle.split(" ") if x.startswith("$")]
m = "available @ {}://{}:{}/ (\033[33m{}\033[0m)"
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
for port in sorted(self.args.p):
if port not in ok.get(ip, ok.get("0.0.0.0", [])):
continue
msgs.append(m.format(ip, port, desc))
proto = " http"
if self.args.http_only:
pass
elif self.args.https_only or port == 443:
proto = "https"
msgs.append(m.format(proto, ip, port, desc))
if not self.args.wintitle:
continue
if port in [80, 443]:
ep = ip
else:
ep = "{}:{}".format(ip, port)
hits = []
if "pub" in title_vars and "external" in unicode(desc):
hits.append(("pub", ep))
if "pub" in title_vars or "all" in title_vars:
hits.append(("all", ep))
for var in title_vars:
if var.startswith("ip-") and ep.startswith(var[3:]):
hits.append((var, ep))
for tk, tv in hits:
try:
title_tab[tk][tv] = 1
except:
title_tab[tk] = {tv: 1}
if msgs:
msgs[-1] += "\n"
for m in msgs:
self.log("tcpsrv", m)
if self.args.wintitle:
self._set_wintitle(title_tab)
def _listen(self, ip, port):
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
@@ -113,10 +150,15 @@ class TcpSrv(object):
return eps
r = re.compile(r"^\s+inet ([^ ]+)/.* (.*)")
ri = re.compile(r"^\s*[0-9]+\s*:.*")
up = False
for ln in txt.split("\n"):
if ri.match(ln):
up = "UP" in re.split("[>,< ]", ln)
try:
ip, dev = r.match(ln.rstrip()).groups()
eps[ip] = dev
eps[ip] = dev + ("" if up else ", \033[31mLINK-DOWN")
except:
pass
@@ -146,6 +188,7 @@ class TcpSrv(object):
def ips_windows_ipconfig(self):
eps = {}
offs = {}
try:
txt, _ = chkcmd(["ipconfig"])
except:
@@ -153,18 +196,29 @@ class TcpSrv(object):
rdev = re.compile(r"(^[^ ].*):$")
rip = re.compile(r"^ +IPv?4? [^:]+: *([0-9\.]{7,15})$")
roff = re.compile(r".*: Media disconnected$")
dev = None
for ln in txt.replace("\r", "").split("\n"):
m = rdev.match(ln)
if m:
if dev and dev not in eps.values():
offs[dev] = 1
dev = m.group(1).split(" adapter ", 1)[-1]
if dev and roff.match(ln):
offs[dev] = 1
dev = None
m = rip.match(ln)
if m and dev:
eps[m.group(1)] = dev
dev = None
return eps
if dev and dev not in eps.values():
offs[dev] = 1
return eps, offs
def ips_windows_netsh(self):
eps = {}
@@ -184,7 +238,6 @@ class TcpSrv(object):
m = rip.match(ln)
if m and dev:
eps[m.group(1)] = dev
dev = None
return eps
@@ -192,8 +245,11 @@ class TcpSrv(object):
if MACOS:
eps = self.ips_macos()
elif ANYWIN:
eps = self.ips_windows_ipconfig() # sees more interfaces
eps, off = self.ips_windows_ipconfig() # sees more interfaces + link state
eps.update(self.ips_windows_netsh()) # has better names
for k, v in eps.items():
if v in off:
eps[k] += ", \033[31mLINK-DOWN"
else:
eps = self.ips_linux()
@@ -232,3 +288,26 @@ class TcpSrv(object):
eps[default_route] = desc
return eps
def _set_wintitle(self, vars):
vars["all"] = vars.get("all", {"Local-Only": 1})
vars["pub"] = vars.get("pub", vars["all"])
vars2 = {}
for k, eps in vars.items():
vars2[k] = {
ep: 1
for ep in eps.keys()
if ":" not in ep or ep.split(":")[0] not in eps
}
title = ""
vars = vars2
for p in self.args.wintitle.split(" "):
if p.startswith("$"):
p = " and ".join(sorted(vars.get(p[1:], {"(None)": 1}).keys()))
title += "{} ".format(p)
print("\033]0;{}\033\\".format(title), file=sys.stderr, end="")
sys.stderr.flush()

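The tcpsrv hunks above add --wintitle, expanding placeholders such as $pub, $all and $ip-<prefix> into the endpoints detected at startup and pushing the result into the terminal title. A rough sketch of that expansion; the placeholder names come from the diff, the rest (function name, sample endpoints) is illustrative:

import sys

def set_wintitle(template, endpoints):
    # endpoints maps placeholder name -> dict of matching "ip:port" strings;
    # unknown placeholders fall back to "(None)", and the expanded title is
    # pushed to the terminal with an OSC-0 escape sequence
    words = []
    for w in template.split(" "):
        if w.startswith("$"):
            w = " and ".join(sorted(endpoints.get(w[1:], {"(None)": 1})))
        words.append(w)
    sys.stderr.write("\033]0;{}\033\\".format(" ".join(words)))
    sys.stderr.flush()

set_wintitle("copyparty @ $pub", {"pub": {"192.168.1.9:3923": 1}})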

@@ -4,7 +4,7 @@ from __future__ import print_function, unicode_literals
import os
from .util import Cooldown
from .th_srv import thumb_path, THUMBABLE, FMT_FFV, FMT_FFA
from .th_srv import thumb_path, HAVE_WEBP
from .bos import bos
@@ -18,30 +18,53 @@ class ThumbCli(object):
# cache on both sides for less broker spam
self.cooldown = Cooldown(self.args.th_poke)
try:
c = hsrv.th_cfg
except:
c = {k: {} for k in ["thumbable", "pil", "vips", "ffi", "ffv", "ffa"]}
self.thumbable = c["thumbable"]
self.fmt_pil = c["pil"]
self.fmt_vips = c["vips"]
self.fmt_ffi = c["ffi"]
self.fmt_ffv = c["ffv"]
self.fmt_ffa = c["ffa"]
# defer args.th_ff_jpg, can change at runtime
d = next((x for x in self.args.th_dec if x in ("vips", "pil")), None)
self.can_webp = HAVE_WEBP or d == "vips"
def log(self, msg, c=0):
self.log_func("thumbcli", msg, c)
def get(self, ptop, rem, mtime, fmt):
def get(self, dbv, rem, mtime, fmt):
ptop = dbv.realpath
ext = rem.rsplit(".")[-1].lower()
if ext not in THUMBABLE:
if ext not in self.thumbable or "dthumb" in dbv.flags:
return None
is_vid = ext in FMT_FFV
if is_vid and self.args.no_vthumb:
is_vid = ext in self.fmt_ffv
if is_vid and "dvthumb" in dbv.flags:
return None
want_opus = fmt == "opus"
is_au = ext in FMT_FFA
want_opus = fmt in ("opus", "caf")
is_au = ext in self.fmt_ffa
if is_au:
if want_opus:
if self.args.no_acode:
return None
else:
if self.args.no_athumb:
if "dathumb" in dbv.flags:
return None
elif want_opus:
return None
is_img = not is_vid and not is_au
if is_img and "dithumb" in dbv.flags:
return None
preferred = self.args.th_dec[0] if self.args.th_dec else ""
if rem.startswith(".hist/th/") and rem.split(".")[-1] in ["webp", "jpg"]:
return os.path.join(ptop, rem)
@@ -49,7 +72,11 @@ class ThumbCli(object):
fmt = "w"
if fmt == "w":
if self.args.th_no_webp or ((is_vid or is_au) and self.args.th_ff_jpg):
if (
self.args.th_no_webp
or (is_img and not self.can_webp)
or (self.args.th_ff_jpg and (not is_img or preferred == "ff"))
):
fmt = "j"
histpath = self.asrv.vfs.histtab.get(ptop)
@@ -58,15 +85,23 @@ class ThumbCli(object):
return None
tpath = thumb_path(histpath, rem, mtime, fmt)
tpaths = [tpath]
if fmt == "w":
# also check for jpg (maybe webp is unavailable)
tpaths.append(tpath.rsplit(".", 1)[0] + ".jpg")
ret = None
try:
st = bos.stat(tpath)
if st.st_size:
ret = tpath
else:
return None
except:
pass
abort = False
for tp in tpaths:
try:
st = bos.stat(tp)
if st.st_size:
ret = tpath = tp
fmt = ret.rsplit(".")[1]
else:
abort = True
except:
pass
if ret:
tdir = os.path.dirname(tpath)
@@ -80,5 +115,8 @@ class ThumbCli(object):
return ret
if abort:
return None
x = self.broker.put(True, "thumbsrv.get", ptop, rem, mtime, fmt)
return x.get()

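The thumbnail-client hunks above now check both the webp and the jpg cache path when a webp thumbnail was requested, since the server may have fallen back to jpg output (for example when FFmpeg lacks libwebp). A small sketch of that lookup with a hypothetical helper name:

import os

def pick_cached_thumb(tpath):
    # tpath is the preferred "*.webp" cache path; an equivalent "*.jpg" may
    # have been produced instead, so return whichever exists and is non-empty
    cands = [tpath, tpath.rsplit(".", 1)[0] + ".jpg"]
    for tp in cands:
        try:
            if os.path.getsize(tp) > 0:
                return tp
        except OSError:
            pass
    return None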

@@ -47,31 +47,12 @@ try:
except:
pass
# https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
# ffmpeg -formats
FMT_PIL = "bmp dib gif icns ico jpg jpeg jp2 jpx pcx png pbm pgm ppm pnm sgi tga tif tiff webp xbm dds xpm"
FMT_FFV = "av1 asf avi flv m4v mkv mjpeg mjpg mpg mpeg mpg2 mpeg2 h264 avc mts h265 hevc mov 3gp mp4 ts mpegts nut ogv ogm rm vob webm wmv"
FMT_FFA = "aac m4a ogg opus flac alac mp3 mp2 ac3 dts wma ra wav aif aiff au alaw ulaw mulaw amr gsm ape tak tta wv"
try:
import pyvips
if HAVE_HEIF:
FMT_PIL += " heif heifs heic heics"
if HAVE_AVIF:
FMT_PIL += " avif avifs"
FMT_PIL, FMT_FFV, FMT_FFA = [
{x: True for x in y.split(" ") if x} for y in [FMT_PIL, FMT_FFV, FMT_FFA]
]
THUMBABLE = {}
if HAVE_PIL:
THUMBABLE.update(FMT_PIL)
if HAVE_FFMPEG and HAVE_FFPROBE:
THUMBABLE.update(FMT_FFV)
THUMBABLE.update(FMT_FFA)
HAVE_VIPS = True
except:
HAVE_VIPS = False
def thumb_path(histpath, rem, mtime, fmt):
@@ -90,7 +71,7 @@ def thumb_path(histpath, rem, mtime, fmt):
h = hashlib.sha512(fsenc(fn)).digest()
fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
if fmt == "opus":
if fmt in ("opus", "caf"):
cat = "ac"
else:
fmt = "webp" if fmt == "w" else "jpg"
@@ -141,6 +122,37 @@ class ThumbSrv(object):
t.daemon = True
t.start()
self.fmt_pil, self.fmt_vips, self.fmt_ffi, self.fmt_ffv, self.fmt_ffa = [
{x: True for x in y.split(",")}
for y in [
self.args.th_r_pil,
self.args.th_r_vips,
self.args.th_r_ffi,
self.args.th_r_ffv,
self.args.th_r_ffa,
]
]
if not HAVE_HEIF:
for f in "heif heifs heic heics".split(" "):
self.fmt_pil.pop(f, None)
if not HAVE_AVIF:
for f in "avif avifs".split(" "):
self.fmt_pil.pop(f, None)
self.thumbable = {}
if "pil" in self.args.th_dec:
self.thumbable.update(self.fmt_pil)
if "vips" in self.args.th_dec:
self.thumbable.update(self.fmt_vips)
if "ff" in self.args.th_dec:
for t in [self.fmt_ffi, self.fmt_ffv, self.fmt_ffa]:
self.thumbable.update(t)
def log(self, msg, c=0):
self.log_func("thumb", msg, c)
@@ -201,6 +213,16 @@ class ThumbSrv(object):
return None
def getcfg(self):
return {
"thumbable": self.thumbable,
"pil": self.fmt_pil,
"vips": self.fmt_vips,
"ffi": self.fmt_ffi,
"ffv": self.fmt_ffv,
"ffa": self.fmt_ffa,
}
def worker(self):
while not self.stopping:
task = self.q.get()
@@ -211,15 +233,20 @@ class ThumbSrv(object):
ext = abspath.split(".")[-1].lower()
fun = None
if not bos.path.exists(tpath):
if ext in FMT_PIL:
fun = self.conv_pil
elif ext in FMT_FFV:
fun = self.conv_ffmpeg
elif ext in FMT_FFA:
if tpath.endswith(".opus"):
fun = self.conv_opus
else:
fun = self.conv_spec
for lib in self.args.th_dec:
if fun:
break
elif lib == "pil" and ext in self.fmt_pil:
fun = self.conv_pil
elif lib == "vips" and ext in self.fmt_vips:
fun = self.conv_vips
elif lib == "ff" and ext in self.fmt_ffi or ext in self.fmt_ffv:
fun = self.conv_ffmpeg
elif lib == "ff" and ext in self.fmt_ffa:
if tpath.endswith(".opus") or tpath.endswith(".caf"):
fun = self.conv_opus
else:
fun = self.conv_spec
if fun:
try:
@@ -296,11 +323,29 @@ class ThumbSrv(object):
im.save(tpath, **args)
def conv_vips(self, abspath, tpath):
crops = ["centre", "none"]
if self.args.th_no_crop:
crops = ["none"]
w, h = self.res
kw = {"height": h, "size": "down", "intent": "relative"}
for c in crops:
try:
kw["crop"] = c
img = pyvips.Image.thumbnail(abspath, w, **kw)
break
except:
pass
img.write_to_file(tpath, Q=40)
def conv_ffmpeg(self, abspath, tpath):
ret, _ = ffprobe(abspath)
ext = abspath.rsplit(".")[-1]
if ext in ["h264", "h265"]:
ext = abspath.rsplit(".")[-1].lower()
if ext in ["h264", "h265"] or ext in self.fmt_ffi:
seek = []
else:
dur = ret[".dur"][1] if ".dur" in ret else 4
@@ -349,12 +394,32 @@ class ThumbSrv(object):
def _run_ff(self, cmd):
# self.log((b" ".join(cmd)).decode("utf-8"))
ret, sout, serr = runcmd(cmd)
if ret != 0:
m = "FFmpeg failed (probably a corrupt video file):\n"
m += "\n".join(["ff: {}".format(x) for x in serr.split("\n")])
self.log(m, c="1;30")
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
ret, sout, serr = runcmd(cmd, timeout=self.args.th_convt)
if not ret:
return
c = "1;30"
m = "FFmpeg failed (probably a corrupt video file):\n"
if cmd[-1].lower().endswith(b".webp") and (
"Error selecting an encoder" in serr
or "Automatic encoder selection failed" in serr
or "Default encoder for format webp" in serr
or "Please choose an encoder manually" in serr
):
self.args.th_ff_jpg = True
m = "FFmpeg failed because it was compiled without libwebp; enabling --th-ff-jpg to force jpeg output:\n"
c = 1
if (
"Requested resampling engine is unavailable" in serr
or "output pad on Parsed_aresample_" in serr
):
m = "FFmpeg failed because it was compiled without libsox; you must set --th-ff-swr to force swr resampling:\n"
c = 1
m += "\n".join(["ff: {}".format(x) for x in serr.split("\n")])
self.log(m, c=c)
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
def conv_spec(self, abspath, tpath):
ret, _ = ffprobe(abspath)
@@ -406,21 +471,45 @@ class ThumbSrv(object):
if "ac" not in ret:
raise Exception("not audio")
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath),
b"-map", b"0:a:0",
b"-c:a", b"libopus",
b"-b:a", b"128k",
fsenc(tpath)
]
# fmt: on
src_opus = abspath.lower().endswith(".opus") or ret["ac"][1] == "opus"
want_caf = tpath.endswith(".caf")
tmp_opus = tpath
if want_caf:
tmp_opus = tpath.rsplit(".", 1)[0] + ".opus"
self._run_ff(cmd)
if not want_caf or (not src_opus and not bos.path.isfile(tmp_opus)):
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath),
b"-map_metadata", b"-1",
b"-map", b"0:a:0",
b"-c:a", b"libopus",
b"-b:a", b"128k",
fsenc(tmp_opus)
]
# fmt: on
self._run_ff(cmd)
if want_caf:
# fmt: off
cmd = [
b"ffmpeg",
b"-nostdin",
b"-v", b"error",
b"-hide_banner",
b"-i", fsenc(abspath if src_opus else tmp_opus),
b"-map_metadata", b"-1",
b"-map", b"0:a:0",
b"-c:a", b"copy",
b"-f", b"caf",
fsenc(tpath)
]
# fmt: on
self._run_ff(cmd)
def poke(self, tdir):
if not self.poke_cd.poke(tdir):
@@ -461,7 +550,7 @@ class ThumbSrv(object):
thumbpath = os.path.join(histpath, cat)
# self.log("cln {}".format(thumbpath))
exts = ["jpg", "webp"] if cat == "th" else ["opus"]
exts = ["jpg", "webp"] if cat == "th" else ["opus", "caf"]
maxage = getattr(self.args, cat + "_maxage")
now = time.time()
prev_b64 = None


@@ -51,11 +51,11 @@ class U2idx(object):
fhash = body["hash"]
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)
uq = "where substr(w,1,16) = ? and w = ?"
uq = "substr(w,1,16) = ? and w = ?"
uv = [wark[:16], wark]
try:
return self.run_query(vols, uq, uv)[0]
return self.run_query(vols, uq, uv, True, False)[0]
except:
raise Pebkac(500, min_ex())
@@ -87,17 +87,16 @@ class U2idx(object):
q = ""
va = []
joins = ""
have_up = False # query has up.* operands
have_mt = False
is_key = True
is_size = False
is_date = False
field_end = "" # closing parenthesis or whatever
kw_key = ["(", ")", "and ", "or ", "not "]
kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "]
ptn_mt = re.compile(r"^\.?[a-z_-]+$")
mt_ctr = 0
mt_keycmp = "substr(up.w,1,16)"
mt_keycmp2 = None
ptn_lc = re.compile(r" (mt[0-9]+\.v) ([=<!>]+) \? $")
ptn_lc = re.compile(r" (mt\.v) ([=<!>]+) \? \) $")
ptn_lcv = re.compile(r"[a-zA-Z]")
while True:
@@ -117,35 +116,47 @@ class U2idx(object):
if ok:
continue
v, uq = (uq + " ").split(" ", 1)
if uq.startswith('"'):
v, uq = uq[1:].split('"', 1)
while v.endswith("\\"):
v2, uq = uq.split('"', 1)
v = v[:-1] + '"' + v2
uq = uq.strip()
else:
v, uq = (uq + " ").split(" ", 1)
v = v.replace('\\"', '"')
if is_key:
is_key = False
if v == "size":
v = "up.sz"
is_size = True
have_up = True
elif v == "date":
v = "up.mt"
is_date = True
have_up = True
elif v == "path":
v = "up.rd"
v = "trim(?||up.rd,'/')"
va.append("\nrd")
have_up = True
elif v == "name":
v = "up.fn"
have_up = True
elif v == "tags" or ptn_mt.match(v):
mt_ctr += 1
mt_keycmp2 = "mt{}.w".format(mt_ctr)
joins += "inner join mt mt{} on {} = {} ".format(
mt_ctr, mt_keycmp, mt_keycmp2
)
mt_keycmp = mt_keycmp2
have_mt = True
field_end = ") "
if v == "tags":
v = "mt{0}.v".format(mt_ctr)
vq = "mt.v"
else:
v = "+mt{0}.k = '{1}' and mt{0}.v".format(mt_ctr, v)
vq = "+mt.k = '{}' and mt.v".format(v)
v = "exists(select 1 from mt where mt.w = mtw and " + vq
else:
raise Pebkac(400, "invalid key [" + v + "]")
@@ -191,6 +202,10 @@ class U2idx(object):
va.append(v)
is_key = True
if field_end:
q += field_end
field_end = ""
# lowercase tag searches
m = ptn_lc.search(q)
if not m or not ptn_lcv.search(unicode(v)):
@@ -202,16 +217,16 @@ class U2idx(object):
field, oper = m.groups()
if oper in ["=", "=="]:
q += " {} like ? ".format(field)
q += " {} like ? ) ".format(field)
else:
q += " lower({}) {} ? ".format(field, oper)
q += " lower({}) {} ? ) ".format(field, oper)
try:
return self.run_query(vols, joins + "where " + q, va)
return self.run_query(vols, q, va, have_up, have_mt)
except Exception as ex:
raise Pebkac(500, repr(ex))
def run_query(self, vols, uq, uv):
def run_query(self, vols, uq, uv, have_up, have_mt):
done_flag = []
self.active_id = "{:.6f}_{}".format(
time.time(), threading.current_thread().ident
@@ -228,16 +243,19 @@ class U2idx(object):
thr.start()
if not uq or not uv:
q = "select * from up"
v = ()
uq = "select * from up"
uv = ()
elif have_mt:
uq = "select up.*, substr(up.w,1,16) mtw from up where " + uq
uv = tuple(uv)
else:
q = "select up.* from up " + uq
v = tuple(uv)
uq = "select up.* from up where " + uq
uv = tuple(uv)
self.log("qs: {!r} {!r}".format(q, v))
self.log("qs: {!r} {!r}".format(uq, uv))
ret = []
lim = 1000
lim = int(self.args.srch_hits)
taglist = {}
for (vtop, ptop, flags) in vols:
cur = self.get_cur(ptop)
@@ -246,11 +264,19 @@ class U2idx(object):
self.active_cur = cur
vuv = []
for v in uv:
if v == "\nrd":
v = vtop + "/"
vuv.append(v)
vuv = tuple(vuv)
sret = []
fk = flags.get("fk")
c = cur.execute(q, v)
c = cur.execute(uq, vuv)
for hit in c:
w, ts, sz, rd, fn, ip, at = hit
w, ts, sz, rd, fn, ip, at = hit[:7]
lim -= 1
if lim <= 0:
break

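The search hunks above replace chained inner joins on the tag table with correlated EXISTS subqueries, so negated tag terms can also match files that have no tag rows at all. A hedged example of the resulting query shape; table and column names are taken from the diff, the schema and the search value are illustrative:

import sqlite3

db = sqlite3.connect(":memory:")
db.execute("create table up (w, mt, sz, rd, fn, ip, at)")  # file index (simplified)
db.execute("create table mt (w, k, v)")                    # tag rows (simplified)

q = (
    "select up.*, substr(up.w,1,16) mtw from up where "
    "exists(select 1 from mt where mt.w = mtw and +mt.k = 'artist' and mt.v like ?)"
)
print(db.execute(q, ("%daft%",)).fetchall())  # [] until something is indexed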

@@ -21,6 +21,7 @@ from .util import (
Pebkac,
Queue,
ProgressPrinter,
SYMTIME,
fsdec,
fsenc,
absreal,
@@ -73,6 +74,7 @@ class Up2k(object):
self.need_rescan = {}
self.dupesched = {}
self.registry = {}
self.droppable = {}
self.entags = {}
self.flags = {}
self.cur = {}
@@ -125,11 +127,11 @@ class Up2k(object):
all_vols = self.asrv.vfs.all_vols
have_e2d = self.init_indexes(all_vols)
if have_e2d:
thr = threading.Thread(target=self._snapshot, name="up2k-snapshot")
thr.daemon = True
thr.start()
thr = threading.Thread(target=self._snapshot, name="up2k-snapshot")
thr.daemon = True
thr.start()
if have_e2d:
thr = threading.Thread(target=self._hasher, name="up2k-hasher")
thr.daemon = True
thr.start()
@@ -295,7 +297,8 @@ class Up2k(object):
def _vis_reg_progress(self, reg):
ret = []
for _, job in reg.items():
ret.append(self._vis_job_progress(job))
if job["need"]:
ret.append(self._vis_job_progress(job))
return ret
@@ -467,9 +470,11 @@ class Up2k(object):
ft = "\033[0;32m{}{:.0}"
ff = "\033[0;35m{}{:.0}"
fv = "\033[0;36m{}:\033[1;30m{}"
fx = set(("html_head",))
a = [
(ft if v is True else ff if v is False else fv).format(k, str(v))
for k, v in flags.items()
if k not in fx
]
if a:
vpath = "?"
@@ -483,26 +488,41 @@ class Up2k(object):
self.log("/{} {}".format(vpath, " ".join(sorted(a))), "35")
reg = {}
drp = None
path = os.path.join(histpath, "up2k.snap")
if "e2d" in flags and bos.path.exists(path):
if bos.path.exists(path):
with gzip.GzipFile(path, "rb") as f:
j = f.read().decode("utf-8")
reg2 = json.loads(j)
try:
drp = reg2["droppable"]
reg2 = reg2["registry"]
except:
pass
for k, job in reg2.items():
path = os.path.join(job["ptop"], job["prel"], job["name"])
if bos.path.exists(path):
reg[k] = job
job["poke"] = time.time()
job["busy"] = {}
else:
self.log("ign deleted file in snap: [{}]".format(path))
m = "loaded snap {} |{}|".format(path, len(reg.keys()))
if drp is None:
drp = [k for k, v in reg.items() if not v.get("need", [])]
else:
drp = [x for x in drp if x in reg]
m = "loaded snap {} |{}| ({})".format(path, len(reg.keys()), len(drp or []))
m = [m] + self._vis_reg_progress(reg)
self.log("\n".join(m))
self.flags[ptop] = flags
self.registry[ptop] = reg
self.droppable[ptop] = drp or []
self.regdrop(ptop, None)
if not HAVE_SQLITE3 or "e2d" not in flags or "d2d" in flags:
return None
@@ -576,6 +596,9 @@ class Up2k(object):
if stat.S_ISDIR(inf.st_mode):
if abspath in excl or abspath == histpath:
continue
if iname == ".th" and bos.path.isdir(os.path.join(abspath, "top")):
# abandoned or foreign, skip
continue
# self.log(" dir: {}".format(abspath))
try:
ret += self._build_dir(dbw, top, excl, abspath, rei, reh, seen)
@@ -1119,9 +1142,9 @@ class Up2k(object):
m = "database is version {}, this copyparty only supports versions <= {}"
raise Exception(m.format(ver, DB_VER))
msg = "creating new DB (old is bad); backup: {}"
msg = "creating new DB (old is bad); backup: "
if ver:
msg = "creating new DB (too old to upgrade); backup: {}"
msg = "creating new DB (too old to upgrade); backup: "
cur = self._backup_db(db_path, cur, ver, msg)
db = cur.connection
@@ -1217,6 +1240,11 @@ class Up2k(object):
wark = self._get_wark(cj)
now = time.time()
job = None
try:
dev = bos.stat(os.path.join(cj["ptop"], cj["prel"])).st_dev
except:
dev = 0
with self.mutex:
cur = self.cur.get(cj["ptop"])
reg = self.registry[cj["ptop"]]
@@ -1228,36 +1256,42 @@ class Up2k(object):
q = r"select * from up where substr(w,1,16) = ? and w = ?"
argv = (wark[:16], wark)
alts = []
cur = cur.execute(q, argv)
for _, dtime, dsize, dp_dir, dp_fn, ip, at in cur:
if dp_dir.startswith("//") or dp_fn.startswith("//"):
dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
if job and (dp_dir != cj["prel"] or dp_fn != cj["name"]):
dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
try:
st = bos.stat(dp_abs)
if stat.S_ISLNK(st.st_mode):
# broken symlink
raise Exception()
except:
continue
dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
# relying on this to fail on broken symlinks
try:
sz = bos.path.getsize(dp_abs)
except:
sz = 0
if sz:
# self.log("--- " + wark + " " + dp_abs + " found file", 4)
job = {
"name": dp_fn,
"prel": dp_dir,
"vtop": cj["vtop"],
"ptop": cj["ptop"],
"size": dsize,
"lmod": dtime,
"addr": ip,
"at": at,
"hash": [],
"need": [],
}
j = {
"name": dp_fn,
"prel": dp_dir,
"vtop": cj["vtop"],
"ptop": cj["ptop"],
"size": dsize,
"lmod": dtime,
"addr": ip,
"at": at,
"hash": [],
"need": [],
"busy": {},
}
score = (
(3 if st.st_dev == dev else 0)
+ (2 if dp_dir == cj["prel"] else 0)
+ (1 if dp_fn == cj["name"] else 0)
)
alts.append([score, -len(alts), j])
job = sorted(alts, reverse=True)[0][2] if alts else None
if job and wark in reg:
# self.log("pop " + wark + " " + job["name"] + " handle_json db", 4)
del reg[wark]
@@ -1289,11 +1323,14 @@ class Up2k(object):
err = "partial upload exists at a different location; please resume uploading here instead:\n"
err += "/" + quotep(vsrc) + " "
dupe = [cj["prel"], cj["name"]]
try:
self.dupesched[src].append(dupe)
except:
self.dupesched[src] = [dupe]
# registry is size-constrained + can only contain one unique wark;
# let want_recheck trigger symlink (if still in reg) or reupload
if cur:
dupe = [cj["prel"], cj["name"], cj["lmod"]]
try:
self.dupesched[src].append(dupe)
except:
self.dupesched[src] = [dupe]
raise Pebkac(400, err)
@@ -1314,7 +1351,7 @@ class Up2k(object):
dst = os.path.join(job["ptop"], job["prel"], job["name"])
if not self.args.nw:
bos.unlink(dst) # TODO ed pls
self._symlink(src, dst)
self._symlink(src, dst, lmod=cj["lmod"])
if cur:
a = [cj[x] for x in "prel name lmod size addr".split()]
@@ -1338,6 +1375,7 @@ class Up2k(object):
"t0": now,
"hash": deepcopy(cj["hash"]),
"need": [],
"busy": {},
}
# client-provided, sanitized by _get_wark: name, size, lmod
for k in [
@@ -1385,22 +1423,23 @@ class Up2k(object):
with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
return f["orz"][1]
def _symlink(self, src, dst, verbose=True):
def _symlink(self, src, dst, verbose=True, lmod=None):
if verbose:
self.log("linking dupe:\n {0}\n {1}".format(src, dst))
if self.args.nw:
return
linked = False
try:
if self.args.no_symlink:
if self.args.no_dedup:
raise Exception("disabled in config")
lsrc = src
ldst = dst
fs1 = bos.stat(os.path.dirname(src)).st_dev
fs2 = bos.stat(os.path.dirname(dst)).st_dev
if fs1 == 0:
if fs1 == 0 or fs2 == 0:
# py2 on winxp or other unsupported combination
raise OSError()
elif fs1 == fs2:
@@ -1421,11 +1460,30 @@ class Up2k(object):
lsrc = nsrc[nc:]
hops = len(ndst[nc:]) - 1
lsrc = "../" * hops + "/".join(lsrc)
os.symlink(fsenc(lsrc), fsenc(ldst))
try:
if self.args.hardlink:
os.link(fsenc(src), fsenc(dst))
linked = True
except Exception as ex:
self.log("cannot hardlink: " + repr(ex))
if self.args.never_symlink:
raise Exception("symlink-fallback disabled in cfg")
if not linked:
os.symlink(fsenc(lsrc), fsenc(ldst))
linked = True
except Exception as ex:
self.log("cannot symlink; creating copy: " + repr(ex))
self.log("cannot link; creating copy: " + repr(ex))
shutil.copy2(fsenc(src), fsenc(dst))
if lmod and (not linked or SYMTIME):
times = (int(time.time()), int(lmod))
if ANYWIN:
self.lastmod_q.put([dst, 0, times])
else:
bos.utime(dst, times, False)
def handle_chunk(self, ptop, wark, chash):
with self.mutex:
job = self.registry[ptop].get(wark)
@@ -1444,6 +1502,14 @@ class Up2k(object):
if not nchunk:
raise Pebkac(400, "unknown chunk")
if chash in job["busy"]:
nh = len(job["hash"])
idx = job["hash"].index(chash)
m = "that chunk is already being written to:\n {}\n {} {}/{}\n {}"
raise Pebkac(400, m.format(wark, chash, idx, nh, job["name"]))
job["busy"][chash] = 1
job["poke"] = time.time()
chunksize = up2k_chunksize(job["size"])
@@ -1453,6 +1519,14 @@ class Up2k(object):
return [chunksize, ofs, path, job["lmod"]]
def release_chunk(self, ptop, wark, chash):
with self.mutex:
job = self.registry[ptop].get(wark)
if job:
job["busy"].pop(chash, None)
return [True]
def confirm_chunk(self, ptop, wark, chash):
with self.mutex:
try:
@@ -1463,6 +1537,8 @@ class Up2k(object):
except Exception as ex:
return "confirm_chunk, wark, " + repr(ex)
job["busy"].pop(chash, None)
try:
job["need"].remove(chash)
except Exception as ex:
@@ -1473,7 +1549,7 @@ class Up2k(object):
return ret, src
if self.args.nw:
# del self.registry[ptop][wark]
self.regdrop(ptop, wark)
return ret, dst
# windows cant rename open files
@@ -1505,21 +1581,21 @@ class Up2k(object):
a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
a += [job.get("at") or time.time()]
if self.idx_wark(*a):
# self.log("pop " + wark + " " + dst + " finish_upload idx_wark", 4)
del self.registry[ptop][wark]
# in-memory registry is reserved for unfinished uploads
else:
self.regdrop(ptop, wark)
dupes = self.dupesched.pop(dst, [])
if not dupes:
return
cur = self.cur.get(ptop)
for rd, fn in dupes:
for rd, fn, lmod in dupes:
d2 = os.path.join(ptop, rd, fn)
if os.path.exists(d2):
continue
self._symlink(dst, d2)
self._symlink(dst, d2, lmod=lmod)
if cur:
self.db_rm(cur, rd, fn)
self.db_add(cur, wark, rd, fn, *a[-4:])
@@ -1527,6 +1603,21 @@ class Up2k(object):
if cur:
cur.connection.commit()
def regdrop(self, ptop, wark):
t = self.droppable[ptop]
if wark:
t.append(wark)
if len(t) <= self.args.reg_cap:
return
n = len(t) - int(self.args.reg_cap / 2)
m = "up2k-registry [{}] has {} droppables; discarding {}"
self.log(m.format(ptop, len(t), n))
for k in t[:n]:
self.registry[ptop].pop(k, None)
self.droppable[ptop] = t[n:]
def idx_wark(self, ptop, wark, rd, fn, lmod, sz, ip, at):
cur = self.cur.get(ptop)
if not cur:
@@ -1721,8 +1812,9 @@ class Up2k(object):
dlabs = absreal(sabs)
m = "moving symlink from [{}] to [{}], target [{}]"
self.log(m.format(sabs, dabs, dlabs))
os.unlink(sabs)
self._symlink(dlabs, dabs, False)
mt = bos.path.getmtime(sabs, False)
bos.unlink(sabs)
self._symlink(dlabs, dabs, False, lmod=mt)
# folders are too scary, schedule rescan of both vols
self.need_rescan[svn.vpath] = 1
@@ -1852,25 +1944,30 @@ class Up2k(object):
slabs = list(sorted(links.keys()))[0]
ptop, rem = links.pop(slabs)
self.log("linkswap [{}] and [{}]".format(sabs, slabs))
mt = bos.path.getmtime(slabs, False)
bos.unlink(slabs)
bos.rename(sabs, slabs)
bos.utime(slabs, (int(time.time()), int(mt)), False)
self._symlink(slabs, sabs, False)
full[slabs] = [ptop, rem]
sabs = slabs
if not dabs:
dabs = list(sorted(full.keys()))[0]
for alink in links.keys():
lmod = None
try:
if alink != sabs and absreal(alink) != sabs:
continue
self.log("relinking [{}] to [{}]".format(alink, dabs))
lmod = bos.path.getmtime(alink, False)
bos.unlink(alink)
except:
pass
self._symlink(dabs, alink, False)
self._symlink(dabs, alink, False, lmod=lmod)
return len(full) + len(links)
@@ -1976,9 +2073,10 @@ class Up2k(object):
for path, sz, times in ready:
self.log("lmod: setting times {} on {}".format(times, path))
try:
bos.utime(path, times)
bos.utime(path, times, False)
except:
self.log("lmod: failed to utime ({}, {})".format(path, times))
m = "lmod: failed to utime ({}, {}):\n{}"
self.log(m.format(path, times, min_ex()))
if self.args.sparse and self.args.sparse * 1024 * 1024 <= sz:
try:
@@ -2042,7 +2140,8 @@ class Up2k(object):
bos.makedirs(histpath)
path2 = "{}.{}".format(path, os.getpid())
j = json.dumps(reg, indent=2, sort_keys=True).encode("utf-8")
body = {"droppable": self.droppable[ptop], "registry": reg}
j = json.dumps(body, indent=2, sort_keys=True).encode("utf-8")
with gzip.GzipFile(path2, "wb") as f:
f.write(j)

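The up2k hunks above add hardlinks as a dedup alternative: a hardlink is attempted first, then the existing (relative) symlink path, and a full copy only as the last resort, with the original mtime restored on the result. A simplified sketch of that fallback chain, ignoring the relative-path computation and the mtime handling; the function name and flags are illustrative:

import os
import shutil

def link_dupe(src, dst, prefer_hardlink=True, allow_symlink=True):
    # returns which strategy ended up being used
    try:
        if prefer_hardlink:
            try:
                os.link(src, dst)
                return "hardlink"
            except OSError:
                pass  # cross-device link, unsupported filesystem, ...
        if not allow_symlink:
            raise OSError("symlink fallback disabled")
        os.symlink(os.path.abspath(src), dst)
        return "symlink"
    except OSError:
        shutil.copy2(src, dst)
        return "copy"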

@@ -67,8 +67,11 @@ if WINDOWS and PY2:
FS_ENCODING = "utf-8"
SYMTIME = sys.version_info >= (3, 6) and os.utime in os.supports_follow_symlinks
HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT"
META_NOBOTS = '<meta name="robots" content="noindex, nofollow">'
HTTPCODE = {
200: "OK",
@@ -104,6 +107,7 @@ MIMES = {
"txt": "text/plain",
"js": "text/javascript",
"opus": "audio/ogg; codecs=opus",
"caf": "audio/x-caf",
"mp3": "audio/mpeg",
"m4a": "audio/mp4",
"jpg": "image/jpeg",
@@ -481,13 +485,13 @@ def vol_san(vols, txt):
return txt
def min_ex():
def min_ex(max_lines=8, reverse=False):
et, ev, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
fmt = "{} @ {} <{}>: {}"
ex = [fmt.format(fp.split(os.sep)[-1], ln, fun, txt) for fp, ln, fun, txt in tb]
ex.append("[{}] {}".format(et.__name__, ev))
return "\n".join(ex[-8:])
return "\n".join(ex[-max_lines:][:: -1 if reverse else 1])
@contextlib.contextmanager
@@ -821,6 +825,17 @@ def gen_filekey(salt, fspath, fsize, inode):
).decode("ascii")
def gencookie(k, v, dur):
v = v.replace(";", "")
if dur:
dt = datetime.utcfromtimestamp(time.time() + dur)
exp = dt.strftime("%a, %d %b %Y %H:%M:%S GMT")
else:
exp = "Fri, 15 Aug 1997 01:00:00 GMT"
return "{}={}; Path=/; Expires={}; SameSite=Lax".format(k, v, exp)
def humansize(sz, terse=False):
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
if sz < 1024:
@@ -1084,7 +1099,8 @@ def read_socket(sr, total_size):
buf = sr.recv(bufsz)
if not buf:
raise Pebkac(400, "client d/c during binary post")
m = "client d/c during binary post after {} bytes, {} bytes remaining"
raise Pebkac(400, m.format(total_size - remains, remains))
remains -= len(buf)
yield buf
@@ -1150,13 +1166,15 @@ def yieldfile(fn):
yield buf
def hashcopy(fin, fout):
def hashcopy(fin, fout, slp=0):
hashobj = hashlib.sha512()
tlen = 0
for buf in fin:
tlen += len(buf)
hashobj.update(buf)
fout.write(buf)
if slp:
time.sleep(slp)
digest = hashobj.digest()[:33]
digest_b64 = base64.urlsafe_b64encode(digest).decode("utf-8")
@@ -1164,12 +1182,14 @@ def hashcopy(fin, fout):
return tlen, hashobj.hexdigest(), digest_b64
def sendfile_py(lower, upper, f, s):
def sendfile_py(log, lower, upper, f, s, bufsz, slp):
remains = upper - lower
f.seek(lower)
while remains > 0:
# time.sleep(0.01)
buf = f.read(min(1024 * 32, remains))
if slp:
time.sleep(slp)
buf = f.read(min(bufsz, remains))
if not buf:
return remains
@@ -1182,17 +1202,24 @@ def sendfile_py(lower, upper, f, s):
return 0
def sendfile_kern(lower, upper, f, s):
def sendfile_kern(log, lower, upper, f, s, bufsz, slp):
out_fd = s.fileno()
in_fd = f.fileno()
ofs = lower
stuck = None
while ofs < upper:
stuck = stuck or time.time()
try:
req = min(2 ** 30, upper - ofs)
select.select([], [out_fd], [], 10)
n = os.sendfile(out_fd, in_fd, ofs, req)
stuck = None
except Exception as ex:
# print("sendfile: " + repr(ex))
d = time.time() - stuck
log("sendfile stuck for {:.3f} sec: {!r}".format(d, ex))
if d < 3600 and ex.errno == 11: # eagain
continue
n = 0
if n <= 0:
@@ -1208,7 +1235,7 @@ def statdir(logger, scandir, lstat, top):
if lstat and ANYWIN:
lstat = False
if lstat and not os.supports_follow_symlinks:
if lstat and (PY2 or os.stat not in os.supports_follow_symlinks):
scandir = False
try:
@@ -1310,9 +1337,17 @@ def guess_mime(url, fallback="application/octet-stream"):
return ret
def runcmd(argv):
def runcmd(argv, timeout=None):
p = sp.Popen(argv, stdout=sp.PIPE, stderr=sp.PIPE)
stdout, stderr = p.communicate()
if not timeout or PY2:
stdout, stderr = p.communicate()
else:
try:
stdout, stderr = p.communicate(timeout=timeout)
except sp.TimeoutExpired:
p.kill()
stdout, stderr = p.communicate()
stdout = stdout.decode("utf-8", "replace")
stderr = stderr.decode("utf-8", "replace")
return [p.returncode, stdout, stderr]


@@ -17,12 +17,11 @@ window.baguetteBox = (function () {
titleTag: false,
async: false,
preload: 2,
animation: 'slideIn',
afterShow: null,
afterHide: null,
onChange: null,
},
overlay, slider, btnPrev, btnNext, btnHelp, btnRotL, btnRotR, btnSel, btnVmode, btnClose,
overlay, slider, btnPrev, btnNext, btnHelp, btnAnim, btnRotL, btnRotR, btnSel, btnVmode, btnClose,
currentGallery = [],
currentIndex = 0,
isOverlayVisible = false,
@@ -30,6 +29,7 @@ window.baguetteBox = (function () {
touchFlag = false, // busy
re_i = /.+\.(gif|jpe?g|png|webp)(\?|$)/i,
re_v = /.+\.(webm|mp4)(\?|$)/i,
anims = ['slideIn', 'fadeIn', 'none'],
data = {}, // all galleries
imagesElements = [],
documentLastFocus = null,
@@ -178,6 +178,7 @@ window.baguetteBox = (function () {
'<button id="bbox-next" class="bbox-btn" type="button" aria-label="Next">&gt;</button>' +
'<div id="bbox-btns">' +
'<button id="bbox-help" type="button">?</button>' +
'<button id="bbox-anim" type="button" tt="a">-</button>' +
'<button id="bbox-rotl" type="button">↶</button>' +
'<button id="bbox-rotr" type="button">↷</button>' +
'<button id="bbox-tsel" type="button">sel</button>' +
@@ -193,6 +194,7 @@ window.baguetteBox = (function () {
btnPrev = ebi('bbox-prev');
btnNext = ebi('bbox-next');
btnHelp = ebi('bbox-help');
btnAnim = ebi('bbox-anim');
btnRotL = ebi('bbox-rotl');
btnRotR = ebi('bbox-rotr');
btnSel = ebi('bbox-tsel');
@@ -237,7 +239,7 @@ window.baguetteBox = (function () {
}
function keyDownHandler(e) {
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing || modal.busy)
return;
var k = e.code + '', v = vid();
@@ -284,6 +286,16 @@ window.baguetteBox = (function () {
rotn(e.shiftKey ? -1 : 1);
}
function anim() {
var i = (anims.indexOf(options.animation) + 1) % anims.length,
o = options;
swrite('ganim', anims[i]);
options = {};
setOptions(o);
if (tt.en)
tt.show.bind(this)();
}
function setVmode() {
var v = vid();
ebi('bbox-vmode').style.display = v ? '' : 'none';
@@ -397,6 +409,7 @@ window.baguetteBox = (function () {
bind(btnClose, 'click', hideOverlay);
bind(btnVmode, 'click', tglVmode);
bind(btnHelp, 'click', halp);
bind(btnAnim, 'click', anim);
bind(btnRotL, 'click', rotl);
bind(btnRotR, 'click', rotr);
bind(btnSel, 'click', tglsel);
@@ -414,6 +427,7 @@ window.baguetteBox = (function () {
unbind(btnClose, 'click', hideOverlay);
unbind(btnVmode, 'click', tglVmode);
unbind(btnHelp, 'click', halp);
unbind(btnAnim, 'click', anim);
unbind(btnRotL, 'click', rotl);
unbind(btnRotR, 'click', rotr);
unbind(btnSel, 'click', tglsel);
@@ -459,7 +473,12 @@ window.baguetteBox = (function () {
if (typeof newOptions[item] !== 'undefined')
options[item] = newOptions[item];
}
slider.style.transition = (options.animation === 'fadeIn' ? 'opacity .4s ease' :
var an = options.animation = sread('ganim') || anims[ANIM ? 0 : 2];
btnAnim.textContent = ['⇄', '⮺', '⚡'][anims.indexOf(an)];
btnAnim.setAttribute('tt', 'animation: ' + an);
slider.style.transition = (options.animation === 'fadeIn' ? 'opacity .3s ease' :
options.animation === 'slideIn' ? '' : 'none');
if (options.buttons === 'auto' && ('ontouchstart' in window || currentGallery.length === 1))
@@ -520,6 +539,7 @@ window.baguetteBox = (function () {
if (overlay.style.display === 'none')
return;
sethash('');
unbind(document, 'keydown', keyDownHandler);
unbind(document, 'keyup', keyUpHandler);
unbind(document, 'fullscreenchange', onFSC);
@@ -806,7 +826,7 @@ window.baguetteBox = (function () {
slider.style.transform = 'translate3d(' + offset + ',0,0)' :
slider.style.left = offset;
slider.style.opacity = 1;
}, 400);
}, 100);
} else {
xform ?
slider.style.transform = 'translate3d(' + offset + ',0,0)' :

File diff suppressed because it is too large


@@ -6,6 +6,7 @@
<title>⇆🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
{{ html_head }}
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
<link rel="stylesheet" media="screen" href="/.cpr/browser.css?_={{ ts }}">
{%- if css %}
@@ -34,6 +35,7 @@
<input type="file" name="f" multiple /><br />
<input type="submit" value="start upload">
</form>
<a id="bbsw" href="?b=u"><br />switch to basic browser</a>
</div>
<div id="op_mkdir" class="opview opbox act">
@@ -133,6 +135,9 @@
<script>
var acct = "{{ acct }}",
perms = {{ perms }},
themes = {{ themes }},
dtheme = "{{ dtheme }}",
srvinf = "{{ srv_info }}",
def_hcols = {{ def_hcols|tojson }},
have_up2k_idx = {{ have_up2k_idx|tojson }},
have_tags_idx = {{ have_tags_idx|tojson }},
@@ -143,11 +148,13 @@
have_zip = {{ have_zip|tojson }},
txt_ext = "{{ txt_ext }}",
{% if no_prism %}no_prism = 1,{% endif %}
readme = {{ readme|tojson }};
readme = {{ readme|tojson }},
ls0 = {{ ls0|tojson }};
document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark");
document.documentElement.setAttribute("class", localStorage.theme || dtheme);
</script>
<script src="/.cpr/util.js?_={{ ts }}"></script>
<script src="/.cpr/baguettebox.js?_={{ ts }}"></script>
<script src="/.cpr/browser.js?_={{ ts }}"></script>
<script src="/.cpr/up2k.js?_={{ ts }}"></script>
{%- if js %}

File diff suppressed because it is too large


@@ -6,6 +6,7 @@
<title>{{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
{{ html_head }}
<style>
html{font-family:sans-serif}
td{border:1px solid #999;border-width:1px 1px 0 0;padding:0 5px}


@@ -219,48 +219,48 @@ blink {
html.dark,
html.dark body {
html.z,
html.z body {
background: #222;
color: #ccc;
}
html.dark #toc a {
html.z #toc a {
color: #ccc;
border-left: .4em solid #444;
border-bottom: .1em solid #333;
}
html.dark #toc a.act {
html.z #toc a.act {
color: #fff;
border-left: .4em solid #3ad;
}
html.dark #toc li {
html.z #toc li {
border-width: 0;
}
html.dark #mn a:not(:last-child)::after {
html.z #mn a:not(:last-child)::after {
border-color: rgba(255,255,255,0.3);
}
html.dark #mn a {
html.z #mn a {
color: #ccc;
}
html.dark #mn {
html.z #mn {
border-bottom: 1px solid #333;
}
html.dark #mn,
html.dark #mh {
html.z #mn,
html.z #mh {
background: #222;
}
html.dark #mh a {
html.z #mh a {
color: #ccc;
background: none;
}
html.dark #mh a:hover {
html.z #mh a:hover {
background: #333;
color: #fff;
}
html.dark #toolsbox {
html.z #toolsbox {
background: #222;
}
html.dark #toolsbox.open {
html.z #toolsbox.open {
box-shadow: 0 .2em .2em #069;
border-radius: 0 0 .4em .4em;
}
@@ -308,23 +308,23 @@ blink {
html.dark #toc {
html.z #toc {
background: #282828;
border-top: 1px solid #2c2c2c;
box-shadow: 0 0 1em #181818;
}
html.dark #toc,
html.dark #mw {
html.z #toc,
html.z #mw {
scrollbar-color: #b80 #282828;
}
html.dark #toc::-webkit-scrollbar-track {
html.z #toc::-webkit-scrollbar-track {
background: #282828;
}
html.dark #toc::-webkit-scrollbar {
html.z #toc::-webkit-scrollbar {
background: #282828;
width: .8em;
}
html.dark #toc::-webkit-scrollbar-thumb {
html.z #toc::-webkit-scrollbar-thumb {
background: #b80;
}
}
@@ -432,16 +432,16 @@ blink {
html.dark .mdo a {
html.z .mdo a {
color: #000;
}
html.dark .mdo pre,
html.dark .mdo code {
html.z .mdo pre,
html.z .mdo code {
color: #240;
}
html.dark .mdo p>em,
html.dark .mdo li>em,
html.dark .mdo td>em {
html.z .mdo p>em,
html.z .mdo li>em,
html.z .mdo td>em {
color: #940;
}
}


@@ -3,6 +3,7 @@
<title>📝🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7">
{{ html_head }}
<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
<link rel="stylesheet" href="/.cpr/md.css?_={{ ts }}">
{%- if edit %}
@@ -10,7 +11,7 @@
{%- endif %}
</head>
<body>
<div id="mn">navbar</div>
<div id="mn"></div>
<div id="mh">
<a id="lightswitch" href="#">go dark</a>
<a id="navtoggle" href="#">hide nav</a>
@@ -135,13 +136,13 @@ var md_opt = {
(function () {
var l = localStorage,
drk = l.lightmode != 1,
drk = l.light != 1,
btn = document.getElementById("lightswitch"),
f = function (e) {
if (e) { e.preventDefault(); drk = !drk; }
document.documentElement.setAttribute("class", drk? "dark":"light");
document.documentElement.setAttribute("class", drk? "z":"y");
btn.innerHTML = "go " + (drk ? "light":"dark");
l.lightmode = drk? 0:1;
l.light = drk? 0:1;
};
btn.onclick = f;


@@ -39,20 +39,14 @@ var md_plug = {};
// add navbar
(function () {
var n = document.location + '';
n = n.substr(n.indexOf('//') + 2).split('?')[0].split('/');
n[0] = 'top';
var loc = [];
var nav = [];
for (var a = 0; a < n.length; a++) {
if (a > 0)
loc.push(n[a]);
var dec = esc(uricom_dec(n[a])[0]);
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
var parts = get_evpath().split('/'), link = '', o;
for (var a = 0, aa = parts.length - 2; a <= aa; a++) {
link += parts[a] + (a < aa ? '/' : '');
o = mknod('a');
o.setAttribute('href', link);
o.textContent = uricom_dec(parts[a])[0] || 'top';
dom_nav.appendChild(o);
}
dom_nav.innerHTML = nav.join('');
})();
@@ -91,13 +85,13 @@ function copydom(src, dst, lv) {
var rpl = [];
for (var a = sc.length - 1; a >= 0; a--) {
var st = sc[a].tagName,
dt = dc[a].tagName;
var st = sc[a].tagName || sc[a].nodeType,
dt = dc[a].tagName || dc[a].nodeType;
if (st !== dt) {
dbg("replace L%d (%d/%d) type %s/%s", lv, a, sc.length, st, dt);
rpl.push(a);
continue;
dst.innerHTML = src.innerHTML;
return;
}
var sa = sc[a].attributes || [],
@@ -146,8 +140,11 @@ function copydom(src, dst, lv) {
// repl is reversed; build top-down
var nbytes = 0;
for (var a = rpl.length - 1; a >= 0; a--) {
var html = sc[rpl[a]].outerHTML;
dc[rpl[a]].outerHTML = html;
var i = rpl[a],
prop = sc[i].nodeType == 1 ? 'outerHTML' : 'nodeValue';
var html = sc[i][prop];
dc[i][prop] = html;
nbytes += html.length;
}
if (nbytes > 0)
@@ -256,7 +253,7 @@ function convert_markdown(md_text, dest_dom) {
Object.assign(marked_opts, ext[0]);
try {
var md_html = marked(md_text, marked_opts);
var md_html = marked.parse(md_text, marked_opts);
}
catch (ex) {
if (ext)


@@ -61,7 +61,7 @@
position: relative;
scrollbar-color: #eb0 #f7f7f7;
}
html.dark #mt {
html.z #mt {
color: #eee;
background: #222;
border: 1px solid #777;
@@ -77,7 +77,7 @@ html.dark #mt {
background: #f97;
border-radius: .15em;
}
html.dark #save.force-save {
html.z #save.force-save {
color: #fca;
background: #720;
}
@@ -102,7 +102,7 @@ html.dark #save.force-save {
#helpclose {
display: block;
}
html.dark #helpbox {
html.z #helpbox {
box-shadow: 0 .5em 2em #444;
background: #222;
border: 1px solid #079;


@@ -84,24 +84,24 @@ html .editor-toolbar>button.save.force-save {
/* darkmode */
html.dark .mdo,
html.dark .CodeMirror {
html.z .mdo,
html.z .CodeMirror {
border-color: #222;
}
html.dark,
html.dark body,
html.dark .CodeMirror {
html.z,
html.z body,
html.z .CodeMirror {
background: #222;
color: #ccc;
}
html.dark .CodeMirror-cursor {
html.z .CodeMirror-cursor {
border-color: #fff;
}
html.dark .CodeMirror-selected {
html.z .CodeMirror-selected {
box-shadow: 0 0 1px #0cf inset;
}
html.dark .CodeMirror-selected,
html.dark .CodeMirror-selectedtext {
html.z .CodeMirror-selected,
html.z .CodeMirror-selectedtext {
border-radius: .1em;
background: #246;
color: #fff;
@@ -109,37 +109,37 @@ html.dark .CodeMirror-selectedtext {
html.dark #mn a {
html.z #mn a {
color: #ccc;
}
html.dark #mn a:not(:last-child):after {
html.z #mn a:not(:last-child):after {
border-color: rgba(255,255,255,0.3);
}
html.dark .editor-toolbar {
html.z .editor-toolbar {
border-color: #2c2c2c;
background: #1c1c1c;
}
html.dark .editor-toolbar>i.separator {
html.z .editor-toolbar>i.separator {
border-left: 1px solid #444;
border-right: 1px solid #111;
}
html.dark .editor-toolbar>button {
html.z .editor-toolbar>button {
margin-left: -1px; border: 1px solid rgba(255,255,255,0.1);
color: #aaa;
}
html.dark .editor-toolbar>button:hover {
html.z .editor-toolbar>button:hover {
color: #333;
}
html.dark .editor-toolbar>button.active {
html.z .editor-toolbar>button.active {
color: #333;
border-color: #ec1;
background: #c90;
}
html.dark .editor-toolbar::after,
html.dark .editor-toolbar::before {
html.z .editor-toolbar::after,
html.z .editor-toolbar::before {
background: none;
}
@@ -150,6 +150,6 @@ html.dark .editor-toolbar::before {
padding: 1em;
background: #f7f7f7;
}
html.dark .mdo {
html.z .mdo {
background: #1c1c1c;
}


@@ -3,6 +3,7 @@
<title>📝🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.7">
{{ html_head }}
<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
<link rel="stylesheet" href="/.cpr/mde.css?_={{ ts }}">
<link rel="stylesheet" href="/.cpr/deps/mini-fa.css?_={{ ts }}">
@@ -33,11 +34,11 @@ var md_opt = {
var lightswitch = (function () {
var l = localStorage,
drk = l.lightmode != 1,
drk = l.light != 1,
f = function (e) {
if (e) drk = !drk;
document.documentElement.setAttribute("class", drk? "dark":"light");
l.lightmode = drk? 0:1;
document.documentElement.setAttribute("class", drk? "z":"y");
l.light = drk? 0:1;
};
f();
return f;
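
the lightswitch change above is part of a repo-wide rename: the html element now gets class "z" for dark mode and "y" for light mode, and the preference is stored as localStorage.light (1 = light) instead of localStorage.lightmode. a rough standalone sketch of that convention (toggle_theme is illustrative, not a function from this repo):

    var light = localStorage.light == 1;                         // stored preference; default is dark
    function apply_theme() {
        document.documentElement.className = light ? 'y' : 'z';  // html.y = light, html.z = dark
    }
    function toggle_theme() {
        light = !light;
        localStorage.light = light ? 1 : 0;
        apply_theme();
    }
    apply_theme();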


@@ -3,9 +3,10 @@
<head>
<meta charset="utf-8">
<title>copyparty</title>
<title>{{ svcname }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
{{ html_head }}
<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}">
</head>


@@ -37,7 +37,9 @@ a+a {
float: right;
margin: -.2em 0 0 .5em;
}
.logout {
.logout,
.btns a,
a.r {
color: #c04;
border-color: #c7a;
}
@@ -78,27 +80,35 @@ table {
margin-top: .3em;
text-align: right;
}
blockquote {
margin: 0 0 1.6em .6em;
padding: .7em 1em 0 1em;
border-left: .3em solid rgba(128,128,128,0.5);
border-radius: 0 0 0 .25em;
}
html.dark,
html.dark body,
html.dark #wrap {
html.z,
html.z body,
html.z #wrap {
background: #222;
color: #ccc;
}
html.dark h1 {
html.z h1 {
border-color: #777;
}
html.dark a {
html.z a {
color: #fff;
background: #057;
border-color: #37a;
}
html.dark .logout {
html.z .logout,
html.z .btns a,
html.z a.r {
background: #804;
border-color: #c28;
}
html.dark input {
html.z input {
color: #fff;
background: #626;
border: 1px solid #c2c;
@@ -107,6 +117,6 @@ html.dark input {
padding: .5em .7em;
margin: 0 .5em 0 0;
}
html.dark .num {
html.z .num {
border-color: #777;
}


@@ -3,9 +3,10 @@
<head>
<meta charset="utf-8">
<title>copyparty</title>
<title>{{ svcname }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
{{ html_head }}
<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
</head>
@@ -72,6 +73,18 @@
</ul>
{%- endif %}
<h1 id="cc">client config:</h1>
<ul>
{% if k304 %}
<li><a href="/?k304=n">disable k304</a> (currently enabled)
{%- else %}
<li><a href="/?k304=y" class="r">enable k304</a> (currently disabled)
{% endif %}
<blockquote>enabling this will disconnect your client on every HTTP 304, which can prevent some buggy browsers/proxies from getting stuck (suddenly not being able to load pages), <em>but</em> it will also make things slower in general</blockquote></li>
<li><a href="/?reset" class="r" onclick="localStorage.clear();return true">reset client settings</a></li>
</ul>
<h1>login for more:</h1>
<ul>
<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
@@ -84,8 +97,7 @@
<a href="#" id="repl">π</a>
<script>
if (localStorage.lightmode != 1)
document.documentElement.setAttribute("class", "dark");
document.documentElement.setAttribute("class", localStorage.light == 1 ? "y" : "z");
</script>
<script src="/.cpr/util.js?_={{ ts }}"></script>


@@ -116,6 +116,20 @@ html {
#toast.err #toastc {
background: #d06;
}
#tth {
color: #fff;
background: #111;
font-size: .9em;
padding: 0 .26em;
line-height: .97em;
border-radius: 1em;
position: absolute;
display: none;
}
#tth.act {
display: block;
z-index: 9001;
}
#tt.b {
padding: 0 2em;
border-radius: .5em;
@@ -133,7 +147,8 @@ html {
}
#modalc code,
#tt code {
background: #3c3c3c;
color: #eee;
background: #444;
padding: .1em .3em;
border-top: 1px solid #777;
border-radius: .3em;
@@ -142,22 +157,26 @@ html {
#tt em {
color: #f6a;
}
html.light #tt {
html.y #tt {
background: #fff;
border-color: #888 #000 #777 #000;
}
html.light #tt,
html.light #toast {
html.y #tt,
html.y #toast {
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
}
#modalc code,
html.light #tt code {
html.y #tt code {
background: #060;
color: #fff;
}
html.light #tt em {
html.y #tt em {
color: #d38;
}
html.y #tth {
color: #000;
background: #fff;
}
#modal {
position: fixed;
overflow: auto;
@@ -254,9 +273,9 @@ html.light #tt em {
box-shadow: 0 .1em .2em #fc0 inset;
border-radius: .2em;
}
html.light *:focus,
html.light #pctl *:focus,
html.light .btn:focus {
html.y *:focus,
html.y #pctl *:focus,
html.y .btn:focus {
box-shadow: 0 .1em .2em #037 inset;
}
input[type="text"]:focus,
@@ -264,9 +283,9 @@ input:not([type]):focus,
textarea:focus {
box-shadow: 0 .1em .3em #fc0, 0 -.1em .3em #fc0;
}
html.light input[type="text"]:focus,
html.light input:not([type]):focus,
html.light textarea:focus {
html.y input[type="text"]:focus,
html.y input:not([type]):focus,
html.y textarea:focus {
box-shadow: 0 .1em .3em #037, 0 -.1em .3em #037;
}
@@ -335,6 +354,13 @@ html.light textarea:focus {
}
.mdo ul,
.mdo ol {
padding-left: 1em;
}
.mdo ul ul,
.mdo ul ol,
.mdo ol ul,
.mdo ol ol {
padding-left: 2em;
border-left: .3em solid #ddd;
}
.mdo ul>li,
@@ -388,7 +414,7 @@ html.light textarea:focus {
overflow-wrap: break-word;
word-wrap: break-word; /*ie*/
}
html.light .mdo a,
html.y .mdo a,
.mdo a {
color: #fff;
background: #39b;
@@ -417,48 +443,48 @@ html.light textarea:focus {
html.dark .mdo a {
html.z .mdo a {
background: #057;
}
html.dark .mdo h1 a, html.dark .mdo h4 a,
html.dark .mdo h2 a, html.dark .mdo h5 a,
html.dark .mdo h3 a, html.dark .mdo h6 a {
html.z .mdo h1 a, html.z .mdo h4 a,
html.z .mdo h2 a, html.z .mdo h5 a,
html.z .mdo h3 a, html.z .mdo h6 a {
color: inherit;
background: none;
}
html.dark .mdo pre,
html.dark .mdo code {
html.z .mdo pre,
html.z .mdo code {
color: #8c0;
background: #1a1a1a;
border: .07em solid #333;
}
html.dark .mdo ul,
html.dark .mdo ol {
html.z .mdo ul,
html.z .mdo ol {
border-color: #444;
}
html.dark .mdo strong {
html.z .mdo strong {
color: #fff;
}
html.dark .mdo p>em,
html.dark .mdo li>em,
html.dark .mdo td>em {
html.z .mdo p>em,
html.z .mdo li>em,
html.z .mdo td>em {
color: #f94;
border-color: #666;
}
html.dark .mdo h1 {
html.z .mdo h1 {
background: #383838;
border-top: .4em solid #b80;
border-bottom: .4em solid #4c4c4c;
}
html.dark .mdo h2 {
html.z .mdo h2 {
background: #444;
border-bottom: .22em solid #555;
}
html.dark .mdo td,
html.dark .mdo th {
html.z .mdo td,
html.z .mdo th {
border-color: #444;
}
html.dark .mdo blockquote {
html.z .mdo blockquote {
background: #282828;
border: .07em dashed #444;
}


@@ -525,13 +525,15 @@ function Donut(uc, st) {
}
r.on = function (ya) {
r.fc = 99;
r.fc = r.tc = 99;
r.eta = null;
r.base = pos();
optab.innerHTML = ya ? svg() : optab.getAttribute('ico');
el = QS('#ops a .donut');
if (!ya)
if (!ya) {
favico.upd();
wintitle();
}
};
r.do = function () {
if (!el)
@@ -541,6 +543,11 @@ function Donut(uc, st) {
v = pos() - r.base,
ofs = el.style.strokeDashoffset = o - o * v / t;
if (++r.tc >= 10) {
wintitle(f2f(v * 100 / t, 1) + '%, ' + r.eta + 's, ', true);
r.tc = 0;
}
if (favico.txt) {
if (++r.fc < 10 && r.eta && r.eta > 99)
return;
@@ -562,9 +569,9 @@ function fsearch_explain(n) {
return toast.inf(60, 'your access to this folder is Read-Only\n\n' + (acct == '*' ? 'you are currently not logged in' : 'you are currently logged in as "' + acct + '"'));
if (bcfg_get('fsearch', false))
return toast.inf(60, 'you are currently in file-search mode\n\nswitch to upload-mode by clicking the green magnifying glass (next to the big yellow search button), and then refresh\n\nsorry');
return toast.inf(60, 'you are currently in file-search mode\n\nswitch to upload-mode by clicking the green magnifying glass (next to the big yellow search button), and try uploading again\n\nsorry');
return toast.inf(60, 'refresh the page and try again, it should work now');
return toast.inf(60, 'try again, it should work now');
}
@@ -637,12 +644,6 @@ function up2k_init(subtle) {
return false;
}
ebi('u2nope').onclick = function (e) {
ev(e);
setmsg(suggest_up2k, 'msg');
goto('bup');
};
setmsg(suggest_up2k, 'msg');
if (!String.prototype.format) {
@@ -669,6 +670,7 @@ function up2k_init(subtle) {
var st = {
"files": [],
"seen": {},
"todo": {
"head": [],
"hash": [],
@@ -727,7 +729,6 @@ function up2k_init(subtle) {
if (++nenters <= 0)
nenters = 1;
//console.log(nenters, Date.now(), 'enter', this, e.target);
if (onover.bind(this)(e))
return true;
@@ -749,12 +750,19 @@ function up2k_init(subtle) {
ebi('up_dz').setAttribute('err', mup || '');
ebi('srch_dz').setAttribute('err', msr || '');
}
function onoverb(e) {
// zones are alive; disable cuo2duo branch
document.body.ondragover = document.body.ondrop = null;
return onover.bind(this)(e);
}
function onover(e) {
try {
var ok = false, dt = e.dataTransfer.types;
for (var a = 0; a < dt.length; a++)
if (dt[a] == 'Files')
ok = true;
else if (dt[a] == 'text/uri-list')
return true;
if (!ok)
return true;
@@ -780,17 +788,20 @@ function up2k_init(subtle) {
clmod(ebi('drops'), 'vis');
clmod(ebi('up_dz'), 'hl');
clmod(ebi('srch_dz'), 'hl');
// cuo2duo:
document.body.ondragover = onover;
document.body.ondrop = gotfile;
}
//console.log(nenters, Date.now(), 'leave', this, e && e.target);
}
document.body.ondragenter = ondrag;
document.body.ondragleave = offdrag;
document.body.ondragover = onover;
document.body.ondrop = gotfile;
var drops = [ebi('up_dz'), ebi('srch_dz')];
for (var a = 0; a < 2; a++) {
drops[a].ondragenter = ondrag;
drops[a].ondragover = onover;
drops[a].ondragover = onoverb;
drops[a].ondragleave = offdrag;
drops[a].ondrop = gotfile;
}
@@ -800,7 +811,10 @@ function up2k_init(subtle) {
ev(e);
nenters = 0;
offdrag.bind(this)();
var dz = (this && this.getAttribute('id'));
var dz = this && this.getAttribute('id');
if (!dz && e && e.clientY)
// cuo2duo fallback
dz = e.clientY < window.innerHeight / 2 ? 'up_dz' : 'srch_dz';
var err = this.getAttribute('err');
if (err)
@@ -994,13 +1008,9 @@ function up2k_init(subtle) {
}
function up_them(good_files) {
var seen = {},
evpath = get_evpath(),
var evpath = get_evpath(),
draw_each = good_files.length < 50;
for (var a = 0; a < st.files.length; a++)
seen[st.files[a].name + '\n' + st.files[a].size] = 1;
for (var a = 0; a < good_files.length; a++) {
var fobj = good_files[a][0],
name = good_files[a][1],
@@ -1026,15 +1036,20 @@ function up2k_init(subtle) {
"bytes_uploaded": 0,
"hash": []
},
key = entry.name + '\n' + entry.size;
key = name + '\n' + entry.size + '\n' + lmod + '\n' + uc.fsearch;
if (uc.fsearch)
entry.srch = 1;
if (seen[key])
continue;
try {
if (st.seen[fdir][key])
continue;
}
catch (ex) {
st.seen[fdir] = {};
}
seen[key] = 1;
st.seen[fdir][key] = 1;
pvis.addfile([
uc.fsearch ? esc(entry.name) : linksplit(
@@ -1067,21 +1082,7 @@ function up2k_init(subtle) {
}
more_one_file();
function u2cleanup(e) {
ev(e);
for (var a = 0; a < st.files.length; a++) {
var t = st.files[a];
if (t.done && t.name) {
if (!qsr('#f' + t.n))
continue;
t.name = undefined;
}
}
}
ebi('u2cleanup').onclick = u2cleanup;
var etaref = 0, etaskip = 0, op_minh = 0;
var etaref = 0, etaskip = 0, utw_minh = 0;
function etafun() {
var nhash = st.busy.head.length + st.busy.hash.length + st.todo.head.length + st.todo.hash.length,
nsend = st.busy.upload.length + st.todo.upload.length,
@@ -1094,13 +1095,10 @@ function up2k_init(subtle) {
//ebi('acc_info').innerHTML = humantime(st.time.busy) + ' ' + f2f(now / 1000, 1);
var op = ebi('op_up2k'),
uff = ebi('u2footfoot'),
minh = QS('#op_up2k.act') ? Math.max(op_minh, uff.offsetTop + uff.offsetHeight - op.offsetTop + 32) : 0;
if (minh > op_minh || !op_minh) {
op_minh = minh;
op.style.minHeight = op_minh + 'px';
var minh = QS('#op_up2k.act') && st.is_busy ? Math.max(utw_minh, ebi('u2tab').offsetHeight + 32) : 0;
if (utw_minh < minh || !utw_minh) {
utw_minh = minh;
ebi('u2tabw').style.minHeight = utw_minh + 'px';
}
if (!nhash)
@@ -1169,7 +1167,7 @@ function up2k_init(subtle) {
var t = st.todo.handshake[0],
cd = t.cooldown;
if (cd && cd - Date.now() > 0)
if (cd && cd > Date.now())
return false;
// keepalive or verify
@@ -1223,15 +1221,16 @@ function up2k_init(subtle) {
running = true;
while (true) {
var now = Date.now(),
is_busy = 0 !=
st.todo.head.length +
st.todo.hash.length +
st.todo.handshake.length +
st.todo.upload.length +
st.busy.head.length +
st.busy.hash.length +
st.busy.handshake.length +
st.busy.upload.length;
oldest_active = Math.min( // gzip take the wheel
st.todo.head.length ? st.todo.head[0].n : st.files.length,
st.todo.hash.length ? st.todo.hash[0].n : st.files.length,
st.todo.upload.length ? st.todo.upload[0].nfile : st.files.length,
st.todo.handshake.length ? st.todo.handshake[0].n : st.files.length,
st.busy.head.length ? st.busy.head[0].n : st.files.length,
st.busy.hash.length ? st.busy.hash[0].n : st.files.length,
st.busy.upload.length ? st.busy.upload[0].nfile : st.files.length,
st.busy.handshake.length ? st.busy.handshake[0].n : st.files.length),
is_busy = oldest_active < st.files.length;
if (was_busy && !is_busy) {
for (var a = 0; a < st.files.length; a++) {
@@ -1251,7 +1250,7 @@ function up2k_init(subtle) {
}
if (was_busy != is_busy) {
was_busy = is_busy;
st.is_busy = was_busy = is_busy;
window[(is_busy ? "add" : "remove") +
"EventListener"]("beforeunload", warn_uploader_busy);
@@ -1280,7 +1279,7 @@ function up2k_init(subtle) {
timer.rm(etafun);
timer.rm(donut.do);
op_minh = 0;
utw_minh = 0;
}
else {
timer.add(donut.do);
@@ -1332,7 +1331,8 @@ function up2k_init(subtle) {
}
if (st.todo.head.length &&
st.busy.head.length < parallel_uploads) {
st.busy.head.length < parallel_uploads &&
(!is_busy || st.todo.head[0].n - oldest_active < parallel_uploads * 2)) {
exec_head();
mou_ikkai = true;
}
@@ -1364,6 +1364,14 @@ function up2k_init(subtle) {
return taskerd;
})();
function chill(t) {
var now = Date.now();
if ((t.coolmul || 0) < 2 || now - t.cooldown < t.coolmul * 700)
t.coolmul = Math.min((t.coolmul || 0.5) * 2, 32);
t.cooldown = Math.max(t.cooldown || 1, Date.now() + t.coolmul * 1000);
}
/////
////
/// hashing
@@ -1462,7 +1470,6 @@ function up2k_init(subtle) {
min_filebuf = 1;
var td = Date.now() - t0;
if (td > 50) {
ebi('u2foot').innerHTML += "<p>excessive filereader latency (" + td + " ms), increasing readahead</p>";
min_filebuf = 32 * 1024 * 1024;
}
}
@@ -1479,7 +1486,8 @@ function up2k_init(subtle) {
err.indexOf('NotFoundError') !== -1 // macos-firefox permissions
) {
pvis.seth(t.n, 1, 'OS-error');
pvis.seth(t.n, 2, err);
pvis.seth(t.n, 2, err + ' @ ' + car);
console.log('OS-error', reader.error, '@', car);
handled = true;
}
@@ -1749,8 +1757,12 @@ function up2k_init(subtle) {
pvis.move(t.n, 'ok');
}
else t.t_uploaded = undefined;
else {
if (t.t_uploaded)
chill(t);
t.t_uploaded = undefined;
}
tasker();
}
else {
@@ -1855,14 +1867,19 @@ function up2k_init(subtle) {
st.bytes.uploaded += cdr - car;
t.bytes_uploaded += cdr - car;
}
else if (txt.indexOf('already got that') !== -1) {
else if (txt.indexOf('already got that') + 1 ||
txt.indexOf('already being written') + 1) {
console.log("ignoring dupe-segment error", t);
}
else {
toast.err(0, "server broke; cu-err {0} on file [{1}]:\n".format(
xhr.status, t.name) + (txt || "no further information"));
return;
chill(t);
}
orz2(xhr);
}
function orz2(xhr) {
apop(st.busy.upload, upt);
apop(t.postlist, npart);
if (!t.postlist.length) {
@@ -1884,9 +1901,11 @@ function up2k_init(subtle) {
if (crashed)
return;
toast.err(9.98, "failed to upload a chunk,\n" + tries + " retries so far -- retrying in 10sec\n\n" + t.name);
if (!toast.visible)
toast.warn(9.98, "failed to upload a chunk;\nprobably harmless, continuing\n\n" + t.name);
console.log('chunkpit onerror,', ++tries, t);
setTimeout(do_send, 10 * 1000);
orz2(xhr);
};
xhr.open('POST', t.purl, true);
xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
@@ -1907,16 +1926,16 @@ function up2k_init(subtle) {
//
function onresize(e) {
// 10x faster than matchMedia('(min-width
var bar = ebi('ops'),
wpx = window.innerWidth,
fpx = parseInt(getComputedStyle(bar)['font-size']),
wem = wpx * 1.0 / fpx,
wide = wem > 54 ? 'w' : '',
write = has(perms, 'write'),
wide = write && wem > 54 ? 'w' : '',
parent = ebi(wide && write ? 'u2btn_cw' : 'u2btn_ct'),
btn = ebi('u2btn');
//console.log([wpx, fpx, wem]);
if (btn.parentNode !== parent) {
parent.appendChild(btn);
ebi('u2conf').setAttribute('class', wide);
@@ -1924,7 +1943,7 @@ function up2k_init(subtle) {
ebi('u2etaw').setAttribute('class', wide);
}
wide = wem > 78 ? 'ww' : wide;
wide = write && wem > 78 ? 'ww' : wide;
parent = ebi(wide == 'ww' && write ? 'u2c3w' : 'u2c3t');
var its = [ebi('u2etaw'), ebi('u2cards')];
if (its[0].parentNode !== parent) {
@@ -2026,7 +2045,7 @@ function up2k_init(subtle) {
new_state = true;
fixed = true;
}
if (!has(perms, 'read')) {
if (!has(perms, 'read') || !have_up2k_idx) {
new_state = false;
fixed = true;
}
@@ -2044,10 +2063,10 @@ function up2k_init(subtle) {
try {
var ico = uc.fsearch ? '🔎' : '🚀',
desc = uc.fsearch ? 'Search' : 'Upload';
desc = uc.fsearch ? 'S E A R C H' : 'U P L O A D';
clmod(ebi('op_up2k'), 'srch', uc.fsearch);
ebi('u2bm').innerHTML = ico + ' <sup>' + desc + '</sup>';
ebi('u2bm').innerHTML = ico + '&nbsp; <sup>' + desc + '</sup>';
}
catch (ex) { }
@@ -2101,7 +2120,7 @@ function up2k_init(subtle) {
if (parallel_uploads < 1)
bumpthread(1);
return { "init_deps": init_deps, "set_fsearch": set_fsearch, "ui": pvis }
return { "init_deps": init_deps, "set_fsearch": set_fsearch, "ui": pvis, "st": st, "uc": uc }
}
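
the chill() helper added above gives a misbehaving upload task an exponentially growing cooldown: coolmul doubles from 0.5 up to a cap of 32, so retries back off to roughly half a minute. a simplified standalone sketch of the same idea (field names follow the diff; the loop is only there to show the progression):

    function chill(t) {
        var now = Date.now();
        // grow the multiplier unless the previous cooldown expired a while ago
        if ((t.coolmul || 0) < 2 || now - t.cooldown < t.coolmul * 700)
            t.coolmul = Math.min((t.coolmul || 0.5) * 2, 32);
        t.cooldown = Math.max(t.cooldown || 1, now + t.coolmul * 1000);
    }

    var task = {};
    for (var i = 0; i < 8; i++) {
        chill(task);
        console.log('retry', i, 'waits ~' + task.coolmul + 's');  // 1, 2, 4, 8, 16, 32, 32, 32
    }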


@@ -7,8 +7,7 @@ if (!window['console'])
var is_touch = 'ontouchstart' in window,
IPHONE = /iPhone|iPad|iPod/i.test(navigator.userAgent),
ANDROID = /android/i.test(navigator.userAgent),
IPHONE = is_touch && /iPhone|iPad|iPod/i.test(navigator.userAgent),
WINDOWS = navigator.platform ? navigator.platform == 'Win32' : /Windows/.test(navigator.userAgent);
@@ -87,6 +86,9 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
if ((msg + '').indexOf('ResizeObserver') !== -1)
return; // chrome issue 809574 (benign, from <video>)
if ((msg + '').indexOf('l2d.js') !== -1)
return; // `t` undefined in tapEvent -> hitTestSimpleCustom
var ekey = url + '\n' + lineNo + '\n' + msg;
if (ignexd[ekey] || crashed)
return;
@@ -181,6 +183,7 @@ function ignex(all) {
if (!all)
window.onerror = vis_exh;
}
window.onerror = vis_exh;
function noop() { }
@@ -219,15 +222,15 @@ if (!String.prototype.endsWith)
return this.substring(this_len - search.length, this_len) === search;
};
if (!String.startsWith)
if (!String.prototype.startsWith)
String.prototype.startsWith = function (s, i) {
i = i > 0 ? i | 0 : 0;
return this.substring(i, i + s.length) === s;
};
if (!String.trimEnd)
if (!String.prototype.trimEnd)
String.prototype.trimEnd = String.prototype.trimRight = function () {
return this.replace(/[ \t\r\n]+$/m, '');
return this.replace(/[ \t\r\n]+$/, '');
};
if (!Element.prototype.matches)
@@ -286,15 +289,19 @@ function crc32(str) {
function clmod(el, cls, add) {
if (!el)
return false;
if (el.classList) {
var have = el.classList.contains(cls);
if (add == 't')
add = !have;
if (add != have)
el.classList[add ? 'add' : 'remove'](cls);
if (!add == !have)
return false;
return;
el.classList[add ? 'add' : 'remove'](cls);
return true;
}
var re = new RegExp('\\s*\\b' + cls + '\\s*\\b', 'g'),
@@ -305,12 +312,18 @@ function clmod(el, cls, add) {
var n2 = n1.replace(re, ' ') + (add ? ' ' + cls : '');
if (n1 != n2)
el.className = n2;
if (!n1 == !n2)
return false;
el.className = n2;
return true;
}
function clgot(el, cls) {
if (!el)
return;
if (el.classList)
return el.classList.contains(cls);
@@ -319,14 +332,55 @@ function clgot(el, cls) {
}
var ANIM = true;
if (window.matchMedia) {
var mq = window.matchMedia('(prefers-reduced-motion: reduce)');
mq.onchange = function () {
ANIM = !mq.matches;
};
ANIM = !mq.matches;
}
function showsort(tab) {
var v, vn, v1, v2, th = tab.tHead,
sopts = jread('fsort', [["href", 1, ""]]);
th && (th = th.rows[0]) && (th = th.cells);
for (var a = sopts.length - 1; a >= 0; a--) {
if (!sopts[a][0])
continue;
v2 = v1;
v1 = sopts[a];
}
v = [v1, v2];
vn = [v1 ? v1[0] : '', v2 ? v2[0] : ''];
var ga = QSA('#ghead a[s]');
for (var a = 0; a < ga.length; a++)
ga[a].className = '';
for (var a = 0; a < th.length; a++) {
var n = vn.indexOf(th[a].getAttribute('name')),
cl = n < 0 ? ' ' : ' s' + n + (v[n][1] > 0 ? ' ' : 'r ');
th[a].className = th[a].className.replace(/ *s[01]r? */, ' ') + cl;
if (n + 1) {
ga = QS('#ghead a[s="' + vn[n] + '"]');
if (ga)
ga.className = cl;
}
}
}
function sortTable(table, col, cb) {
var tb = table.tBodies[0],
th = table.tHead.rows[0].cells,
tr = Array.prototype.slice.call(tb.rows, 0),
i, reverse = th[col].className.indexOf('sort1') !== -1 ? -1 : 1;
for (var a = 0, thl = th.length; a < thl; a++)
th[a].className = th[a].className.replace(/ *sort-?1 */, " ");
th[col].className += ' sort' + reverse;
i, reverse = /s0[^r]/.exec(th[col].className + ' ') ? -1 : 1;
var stype = th[col].getAttribute('sort');
try {
var nrules = [], rules = jread("fsort", []);
@@ -344,6 +398,7 @@ function sortTable(table, col, cb) {
break;
}
jwrite("fsort", nrules);
try { showsort(table); } catch (ex) { }
}
catch (ex) {
console.log("failed to persist sort rules, resetting: " + ex);
@@ -392,7 +447,7 @@ function makeSortable(table, cb) {
}
function linksplit(rp) {
function linksplit(rp, id) {
var ret = [],
apath = '/',
q = null;
@@ -422,8 +477,13 @@ function linksplit(rp) {
vlink = vlink.slice(0, -1) + '<span>/</span>';
}
if (!rp && q)
link += q;
if (!rp) {
if (q)
link += q;
if (id)
link += '" id="' + id;
}
ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
apath += link;
@@ -782,13 +842,18 @@ var timer = (function () {
var tt = (function () {
var r = {
"tt": mknod("div"),
"th": mknod("div"),
"en": true,
"el": null,
"skip": false
"skip": false,
"lvis": 0
};
r.th.innerHTML = '?';
r.tt.setAttribute('id', 'tt');
r.th.setAttribute('id', 'tth');
document.body.appendChild(r.tt);
document.body.appendChild(r.th);
var prev = null;
r.cshow = function () {
@@ -798,19 +863,42 @@ var tt = (function () {
prev = this;
};
r.show = function () {
if (r.skip) {
r.skip = false;
var tev;
r.dshow = function (e) {
clearTimeout(tev);
if (!r.getmsg(this))
return;
}
if (QS('body.bbox-open'))
if (Date.now() - r.lvis < 400)
return r.show.bind(this)();
tev = setTimeout(r.show.bind(this), 800);
if (is_touch)
return;
this.addEventListener('mousemove', r.move);
clmod(r.th, 'act', 1);
r.move(e);
};
r.getmsg = function (el) {
if (IPHONE && QS('body.bbox-open'))
return;
var cfg = sread('tooltips');
if (cfg !== null && cfg != '1')
return;
var msg = this.getAttribute('tt');
return el.getAttribute('tt');
};
r.show = function () {
clearTimeout(tev);
if (r.skip) {
r.skip = false;
return;
}
var msg = r.getmsg(this);
if (!msg)
return;
@@ -824,6 +912,7 @@ var tt = (function () {
if (dir.indexOf('u') + 1) top = false;
if (dir.indexOf('d') + 1) top = true;
clmod(r.th, 'act');
clmod(r.tt, 'b', big);
r.tt.style.left = '0';
r.tt.style.top = '0';
@@ -849,14 +938,27 @@ var tt = (function () {
r.hide = function (e) {
ev(e);
clearTimeout(tev);
window.removeEventListener('scroll', r.hide);
clmod(r.tt, 'show');
clmod(r.tt, 'b');
clmod(r.th, 'act');
if (clmod(r.tt, 'show'))
r.lvis = Date.now();
if (r.el)
r.el.removeEventListener('mouseleave', r.hide);
if (e && e.target)
e.target.removeEventListener('mousemove', r.move);
};
if (is_touch && IPHONE) {
r.move = function (e) {
r.th.style.left = (e.pageX + 12) + 'px';
r.th.style.top = (e.pageY + 12) + 'px';
};
if (IPHONE) {
var f1 = r.show,
f2 = r.hide,
q = [];
@@ -882,14 +984,14 @@ var tt = (function () {
r.att = function (ctr) {
var _cshow = r.en ? r.cshow : null,
_show = r.en ? r.show : null,
_dshow = r.en ? r.dshow : null,
_hide = r.en ? r.hide : null,
o = ctr.querySelectorAll('*[tt]');
for (var a = o.length - 1; a >= 0; a--) {
o[a].onfocus = _cshow;
o[a].onblur = _hide;
o[a].onmouseenter = _show;
o[a].onmouseenter = _dshow;
o[a].onmouseleave = _hide;
}
r.hide();
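
the ANIM flag introduced above keys animations off the OS "reduce motion" setting via matchMedia. a minimal standalone version of the same pattern (the addListener branch is an assumed fallback for older browsers, not something this diff includes):

    var ANIM = true;
    if (window.matchMedia) {
        var mq = window.matchMedia('(prefers-reduced-motion: reduce)');
        var apply = function () { ANIM = !mq.matches; };
        apply();
        if ('onchange' in mq)
            mq.onchange = apply;      // what the diff does
        else if (mq.addListener)
            mq.addListener(apply);    // legacy fallback (assumption)
    }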


@@ -2,24 +2,12 @@
# example resource files
# utilities
can be provided to copyparty to tweak things
## example `.epilogue.html`
save one of these as `.epilogue.html` inside a folder to customize it:
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
## example browser-css
point `--css-browser` to one of these by URL:
* [`browser.css`](browser.css) changes the background
* [`browser-icons.css`](browser-icons.css) adds filetype icons
## [`multisearch.html`](multisearch.html)
* takes a list of filenames of youtube rips, grabs the youtube-id of each file, and does a search on the server for those
* use it by putting it somewhere on the server and opening it as an html page
* also serves as an extendable template for other specific search behaviors


@@ -1,30 +0,0 @@
html {
background: #222 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed;
}
#files th {
background: rgba(32, 32, 32, 0.9) !important;
}
#ops,
#tree,
#files td {
background: rgba(32, 32, 32, 0.3) !important;
}
html.light {
background: #eee url('/wp/wallhaven-dpxl6l.png') center / cover no-repeat fixed;
}
html.light #files th {
background: rgba(255, 255, 255, 0.9) !important;
}
html.light .logue,
html.light #ops,
html.light #tree,
html.light #files td {
background: rgba(248, 248, 248, 0.8) !important;
}
#files * {
background: transparent !important;
}

docs/multisearch.html (new file, +124 lines)

@@ -0,0 +1,124 @@
<!DOCTYPE html><html lang="en"><head>
<meta charset="utf-8">
<title>multisearch</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<style>
html, body {
margin: 0;
padding: 0;
color: #ddd;
background: #222;
font-family: sans-serif;
}
body {
padding: 1em;
}
a {
color: #fc5;
}
ul {
line-height: 1.5em;
}
code {
color: #fc5;
border: 1px solid #444;
padding: .1em .2em;
font-family: sans-serif, sans-serif;
}
#src {
display: block;
width: calc(100% - 1em);
padding: .5em;
margin: 0;
}
td {
padding-left: 1em;
}
.hit,
.miss {
font-weight: bold;
padding-left: 0;
padding-top: 1em;
}
.hit {color: #af0;}
.miss {color: #f0c;}
.hit:before {content: '✅';}
.miss:before {content: '❌';}
</style></head><body>
<ul>
<li>paste a list of filenames (youtube rips) below and hit search</li>
<li>it will grab the youtube-id from the filenames and search for each id</li>
<li>filenames must be like <code>-YTID.webm</code> (youtube-dl style) or <code>[YTID].webm</code> (ytdlp style)</li>
</ul>
<textarea id="src"></textarea>
<button id="go">search</button>
<div id="res"></div>
<script>
var ebi = document.getElementById.bind(document);
function esc(txt) {
return txt.replace(/[&"<>]/g, function (c) {
return {
'&': '&amp;',
'"': '&quot;',
'<': '&lt;',
'>': '&gt;'
}[c];
});
}
ebi('go').onclick = async function() {
var queries = [];
for (var ln of ebi('src').value.split(/\n/g)) {
// filter the list of input files,
// only keeping youtube videos,
// meaning the filename ends with either
// [YOUTUBEID].EXTENSION or
// -YOUTUBEID.EXTENSION
var m = /[[-]([0-9a-zA-Z_-]{11})\]?\.(mp4|webm|mkv)$/.exec(ln);
if (!m || !(m = m[1]))
continue;
// create a search query for each line: name like *youtubeid*
queries.push([ln, `name like *${m}*`]);
}
var a = 0, html = ['<table>'], hits = [], misses = [];
for (var [fn, q] of queries) {
var r = await fetch('/?srch', {
method: 'POST',
body: JSON.stringify({'q': q})
});
r = await r.json();
var cl, tab2;
if (r.hits.length) {
tab2 = hits;
cl = 'hit';
}
else {
tab2 = misses;
cl = 'miss';
}
var h = `<tr><td class="${cl}" colspan="9">${esc(fn)}</td></tr>`;
tab2.push(h);
html.push(h);
for (var h of r.hits) {
var link = `<a href="/${h.rp}">${esc(decodeURIComponent(h.rp))}</a>`;
html.push(`<tr><td>${h.sz}</td><td>${link}</td></tr>`);
}
ebi('res').innerHTML = `searching, ${++a} / ${queries.length} done, ${hits.length} hits, ${misses.length} miss`;
}
html.push('<tr><td><h1>hits:</h1></td></tr>');
html = html.concat(hits);
html.push('<tr><td><h1>miss:</h1></td></tr>');
html = html.concat(misses);
html.push('</table>');
ebi('res').innerHTML = html.join('\n');
};
</script></body></html>


@@ -3,6 +3,12 @@ echo not a script
exit 1
##
## add index.html banners
find -name index.html | sed -r 's/index.html$//' | while IFS= read -r dir; do f="$dir/.prologue.html"; [ -e "$f" ] || echo '<h1><a href="index.html">open index.html</a></h1>' >"$f"; done
##
## delete all partial uploads
## (supports linux/macos, probably windows+msys2)
@@ -80,6 +86,12 @@ shab64() { sp=$1; f="$2"; v=0; sz=$(stat -c%s "$f"); while true; do w=$((v+sp*10
command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (time wget http://127.0.0.1:3923/?ls -qO- | jq -C '.files[]|{sz:.sz,ta:.tags.artist,tb:.tags.".bpm"}|del(.[]|select(.==null))' | awk -F\" '/"/{t[$2]++} END {for (k in t){v=t[k];p=sprintf("%" (v+1) "s",v);gsub(/ /,"#",p);printf "\033[36m%s\033[33m%s ",k,p}}') 2>&1 | awk -v ts=$t 'NR==1{t1=$0} NR==2{sub(/.*0m/,"");sub(/s$/,"");t2=$0;c=2; if(t2>0.3){c=3} if(t2>0.8){c=1} } END{sub(/[0-9]{6}$/,"",ts);printf "%s \033[3%dm%s %s\033[0m\n",ts,c,t2,t1}'; sleep 0.1 || break; done
##
## track an up2k upload and print all chunks in file-order
grep '"name": "2021-07-18 02-17-59.mkv"' fug.log | head -n 1 | sed -r 's/.*"hash": \[//; s/\].*//' | tr '"' '\n' | grep -E '^[a-zA-Z0-9_-]{44}$' | while IFS= read -r cid; do cat -n fug.log | grep -vF '"purl": "' | grep -- "$cid"; echo; done | stdbuf -oL tr '\t' ' ' | while IFS=' ' read -r ln _ _ _ _ _ ts ip port msg; do [ -z "$msg" ] && echo && continue; printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; read -r ln _ _ _ _ _ ts ip port msg < <(cat -n fug.log | tail -n +$((ln+1)) | grep -F "$ip $port" | head -n 1); printf '%6s [%s] [%s] %s\n' $ln "$ts" "$ip $port" "$msg"; done
##
## js oneliners
@@ -89,6 +101,7 @@ var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.quer
# debug md-editor line tracking
var s=mknod('style');s.innerHTML='*[data-ln]:before {content:attr(data-ln)!important;color:#f0c;background:#000;position:absolute;left:-1.5em;font-size:1rem}';document.head.appendChild(s);
##
## bash oneliners
@@ -169,7 +182,7 @@ brew install python@2
pip install virtualenv
# readme toc
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#|]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
# fix firefox phantom breakpoints,
# suggestions from bugtracker, doesnt work (debugger is not attachable)
@@ -185,7 +198,13 @@ about:config >> devtools.debugger.prefs-schema-version = -1
git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
# download all sfx versions
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="copyparty $v $t.py"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="$(printf '%s\n' "copyparty $v $t.py" | tr / -)"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
# push to multiple git remotes
git config -l | grep '^remote'
git remote add all git@github.com:9001/copyparty.git
git remote set-url --add --push all git@gitlab.com:9001/copyparty.git
git remote set-url --add --push all git@github.com:9001/copyparty.git
##


@@ -12,21 +12,18 @@ set -e
#
# output summary (filesizes and contents):
#
# 535672 copyparty-extras/sfx-full/copyparty-sfx.sh
# 550760 copyparty-extras/sfx-full/copyparty-sfx.py
# `- original unmodified sfx from github
#
# 572923 copyparty-extras/sfx-full/copyparty-sfx-gz.py
# `- unmodified but recompressed from bzip2 to gzip
#
# 341792 copyparty-extras/sfx-ent/copyparty-sfx.sh
# 353975 copyparty-extras/sfx-ent/copyparty-sfx.py
# 376934 copyparty-extras/sfx-ent/copyparty-sfx-gz.py
# `- removed iOS ogg/opus/vorbis audio decoder,
# removed the audio tray mouse cursor,
# "enterprise edition"
#
# 259288 copyparty-extras/sfx-lite/copyparty-sfx.sh
# 270004 copyparty-extras/sfx-lite/copyparty-sfx.py
# 293159 copyparty-extras/sfx-lite/copyparty-sfx-gz.py
# `- also removed the codemirror markdown editor
@@ -81,7 +78,7 @@ cache="$od/.copyparty-repack.cache"
# fallback to awk (sorry)
awk -F\" '/"browser_download_url".*(\.tar\.gz|-sfx\.)/ {print$4}'
) |
grep -E '(sfx\.(sh|py)|tar\.gz)$' |
grep -E '(sfx\.py|tar\.gz)$' |
tee /dev/stderr |
tr -d '\r' | tr '\n' '\0' |
xargs -0 bash -c 'dl_files "$@"' _
@@ -139,11 +136,11 @@ repack() {
)
}
repack sfx-full "re gz no-sh"
repack sfx-ent "re no-dd no-ogv"
repack sfx-ent "re no-dd no-ogv gz no-sh"
repack sfx-lite "re no-dd no-ogv no-cm no-hl"
repack sfx-lite "re no-dd no-ogv no-cm no-hl gz no-sh"
repack sfx-full "re gz"
repack sfx-ent "re no-dd"
repack sfx-ent "re no-dd gz"
repack sfx-lite "re no-dd no-cm no-hl"
repack sfx-lite "re no-dd no-cm no-hl gz"
# move fuse and up2k clients into copyparty-extras/,


@@ -1,21 +1,19 @@
FROM alpine:3.14
FROM alpine:3.15
WORKDIR /z
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
ver_hashwasm=4.9.0 \
ver_marked=3.0.4 \
ver_ogvjs=1.8.4 \
ver_mde=2.15.0 \
ver_codemirror=5.62.3 \
ver_marked=4.0.12 \
ver_mde=2.16.1 \
ver_codemirror=5.65.2 \
ver_fontawesome=5.13.0 \
ver_zopfli=1.0.3
# download;
# the scp url is latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
# the scp url is regular latin from https://fonts.googleapis.com/css2?family=Source+Code+Pro&display=swap
RUN mkdir -p /z/dist/no-pk \
&& wget https://fonts.gstatic.com/s/sourcecodepro/v11/HI_SiYsKILxRpg3hIP6sJ7fM7PqlPevW.woff2 -O scp.woff2 \
&& apk add cmake make g++ git bash npm patch wget tar pigz brotli gzip unzip python3 python3-dev brotli py3-brotli \
&& wget https://github.com/brion/ogv.js/releases/download/$ver_ogvjs/ogvjs-$ver_ogvjs.zip -O ogvjs.zip \
&& wget https://github.com/openpgpjs/asmcrypto.js/archive/$ver_asmcrypto.tar.gz -O asmcrypto.tgz \
&& wget https://github.com/markedjs/marked/archive/v$ver_marked.tar.gz -O marked.tgz \
&& wget https://github.com/Ionaru/easy-markdown-editor/archive/$ver_mde.tar.gz -O mde.tgz \
@@ -23,7 +21,6 @@ RUN mkdir -p /z/dist/no-pk \
&& wget https://github.com/FortAwesome/Font-Awesome/releases/download/$ver_fontawesome/fontawesome-free-$ver_fontawesome-web.zip -O fontawesome.zip \
&& wget https://github.com/google/zopfli/archive/zopfli-$ver_zopfli.tar.gz -O zopfli.tgz \
&& wget https://github.com/Daninet/hash-wasm/releases/download/v$ver_hashwasm/hash-wasm@$ver_hashwasm.zip -O hash-wasm.zip \
&& unzip ogvjs.zip \
&& (mkdir hash-wasm \
&& cd hash-wasm \
&& unzip ../hash-wasm.zip) \
@@ -77,21 +74,6 @@ RUN cd hash-wasm \
&& mv sha512.umd.min.js /z/dist/sha512.hw.js
# build ogvjs
RUN cd ogvjs-$ver_ogvjs \
&& cp -pv \
ogv-worker-audio.js \
ogv-demuxer-ogg-wasm.js \
ogv-demuxer-ogg-wasm.wasm \
ogv-decoder-audio-opus-wasm.js \
ogv-decoder-audio-opus-wasm.wasm \
ogv-decoder-audio-vorbis-wasm.js \
ogv-decoder-audio-vorbis-wasm.wasm \
/z/dist \
&& cp -pv \
ogv-es2017.js /z/dist/ogv.js
# build marked
COPY marked.patch /z/
COPY marked-ln.patch /z/
@@ -100,7 +82,6 @@ RUN cd marked-$ver_marked \
&& patch -p1 < /z/marked.patch \
&& npm run build \
&& cp -pv marked.min.js /z/dist/marked.js \
&& cp -pv lib/marked.js /z/dist/marked.full.js \
&& mkdir -p /z/nodepkgs \
&& ln -s $(pwd) /z/nodepkgs/marked
# && npm run test \
@@ -120,9 +101,10 @@ COPY easymde.patch /z/
RUN cd easy-markdown-editor-$ver_mde \
&& patch -p1 < /z/easymde.patch \
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-[0-9\.]+.tgz`file:/z/nodepkgs/marked`' package-lock.json \
&& sed -ri 's`https://registry.npmjs.org/codemirror/-/codemirror-[0-9\.]+.tgz`file:/z/nodepkgs/codemirror`' package-lock.json \
&& sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \
&& sed -ri 's`("codemirror": ")[^"]+`\1file:/z/nodepkgs/codemirror`' ./package.json \
&& sed -ri 's`^var marked = require\(.marked/lib/marked.\);$`var marked = window.marked;`' src/js/easymde.js \
&& sed -ri 's`^var marked = require\(.marked.\).marked;$`var marked = window.marked;`' src/js/easymde.js \
&& npm install
COPY easymde-ln.patch /z/
@@ -136,6 +118,7 @@ RUN cd easy-markdown-editor-$ver_mde \
# build fontawesome and scp
COPY mini-fa.sh /z
COPY mini-fa.css /z
COPY shiftbase.py /z
RUN /bin/ash /z/mini-fa.sh


@@ -1,6 +1,6 @@
diff -NarU2 codemirror-5.59.3-orig/mode/gfm/gfm.js codemirror-5.59.3/mode/gfm/gfm.js
--- codemirror-5.59.3-orig/mode/gfm/gfm.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/mode/gfm/gfm.js 2021-02-21 20:42:02.166174775 +0000
diff -wNarU2 codemirror-5.65.1-orig/mode/gfm/gfm.js codemirror-5.65.1/mode/gfm/gfm.js
--- codemirror-5.65.1-orig/mode/gfm/gfm.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/mode/gfm/gfm.js 2022-02-09 22:50:18.145862052 +0100
@@ -97,5 +97,5 @@
}
}
@@ -15,9 +15,9 @@ diff -NarU2 codemirror-5.59.3-orig/mode/gfm/gfm.js codemirror-5.59.3/mode/gfm/gf
+ }*/
stream.next();
return null;
diff -NarU2 codemirror-5.59.3-orig/mode/meta.js codemirror-5.59.3/mode/meta.js
--- codemirror-5.59.3-orig/mode/meta.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/mode/meta.js 2021-02-21 20:42:54.798742821 +0000
diff -wNarU2 codemirror-5.65.1-orig/mode/meta.js codemirror-5.65.1/mode/meta.js
--- codemirror-5.65.1-orig/mode/meta.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/mode/meta.js 2022-02-09 22:50:18.145862052 +0100
@@ -13,4 +13,5 @@
CodeMirror.modeInfo = [
@@ -62,10 +62,10 @@ diff -NarU2 codemirror-5.59.3-orig/mode/meta.js codemirror-5.59.3/mode/meta.js
+ */
];
// Ensure all modes have a mime property for backwards compatibility
diff -NarU2 codemirror-5.59.3-orig/src/display/selection.js codemirror-5.59.3/src/display/selection.js
--- codemirror-5.59.3-orig/src/display/selection.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/display/selection.js 2021-02-21 20:44:14.860894328 +0000
@@ -84,29 +84,21 @@
diff -wNarU2 codemirror-5.65.1-orig/src/display/selection.js codemirror-5.65.1/src/display/selection.js
--- codemirror-5.65.1-orig/src/display/selection.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/src/display/selection.js 2022-02-09 22:50:18.145862052 +0100
@@ -96,29 +96,21 @@
let order = getOrder(lineObj, doc.direction)
iterateBidiSections(order, fromArg || 0, toArg == null ? lineLen : toArg, (from, to, dir, i) => {
- let ltr = dir == "ltr"
@@ -105,24 +105,24 @@ diff -NarU2 codemirror-5.59.3-orig/src/display/selection.js codemirror-5.59.3/sr
+ botRight = openEnd && last ? rightSide : toPos.right
add(topLeft, fromPos.top, topRight - topLeft, fromPos.bottom)
if (fromPos.bottom < toPos.top) add(leftSide, fromPos.bottom, null, toPos.top)
diff -NarU2 codemirror-5.59.3-orig/src/input/ContentEditableInput.js codemirror-5.59.3/src/input/ContentEditableInput.js
--- codemirror-5.59.3-orig/src/input/ContentEditableInput.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/input/ContentEditableInput.js 2021-02-21 20:44:33.273953867 +0000
@@ -399,4 +399,5 @@
diff -wNarU2 codemirror-5.65.1-orig/src/input/ContentEditableInput.js codemirror-5.65.1/src/input/ContentEditableInput.js
--- codemirror-5.65.1-orig/src/input/ContentEditableInput.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/src/input/ContentEditableInput.js 2022-02-09 22:50:18.145862052 +0100
@@ -400,4 +400,5 @@
let info = mapFromLineView(view, line, pos.line)
+ /*
let order = getOrder(line, cm.doc.direction), side = "left"
if (order) {
@@ -404,4 +405,5 @@
@@ -405,4 +406,5 @@
side = partPos % 2 ? "right" : "left"
}
+ */
let result = nodeAndOffsetInLineMap(info.map, pos.ch, side)
result.offset = result.collapse == "right" ? result.end : result.start
diff -NarU2 codemirror-5.59.3-orig/src/input/movement.js codemirror-5.59.3/src/input/movement.js
--- codemirror-5.59.3-orig/src/input/movement.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/input/movement.js 2021-02-21 20:45:12.763093671 +0000
diff -wNarU2 codemirror-5.65.1-orig/src/input/movement.js codemirror-5.65.1/src/input/movement.js
--- codemirror-5.65.1-orig/src/input/movement.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/src/input/movement.js 2022-02-09 22:50:18.145862052 +0100
@@ -15,4 +15,5 @@
export function endOfLine(visually, cm, lineObj, lineNo, dir) {
@@ -146,9 +146,16 @@ diff -NarU2 codemirror-5.59.3-orig/src/input/movement.js codemirror-5.59.3/src/i
return null
+ */
}
diff -NarU2 codemirror-5.59.3-orig/src/line/line_data.js codemirror-5.59.3/src/line/line_data.js
--- codemirror-5.59.3-orig/src/line/line_data.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/line/line_data.js 2021-02-21 20:45:36.472549599 +0000
diff -wNarU2 codemirror-5.65.1-orig/src/line/line_data.js codemirror-5.65.1/src/line/line_data.js
--- codemirror-5.65.1-orig/src/line/line_data.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/src/line/line_data.js 2022-02-09 22:54:11.542722046 +0100
@@ -3,5 +3,5 @@
import { elt, eltP, joinClasses } from "../util/dom.js"
import { eventMixin, signal } from "../util/event.js"
-import { hasBadBidiRects, zeroWidthElement } from "../util/feature_detection.js"
+import { zeroWidthElement } from "../util/feature_detection.js"
import { lst, spaceStr } from "../util/misc.js"
@@ -79,6 +79,6 @@
// Optionally wire in some hacks into the token-rendering
// algorithm, to deal with browser quirks.
@@ -158,10 +165,10 @@ diff -NarU2 codemirror-5.59.3-orig/src/line/line_data.js codemirror-5.59.3/src/l
+ // builder.addToken = buildTokenBadBidi(builder.addToken, order)
builder.map = []
let allowFrontierUpdate = lineView != cm.display.externalMeasured && lineNo(line)
diff -NarU2 codemirror-5.59.3-orig/src/measurement/position_measurement.js codemirror-5.59.3/src/measurement/position_measurement.js
--- codemirror-5.59.3-orig/src/measurement/position_measurement.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/measurement/position_measurement.js 2021-02-21 20:50:52.372945293 +0000
@@ -380,5 +380,6 @@
diff -wNarU2 codemirror-5.65.1-orig/src/measurement/position_measurement.js codemirror-5.65.1/src/measurement/position_measurement.js
--- codemirror-5.65.1-orig/src/measurement/position_measurement.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/src/measurement/position_measurement.js 2022-02-09 22:50:18.145862052 +0100
@@ -382,5 +382,6 @@
sticky = "after"
}
- if (!order) return get(sticky == "before" ? ch - 1 : ch, sticky == "before")
@@ -169,39 +176,39 @@ diff -NarU2 codemirror-5.59.3-orig/src/measurement/position_measurement.js codem
+ /*
function getBidi(ch, partPos, invert) {
@@ -391,4 +392,5 @@
@@ -393,4 +394,5 @@
if (other != null) val.other = getBidi(ch, other, sticky != "before")
return val
+ */
}
@@ -468,4 +470,5 @@
@@ -470,4 +472,5 @@
let begin = 0, end = lineObj.text.length, ltr = true
+ /*
let order = getOrder(lineObj, cm.doc.direction)
// If the line isn't plain left-to-right text, first figure out
@@ -482,4 +485,5 @@
@@ -484,4 +487,5 @@
end = ltr ? part.to : part.from - 1
}
+ */
// A binary search to find the first character whose bounding box
@@ -526,4 +530,5 @@
@@ -528,4 +532,5 @@
}
+/*
function coordsBidiPart(cm, lineObj, lineNo, preparedMeasure, order, x, y) {
// Bidi parts are sorted left-to-right, and in a non-line-wrapping
@@ -580,4 +585,5 @@
@@ -582,4 +587,5 @@
return part
}
+*/
let measureText
diff -NarU2 codemirror-5.59.3-orig/src/util/bidi.js codemirror-5.59.3/src/util/bidi.js
--- codemirror-5.59.3-orig/src/util/bidi.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/util/bidi.js 2021-02-21 20:52:18.168092225 +0000
diff -wNarU2 codemirror-5.65.1-orig/src/util/bidi.js codemirror-5.65.1/src/util/bidi.js
--- codemirror-5.65.1-orig/src/util/bidi.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/src/util/bidi.js 2022-02-09 22:50:18.145862052 +0100
@@ -4,5 +4,5 @@
export function iterateBidiSections(order, from, to, f) {
@@ -259,9 +266,9 @@ diff -NarU2 codemirror-5.59.3-orig/src/util/bidi.js codemirror-5.59.3/src/util/b
- return order
+ return false;
}
diff -NarU2 codemirror-5.59.3-orig/src/util/feature_detection.js codemirror-5.59.3/src/util/feature_detection.js
--- codemirror-5.59.3-orig/src/util/feature_detection.js 2021-02-20 21:24:57.000000000 +0000
+++ codemirror-5.59.3/src/util/feature_detection.js 2021-02-21 20:49:22.191269270 +0000
diff -wNarU2 codemirror-5.65.1-orig/src/util/feature_detection.js codemirror-5.65.1/src/util/feature_detection.js
--- codemirror-5.65.1-orig/src/util/feature_detection.js 2022-01-20 13:06:23.000000000 +0100
+++ codemirror-5.65.1/src/util/feature_detection.js 2022-02-09 22:50:18.145862052 +0100
@@ -25,4 +25,5 @@
}


@@ -0,0 +1,12 @@
diff --git a/src/js/easymde.js b/src/js/easymde.js
--- a/src/js/easymde.js
+++ b/src/js/easymde.js
@@ -1962,7 +1962,7 @@ EasyMDE.prototype.markdown = function (text) {
marked.setOptions(markedOptions);
// Convert the markdown to HTML
- var htmlText = marked(text);
+ var htmlText = marked.parse(text);
// Sanitize HTML
if (this.options.renderingConfig && typeof this.options.renderingConfig.sanitizerFunction === 'function') {


@@ -1,52 +1,52 @@
diff -NarU2 easy-markdown-editor-2.14.0-orig/gulpfile.js easy-markdown-editor-2.14.0/gulpfile.js
--- easy-markdown-editor-2.14.0-orig/gulpfile.js 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/gulpfile.js 2021-02-21 20:55:37.134701007 +0000
diff -wNarU2 easy-markdown-editor-2.16.1-orig/gulpfile.js easy-markdown-editor-2.16.1/gulpfile.js
--- easy-markdown-editor-2.16.1-orig/gulpfile.js 2022-01-14 23:27:44.000000000 +0100
+++ easy-markdown-editor-2.16.1/gulpfile.js 2022-02-09 23:06:01.694592535 +0100
@@ -25,5 +25,4 @@
'./node_modules/codemirror/lib/codemirror.css',
'./src/css/*.css',
- './node_modules/codemirror-spell-checker/src/css/spell-checker.css',
];
diff -NarU2 easy-markdown-editor-2.14.0-orig/package.json easy-markdown-editor-2.14.0/package.json
--- easy-markdown-editor-2.14.0-orig/package.json 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/package.json 2021-02-21 20:55:47.761190082 +0000
@@ -21,5 +21,4 @@
"dependencies": {
"codemirror": "^5.59.2",
diff -wNarU2 easy-markdown-editor-2.16.1-orig/package.json easy-markdown-editor-2.16.1/package.json
--- easy-markdown-editor-2.16.1-orig/package.json 2022-01-14 23:27:44.000000000 +0100
+++ easy-markdown-editor-2.16.1/package.json 2022-02-09 23:06:24.778501888 +0100
@@ -23,5 +23,4 @@
"@types/marked": "^4.0.1",
"codemirror": "^5.63.1",
- "codemirror-spell-checker": "1.1.2",
"marked": "^2.0.0"
"marked": "^4.0.10"
},
diff -NarU2 easy-markdown-editor-2.14.0-orig/src/js/easymde.js easy-markdown-editor-2.14.0/src/js/easymde.js
--- easy-markdown-editor-2.14.0-orig/src/js/easymde.js 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/src/js/easymde.js 2021-02-21 20:57:09.143171536 +0000
diff -wNarU2 easy-markdown-editor-2.16.1-orig/src/js/easymde.js easy-markdown-editor-2.16.1/src/js/easymde.js
--- easy-markdown-editor-2.16.1-orig/src/js/easymde.js 2022-01-14 23:27:44.000000000 +0100
+++ easy-markdown-editor-2.16.1/src/js/easymde.js 2022-02-09 23:07:21.203131415 +0100
@@ -12,5 +12,4 @@
require('codemirror/mode/gfm/gfm.js');
require('codemirror/mode/xml/xml.js');
-var CodeMirrorSpellChecker = require('codemirror-spell-checker');
var marked = require('marked/lib/marked');
var marked = require('marked').marked;
@@ -1762,9 +1761,4 @@
@@ -1816,9 +1815,4 @@
options.autosave.uniqueId = options.autosave.unique_id;
- // If overlay mode is specified and combine is not provided, default it to true
- if (options.overlayMode && options.overlayMode.combine === undefined) {
- options.overlayMode.combine = true;
- options.overlayMode.combine = true;
- }
-
// Update this options
this.options = options;
@@ -2003,28 +1997,7 @@
@@ -2057,34 +2051,7 @@
var mode, backdrop;
- // CodeMirror overlay mode
- if (options.overlayMode) {
- CodeMirror.defineMode('overlay-mode', function(config) {
- return CodeMirror.overlayMode(CodeMirror.getMode(config, options.spellChecker !== false ? 'spell-checker' : 'gfm'), options.overlayMode.mode, options.overlayMode.combine);
- });
- CodeMirror.defineMode('overlay-mode', function (config) {
- return CodeMirror.overlayMode(CodeMirror.getMode(config, options.spellChecker !== false ? 'spell-checker' : 'gfm'), options.overlayMode.mode, options.overlayMode.combine);
- });
-
- mode = 'overlay-mode';
- backdrop = options.parsingConfig;
- backdrop.gitHubSpice = false;
- mode = 'overlay-mode';
- backdrop = options.parsingConfig;
- backdrop.gitHubSpice = false;
- } else {
mode = options.parsingConfig;
mode.name = 'gfm';
@@ -58,31 +58,35 @@ diff -NarU2 easy-markdown-editor-2.14.0-orig/src/js/easymde.js easy-markdown-edi
- backdrop.name = 'gfm';
- backdrop.gitHubSpice = false;
-
- CodeMirrorSpellChecker({
- codeMirrorInstance: CodeMirror,
- });
- if (typeof options.spellChecker === 'function') {
- options.spellChecker({
- codeMirrorInstance: CodeMirror,
- });
- } else {
- CodeMirrorSpellChecker({
- codeMirrorInstance: CodeMirror,
- });
- }
- }
// eslint-disable-next-line no-unused-vars
diff -NarU2 easy-markdown-editor-2.14.0-orig/types/easymde.d.ts easy-markdown-editor-2.14.0/types/easymde.d.ts
--- easy-markdown-editor-2.14.0-orig/types/easymde.d.ts 2021-02-14 12:11:48.000000000 +0000
+++ easy-markdown-editor-2.14.0/types/easymde.d.ts 2021-02-21 20:57:42.492620979 +0000
@@ -160,9 +160,4 @@
diff -wNarU2 easy-markdown-editor-2.16.1-orig/types/easymde.d.ts easy-markdown-editor-2.16.1/types/easymde.d.ts
--- easy-markdown-editor-2.16.1-orig/types/easymde.d.ts 2022-01-14 23:27:44.000000000 +0100
+++ easy-markdown-editor-2.16.1/types/easymde.d.ts 2022-02-09 23:07:55.427605243 +0100
@@ -167,9 +167,4 @@
}
- interface OverlayModeOptions {
- mode: CodeMirror.Mode<any>
- combine?: boolean
- mode: CodeMirror.Mode<any>;
- combine?: boolean;
- }
-
interface Options {
autoDownloadFontAwesome?: boolean;
@@ -214,7 +209,5 @@
interface SpellCheckerOptions {
codeMirrorInstance: CodeMirror.Editor;
@@ -229,6 +224,4 @@
syncSideBySidePreviewScroll?: boolean;
promptTexts?: PromptTexts;
- syncSideBySidePreviewScroll?: boolean;
- overlayMode?: OverlayModeOptions;
-
- overlayMode?: OverlayModeOptions
+ syncSideBySidePreviewScroll?: boolean
direction?: 'ltr' | 'rtl';
}
}


@@ -1,15 +1,15 @@
diff --git a/src/Lexer.js b/src/Lexer.js
adds linetracking to marked.js v3.0.4;
adds linetracking to marked.js v4.0.6;
add data-ln="%d" to most tags, %d is the source markdown line
--- a/src/Lexer.js
+++ b/src/Lexer.js
@@ -50,4 +50,5 @@ function mangle(text) {
module.exports = class Lexer {
export class Lexer {
constructor(options) {
+ this.ln = 1; // like most editors, start counting from 1
this.tokens = [];
this.tokens.links = Object.create(null);
@@ -127,4 +128,15 @@ module.exports = class Lexer {
@@ -127,4 +128,15 @@ export class Lexer {
}
+ set_ln(token, ln = this.ln) {
@@ -25,7 +25,7 @@ add data-ln="%d" to most tags, %d is the source markdown line
+
/**
* Lexing
@@ -134,7 +146,11 @@ module.exports = class Lexer {
@@ -134,7 +146,11 @@ export class Lexer {
src = src.replace(/^ +$/gm, '');
}
- let token, lastToken, cutSrc, lastParagraphClipped;
@@ -38,105 +38,105 @@ add data-ln="%d" to most tags, %d is the source markdown line
+
if (this.options.extensions
&& this.options.extensions.block
@@ -142,4 +158,5 @@ module.exports = class Lexer {
@@ -142,4 +158,5 @@ export class Lexer {
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
tokens.push(token);
return true;
@@ -153,4 +170,5 @@ module.exports = class Lexer {
@@ -153,4 +170,5 @@ export class Lexer {
if (token = this.tokenizer.space(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln); // is \n if not type
if (token.type) {
tokens.push(token);
@@ -162,4 +180,5 @@ module.exports = class Lexer {
@@ -162,4 +180,5 @@ export class Lexer {
if (token = this.tokenizer.code(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
lastToken = tokens[tokens.length - 1];
// An indented code block cannot interrupt a paragraph.
@@ -177,4 +196,5 @@ module.exports = class Lexer {
@@ -177,4 +196,5 @@ export class Lexer {
if (token = this.tokenizer.fences(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -184,4 +204,5 @@ module.exports = class Lexer {
@@ -184,4 +204,5 @@ export class Lexer {
if (token = this.tokenizer.heading(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -191,4 +212,5 @@ module.exports = class Lexer {
@@ -191,4 +212,5 @@ export class Lexer {
if (token = this.tokenizer.hr(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -198,4 +220,5 @@ module.exports = class Lexer {
@@ -198,4 +220,5 @@ export class Lexer {
if (token = this.tokenizer.blockquote(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -205,4 +228,5 @@ module.exports = class Lexer {
@@ -205,4 +228,5 @@ export class Lexer {
if (token = this.tokenizer.list(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -212,4 +236,5 @@ module.exports = class Lexer {
@@ -212,4 +236,5 @@ export class Lexer {
if (token = this.tokenizer.html(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -219,4 +244,5 @@ module.exports = class Lexer {
@@ -219,4 +244,5 @@ export class Lexer {
if (token = this.tokenizer.def(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
lastToken = tokens[tokens.length - 1];
if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
@@ -236,4 +262,5 @@ module.exports = class Lexer {
@@ -236,4 +262,5 @@ export class Lexer {
if (token = this.tokenizer.table(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -243,4 +270,5 @@ module.exports = class Lexer {
@@ -243,4 +270,5 @@ export class Lexer {
if (token = this.tokenizer.lheading(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
tokens.push(token);
continue;
@@ -263,4 +291,5 @@ module.exports = class Lexer {
@@ -263,4 +291,5 @@ export class Lexer {
}
if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
+ this.set_ln(token, ln);
lastToken = tokens[tokens.length - 1];
if (lastParagraphClipped && lastToken.type === 'paragraph') {
@@ -280,4 +309,6 @@ module.exports = class Lexer {
@@ -280,4 +309,6 @@ export class Lexer {
if (token = this.tokenizer.text(src)) {
src = src.substring(token.raw.length);
+ this.set_ln(token, ln);
+ this.ln++;
lastToken = tokens[tokens.length - 1];
if (lastToken && lastToken.type === 'text') {
@@ -355,4 +386,5 @@ module.exports = class Lexer {
@@ -355,4 +386,5 @@ export class Lexer {
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
src = src.substring(token.raw.length);
+ this.ln = token.ln || this.ln;
tokens.push(token);
return true;
@@ -420,4 +452,6 @@ module.exports = class Lexer {
@@ -420,4 +452,6 @@ export class Lexer {
if (token = this.tokenizer.br(src)) {
src = src.substring(token.raw.length);
+ // no need to reset (no more blockTokens anyways)
+ token.ln = this.ln++;
tokens.push(token);
continue;
@@ -462,4 +496,5 @@ module.exports = class Lexer {
@@ -462,4 +496,5 @@ export class Lexer {
if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
src = src.substring(token.raw.length);
+ this.ln = token.ln || this.ln;
@@ -145,13 +145,13 @@ add data-ln="%d" to most tags, %d is the source markdown line
diff --git a/src/Parser.js b/src/Parser.js
--- a/src/Parser.js
+++ b/src/Parser.js
@@ -18,4 +18,5 @@ module.exports = class Parser {
@@ -18,4 +18,5 @@ export class Parser {
this.textRenderer = new TextRenderer();
this.slugger = new Slugger();
+ this.ln = 0; // error indicator; should always be set >=1 from tokens
}
@@ -64,4 +65,8 @@ module.exports = class Parser {
@@ -64,4 +65,8 @@ export class Parser {
for (i = 0; i < l; i++) {
token = tokens[i];
+ // take line-numbers from tokens whenever possible
@@ -160,7 +160,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ this.renderer.tag_ln(this.ln);
// Run any renderer extensions
@@ -124,7 +129,10 @@ module.exports = class Parser {
@@ -124,7 +129,10 @@ export class Parser {
}
- body += this.renderer.tablerow(cell);
@@ -173,7 +173,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).table(header, body);
continue;
}
@@ -167,8 +175,12 @@ module.exports = class Parser {
@@ -167,8 +175,12 @@ export class Parser {
itemBody += this.parse(item.tokens, loose);
- body += this.renderer.listitem(itemBody, task, checked);
@@ -188,7 +188,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).list(body, ordered, start);
continue;
}
@@ -179,5 +191,6 @@ module.exports = class Parser {
@@ -179,5 +191,6 @@ export class Parser {
}
case 'paragraph': {
- out += this.renderer.paragraph(this.parseInline(token.tokens));
@@ -196,7 +196,7 @@ diff --git a/src/Parser.js b/src/Parser.js
+ out += this.renderer.tag_ln(token.ln).paragraph(t);
continue;
}
@@ -221,4 +234,7 @@ module.exports = class Parser {
@@ -221,4 +234,7 @@ export class Parser {
token = tokens[i];
+ // another thing that only affects <br/> and other inlines
@@ -207,7 +207,7 @@ diff --git a/src/Parser.js b/src/Parser.js
diff --git a/src/Renderer.js b/src/Renderer.js
--- a/src/Renderer.js
+++ b/src/Renderer.js
@@ -11,6 +11,12 @@ module.exports = class Renderer {
@@ -11,6 +11,12 @@ export class Renderer {
constructor(options) {
this.options = options || defaults;
+ this.ln = "";
@@ -220,7 +220,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
+
code(code, infostring, escaped) {
const lang = (infostring || '').match(/\S*/)[0];
@@ -26,10 +32,10 @@ module.exports = class Renderer {
@@ -26,10 +32,10 @@ export class Renderer {
if (!lang) {
- return '<pre><code>'
@@ -233,55 +233,55 @@ diff --git a/src/Renderer.js b/src/Renderer.js
+ return '<pre' + this.ln + '><code class="'
+ this.options.langPrefix
+ escape(lang, true)
@@ -40,5 +46,5 @@ module.exports = class Renderer {
@@ -40,5 +46,5 @@ export class Renderer {
blockquote(quote) {
- return '<blockquote>\n' + quote + '</blockquote>\n';
+ return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n';
}
@@ -51,4 +57,5 @@ module.exports = class Renderer {
@@ -51,4 +57,5 @@ export class Renderer {
return '<h'
+ level
+ + this.ln
+ ' id="'
+ this.options.headerPrefix
@@ -61,5 +68,5 @@ module.exports = class Renderer {
@@ -61,5 +68,5 @@ export class Renderer {
}
// ignore IDs
- return '<h' + level + '>' + text + '</h' + level + '>\n';
+ return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n';
}
@@ -75,5 +82,5 @@ module.exports = class Renderer {
@@ -75,5 +82,5 @@ export class Renderer {
listitem(text) {
- return '<li>' + text + '</li>\n';
+ return '<li' + this.ln + '>' + text + '</li>\n';
}
@@ -87,5 +94,5 @@ module.exports = class Renderer {
@@ -87,5 +94,5 @@ export class Renderer {
paragraph(text) {
- return '<p>' + text + '</p>\n';
+ return '<p' + this.ln + '>' + text + '</p>\n';
}
@@ -102,5 +109,5 @@ module.exports = class Renderer {
@@ -102,5 +109,5 @@ export class Renderer {
tablerow(content) {
- return '<tr>\n' + content + '</tr>\n';
+ return '<tr' + this.ln + '>\n' + content + '</tr>\n';
}
@@ -127,5 +134,5 @@ module.exports = class Renderer {
@@ -127,5 +134,5 @@ export class Renderer {
br() {
- return this.options.xhtml ? '<br/>' : '<br>';
+ return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>';
}
@@ -153,5 +160,5 @@ module.exports = class Renderer {
@@ -153,5 +160,5 @@ export class Renderer {
}
- let out = '<img src="' + href + '" alt="' + text + '"';
@@ -291,7 +291,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
--- a/src/Tokenizer.js
+++ b/src/Tokenizer.js
@@ -301,4 +301,7 @@ module.exports = class Tokenizer {
@@ -297,4 +297,7 @@ export class Tokenizer {
const l = list.items.length;
+ // each nested list gets +1 ahead; this hack makes every listgroup -1 but at least it doesn't get infinitely bad
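
Taken together, the hunks above make the lexer carry a running source-line counter (this.ln), copy it onto each token via set_ln(), and let the parser pass it to the renderer (tag_ln) so every emitted block tag gets a data-ln attribute. A toy sketch of that idea in python, purely for illustration (render_with_ln and its tag handling are made up here, not the marked.js code):

def render_with_ln(md):
    # walk the source, remember the current line, and stamp it onto every
    # emitted block tag as data-ln so the HTML maps back to the markdown
    out = []
    for ln, line in enumerate(md.split("\n"), 1):
        if not line.strip():
            continue  # blank lines emit nothing but still advance the counter
        if line.startswith("# "):
            out.append('<h1 data-ln="%d">%s</h1>' % (ln, line[2:]))
        else:
            out.append('<p data-ln="%d">%s</p>' % (ln, line))
    return "\n".join(out)

print(render_with_ln("# hello\n\nfirst paragraph\n\nsecond paragraph\n"))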


@@ -1,7 +1,7 @@
diff --git a/src/Lexer.js b/src/Lexer.js
--- a/src/Lexer.js
+++ b/src/Lexer.js
@@ -6,5 +6,5 @@ const { repeatString } = require('./helpers.js');
@@ -6,5 +6,5 @@ import { repeatString } from './helpers.js';
/**
* smartypants text replacement
- */
@@ -15,21 +15,21 @@ diff --git a/src/Lexer.js b/src/Lexer.js
+ *
function mangle(text) {
let out = '',
@@ -465,5 +465,5 @@ module.exports = class Lexer {
@@ -466,5 +466,5 @@ export class Lexer {
// autolink
- if (token = this.tokenizer.autolink(src, mangle)) {
+ if (token = this.tokenizer.autolink(src)) {
src = src.substring(token.raw.length);
tokens.push(token);
@@ -472,5 +472,5 @@ module.exports = class Lexer {
@@ -473,5 +473,5 @@ export class Lexer {
// url (gfm)
- if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) {
+ if (!this.state.inLink && (token = this.tokenizer.url(src))) {
src = src.substring(token.raw.length);
tokens.push(token);
@@ -493,5 +493,5 @@ module.exports = class Lexer {
@@ -494,5 +494,5 @@ export class Lexer {
}
}
- if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
@@ -39,14 +39,14 @@ diff --git a/src/Lexer.js b/src/Lexer.js
diff --git a/src/Renderer.js b/src/Renderer.js
--- a/src/Renderer.js
+++ b/src/Renderer.js
@@ -142,5 +142,5 @@ module.exports = class Renderer {
@@ -142,5 +142,5 @@ export class Renderer {
link(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
+ href = cleanUrl(this.options.baseUrl, href);
if (href === null) {
return text;
@@ -155,5 +155,5 @@ module.exports = class Renderer {
@@ -155,5 +155,5 @@ export class Renderer {
image(href, title, text) {
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
@@ -56,7 +56,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
--- a/src/Tokenizer.js
+++ b/src/Tokenizer.js
@@ -321,14 +321,7 @@ module.exports = class Tokenizer {
@@ -320,14 +320,7 @@ export class Tokenizer {
type: 'html',
raw: cap[0],
- pre: !this.options.sanitizer
@@ -72,7 +72,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
- }
return token;
}
@@ -477,15 +470,9 @@ module.exports = class Tokenizer {
@@ -476,15 +469,9 @@ export class Tokenizer {
return {
- type: this.options.sanitize
@@ -90,7 +90,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text: cap[0]
};
}
@@ -672,10 +659,10 @@ module.exports = class Tokenizer {
@@ -671,10 +658,10 @@ export class Tokenizer {
}
- autolink(src, mangle) {
@@ -103,7 +103,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text = escape(cap[1]);
href = 'mailto:' + text;
} else {
@@ -700,10 +687,10 @@ module.exports = class Tokenizer {
@@ -699,10 +686,10 @@ export class Tokenizer {
}
- url(src, mangle) {
@@ -116,7 +116,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text = escape(cap[0]);
href = 'mailto:' + text;
} else {
@@ -737,12 +724,12 @@ module.exports = class Tokenizer {
@@ -736,12 +723,12 @@ export class Tokenizer {
}
- inlineText(src, smartypants) {
@@ -135,7 +135,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
diff --git a/src/defaults.js b/src/defaults.js
--- a/src/defaults.js
+++ b/src/defaults.js
@@ -9,12 +9,8 @@ function getDefaults() {
@@ -9,12 +9,8 @@ export function getDefaults() {
highlight: null,
langPrefix: 'language-',
- mangle: true,
@@ -151,10 +151,10 @@ diff --git a/src/defaults.js b/src/defaults.js
diff --git a/src/helpers.js b/src/helpers.js
--- a/src/helpers.js
+++ b/src/helpers.js
@@ -64,18 +64,5 @@ function edit(regex, opt) {
@@ -64,18 +64,5 @@ export function edit(regex, opt) {
const nonWordAndColonTest = /[^\w:]/g;
const originIndependentUrl = /^$|^[a-z][a-z0-9+.-]*:|^[?#]/i;
-function cleanUrl(sanitize, base, href) {
-export function cleanUrl(sanitize, base, href) {
- if (sanitize) {
- let prot;
- try {
@@ -168,36 +168,30 @@ diff --git a/src/helpers.js b/src/helpers.js
- return null;
- }
- }
+function cleanUrl(base, href) {
+export function cleanUrl(base, href) {
if (base && !originIndependentUrl.test(href)) {
href = resolveUrl(base, href);
@@ -227,10 +214,4 @@ function findClosingBracket(str, b) {
@@ -227,10 +214,4 @@ export function findClosingBracket(str, b) {
}
-function checkSanitizeDeprecation(opt) {
-export function checkSanitizeDeprecation(opt) {
- if (opt && opt.sanitize && !opt.silent) {
- console.warn('marked(): sanitize and sanitizer parameters are deprecated since version 0.7.0, should not be used and will be removed in the future. Read more here: https://marked.js.org/#/USING_ADVANCED.md#options');
- }
-}
-
// copied from https://stackoverflow.com/a/5450113/806777
function repeatString(pattern, count) {
@@ -260,5 +241,4 @@ module.exports = {
rtrim,
findClosingBracket,
- checkSanitizeDeprecation,
repeatString
};
export function repeatString(pattern, count) {
diff --git a/src/marked.js b/src/marked.js
--- a/src/marked.js
+++ b/src/marked.js
@@ -7,5 +7,4 @@ const Slugger = require('./Slugger.js');
const {
@@ -7,5 +7,4 @@ import { Slugger } from './Slugger.js';
import {
merge,
- checkSanitizeDeprecation,
escape
} = require('./helpers.js');
@@ -35,5 +34,4 @@ function marked(src, opt, callback) {
} from './helpers.js';
@@ -35,5 +34,4 @@ export function marked(src, opt, callback) {
opt = merge({}, marked.defaults, opt || {});
- checkSanitizeDeprecation(opt);
@@ -219,37 +213,37 @@ diff --git a/src/marked.js b/src/marked.js
diff --git a/test/bench.js b/test/bench.js
--- a/test/bench.js
+++ b/test/bench.js
@@ -33,5 +33,4 @@ async function runBench(options) {
@@ -37,5 +37,4 @@ export async function runBench(options) {
breaks: false,
pedantic: false,
- sanitize: false,
smartLists: false
});
@@ -45,5 +44,4 @@ async function runBench(options) {
@@ -49,5 +48,4 @@ export async function runBench(options) {
breaks: false,
pedantic: false,
- sanitize: false,
smartLists: false
});
@@ -58,5 +56,4 @@ async function runBench(options) {
@@ -62,5 +60,4 @@ export async function runBench(options) {
breaks: false,
pedantic: false,
- sanitize: false,
smartLists: false
});
@@ -70,5 +67,4 @@ async function runBench(options) {
@@ -74,5 +71,4 @@ export async function runBench(options) {
breaks: false,
pedantic: false,
- sanitize: false,
smartLists: false
});
@@ -83,5 +79,4 @@ async function runBench(options) {
@@ -87,5 +83,4 @@ export async function runBench(options) {
breaks: false,
pedantic: true,
- sanitize: false,
smartLists: false
});
@@ -95,5 +90,4 @@ async function runBench(options) {
@@ -99,5 +94,4 @@ export async function runBench(options) {
breaks: false,
pedantic: true,
- sanitize: false,
@@ -258,7 +252,7 @@ diff --git a/test/bench.js b/test/bench.js
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
--- a/test/specs/run-spec.js
+++ b/test/specs/run-spec.js
@@ -22,9 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
@@ -25,9 +25,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
}
- if (spec.options.sanitizer) {
@@ -268,77 +262,77 @@ diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
-
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
const before = process.hrtime();
@@ -53,3 +48,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
@@ -56,3 +51,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
runSpecs('New', './new');
runSpecs('ReDOS', './redos');
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
--- a/test/unit/Lexer-spec.js
+++ b/test/unit/Lexer-spec.js
@@ -589,5 +589,5 @@ paragraph
@@ -635,5 +635,5 @@ paragraph
});
- it('sanitize', () => {
+ /*it('sanitize', () => {
expectTokens({
md: '<div>html</div>',
@@ -607,5 +607,5 @@ paragraph
@@ -653,5 +653,5 @@ paragraph
]
});
- });
+ });*/
});
@@ -652,5 +652,5 @@ paragraph
@@ -698,5 +698,5 @@ paragraph
});
- it('html sanitize', () => {
+ /*it('html sanitize', () => {
expectInlineTokens({
md: '<div>html</div>',
@@ -660,5 +660,5 @@ paragraph
@@ -706,5 +706,5 @@ paragraph
]
});
- });
+ });*/
it('link', () => {
@@ -971,5 +971,5 @@ paragraph
@@ -1017,5 +1017,5 @@ paragraph
});
- it('autolink mangle email', () => {
+ /*it('autolink mangle email', () => {
expectInlineTokens({
md: '<test@example.com>',
@@ -991,5 +991,5 @@ paragraph
@@ -1037,5 +1037,5 @@ paragraph
]
});
- });
+ });*/
it('url', () => {
@@ -1028,5 +1028,5 @@ paragraph
@@ -1074,5 +1074,5 @@ paragraph
});
- it('url mangle email', () => {
+ /*it('url mangle email', () => {
expectInlineTokens({
md: 'test@example.com',
@@ -1048,5 +1048,5 @@ paragraph
@@ -1094,5 +1094,5 @@ paragraph
]
});
- });
+ });*/
});
@@ -1064,5 +1064,5 @@ paragraph
@@ -1110,5 +1110,5 @@ paragraph
});
- describe('smartypants', () => {
+ /*describe('smartypants', () => {
it('single quotes', () => {
expectInlineTokens({
@@ -1134,5 +1134,5 @@ paragraph
@@ -1180,5 +1180,5 @@ paragraph
});
});
- });


@@ -29,3 +29,10 @@ pyftsubset "$orig_woff" --unicodes-file=/z/icon.list --no-ignore-missing-unicode
# scp is easier, just want basic latin
pyftsubset /z/scp.woff2 --unicodes="20-7e,ab,b7,bb,2022" --no-ignore-missing-unicodes --flavor=woff2 --output-file=/z/dist/no-pk/scp.woff2 --verbose
exit 0
# kinda works but ruins hinting on windows, just use the old version of the font which has correct baseline
python3 shiftbase.py /z/dist/no-pk/scp.woff2
cd /z/dist/no-pk/
mv scp.woff2.woff2 scp.woff2
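
As an aside, the --unicodes list above keeps 20-7e (printable ASCII) plus four extra codepoints; a quick python one-off to see which glyphs those are (the assumption being that they are the guillemets, middle dot and bullet used by the UI):

for cp in (0xAB, 0xB7, 0xBB, 0x2022):
    print(hex(cp), chr(cp))  # ab «  b7 ·  bb »  2022 •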


@@ -0,0 +1,27 @@
#!/usr/bin/env python3

import sys

from fontTools.ttLib import TTFont, newTable


def main():
    woff = sys.argv[1]
    font = TTFont(woff)

    # dump the vertical-metrics tables before touching them
    print(repr(font["hhea"].__dict__))
    print(repr(font["OS/2"].__dict__))

    # font["hhea"].ascent = round(base_asc * mul)
    # font["hhea"].descent = round(base_desc * mul)
    # font["OS/2"].usWinAscent = round(base_asc * mul)

    # scale the descent metrics up by 10% to adjust where the baseline lands
    font["OS/2"].usWinDescent = round(font["OS/2"].usWinDescent * 1.1)
    font["OS/2"].sTypoDescender = round(font["OS/2"].sTypoDescender * 1.1)

    # drop a leftover post-table name mapping if present
    try:
        del font["post"].mapping["Delta#1"]
    except:
        pass

    font.save(woff + ".woff2")


if __name__ == "__main__":
    main()


@@ -86,8 +86,6 @@ function have() {
python -c "import $1; $1; $1.__version__"
}
mv copyparty/web/deps/marked.full.js.gz srv/ || true
. buildenv/bin/activate
have setuptools
have wheel


@@ -14,11 +14,6 @@ help() { exec cat <<'EOF'
#
# `gz` creates a gzip-compressed python sfx instead of bzip2
#
# `no-sh` makes just the python sfx, skips the sh/unix sfx
#
# `no-ogv` saves ~192k by removing the opus/vorbis audio codecs
# (only affects apple devices; everything else has native support)
#
# `no-cm` saves ~82k by removing easymde/codemirror
# (the fancy markdown editor)
#
@@ -67,21 +62,16 @@ pybin=$(command -v python3 || command -v python) || {
}
use_gz=
do_sh=1
do_py=1
zopf=2560
while [ ! -z "$1" ]; do
case $1 in
clean) clean=1 ; ;;
re) repack=1 ; ;;
gz) use_gz=1 ; ;;
no-ogv) no_ogv=1 ; ;;
no-fnt) no_fnt=1 ; ;;
no-hl) no_hl=1 ; ;;
no-dd) no_dd=1 ; ;;
no-cm) no_cm=1 ; ;;
no-sh) do_sh= ; ;;
no-py) do_py= ; ;;
fast) zopf=100 ; ;;
*) help ; ;;
esac
@@ -111,7 +101,7 @@ tmpdir="$(
[ $repack ] && {
old="$tmpdir/pe-copyparty"
echo "repack of files in $old"
cp -pR "$old/"*{dep-j2,copyparty} .
cp -pR "$old/"*{dep-j2,dep-ftp,copyparty} .
}
[ $repack ] || {
@@ -138,6 +128,27 @@ tmpdir="$(
mkdir dep-j2/
mv {markupsafe,jinja2} dep-j2/
echo collecting pyftpdlib
f="../build/pyftpdlib-1.5.6.tar.gz"
[ -e "$f" ] ||
(url=https://github.com/giampaolo/pyftpdlib/archive/refs/tags/release-1.5.6.tar.gz;
wget -O$f "$url" || curl -L "$url" >$f)
tar -zxf $f
mv pyftpdlib-release-*/pyftpdlib .
rm -rf pyftpdlib-release-* pyftpdlib/test
mkdir dep-ftp/
mv pyftpdlib dep-ftp/
echo collecting asyncore, asynchat
for n in asyncore.py asynchat.py; do
f=../build/$n
[ -e "$f" ] ||
(url=https://raw.githubusercontent.com/python/cpython/c4d45ee670c09d4f6da709df072ec80cb7dfad22/Lib/$n;
wget -O$f "$url" || curl -L "$url" >$f)
done
# msys2 tar is bad, make the best of it
echo collecting source
[ $clean ] && {
@@ -148,6 +159,12 @@ tmpdir="$(
(cd .. && tar -cf tar copyparty) && tar -xf ../tar
}
rm -f ../tar
# insert asynchat
mkdir copyparty/vend
for n in asyncore.py asynchat.py; do
awk 'NR<4||NR>27;NR==4{print"# license: https://opensource.org/licenses/ISC\n"}' ../build/$n >copyparty/vend/$n
done
}
ver=
@@ -218,9 +235,6 @@ cat have | while IFS= read -r x; do
done
rm have
[ $no_ogv ] &&
rm -rf copyparty/web/deps/{dynamicaudio,ogv}*
[ $no_cm ] && {
rm -rf copyparty/web/mde.* copyparty/web/deps/easymde*
echo h > copyparty/web/mde.html
@@ -252,7 +266,7 @@ rm have
find | grep -E '\.py$' |
grep -vE '__version__' |
tr '\n' '\0' |
xargs -0 $pybin ../scripts/uncomment.py
xargs -0 "$pybin" ../scripts/uncomment.py
f=dep-j2/jinja2/constants.py
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
@@ -338,11 +352,18 @@ nf=$(ls -1 "$zdir"/arc.* | wc -l)
echo gen tarlist
for d in copyparty dep-j2; do find $d -type f; done |
for d in copyparty dep-j2 dep-ftp; do find $d -type f; done |
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | shuf) >list || true
for n in {1..50}; do
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | shuf) >list || true
s=$(md5sum list | cut -c-16)
grep -q $s "$zdir/h" && continue
echo $s >> "$zdir/h"
break
done
[ $n -eq 50 ] && exit
echo creating tar
args=(--owner=1000 --group=1000)
@@ -357,41 +378,27 @@ pe=bz2
echo compressing tar
# detect best level; bzip2 -7 is usually better than -9
[ $do_py ] && { for n in {2..9}; do cp tar t.$n; $pc -$n t.$n & done; wait; mv -v $(ls -1S t.*.$pe | tail -n 1) tar.bz2; }
[ $do_sh ] && { for n in {2..9}; do cp tar t.$n; xz -ze$n t.$n & done; wait; mv -v $(ls -1S t.*.xz | tail -n 1) tar.xz; }
for n in {2..9}; do cp tar t.$n; $pc -$n t.$n & done; wait; mv -v $(ls -1S t.*.$pe | tail -n 1) tar.bz2
rm t.* || true
exts=()
[ $do_sh ] && {
exts+=(.sh)
echo creating unix sfx
(
sed "s/PACK_TS/$ts/; s/PACK_HTS/$hts/; s/CPP_VER/$ver/" <../scripts/sfx.sh |
grep -E '^sfx_eof$' -B 9001;
cat tar.xz
) >$sfx_out.sh
echo creating sfx
py=../scripts/sfx.py
suf=
[ $use_gz ] && {
sed -r 's/"r:bz2"/"r:gz"/' <$py >$py.t
py=$py.t
suf=-gz
}
"$pybin" $py --sfx-make tar.bz2 $ver $ts
mv sfx.out $sfx_out$suf.py
[ $do_py ] && {
echo creating generic sfx
py=../scripts/sfx.py
suf=
[ $use_gz ] && {
sed -r 's/"r:bz2"/"r:gz"/' <$py >$py.t
py=$py.t
suf=-gz
}
$pybin $py --sfx-make tar.bz2 $ver $ts
mv sfx.out $sfx_out$suf.py
exts+=($suf.py)
[ $use_gz ] &&
rm $py
}
exts+=($suf.py)
[ $use_gz ] &&
rm $py
chmod 755 $sfx_out*
@@ -402,4 +409,4 @@ for ext in ${exts[@]}; do
done
# apk add bash python3 tar xz bzip2
# while true; do ./make-sfx.sh; for f in ..//dist/copyparty-sfx.{sh,py}; do mv $f $f.$(wc -c <$f | awk '{print$1}'); done; done
# while true; do ./make-sfx.sh; f=../dist/copyparty-sfx.py; mv $f $f.$(wc -c <$f | awk '{print$1}'); done
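
The "detect best level; bzip2 -7 is usually better than -9" loop a few hunks up compresses the tar at every level and keeps the smallest result; the same trick sketched in python (best_bz2 is a hypothetical helper, not part of the build):

import bz2

def best_bz2(data):
    # compress with every level 2..9 and keep whichever output is smallest
    blobs = {lvl: bz2.compress(data, lvl) for lvl in range(2, 10)}
    lvl = min(blobs, key=lambda k: len(blobs[k]))
    return lvl, blobs[lvl]

lvl, blob = best_bz2(b"example payload " * 4096)
print("best level:", lvl, "size:", len(blob))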


@@ -35,8 +35,6 @@ ver="$1"
exit 1
}
mv copyparty/web/deps/marked.full.js.gz srv/ || true
mkdir -p dist
zip_path="$(pwd)/dist/copyparty-$ver.zip"
tgz_path="$(pwd)/dist/copyparty-$ver.tar.gz"


@@ -4,33 +4,31 @@ set -e
cd ~/dev/copyparty/scripts
v=$1
printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1
grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1
git tag v$v
git push origin --tags
[ "$v" = sfx ] || {
printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1
grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1
rm -rf ../dist
git push all
git tag v$v
git push all --tags
./make-pypi-release.sh u
(cd .. && python3 ./setup.py clean2)
rm -rf ../dist
./make-tgz-release.sh $v
./make-pypi-release.sh u
(cd .. && python3 ./setup.py clean2)
./make-tgz-release.sh $v
}
rm -f ../dist/copyparty-sfx.*
./make-sfx.sh no-sh
../dist/copyparty-sfx.py -h
f=../dist/copyparty-sfx.py
./make-sfx.sh
$f -h
ar=
while true; do
for ((a=0; a<100; a++)); do
for f in ../dist/copyparty-sfx.{py,sh}; do
[ -e $f ] || continue;
mv $f $f.$(wc -c <$f | awk '{print$1}')
done
./make-sfx.sh re $ar
done
ar=no-sh
mv $f $f.$(wc -c <$f | awk '{print$1}')
./make-sfx.sh re $ar
done
# git tag -d v$v; git push --delete origin v$v


@@ -11,6 +11,7 @@ copyparty/broker_mp.py,
copyparty/broker_mpw.py,
copyparty/broker_thr.py,
copyparty/broker_util.py,
copyparty/ftpd.py,
copyparty/httpcli.py,
copyparty/httpconn.py,
copyparty/httpsrv.py,
@@ -31,6 +32,9 @@ copyparty/th_srv.py,
copyparty/u2idx.py,
copyparty/up2k.py,
copyparty/util.py,
copyparty/vend,
copyparty/vend/asynchat.py,
copyparty/vend/asyncore.py,
copyparty/web,
copyparty/web/baguettebox.js,
copyparty/web/browser.css,
@@ -49,14 +53,6 @@ copyparty/web/deps/easymde.js,
copyparty/web/deps/marked.js,
copyparty/web/deps/mini-fa.css,
copyparty/web/deps/mini-fa.woff,
copyparty/web/deps/ogv-decoder-audio-opus-wasm.js,
copyparty/web/deps/ogv-decoder-audio-opus-wasm.wasm,
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.js,
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.wasm,
copyparty/web/deps/ogv-demuxer-ogg-wasm.js,
copyparty/web/deps/ogv-demuxer-ogg-wasm.wasm,
copyparty/web/deps/ogv-worker-audio.js,
copyparty/web/deps/ogv.js,
copyparty/web/deps/prism.js,
copyparty/web/deps/prism.css,
copyparty/web/deps/prismd.css,


@@ -1,10 +1,10 @@
#!/usr/bin/env python3
# coding: latin-1
from __future__ import print_function, unicode_literals
import re, os, sys, time, shutil, signal, threading, tarfile, hashlib, platform, tempfile, traceback
import subprocess as sp
"""
to edit this file, use HxD or "vim -b"
(there is compressed stuff at the end)
@@ -20,6 +20,7 @@ the archive data is attached after the b"\n# eof\n" archive marker,
b"\n# " decodes to b""
"""
# set by make-sfx.sh
VER = None
SIZE = None
@@ -341,14 +342,15 @@ def get_payload():
def utime(top):
# avoid cleaners
i = 0
files = [os.path.join(dp, p) for dp, dd, df in os.walk(top) for p in dd + df]
while WINDOWS:
while WINDOWS or os.path.exists("/etc/systemd"):
t = int(time.time())
if i:
msg("utime {}, {}".format(i, t))
for f in files:
for f in [top] + files:
os.utime(f, (t, t))
i += 1
@@ -367,28 +369,18 @@ def confirm(rv):
sys.exit(rv or 1)
def run(tmp, j2):
def run(tmp, j2, ftp):
msg("jinja2:", j2 or "bundled")
msg("pyftpd:", ftp or "bundled")
msg("sfxdir:", tmp)
msg()
# block systemd-tmpfiles-clean.timer
try:
import fcntl
fd = os.open(tmp, os.O_RDONLY)
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
except Exception as ex:
if not WINDOWS:
msg("\033[31mflock:{!r}\033[0m".format(ex))
t = threading.Thread(target=utime, args=(tmp,))
t.daemon = True
t.start()
ld = [tmp, os.path.join(tmp, "dep-j2")]
if j2:
del ld[-1]
ld = (("", ""), (j2, "dep-j2"), (ftp, "dep-ftp"))
ld = [os.path.join(tmp, b) for a, b in ld if not a]
if any([re.match(r"^-.*j[0-9]", x) for x in sys.argv]):
run_s(ld)
@@ -461,7 +453,12 @@ def main():
j2 = None
try:
run(tmp, j2)
from pyftpdlib.__init__ import __ver__ as ftp
except:
ftp = None
try:
run(tmp, j2, ftp)
except SystemExit as ex:
c = ex.code
if c not in [0, -15]:
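
The utime() changes above exist to keep the extracted tempdir alive: besides the flock, a background thread keeps bumping mtimes (now including the directory itself) whenever running on windows or a systemd host, so systemd-tmpfiles-clean.timer never considers it stale. A minimal sketch of that keepalive, with an illustrative path and interval (the real script derives both itself):

import os
import threading
import time

def keep_alive(top, interval=3600):
    files = [os.path.join(dp, p) for dp, dd, df in os.walk(top) for p in dd + df]
    while True:
        now = int(time.time())
        for path in [top] + files:
            try:
                os.utime(path, (now, now))  # refresh mtime so cleaners skip it
            except OSError:
                pass  # a file may have vanished; keep touching the rest
        time.sleep(interval)

# threading.Thread(target=keep_alive, args=("/tmp/pe-copyparty",), daemon=True).start()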


@@ -112,7 +112,13 @@ args = {
"data_files": data_files,
"packages": find_packages(),
"install_requires": ["jinja2"],
"extras_require": {"thumbnails": ["Pillow"], "audiotags": ["mutagen"]},
"extras_require": {
"thumbnails": ["Pillow"],
"thumbnails2": ["pyvips"],
"audiotags": ["mutagen"],
"ftpd": ["pyftpdlib"],
"ftps": ["pyftpdlib", "pyopenssl"],
},
"entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]},
"scripts": ["bin/copyparty-fuse.py", "bin/up2k.py"],
"cmdclass": {"clean2": clean2},


@@ -29,6 +29,9 @@ class Cfg(Namespace):
v=v or [],
c=c,
rproxy=0,
rsp_slp=0,
s_wr_slp=0,
s_wr_sz=512 * 1024,
ed=False,
nw=False,
unpost=600,
@@ -48,9 +51,13 @@ class Cfg(Namespace):
mte="a",
mth="",
textfiles="",
doctitle="",
html_head="",
hist=None,
no_idx=None,
no_hash=None,
force_js=False,
no_robots=False,
js_browser=None,
css_browser=None,
**{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr no_acode".split()}


@@ -17,12 +17,14 @@ from copyparty import util
class Cfg(Namespace):
def __init__(self, a=None, v=None, c=None):
ex = "nw e2d e2ds e2dsa e2t e2ts e2tsr no_logues no_readme no_acode"
ex = "nw e2d e2ds e2dsa e2t e2ts e2tsr no_logues no_readme no_acode force_js no_robots"
ex = {k: False for k in ex.split()}
ex2 = {
"mtp": [],
"mte": "a",
"mth": "",
"doctitle": "",
"html_head": "",
"hist": None,
"no_idx": None,
"no_hash": None,
@@ -31,6 +33,9 @@ class Cfg(Namespace):
"no_voldump": True,
"re_maxage": 0,
"rproxy": 0,
"rsp_slp": 0,
"s_wr_slp": 0,
"s_wr_sz": 512 * 1024,
}
ex.update(ex2)
super(Cfg, self).__init__(a=a or [], v=v or [], c=c, **ex)


@@ -109,6 +109,9 @@ class VSock(object):
self._reply += buf
return len(buf)
def getsockname(self):
return ("a", 1)
class VHttpSrv(object):
def __init__(self):