mirror of
				https://github.com/9001/copyparty.git
				synced 2025-11-02 04:53:15 +00:00 
			
		
		
		
	Compare commits
	
		
			975 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | a996a09bba | ||
|  | 18c763ac08 | ||
|  | 3d9fb753ba | ||
|  | 714fd1811a | ||
|  | 4364581705 | ||
|  | ba02c9cc12 | ||
|  | 11eefaf968 | ||
|  | 5a968f9e47 | ||
|  | 6420c4bd03 | ||
|  | 0f9877201b | ||
|  | 9ba2dec9b2 | ||
|  | ae9cfea939 | ||
|  | cadaeeeace | ||
|  | 767696185b | ||
|  | c1efd227b7 | ||
|  | a50d0563c3 | ||
|  | e5641ddd16 | ||
|  | 700111ffeb | ||
|  | b8adeb824a | ||
|  | 30cc9defcb | ||
|  | 61875bd773 | ||
|  | 30905c6f5d | ||
|  | 9986136dfb | ||
|  | 1c0d978979 | ||
|  | 0a0364e9f8 | ||
|  | 3376fbde1a | ||
|  | ac21fa7782 | ||
|  | c1c8dc5e82 | ||
|  | 5a38311481 | ||
|  | 9f8edb7f32 | ||
|  | c5a6ac8417 | ||
|  | 50e01d6904 | ||
|  | 9b46291a20 | ||
|  | 14497b2425 | ||
|  | f7ceae5a5f | ||
|  | c9492d16ba | ||
|  | 9fb9ada3aa | ||
|  | db0abbfdda | ||
|  | e7f0009e57 | ||
|  | 4444f0f6ff | ||
|  | 418842d2d3 | ||
|  | cafe53c055 | ||
|  | 7673beef72 | ||
|  | b28bfe64c0 | ||
|  | 135ece3fbd | ||
|  | bd3640d256 | ||
|  | fc0405c8f3 | ||
|  | 7df890d964 | ||
|  | 8341041857 | ||
|  | 1b7634932d | ||
|  | 48a3898aa6 | ||
|  | 5d13ebb4ac | ||
|  | 015b87ee99 | ||
|  | 0a48acf6be | ||
|  | 2b6a3afd38 | ||
|  | 18aa82fb2f | ||
|  | f5407b2997 | ||
|  | 474d5a155b | ||
|  | afcd98b794 | ||
|  | 4f80e44ff7 | ||
|  | 406e413594 | ||
|  | 033b50ae1b | ||
|  | bee26e853b | ||
|  | 04a1f7040e | ||
|  | f9d5bb3b29 | ||
|  | ca0cd04085 | ||
|  | 999ee2e7bc | ||
|  | 1ff7f968e8 | ||
|  | 3966266207 | ||
|  | d03e96a392 | ||
|  | 4c843c6df9 | ||
|  | 0896c5295c | ||
|  | cc0c9839eb | ||
|  | d0aa20e17c | ||
|  | 1a658dedb7 | ||
|  | 8d376b854c | ||
|  | 490c16b01d | ||
|  | 2437a4e864 | ||
|  | 007d948cb9 | ||
|  | 335fcc8535 | ||
|  | 9eaa9904e0 | ||
|  | 0778da6c4d | ||
|  | a1bb10012d | ||
|  | 1441ccee4f | ||
|  | 491803d8b7 | ||
|  | 3dcc386b6f | ||
|  | 5aa54d1217 | ||
|  | 88b876027c | ||
|  | fcc3aa98fd | ||
|  | f2f5e266b4 | ||
|  | e17bf8f325 | ||
|  | d19cb32bf3 | ||
|  | 85a637af09 | ||
|  | 043e3c7dd6 | ||
|  | 8f59afb159 | ||
|  | 77f1e51444 | ||
|  | 22fc4bb938 | ||
|  | 50c7bba6ea | ||
|  | 551d99b71b | ||
|  | b54b7213a7 | ||
|  | a14943c8de | ||
|  | a10cad54fc | ||
|  | 8568b7702a | ||
|  | 5d8cb34885 | ||
|  | 8d248333e8 | ||
|  | 99e2ef7f33 | ||
|  | e767230383 | ||
|  | 90601314d6 | ||
|  | 9c5eac1274 | ||
|  | 50905439e4 | ||
|  | a0c1239246 | ||
|  | b8e851c332 | ||
|  | baaf2eb24d | ||
|  | e197895c10 | ||
|  | cb75efa05d | ||
|  | 8b0cf2c982 | ||
|  | fc7d9e1f9c | ||
|  | 10caafa34c | ||
|  | 22cc22225a | ||
|  | 22dff4b0e5 | ||
|  | a00ff2b086 | ||
|  | e4acddc23b | ||
|  | 2b2d8e4e02 | ||
|  | 5501d49032 | ||
|  | fa54b2eec4 | ||
|  | cb0160021f | ||
|  | 93a723d588 | ||
|  | 8ebe1fb5e8 | ||
|  | 2acdf685b1 | ||
|  | 9f122ccd16 | ||
|  | 03be26fafc | ||
|  | df5d309d6e | ||
|  | c355f9bd91 | ||
|  | 9c28ba417e | ||
|  | 705b58c741 | ||
|  | 510302d667 | ||
|  | 025a537413 | ||
|  | 60a1ff0fc0 | ||
|  | f94a0b1bff | ||
|  | 4ccfeeb2cd | ||
|  | 2646f6a4f2 | ||
|  | b286ab539e | ||
|  | 2cca6e0922 | ||
|  | db51f1b063 | ||
|  | d979c47f50 | ||
|  | e64b87b99b | ||
|  | b985011a00 | ||
|  | c2ed2314c8 | ||
|  | cd496658c3 | ||
|  | deca082623 | ||
|  | 0ea8bb7c83 | ||
|  | 1fb251a4c2 | ||
|  | 4295923b76 | ||
|  | 572aa4b26c | ||
|  | b1359f039f | ||
|  | 867d8ee49e | ||
|  | 04c86e8a89 | ||
|  | bc0cb43ef9 | ||
|  | 769454fdce | ||
|  | 4ee81af8f6 | ||
|  | 8b0e66122f | ||
|  | 8a98efb929 | ||
|  | b6fd555038 | ||
|  | 7eb413ad51 | ||
|  | 4421d509eb | ||
|  | 793ffd7b01 | ||
|  | 1e22222c60 | ||
|  | 544e0549bc | ||
|  | 83178d0836 | ||
|  | c44f5f5701 | ||
|  | 138f5bc989 | ||
|  | e4759f86ef | ||
|  | d71416437a | ||
|  | a84c583b2c | ||
|  | cdacdccdb8 | ||
|  | d3ccd3f174 | ||
|  | cb6de0387d | ||
|  | abff40519d | ||
|  | 55c74ad164 | ||
|  | 673b4f7e23 | ||
|  | d11e02da49 | ||
|  | 8790f89e08 | ||
|  | 33442026b8 | ||
|  | 03193de6d0 | ||
|  | 8675ff40f3 | ||
|  | d88889d3fc | ||
|  | 6f244d4335 | ||
|  | cacca663b3 | ||
|  | d5109be559 | ||
|  | d999f06bb9 | ||
|  | a1a8a8c7b5 | ||
|  | fdd6f3b4a6 | ||
|  | f5191973df | ||
|  | ddbaebe779 | ||
|  | 42099baeff | ||
|  | 2459965ca8 | ||
|  | 6acf436573 | ||
|  | f217e1ce71 | ||
|  | 418000aee3 | ||
|  | dbbba9625b | ||
|  | 397bc92fbc | ||
|  | 6e615dcd03 | ||
|  | 9ac5908b33 | ||
|  | 50912480b9 | ||
|  | 24b9b8319d | ||
|  | b0f4f0b653 | ||
|  | 05bbd41c4b | ||
|  | 8f5f8a3cda | ||
|  | c8938fc033 | ||
|  | 1550350e05 | ||
|  | 5cc190c026 | ||
|  | d6a0a738ce | ||
|  | f5fe3678ee | ||
|  | f2a7925387 | ||
|  | fa953ced52 | ||
|  | f0000d9861 | ||
|  | 4e67516719 | ||
|  | 29db7a6270 | ||
|  | 852499e296 | ||
|  | f1775fd51c | ||
|  | 4bb306932a | ||
|  | 2a37e81bd8 | ||
|  | 6a312ca856 | ||
|  | e7f3e475a2 | ||
|  | 854ba0ec06 | ||
|  | 209b49d771 | ||
|  | 949baae539 | ||
|  | 5f4ea27586 | ||
|  | 099cc97247 | ||
|  | 592b7d6315 | ||
|  | 0880bf55a1 | ||
|  | 4cbffec0ec | ||
|  | cc355417d4 | ||
|  | e2bc573e61 | ||
|  | 41c0376177 | ||
|  | c01cad091e | ||
|  | eb349f339c | ||
|  | 24d8caaf3e | ||
|  | 5ac2c20959 | ||
|  | bb72e6bf30 | ||
|  | d8142e866a | ||
|  | 7b7979fd61 | ||
|  | 749616d09d | ||
|  | 5485c6d7ca | ||
|  | b7aea38d77 | ||
|  | 0ecd9f99e6 | ||
|  | ca04a00662 | ||
|  | 8a09601be8 | ||
|  | 1fe0d4693e | ||
|  | bba8a3c6bc | ||
|  | e3d7f0c7d5 | ||
|  | be7bb71bbc | ||
|  | e0c4829ec6 | ||
|  | 5af1575329 | ||
|  | 884f966b86 | ||
|  | f6c6fbc223 | ||
|  | b0cc396bca | ||
|  | ae463518f6 | ||
|  | 2be2e9a0d8 | ||
|  | e405fddf74 | ||
|  | c269b0dd91 | ||
|  | 8c3211263a | ||
|  | bf04e7c089 | ||
|  | c7c6e48b1a | ||
|  | 974ca773be | ||
|  | 9270c2df19 | ||
|  | b39ff92f34 | ||
|  | 7454167f78 | ||
|  | 5ceb3a962f | ||
|  | 52bd5642da | ||
|  | c39c93725f | ||
|  | d00f0b9fa7 | ||
|  | 01cfc70982 | ||
|  | e6aec189bd | ||
|  | c98fff1647 | ||
|  | 0009e31bd3 | ||
|  | db95e880b2 | ||
|  | e69fea4a59 | ||
|  | 4360800a6e | ||
|  | b179e2b031 | ||
|  | ecdec75b4e | ||
|  | 5cb2e33353 | ||
|  | 43ff2e531a | ||
|  | 1c2c9db8f0 | ||
|  | 7ea183baef | ||
|  | ab87fac6d8 | ||
|  | 1e3b7eee3b | ||
|  | 4de028fc3b | ||
|  | 604e5dfaaf | ||
|  | 05e0c2ec9e | ||
|  | 76bd005bdc | ||
|  | 5effaed352 | ||
|  | cedaf4809f | ||
|  | 6deaf5c268 | ||
|  | 9dc6a26472 | ||
|  | 14ad5916fc | ||
|  | 1a46738649 | ||
|  | 9e5e3b099a | ||
|  | 292ce75cc2 | ||
|  | ce7df7afd4 | ||
|  | e28e793f81 | ||
|  | 3e561976db | ||
|  | 273a4eb7d0 | ||
|  | 6175f85bb6 | ||
|  | a80579f63a | ||
|  | 96d6bcf26e | ||
|  | 49e8df25ac | ||
|  | 6a05850f21 | ||
|  | 5e7c3defe3 | ||
|  | 6c0987d4d0 | ||
|  | 6eba9feffe | ||
|  | 8adfcf5950 | ||
|  | 36d6fa512a | ||
|  | 79b6e9b393 | ||
|  | dc2e2cbd4b | ||
|  | 5c12dac30f | ||
|  | 641929191e | ||
|  | 617321631a | ||
|  | ddc0c899f8 | ||
|  | cdec42c1ae | ||
|  | c48f469e39 | ||
|  | 44909cc7b8 | ||
|  | 8f61e1568c | ||
|  | b7be7a0fd8 | ||
|  | 1526a4e084 | ||
|  | dbdb9574b1 | ||
|  | 853ae6386c | ||
|  | a4b56c74c7 | ||
|  | d7f1951e44 | ||
|  | 7e2ff9825e | ||
|  | 9b423396ec | ||
|  | 781146b2fb | ||
|  | 84937d1ce0 | ||
|  | 98cce66aa4 | ||
|  | 043c2d4858 | ||
|  | 99cc434779 | ||
|  | 5095d17e81 | ||
|  | 87d835ae37 | ||
|  | 6939ca768b | ||
|  | e3957e8239 | ||
|  | 4ad6e45216 | ||
|  | 76e5eeea3f | ||
|  | eb17f57761 | ||
|  | b0db14d8b0 | ||
|  | 2b644fa81b | ||
|  | 190ccee820 | ||
|  | 4e7dd32e78 | ||
|  | 5817fb66ae | ||
|  | 9cb04eef93 | ||
|  | 0019fe7f04 | ||
|  | 852c6f2de1 | ||
|  | c4191de2e7 | ||
|  | 4de61defc9 | ||
|  | 0aa88590d0 | ||
|  | 405f3ee5fe | ||
|  | bc339f774a | ||
|  | e67b695b23 | ||
|  | 4a7633ab99 | ||
|  | c58f2ef61f | ||
|  | 3866e6a3f2 | ||
|  | 381686fc66 | ||
|  | a918c285bf | ||
|  | 1e20eafbe0 | ||
|  | 39399934ee | ||
|  | b47635150a | ||
|  | 78d2f69ed5 | ||
|  | 7a98dc669e | ||
|  | 2f15bb5085 | ||
|  | 712a578e6c | ||
|  | d8dfc4ccb2 | ||
|  | e413007eb0 | ||
|  | 6d1d3e48d8 | ||
|  | 04966164ce | ||
|  | 8b62aa7cc7 | ||
|  | 1088e8c6a5 | ||
|  | 8c54c2226f | ||
|  | f74ac1f18b | ||
|  | 25931e62fd | ||
|  | 707a940399 | ||
|  | 87ef50d384 | ||
|  | dcadf2b11c | ||
|  | 37a690a4c3 | ||
|  | 87ad23fb93 | ||
|  | 5f54d534e3 | ||
|  | aecae552a4 | ||
|  | eaa6b3d0be | ||
|  | c2ace91e52 | ||
|  | 0bac87c36f | ||
|  | e650d05939 | ||
|  | 85a96e4446 | ||
|  | 2569005139 | ||
|  | c50cb66aef | ||
|  | d4c5fca15b | ||
|  | 75cea4f684 | ||
|  | 68c6794d33 | ||
|  | 82f98dd54d | ||
|  | 741d781c18 | ||
|  | 0be1e43451 | ||
|  | 5366bf22bb | ||
|  | bcd91b1809 | ||
|  | 9bd5738e6f | ||
|  | bab4aa4c0a | ||
|  | e965b9b9e2 | ||
|  | 31101427d3 | ||
|  | a083dc36ba | ||
|  | 9b7b9262aa | ||
|  | 660011fa6e | ||
|  | ead31b6823 | ||
|  | 4310580cd4 | ||
|  | b005acbfda | ||
|  | 460709e6f3 | ||
|  | a8768d05a9 | ||
|  | f8e3e87a52 | ||
|  | 70f1642d0d | ||
|  | 3fc7561da4 | ||
|  | 9065226c3d | ||
|  | b7e321fa47 | ||
|  | 664665b86b | ||
|  | f4f362b7a4 | ||
|  | 577d23f460 | ||
|  | 504e168486 | ||
|  | f2f9640371 | ||
|  | ee46f832b1 | ||
|  | b0e755d410 | ||
|  | cfd24604d5 | ||
|  | 264894e595 | ||
|  | 5bb9f56247 | ||
|  | 18942ed066 | ||
|  | 85321a6f31 | ||
|  | baf641396d | ||
|  | 17c91e7014 | ||
|  | 010770684d | ||
|  | b4c503657b | ||
|  | 71bd306268 | ||
|  | dd7fab1352 | ||
|  | dacca18863 | ||
|  | 53d92cc0a6 | ||
|  | 434823f6f0 | ||
|  | 2cb1f50370 | ||
|  | 03f53f6392 | ||
|  | a70ecd7af0 | ||
|  | 8b81e58205 | ||
|  | 4500c04edf | ||
|  | 6222ddd720 | ||
|  | 8a7135cf41 | ||
|  | b4c7282956 | ||
|  | 8491a40a04 | ||
|  | 343d38b693 | ||
|  | 6cf53d7364 | ||
|  | b070d44de7 | ||
|  | 79aa40fdea | ||
|  | dcaff2785f | ||
|  | 497f5b4307 | ||
|  | be32ad0da6 | ||
|  | 8ee2bf810b | ||
|  | 28232656a9 | ||
|  | fbc2424e8f | ||
|  | 94cd13e8b8 | ||
|  | 447ed5ab37 | ||
|  | af59808611 | ||
|  | e3406a9f86 | ||
|  | 7fd1d6a4e8 | ||
|  | 0ab2a665de | ||
|  | 3895575bc2 | ||
|  | 138c2bbcbb | ||
|  | bc7af1d1c8 | ||
|  | 19cd96e392 | ||
|  | db194ab519 | ||
|  | 02ad4bfab2 | ||
|  | 56b73dcc8a | ||
|  | 7704b9c8a2 | ||
|  | 999b7ae919 | ||
|  | 252b5a88b1 | ||
|  | 01e2681a07 | ||
|  | aa32f30202 | ||
|  | 195eb53995 | ||
|  | 06fa78f54a | ||
|  | 7a57c9dbf1 | ||
|  | bb657bfa85 | ||
|  | 87181726b0 | ||
|  | f1477a1c14 | ||
|  | 4f94a9e38b | ||
|  | fbed322d3b | ||
|  | 9b0f519e4e | ||
|  | 6cd6dadd06 | ||
|  | 9a28afcb48 | ||
|  | 45b701801d | ||
|  | 062246fb12 | ||
|  | 416ebfdd68 | ||
|  | 731eb92f33 | ||
|  | dbe2aec79c | ||
|  | cd9cafe3a1 | ||
|  | 067cc23346 | ||
|  | c573a780e9 | ||
|  | 8ef4a0aa71 | ||
|  | 89ba12065c | ||
|  | 99efc290df | ||
|  | 2fbdc0a85e | ||
|  | 4242422898 | ||
|  | 008d9b1834 | ||
|  | 7c76d08958 | ||
|  | 89c9f45fd0 | ||
|  | f107497a94 | ||
|  | b5dcf30e53 | ||
|  | 0cef062084 | ||
|  | 5c30148be4 | ||
|  | 3a800585bc | ||
|  | 29c212a60e | ||
|  | 2997baa7cb | ||
|  | dc6bde594d | ||
|  | e357aa546c | ||
|  | d3fe19c5aa | ||
|  | bd24bf9bae | ||
|  | ee141544aa | ||
|  | db6f6e6a23 | ||
|  | c7d950dd5e | ||
|  | 6a96c62fde | ||
|  | 36dc8cd686 | ||
|  | 7622601a77 | ||
|  | cfd41fcf41 | ||
|  | f39e370e2a | ||
|  | c1315a3b39 | ||
|  | 53b32f97e8 | ||
|  | 6c962ec7d3 | ||
|  | 6bc1bc542f | ||
|  | f0e78a6826 | ||
|  | e53531a9fb | ||
|  | 5cd9d11329 | ||
|  | 5a3e504ec4 | ||
|  | d6e09c3880 | ||
|  | 04f44c3c7c | ||
|  | ec587423e8 | ||
|  | f57b31146d | ||
|  | 35175fd685 | ||
|  | d326ba9723 | ||
|  | ab655a56af | ||
|  | d1eb113ea8 | ||
|  | 74effa9b8d | ||
|  | bba4b1c663 | ||
|  | 8709d4dba0 | ||
|  | 4ad4657774 | ||
|  | 5abe0c955c | ||
|  | 0cedaf4fa9 | ||
|  | 0aa7d12704 | ||
|  | a234aa1f7e | ||
|  | 9f68287846 | ||
|  | cd2513ec16 | ||
|  | 91d132c2b4 | ||
|  | 97ff0ebd06 | ||
|  | 8829f56d4c | ||
|  | 37c1cab726 | ||
|  | b3eb117e87 | ||
|  | fc0a941508 | ||
|  | c72753c5da | ||
|  | e442cb677a | ||
|  | 450121eac9 | ||
|  | b2ab8f971e | ||
|  | e9c6268568 | ||
|  | 2170ee8da4 | ||
|  | 357e7333cc | ||
|  | 8bb4f02601 | ||
|  | 4213efc7a6 | ||
|  | 67a744c3e8 | ||
|  | 98818e7d63 | ||
|  | 8650ce1295 | ||
|  | 9638267b4c | ||
|  | 304e053155 | ||
|  | 89d1f52235 | ||
|  | 3312c6f5bd | ||
|  | d4ba644d07 | ||
|  | b9a504fd3a | ||
|  | cebac523dc | ||
|  | c2f4090318 | ||
|  | d562956809 | ||
|  | 62499f9b71 | ||
|  | 89cf7608f9 | ||
|  | dd26b8f183 | ||
|  | 79303dac6d | ||
|  | 4203fc161b | ||
|  | f8a31cc24f | ||
|  | fc5bfe81a0 | ||
|  | aae14de796 | ||
|  | 54e1c8d261 | ||
|  | a0cc4ca4b7 | ||
|  | 2701108c5b | ||
|  | 73bd2df2c6 | ||
|  | 0063021012 | ||
|  | 1c3e4750b3 | ||
|  | edad3246e0 | ||
|  | 3411b0993f | ||
|  | 097b5609dc | ||
|  | a42af7655e | ||
|  | 69f78b86af | ||
|  | 5f60c509c6 | ||
|  | 75e5e53276 | ||
|  | 4b2b4ed52d | ||
|  | fb21bfd6d6 | ||
|  | f14369e038 | ||
|  | ff04b72f62 | ||
|  | 4535a81617 | ||
|  | cce57b700b | ||
|  | 5b6194d131 | ||
|  | 2701238cea | ||
|  | 835f8a20e6 | ||
|  | f3a501db30 | ||
|  | 4bcd30da6b | ||
|  | 947dbb6f8a | ||
|  | 1c2fedd2bf | ||
|  | 32e826efbc | ||
|  | 138b932c6a | ||
|  | 6da2f53aad | ||
|  | 20eeacaac3 | ||
|  | 81d896be9f | ||
|  | c003dfab03 | ||
|  | 20c6b82bec | ||
|  | 046b494b53 | ||
|  | f0e98d6e0d | ||
|  | fe57321853 | ||
|  | 8510804e57 | ||
|  | acd32abac5 | ||
|  | 2b47c96cf2 | ||
|  | 1027378bda | ||
|  | e979d30659 | ||
|  | 574db704cc | ||
|  | fdb969ea89 | ||
|  | 08977854b3 | ||
|  | cecac64b68 | ||
|  | 7dabdade2a | ||
|  | e788f098e2 | ||
|  | 69406d4344 | ||
|  | d16dd26c65 | ||
|  | 12219c1bea | ||
|  | 118bdcc26e | ||
|  | 78fa96f0f4 | ||
|  | c7deb63a04 | ||
|  | 4f811eb9e9 | ||
|  | 0b265bd673 | ||
|  | ee67fabbeb | ||
|  | b213de7e62 | ||
|  | 7c01505750 | ||
|  | ae28dfd020 | ||
|  | 2a5a4e785f | ||
|  | d8bddede6a | ||
|  | b8a93e74bf | ||
|  | e60ec94d35 | ||
|  | 84af5fd0a3 | ||
|  | dbb3edec77 | ||
|  | d284b46a3e | ||
|  | 9fcb4d222b | ||
|  | d0bb1ad141 | ||
|  | b299aaed93 | ||
|  | abb3224cc5 | ||
|  | 1c66d06702 | ||
|  | e00e80ae39 | ||
|  | 4f4f106c48 | ||
|  | a286cc9d55 | ||
|  | 53bb1c719b | ||
|  | 98d5aa17e2 | ||
|  | aaaa80e4b8 | ||
|  | e70e926a40 | ||
|  | e80c1f6d59 | ||
|  | 24de360325 | ||
|  | e0039bc1e6 | ||
|  | ae5c4a0109 | ||
|  | 1d367a0da0 | ||
|  | d285f7ee4a | ||
|  | 37c84021a2 | ||
|  | 8ee9de4291 | ||
|  | 249b63453b | ||
|  | 1c0017d763 | ||
|  | df51e23639 | ||
|  | 32e71a43b8 | ||
|  | 47a1e6ddfa | ||
|  | c5f41457bb | ||
|  | f1e0c44bdd | ||
|  | 9d2e390b6a | ||
|  | 75a58b435d | ||
|  | f5474d34ac | ||
|  | c962d2544f | ||
|  | 0b87a4a810 | ||
|  | 1882afb8b6 | ||
|  | 2270c8737a | ||
|  | d6794955a4 | ||
|  | f5520f45ef | ||
|  | 9401b5ae13 | ||
|  | df64a62a03 | ||
|  | 09cea66aa8 | ||
|  | 13cc33e0a5 | ||
|  | ab36c8c9de | ||
|  | f85d4ce82f | ||
|  | 6bec4c28ba | ||
|  | fad1449259 | ||
|  | 86b3b57137 | ||
|  | b235037dd3 | ||
|  | 3108139d51 | ||
|  | 2ae99ecfa0 | ||
|  | e8ab53c270 | ||
|  | 5e9bc1127d | ||
|  | 415e61c3c9 | ||
|  | 5152f37ec8 | ||
|  | 0dbeb010cf | ||
|  | 17c465bed7 | ||
|  | add04478e5 | ||
|  | 6db72d7166 | ||
|  | 868103a9c5 | ||
|  | 0f37718671 | ||
|  | fa1445df86 | ||
|  | a783e7071e | ||
|  | a9919df5af | ||
|  | b0af31ac35 | ||
|  | c4c964a685 | ||
|  | 348ec71398 | ||
|  | a257ccc8b3 | ||
|  | fcc4296040 | ||
|  | 1684d05d49 | ||
|  | 0006f933a2 | ||
|  | 0484f97c9c | ||
|  | e430b2567a | ||
|  | fbc8ee15da | ||
|  | 68a9c05947 | ||
|  | 0a81aba899 | ||
|  | d2ae822e15 | ||
|  | fac4b08526 | ||
|  | 3a7b43c663 | ||
|  | 8fcb2d1554 | ||
|  | 590c763659 | ||
|  | 11d1267f8c | ||
|  | 8f5bae95ce | ||
|  | e6b12ef14c | ||
|  | b65674618b | ||
|  | 20dca2bea5 | ||
|  | 059e93cdcf | ||
|  | 635ab25013 | ||
|  | 995cd10df8 | ||
|  | 50f3820a6d | ||
|  | 617f3ea861 | ||
|  | 788db47b95 | ||
|  | 5fa8aaabb9 | ||
|  | 89d1af7f33 | ||
|  | 799cf27c5d | ||
|  | c930d8f773 | ||
|  | a7f921abb9 | ||
|  | bc6234e032 | ||
|  | 558bfa4e1e | ||
|  | 5d19f23372 | ||
|  | 27f08cdbfa | ||
|  | 993213e2c0 | ||
|  | 49470c05fa | ||
|  | ee0a060b79 | ||
|  | 500e3157b9 | ||
|  | eba86b1d23 | ||
|  | b69a563fc2 | ||
|  | a900c36395 | ||
|  | 1d9b324d3e | ||
|  | 539e7b8efe | ||
|  | 50a477ee47 | ||
|  | 7000123a8b | ||
|  | d48a7d2398 | ||
|  | 389a00ce59 | ||
|  | 7a460de3c2 | ||
|  | 8ea1f4a751 | ||
|  | 1c69ccc6cd | ||
|  | 84b5bbd3b6 | ||
|  | 9ccd327298 | ||
|  | 11df36f3cf | ||
|  | f62dd0e3cc | ||
|  | ad18b6e15e | ||
|  | c00b80ca29 | ||
|  | 92ed4ba3f8 | ||
|  | 7de9775dd9 | ||
|  | 5ce9060e5c | ||
|  | f727d5cb5a | ||
|  | 4735fb1ebb | ||
|  | c7d05cc13d | ||
|  | 51c152ff4a | ||
|  | eeed2a840c | ||
|  | 4aaa111925 | ||
|  | e31248f018 | ||
|  | 8b4cf022f2 | ||
|  | 4e7455268a | ||
|  | 680f8ae814 | ||
|  | 90555a4cea | ||
|  | 56a62db591 | ||
|  | cf51997680 | ||
|  | f05cc18d61 | ||
|  | 5384c2e0f5 | ||
|  | 9bfbf80a0e | ||
|  | f874d7754f | ||
|  | a669f79480 | ||
|  | 1c3894743a | ||
|  | 75cdf17df4 | ||
|  | de7dd1e60a | ||
|  | 0ee574a718 | ||
|  | faac894706 | ||
|  | dac2fad48e | ||
|  | 77f624b01e | ||
|  | e24ffebfc8 | ||
|  | 70d07d1609 | ||
|  | bfb3303d87 | ||
|  | 660705a436 | ||
|  | 74a3f97671 | ||
|  | b3e35bb494 | ||
|  | 76adac7c72 | ||
|  | 5dc75ebb67 | ||
|  | d686ce12b6 | ||
|  | d3c40a423e | ||
|  | 2fb1e6dab8 | ||
|  | 10430b347f | ||
|  | e0e3f6ac3e | ||
|  | c694cbffdc | ||
|  | bdd0e5d771 | ||
|  | aa98e427f0 | ||
|  | daa6f4c94c | ||
|  | 4a76663fb2 | ||
|  | cebda5028a | ||
|  | 3fa377a580 | ||
|  | a11c1005a8 | ||
|  | 4a6aea9328 | ||
|  | 4ca041e93e | ||
|  | 52a866a405 | ||
|  | 8b6bd0e6ac | ||
|  | 780fc4639a | ||
|  | 3692fc9d83 | ||
|  | c2a0b1b4c6 | ||
|  | 21bbdb5419 | ||
|  | aa1c08962c | ||
|  | 8a5d0399dd | ||
|  | f2cd0b0c4a | ||
|  | c2b66bbe73 | ||
|  | 48b957f1d5 | ||
|  | 3683984c8d | ||
|  | a3431512d8 | ||
|  | d832b787e7 | ||
|  | 6f75b02723 | ||
|  | b8241710bd | ||
|  | d638404b6a | ||
|  | 9362ca3ed9 | ||
|  | d1a03c6d17 | ||
|  | c6c31702c2 | ||
|  | bd2d88c96e | ||
|  | 76b1857e4e | ||
|  | 095bd17d10 | ||
|  | 204bfac3fa | ||
|  | ac49b0ca93 | ||
|  | c5b04f6fef | ||
|  | 5c58fda46d | ||
|  | 062730c70c | ||
|  | cade1990ce | ||
|  | 59b6e61816 | ||
|  | daff7ff158 | ||
|  | 0862860961 | ||
|  | 1cb24045a0 | ||
|  | 622358b172 | ||
|  | 7998884a9d | ||
|  | 51ddecd101 | ||
|  | 7a35ab1d1e | ||
|  | 48564ba52a | ||
|  | 49efffd740 | ||
|  | d6ac224c8f | ||
|  | a772b8c3f2 | ||
|  | b580953dcd | ||
|  | d86653c763 | ||
|  | dded4fca76 | ||
|  | 36365ffa6b | ||
|  | 0f9aeeaa27 | ||
|  | d8ebcd0ef7 | ||
|  | 6e445487b1 | ||
|  | 6605e461c7 | ||
|  | 40ce4e2275 | ||
|  | 8fef9e363e | ||
|  | 4792c2770d | ||
|  | 87bb49da36 | ||
|  | 1c0071d9ce | ||
|  | efded35c2e | ||
|  | 1d74240b9a | ||
|  | 098184ff7b | ||
|  | 4083533916 | ||
|  | feb1acd43a | ||
|  | a9591db734 | ||
|  | 9ebf148cbe | ||
|  | a473e5e19a | ||
|  | 5d3034c231 | ||
|  | c3a895af64 | ||
|  | cea5aecbf2 | ||
|  | 0e61e70670 | ||
|  | 1e333c0939 | ||
|  | 917b6ec03c | ||
|  | fe67c52ead | ||
|  | 909c7bee3e | ||
|  | 27ca54d138 | ||
|  | 2147c3a646 | ||
|  | a99120116f | ||
|  | 802efeaff2 | ||
|  | 9ad3af1ef6 | ||
|  | 715727b811 | ||
|  | c6eaa7b836 | ||
|  | c2fceea2a5 | ||
|  | 190e11f7ea | ||
|  | ad7413a5ff | ||
|  | 903b9e627a | ||
|  | c5c1e96cf8 | ||
|  | 62fbb04c9d | ||
|  | 728dc62d0b | ||
|  | 2dfe1b1c6b | ||
|  | 35d4a1a6af | ||
|  | eb3fa5aa6b | ||
|  | 438384425a | ||
|  | 0b6f102436 | ||
|  | c9b7ec72d8 | ||
|  | 256c7f1789 | ||
|  | 4e5a323c62 | ||
|  | f4a3bbd237 | ||
|  | fe73f2d579 | ||
|  | f79fcc7073 | ||
|  | 4c4b3790c7 | ||
|  | bd60b464bb | ||
|  | 6bce852765 | ||
|  | 3b19a5a59d | ||
|  | f024583011 | ||
|  | 1111baacb2 | ||
|  | 1b9c913efb | ||
|  | 3524c36e1b | ||
|  | cf87cea9f8 | ||
|  | bfa34404b8 | ||
|  | 0aba5f35bf | ||
|  | 663bc0842a | ||
|  | 7d10c96e73 | ||
|  | 6b2720fab0 | ||
|  | e74ad5132a | ||
|  | 1f6f89c1fd | ||
|  | 4d55e60980 | ||
|  | ddaaccd5af | ||
|  | c20b7dac3d | ||
|  | 1f779d5094 | ||
|  | 715401ca8e | ||
|  | e7cd922d8b | ||
|  | 187feee0c1 | ||
|  | 49e962a7dc | ||
|  | 633ff601e5 | ||
|  | 331cf37054 | ||
|  | 23e4b9002f | ||
|  | c0de3c8053 | ||
|  | a82a3b084a | ||
|  | 67c298e66b | ||
|  | c110ccb9ae | ||
|  | 0143380306 | ||
|  | af9000d3c8 | ||
|  | 097d798e5e | ||
|  | 1d9f9f221a | ||
|  | 214a367f48 | ||
|  | 2fb46551a2 | ||
|  | 6bcf330ae0 | ||
|  | 2075a8b18c | ||
|  | 1275ac6c42 | ||
|  | 708f20b7af | ||
|  | a2c0c708e8 | ||
|  | 2f2c65d91e | ||
|  | cd5fcc7ca7 | ||
|  | aa29e7be48 | ||
|  | 93febe34b0 | ||
|  | f086e6d3c1 | ||
|  | 22e51e1c96 | ||
|  | 63a5336f31 | ||
|  | bfc6c53cc5 | ||
|  | 236017f310 | ||
|  | 0a1d9b4dfd | ||
|  | b50d090946 | ||
|  | 00b5db52cf | ||
|  | 24cb30e2c5 | ||
|  | 4549145ab5 | ||
|  | 67b0217754 | ||
|  | ccae9efdf0 | ||
|  | 59d596b222 | ||
|  | 4878eb2c45 | ||
|  | 7755392f57 | 
							
								
								
									
										2
									
								
								.github/pull_request_template.md
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										2
									
								
								.github/pull_request_template.md
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,2 @@ | ||||
| To show that your contribution is compatible with the MIT License, please include the following text somewhere in this PR description:   | ||||
| This PR complies with the DCO; https://developercertificate.org/   | ||||
							
								
								
									
										28
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										28
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -5,23 +5,39 @@ __pycache__/ | ||||
| MANIFEST.in | ||||
| MANIFEST | ||||
| copyparty.egg-info/ | ||||
| buildenv/ | ||||
| build/ | ||||
| dist/ | ||||
| sfx/ | ||||
| py2/ | ||||
| .venv/ | ||||
|  | ||||
| /buildenv/ | ||||
| /build/ | ||||
| /dist/ | ||||
| /py2/ | ||||
| /sfx* | ||||
| /unt/ | ||||
| /log/ | ||||
|  | ||||
| # ide | ||||
| *.sublime-workspace | ||||
|  | ||||
| # winmerge | ||||
| *.bak | ||||
|  | ||||
| # apple pls | ||||
| .DS_Store | ||||
|  | ||||
| # derived | ||||
| copyparty/res/COPYING.txt | ||||
| copyparty/web/deps/ | ||||
| srv/ | ||||
| scripts/docker/i/ | ||||
| contrib/package/arch/pkg/ | ||||
| contrib/package/arch/src/ | ||||
|  | ||||
| # state/logs | ||||
| up.*.txt | ||||
| .hist/ | ||||
| .hist/ | ||||
| scripts/docker/*.out | ||||
| scripts/docker/*.err | ||||
| /perf.* | ||||
|  | ||||
| # nix build output link | ||||
| result | ||||
|   | ||||
							
								
								
									
										1
									
								
								.vscode/launch.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.vscode/launch.json
									
									
									
									
										vendored
									
									
								
							| @@ -8,6 +8,7 @@ | ||||
|             "module": "copyparty", | ||||
|             "console": "integratedTerminal", | ||||
|             "cwd": "${workspaceFolder}", | ||||
|             "justMyCode": false, | ||||
|             "args": [ | ||||
|                 //"-nw", | ||||
|                 "-ed", | ||||
|   | ||||
							
								
								
									
										18
									
								
								.vscode/launch.py
									
									
									
									
										vendored
									
									
										
										
										Normal file → Executable file
									
								
							
							
						
						
									
										18
									
								
								.vscode/launch.py
									
									
									
									
										vendored
									
									
										
										
										Normal file → Executable file
									
								
							| @@ -1,3 +1,5 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| # takes arguments from launch.json | ||||
| # is used by no_dbg in tasks.json | ||||
| # launches 10x faster than mspython debugpy | ||||
| @@ -9,15 +11,15 @@ import sys | ||||
|  | ||||
| print(sys.executable) | ||||
|  | ||||
| import json5 | ||||
| import shlex | ||||
| import jstyleson | ||||
| import subprocess as sp | ||||
|  | ||||
|  | ||||
| with open(".vscode/launch.json", "r", encoding="utf-8") as f: | ||||
|     tj = f.read() | ||||
|  | ||||
| oj = jstyleson.loads(tj) | ||||
| oj = json5.loads(tj) | ||||
| argv = oj["configurations"][0]["args"] | ||||
|  | ||||
| try: | ||||
| @@ -28,7 +30,17 @@ except: | ||||
|  | ||||
| argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv] | ||||
|  | ||||
| if re.search(" -j ?[0-9]", " ".join(argv)): | ||||
| sfx = "" | ||||
| if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]): | ||||
|     sfx = sys.argv[1] | ||||
|     sys.argv = [sys.argv[0]] + sys.argv[2:] | ||||
|  | ||||
| argv += sys.argv[1:] | ||||
|  | ||||
| if sfx: | ||||
|     argv = [sys.executable, sfx] + argv | ||||
|     sp.check_call(argv) | ||||
| elif re.search(" -j ?[0-9]", " ".join(argv)): | ||||
|     argv = [sys.executable, "-m", "copyparty"] + argv | ||||
|     sp.check_call(argv) | ||||
| else: | ||||
|   | ||||
							
								
								
									
										20
									
								
								.vscode/settings.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										20
									
								
								.vscode/settings.json
									
									
									
									
										vendored
									
									
								
							| @@ -23,7 +23,6 @@ | ||||
|         "terminal.ansiBrightWhite": "#ffffff", | ||||
|     }, | ||||
|     "python.testing.pytestEnabled": false, | ||||
|     "python.testing.nosetestsEnabled": false, | ||||
|     "python.testing.unittestEnabled": true, | ||||
|     "python.testing.unittestArgs": [ | ||||
|         "-v", | ||||
| @@ -35,25 +34,30 @@ | ||||
|     "python.linting.pylintEnabled": true, | ||||
|     "python.linting.flake8Enabled": true, | ||||
|     "python.linting.banditEnabled": true, | ||||
|     "python.linting.mypyEnabled": true, | ||||
|     "python.linting.flake8Args": [ | ||||
|         "--max-line-length=120", | ||||
|         "--ignore=E722,F405,E203,W503,W293,E402", | ||||
|         "--ignore=E722,F405,E203,W503,W293,E402,E501,E128,E226", | ||||
|     ], | ||||
|     "python.linting.banditArgs": [ | ||||
|         "--ignore=B104" | ||||
|         "--ignore=B104,B110,B112" | ||||
|     ], | ||||
|     "python.formatting.provider": "black", | ||||
|     // python3 -m isort --py=27 --profile=black copyparty/ | ||||
|     "python.formatting.provider": "none", | ||||
|     "[python]": { | ||||
|         "editor.defaultFormatter": "ms-python.black-formatter" | ||||
|     }, | ||||
|     "editor.formatOnSave": true, | ||||
|     "[html]": { | ||||
|         "editor.formatOnSave": false, | ||||
|         "editor.autoIndent": "keep", | ||||
|     }, | ||||
|     "[css]": { | ||||
|         "editor.formatOnSave": false, | ||||
|     }, | ||||
|     "files.associations": { | ||||
|         "*.makefile": "makefile" | ||||
|     }, | ||||
|     "python.formatting.blackArgs": [ | ||||
|         "-t", | ||||
|         "py27" | ||||
|     ], | ||||
|     "python.linting.enabled": true, | ||||
|     "python.pythonPath": "/usr/bin/python3" | ||||
| } | ||||
							
								
								
									
										9
									
								
								SECURITY.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										9
									
								
								SECURITY.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,9 @@ | ||||
| # Security Policy | ||||
|  | ||||
| if you hit something extra juicy pls let me know on either of the following | ||||
| * email -- `copyparty@ocv.ze` except `ze` should be `me` | ||||
| * [mastodon dm](https://layer8.space/@tripflag) -- `@tripflag@layer8.space` | ||||
| * [github private vulnerability report](https://github.com/9001/copyparty/security/advisories/new), wow that form is complicated | ||||
| * [twitter dm](https://twitter.com/tripflag) (if im somehow not banned yet) | ||||
|  | ||||
| no bug bounties sorry! all i can offer is greetz in the release notes | ||||
| @@ -1,7 +1,8 @@ | ||||
| # [`up2k.py`](up2k.py) | ||||
| # [`u2c.py`](u2c.py) | ||||
| * command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm) | ||||
| * file uploads, file-search, autoresume of aborted/broken uploads | ||||
| * faster than browsers | ||||
| * sync local folder to server | ||||
| * generally faster than browsers | ||||
| * if something breaks just restart it | ||||
|  | ||||
|  | ||||
| @@ -11,7 +12,7 @@ produces a chronological list of all uploads by collecting info from up2k databa | ||||
| * optional mapping from IP-addresses to nicknames | ||||
|  | ||||
|  | ||||
| # [`copyparty-fuse.py`](copyparty-fuse.py) | ||||
| # [`partyfuse.py`](partyfuse.py) | ||||
| * mount a copyparty server as a local filesystem (read-only) | ||||
| * **supports Windows!** -- expect `194 MiB/s` sequential read | ||||
| * **supports Linux** -- expect `117 MiB/s` sequential read | ||||
| @@ -30,19 +31,19 @@ also consider using [../docs/rclone.md](../docs/rclone.md) instead for 5x perfor | ||||
| * install [winfsp](https://github.com/billziss-gh/winfsp/releases/latest) and [python 3](https://www.python.org/downloads/) | ||||
|   * [x] add python 3.x to PATH (it asks during install) | ||||
| * `python -m pip install --user fusepy` | ||||
| * `python ./copyparty-fuse.py n: http://192.168.1.69:3923/` | ||||
| * `python ./partyfuse.py n: http://192.168.1.69:3923/` | ||||
|  | ||||
| 10% faster in [msys2](https://www.msys2.org/), 700% faster if debug prints are enabled: | ||||
| * `pacman -S mingw64/mingw-w64-x86_64-python{,-pip}` | ||||
| * `/mingw64/bin/python3 -m pip install --user fusepy` | ||||
| * `/mingw64/bin/python3 ./copyparty-fuse.py [...]` | ||||
| * `/mingw64/bin/python3 ./partyfuse.py [...]` | ||||
|  | ||||
| you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releases/latest), let me know if you [figure out how](https://github.com/dokan-dev/dokany/wiki/FUSE)   | ||||
| (winfsp's sshfs leaks, doesn't look like winfsp itself does, should be fine) | ||||
|  | ||||
|  | ||||
|  | ||||
| # [`copyparty-fuse🅱️.py`](copyparty-fuseb.py) | ||||
| # [`partyfuse2.py`](partyfuse2.py) | ||||
| * mount a copyparty server as a local filesystem (read-only) | ||||
| * does the same thing except more correct, `samba` approves | ||||
| * **supports Linux** -- expect `18 MiB/s` (wait what) | ||||
| @@ -50,7 +51,7 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas | ||||
|  | ||||
|  | ||||
|  | ||||
| # [`copyparty-fuse-streaming.py`](copyparty-fuse-streaming.py) | ||||
| # [`partyfuse-streaming.py`](partyfuse-streaming.py) | ||||
| * pretend this doesn't exist | ||||
|  | ||||
|  | ||||
|   | ||||
							
								
								
									
										111
									
								
								bin/dbtool.py
									
									
									
									
									
								
							
							
						
						
									
										111
									
								
								bin/dbtool.py
									
									
									
									
									
								
							| @@ -8,7 +8,10 @@ import sqlite3 | ||||
| import argparse | ||||
|  | ||||
| DB_VER1 = 3 | ||||
| DB_VER2 = 4 | ||||
| DB_VER2 = 5 | ||||
|  | ||||
| BY_PATH = None | ||||
| NC = None | ||||
|  | ||||
|  | ||||
| def die(msg): | ||||
| @@ -57,8 +60,13 @@ def compare(n1, d1, n2, d2, verbose): | ||||
|         if rd.split("/", 1)[0] == ".hist": | ||||
|             continue | ||||
|  | ||||
|         q = "select w from up where rd = ? and fn = ?" | ||||
|         hit = d2.execute(q, (rd, fn)).fetchone() | ||||
|         if BY_PATH: | ||||
|             q = "select w from up where rd = ? and fn = ?" | ||||
|             hit = d2.execute(q, (rd, fn)).fetchone() | ||||
|         else: | ||||
|             q = "select w from up where substr(w,1,16) = ? and +w = ?" | ||||
|             hit = d2.execute(q, (w1[:16], w1)).fetchone() | ||||
|  | ||||
|         if not hit: | ||||
|             miss += 1 | ||||
|             if verbose: | ||||
| @@ -70,27 +78,32 @@ def compare(n1, d1, n2, d2, verbose): | ||||
|     n = 0 | ||||
|     miss = {} | ||||
|     nmiss = 0 | ||||
|     for w1, k, v in d1.execute("select * from mt"): | ||||
|     for w1s, k, v in d1.execute("select * from mt"): | ||||
|  | ||||
|         n += 1 | ||||
|         if n % 100_000 == 0: | ||||
|             m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m" | ||||
|             print(m) | ||||
|  | ||||
|         q = "select rd, fn from up where substr(w,1,16) = ?" | ||||
|         rd, fn = d1.execute(q, (w1,)).fetchone() | ||||
|         q = "select w, rd, fn from up where substr(w,1,16) = ?" | ||||
|         w1, rd, fn = d1.execute(q, (w1s,)).fetchone() | ||||
|         if rd.split("/", 1)[0] == ".hist": | ||||
|             continue | ||||
|  | ||||
|         q = "select substr(w,1,16) from up where rd = ? and fn = ?" | ||||
|         w2 = d2.execute(q, (rd, fn)).fetchone() | ||||
|         if BY_PATH: | ||||
|             q = "select w from up where rd = ? and fn = ?" | ||||
|             w2 = d2.execute(q, (rd, fn)).fetchone() | ||||
|         else: | ||||
|             q = "select w from up where substr(w,1,16) = ? and +w = ?" | ||||
|             w2 = d2.execute(q, (w1s, w1)).fetchone() | ||||
|  | ||||
|         if w2: | ||||
|             w2 = w2[0] | ||||
|  | ||||
|         v2 = None | ||||
|         if w2: | ||||
|             v2 = d2.execute( | ||||
|                 "select v from mt where w = ? and +k = ?", (w2, k) | ||||
|                 "select v from mt where w = ? and +k = ?", (w2[:16], k) | ||||
|             ).fetchone() | ||||
|             if v2: | ||||
|                 v2 = v2[0] | ||||
| @@ -124,7 +137,7 @@ def compare(n1, d1, n2, d2, verbose): | ||||
|  | ||||
|     for k, v in sorted(miss.items()): | ||||
|         if v: | ||||
|             print(f"{n1} has {v:6} more {k:<6} tags than {n2}") | ||||
|             print(f"{n1} has {v:7} more {k:<7} tags than {n2}") | ||||
|  | ||||
|     print(f"in total, {nmiss} missing tags in {n2}\n") | ||||
|  | ||||
| @@ -132,47 +145,75 @@ def compare(n1, d1, n2, d2, verbose): | ||||
| def copy_mtp(d1, d2, tag, rm): | ||||
|     nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0] | ||||
|     n = 0 | ||||
|     ndone = 0 | ||||
|     for w1, k, v in d1.execute("select * from mt where k = ?", (tag,)): | ||||
|     ncopy = 0 | ||||
|     nskip = 0 | ||||
|     for w1s, k, v in d1.execute("select * from mt where k = ?", (tag,)): | ||||
|         n += 1 | ||||
|         if n % 25_000 == 0: | ||||
|             m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ndone} copied\033[0m" | ||||
|             m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ncopy} copied, {nskip} skipped\033[0m" | ||||
|             print(m) | ||||
|  | ||||
|         q = "select rd, fn from up where substr(w,1,16) = ?" | ||||
|         rd, fn = d1.execute(q, (w1,)).fetchone() | ||||
|         q = "select w, rd, fn from up where substr(w,1,16) = ?" | ||||
|         w1, rd, fn = d1.execute(q, (w1s,)).fetchone() | ||||
|         if rd.split("/", 1)[0] == ".hist": | ||||
|             continue | ||||
|  | ||||
|         q = "select substr(w,1,16) from up where rd = ? and fn = ?" | ||||
|         w2 = d2.execute(q, (rd, fn)).fetchone() | ||||
|         if BY_PATH: | ||||
|             q = "select w from up where rd = ? and fn = ?" | ||||
|             w2 = d2.execute(q, (rd, fn)).fetchone() | ||||
|         else: | ||||
|             q = "select w from up where substr(w,1,16) = ? and +w = ?" | ||||
|             w2 = d2.execute(q, (w1s, w1)).fetchone() | ||||
|  | ||||
|         if not w2: | ||||
|             continue | ||||
|  | ||||
|         w2 = w2[0] | ||||
|         hit = d2.execute("select v from mt where w = ? and +k = ?", (w2, k)).fetchone() | ||||
|         w2s = w2[0][:16] | ||||
|         hit = d2.execute("select v from mt where w = ? and +k = ?", (w2s, k)).fetchone() | ||||
|         if hit: | ||||
|             hit = hit[0] | ||||
|  | ||||
|         if hit != v: | ||||
|             ndone += 1 | ||||
|             if hit is not None: | ||||
|                 d2.execute("delete from mt where w = ? and +k = ?", (w2, k)) | ||||
|             if NC and hit is not None: | ||||
|                 nskip += 1 | ||||
|                 continue | ||||
|  | ||||
|             d2.execute("insert into mt values (?,?,?)", (w2, k, v)) | ||||
|             ncopy += 1 | ||||
|             if hit is not None: | ||||
|                 d2.execute("delete from mt where w = ? and +k = ?", (w2s, k)) | ||||
|  | ||||
|             d2.execute("insert into mt values (?,?,?)", (w2s, k, v)) | ||||
|             if rm: | ||||
|                 d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w2,)) | ||||
|                 d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w2s,)) | ||||
|  | ||||
|     d2.commit() | ||||
|     print(f"copied {ndone} {tag} tags over") | ||||
|     print(f"copied {ncopy} {tag} tags over, skipped {nskip}") | ||||
|  | ||||
|  | ||||
| def examples(): | ||||
|     print( | ||||
|         """ | ||||
| # clearing the journal | ||||
| ./dbtool.py up2k.db | ||||
|  | ||||
| # copy tags ".bpm" and "key" from old.db to up2k.db, and remove the mtp flag from matching files (so copyparty won't run any mtps on it) | ||||
| ./dbtool.py -ls up2k.db | ||||
| ./dbtool.py -src old.db up2k.db -cmp | ||||
| ./dbtool.py -src old.v3 up2k.db -rm-mtp-flag -copy key | ||||
| ./dbtool.py -src old.v3 up2k.db -rm-mtp-flag -copy .bpm -vac | ||||
|  | ||||
| """ | ||||
|     ) | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|     global NC, BY_PATH | ||||
|     os.system("") | ||||
|     print() | ||||
|  | ||||
|     ap = argparse.ArgumentParser() | ||||
|     ap.add_argument("db", help="database to work on") | ||||
|     ap.add_argument("-h2", action="store_true", help="show examples") | ||||
|     ap.add_argument("-src", metavar="DB", type=str, help="database to copy from") | ||||
|  | ||||
|     ap2 = ap.add_argument_group("informational / read-only stuff") | ||||
| @@ -185,11 +226,29 @@ def main(): | ||||
|     ap2.add_argument( | ||||
|         "-rm-mtp-flag", | ||||
|         action="store_true", | ||||
|         help="when an mtp tag is copied over, also mark that as done, so copyparty won't run mtp on it", | ||||
|         help="when an mtp tag is copied over, also mark that file as done, so copyparty won't run any mtps on those files", | ||||
|     ) | ||||
|     ap2.add_argument("-vac", action="store_true", help="optimize DB") | ||||
|  | ||||
|     ap2 = ap.add_argument_group("behavior modifiers") | ||||
|     ap2.add_argument( | ||||
|         "-nc", | ||||
|         action="store_true", | ||||
|         help="no-clobber; don't replace/overwrite existing tags", | ||||
|     ) | ||||
|     ap2.add_argument( | ||||
|         "-by-path", | ||||
|         action="store_true", | ||||
|         help="match files based on location rather than warks (content-hash), use this if the databases have different wark salts", | ||||
|     ) | ||||
|  | ||||
|     ar = ap.parse_args() | ||||
|     if ar.h2: | ||||
|         examples() | ||||
|         return | ||||
|  | ||||
|     NC = ar.nc | ||||
|     BY_PATH = ar.by_path | ||||
|  | ||||
|     for v in [ar.db, ar.src]: | ||||
|         if v and not os.path.exists(v): | ||||
|   | ||||
							
								
								
									
										35
									
								
								bin/handlers/README.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										35
									
								
								bin/handlers/README.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,35 @@ | ||||
| replace the standard 404 / 403 responses with plugins | ||||
|  | ||||
|  | ||||
| # usage | ||||
|  | ||||
| load plugins either globally with `--on404 ~/dev/copyparty/bin/handlers/sorry.py` or for a specific volume with `:c,on404=~/handlers/sorry.py` | ||||
|  | ||||
|  | ||||
| # api | ||||
|  | ||||
| each plugin must define a `main()` which takes 3 arguments; | ||||
|  | ||||
| * `cli` is an instance of [copyparty/httpcli.py](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/httpcli.py) (the monstrosity itself) | ||||
| * `vn` is the VFS which overlaps with the requested URL, and | ||||
| * `rem` is the URL remainder below the VFS mountpoint | ||||
|     * so `vn.vpath + rem` == `cli.vpath` == original request | ||||
|  | ||||
|  | ||||
| # examples | ||||
|  | ||||
| ## on404 | ||||
|  | ||||
* [sorry.py](sorry.py) replies with a custom message instead of the usual 404
| * [nooo.py](nooo.py) replies with an endless noooooooooooooo | ||||
| * [never404.py](never404.py) 100% guarantee that 404 will never be a thing again as it automatically creates dummy files whenever necessary | ||||
| * [caching-proxy.py](caching-proxy.py) transforms copyparty into a squid/varnish knockoff | ||||
|  | ||||
| ## on403 | ||||
|  | ||||
| * [ip-ok.py](ip-ok.py) disables security checks if client-ip is 1.2.3.4 | ||||
|  | ||||
|  | ||||
| # notes | ||||
|  | ||||
| * on403 only works for trivial stuff (basic http access) since I haven't been able to think of any good usecases for it (was just easy to add while doing on404) | ||||
							
								
								
									
										36
									
								
								bin/handlers/caching-proxy.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										36
									
								
								bin/handlers/caching-proxy.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,36 @@ | ||||
| # assume each requested file exists on another webserver and | ||||
| # download + mirror them as they're requested | ||||
| # (basically pretend we're warnish) | ||||
|  | ||||
| import os | ||||
| import requests | ||||
|  | ||||
| from typing import TYPE_CHECKING | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from copyparty.httpcli import HttpCli | ||||
|  | ||||
|  | ||||
def main(cli: "HttpCli", vn, rem):
    """Mirror the requested file from an upstream webserver on demand.

    Downloads `rem` from the kernel.org alpine mirror into the volume,
    then returns "retry" so copyparty re-attempts (and now succeeds)
    serving the file; returns "false" if the download failed.
    """
    url = "https://mirrors.edge.kernel.org/alpine/" + rem
    abspath = os.path.join(vn.realpath, rem)

    # sneaky trick to preserve a requests-session between downloads
    # so it doesn't have to spend ages reopening https connections;
    # luckily we can stash it inside the copyparty client session,
    # name just has to be definitely unused so "hacapo_req_s" it is
    req_s = getattr(cli.conn, "hacapo_req_s", None) or requests.Session()
    setattr(cli.conn, "hacapo_req_s", req_s)

    try:
        os.makedirs(os.path.dirname(abspath), exist_ok=True)
        with req_s.get(url, stream=True, timeout=69) as r:
            r.raise_for_status()
            with open(abspath, "wb", 64 * 1024) as f:
                for buf in r.iter_content(chunk_size=64 * 1024):
                    f.write(buf)
    except Exception:
        # download failed; drop any partial file so a later retry starts
        # clean -- the file may not exist at all if the GET itself failed,
        # so the unlink must not be allowed to mask the original error
        try:
            os.unlink(abspath)
        except OSError:
            pass
        return "false"

    return "retry"
							
								
								
									
										6
									
								
								bin/handlers/ip-ok.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										6
									
								
								bin/handlers/ip-ok.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,6 @@ | ||||
| # disable permission checks and allow access if client-ip is 1.2.3.4 | ||||
|  | ||||
|  | ||||
def main(cli, vn, rem):
    # bypass all permission checks for the one allowlisted client address
    return "allow" if cli.ip == "1.2.3.4" else None
							
								
								
									
										11
									
								
								bin/handlers/never404.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										11
									
								
								bin/handlers/never404.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,11 @@ | ||||
| # create a dummy file and let copyparty return it | ||||
|  | ||||
|  | ||||
def main(cli, vn, rem):
    """Materialize a dummy file at the requested path, then ask copyparty
    to retry the request (which now finds the file)."""
    print("hello", cli.ip)

    payload = b"404? not on MY watch!"
    target = vn.canonical(rem)
    with open(target, "wb") as f:
        f.write(payload)

    return "retry"
							
								
								
									
										16
									
								
								bin/handlers/nooo.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										16
									
								
								bin/handlers/nooo.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,16 @@ | ||||
| # reply with an endless "noooooooooooooooooooooooo" | ||||
|  | ||||
|  | ||||
def say_no():
    """Endless scream: one leading "n" then 4 KiB chunks of "o" forever."""
    yield b"n"
    chunk = b"o" * 4096
    while True:
        yield chunk
|  | ||||
|  | ||||
def main(cli, vn, rem):
    # answer with status 404 but a never-ending body; sendall blocks until
    # the client gives up and disconnects, at which point it raises -- so
    # the return statement below is effectively unreachable
    cli.send_headers(None, 404, "text/plain")

    noise = say_no()
    while True:
        cli.s.sendall(next(noise))

    return "false"
							
								
								
									
										7
									
								
								bin/handlers/sorry.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										7
									
								
								bin/handlers/sorry.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,7 @@ | ||||
| # sends a custom response instead of the usual 404 | ||||
|  | ||||
|  | ||||
def main(cli, vn, rem):
    # tell the client exactly what they're missing, instead of a plain 404
    text = f"sorry {cli.ip} but {cli.vpath} doesn't exist"
    body = text.encode("utf-8")
    return str(cli.reply(body, 404, "text/plain"))
							
								
								
									
										29
									
								
								bin/hooks/README.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										29
									
								
								bin/hooks/README.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,29 @@ | ||||
| standalone programs which are executed by copyparty when an event happens (upload, file rename, delete, ...) | ||||
|  | ||||
| these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info | ||||
|  | ||||
| run copyparty with `--help-hooks` for usage details / hook type explanations (xbu/xau/xiu/xbr/xar/xbd/xad) | ||||
|  | ||||
| > **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead | ||||
|  | ||||
|  | ||||
| # after upload | ||||
| * [notify.py](notify.py) shows a desktop notification ([example](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png)) | ||||
|   * [notify2.py](notify2.py) uses the json API to show more context | ||||
| * [image-noexif.py](image-noexif.py) removes image exif by overwriting / directly editing the uploaded file | ||||
| * [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png)) | ||||
| * [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable | ||||
|  | ||||
|  | ||||
| # upload batches | ||||
| these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every single file), `xiu` hooks are given a list of recent uploads on STDIN after the server has gone idle for N seconds, reducing server load + providing more context | ||||
| * [xiu.py](xiu.py) is a "minimal" example showing a list of filenames + total filesize | ||||
| * [xiu-sha.py](xiu-sha.py) produces a sha512 checksum list in the volume root | ||||
|  | ||||
|  | ||||
| # before upload | ||||
| * [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions | ||||
|  | ||||
|  | ||||
| # on message | ||||
| * [wget.py](wget.py) lets you download files by POSTing URLs to copyparty | ||||
							
								
								
									
										68
									
								
								bin/hooks/discord-announce.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										68
									
								
								bin/hooks/discord-announce.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,68 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import sys | ||||
| import json | ||||
| import requests | ||||
| from copyparty.util import humansize, quotep | ||||
|  | ||||
|  | ||||
| _ = r""" | ||||
| announces a new upload on discord | ||||
|  | ||||
| example usage as global config: | ||||
|     --xau f,t5,j,bin/hooks/discord-announce.py | ||||
|  | ||||
| example usage as a volflag (per-volume config): | ||||
|     -v srv/inc:inc:r:rw,ed:c,xau=f,t5,j,bin/hooks/discord-announce.py | ||||
|                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||||
|  | ||||
|     (share filesystem-path srv/inc as volume /inc, | ||||
|      readable by everyone, read-write for user 'ed', | ||||
|      running this plugin on all uploads with the params listed below) | ||||
|  | ||||
| parameters explained, | ||||
    xau = execute after upload
|     f  = fork; don't wait for it to finish | ||||
|     t5 = timeout if it's still running after 5 sec | ||||
|     j  = provide upload information as json; not just the filename | ||||
|  | ||||
| replace "xau" with "xbu" to announce Before upload starts instead of After completion | ||||
|  | ||||
| # how to discord: | ||||
| first create the webhook url; https://support.discord.com/hc/en-us/articles/228383668-Intro-to-Webhooks | ||||
| then use this to design your message: https://discohook.org/ | ||||
| """ | ||||
|  | ||||
|  | ||||
def main():
    """Announce an upload on discord via webhook.

    Reads the upload-info json from argv[1] (requires the `j` hook flag)
    and posts an embed with filename, size, user and IP.
    """
    # replace with your own channel's webhook URL before use; treat it as
    # a secret -- anyone holding the URL can post to the channel, so it
    # must never be committed to version control
    WEBHOOK = "https://discord.com/api/webhooks/1234/base64"

    # read info from copyparty
    inf = json.loads(sys.argv[1])
    vpath = inf["vp"]
    filename = vpath.split("/")[-1]
    url = f"https://{inf['host']}/{quotep(vpath)}"

    # compose the message to discord
    j = {
        "title": filename,
        "url": url,
        "description": url.rsplit("/", 1)[0],
        "color": 0x449900,
        "fields": [
            {"name": "Size", "value": humansize(inf["sz"])},
            {"name": "User", "value": inf["user"]},
            {"name": "IP", "value": inf["ip"]},
        ],
    }

    for v in j["fields"]:
        v["inline"] = True

    r = requests.post(WEBHOOK, json={"embeds": [j]})
    print(f"discord: {r}\n", end="")
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
							
								
								
									
										72
									
								
								bin/hooks/image-noexif.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										72
									
								
								bin/hooks/image-noexif.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,72 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| import subprocess as sp | ||||
|  | ||||
|  | ||||
| _ = r""" | ||||
| remove exif tags from uploaded images; the eventhook edition of | ||||
| https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/image-noexif.py | ||||
|  | ||||
| dependencies: | ||||
|     exiftool / perl-Image-ExifTool | ||||
|  | ||||
| being an upload hook, this will take effect after upload completion | ||||
|     but before copyparty has hashed/indexed the file, which means that | ||||
|     copyparty will never index the original file, so deduplication will | ||||
|     not work as expected... which is mostly OK but ehhh | ||||
|  | ||||
| note: modifies the file in-place, so don't set the `f` (fork) flag | ||||
|  | ||||
| example usages; either as global config (all volumes) or as volflag: | ||||
|     --xau bin/hooks/image-noexif.py | ||||
|     -v srv/inc:inc:r:rw,ed:c,xau=bin/hooks/image-noexif.py | ||||
|                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||||
|  | ||||
| explained: | ||||
|     share fs-path srv/inc at /inc (readable by all, read-write for user ed) | ||||
|     running this xau (execute-after-upload) plugin for all uploaded files | ||||
| """ | ||||
|  | ||||
|  | ||||
| # filetypes to process; ignores everything else | ||||
| EXTS = ("jpg", "jpeg", "avif", "heif", "heic") | ||||
|  | ||||
|  | ||||
try:
    from copyparty.util import fsenc
except ImportError:
    # running standalone without copyparty; plain utf-8 is close enough
    def fsenc(p):
        return p.encode("utf-8")
|  | ||||
|  | ||||
def main():
    """Strip EXIF/IPTC/XMP metadata from the uploaded file, in-place."""
    fp = sys.argv[1]
    ext = fp.lower().rsplit(".", 1)[-1]
    if ext not in EXTS:
        return

    # run exiftool from the file's directory so only the bare filename
    # needs filesystem-encoding
    cwd, fn = os.path.split(fp)
    os.chdir(cwd)
    cmd = [
        b"exiftool",
        b"-exif:all=",
        b"-iptc:all=",
        b"-xmp:all=",
        b"-P",
        b"-overwrite_original",
        b"--",
        fsenc(fn),
    ]
    sp.check_output(cmd)
    print("image-noexif: stripped")
|  | ||||
|  | ||||
if __name__ == "__main__":
    try:
        main()
    except Exception:
        # best-effort: a failed strip must never block the upload, but
        # don't swallow SystemExit / KeyboardInterrupt like a bare except
        pass
							
								
								
									
										66
									
								
								bin/hooks/notify.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										66
									
								
								bin/hooks/notify.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,66 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| import subprocess as sp | ||||
| from plyer import notification | ||||
|  | ||||
|  | ||||
| _ = r""" | ||||
| show os notification on upload; works on windows, linux, macos, android | ||||
|  | ||||
    dependencies:
|     windows: python3 -m pip install --user -U plyer | ||||
|     linux:   python3 -m pip install --user -U plyer | ||||
|     macos:   python3 -m pip install --user -U plyer pyobjus | ||||
|     android: just termux and termux-api | ||||
|  | ||||
| example usages; either as global config (all volumes) or as volflag: | ||||
|     --xau f,bin/hooks/notify.py | ||||
|     -v srv/inc:inc:r:rw,ed:c,xau=f,bin/hooks/notify.py | ||||
|                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||||
|  | ||||
|     (share filesystem-path srv/inc as volume /inc, | ||||
|      readable by everyone, read-write for user 'ed', | ||||
|      running this plugin on all uploads with the params listed below) | ||||
|  | ||||
| parameters explained, | ||||
|     xau = execute after upload | ||||
|     f   = fork so it doesn't block uploads | ||||
| """ | ||||
|  | ||||
|  | ||||
try:
    from copyparty.util import humansize
except ImportError:
    # copyparty not importable (standalone run); show the raw byte count
    def humansize(n):
        return n
|  | ||||
|  | ||||
def main():
    """Pop a desktop (plyer) or termux notification for an uploaded file.

    argv[1] is the filesystem path of the upload.
    """
    fp = sys.argv[1]
    dp, fn = os.path.split(fp)
    try:
        sz = humansize(os.path.getsize(fp))
    except OSError:
        # file vanished or is unreadable; size is cosmetic, keep going
        sz = "?"

    msg = "{} ({})\n📁 {}".format(fn, sz, dp)
    title = "File received"

    # plyer has no termux backend; use the termux-api cli tool instead
    if "com.termux" in sys.executable:
        sp.run(["termux-notification", "-t", title, "-c", msg])
        return

    icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""
    notification.notify(
        title=title,
        message=msg,
        app_icon=icon,
        timeout=10,
    )
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
							
								
								
									
										72
									
								
								bin/hooks/notify2.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										72
									
								
								bin/hooks/notify2.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,72 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import json | ||||
| import os | ||||
| import sys | ||||
| import subprocess as sp | ||||
| from datetime import datetime | ||||
| from plyer import notification | ||||
|  | ||||
|  | ||||
| _ = r""" | ||||
| same as notify.py but with additional info (uploader, ...) | ||||
| and also supports --xm (notify on 📟 message) | ||||
|  | ||||
| example usages; either as global config (all volumes) or as volflag: | ||||
|     --xm  f,j,bin/hooks/notify2.py | ||||
|     --xau f,j,bin/hooks/notify2.py | ||||
|     -v srv/inc:inc:r:rw,ed:c,xm=f,j,bin/hooks/notify2.py | ||||
|     -v srv/inc:inc:r:rw,ed:c,xau=f,j,bin/hooks/notify2.py | ||||
|                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||||
|  | ||||
|     (share filesystem-path srv/inc as volume /inc, | ||||
|      readable by everyone, read-write for user 'ed', | ||||
|      running this plugin on all uploads / msgs with the params listed below) | ||||
|  | ||||
| parameters explained, | ||||
|     xau = execute after upload | ||||
|     f   = fork so it doesn't block uploads | ||||
|     j   = provide json instead of filepath list | ||||
| """ | ||||
|  | ||||
|  | ||||
try:
    from copyparty.util import humansize
except ImportError:
    # copyparty not importable (standalone run); show the raw byte count
    def humansize(n):
        return n
|  | ||||
|  | ||||
def main():
    """Show a notification for an upload or 📟 message (json hook api).

    argv[1] is the json info blob from copyparty (`j` hook flag).
    """
    # local import: the file only does `from datetime import datetime`
    from datetime import timezone

    inf = json.loads(sys.argv[1])
    fp = inf["ap"]
    sz = humansize(inf["sz"])
    dp, fn = os.path.split(fp)
    # tz-aware replacement for the deprecated datetime.utcfromtimestamp;
    # produces the identical "YYYY-mm-dd HH:MM:SS" utc string
    mt = datetime.fromtimestamp(inf["mt"], tz=timezone.utc).strftime(
        "%Y-%m-%d %H:%M:%S"
    )

    msg = f"{fn} ({sz})\n📁 {dp}"
    title = "File received"
    icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""

    # a 📟 message carries its text in "txt"; reshape the notification
    if inf.get("txt"):
        msg = inf["txt"]
        title = "Message received"
        icon = "mail-unread-symbolic" if sys.platform == "linux" else ""

    msg += f"\n👤 {inf['user']} ({inf['ip']})\n🕒 {mt}"

    # plyer has no termux backend; use the termux-api cli tool instead
    if "com.termux" in sys.executable:
        sp.run(["termux-notification", "-t", title, "-c", msg])
        return

    notification.notify(
        title=title,
        message=msg,
        app_icon=icon,
        timeout=10,
    )
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
							
								
								
									
										35
									
								
								bin/hooks/reject-extension.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										35
									
								
								bin/hooks/reject-extension.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,35 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import sys | ||||
|  | ||||
|  | ||||
| _ = r""" | ||||
| reject file uploads by file extension | ||||
|  | ||||
| example usage as global config: | ||||
|     --xbu c,bin/hooks/reject-extension.py | ||||
|  | ||||
| example usage as a volflag (per-volume config): | ||||
|     -v srv/inc:inc:r:rw,ed:c,xbu=c,bin/hooks/reject-extension.py | ||||
|                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||||
|  | ||||
|     (share filesystem-path srv/inc as volume /inc, | ||||
|      readable by everyone, read-write for user 'ed', | ||||
|      running this plugin on all uploads with the params listed below) | ||||
|  | ||||
| parameters explained, | ||||
|     xbu = execute before upload | ||||
|     c   = check result, reject upload if error | ||||
| """ | ||||
|  | ||||
|  | ||||
def main():
    """Exit 1 (reject the upload) if the file extension is blacklisted."""
    bad = "exe scr com pif bat ps1 jar msi"

    # lowercase first so "payload.EXE" can't sneak past the filter
    ext = sys.argv[1].lower().split(".")[-1]

    sys.exit(1 if ext in bad.split() else 0)
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
							
								
								
									
										44
									
								
								bin/hooks/reject-mimetype.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										44
									
								
								bin/hooks/reject-mimetype.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,44 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import sys | ||||
| import magic | ||||
|  | ||||
|  | ||||
| _ = r""" | ||||
| reject file uploads by mimetype | ||||
|  | ||||
| dependencies (linux, macos): | ||||
|     python3 -m pip install --user -U python-magic | ||||
|  | ||||
| dependencies (windows): | ||||
|     python3 -m pip install --user -U python-magic-bin | ||||
|  | ||||
| example usage as global config: | ||||
|     --xau c,bin/hooks/reject-mimetype.py | ||||
|  | ||||
| example usage as a volflag (per-volume config): | ||||
|     -v srv/inc:inc:r:rw,ed:c,xau=c,bin/hooks/reject-mimetype.py | ||||
|                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||||
|  | ||||
|     (share filesystem-path srv/inc as volume /inc, | ||||
|      readable by everyone, read-write for user 'ed', | ||||
|      running this plugin on all uploads with the params listed below) | ||||
|  | ||||
| parameters explained, | ||||
|     xau = execute after upload | ||||
|     c   = check result, reject upload if error | ||||
| """ | ||||
|  | ||||
|  | ||||
def main():
    """Exit 1 (reject the upload) unless the sniffed mimetype is allowed."""
    # allowlist of acceptable mimetypes; everything else gets rejected
    ok = ["image/jpeg", "image/png"]

    mt = magic.from_file(sys.argv[1], mime=True)
    print(mt)

    # zero exit = accept, nonzero = reject (requires the `c` hook flag)
    sys.exit(0 if mt in ok else 1)
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
							
								
								
									
										64
									
								
								bin/hooks/wget.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										64
									
								
								bin/hooks/wget.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,64 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| import json | ||||
| import subprocess as sp | ||||
|  | ||||
|  | ||||
| _ = r""" | ||||
| use copyparty as a file downloader by POSTing URLs as | ||||
| application/x-www-form-urlencoded (for example using the | ||||
| message/pager function on the website) | ||||
|  | ||||
| example usage as global config: | ||||
|     --xm f,j,t3600,bin/hooks/wget.py | ||||
|  | ||||
| example usage as a volflag (per-volume config): | ||||
|     -v srv/inc:inc:r:rw,ed:c,xm=f,j,t3600,bin/hooks/wget.py | ||||
|                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||||
|  | ||||
|     (share filesystem-path srv/inc as volume /inc, | ||||
|      readable by everyone, read-write for user 'ed', | ||||
|      running this plugin on all messages with the params listed below) | ||||
|  | ||||
| parameters explained, | ||||
|     xm = execute on message-to-server-log | ||||
|     f = fork so it doesn't block uploads | ||||
|     j = provide message information as json; not just the text | ||||
|     c3 = mute all output | ||||
|     t3600 = timeout and kill download after 1 hour | ||||
| """ | ||||
|  | ||||
|  | ||||
def main():
    """Download a POSTed URL into the volume, leaving marker files to show progress.

    argv[1] is the json message-info provided by copyparty (the "j" flag);
    its "txt" field is the URL and "ap" is the volume's filesystem path.
    """
    inf = json.loads(sys.argv[1])
    url = inf["txt"]
    if "://" not in url:
        url = "https://" + url

    # whitelist the scheme so a malicious message can't make wget
    # do something unexpected (file://, scp, ...)
    proto = url.split("://")[0].lower()
    if proto not in ("http", "https", "ftp", "ftps"):
        raise Exception("bad proto {}".format(proto))

    os.chdir(inf["ap"])

    # visible progress marker: an empty file named after the download
    name = url.split("?")[0].split("/")[-1]
    tfn = "-- DOWNLOADING " + name
    print(f"{tfn}\n", end="")
    open(tfn, "wb").close()

    # "--" stops wget from parsing the url as an option
    cmd = ["wget", "--trust-server-names", "-nv", "--", url]

    try:
        sp.check_call(cmd)
    except Exception:
        # fix: marker/message previously said "DONWLOAD"
        t = "-- FAILED TO DOWNLOAD " + name
        print(f"{t}\n", end="")
        open(t, "wb").close()

    # remove the in-progress marker (the failure marker, if any, stays)
    os.unlink(tfn)
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
							
								
								
									
										108
									
								
								bin/hooks/xiu-sha.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										108
									
								
								bin/hooks/xiu-sha.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,108 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import hashlib | ||||
| import json | ||||
| import sys | ||||
| from datetime import datetime | ||||
|  | ||||
|  | ||||
| _ = r""" | ||||
| this hook will produce a single sha512 file which | ||||
| covers all recent uploads (plus metadata comments) | ||||
|  | ||||
| use this with --xiu, which makes copyparty buffer | ||||
| uploads until server is idle, providing file infos | ||||
| on stdin (filepaths or json) | ||||
|  | ||||
| example usage as global config: | ||||
|     --xiu i5,j,bin/hooks/xiu-sha.py | ||||
|  | ||||
| example usage as a volflag (per-volume config): | ||||
|     -v srv/inc:inc:r:rw,ed:c,xiu=i5,j,bin/hooks/xiu-sha.py | ||||
|                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||||
|  | ||||
|     (share filesystem-path srv/inc as volume /inc, | ||||
|      readable by everyone, read-write for user 'ed', | ||||
|      running this plugin on batches of uploads with the params listed below) | ||||
|  | ||||
| parameters explained, | ||||
|     xiu = execute after uploads... | ||||
|     i5  = ...after volume has been idle for 5sec | ||||
|     j   = provide json instead of filepath list | ||||
|  | ||||
| note the "f" (fork) flag is not set, so this xiu | ||||
| will block other xiu hooks while it's running | ||||
| """ | ||||
|  | ||||
|  | ||||
try:
    from copyparty.util import fsenc
except:
    # standalone fallback: copyparty not importable, use paths as-is
    # (str paths are fine for open() below)
    def fsenc(p):
        return p
|  | ||||
|  | ||||
def humantime(ts):
    """Format a unix timestamp as a human-readable UTC string."""
    dt = datetime.utcfromtimestamp(ts)
    return dt.strftime("%Y-%m-%d %H:%M:%S")
|  | ||||
|  | ||||
| def find_files_root(inf): | ||||
|     di = 9000 | ||||
|     for f1, f2 in zip(inf, inf[1:]): | ||||
|         p1 = f1["ap"].replace("\\", "/").rsplit("/", 1)[0] | ||||
|         p2 = f2["ap"].replace("\\", "/").rsplit("/", 1)[0] | ||||
|         di = min(len(p1), len(p2), di) | ||||
|         di = next((i for i in range(di) if p1[i] != p2[i]), di) | ||||
|  | ||||
|     return di + 1 | ||||
|  | ||||
|  | ||||
def find_vol_root(inf):
    """Length of the volume-root prefix of the first upload's abspath.

    Derived by chopping the volume-relative path ("vp") off the end of
    the absolute path ("ap").
    """
    first = inf[0]
    prefix = first["ap"][: -len(first["vp"])]
    return len(prefix)
|  | ||||
|  | ||||
def main():
    # file infos arrive as one json document on stdin (--xiu with the "j" flag)
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    inf = json.loads(zs)

    # root directory (where to put the sha512 file);
    # di = find_files_root(inf)  # next to the file closest to volume root
    di = find_vol_root(inf)  # top of the entire volume

    ret = []
    total_sz = 0
    for md in inf:
        ap = md["ap"]  # absolute filesystem path of the upload
        rp = ap[di:]  # path relative to the chosen root
        total_sz += md["sz"]
        fsize = "{:,}".format(md["sz"])
        mtime = humantime(md["mt"])  # file last-modified time
        up_ts = humantime(md["at"])  # time the upload completed

        # sha512 of the file contents, read in 512 KiB chunks
        h = hashlib.sha512()
        with open(fsenc(md["ap"]), "rb", 512 * 1024) as f:
            while True:
                buf = f.read(512 * 1024)
                if not buf:
                    break

                h.update(buf)

        cksum = h.hexdigest()
        # "wark" is copyparty's own content-hash id for the upload
        meta = " | ".join([md["wark"], up_ts, mtime, fsize, md["ip"]])
        # sha512sum-compatible line, preceded by a metadata comment
        ret.append("# {}\n{} *{}".format(meta, cksum, rp))

    ret.append("# {} files, {} bytes total".format(len(inf), total_sz))
    ret.append("")
    # one checksum file per batch, named by the current UTC time
    ftime = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")
    fp = "{}xfer-{}.sha512".format(inf[0]["ap"][:di], ftime)
    with open(fsenc(fp), "wb") as f:
        f.write("\n".join(ret).encode("utf-8", "replace"))

    print("wrote checksums to {}".format(fp))
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
							
								
								
									
										50
									
								
								bin/hooks/xiu.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										50
									
								
								bin/hooks/xiu.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,50 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import json | ||||
| import sys | ||||
|  | ||||
|  | ||||
| _ = r""" | ||||
| this hook prints absolute filepaths + total size | ||||
|  | ||||
| use this with --xiu, which makes copyparty buffer | ||||
| uploads until server is idle, providing file infos | ||||
| on stdin (filepaths or json) | ||||
|  | ||||
| example usage as global config: | ||||
|     --xiu i1,j,bin/hooks/xiu.py | ||||
|  | ||||
| example usage as a volflag (per-volume config): | ||||
|     -v srv/inc:inc:r:rw,ed:c,xiu=i1,j,bin/hooks/xiu.py | ||||
|                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||||
|  | ||||
|     (share filesystem-path srv/inc as volume /inc, | ||||
|      readable by everyone, read-write for user 'ed', | ||||
|      running this plugin on batches of uploads with the params listed below) | ||||
|  | ||||
| parameters explained, | ||||
|     xiu = execute after uploads... | ||||
|     i1  = ...after volume has been idle for 1sec | ||||
|     j   = provide json instead of filepath list | ||||
|  | ||||
| note the "f" (fork) flag is not set, so this xiu | ||||
| will block other xiu hooks while it's running | ||||
| """ | ||||
|  | ||||
|  | ||||
def main():
    """Print each uploaded file's size and path, then a summary line.

    Upload infos arrive as a single json list on stdin
    (--xiu with the "j" flag).
    """
    raw = sys.stdin.buffer.read()
    uploads = json.loads(raw.decode("utf-8", "replace"))

    total = 0
    for item in uploads:
        size = item["sz"]
        total += size
        print("{:9} {}".format(size, item["ap"]))

    print("{} files, {} bytes total".format(len(uploads), total))
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
| @@ -1,5 +1,9 @@ | ||||
| standalone programs which take an audio file as argument | ||||
|  | ||||
| you may want to forget about all this fancy complicated stuff and just use [event hooks](../hooks/) instead (which doesn't need `-e2ts` or ffmpeg)  | ||||
|  | ||||
| ---- | ||||
|  | ||||
| **NOTE:** these all require `-e2ts` to be functional, meaning you need to do at least one of these: `apt install ffmpeg` or `pip3 install mutagen` | ||||
|  | ||||
| some of these rely on libraries which are not MIT-compatible | ||||
| @@ -17,6 +21,16 @@ these do not have any problematic dependencies at all: | ||||
| * [cksum.py](./cksum.py) computes various checksums | ||||
| * [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser) | ||||
| * [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty | ||||
|   * also available as an [event hook](../hooks/wget.py) | ||||
|  | ||||
|  | ||||
| ## dangerous plugins | ||||
|  | ||||
| plugins in this section should only be used with appropriate precautions: | ||||
|  | ||||
| * [very-bad-idea.py](./very-bad-idea.py) combined with [meadup.js](https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js) converts copyparty into a janky yet extremely flexible chromecast clone | ||||
|   * also adds a virtual keyboard by @steinuil to the basic-upload tab for comfy couch crowd control | ||||
|   * anything uploaded through the [android app](https://github.com/9001/party-up) (files or links) are executed on the server, meaning anyone can infect your PC with malware... so protect this with a password and keep it on a LAN! | ||||
|  | ||||
|  | ||||
| # dependencies | ||||
| @@ -26,7 +40,7 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ | ||||
| *alternatively* (or preferably) use packages from your distro instead, then you'll need at least these: | ||||
|  | ||||
| * from distro: `numpy vamp-plugin-sdk beatroot-vamp mixxx-keyfinder ffmpeg` | ||||
| * from pypy: `keyfinder vamp` | ||||
| * from pip: `keyfinder vamp` | ||||
|  | ||||
|  | ||||
| # usage from copyparty | ||||
| @@ -42,7 +56,7 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ | ||||
| * `mtp` modules will not run if a file has existing tags in the db, so clear out the tags with `-e2tsr` the first time you launch with new `mtp` options | ||||
|  | ||||
|  | ||||
| ## usage with volume-flags | ||||
| ## usage with volflags | ||||
|  | ||||
| instead of affecting all volumes, you can set the options for just one volume like so: | ||||
|  | ||||
|   | ||||
| @@ -16,6 +16,10 @@ dep: ffmpeg | ||||
| """ | ||||
|  | ||||
|  | ||||
| # save beat timestamps to ".beats/filename.txt" | ||||
| SAVE = False | ||||
|  | ||||
|  | ||||
| def det(tf): | ||||
|     # fmt: off | ||||
|     sp.check_call([ | ||||
| @@ -23,12 +27,11 @@ def det(tf): | ||||
|         b"-nostdin", | ||||
|         b"-hide_banner", | ||||
|         b"-v", b"fatal", | ||||
|         b"-ss", b"13", | ||||
|         b"-y", b"-i", fsenc(sys.argv[1]), | ||||
|         b"-map", b"0:a:0", | ||||
|         b"-ac", b"1", | ||||
|         b"-ar", b"22050", | ||||
|         b"-t", b"300", | ||||
|         b"-t", b"360", | ||||
|         b"-f", b"f32le", | ||||
|         fsenc(tf) | ||||
|     ]) | ||||
| @@ -47,10 +50,29 @@ def det(tf): | ||||
|             print(c["list"][0]["label"].split(" ")[0]) | ||||
|             return | ||||
|  | ||||
|         # throws if detection failed: | ||||
|         bpm = float(cl[-1]["timestamp"] - cl[1]["timestamp"]) | ||||
|         bpm = round(60 * ((len(cl) - 1) / bpm), 2) | ||||
|         print(f"{bpm:.2f}") | ||||
|     # throws if detection failed: | ||||
|     beats = [float(x["timestamp"]) for x in cl] | ||||
|     bds = [b - a for a, b in zip(beats, beats[1:])] | ||||
|     bds.sort() | ||||
|     n0 = int(len(bds) * 0.2) | ||||
|     n1 = int(len(bds) * 0.75) + 1 | ||||
|     bds = bds[n0:n1] | ||||
|     bpm = sum(bds) | ||||
|     bpm = round(60 * (len(bds) / bpm), 2) | ||||
|     print(f"{bpm:.2f}") | ||||
|  | ||||
|     if SAVE: | ||||
|         fdir, fname = os.path.split(sys.argv[1]) | ||||
|         bdir = os.path.join(fdir, ".beats") | ||||
|         try: | ||||
|             os.mkdir(fsenc(bdir)) | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
|         fp = os.path.join(bdir, fname) + ".txt" | ||||
|         with open(fsenc(fp), "wb") as f: | ||||
|             txt = "\n".join([f"{x:.2f}" for x in beats]) | ||||
|             f.write(txt.encode("utf-8")) | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|   | ||||
| @@ -17,7 +17,7 @@ except: | ||||
|  | ||||
| """ | ||||
| calculates various checksums for uploads, | ||||
| usage: -mtp crc32,md5,sha1,sha256b=bin/mtag/cksum.py | ||||
| usage: -mtp crc32,md5,sha1,sha256b=ad,bin/mtag/cksum.py | ||||
| """ | ||||
|  | ||||
|  | ||||
|   | ||||
							
								
								
									
										61
									
								
								bin/mtag/guestbook-read.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										61
									
								
								bin/mtag/guestbook-read.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,61 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| """ | ||||
| fetch latest msg from guestbook and return as tag | ||||
|  | ||||
| example copyparty config to use this: | ||||
|   --urlform save,get -vsrv/hello:hello:w:c,e2ts,mtp=guestbook=t10,ad,p,bin/mtag/guestbook-read.py:mte=+guestbook | ||||
|  | ||||
| explained: | ||||
|   for realpath srv/hello (served at /hello), write-only for everyone, | ||||
|   enable file analysis on upload (e2ts), | ||||
|   use mtp plugin "bin/mtag/guestbook-read.py" to provide metadata tag "guestbook", | ||||
|   do this on all uploads regardless of extension, | ||||
|   t10 = 10 seconds timeout for each download, | ||||
|   ad = parse file regardless if FFmpeg thinks it is audio or not | ||||
|   p = request upload info as json on stdin (need ip) | ||||
|   mte=+guestbook enabled indexing of that tag for this volume | ||||
|  | ||||
| PS: this requires e2ts to be functional, | ||||
|   meaning you need to do at least one of these: | ||||
|    * apt install ffmpeg | ||||
|    * pip3 install mutagen | ||||
| """ | ||||
|  | ||||
|  | ||||
| import json | ||||
| import os | ||||
| import sqlite3 | ||||
| import sys | ||||
|  | ||||
|  | ||||
| # set 0 to allow infinite msgs from one IP, | ||||
| # other values delete older messages to make space, | ||||
| # so 1 only keeps latest msg | ||||
| NUM_MSGS_TO_KEEP = 1 | ||||
|  | ||||
|  | ||||
def main():
    # argv[1] is the uploaded file; only its location matters here
    fp = os.path.abspath(sys.argv[1])
    fdir = os.path.dirname(fp)

    # upload info arrives as json on stdin (the "p" mtp flag)
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    md = json.loads(zs)

    # the uploader's ip decides whose guestbook entry to fetch
    ip = md["up_ip"]

    # can put the database inside `fdir` if you'd like,
    # by default it saves to PWD:
    # os.chdir(fdir)

    db = sqlite3.connect("guestbook.db3")
    with db:
        t = "select msg from gb where ip = ? order by ts desc"
        r = db.execute(t, (ip,)).fetchone()
        if r:
            # stdout becomes the "guestbook" tag value
            print(r[0])
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
							
								
								
									
										111
									
								
								bin/mtag/guestbook.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										111
									
								
								bin/mtag/guestbook.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,111 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| """ | ||||
| store messages from users in an sqlite database | ||||
| which can be read from another mtp for example | ||||
|  | ||||
| takes input from application/x-www-form-urlencoded POSTs, | ||||
| for example using the message/pager function on the website | ||||
|  | ||||
| example copyparty config to use this: | ||||
|   --urlform save,get -vsrv/hello:hello:w:c,e2ts,mtp=xgb=ebin,t10,ad,p,bin/mtag/guestbook.py:mte=+xgb | ||||
|  | ||||
| explained: | ||||
|   for realpath srv/hello (served at /hello), write-only for everyone, | ||||
|   enable file analysis on upload (e2ts), | ||||
|   use mtp plugin "bin/mtag/guestbook.py" to provide metadata tag "xgb", | ||||
|   do this on all uploads with the file extension "bin", | ||||
|   t10 = 10 seconds timeout for each download, | ||||
|   ad = parse file regardless if FFmpeg thinks it is audio or not | ||||
|   p = request upload info as json on stdin | ||||
|   mte=+xgb enabled indexing of that tag for this volume | ||||
|  | ||||
| PS: this requires e2ts to be functional, | ||||
|   meaning you need to do at least one of these: | ||||
|    * apt install ffmpeg | ||||
|    * pip3 install mutagen | ||||
| """ | ||||
|  | ||||
|  | ||||
| import json | ||||
| import os | ||||
| import sqlite3 | ||||
| import sys | ||||
| from urllib.parse import unquote_to_bytes as unquote | ||||
|  | ||||
|  | ||||
| # set 0 to allow infinite msgs from one IP, | ||||
| # other values delete older messages to make space, | ||||
| # so 1 only keeps latest msg | ||||
| NUM_MSGS_TO_KEEP = 1 | ||||
|  | ||||
|  | ||||
def main():
    # argv[1] is the file copyparty wrote the urlform POST body into
    fp = os.path.abspath(sys.argv[1])
    fdir = os.path.dirname(fp)
    fname = os.path.basename(fp)
    if not fname.startswith("put-") or not fname.endswith(".bin"):
        raise Exception("not a post file")

    # upload info (ip, timestamp) arrives as json on stdin (the "p" flag)
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    md = json.loads(zs)

    # read the form body; hard cap at 4 KiB to keep entries sane
    buf = b""
    with open(fp, "rb") as f:
        while True:
            b = f.read(4096)
            buf += b
            if len(buf) > 4096:
                raise Exception("too big")

            if not b:
                break

    if not buf:
        raise Exception("file is empty")

    # urldecode ("+" means space in x-www-form-urlencoded)
    buf = unquote(buf.replace(b"+", b" "))
    txt = buf.decode("utf-8")

    if not txt.startswith("msg="):
        raise Exception("does not start with msg=")

    ip = md["up_ip"]
    ts = md["up_at"]
    txt = txt[4:]  # strip the "msg=" field name

    # can put the database inside `fdir` if you'd like,
    # by default it saves to PWD:
    # os.chdir(fdir)

    db = sqlite3.connect("guestbook.db3")
    try:
        # probe for the table; create the schema on first run
        db.execute("select 1 from gb").fetchone()
    except:
        with db:
            db.execute("create table gb (ip text, ts real, msg text)")
            db.execute("create index gb_ip on gb(ip)")

    with db:
        if NUM_MSGS_TO_KEEP == 1:
            # common case: drop all previous messages from this ip
            t = "delete from gb where ip = ?"
            db.execute(t, (ip,))

        t = "insert into gb values (?,?,?)"
        db.execute(t, (ip, ts, txt))

        if NUM_MSGS_TO_KEEP > 1:
            # keep only the newest N messages from this ip
            t = "select ts from gb where ip = ? order by ts desc"
            hits = db.execute(t, (ip,)).fetchall()

            if len(hits) > NUM_MSGS_TO_KEEP:
                lim = hits[NUM_MSGS_TO_KEEP][0]
                t = "delete from gb where ip = ? and ts <= ?"
                db.execute(t, (ip, lim))

    # echo the message back; becomes the "xgb" tag value
    print(txt)
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
| @@ -43,7 +43,6 @@ PS: this requires e2ts to be functional, | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| import time | ||||
| import filecmp | ||||
| import subprocess as sp | ||||
|  | ||||
| @@ -62,7 +61,7 @@ def main(): | ||||
|  | ||||
|     os.chdir(cwd) | ||||
|     f1 = fsenc(fn) | ||||
|     f2 = os.path.join(b"noexif", f1) | ||||
|     f2 = fsenc(os.path.join(b"noexif", fn)) | ||||
|     cmd = [ | ||||
|         b"exiftool", | ||||
|         b"-exif:all=", | ||||
| @@ -90,4 +89,7 @@ def main(): | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
|     try: | ||||
|         main() | ||||
|     except: | ||||
|         pass | ||||
|   | ||||
| @@ -6,6 +6,7 @@ set -e | ||||
| # | ||||
| # linux/alpine: requires gcc g++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-dev py3-{wheel,pip} py3-numpy{,-dev} | ||||
| # linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3,libsndfile1}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake | ||||
| # linux/fedora: requires gcc gcc-c++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-devel python3-numpy vamp-plugin-sdk qm-vamp-plugins | ||||
| # win64: requires msys2-mingw64 environment | ||||
| # macos: requires macports | ||||
| # | ||||
| @@ -56,6 +57,7 @@ hash -r | ||||
| 	command -v python3 && pybin=python3 || pybin=python | ||||
| } | ||||
|  | ||||
| $pybin -c 'import numpy' || | ||||
| $pybin -m pip install --user numpy | ||||
|  | ||||
|  | ||||
| @@ -160,12 +162,12 @@ install_keyfinder() { | ||||
| 	 | ||||
| 	h="$HOME" | ||||
| 	so="lib/libkeyfinder.so" | ||||
| 	memes=() | ||||
| 	memes=(-DBUILD_TESTING=OFF) | ||||
|  | ||||
| 	[ $win ] && | ||||
| 		so="bin/libkeyfinder.dll" && | ||||
| 		h="$(printf '%s\n' "$USERPROFILE" | tr '\\' '/')" && | ||||
| 		memes+=(-G "MinGW Makefiles" -DBUILD_TESTING=OFF) | ||||
| 		memes+=(-G "MinGW Makefiles") | ||||
| 	 | ||||
| 	[ $mac ] && | ||||
| 		so="lib/libkeyfinder.dylib" | ||||
| @@ -185,7 +187,7 @@ install_keyfinder() { | ||||
| 	} | ||||
| 	 | ||||
| 	# rm -rf /Users/ed/Library/Python/3.9/lib/python/site-packages/*keyfinder* | ||||
| 	CFLAGS="-I$h/pe/keyfinder/include -I/opt/local/include" \ | ||||
| 	CFLAGS="-I$h/pe/keyfinder/include -I/opt/local/include -I/usr/include/ffmpeg" \ | ||||
| 	LDFLAGS="-L$h/pe/keyfinder/lib -L$h/pe/keyfinder/lib64 -L/opt/local/lib" \ | ||||
| 	PKG_CONFIG_PATH=/c/msys64/mingw64/lib/pkgconfig \ | ||||
| 	$pybin -m pip install --user keyfinder | ||||
| @@ -223,7 +225,7 @@ install_vamp() { | ||||
| 	$pybin -m pip install --user vamp | ||||
|  | ||||
| 	cd "$td" | ||||
| 	echo '#include <vamp-sdk/Plugin.h>' | gcc -x c -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || { | ||||
| 	echo '#include <vamp-sdk/Plugin.h>' | g++ -x c++ -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || { | ||||
| 		printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n' | ||||
| 		(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2588/vamp-plugin-sdk-2.9.0.tar.gz) | ||||
| 		sha512sum -c <( | ||||
|   | ||||
							
								
								
									
										38
									
								
								bin/mtag/mousepad.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										38
									
								
								bin/mtag/mousepad.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,38 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| import subprocess as sp | ||||
|  | ||||
|  | ||||
| """ | ||||
| mtp test -- opens a texteditor | ||||
|  | ||||
| usage: | ||||
|   -vsrv/v1:v1:r:c,mte=+x1:c,mtp=x1=ad,p,bin/mtag/mousepad.py | ||||
|  | ||||
| explained: | ||||
|   c,mte: list of tags to index in this volume | ||||
|   c,mtp: add new tag provider | ||||
|      x1: dummy tag to provide | ||||
|      ad: dontcare if audio or not | ||||
|       p: priority 1 (run after initial tag-scan with ffprobe or mutagen) | ||||
| """ | ||||
|  | ||||
|  | ||||
def main():
    """Open a text editor on the local X display, showing either the
    uploaded file or the tags piped in on stdin (`oth_tags`)."""
    env = os.environ.copy()
    env["DISPLAY"] = ":0.0"  # target the local X session

    if False:
        # open the uploaded file
        fp = sys.argv[-1]
    else:
        # display stdin contents (`oth_tags`)
        fp = "/dev/stdin"

    # fix: env was built but never passed to Popen, so the DISPLAY
    # override silently had no effect
    p = sp.Popen(["/usr/bin/mousepad", fp], env=env)
    p.communicate()


# guard so importing this module doesn't launch an editor
if __name__ == "__main__":
    main()
							
								
								
									
										76
									
								
								bin/mtag/rclone-upload.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										76
									
								
								bin/mtag/rclone-upload.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,76 @@ | ||||
| #!/usr/bin/env python | ||||
|  | ||||
| import json | ||||
| import os | ||||
| import subprocess as sp | ||||
| import sys | ||||
| import time | ||||
|  | ||||
try:
    from copyparty.util import fsenc
except:
    # standalone fallback: copyparty not importable,
    # encode paths to bytes ourselves
    def fsenc(p):
        return p.encode("utf-8")
|  | ||||
|  | ||||
| _ = r""" | ||||
| first checks the tag "vidchk" which must be "ok" to continue, | ||||
| then uploads all files to some cloud storage (RCLONE_REMOTE) | ||||
| and DELETES THE ORIGINAL FILES if rclone returns 0 ("success") | ||||
|  | ||||
| deps: | ||||
|   rclone | ||||
|  | ||||
| usage: | ||||
|   -mtp x2=t43200,ay,p2,bin/mtag/rclone-upload.py | ||||
|  | ||||
| explained: | ||||
| t43200: timeout 12h | ||||
|     ay: only process files which contain audio (including video with audio) | ||||
|     p2: set priority 2 (after vidchk's suggested priority of 1), | ||||
|           so the output of vidchk will be passed in here | ||||
|  | ||||
| complete usage example as vflags along with vidchk: | ||||
|   -vsrv/vidchk:vidchk:r:rw,ed:c,e2dsa,e2ts,mtp=vidchk=t600,p,bin/mtag/vidchk.py:c,mtp=rupload=t43200,ay,p2,bin/mtag/rclone-upload.py:c,mte=+vidchk,rupload | ||||
|  | ||||
| setup: see https://rclone.org/drive/ | ||||
|  | ||||
| if you wanna use this script standalone / separately from copyparty, | ||||
| either set CONDITIONAL_UPLOAD False or provide the following stdin: | ||||
|   {"vidchk":"ok"} | ||||
| """ | ||||
|  | ||||
|  | ||||
| RCLONE_REMOTE = "notmybox" | ||||
| CONDITIONAL_UPLOAD = True | ||||
|  | ||||
|  | ||||
def main():
    fp = sys.argv[1]  # absolute path of the uploaded file
    if CONDITIONAL_UPLOAD:
        # tags from earlier mtp stages arrive as json on stdin;
        # bail out unless vidchk approved the file
        zb = sys.stdin.buffer.read()
        zs = zb.decode("utf-8", "replace")
        md = json.loads(zs)

        chk = md.get("vidchk", None)
        if chk != "ok":
            print(f"vidchk={chk}", file=sys.stderr)
            sys.exit(1)

    # "--" stops rclone from parsing the filename as a flag
    dst = f"{RCLONE_REMOTE}:".encode("utf-8")
    cmd = [b"rclone", b"copy", b"--", fsenc(fp), dst]

    t0 = time.time()
    try:
        sp.check_call(cmd)
    except:
        print("rclone failed", file=sys.stderr)
        sys.exit(1)

    # upload succeeded; report duration and DELETE the local copy
    print(f"{time.time() - t0:.1f} sec")
    os.unlink(fsenc(fp))
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
| @@ -1,6 +1,11 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| """ | ||||
| WARNING -- DANGEROUS PLUGIN -- | ||||
|   if someone is able to upload files to a copyparty which is | ||||
|   running this plugin, they can execute malware on your machine | ||||
|   so please keep this on a LAN and protect it with a password | ||||
|  | ||||
| use copyparty as a chromecast replacement: | ||||
|   * post a URL and it will open in the default browser | ||||
|   * upload a file and it will open in the default application | ||||
| @@ -10,16 +15,17 @@ use copyparty as a chromecast replacement: | ||||
|  | ||||
| the android app makes it a breeze to post pics and links: | ||||
|   https://github.com/9001/party-up/releases | ||||
|   (iOS devices have to rely on the web-UI) | ||||
|  | ||||
| goes without saying, but this is HELLA DANGEROUS, | ||||
|   GIVES RCE TO ANYONE WHO HAVE UPLOAD PERMISSIONS | ||||
| iOS devices can use the web-UI or the shortcut instead: | ||||
|   https://github.com/9001/copyparty#ios-shortcuts | ||||
|  | ||||
| example copyparty config to use this: | ||||
|   --urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,bin/mtag/very-bad-idea.py | ||||
| example copyparty config to use this; | ||||
| lets the user "kevin" with password "hunter2" use this plugin: | ||||
|   -a kevin:hunter2 --urlform save,get -v.::w,kevin:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,c0,bin/mtag/very-bad-idea.py | ||||
|  | ||||
| recommended deps: | ||||
|   apt install xdotool libnotify-bin | ||||
|   apt install xdotool libnotify-bin mpv | ||||
|   python3 -m pip install --user -U streamlink yt-dlp | ||||
|   https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js | ||||
|  | ||||
| and you probably want `twitter-unmute.user.js` from the res folder | ||||
| @@ -63,8 +69,10 @@ set -e | ||||
| EOF | ||||
| chmod 755 /usr/local/bin/chromium-browser | ||||
|  | ||||
| # start the server  (note: replace `-v.::rw:` with `-v.::r:` to disallow retrieving uploaded stuff) | ||||
| cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,very-bad-idea.py | ||||
| # start the server | ||||
| # note 1: replace hunter2 with a better password to access the server | ||||
| # note 2: replace `-v.::rw` with `-v.::w` to disallow retrieving uploaded stuff | ||||
| cd ~/Downloads; python3 copyparty-sfx.py -a kevin:hunter2 --urlform save,get -v.::rw,kevin:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,very-bad-idea.py | ||||
|  | ||||
| """ | ||||
|  | ||||
| @@ -72,11 +80,23 @@ cd ~/Downloads; python3 copyparty-sfx.py --urlform save,get -v.::rw:c,e2d,e2t,mt | ||||
| import os | ||||
| import sys | ||||
| import time | ||||
| import shutil | ||||
| import subprocess as sp | ||||
| from urllib.parse import unquote_to_bytes as unquote | ||||
| from urllib.parse import quote | ||||
|  | ||||
| have_mpv = shutil.which("mpv") | ||||
| have_vlc = shutil.which("vlc") | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|     if len(sys.argv) > 2 and sys.argv[1] == "x": | ||||
|         # invoked on commandline for testing; | ||||
|         # python3 very-bad-idea.py x msg=https://youtu.be/dQw4w9WgXcQ | ||||
|         txt = " ".join(sys.argv[2:]) | ||||
|         txt = quote(txt.replace(" ", "+")) | ||||
|         return open_post(txt.encode("utf-8")) | ||||
|  | ||||
|     fp = os.path.abspath(sys.argv[1]) | ||||
|     with open(fp, "rb") as f: | ||||
|         txt = f.read(4096) | ||||
| @@ -92,7 +112,7 @@ def open_post(txt): | ||||
|     try: | ||||
|         k, v = txt.split(" ", 1) | ||||
|     except: | ||||
|         open_url(txt) | ||||
|         return open_url(txt) | ||||
|  | ||||
|     if k == "key": | ||||
|         sp.call(["xdotool", "key"] + v.split(" ")) | ||||
| @@ -128,6 +148,17 @@ def open_url(txt): | ||||
|     # else: | ||||
|     #    sp.call(["xdotool", "getactivewindow", "windowminimize"])  # minimizes the focused window | ||||
|  | ||||
|     # mpv is probably smart enough to use streamlink automatically | ||||
|     if try_mpv(txt): | ||||
|         print("mpv got it") | ||||
|         return | ||||
|  | ||||
|     # or maybe streamlink would be a good choice to open this | ||||
|     if try_streamlink(txt): | ||||
|         print("streamlink got it") | ||||
|         return | ||||
|  | ||||
|     # nope, | ||||
|     # close any error messages: | ||||
|     sp.call(["xdotool", "search", "--name", "Error", "windowclose"]) | ||||
|     # sp.call(["xdotool", "key", "ctrl+alt+d"])  # doesnt work at all | ||||
| @@ -136,4 +167,39 @@ def open_url(txt): | ||||
|     sp.call(["xdg-open", txt]) | ||||
|  | ||||
|  | ||||
def try_mpv(url):
    """Attempt to play `url` fullscreen with mpv.

    Returns True if mpv exited cleanly, or if it ran for more than
    15 seconds before dying (assumed: it played fine and was then
    closed by the user).  Returns False if mpv is not installed or
    failed quickly (e.g. unsupported url).
    """
    if not shutil.which("mpv"):
        # mpv is not installed; don't waste time spawning it just to
        # catch the FileNotFoundError below (file imports shutil already)
        return False

    t0 = time.time()
    try:
        print("trying mpv...")
        sp.check_call(["mpv", "--fs", url])
        return True
    except:
        # nonzero exit / crash; if it ran for 15 sec it probably
        # succeeded and terminated, so count that as a win
        return time.time() - t0 > 15
|  | ||||
|  | ||||
def try_streamlink(url):
    """Hand `url` to streamlink, playing through mpv when available.

    Returns True if streamlink recognized the url and the player
    exited cleanly, or if playback ran for more than 10 seconds
    before failing (assumed successful).  False otherwise.
    """
    started = time.time()
    try:
        import streamlink

        print("trying streamlink...")
        # raises if streamlink has no plugin for this url;
        # cheaper than spawning a subprocess that would just fail
        streamlink.Streamlink().resolve_url(url)

        argv = [sys.executable, "-m", "streamlink"]
        if have_mpv:
            argv += ["-p", "mpv", "-a", "--fs"]
        argv += [url, "best"]

        started = time.time()
        sp.check_call(argv)
        return True
    except:
        # if it ran for 10 sec it probably succeeded and terminated
        return time.time() - started > 10
|  | ||||
|  | ||||
| main() | ||||
|   | ||||
							
								
								
									
										131
									
								
								bin/mtag/vidchk.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										131
									
								
								bin/mtag/vidchk.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,131 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import json | ||||
| import re | ||||
| import os | ||||
| import sys | ||||
| import subprocess as sp | ||||
|  | ||||
# prefer copyparty's filesystem-encoding helper when running as a plugin;
# fall back to plain utf-8 when this script is used standalone
try:
    from copyparty.util import fsenc
except:

    def fsenc(p):
        # type: (str) -> bytes
        """Encode a filesystem path to bytes (utf-8 fallback)."""
        return p.encode("utf-8")
|  | ||||
|  | ||||
| _ = r""" | ||||
| inspects video files for errors and such | ||||
| plus stores a bunch of metadata to filename.ff.json | ||||
|  | ||||
| usage: | ||||
|   -mtp vidchk=t600,ay,p,bin/mtag/vidchk.py | ||||
|  | ||||
| explained: | ||||
| t600: timeout 10min | ||||
|   ay: only process files which contain audio (including video with audio) | ||||
|    p: set priority 1 (lowest priority after initial ffprobe/mutagen for base tags), | ||||
|        makes copyparty feed base tags into this script as json | ||||
|  | ||||
| if you wanna use this script standalone / separately from copyparty, | ||||
| provide the video resolution on stdin as json:  {"res":"1920x1080"} | ||||
| """ | ||||
|  | ||||
|  | ||||
FAST = True  # parse entire file at container level
# FAST = False  # fully decode audio and video streams


# ffmpeg/ffprobe warnings which are known to be benign
harmless = re.compile(
    r"Unsupported codec with id |Could not find codec parameters.*Attachment:|analyzeduration"
    + r"|timescale not set"
)


def wfilter(lines):
    """Drop blank lines and any warning matched by `harmless`."""
    kept = []
    for ln in lines:
        if ln.strip() and not harmless.search(ln):
            kept.append(ln)
    return kept
|  | ||||
|  | ||||
def errchk(so, se, rc, dbg):
    """Summarize an ffmpeg/ffprobe run.

    so/se: captured stdout/stderr (bytes), rc: exit code,
    dbg: optional path to dump the raw output to.
    Returns an "ERROR ..."/"Warning ..." string, or None if clean.
    """
    if dbg:
        # keep the raw output around for manual inspection
        with open(dbg, "wb") as f:
            f.write(b"so:\n" + so + b"\nse:\n" + se + b"\n")

    if rc:
        lines = (so + se).decode("utf-8", "replace").split("\n", 1)
        lines = wfilter(lines) or lines
        return f"ERROR {rc}: {lines[0]}"

    if se:
        lines = wfilter(se.decode("utf-8", "replace").split("\n", 1))
        if lines:
            return f"Warning: {lines[0]}"

    return None
|  | ||||
|  | ||||
def main():
    """Validate the uploaded video file given as argv[1].

    Reads base tags (json) from stdin, probes the file with ffprobe
    (metadata saved next to the file as <name>.ff.json), then runs the
    whole file through ffmpeg with aggressive error detection.
    Returns an error/warning string, or None if the file looks fine.
    """
    fp = sys.argv[1]
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    md = json.loads(zs)  # base tags from copyparty (or manual stdin)

    # skip folders that have already been handled by other tooling
    fdir = os.path.dirname(os.path.realpath(fp))
    flag = os.path.join(fdir, ".processed")
    if os.path.exists(flag):
        return "already processed"

    try:
        w, h = [int(x) for x in md["res"].split("x")]
        if not w + h:
            raise Exception()
    except:
        return "could not determine resolution"

    # grab streams/format metadata + 2 seconds of frames at the start and end
    zs = "ffprobe -hide_banner -v warning -of json -show_streams -show_format -show_packets -show_data_hash crc32 -read_intervals %+2,999999%+2"
    cmd = zs.encode("ascii").split(b" ") + [fsenc(fp)]
    p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
    so, se = p.communicate()

    # spaces to tabs, drops filesize from 69k to 48k
    so = b"\n".join(
        [
            b"\t" * int((len(x) - len(x.lstrip())) / 4) + x.lstrip()
            for x in (so or b"").split(b"\n")
        ]
    )
    with open(fsenc(f"{fp}.ff.json"), "wb") as f:
        f.write(so)

    err = errchk(so, se, p.returncode, f"{fp}.vidchk")
    if err:
        return err

    if max(w, h) < 1280 and min(w, h) < 720:
        return "resolution too small"

    # decode (or, with FAST, just remux) the entire file through ffmpeg
    # with strict error detection; any complaint fails the check
    zs = (
        "ffmpeg -y -hide_banner -nostdin -v warning"
        + " -err_detect +crccheck+bitstream+buffer+careful+compliant+aggressive+explode"
        + " -xerror -i"
    )

    cmd = zs.encode("ascii").split(b" ") + [fsenc(fp)]

    if FAST:
        zs = "-c copy -f null -"
    else:
        zs = "-vcodec rawvideo -acodec pcm_s16le -f null -"

    cmd += zs.encode("ascii").split(b" ")

    p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
    so, se = p.communicate()
    return errchk(so, se, p.returncode, f"{fp}.vidchk")


if __name__ == "__main__":
    print(main() or "ok")
| @@ -1,6 +1,11 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| """ | ||||
| DEPRECATED -- replaced by event hooks; | ||||
| https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py | ||||
|  | ||||
| --- | ||||
|  | ||||
| use copyparty as a file downloader by POSTing URLs as | ||||
| application/x-www-form-urlencoded (for example using the | ||||
| message/pager function on the website) | ||||
| @@ -60,6 +65,10 @@ def main(): | ||||
|     if "://" not in url: | ||||
|         url = "https://" + url | ||||
|  | ||||
|     proto = url.split("://")[0].lower() | ||||
|     if proto not in ("http", "https", "ftp", "ftps"): | ||||
|         raise Exception("bad proto {}".format(proto)) | ||||
|  | ||||
|     os.chdir(fdir) | ||||
|  | ||||
|     name = url.split("?")[0].split("/")[-1] | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| #!/usr/bin/env python3 | ||||
| from __future__ import print_function, unicode_literals | ||||
| 
 | ||||
| """copyparty-fuse-streaming: remote copyparty as a local filesystem""" | ||||
| """partyfuse-streaming: remote copyparty as a local filesystem""" | ||||
| __author__ = "ed <copyparty@ocv.me>" | ||||
| __copyright__ = 2020 | ||||
| __license__ = "MIT" | ||||
| @@ -12,7 +12,7 @@ __url__ = "https://github.com/9001/copyparty/" | ||||
| mount a copyparty server (local or remote) as a filesystem | ||||
| 
 | ||||
| usage: | ||||
|   python copyparty-fuse-streaming.py http://192.168.1.69:3923/  ./music | ||||
|   python partyfuse-streaming.py http://192.168.1.69:3923/  ./music | ||||
| 
 | ||||
| dependencies: | ||||
|   python3 -m pip install --user fusepy | ||||
| @@ -21,7 +21,7 @@ dependencies: | ||||
|   + on Windows: https://github.com/billziss-gh/winfsp/releases/latest | ||||
| 
 | ||||
| this was a mistake: | ||||
|   fork of copyparty-fuse.py with a streaming cache rather than readahead, | ||||
|   fork of partyfuse.py with a streaming cache rather than readahead, | ||||
|   thought this was gonna be way faster (and it kind of is) | ||||
|   except the overhead of reopening connections on trunc totally kills it | ||||
| """ | ||||
| @@ -42,6 +42,7 @@ import threading | ||||
| import traceback | ||||
| import http.client  # py2: httplib | ||||
| import urllib.parse | ||||
| import calendar | ||||
| from datetime import datetime | ||||
| from urllib.parse import quote_from_bytes as quote | ||||
| from urllib.parse import unquote_to_bytes as unquote | ||||
| @@ -61,12 +62,12 @@ except: | ||||
|     else: | ||||
|         libfuse = "apt install libfuse\n    modprobe fuse" | ||||
| 
 | ||||
|     print( | ||||
|         "\n  could not import fuse; these may help:" | ||||
|         + "\n    python3 -m pip install --user fusepy\n    " | ||||
|         + libfuse | ||||
|         + "\n" | ||||
|     ) | ||||
|     m = """\033[33m | ||||
|   could not import fuse; these may help: | ||||
|     {} -m pip install --user fusepy | ||||
|     {} | ||||
| \033[0m""" | ||||
|     print(m.format(sys.executable, libfuse)) | ||||
|     raise | ||||
| 
 | ||||
| 
 | ||||
| @@ -153,7 +154,7 @@ def dewin(txt): | ||||
| class RecentLog(object): | ||||
|     def __init__(self): | ||||
|         self.mtx = threading.Lock() | ||||
|         self.f = None  # open("copyparty-fuse.log", "wb") | ||||
|         self.f = None  # open("partyfuse.log", "wb") | ||||
|         self.q = [] | ||||
| 
 | ||||
|         thr = threading.Thread(target=self.printer) | ||||
| @@ -184,9 +185,9 @@ class RecentLog(object): | ||||
|             print("".join(q), end="") | ||||
| 
 | ||||
| 
 | ||||
| # [windows/cmd/cpy3]  python dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/cmd/msys2] C:\msys64\mingw64\bin\python3 dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/mty/msys2] /mingw64/bin/python3 /c/Users/ed/dev/copyparty/bin/copyparty-fuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/cmd/cpy3]  python dev\copyparty\bin\partyfuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/cmd/msys2] C:\msys64\mingw64\bin\python3 dev\copyparty\bin\partyfuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/mty/msys2] /mingw64/bin/python3 /c/Users/ed/dev/copyparty/bin/partyfuse.py q: http://192.168.1.159:1234/ | ||||
| # | ||||
| # [windows] find /q/music/albums/Phant*24bit -printf '%s %p\n' | sort -n | tail -n 8 | sed -r 's/^[0-9]+ //' | while IFS= read -r x; do dd if="$x" of=/dev/null bs=4k count=8192 & done | ||||
| # [alpine]  ll t; for x in t/2020_0724_16{2,3}*; do dd if="$x" of=/dev/null bs=4k count=10240 & done | ||||
| @@ -495,7 +496,7 @@ class Gateway(object): | ||||
|                 ts = 60 * 60 * 24 * 2 | ||||
|                 try: | ||||
|                     sz = int(fsize) | ||||
|                     ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp() | ||||
|                     ts = calendar.timegm(time.strptime(fdate, "%Y-%m-%d %H:%M:%S")) | ||||
|                 except: | ||||
|                     info("bad HTML or OS [{}] [{}]".format(fdate, fsize)) | ||||
|                     # python cannot strptime(1959-01-01) on windows | ||||
| @@ -1,7 +1,7 @@ | ||||
| #!/usr/bin/env python3 | ||||
| from __future__ import print_function, unicode_literals | ||||
| 
 | ||||
| """copyparty-fuse: remote copyparty as a local filesystem""" | ||||
| """partyfuse: remote copyparty as a local filesystem""" | ||||
| __author__ = "ed <copyparty@ocv.me>" | ||||
| __copyright__ = 2019 | ||||
| __license__ = "MIT" | ||||
| @@ -12,7 +12,7 @@ __url__ = "https://github.com/9001/copyparty/" | ||||
| mount a copyparty server (local or remote) as a filesystem | ||||
| 
 | ||||
| usage: | ||||
|   python copyparty-fuse.py http://192.168.1.69:3923/  ./music | ||||
|   python partyfuse.py http://192.168.1.69:3923/  ./music | ||||
| 
 | ||||
| dependencies: | ||||
|   python3 -m pip install --user fusepy | ||||
| @@ -45,6 +45,7 @@ import threading | ||||
| import traceback | ||||
| import http.client  # py2: httplib | ||||
| import urllib.parse | ||||
| import calendar | ||||
| from datetime import datetime | ||||
| from urllib.parse import quote_from_bytes as quote | ||||
| from urllib.parse import unquote_to_bytes as unquote | ||||
| @@ -73,12 +74,12 @@ except: | ||||
|     else: | ||||
|         libfuse = "apt install libfuse3-3\n    modprobe fuse" | ||||
| 
 | ||||
|     print( | ||||
|         "\n  could not import fuse; these may help:" | ||||
|         + "\n    python3 -m pip install --user fusepy\n    " | ||||
|         + libfuse | ||||
|         + "\n" | ||||
|     ) | ||||
|     m = """\033[33m | ||||
|   could not import fuse; these may help: | ||||
|     {} -m pip install --user fusepy | ||||
|     {} | ||||
| \033[0m""" | ||||
|     print(m.format(sys.executable, libfuse)) | ||||
|     raise | ||||
| 
 | ||||
| 
 | ||||
| @@ -165,7 +166,7 @@ def dewin(txt): | ||||
| class RecentLog(object): | ||||
|     def __init__(self): | ||||
|         self.mtx = threading.Lock() | ||||
|         self.f = None  # open("copyparty-fuse.log", "wb") | ||||
|         self.f = None  # open("partyfuse.log", "wb") | ||||
|         self.q = [] | ||||
| 
 | ||||
|         thr = threading.Thread(target=self.printer) | ||||
| @@ -196,9 +197,9 @@ class RecentLog(object): | ||||
|             print("".join(q), end="") | ||||
| 
 | ||||
| 
 | ||||
| # [windows/cmd/cpy3]  python dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/cmd/msys2] C:\msys64\mingw64\bin\python3 dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/mty/msys2] /mingw64/bin/python3 /c/Users/ed/dev/copyparty/bin/copyparty-fuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/cmd/cpy3]  python dev\copyparty\bin\partyfuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/cmd/msys2] C:\msys64\mingw64\bin\python3 dev\copyparty\bin\partyfuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/mty/msys2] /mingw64/bin/python3 /c/Users/ed/dev/copyparty/bin/partyfuse.py q: http://192.168.1.159:1234/ | ||||
| # | ||||
| # [windows] find /q/music/albums/Phant*24bit -printf '%s %p\n' | sort -n | tail -n 8 | sed -r 's/^[0-9]+ //' | while IFS= read -r x; do dd if="$x" of=/dev/null bs=4k count=8192 & done | ||||
| # [alpine]  ll t; for x in t/2020_0724_16{2,3}*; do dd if="$x" of=/dev/null bs=4k count=10240 & done | ||||
| @@ -443,7 +444,7 @@ class Gateway(object): | ||||
|                 ts = 60 * 60 * 24 * 2 | ||||
|                 try: | ||||
|                     sz = int(fsize) | ||||
|                     ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp() | ||||
|                     ts = calendar.timegm(time.strptime(fdate, "%Y-%m-%d %H:%M:%S")) | ||||
|                 except: | ||||
|                     info("bad HTML or OS [{}] [{}]".format(fdate, fsize)) | ||||
|                     # python cannot strptime(1959-01-01) on windows | ||||
| @@ -996,7 +997,7 @@ def main(): | ||||
|     ap.add_argument( | ||||
|         "-cf", metavar="NUM_BLOCKS", type=int, default=nf, help="file cache" | ||||
|     ) | ||||
|     ap.add_argument("-a", metavar="PASSWORD", help="password") | ||||
|     ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath") | ||||
|     ap.add_argument("-d", action="store_true", help="enable debug") | ||||
|     ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify") | ||||
|     ap.add_argument("-td", action="store_true", help="disable certificate check") | ||||
| @@ -1,7 +1,7 @@ | ||||
| #!/usr/bin/env python3 | ||||
| from __future__ import print_function, unicode_literals | ||||
| 
 | ||||
| """copyparty-fuseb: remote copyparty as a local filesystem""" | ||||
| """partyfuse2: remote copyparty as a local filesystem""" | ||||
| __author__ = "ed <copyparty@ocv.me>" | ||||
| __copyright__ = 2020 | ||||
| __license__ = "MIT" | ||||
| @@ -32,9 +32,19 @@ try: | ||||
|     if not hasattr(fuse, "__version__"): | ||||
|         raise Exception("your fuse-python is way old") | ||||
| except: | ||||
|     print( | ||||
|         "\n  could not import fuse; these may help:\n    python3 -m pip install --user fuse-python\n    apt install libfuse\n    modprobe fuse\n" | ||||
|     ) | ||||
|     if WINDOWS: | ||||
|         libfuse = "install https://github.com/billziss-gh/winfsp/releases/latest" | ||||
|     elif MACOS: | ||||
|         libfuse = "install https://osxfuse.github.io/" | ||||
|     else: | ||||
|         libfuse = "apt install libfuse\n    modprobe fuse" | ||||
| 
 | ||||
|     m = """\033[33m | ||||
|   could not import fuse; these may help: | ||||
|     {} -m pip install --user fuse-python | ||||
|     {} | ||||
| \033[0m""" | ||||
|     print(m.format(sys.executable, libfuse)) | ||||
|     raise | ||||
| 
 | ||||
| 
 | ||||
| @@ -42,13 +52,13 @@ except: | ||||
| mount a copyparty server (local or remote) as a filesystem | ||||
| 
 | ||||
| usage: | ||||
|   python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas | ||||
|   python ./partyfuse2.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas | ||||
| 
 | ||||
| dependencies: | ||||
|   sudo apk add fuse-dev python3-dev | ||||
|   python3 -m pip install --user fuse-python | ||||
| 
 | ||||
| fork of copyparty-fuse.py based on fuse-python which | ||||
| fork of partyfuse.py based on fuse-python which | ||||
|   appears to be more compliant than fusepy? since this works with samba | ||||
|     (probably just my garbage code tbh) | ||||
| """ | ||||
| @@ -639,7 +649,7 @@ def main(): | ||||
|         print("  need argument: mount-path") | ||||
|         print("example:") | ||||
|         print( | ||||
|             "  ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas" | ||||
|             "  ./partyfuse2.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas" | ||||
|         ) | ||||
|         sys.exit(1) | ||||
| 
 | ||||
| @@ -4,20 +4,21 @@ set -e | ||||
| # runs copyparty (or any other program really) in a chroot | ||||
| # | ||||
| # assumption: these directories, and everything within, are owned by root | ||||
| sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr ) | ||||
|  | ||||
| sysdirs=(); for v in /bin /lib /lib32 /lib64 /sbin /usr /etc/alternatives ; do | ||||
| 	[ -e $v ] && sysdirs+=($v) | ||||
| done | ||||
|  | ||||
| # error-handler | ||||
| help() { cat <<'EOF' | ||||
|  | ||||
| usage: | ||||
|   ./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]" | ||||
|   ./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...] | ||||
|  | ||||
| example: | ||||
|   ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd" | ||||
|   ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd | ||||
|  | ||||
| example for running straight from source (instead of using an sfx): | ||||
|   PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd" | ||||
|   PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd | ||||
|  | ||||
| note that if you have python modules installed as --user (such as bpm/key detectors), | ||||
|   you should add /home/foo/.local as a VOLDIR | ||||
| @@ -38,7 +39,7 @@ while true; do | ||||
| 	v="$1"; shift | ||||
| 	[ "$v" = -- ] && break  # end of volumes | ||||
| 	[ "$#" -eq 0 ] && break  # invalid usage | ||||
| 	vols+=( "$(realpath "$v")" ) | ||||
| 	vols+=( "$(realpath "$v" || echo "$v")" ) | ||||
| done | ||||
| pybin="$1"; shift | ||||
| pybin="$(command -v "$pybin")" | ||||
| @@ -82,7 +83,7 @@ jail="${jail%/}" | ||||
| printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq | | ||||
| while IFS= read -r v; do | ||||
| 	[ -e "$v" ] || { | ||||
| 		# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v" | ||||
| 		printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v" | ||||
| 		continue | ||||
| 	} | ||||
| 	i1=$(stat -c%D.%i "$v"      2>/dev/null || echo a) | ||||
| @@ -97,9 +98,11 @@ done | ||||
|  | ||||
| cln() { | ||||
| 	rv=$? | ||||
| 	# cleanup if not in use | ||||
| 	lsof "$jail" | grep -qF "$jail" && | ||||
| 		echo "chroot is in use, will not cleanup" || | ||||
| 	wait -f -p rv $p || true | ||||
| 	cd / | ||||
| 	echo "stopping chroot..." | ||||
| 	lsof "$jail" | grep -F "$jail" && | ||||
| 		echo "chroot is in use; will not unmount" || | ||||
| 	{ | ||||
| 		mount | grep -F " on $jail" | | ||||
| 		awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' | | ||||
| @@ -115,6 +118,15 @@ mkdir -p "$jail/tmp" | ||||
| chmod 777 "$jail/tmp" | ||||
|  | ||||
|  | ||||
| # create a dev | ||||
| (cd $jail; mkdir -p dev; cd dev | ||||
| [ -e null ]    || mknod -m 666 null    c 1 3 | ||||
| [ -e zero ]    || mknod -m 666 zero    c 1 5 | ||||
| [ -e random ]  || mknod -m 444 random  c 1 8 | ||||
| [ -e urandom ] || mknod -m 444 urandom c 1 9 | ||||
| ) | ||||
|  | ||||
|  | ||||
| # run copyparty | ||||
| export HOME=$(getent passwd $uid | cut -d: -f6) | ||||
| export USER=$(getent passwd $uid | cut -d: -f1) | ||||
| @@ -124,5 +136,6 @@ export LOGNAME="$USER" | ||||
| #echo "cpp [$cpp]" | ||||
| chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" & | ||||
| p=$! | ||||
| trap 'kill -USR1 $p' USR1 | ||||
| trap 'kill $p' INT TERM | ||||
| wait | ||||
|   | ||||
							
								
								
									
										1194
									
								
								bin/u2c.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										1194
									
								
								bin/u2c.py
									
									
									
									
									
										Executable file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										99
									
								
								bin/unforget.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										99
									
								
								bin/unforget.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,99 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| """ | ||||
| unforget.py: rebuild db from logfiles | ||||
| 2022-09-07, v0.1, ed <irc.rizon.net>, MIT-Licensed | ||||
| https://github.com/9001/copyparty/blob/hovudstraum/bin/unforget.py | ||||
|  | ||||
| only makes sense if running copyparty with --no-forget | ||||
| (e.g. immediately shifting uploads to other storage) | ||||
|  | ||||
| usage: | ||||
|   xz -d < log | ./unforget.py .hist/up2k.db | ||||
|  | ||||
| """ | ||||
|  | ||||
| import re | ||||
| import sys | ||||
| import json | ||||
| import base64 | ||||
| import sqlite3 | ||||
| import argparse | ||||
|  | ||||
|  | ||||
FS_ENCODING = sys.getfilesystemencoding()


class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
    """argparse formatter: raw description text + show default values."""

    pass


# scratch in-memory cursor, used only to probe whether sqlite
# can store a given string as-is
mem_cur = sqlite3.connect(":memory:").cursor()
mem_cur.execute(r"create table a (b text)")


def s3enc(rd: str, fn: str) -> tuple[str, str]:
    """Make a (dirpath, filename) pair safe for sqlite storage.

    Strings that sqlite cannot bind (e.g. surrogateescape'd bytes)
    are replaced with "//" + urlsafe-base64 of their raw bytes.
    """
    out: list[str] = []
    for s in (rd, fn):
        try:
            # raises if the text cannot be bound as an sqlite parameter
            mem_cur.execute("select * from a where b = ?", (s,))
            out.append(s)
        except:
            wtf8 = s.encode(FS_ENCODING, "surrogateescape")
            out.append("//" + base64.urlsafe_b64encode(wtf8).decode("ascii"))

    return out[0], out[1]
|  | ||||
|  | ||||
def main():
    """Rebuild up2k db rows from copyparty logfiles read on stdin.

    For each completed upload found in the log, inserts a row into
    the `up` table of the sqlite db given as the only argument,
    skipping warks that are already present.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument("db")
    ar = ap.parse_args()

    db = sqlite3.connect(ar.db).cursor()
    ptn_times = re.compile(r"no more chunks, setting times \(([0-9]+)")
    at = 0  # completion-time of the most recently finished upload
    ctr = 0  # number of rows inserted so far

    for ln in [x.decode("utf-8", "replace").rstrip() for x in sys.stdin.buffer]:
        if "no more chunks, setting times (" in ln:
            m = ptn_times.search(ln)
            if m:
                at = int(m.group(1))

        if '"hash": []' in ln:
            try:
                ofs = ln.find("{")
                j = json.loads(ln[ofs:])
            except:
                # BUGFIX: was `pass`, which crashed on the first
                # unparseable line (j unbound) or silently reused the
                # previous line's json; skip bad lines instead
                continue

            w = j["wark"]
            if db.execute("select w from up where w = ?", (w,)).fetchone():
                continue

            # PYTHONPATH=/home/ed/dev/copyparty/ python3 -m copyparty -e2dsa  -v foo:foo:rwmd,ed -aed:wark --no-forget
            # 05:34:43.845 127.0.0.1 42496       no more chunks, setting times (1662528883, 1658001882)
            # 05:34:43.863 127.0.0.1 42496       {"name": "f\"2", "purl": "/foo/bar/baz/", "size": 1674, "lmod": 1658001882, "sprs": true, "hash": [], "wark": "LKIWpp2jEAh9dH3fu-DobuURFGEKlODXDGTpZ1otMhUg"}
            # |                      w                       |     mt     |  sz  |   rd    | fn  |    ip     |     at     |
            # | LKIWpp2jEAh9dH3fu-DobuURFGEKlODXDGTpZ1otMhUg | 1658001882 | 1674 | bar/baz | f"2 | 127.0.0.1 | 1662528883 |

            rd, fn = s3enc(j["purl"].strip("/"), j["name"])
            # 2nd space-separated field is the client ip (minus any ansi prefix)
            ip = ln.split(" ")[1].split("m")[-1]

            q = "insert into up values (?,?,?,?,?,?,?)"
            v = (w, int(j["lmod"]), int(j["size"]), rd, fn, ip, at)
            db.execute(q, v)
            ctr += 1
            if ctr % 1024 == 1023:
                # periodic commit so a crash doesn't lose everything
                print(f"{ctr} commit...")
                db.connection.commit()

    if ctr:
        db.connection.commit()

    print(f"unforgot {ctr} files")


if __name__ == "__main__":
    main()
							
								
								
									
										830
									
								
								bin/up2k.py
									
									
									
									
									
								
							
							
						
						
									
										830
									
								
								bin/up2k.py
									
									
									
									
									
								
							| @@ -1,830 +0,0 @@ | ||||
| #!/usr/bin/env python3 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| """ | ||||
| up2k.py: upload to copyparty | ||||
| 2021-11-28, v0.13, ed <irc.rizon.net>, MIT-Licensed | ||||
| https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py | ||||
|  | ||||
| - dependencies: requests | ||||
| - supports python 2.6, 2.7, and 3.3 through 3.10 | ||||
|  | ||||
| - almost zero error-handling | ||||
| - but if something breaks just try again and it'll autoresume | ||||
| """ | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| import stat | ||||
| import math | ||||
| import time | ||||
| import atexit | ||||
| import signal | ||||
| import base64 | ||||
| import hashlib | ||||
| import argparse | ||||
| import platform | ||||
| import threading | ||||
| import requests | ||||
| import datetime | ||||
|  | ||||
|  | ||||
| # from copyparty/__init__.py | ||||
| PY2 = sys.version_info[0] == 2 | ||||
| if PY2: | ||||
|     from Queue import Queue | ||||
|     from urllib import unquote | ||||
|     from urllib import quote | ||||
|  | ||||
|     sys.dont_write_bytecode = True | ||||
|     bytes = str | ||||
| else: | ||||
|     from queue import Queue | ||||
|     from urllib.parse import unquote_to_bytes as unquote | ||||
|     from urllib.parse import quote_from_bytes as quote | ||||
|  | ||||
|     unicode = str | ||||
|  | ||||
| VT100 = platform.system() != "Windows" | ||||
|  | ||||
|  | ||||
| req_ses = requests.Session() | ||||
|  | ||||
|  | ||||
class File(object):
    """a single up2k upload task for one file on disk"""

    def __init__(self, top, rel, size, lmod):
        posix = rel.replace(b"\\", b"/")

        # identity of the file on disk
        self.top = top  # type: bytes  # volume root
        self.rel = posix  # type: bytes  # path below top, posix separators
        self.size = size  # type: int  # filesize in bytes
        self.lmod = lmod  # type: float  # last-modified unix time

        self.abs = os.path.join(top, rel)  # type: bytes  # absolute, native seps
        self.name = posix.split(b"/")[-1].decode("utf-8", "replace")  # type: str

        # populated by get_hashlist:
        self.cids = []  # type: list[tuple[str, int, int]]  # [ hash, ofs, sz ]
        self.kchunks = {}  # type: dict[str, tuple[int, int]]  # hash: [ ofs, sz ]

        # populated by handshake:
        self.ucids = []  # type: list[str]  # chunks the server still wants
        self.wark = None  # type: str
        self.url = None  # type: str

        # populated by upload:
        self.up_b = 0  # type: int  # bytes sent
        self.up_c = 0  # type: int  # chunks sent
|  | ||||
|  | ||||
class FileSlice(object):
    """file-like object providing a fixed window into a file"""

    def __init__(self, file, cid):
        # type: (File, str) -> FileSlice

        # chunk start offset (car) and length within the underlying file
        self.car, self.len = file.kchunks[cid]
        self.cdr = self.car + self.len  # offset one past the end of the chunk
        self.ofs = 0  # type: int  # current position, relative to car
        self.f = open(file.abs, "rb", 512 * 1024)
        self.f.seek(self.car)

        # delegate the remaining file-like protocol to the real file object;
        # https://stackoverflow.com/questions/4359495/what-is-exactly-a-file-like-object-in-python
        # IOBase, RawIOBase, BufferedIOBase
        funs = "close closed __enter__ __exit__ __iter__ isatty __next__ readable seekable writable"
        try:
            for fun in funs.split():
                setattr(self, fun, getattr(self.f, fun))
        except:
            pass  # py27 probably

    def tell(self):
        """Return the position relative to the start of the slice."""
        return self.ofs

    def seek(self, ofs, wh=0):
        """Seek within the slice; wh is the usual whence (0/1/2)."""
        if wh == 1:
            ofs = self.ofs + ofs
        elif wh == 2:
            ofs = self.len + ofs  # provided ofs is negative

        # clamp to the slice boundaries
        if ofs < 0:
            ofs = 0
        elif ofs >= self.len:
            # NOTE(review): clamps to len-1 rather than len, so seeking to the
            # exact end-of-slice is not possible; presumably intentional for
            # the uploader's retry logic -- confirm before changing
            ofs = self.len - 1

        self.ofs = ofs
        self.f.seek(self.car + ofs)

    def read(self, sz):
        """Read up to sz bytes, never past the end of the slice."""
        sz = min(sz, self.len - self.ofs)
        ret = self.f.read(sz)
        self.ofs += len(ret)
        return ret
|  | ||||
|  | ||||
# keep a handle on the builtin print; it may be shadowed by
# flushing_print below on consoles that need explicit flushing
_print = print
|  | ||||
|  | ||||
def eprint(*a, **ka):
    """write to stderr with no trailing newline, flushed immediately"""
    ka.update(file=sys.stderr, end="")
    if not PY2:
        ka["flush"] = True

    _print(*a, **ka)

    # py2 print() has no flush kwarg, and non-vt100 consoles buffer
    if PY2 or not VT100:
        sys.stderr.flush()
|  | ||||
|  | ||||
def flushing_print(*a, **ka):
    """print() replacement which flushes stdout unless the caller asked for specific flushing"""
    explicit = "flush" in ka
    _print(*a, **ka)
    if not explicit:
        sys.stdout.flush()
|  | ||||
|  | ||||
# non-vt100 consoles (e.g. old windows terminals) buffer output;
# shadow the builtin print with the flushing variant there
if not VT100:
    print = flushing_print
|  | ||||
|  | ||||
def termsize():
    """best-effort terminal dimensions; returns (width, height)"""
    import os

    def try_ioctl(fd):
        # ask the tty driver directly; returns (rows, cols) or None
        try:
            import fcntl, termios, struct

            return struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
        except:
            return None

    # stdin, stdout, stderr; any of them may be redirected
    rc = try_ioctl(0) or try_ioctl(1) or try_ioctl(2)

    if not rc:
        # all stdio redirected; try the controlling terminal
        try:
            fd = os.open(os.ctermid(), os.O_RDONLY)
            try:
                rc = try_ioctl(fd)
            finally:
                os.close(fd)
        except:
            pass

    if not rc:
        # no tty at all; fall back to env or a sane default
        try:
            rc = (os.environ["LINES"], os.environ["COLUMNS"])
        except:
            rc = (25, 80)

    return int(rc[1]), int(rc[0])
|  | ||||
|  | ||||
class CTermsize(object):
    """
    tracks the terminal dimensions (refreshing on SIGWINCH) and
    manages a vt100 scroll-region so a status panel can be pinned
    at the bottom of the screen
    """

    def __init__(self):
        self.ev = False  # resize-pending flag, set by the signal handler
        self.margin = None  # lines reserved at the bottom, or None
        self.g = None  # first row (1-based) of the reserved area, or None
        self.w, self.h = termsize()

        try:
            signal.signal(signal.SIGWINCH, self.ev_sig)
        except:
            # no SIGWINCH on this platform (windows) -- sizes stay static
            return

        thr = threading.Thread(target=self.worker)
        thr.daemon = True
        thr.start()

    def worker(self):
        # poll the resize flag; doing the actual work here (not in the
        # signal handler) keeps the handler minimal and safe
        while True:
            time.sleep(0.5)
            if not self.ev:
                continue

            self.ev = False
            self.w, self.h = termsize()

            # re-apply the scroll region so the pinned panel survives resizes
            if self.margin is not None:
                self.scroll_region(self.margin)

    def ev_sig(self, *a, **ka):
        # SIGWINCH handler; just set the flag for the worker thread
        self.ev = True

    def scroll_region(self, margin):
        # reserve `margin` lines at the bottom of the screen for the
        # status panel, or release the reservation when margin is None
        self.margin = margin
        if margin is None:
            self.g = None
            eprint("\033[s\033[r\033[u")
        else:
            self.g = 1 + self.h - margin
            # scroll down `margin` lines, then set the scroll region to
            # everything above them (save/restore cursor around it)
            m = "{0}\033[{1}A".format("\n" * margin, margin)
            eprint("{0}\033[s\033[1;{1}r\033[u".format(m, self.g - 1))
|  | ||||
|  | ||||
# singleton terminal-size tracker, shared by the progress renderer
ss = CTermsize()
|  | ||||
|  | ||||
| def _scd(err, top): | ||||
|     """non-recursive listing of directory contents, along with stat() info""" | ||||
|     with os.scandir(top) as dh: | ||||
|         for fh in dh: | ||||
|             abspath = os.path.join(top, fh.name) | ||||
|             try: | ||||
|                 yield [abspath, fh.stat()] | ||||
|             except: | ||||
|                 err.append(abspath) | ||||
|  | ||||
|  | ||||
| def _lsd(err, top): | ||||
|     """non-recursive listing of directory contents, along with stat() info""" | ||||
|     for name in os.listdir(top): | ||||
|         abspath = os.path.join(top, name) | ||||
|         try: | ||||
|             yield [abspath, os.stat(abspath)] | ||||
|         except: | ||||
|             err.append(abspath) | ||||
|  | ||||
|  | ||||
# prefer scandir (faster; stat usually comes for free),
# fall back to listdir+stat on ancient pythons
if hasattr(os, "scandir"):
    statdir = _scd
else:
    statdir = _lsd
|  | ||||
|  | ||||
def walkdir(err, top):
    """recursive statdir; yields (abspath, stat) for files only,
    in sorted order, appending unreadable paths to `err`"""
    for ap, inf in sorted(statdir(err, top)):
        if not stat.S_ISDIR(inf.st_mode):
            yield ap, inf
            continue

        try:
            for item in walkdir(err, ap):
                yield item
        except:
            err.append(ap)
|  | ||||
|  | ||||
def walkdirs(err, tops):
    """recursive statdir for a list of tops, yields [top, relpath, stat]"""
    sep = "{0}".format(os.sep).encode("ascii")
    for top in tops:
        # rsync-style: a trailing slash means "upload the contents",
        # otherwise the folder itself becomes part of the relpath
        if top.endswith(sep):
            base = top.rstrip(sep)
        else:
            base = os.path.dirname(top)

        if not os.path.isdir(top):
            # single file; split into folder + name
            d, n = top.rsplit(sep, 1)
            yield d, n, os.stat(top)
        else:
            for ap, inf in walkdir(err, top):
                yield base, ap[len(base) :].lstrip(sep), inf
|  | ||||
|  | ||||
| # mostly from copyparty/util.py | ||||
| def quotep(btxt): | ||||
|     quot1 = quote(btxt, safe=b"/") | ||||
|     if not PY2: | ||||
|         quot1 = quot1.encode("ascii") | ||||
|  | ||||
|     return quot1.replace(b" ", b"+") | ||||
|  | ||||
|  | ||||
| # from copyparty/util.py | ||||
| def humansize(sz, terse=False): | ||||
|     """picks a sensible unit for the given extent""" | ||||
|     for unit in ["B", "KiB", "MiB", "GiB", "TiB"]: | ||||
|         if sz < 1024: | ||||
|             break | ||||
|  | ||||
|         sz /= 1024.0 | ||||
|  | ||||
|     ret = " ".join([str(sz)[:4].rstrip("."), unit]) | ||||
|  | ||||
|     if not terse: | ||||
|         return ret | ||||
|  | ||||
|     return ret.replace("iB", "").replace(" ", "") | ||||
|  | ||||
|  | ||||
| # from copyparty/up2k.py | ||||
| def up2k_chunksize(filesize): | ||||
|     """gives The correct chunksize for up2k hashing""" | ||||
|     chunksize = 1024 * 1024 | ||||
|     stepsize = 512 * 1024 | ||||
|     while True: | ||||
|         for mul in [1, 2]: | ||||
|             nchunks = math.ceil(filesize * 1.0 / chunksize) | ||||
|             if nchunks <= 256 or chunksize >= 32 * 1024 * 1024: | ||||
|                 return chunksize | ||||
|  | ||||
|             chunksize += stepsize | ||||
|             stepsize *= mul | ||||
|  | ||||
|  | ||||
| # mostly from copyparty/up2k.py | ||||
| def get_hashlist(file, pcb): | ||||
|     # type: (File, any) -> None | ||||
|     """generates the up2k hashlist from file contents, inserts it into `file`""" | ||||
|  | ||||
|     chunk_sz = up2k_chunksize(file.size) | ||||
|     file_rem = file.size | ||||
|     file_ofs = 0 | ||||
|     ret = [] | ||||
|     with open(file.abs, "rb", 512 * 1024) as f: | ||||
|         while file_rem > 0: | ||||
|             hashobj = hashlib.sha512() | ||||
|             chunk_sz = chunk_rem = min(chunk_sz, file_rem) | ||||
|             while chunk_rem > 0: | ||||
|                 buf = f.read(min(chunk_rem, 64 * 1024)) | ||||
|                 if not buf: | ||||
|                     raise Exception("EOF at " + str(f.tell())) | ||||
|  | ||||
|                 hashobj.update(buf) | ||||
|                 chunk_rem -= len(buf) | ||||
|  | ||||
|             digest = hashobj.digest()[:33] | ||||
|             digest = base64.urlsafe_b64encode(digest).decode("utf-8") | ||||
|  | ||||
|             ret.append([digest, file_ofs, chunk_sz]) | ||||
|             file_ofs += chunk_sz | ||||
|             file_rem -= chunk_sz | ||||
|  | ||||
|             if pcb: | ||||
|                 pcb(file, file_ofs) | ||||
|  | ||||
|     file.cids = ret | ||||
|     file.kchunks = {} | ||||
|     for k, v1, v2 in ret: | ||||
|         file.kchunks[k] = [v1, v2] | ||||
|  | ||||
|  | ||||
def handshake(req_ses, url, file, pw, search):
    # type: (requests.Session, str, File, any, bool) -> List[str]
    """
    performs a handshake with the server; reply is:
      if search, a list of search results
      otherwise, a list of chunks to upload
    side effects (upload mode): sets file.url, file.name, file.wark
    """

    req = {
        "hash": [x[0] for x in file.cids],
        "name": file.name,
        "lmod": file.lmod,
        "size": file.size,
    }
    if search:
        req["srch"] = 1

    # NOTE(review): json payload sent as text/plain; presumably
    # what the server expects -- confirm before changing
    headers = {"Content-Type": "text/plain"}
    if pw:
        headers["Cookie"] = "=".join(["cppwd", pw])

    if file.url:
        # re-handshake; reuse the url the server gave us last time
        url = file.url
    elif b"/" in file.rel:
        # first handshake; POST to the file's destination subfolder
        url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")

    # retry forever on connection errors (server may be restarting)
    while True:
        try:
            r = req_ses.post(url, headers=headers, json=req)
            break
        except:
            eprint("handshake failed, retrying: {0}\n".format(file.name))
            time.sleep(1)

    try:
        r = r.json()
    except:
        # not json; probably an html error page -- surface the body
        raise Exception(r.text)

    if search:
        return r["hits"]

    # split scheme off the url so the server-provided absolute
    # path (purl) can be grafted onto scheme://host
    try:
        pre, url = url.split("://")
        pre += "://"
    except:
        pre = ""

    file.url = pre + url.split("/")[0] + r["purl"]
    file.name = r["name"]
    file.wark = r["wark"]

    # list of chunk-hashes the server still wants
    return r["hash"]
|  | ||||
|  | ||||
def upload(req_ses, file, cid, pw):
    # type: (requests.Session, File, str, any) -> None
    """upload one specific chunk, `cid` (a chunk-hash)"""

    headers = {
        "X-Up2k-Hash": cid,
        "X-Up2k-Wark": file.wark,
        "Content-Type": "application/octet-stream",
    }
    if pw:
        headers["Cookie"] = "cppwd=" + pw

    sl = FileSlice(file, cid)
    try:
        resp = req_ses.post(file.url, headers=headers, data=sl)
        if not resp:
            # non-2xx response; raise so the caller can retry
            raise Exception(repr(resp))

        # drain the body so the connection can be reused
        _ = resp.content
    finally:
        sl.f.close()
|  | ||||
|  | ||||
class Daemon(threading.Thread):
    """a Thread that is always daemonized (won't block interpreter shutdown)"""

    def __init__(self, *a, **ka):
        super(Daemon, self).__init__(*a, **ka)
        self.daemon = True
|  | ||||
|  | ||||
class Ctl(object):
    """
    the coordinator: scans the requested paths, then either runs the
    serial fallback (--safe) or spawns hasher / handshaker / uploader
    threads and renders a live progress panel
    """

    def __init__(self, ar):
        self.ar = ar
        # normalize each argument to an absolute bytestring path,
        # preserving any trailing slash (rsync-style semantics)
        ar.files = [
            os.path.abspath(os.path.realpath(x.encode("utf-8")))
            + (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
            for x in ar.files
        ]
        ar.url = ar.url.rstrip("/") + "/"
        if "://" not in ar.url:
            ar.url = "http://" + ar.url

        eprint("\nscanning {0} locations\n".format(len(ar.files)))

        # first pass: count files/bytes for the progress display
        nfiles = 0
        nbytes = 0
        err = []
        for _, _, inf in walkdirs(err, ar.files):
            nfiles += 1
            nbytes += inf.st_size

        if err:
            eprint("\n# failed to access {0} paths:\n".format(len(err)))
            for x in err:
                eprint(x.decode("utf-8", "replace") + "\n")

            eprint("^ failed to access those {0} paths ^\n\n".format(len(err)))
            if not ar.ok:
                eprint("aborting because --ok is not set\n")
                return

        eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
        self.nfiles = nfiles
        self.nbytes = nbytes

        # tls options
        if ar.td:
            requests.packages.urllib3.disable_warnings()
            req_ses.verify = False
        if ar.te:
            req_ses.verify = ar.te

        # second pass is lazy; access errors were already reported above
        self.filegen = walkdirs([], ar.files)
        if ar.safe:
            self.safe()
        else:
            self.fancy()

    def safe(self):
        """minimal basic slow boring fallback codepath"""
        search = self.ar.s
        for nf, (top, rel, inf) in enumerate(self.filegen):
            file = File(top, rel, inf.st_size, inf.st_mtime)
            upath = file.abs.decode("utf-8", "replace")

            print("{0} {1}\n  hash...".format(self.nfiles - nf, upath))
            get_hashlist(file, None)

            # scheme://host/ -- the [:8] + [8:] split is correct for both
            # "http://" and "https://"; was [:12] which duplicated part of
            # the hostname (handshaker() already used [:8])
            burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
            while True:
                print("  hs...")
                hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
                if search:
                    if hs:
                        for hit in hs:
                            print("  found: {0}{1}".format(burl, hit["rp"]))
                    else:
                        print("  NOT found")
                    break

                file.ucids = hs
                if not hs:
                    # server has all chunks; done with this file
                    break

                print("{0} {1}".format(self.nfiles - nf, upath))
                ncs = len(hs)
                for nc, cid in enumerate(hs):
                    print("  {0} up {1}".format(ncs - nc, cid))
                    upload(req_ses, file, cid, self.ar.a)

            print("  ok!")

    def fancy(self):
        """parallel codepath: worker threads + live progress panel"""
        # counters guarded by self.mutex
        self.hash_f = 0  # files hashed
        self.hash_c = 0  # chunks hashed
        self.hash_b = 0  # bytes hashed
        self.up_f = 0  # files completed
        self.up_c = 0  # chunks completed
        self.up_b = 0  # bytes completed (for eta)
        self.up_br = 0  # bytes actually sent (for speed)
        self.hasher_busy = 1
        self.handshaker_busy = 0
        self.uploader_busy = 0

        self.t0 = time.time()
        self.t0_up = None  # set when the first upload starts
        self.spd = None  # smoothed upload speed

        self.mutex = threading.Lock()
        self.q_handshake = Queue()  # type: Queue[File]
        self.q_recheck = Queue()  # type: Queue[File]  # partial upload exists [...]
        self.q_upload = Queue()  # type: Queue[tuple[File, str]]

        # [file, progress] for the two status lines
        self.st_hash = [None, "(idle, starting...)"]  # type: tuple[File, int]
        self.st_up = [None, "(idle, starting...)"]  # type: tuple[File, int]
        if VT100:
            atexit.register(self.cleanup_vt100)
            ss.scroll_region(3)

        Daemon(target=self.hasher).start()
        for _ in range(self.ar.j):
            Daemon(target=self.handshaker).start()
            Daemon(target=self.uploader).start()

        # main thread: render the status panel until all workers
        # have been idle for a few consecutive polls
        idles = 0
        while idles < 3:
            time.sleep(0.07)
            with self.mutex:
                if (
                    self.q_handshake.empty()
                    and self.q_upload.empty()
                    and not self.hasher_busy
                    and not self.handshaker_busy
                    and not self.uploader_busy
                ):
                    idles += 1
                else:
                    idles = 0

            if VT100:
                maxlen = ss.w - len(str(self.nfiles)) - 14
                txt = "\033[s\033[{0}H".format(ss.g)
                for y, k, st, f in [
                    [0, "hash", self.st_hash, self.hash_f],
                    [1, "send", self.st_up, self.up_f],
                ]:
                    txt += "\033[{0}H{1}:".format(ss.g + y, k)
                    file, arg = st
                    if not file:
                        txt += " {0}\033[K".format(arg)
                    else:
                        if y:
                            p = 100 * file.up_b / file.size
                        else:
                            p = 100 * arg / file.size

                        name = file.abs.decode("utf-8", "replace")[-maxlen:]
                        if "/" in name:
                            name = "\033[36m{0}\033[0m/{1}".format(*name.rsplit("/", 1))

                        m = "{0:6.1f}% {1} {2}\033[K"
                        txt += m.format(p, self.nfiles - f, name)

                txt += "\033[{0}H ".format(ss.g + 2)
            else:
                txt = " "

            if not self.up_br:
                # still hashing only; estimate from hash speed
                spd = self.hash_b / (time.time() - self.t0)
                eta = (self.nbytes - self.hash_b) / (spd + 1)
            else:
                # exponentially smoothed upload speed
                spd = self.up_br / (time.time() - self.t0_up)
                spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
                eta = (self.nbytes - self.up_b) / (spd + 1)

            spd = humansize(spd)
            eta = str(datetime.timedelta(seconds=int(eta)))
            left = humansize(self.nbytes - self.up_b)
            tail = "\033[K\033[u" if VT100 else "\r"

            m = "eta: {0} @ {1}/s, {2} left".format(eta, spd, left)
            eprint(txt + "\033]0;{0}\033\\\r{1}{2}".format(m, m, tail))

    def cleanup_vt100(self):
        # release the scroll region and clear the window title
        ss.scroll_region(None)
        eprint("\033[J\033]0;\033\\")

    def cb_hasher(self, file, ofs):
        # progress callback from get_hashlist
        self.st_hash = [file, ofs]

    def hasher(self):
        """worker: hash files and feed them to the handshakers"""
        prd = None
        ls = {}
        for top, rel, inf in self.filegen:
            if self.ar.z:
                # -z: skip files which exist on the server with the
                # same size and ~same last-modified; list each remote
                # folder once and cache it in `ls`
                rd = os.path.dirname(rel)
                if prd != rd:
                    prd = rd
                    headers = {}
                    if self.ar.a:
                        headers["Cookie"] = "=".join(["cppwd", self.ar.a])

                    ls = {}
                    try:
                        print("      ls ~{0}".format(rd.decode("utf-8", "replace")))
                        r = req_ses.get(
                            self.ar.url.encode("utf-8") + quotep(rd) + b"?ls",
                            headers=headers,
                        )
                        for f in r.json()["files"]:
                            rfn = f["href"].split("?")[0].encode("utf-8", "replace")
                            ls[unquote(rfn)] = f
                    except:
                        # folder doesn't exist yet (or listing failed)
                        print("   mkdir ~{0}".format(rd.decode("utf-8", "replace")))

                rf = ls.get(os.path.basename(rel), None)
                if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 1:
                    self.nfiles -= 1
                    self.nbytes -= inf.st_size
                    continue

            file = File(top, rel, inf.st_size, inf.st_mtime)
            # backpressure: don't hash too far ahead of the uploaders
            while True:
                with self.mutex:
                    if (
                        self.hash_b - self.up_b < 1024 * 1024 * 128
                        and self.hash_c - self.up_c < 64
                        and (
                            not self.ar.nh
                            or (
                                self.q_upload.empty()
                                and self.q_handshake.empty()
                                and not self.uploader_busy
                            )
                        )
                    ):
                        break

                time.sleep(0.05)

            get_hashlist(file, self.cb_hasher)
            with self.mutex:
                self.hash_f += 1
                self.hash_c += len(file.cids)
                self.hash_b += file.size

            self.q_handshake.put(file)

        self.hasher_busy = 0
        self.st_hash = [None, "(finished)"]

    def handshaker(self):
        """worker: handshake hashed files, enqueue missing chunks"""
        search = self.ar.s
        q = self.q_handshake
        # scheme://host/ (see note in safe() about the [:8] split)
        burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
        while True:
            file = q.get()
            if not file:
                # None = sentinel; drain q_recheck, then stop the uploaders
                if q == self.q_handshake:
                    q = self.q_recheck
                    q.put(None)
                    continue

                self.q_upload.put(None)
                break

            with self.mutex:
                self.handshaker_busy += 1

            upath = file.abs.decode("utf-8", "replace")

            try:
                hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
            except Exception as ex:
                if q == self.q_handshake and "<pre>partial upload exists" in str(ex):
                    # someone else is uploading the same file; retry later
                    self.q_recheck.put(file)
                    hs = []
                else:
                    raise

            if search:
                if hs:
                    for hit in hs:
                        m = "found: {0}\n  {1}{2}\n"
                        print(m.format(upath, burl, hit["rp"]), end="")
                else:
                    print("NOT found: {0}\n".format(upath), end="")

                with self.mutex:
                    self.up_f += 1
                    self.up_c += len(file.cids)
                    self.up_b += file.size
                    self.handshaker_busy -= 1

                continue

            with self.mutex:
                if not hs:
                    # all chunks done
                    self.up_f += 1
                    self.up_c += len(file.cids) - file.up_c
                    self.up_b += file.size - file.up_b

                if hs and file.up_c:
                    # some chunks failed; roll their progress back
                    self.up_c -= len(hs)
                    file.up_c -= len(hs)
                    for cid in hs:
                        sz = file.kchunks[cid][1]
                        self.up_b -= sz
                        file.up_b -= sz

                file.ucids = hs
                self.handshaker_busy -= 1

            if not hs:
                kw = "uploaded" if file.up_b else "   found"
                print("{0} {1}".format(kw, upath))
            for cid in hs:
                self.q_upload.put([file, cid])

    def uploader(self):
        """worker: upload chunks; re-handshake a file when its last chunk is done"""
        while True:
            task = self.q_upload.get()
            if not task:
                self.st_up = [None, "(finished)"]
                break

            with self.mutex:
                self.uploader_busy += 1
                self.t0_up = self.t0_up or time.time()

            file, cid = task
            try:
                upload(req_ses, file, cid, self.ar.a)
            except:
                eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
                pass  # handshake will fix it

            with self.mutex:
                sz = file.kchunks[cid][1]
                file.ucids = [x for x in file.ucids if x != cid]
                if not file.ucids:
                    # verify the file is complete (and retry failed chunks)
                    self.q_handshake.put(file)

                self.st_up = [file, cid]
                file.up_b += sz
                self.up_b += sz
                self.up_br += sz
                file.up_c += 1
                self.up_c += 1
                self.uploader_busy -= 1
|  | ||||
|  | ||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
    """argparse formatter which shows defaults and keeps the epilog's manual linebreaks"""

    pass
|  | ||||
|  | ||||
def main():
    """parse the commandline and hand everything over to Ctl"""
    time.strptime("19970815", "%Y%m%d")  # python#7980
    if not VT100:
        os.system("rem")  # enables colors

    # fmt: off
    ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
NOTE:
source file/folder selection uses rsync syntax, meaning that:
  "foo" uploads the entire folder to URL/foo/
  "foo/" uploads the CONTENTS of the folder into URL/
""")

    ap.add_argument("url", type=unicode, help="server url, including destination folder")
    ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
    ap.add_argument("-a", metavar="PASSWORD", help="password")
    ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
    ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
    ap = app.add_argument_group("performance tweaks")
    ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
    ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
    ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
    ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
    ap = app.add_argument_group("tls")
    ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
    ap.add_argument("-td", action="store_true", help="disable certificate check")
    # fmt: on

    Ctl(app.parse_args())
|  | ||||
|  | ||||
# standard entry-point guard
if __name__ == "__main__":
    main()
| @@ -22,13 +22,23 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share | ||||
| * `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$` | ||||
| * `pw`: password (remove `Parameters` if anon-write) | ||||
|  | ||||
| ### [`media-osd-bgone.ps1`](media-osd-bgone.ps1) | ||||
| * disables the [windows OSD popup](https://user-images.githubusercontent.com/241032/122821375-0e08df80-d2dd-11eb-9fd9-184e8aacf1d0.png) (the thing on the left) which appears every time you hit media hotkeys to adjust volume or change song while playing music with the copyparty web-ui, or most other audio players really | ||||
|  | ||||
| ### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg) | ||||
| * disables thumbnails and folder-type detection in windows explorer | ||||
| * makes it way faster (especially for slow/networked locations (such as copyparty-fuse)) | ||||
| * makes it way faster (especially for slow/networked locations (such as partyfuse)) | ||||
|  | ||||
### [`webdav-cfg.bat`](webdav-cfg.bat)
| * improves the native webdav support in windows; | ||||
|   * removes the 47.6 MiB filesize limit when downloading from webdav | ||||
|   * optionally enables webdav basic-auth over plaintext http | ||||
|   * optionally helps disable wpad, removing the 10sec latency | ||||
|  | ||||
| ### [`cfssl.sh`](cfssl.sh) | ||||
| * creates CA and server certificates using cfssl | ||||
| * give a 3rd argument to install it to your copyparty config | ||||
| * systemd service at [`systemd/cfssl.service`](systemd/cfssl.service) | ||||
|  | ||||
| # OS integration | ||||
| init-scripts to start copyparty as a service | ||||
|   | ||||
							
								
								
									
										14
									
								
								contrib/apache/copyparty.conf
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										14
									
								
								contrib/apache/copyparty.conf
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,14 @@ | ||||
| # when running copyparty behind a reverse proxy, | ||||
| # the following arguments are recommended: | ||||
| # | ||||
#   -i 127.0.0.1    only accept connections from the local reverse-proxy (apache)
| # | ||||
| # if you are doing location-based proxying (such as `/stuff` below) | ||||
| # you must run copyparty with --rp-loc=stuff | ||||
| # | ||||
| # on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1 | ||||
|  | ||||
| LoadModule proxy_module modules/mod_proxy.so | ||||
| ProxyPass "/stuff" "http://127.0.0.1:3923/stuff" | ||||
| # do not specify ProxyPassReverse | ||||
| RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME} | ||||
| @@ -1,14 +1,44 @@ | ||||
| #!/bin/bash | ||||
| set -e | ||||
|  | ||||
| cat >/dev/null <<'EOF' | ||||
|  | ||||
| NOTE: copyparty is now able to do this automatically; | ||||
| however you may wish to use this script instead if | ||||
| you have specific needs (or if copyparty breaks) | ||||
|  | ||||
| this script generates a new self-signed TLS certificate and | ||||
| replaces the default insecure one that comes with copyparty | ||||
|  | ||||
| as it is trivial to impersonate a copyparty server using the | ||||
| default certificate, it is highly recommended to do this | ||||
|  | ||||
| this will create a self-signed CA, and a Server certificate | ||||
| which gets signed by that CA -- you can run it multiple times | ||||
| with different server-FQDNs / IPs to create additional certs | ||||
| for all your different servers / (non-)copyparty services | ||||
|  | ||||
| EOF | ||||
|  | ||||
|  | ||||
| # ca-name and server-fqdn | ||||
| ca_name="$1" | ||||
| srv_fqdn="$2" | ||||
|  | ||||
| [ -z "$srv_fqdn" ] && { | ||||
| 	echo "need arg 1: ca name" | ||||
| 	echo "need arg 2: server fqdn" | ||||
| 	echo "optional arg 3: if set, write cert into copyparty cfg" | ||||
| [ -z "$srv_fqdn" ] && { cat <<'EOF' | ||||
| need arg 1: ca name | ||||
| need arg 2: server fqdn and/or IPs, comma-separated | ||||
| optional arg 3: if set, write cert into copyparty cfg | ||||
|  | ||||
| example: | ||||
|   ./cfssl.sh PartyCo partybox.local y | ||||
| EOF | ||||
| 	exit 1 | ||||
| } | ||||
|  | ||||
|  | ||||
| command -v cfssljson 2>/dev/null || { | ||||
| 	echo please install cfssl and try again | ||||
| 	exit 1 | ||||
| } | ||||
|  | ||||
| @@ -59,12 +89,14 @@ show() { | ||||
| } | ||||
| show ca.pem | ||||
| show "$srv_fqdn.pem" | ||||
|  | ||||
| echo | ||||
| echo "successfully generated new certificates" | ||||
|  | ||||
| # write cert into copyparty config | ||||
| [ -z "$3" ] || { | ||||
| 	mkdir -p ~/.config/copyparty | ||||
| 	cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem  | ||||
| 	echo "successfully replaced copyparty certificate" | ||||
| } | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -3,7 +3,7 @@ | ||||
|  | ||||
| <head> | ||||
| 	<meta charset="utf-8"> | ||||
| 	<title>⇆🎉 redirect</title> | ||||
| 	<title>💾🎉 redirect</title> | ||||
| 	<meta http-equiv="X-UA-Compatible" content="IE=edge"> | ||||
| 	<style> | ||||
|  | ||||
|   | ||||
							
								
								
									
										
											BIN
										
									
								
								contrib/ios/upload-to-copyparty.shortcut
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								contrib/ios/upload-to-copyparty.shortcut
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										104
									
								
								contrib/media-osd-bgone.ps1
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										104
									
								
								contrib/media-osd-bgone.ps1
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,104 @@ | ||||
| # media-osd-bgone.ps1: disable media-control OSD on win10do | ||||
| # v1.1, 2021-06-25, ed <irc.rizon.net>, MIT-licensed | ||||
| # https://github.com/9001/copyparty/blob/hovudstraum/contrib/media-osd-bgone.ps1 | ||||
| # | ||||
| # locates the first window that looks like the media OSD and minimizes it; | ||||
| # doing this once after each reboot should do the trick | ||||
| # (adjust the width/height filter if it doesn't work) | ||||
| # | ||||
| # --------------------------------------------------------------------- | ||||
| # | ||||
| # tip: save the following as "media-osd-bgone.bat" next to this script: | ||||
| #   start cmd /c "powershell -command ""set-executionpolicy -scope process bypass; .\media-osd-bgone.ps1"" & ping -n 2 127.1 >nul" | ||||
| # | ||||
| # then create a shortcut to that bat-file and move the shortcut here: | ||||
| #   %appdata%\Microsoft\Windows\Start Menu\Programs\Startup | ||||
| # | ||||
| # and now this will autorun on bootup | ||||
|  | ||||
|  | ||||
| Add-Type -TypeDefinition @" | ||||
| using System; | ||||
| using System.IO; | ||||
| using System.Threading; | ||||
| using System.Diagnostics; | ||||
| using System.Runtime.InteropServices; | ||||
| using System.Windows.Forms; | ||||
|  | ||||
| namespace A { | ||||
|   public class B : Control { | ||||
|  | ||||
|     [DllImport("user32.dll")] | ||||
|     static extern void keybd_event(byte bVk, byte bScan, uint dwFlags, int dwExtraInfo); | ||||
|  | ||||
|     [DllImport("user32.dll", SetLastError = true)] | ||||
|     static extern IntPtr FindWindowEx(IntPtr hwndParent, IntPtr hwndChildAfter, string lpszClass, string lpszWindow); | ||||
|  | ||||
|     [DllImport("user32.dll", SetLastError=true)] | ||||
|     static extern bool GetWindowRect(IntPtr hwnd, out RECT lpRect); | ||||
|  | ||||
|     [DllImport("user32.dll")] | ||||
|     static extern bool ShowWindow(IntPtr hWnd, int nCmdShow); | ||||
|  | ||||
|     [StructLayout(LayoutKind.Sequential)] | ||||
|     public struct RECT { | ||||
|       public int x; | ||||
|       public int y; | ||||
|       public int x2; | ||||
|       public int y2; | ||||
|     } | ||||
|      | ||||
|     bool fa() { | ||||
|       RECT r; | ||||
|       IntPtr it = IntPtr.Zero; | ||||
|       while ((it = FindWindowEx(IntPtr.Zero, it, "NativeHWNDHost", "")) != IntPtr.Zero) { | ||||
|         if (FindWindowEx(it, IntPtr.Zero, "DirectUIHWND", "") == IntPtr.Zero) | ||||
|           continue; | ||||
|          | ||||
|         if (!GetWindowRect(it, out r)) | ||||
|           continue; | ||||
|  | ||||
|         int w = r.x2 - r.x + 1; | ||||
|         int h = r.y2 - r.y + 1; | ||||
|  | ||||
|         Console.WriteLine("[*] hwnd {0:x} @ {1}x{2} sz {3}x{4}", it, r.x, r.y, w, h); | ||||
|         if (h != 141) | ||||
|           continue; | ||||
|          | ||||
|         ShowWindow(it, 6); | ||||
|         Console.WriteLine("[+] poof"); | ||||
|         return true; | ||||
|       } | ||||
|       return false; | ||||
|     } | ||||
|  | ||||
|     void fb() { | ||||
|       keybd_event((byte)Keys.VolumeMute, 0, 0, 0); | ||||
|       keybd_event((byte)Keys.VolumeMute, 0, 2, 0); | ||||
|       Thread.Sleep(500); | ||||
|       keybd_event((byte)Keys.VolumeMute, 0, 0, 0); | ||||
|       keybd_event((byte)Keys.VolumeMute, 0, 2, 0); | ||||
|  | ||||
|       while (true) { | ||||
|         if (fa()) { | ||||
|           break; | ||||
|         } | ||||
|         Console.WriteLine("[!] not found"); | ||||
|         Thread.Sleep(1000); | ||||
|       } | ||||
|       this.Invoke((MethodInvoker)delegate { | ||||
|         Application.Exit(); | ||||
|       }); | ||||
|     } | ||||
|  | ||||
|     public void Run() { | ||||
|       Console.WriteLine("[+] hi"); | ||||
|       new Thread(new ThreadStart(fb)).Start(); | ||||
|       Application.Run(); | ||||
|       Console.WriteLine("[+] bye"); | ||||
|     } | ||||
|   } | ||||
| } | ||||
| "@ -ReferencedAssemblies System.Windows.Forms | ||||
|  | ||||
| (New-Object -TypeName A.B).Run() | ||||
| @@ -1,15 +1,16 @@ | ||||
| # when running copyparty behind a reverse proxy, | ||||
| # the following arguments are recommended: | ||||
| # | ||||
| #   -nc 512         important, see next paragraph | ||||
| #   --http-only     lower latency on initial connection | ||||
| #   -i 127.0.0.1    only accept connections from nginx | ||||
| # | ||||
| # -nc must match or exceed the webserver's max number of concurrent clients; | ||||
| # copyparty default is 1024 if OS permits it (see "max clients:" on startup), | ||||
| # nginx default is 512  (worker_processes 1, worker_connections 512) | ||||
| # | ||||
| # you may also consider adding -j0 for CPU-intensive configurations | ||||
| # (not that i can really think of any good examples) | ||||
| # (5'000 requests per second, or 20gbps upload/download in parallel) | ||||
| # | ||||
| # on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1 | ||||
|  | ||||
| upstream cpp { | ||||
| 	server 127.0.0.1:3923; | ||||
| @@ -33,7 +34,15 @@ server { | ||||
| 		proxy_set_header   Host              $host; | ||||
| 		proxy_set_header   X-Real-IP         $remote_addr; | ||||
| 		proxy_set_header   X-Forwarded-For   $proxy_add_x_forwarded_for; | ||||
| 		# NOTE: with cloudflare you want this instead: | ||||
| 		#proxy_set_header   X-Forwarded-For   $http_cf_connecting_ip; | ||||
| 		proxy_set_header   X-Forwarded-Proto $scheme; | ||||
| 		proxy_set_header   Connection        "Keep-Alive"; | ||||
| 	} | ||||
| } | ||||
|  | ||||
| # default client_max_body_size (1M) blocks uploads larger than 256 MiB | ||||
| client_max_body_size 1024M; | ||||
| client_header_timeout 610m; | ||||
| client_body_timeout 610m; | ||||
| send_timeout 610m; | ||||
|   | ||||
							
								
								
									
										283
									
								
								contrib/nixos/modules/copyparty.nix
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										283
									
								
								contrib/nixos/modules/copyparty.nix
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,283 @@ | ||||
| { config, pkgs, lib, ... }: | ||||
|  | ||||
| with lib; | ||||
|  | ||||
| let | ||||
|   mkKeyValue = key: value: | ||||
|     if value == true then | ||||
|     # sets with a true boolean value are coerced to just the key name | ||||
|       key | ||||
|     else if value == false then | ||||
|     # or omitted completely when false | ||||
|       "" | ||||
|     else | ||||
|       (generators.mkKeyValueDefault { inherit mkValueString; } ": " key value); | ||||
|  | ||||
|   mkAttrsString = value: (generators.toKeyValue { inherit mkKeyValue; } value); | ||||
|  | ||||
|   mkValueString = value: | ||||
|     if isList value then | ||||
|       (concatStringsSep ", " (map mkValueString value)) | ||||
|     else if isAttrs value then | ||||
|       "\n" + (mkAttrsString value) | ||||
|     else | ||||
|       (generators.mkValueStringDefault { } value); | ||||
|  | ||||
|   mkSectionName = value: "[" + (escape [ "[" "]" ] value) + "]"; | ||||
|  | ||||
|   mkSection = name: attrs: '' | ||||
|     ${mkSectionName name} | ||||
|     ${mkAttrsString attrs} | ||||
|   ''; | ||||
|  | ||||
|   mkVolume = name: attrs: '' | ||||
|     ${mkSectionName name} | ||||
|     ${attrs.path} | ||||
|     ${mkAttrsString { | ||||
|       accs = attrs.access; | ||||
|       flags = attrs.flags; | ||||
|     }} | ||||
|   ''; | ||||
|  | ||||
|   passwordPlaceholder = name: "{{password-${name}}}"; | ||||
|  | ||||
|   accountsWithPlaceholders = mapAttrs (name: attrs: passwordPlaceholder name); | ||||
|  | ||||
|   configStr = '' | ||||
|     ${mkSection "global" cfg.settings} | ||||
|     ${mkSection "accounts" (accountsWithPlaceholders cfg.accounts)} | ||||
|     ${concatStringsSep "\n" (mapAttrsToList mkVolume cfg.volumes)} | ||||
|   ''; | ||||
|  | ||||
|   name = "copyparty"; | ||||
|   cfg = config.services.copyparty; | ||||
|   configFile = pkgs.writeText "${name}.conf" configStr; | ||||
|   runtimeConfigPath = "/run/${name}/${name}.conf"; | ||||
|   home = "/var/lib/${name}"; | ||||
|   defaultShareDir = "${home}/data"; | ||||
| in { | ||||
|   options.services.copyparty = { | ||||
|     enable = mkEnableOption "web-based file manager"; | ||||
|  | ||||
|     package = mkOption { | ||||
|       type = types.package; | ||||
|       default = pkgs.copyparty; | ||||
|       defaultText = "pkgs.copyparty"; | ||||
|       description = '' | ||||
|         Package of the application to run, exposed for overriding purposes. | ||||
|       ''; | ||||
|     }; | ||||
|  | ||||
|     openFilesLimit = mkOption { | ||||
|       default = 4096; | ||||
|       type = types.either types.int types.str; | ||||
|       description = "Number of files to allow copyparty to open."; | ||||
|     }; | ||||
|  | ||||
|     settings = mkOption { | ||||
|       type = types.attrs; | ||||
|       description = '' | ||||
|         Global settings to apply. | ||||
|         Directly maps to values in the [global] section of the copyparty config. | ||||
|         See `${getExe cfg.package} --help` for more details. | ||||
|       ''; | ||||
|       default = { | ||||
|         i = "127.0.0.1"; | ||||
|         no-reload = true; | ||||
|       }; | ||||
|       example = literalExpression '' | ||||
|         { | ||||
|           i = "0.0.0.0"; | ||||
|           no-reload = true; | ||||
|         } | ||||
|       ''; | ||||
|     }; | ||||
|  | ||||
|     accounts = mkOption { | ||||
|       type = types.attrsOf (types.submodule ({ ... }: { | ||||
|         options = { | ||||
|           passwordFile = mkOption { | ||||
|             type = types.str; | ||||
|             description = '' | ||||
|               Runtime file path to a file containing the user password. | ||||
|               Must be readable by the copyparty user. | ||||
|             ''; | ||||
|             example = "/run/keys/copyparty/ed"; | ||||
|           }; | ||||
|         }; | ||||
|       })); | ||||
|       description = '' | ||||
|         A set of copyparty accounts to create. | ||||
|       ''; | ||||
|       default = { }; | ||||
|       example = literalExpression '' | ||||
|         { | ||||
|           ed.passwordFile = "/run/keys/copyparty/ed"; | ||||
|         }; | ||||
|       ''; | ||||
|     }; | ||||
|  | ||||
|     volumes = mkOption { | ||||
|       type = types.attrsOf (types.submodule ({ ... }: { | ||||
|         options = { | ||||
|           path = mkOption { | ||||
|             type = types.str; | ||||
|             description = '' | ||||
|               Path of a directory to share. | ||||
|             ''; | ||||
|           }; | ||||
|           access = mkOption { | ||||
|             type = types.attrs; | ||||
|             description = '' | ||||
|               Attribute list of permissions and the users to apply them to. | ||||
|  | ||||
|               The key must be a string containing any combination of allowed permission: | ||||
|                 "r" (read):   list folder contents, download files | ||||
|                 "w" (write):  upload files; need "r" to see the uploads | ||||
|                 "m" (move):   move files and folders; need "w" at destination | ||||
|                 "d" (delete): permanently delete files and folders | ||||
|                 "g" (get):    download files, but cannot see folder contents | ||||
|                 "G" (upget):  "get", but can see filekeys of their own uploads | ||||
|                 "h" (html):   "get", but folders return their index.html | ||||
|                 "a" (admin):  can see uploader IPs, config-reload | ||||
|  | ||||
|               For example: "rwmd" | ||||
|  | ||||
|               The value must be one of: | ||||
|                 an account name, defined in `accounts` | ||||
|                 a list of account names | ||||
|                 "*", which means "any account" | ||||
|             ''; | ||||
|             example = literalExpression '' | ||||
|               { | ||||
|                 # wG = write-upget = see your own uploads only | ||||
|                 wG = "*"; | ||||
|                 # read-write-modify-delete for users "ed" and "k" | ||||
|                 rwmd = ["ed" "k"]; | ||||
|               }; | ||||
|             ''; | ||||
|           }; | ||||
|           flags = mkOption { | ||||
|             type = types.attrs; | ||||
|             description = '' | ||||
|               Attribute list of volume flags to apply. | ||||
|               See `${getExe cfg.package} --help-flags` for more details. | ||||
|             ''; | ||||
|             example = literalExpression '' | ||||
|               { | ||||
|                 # "fk" enables filekeys (necessary for upget permission) (4 chars long) | ||||
|                 fk = 4; | ||||
|                 # scan for new files every 60sec | ||||
|                 scan = 60; | ||||
|                 # volflag "e2d" enables the uploads database | ||||
|                 e2d = true; | ||||
|                 # "d2t" disables multimedia parsers (in case the uploads are malicious) | ||||
|                 d2t = true; | ||||
|                 # skips hashing file contents if path matches *.iso | ||||
|                 nohash = "\.iso$"; | ||||
|               }; | ||||
|             ''; | ||||
|             default = { }; | ||||
|           }; | ||||
|         }; | ||||
|       })); | ||||
|       description = "A set of copyparty volumes to create"; | ||||
|       default = { | ||||
|         "/" = { | ||||
|           path = defaultShareDir; | ||||
|           access = { r = "*"; }; | ||||
|         }; | ||||
|       }; | ||||
|       example = literalExpression '' | ||||
|         { | ||||
|           "/" = { | ||||
|             path = ${defaultShareDir}; | ||||
|             access = { | ||||
|               # wG = write-upget = see your own uploads only | ||||
|               wG = "*"; | ||||
|               # read-write-modify-delete for users "ed" and "k" | ||||
|               rwmd = ["ed" "k"]; | ||||
|             }; | ||||
|           }; | ||||
|         }; | ||||
|       ''; | ||||
|     }; | ||||
|   }; | ||||
|  | ||||
|   config = mkIf cfg.enable { | ||||
|     systemd.services.copyparty = { | ||||
|       description = "http file sharing hub"; | ||||
|       wantedBy = [ "multi-user.target" ]; | ||||
|  | ||||
|       environment = { | ||||
|         PYTHONUNBUFFERED = "true"; | ||||
|         XDG_CONFIG_HOME = "${home}/.config"; | ||||
|       }; | ||||
|  | ||||
|       preStart = let | ||||
|         replaceSecretCommand = name: attrs: | ||||
|           "${getExe pkgs.replace-secret} '${ | ||||
|             passwordPlaceholder name | ||||
|           }' '${attrs.passwordFile}' ${runtimeConfigPath}"; | ||||
|       in '' | ||||
|         set -euo pipefail | ||||
|         install -m 600 ${configFile} ${runtimeConfigPath} | ||||
|         ${concatStringsSep "\n" | ||||
|         (mapAttrsToList replaceSecretCommand cfg.accounts)} | ||||
|       ''; | ||||
|  | ||||
|       serviceConfig = { | ||||
|         Type = "simple"; | ||||
|         ExecStart = "${getExe cfg.package} -c ${runtimeConfigPath}"; | ||||
|  | ||||
|         # Hardening options | ||||
|         User = "copyparty"; | ||||
|         Group = "copyparty"; | ||||
|         RuntimeDirectory = name; | ||||
|         RuntimeDirectoryMode = "0700"; | ||||
|         StateDirectory = [ name "${name}/data" "${name}/.config" ]; | ||||
|         StateDirectoryMode = "0700"; | ||||
|         WorkingDirectory = home; | ||||
|         TemporaryFileSystem = "/:ro"; | ||||
|         BindReadOnlyPaths = [ | ||||
|           "/nix/store" | ||||
|           "-/etc/resolv.conf" | ||||
|           "-/etc/nsswitch.conf" | ||||
|           "-/etc/hosts" | ||||
|           "-/etc/localtime" | ||||
|         ] ++ (mapAttrsToList (k: v: "-${v.passwordFile}") cfg.accounts); | ||||
|         BindPaths = [ home ] ++ (mapAttrsToList (k: v: v.path) cfg.volumes); | ||||
|         # Would re-mount paths ignored by temporary root | ||||
|         #ProtectSystem = "strict"; | ||||
|         ProtectHome = true; | ||||
|         PrivateTmp = true; | ||||
|         PrivateDevices = true; | ||||
|         ProtectKernelTunables = true; | ||||
|         ProtectControlGroups = true; | ||||
|         RestrictSUIDSGID = true; | ||||
|         PrivateMounts = true; | ||||
|         ProtectKernelModules = true; | ||||
|         ProtectKernelLogs = true; | ||||
|         ProtectHostname = true; | ||||
|         ProtectClock = true; | ||||
|         ProtectProc = "invisible"; | ||||
|         ProcSubset = "pid"; | ||||
|         RestrictNamespaces = true; | ||||
|         RemoveIPC = true; | ||||
|         UMask = "0077"; | ||||
|         LimitNOFILE = cfg.openFilesLimit; | ||||
|         NoNewPrivileges = true; | ||||
|         LockPersonality = true; | ||||
|         RestrictRealtime = true; | ||||
|       }; | ||||
|     }; | ||||
|  | ||||
|     users.groups.copyparty = { }; | ||||
|     users.users.copyparty = { | ||||
|       description = "Service user for copyparty"; | ||||
|       group = "copyparty"; | ||||
|       home = home; | ||||
|       isSystemUser = true; | ||||
|     }; | ||||
|   }; | ||||
| } | ||||
| @@ -14,5 +14,5 @@ name="$SVCNAME" | ||||
| command_background=true | ||||
| pidfile="/var/run/$SVCNAME.pid" | ||||
|  | ||||
| command="/usr/bin/python /usr/local/bin/copyparty-sfx.py" | ||||
| command="/usr/bin/python3 /usr/local/bin/copyparty-sfx.py" | ||||
| command_args="-q -v /mnt::rw" | ||||
|   | ||||
							
								
								
									
										55
									
								
								contrib/package/arch/PKGBUILD
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										55
									
								
								contrib/package/arch/PKGBUILD
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,55 @@ | ||||
| # Maintainer: icxes <dev.null@need.moe> | ||||
| pkgname=copyparty | ||||
| pkgver="1.9.4" | ||||
| pkgrel=1 | ||||
| pkgdesc="Portable file sharing hub" | ||||
| arch=("any") | ||||
| url="https://github.com/9001/${pkgname}" | ||||
| license=('MIT') | ||||
| depends=("python" "lsof" "python-jinja") | ||||
| makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz") | ||||
| optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags" | ||||
|             "python-mutagen: music tags (alternative)"  | ||||
|             "python-pillow: thumbnails for images"  | ||||
|             "python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"  | ||||
|             "libkeyfinder-git: detection of musical keys"  | ||||
|             "qm-vamp-plugins: BPM detection"  | ||||
|             "python-pyopenssl: ftps functionality"  | ||||
|             "python-argon2_cffi: hashed passwords in config"  | ||||
|             "python-impacket-git: smb support (bad idea)" | ||||
| ) | ||||
| source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz") | ||||
| backup=("etc/${pkgname}.d/init" ) | ||||
| sha256sums=("c327ac35deaa5e6cc86b3b1a251cc78517be5578c37c4dff90e98465ced82abc") | ||||
|  | ||||
| build() { | ||||
|     cd "${srcdir}/${pkgname}-${pkgver}" | ||||
|      | ||||
|     pushd copyparty/web | ||||
|     make -j$(nproc) | ||||
|     rm Makefile | ||||
|     popd | ||||
|      | ||||
|     python3 -m build -wn | ||||
| } | ||||
|  | ||||
| package() { | ||||
|     cd "${srcdir}/${pkgname}-${pkgver}" | ||||
|     python3 -m installer -d "$pkgdir" dist/*.whl | ||||
|  | ||||
|     install -dm755 "${pkgdir}/etc/${pkgname}.d" | ||||
|     install -Dm755 "bin/prisonparty.sh" "${pkgdir}/usr/bin/prisonparty" | ||||
|     install -Dm644 "contrib/package/arch/${pkgname}.conf" "${pkgdir}/etc/${pkgname}.d/init" | ||||
|     install -Dm644 "contrib/package/arch/${pkgname}.service" "${pkgdir}/usr/lib/systemd/system/${pkgname}.service" | ||||
|     install -Dm644 "contrib/package/arch/prisonparty.service" "${pkgdir}/usr/lib/systemd/system/prisonparty.service" | ||||
|     install -Dm644 "contrib/package/arch/index.md" "${pkgdir}/var/lib/${pkgname}-jail/README.md" | ||||
|     install -Dm644 "LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE" | ||||
|  | ||||
|     find /etc/${pkgname}.d -iname '*.conf' 2>/dev/null | grep -qE . && return | ||||
|     echo "┏━━━━━━━━━━━━━━━──-" | ||||
|     echo "┃ Configure ${pkgname} by adding .conf files into /etc/${pkgname}.d/" | ||||
|     echo "┃ and maybe copy+edit one of the following to /etc/systemd/system/:" | ||||
|     echo "┣━♦ /usr/lib/systemd/system/${pkgname}.service   (standard)" | ||||
|     echo "┣━♦ /usr/lib/systemd/system/prisonparty.service (chroot)" | ||||
|     echo "┗━━━━━━━━━━━━━━━──-" | ||||
| } | ||||
							
								
								
									
										7
									
								
								contrib/package/arch/copyparty.conf
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										7
									
								
								contrib/package/arch/copyparty.conf
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,7 @@ | ||||
| ## import all *.conf files from the current folder (/etc/copyparty.d) | ||||
| % ./ | ||||
|  | ||||
| # add additional .conf files to this folder; | ||||
| # see example config files for reference: | ||||
| # https://github.com/9001/copyparty/blob/hovudstraum/docs/example.conf | ||||
| # https://github.com/9001/copyparty/tree/hovudstraum/docs/copyparty.d | ||||
							
								
								
									
										32
									
								
								contrib/package/arch/copyparty.service
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										32
									
								
								contrib/package/arch/copyparty.service
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,32 @@ | ||||
| # this will start `/usr/bin/copyparty-sfx.py` | ||||
| # and read config from `/etc/copyparty.d/*.conf` | ||||
| # | ||||
| # you probably want to: | ||||
| #   change "User=cpp" and "/home/cpp/" to another user | ||||
| # | ||||
| # unless you add -q to disable logging, you may want to remove the | ||||
| #   following line to allow buffering (slightly better performance): | ||||
| #   Environment=PYTHONUNBUFFERED=x | ||||
|  | ||||
| [Unit] | ||||
| Description=copyparty file server | ||||
|  | ||||
| [Service] | ||||
| Type=notify | ||||
| SyslogIdentifier=copyparty | ||||
| Environment=PYTHONUNBUFFERED=x | ||||
| WorkingDirectory=/var/lib/copyparty-jail | ||||
| ExecReload=/bin/kill -s USR1 $MAINPID | ||||
|  | ||||
| # user to run as + where the TLS certificate is (if any) | ||||
| User=cpp | ||||
| Environment=XDG_CONFIG_HOME=/home/cpp/.config | ||||
|  | ||||
| # stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running | ||||
| ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' | ||||
|  | ||||
| # run copyparty | ||||
| ExecStart=/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init | ||||
|  | ||||
| [Install] | ||||
| WantedBy=multi-user.target | ||||
							
								
								
									
										3
									
								
								contrib/package/arch/index.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								contrib/package/arch/index.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | ||||
| this is `/var/lib/copyparty-jail`, the fallback webroot when copyparty has not yet been configured | ||||
|  | ||||
| please add some `*.conf` files to `/etc/copyparty.d/` | ||||
							
								
								
									
										31
									
								
								contrib/package/arch/prisonparty.service
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										31
									
								
								contrib/package/arch/prisonparty.service
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,31 @@ | ||||
| # this will start `/usr/bin/copyparty-sfx.py` | ||||
| # in a chroot, preventing accidental access elsewhere | ||||
| # and read config from `/etc/copyparty.d/*.conf` | ||||
| # | ||||
| # expose additional filesystem locations to copyparty | ||||
| #   by listing them between the last `1000` and `--` | ||||
| # | ||||
| # `1000 1000` = what user to run copyparty as | ||||
| # | ||||
| # unless you add -q to disable logging, you may want to remove the | ||||
| #   following line to allow buffering (slightly better performance): | ||||
| #   Environment=PYTHONUNBUFFERED=x | ||||
|  | ||||
| [Unit] | ||||
| Description=copyparty file server | ||||
|  | ||||
| [Service] | ||||
| SyslogIdentifier=prisonparty | ||||
| Environment=PYTHONUNBUFFERED=x | ||||
| WorkingDirectory=/var/lib/copyparty-jail | ||||
| ExecReload=/bin/kill -s USR1 $MAINPID | ||||
|  | ||||
| # stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running | ||||
| ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' | ||||
|  | ||||
| # run copyparty | ||||
| ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail 1000 1000 /etc/copyparty.d -- \ | ||||
|   /usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init | ||||
|  | ||||
| [Install] | ||||
| WantedBy=multi-user.target | ||||
							
								
								
									
										59
									
								
								contrib/package/nix/copyparty/default.nix
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										59
									
								
								contrib/package/nix/copyparty/default.nix
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,59 @@ | ||||
| { lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, argon2-cffi, pillow, pyvips, ffmpeg, mutagen, | ||||
|  | ||||
| # use argon2id-hashed passwords in config files (sha2 is always available) | ||||
| withHashedPasswords ? true, | ||||
|  | ||||
| # create thumbnails with Pillow; faster than FFmpeg / MediaProcessing | ||||
| withThumbnails ? true, | ||||
|  | ||||
| # create thumbnails with PyVIPS; even faster, uses more memory | ||||
| # -- can be combined with Pillow to support more filetypes | ||||
| withFastThumbnails ? false, | ||||
|  | ||||
| # enable FFmpeg; thumbnails for most filetypes (also video and audio), extract audio metadata, transcode audio to opus | ||||
| # -- possibly dangerous if you allow anonymous uploads, since FFmpeg has a huge attack surface | ||||
| # -- can be combined with Thumbnails and/or FastThumbnails, since FFmpeg is slower than both | ||||
| withMediaProcessing ? true, | ||||
|  | ||||
| # if MediaProcessing is not enabled, you probably want this instead (less accurate, but much safer and faster) | ||||
| withBasicAudioMetadata ? false, | ||||
|  | ||||
| # enable FTPS support in the FTP server | ||||
| withFTPS ? false, | ||||
|  | ||||
| # samba/cifs server; dangerous and buggy, enable if you really need it | ||||
| withSMB ? false, | ||||
|  | ||||
| }: | ||||
|  | ||||
| let | ||||
|   pinData = lib.importJSON ./pin.json; | ||||
|   pyEnv = python.withPackages (ps: | ||||
|     with ps; [ | ||||
|       jinja2 | ||||
|     ] | ||||
|     ++ lib.optional withSMB impacket | ||||
|     ++ lib.optional withFTPS pyopenssl | ||||
|     ++ lib.optional withThumbnails pillow | ||||
|     ++ lib.optional withFastThumbnails pyvips | ||||
|     ++ lib.optional withMediaProcessing ffmpeg | ||||
|     ++ lib.optional withBasicAudioMetadata mutagen | ||||
|     ++ lib.optional withHashedPasswords argon2-cffi | ||||
|     ); | ||||
| in stdenv.mkDerivation { | ||||
|   pname = "copyparty"; | ||||
|   version = pinData.version; | ||||
|   src = fetchurl { | ||||
|     url = pinData.url; | ||||
|     hash = pinData.hash; | ||||
|   }; | ||||
|   buildInputs = [ makeWrapper ]; | ||||
|   dontUnpack = true; | ||||
|   dontBuild = true; | ||||
|   installPhase = '' | ||||
|     install -Dm755 $src $out/share/copyparty-sfx.py | ||||
|     makeWrapper ${pyEnv.interpreter} $out/bin/copyparty \ | ||||
|       --set PATH '${lib.makeBinPath ([ utillinux ] ++ lib.optional withMediaProcessing ffmpeg)}:$PATH' \ | ||||
|       --add-flags "$out/share/copyparty-sfx.py" | ||||
|   ''; | ||||
| } | ||||
							
								
								
									
										5
									
								
								contrib/package/nix/copyparty/pin.json
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										5
									
								
								contrib/package/nix/copyparty/pin.json
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,5 @@ | ||||
| { | ||||
|     "url": "https://github.com/9001/copyparty/releases/download/v1.9.4/copyparty-sfx.py", | ||||
|     "version": "1.9.4", | ||||
|     "hash": "sha256-17dBvr6uUzaQtzunZUX84BLTWCGe1Hucz9b48BKowHs=" | ||||
| } | ||||
							
								
								
									
										77
									
								
								contrib/package/nix/copyparty/update.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										77
									
								
								contrib/package/nix/copyparty/update.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,77 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| # Update the Nix package pin | ||||
| # | ||||
| # Usage: ./update.sh [PATH] | ||||
| # When the [PATH] is not set, it will fetch the latest release from the repo. | ||||
| # With [PATH] set, it will hash the given file and generate the URL, | ||||
| # base on the version contained within the file | ||||
|  | ||||
| import base64 | ||||
| import json | ||||
| import hashlib | ||||
| import sys | ||||
| import re | ||||
| from pathlib import Path | ||||
|  | ||||
| OUTPUT_FILE = Path("pin.json") | ||||
| TARGET_ASSET = "copyparty-sfx.py" | ||||
| HASH_TYPE = "sha256" | ||||
| LATEST_RELEASE_URL = "https://api.github.com/repos/9001/copyparty/releases/latest" | ||||
| DOWNLOAD_URL = lambda version: f"https://github.com/9001/copyparty/releases/download/v{version}/{TARGET_ASSET}" | ||||
|  | ||||
|  | ||||
| def get_formatted_hash(binary): | ||||
|     hasher = hashlib.new("sha256") | ||||
|     hasher.update(binary) | ||||
|     asset_hash = hasher.digest() | ||||
|     encoded_hash = base64.b64encode(asset_hash).decode("ascii") | ||||
|     return f"{HASH_TYPE}-{encoded_hash}" | ||||
|  | ||||
|  | ||||
| def version_from_sfx(binary): | ||||
|     result = re.search(b'^VER = "(.*)"$', binary, re.MULTILINE) | ||||
|     if result: | ||||
|         return result.groups(1)[0].decode("ascii") | ||||
|  | ||||
|     raise ValueError("version not found in provided file") | ||||
|  | ||||
|  | ||||
| def remote_release_pin(): | ||||
|     import requests | ||||
|  | ||||
|     response = requests.get(LATEST_RELEASE_URL).json() | ||||
|     version = response["tag_name"].lstrip("v") | ||||
|     asset_info = [a for a in response["assets"] if a["name"] == TARGET_ASSET][0] | ||||
|     download_url = asset_info["browser_download_url"] | ||||
|     asset = requests.get(download_url) | ||||
|     formatted_hash = get_formatted_hash(asset.content) | ||||
|  | ||||
|     result = {"url": download_url, "version": version, "hash": formatted_hash} | ||||
|     return result | ||||
|  | ||||
|  | ||||
| def local_release_pin(path): | ||||
|     asset = path.read_bytes() | ||||
|     version = version_from_sfx(asset) | ||||
|     download_url = DOWNLOAD_URL(version) | ||||
|     formatted_hash = get_formatted_hash(asset) | ||||
|  | ||||
|     result = {"url": download_url, "version": version, "hash": formatted_hash} | ||||
|     return result | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|     if len(sys.argv) > 1: | ||||
|         asset_path = Path(sys.argv[1]) | ||||
|         result = local_release_pin(asset_path) | ||||
|     else: | ||||
|         result = remote_release_pin() | ||||
|  | ||||
|     print(result) | ||||
|     json_result = json.dumps(result, indent=4) | ||||
|     OUTPUT_FILE.write_text(json_result) | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
| @@ -11,6 +11,15 @@ save one of these as `.epilogue.html` inside a folder to customize it: | ||||
|  | ||||
|  | ||||
|  | ||||
| ## example browser-js | ||||
| point `--js-browser` to one of these by URL: | ||||
|  | ||||
| * [`minimal-up2k.js`](minimal-up2k.js) is similar to the above `minimal-up2k.html` except it applies globally to all write-only folders | ||||
| * [`up2k-hooks.js`](up2k-hooks.js) lets you specify a ruleset for files to skip uploading | ||||
|   * [`up2k-hook-ytid.js`](up2k-hook-ytid.js) is a more specific example checking youtube-IDs against some API | ||||
|  | ||||
|  | ||||
|  | ||||
| ## example browser-css | ||||
| point `--css-browser` to one of these by URL: | ||||
|  | ||||
|   | ||||
| @@ -1,13 +1,22 @@ | ||||
| <!-- | ||||
|   NOTE: DEPRECATED; please use the javascript version instead: | ||||
|   https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/minimal-up2k.js | ||||
|  | ||||
|   ---- | ||||
|  | ||||
|   save this as .epilogue.html inside a write-only folder to declutter the UI,  makes it look like | ||||
|   https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png | ||||
|  | ||||
|   only works if you disable the prologue/epilogue sandbox with --no-sb-lg | ||||
|   which should probably be combined with --no-dot-ren to prevent damage | ||||
|   (`no_sb_lg` can also be set per-volume with volflags) | ||||
| --> | ||||
|  | ||||
| <style> | ||||
|  | ||||
|     /* make the up2k ui REALLY minimal by hiding a bunch of stuff: */ | ||||
|  | ||||
|     #ops, #tree, #path, #wrap>h2:last-child,  /* main tabs and navigators (tree/breadcrumbs) */ | ||||
|     #ops, #tree, #path, #wfp,  /* main tabs and navigators (tree/breadcrumbs) */ | ||||
|  | ||||
|     #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw),  /* most of the config options */ | ||||
|  | ||||
|   | ||||
							
								
								
									
										59
									
								
								contrib/plugins/minimal-up2k.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										59
									
								
								contrib/plugins/minimal-up2k.js
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,59 @@ | ||||
| /* | ||||
|  | ||||
| makes the up2k ui REALLY minimal by hiding a bunch of stuff | ||||
|  | ||||
| almost the same as minimal-up2k.html except this one...: | ||||
|  | ||||
|  -- applies to every write-only folder when used with --js-browser | ||||
|  | ||||
|  -- only applies if javascript is enabled | ||||
|  | ||||
|  -- doesn't hide the total upload ETA display | ||||
|  | ||||
|  -- looks slightly better | ||||
|  | ||||
| */ | ||||
|  | ||||
| var u2min = ` | ||||
| <style> | ||||
|  | ||||
| #ops, #path, #tree, #files, #wfp, | ||||
| #u2conf td.c+.c, #u2cards, #srch_dz, #srch_zd { | ||||
|   display: none !important; | ||||
| } | ||||
| #u2conf {margin:5em auto 0 auto !important} | ||||
| #u2conf.ww {width:70em} | ||||
| #u2conf.w {width:50em} | ||||
| #u2conf.w .c, | ||||
| #u2conf.w #u2btn_cw {text-align:left} | ||||
| #u2conf.w #u2btn_cw {width:70%} | ||||
| #u2etaw {margin:3em auto} | ||||
| #u2etaw.w { | ||||
|   text-align: center; | ||||
|   margin: -3.5em auto 5em auto; | ||||
| } | ||||
| #u2etaw.w #u2etas {margin-right:-37em} | ||||
| #u2etaw.w #u2etas.o {margin-top:-2.2em} | ||||
| #u2etaw.ww {margin:-1em auto} | ||||
| #u2etaw.ww #u2etas {padding-left:4em} | ||||
| #u2etas { | ||||
|   background: none !important; | ||||
|   border: none !important; | ||||
| } | ||||
| #wrap {margin-left:2em !important} | ||||
| .logue { | ||||
|   border: none !important; | ||||
|   margin: 2em auto !important; | ||||
| } | ||||
| .logue:before {content:'' !important} | ||||
|  | ||||
| </style> | ||||
|  | ||||
| <a href="#" onclick="this.parentNode.innerHTML='';">show advanced options</a> | ||||
| `; | ||||
|  | ||||
| if (!has(perms, 'read')) { | ||||
|   var e2 = mknod('div'); | ||||
|   e2.innerHTML = u2min; | ||||
|   ebi('wrap').insertBefore(e2, QS('#wfp')); | ||||
| } | ||||
							
								
								
									
										208
									
								
								contrib/plugins/rave.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										208
									
								
								contrib/plugins/rave.js
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,208 @@ | ||||
| /* untz untz untz untz */ | ||||
|  | ||||
| (function () { | ||||
|  | ||||
|     var can, ctx, W, H, fft, buf, bars, barw, pv, | ||||
|         hue = 0, | ||||
|         ibeat = 0, | ||||
|         beats = [9001], | ||||
|         beats_url = '', | ||||
|         uofs = 0, | ||||
|         ops = ebi('ops'), | ||||
|         raving = false, | ||||
|         recalc = 0, | ||||
|         cdown = 0, | ||||
|         FC = 0.9, | ||||
|         css = `<style> | ||||
|  | ||||
| #fft { | ||||
|     position: fixed; | ||||
|     top: 0; | ||||
|     left: 0; | ||||
|     z-index: -1; | ||||
| } | ||||
| body { | ||||
|     box-shadow: inset 0 0 0 white; | ||||
| } | ||||
| #ops>a, | ||||
| #path>a { | ||||
|     display: inline-block; | ||||
| } | ||||
| /* | ||||
| body.untz { | ||||
|     animation: untz-body 200ms ease-out; | ||||
| } | ||||
| @keyframes untz-body { | ||||
| 	0% {inset 0 0 20em white} | ||||
| 	100% {inset 0 0 0 white} | ||||
| } | ||||
| */ | ||||
| :root, html.a, html.b, html.c, html.d, html.e { | ||||
|     --row-alt: rgba(48,52,78,0.2); | ||||
| } | ||||
| #files td { | ||||
|     background: none; | ||||
| } | ||||
|  | ||||
| </style>`; | ||||
|  | ||||
|     QS('body').appendChild(mknod('div', null, css)); | ||||
|  | ||||
|     function rave_load() { | ||||
|         console.log('rave_load'); | ||||
|         can = mknod('canvas', 'fft'); | ||||
|         QS('body').appendChild(can); | ||||
|         ctx = can.getContext('2d'); | ||||
|  | ||||
|         fft = new AnalyserNode(actx, { | ||||
|             "fftSize": 2048, | ||||
|             "maxDecibels": 0, | ||||
|             "smoothingTimeConstant": 0.7, | ||||
|         }); | ||||
|         ibeat = 0; | ||||
|         beats = [9001]; | ||||
|         buf = new Uint8Array(fft.frequencyBinCount); | ||||
|         bars = buf.length * FC; | ||||
|         afilt.filters.push(fft); | ||||
|         if (!raving) { | ||||
|             raving = true; | ||||
|             raver(); | ||||
|         } | ||||
|         beats_url = mp.au.src.split('?')[0].replace(/(.*\/)(.*)/, '$1.beats/$2.txt'); | ||||
|         console.log("reading beats from", beats_url); | ||||
|         var xhr = new XHR(); | ||||
|         xhr.open('GET', beats_url, true); | ||||
|         xhr.onload = readbeats; | ||||
|         xhr.url = beats_url; | ||||
|         xhr.send(); | ||||
|     } | ||||
|  | ||||
|     function rave_unload() { | ||||
|         qsr('#fft'); | ||||
|         can = null; | ||||
|     } | ||||
|  | ||||
|     function readbeats() { | ||||
|         if (this.url != beats_url) | ||||
|             return console.log('old beats??', this.url, beats_url); | ||||
|  | ||||
|         var sbeats = this.responseText.replace(/\r/g, '').split(/\n/g); | ||||
|         if (sbeats.length < 3) | ||||
|             return; | ||||
|  | ||||
|         beats = []; | ||||
|         for (var a = 0; a < sbeats.length; a++) | ||||
|             beats.push(parseFloat(sbeats[a])); | ||||
|  | ||||
|         var end = beats.slice(-2), | ||||
|             t = end[1], | ||||
|             d = t - end[0]; | ||||
|  | ||||
|         while (d > 0.1 && t < 1200) | ||||
|             beats.push(t += d); | ||||
|     } | ||||
|  | ||||
|     function hrand() { | ||||
|         return Math.random() - 0.5; | ||||
|     } | ||||
|  | ||||
|     function raver() { | ||||
|         if (!can) { | ||||
|             raving = false; | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         requestAnimationFrame(raver); | ||||
|         if (!mp || !mp.au || mp.au.paused) | ||||
|             return; | ||||
|  | ||||
|         if (--uofs >= 0) { | ||||
|             document.body.style.marginLeft = hrand() * uofs + 'px'; | ||||
|             ebi('tree').style.marginLeft = hrand() * uofs + 'px'; | ||||
|             for (var a of QSA('#ops>a, #path>a, #pctl>a')) | ||||
|                 a.style.transform = 'translate(' + hrand() * uofs * 1 + 'px, ' + hrand() * uofs * 0.7 + 'px) rotate(' + Math.random() * uofs * 0.7 + 'deg)' | ||||
|         } | ||||
|  | ||||
|         if (--recalc < 0) { | ||||
|             recalc = 60; | ||||
|             var tree = ebi('tree'), | ||||
|                 x = tree.style.display == 'none' ? 0 : tree.offsetWidth; | ||||
|  | ||||
|             //W = can.width = window.innerWidth - x; | ||||
|             //H = can.height = window.innerHeight; | ||||
|             //H = ebi('widget').offsetTop; | ||||
|             W = can.width = bars; | ||||
|             H = can.height = 512; | ||||
|             barw = 1; //parseInt(0.8 + W / bars); | ||||
|             can.style.left = x + 'px'; | ||||
|             can.style.width = (window.innerWidth - x) + 'px'; | ||||
|             can.style.height = ebi('widget').offsetTop + 'px'; | ||||
|         } | ||||
|  | ||||
|         //if (--cdown == 1) | ||||
|         //    clmod(ops, 'untz'); | ||||
|  | ||||
|         fft.getByteFrequencyData(buf); | ||||
|  | ||||
|         var imax = 0, vmax = 0; | ||||
|         for (var a = 10; a < 50; a++) | ||||
|             if (vmax < buf[a]) { | ||||
|                 vmax = buf[a]; | ||||
|                 imax = a; | ||||
|             } | ||||
|  | ||||
|         hue = hue * 0.93 + imax * 0.07; | ||||
|  | ||||
|         ctx.fillStyle = 'rgba(0,0,0,0)'; | ||||
|         ctx.fillRect(0, 0, W, H); | ||||
|         ctx.clearRect(0, 0, W, H); | ||||
|         ctx.fillStyle = 'hsla(' + (hue * 2.5) + ',100%,50%,0.7)'; | ||||
|  | ||||
|         var x = 0, mul = (H / 256) * 0.5; | ||||
|         for (var a = 0; a < buf.length * FC; a++) { | ||||
|             var v = buf[a] * mul * (1 + 0.69 * a / buf.length); | ||||
|             ctx.fillRect(x, H - v, barw, v); | ||||
|             x += barw; | ||||
|         } | ||||
|  | ||||
|         var t = mp.au.currentTime + 0.05; | ||||
|  | ||||
|         if (ibeat >= beats.length || beats[ibeat] > t) | ||||
|             return; | ||||
|  | ||||
|         while (ibeat < beats.length && beats[ibeat++] < t) | ||||
|             continue; | ||||
|  | ||||
|         return untz(); | ||||
|  | ||||
|         var cv = 0; | ||||
|         for (var a = 0; a < 128; a++) | ||||
|             cv += buf[a]; | ||||
|  | ||||
|         if (cv - pv > 1000) { | ||||
|             console.log(pv, cv, cv - pv); | ||||
|             if (cdown < 0) { | ||||
|                 clmod(ops, 'untz', 1); | ||||
|                 cdown = 20; | ||||
|             } | ||||
|         } | ||||
|         pv = cv; | ||||
|     } | ||||
|  | ||||
|     function untz() { | ||||
|         console.log('untz'); | ||||
|         uofs = 14; | ||||
|         document.body.animate([ | ||||
|             { boxShadow: 'inset 0 0 1em #f0c' }, | ||||
|             { boxShadow: 'inset 0 0 20em #f0c', offset: 0.2 }, | ||||
|             { boxShadow: 'inset 0 0 0 #f0c' }, | ||||
|         ], { duration: 200, iterations: 1 }); | ||||
|     } | ||||
|  | ||||
|     afilt.plugs.push({ | ||||
|         "en": true, | ||||
|         "load": rave_load, | ||||
|         "unload": rave_unload | ||||
|     }); | ||||
|  | ||||
| })(); | ||||
							
								
								
									
										297
									
								
								contrib/plugins/up2k-hook-ytid.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										297
									
								
								contrib/plugins/up2k-hook-ytid.js
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,297 @@ | ||||
| // way more specific example -- | ||||
| // assumes all files dropped into the uploader have a youtube-id somewhere in the filename, | ||||
| // locates the youtube-ids and passes them to an API which returns a list of IDs which should be uploaded | ||||
| // | ||||
| // also tries to find the youtube-id in the embedded metadata | ||||
| // | ||||
| // assumes copyparty is behind nginx as /ytq is a standalone service which must be rproxied in place | ||||
|  | ||||
| function up2k_namefilter(good_files, nil_files, bad_files, hooks) { | ||||
|     var passthru = up2k.uc.fsearch; | ||||
|     if (passthru) | ||||
|         return hooks[0](good_files, nil_files, bad_files, hooks.slice(1)); | ||||
|  | ||||
|     a_up2k_namefilter(good_files, nil_files, bad_files, hooks).then(() => { }); | ||||
| } | ||||
|  | ||||
| // ebi('op_up2k').appendChild(mknod('input','unick')); | ||||
|  | ||||
| function bstrpos(buf, ptn) { | ||||
|     var ofs = 0, | ||||
|         ch0 = ptn[0], | ||||
|         sz = buf.byteLength; | ||||
|  | ||||
|     while (true) { | ||||
|         ofs = buf.indexOf(ch0, ofs); | ||||
|         if (ofs < 0 || ofs >= sz) | ||||
|             return -1; | ||||
|  | ||||
|         for (var a = 1; a < ptn.length; a++) | ||||
|             if (buf[ofs + a] !== ptn[a]) | ||||
|                 break; | ||||
|  | ||||
|         if (a === ptn.length) | ||||
|             return ofs; | ||||
|  | ||||
|         ++ofs; | ||||
|     } | ||||
| } | ||||
|  | ||||
| async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) { | ||||
|     var t0 = Date.now(), | ||||
|         yt_ids = new Set(), | ||||
|         textdec = new TextDecoder('latin1'), | ||||
|         md_ptn = new TextEncoder().encode('youtube.com/watch?v='), | ||||
|         file_ids = [],  // all IDs found for each good_files | ||||
|         md_only = [],  // `${id} ${fn}` where ID was only found in metadata | ||||
|         mofs = 0, | ||||
|         mnchk = 0, | ||||
|         mfile = '', | ||||
|         myid = localStorage.getItem('ytid_t0'); | ||||
|  | ||||
|     if (!myid) | ||||
|         localStorage.setItem('ytid_t0', myid = Date.now()); | ||||
|  | ||||
|     for (var a = 0; a < good_files.length; a++) { | ||||
|         var [fobj, name] = good_files[a], | ||||
|             cname = name,  // will clobber | ||||
|             sz = fobj.size, | ||||
|             ids = [], | ||||
|             fn_ids = [], | ||||
|             md_ids = [], | ||||
|             id_ok = false, | ||||
|             m; | ||||
|  | ||||
|         // all IDs found in this file | ||||
|         file_ids.push(ids); | ||||
|  | ||||
|         // look for ID in filename; reduce the | ||||
|         // metadata-scan intensity if the id looks safe | ||||
|         m = /[\[(-]([\w-]{11})[\])]?\.(?:mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name); | ||||
|         id_ok = !!m; | ||||
|  | ||||
|         while (true) { | ||||
|             // fuzzy catch-all; | ||||
|             // some ytdl fork did %(title)-%(id).%(ext) ... | ||||
|             m = /(?:^|[^\w])([\w-]{11})(?:$|[^\w-])/.exec(cname); | ||||
|             if (!m) | ||||
|                 break; | ||||
|  | ||||
|             cname = cname.replace(m[1], ''); | ||||
|             yt_ids.add(m[1]); | ||||
|             fn_ids.unshift(m[1]); | ||||
|         } | ||||
|  | ||||
|         // look for IDs in video metadata, | ||||
|         if (/\.(mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name)) { | ||||
|             toast.show('inf r', 0, `analyzing file ${a + 1} / ${good_files.length} :\n${name}\n\nhave analysed ${++mnchk} files in ${(Date.now() - t0) / 1000} seconds, ${humantime((good_files.length - (a + 1)) * (((Date.now() - t0) / 1000) / mnchk))} remaining,\n\nbiggest offset so far is ${mofs}, in this file:\n\n${mfile}`); | ||||
|  | ||||
|             // check first and last 128 MiB; | ||||
|             // pWxOroN5WCo.mkv @  6edb98 (6.92M) | ||||
|             // Nf-nN1wF5Xo.mp4 @ 4a98034 (74.6M) | ||||
|             var chunksz = 1024 * 1024 * 2,  // byte | ||||
|                 aspan = id_ok ? 128 : 512;  // MiB | ||||
|  | ||||
|             aspan = parseInt(Math.min(sz / 2, aspan * 1024 * 1024) / chunksz) * chunksz; | ||||
|             if (!aspan) | ||||
|                 aspan = Math.min(sz, chunksz); | ||||
|  | ||||
|             for (var side = 0; side < 2; side++) { | ||||
|                 var ofs = side ? Math.max(0, sz - aspan) : 0, | ||||
|                     nchunks = aspan / chunksz; | ||||
|  | ||||
|                 for (var chunk = 0; chunk < nchunks; chunk++) { | ||||
|                     var bchunk = await fobj.slice(ofs, ofs + chunksz + 16).arrayBuffer(), | ||||
|                         uchunk = new Uint8Array(bchunk, 0, bchunk.byteLength), | ||||
|                         bofs = bstrpos(uchunk, md_ptn), | ||||
|                         absofs = Math.min(ofs + bofs, (sz - ofs) + bofs), | ||||
|                         txt = bofs < 0 ? '' : textdec.decode(uchunk.subarray(bofs)), | ||||
|                         m; | ||||
|  | ||||
|                     //console.log(`side ${ side }, chunk ${ chunk }, ofs ${ ofs }, bchunk ${ bchunk.byteLength }, txt ${ txt.length }`); | ||||
|                     while (true) { | ||||
|                         // mkv/webm have [a-z] immediately after url | ||||
|                         m = /(youtube\.com\/watch\?v=[\w-]{11})/.exec(txt); | ||||
|                         if (!m) | ||||
|                             break; | ||||
|  | ||||
|                         txt = txt.replace(m[1], ''); | ||||
|                         m = m[1].slice(-11); | ||||
|  | ||||
|                         console.log(`found ${m} @${bofs}, ${name} `); | ||||
|                         yt_ids.add(m); | ||||
|                         if (!has(fn_ids, m) && !has(md_ids, m)) { | ||||
|                             md_ids.push(m); | ||||
|                             md_only.push(`${m} ${name}`); | ||||
|                         } | ||||
|                         else | ||||
|                             // id appears several times; make it preferred | ||||
|                             md_ids.unshift(m); | ||||
|  | ||||
|                         // bail after next iteration | ||||
|                         chunk = nchunks - 1; | ||||
|                         side = 9; | ||||
|  | ||||
|                         if (mofs < absofs) { | ||||
|                             mofs = absofs; | ||||
|                             mfile = name; | ||||
|                         } | ||||
|                     } | ||||
|                     ofs += chunksz; | ||||
|                     if (ofs >= sz) | ||||
|                         break; | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         for (var yi of md_ids) | ||||
|             ids.push(yi); | ||||
|  | ||||
|         for (var yi of fn_ids) | ||||
|             if (!has(ids, yi)) | ||||
|                 ids.push(yi); | ||||
|     } | ||||
|  | ||||
|     if (md_only.length) | ||||
|         console.log('recovered the following youtube-IDs by inspecting metadata:\n\n' + md_only.join('\n')); | ||||
|     else if (yt_ids.size) | ||||
|         console.log('did not discover any additional youtube-IDs by inspecting metadata; all the IDs also existed in the filenames'); | ||||
|     else | ||||
|         console.log('failed to find any youtube-IDs at all, sorry'); | ||||
|  | ||||
|     if (false) { | ||||
|         var msg = `finished analysing ${mnchk} files in ${(Date.now() - t0) / 1000} seconds,\n\nbiggest offset was ${mofs} in this file:\n\n${mfile}`, | ||||
|             mfun = function () { toast.ok(0, msg); }; | ||||
|  | ||||
|         mfun(); | ||||
|         setTimeout(mfun, 200); | ||||
|  | ||||
|         return hooks[0]([], [], [], hooks.slice(1)); | ||||
|     } | ||||
|  | ||||
|     var el = ebi('unick'), unick = el ? el.value : ''; | ||||
|     if (unick) { | ||||
|         console.log(`sending uploader nickname [${unick}]`); | ||||
|         fetch(document.location, { | ||||
|             method: 'POST', | ||||
|             headers: { 'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8' }, | ||||
|             body: 'msg=' + encodeURIComponent(unick) | ||||
|         }); | ||||
|     } | ||||
|  | ||||
|     toast.inf(5, `running query for ${yt_ids.size} youtube-IDs...`); | ||||
|  | ||||
|     var xhr = new XHR(); | ||||
|     xhr.open('POST', '/ytq', true); | ||||
|     xhr.setRequestHeader('Content-Type', 'text/plain'); | ||||
|     xhr.onload = xhr.onerror = function () { | ||||
|         if (this.status != 200) | ||||
|             return toast.err(0, `sorry, database query failed ;_;\n\nplease let us know so we can look at it, thx!!\n\nerror ${this.status}: ${(this.response && this.response.err) || this.responseText}`); | ||||
|  | ||||
|         process_id_list(this.responseText); | ||||
|     }; | ||||
|     xhr.send(Array.from(yt_ids).join('\n')); | ||||
|  | ||||
|     function process_id_list(txt) { | ||||
|         var wanted_ids = new Set(txt.trim().split('\n')), | ||||
|             name_id = {}, | ||||
|             wanted_names = new Set(),  // basenames with a wanted ID -- not including relpath | ||||
|             wanted_names_scoped = {},  // basenames with a wanted ID -> list of dirs to search under | ||||
|             wanted_files = new Set();  // filedrops | ||||
|  | ||||
|         for (var a = 0; a < good_files.length; a++) { | ||||
|             var name = good_files[a][1]; | ||||
|             for (var b = 0; b < file_ids[a].length; b++) | ||||
|                 if (wanted_ids.has(file_ids[a][b])) { | ||||
|                     // let the next stage handle this to prevent dupes | ||||
|                     //wanted_files.add(good_files[a]); | ||||
|  | ||||
|                     var m = /(.*)\.(mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name); | ||||
|                     if (!m) | ||||
|                         continue; | ||||
|  | ||||
|                     var [rd, fn] = vsplit(m[1]); | ||||
|  | ||||
|                     if (fn in wanted_names_scoped) | ||||
|                         wanted_names_scoped[fn].push(rd); | ||||
|                     else | ||||
|                         wanted_names_scoped[fn] = [rd]; | ||||
|  | ||||
|                     wanted_names.add(fn); | ||||
|                     name_id[m[1]] = file_ids[a][b]; | ||||
|  | ||||
|                     break; | ||||
|                 } | ||||
|         } | ||||
|  | ||||
|         // add all files with the same basename as each explicitly wanted file | ||||
|         // (infojson/chatlog/etc when ID was discovered from metadata) | ||||
|         for (var a = 0; a < good_files.length; a++) { | ||||
|             var [rd, name] = vsplit(good_files[a][1]); | ||||
|             for (var b = 0; b < 3; b++) { | ||||
|                 name = name.replace(/\.[^\.]+$/, ''); | ||||
|                 if (!wanted_names.has(name)) | ||||
|                     continue; | ||||
|  | ||||
|                 var vid_fp = false; | ||||
|                 for (var c of wanted_names_scoped[name]) | ||||
|                     if (rd.startsWith(c)) | ||||
|                         vid_fp = c + name; | ||||
|  | ||||
|                 if (!vid_fp) | ||||
|                     continue; | ||||
|  | ||||
|                 var subdir = name_id[vid_fp]; | ||||
|                 subdir = `v${subdir.slice(0, 1)}/${subdir}-${myid}`; | ||||
|                 var newpath = subdir + '/' + good_files[a][1].split(/\//g).pop(); | ||||
|  | ||||
|                 // check if this file is a dupe | ||||
|                 for (var c of good_files) | ||||
|                     if (c[1] == newpath) | ||||
|                         newpath = null; | ||||
|  | ||||
|                 if (!newpath) | ||||
|                     break; | ||||
|  | ||||
|                 good_files[a][1] = newpath; | ||||
|                 wanted_files.add(good_files[a]); | ||||
|                 break; | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         function upload_filtered() { | ||||
|             if (!wanted_files.size) | ||||
|                 return modal.alert('Good news -- turns out we already have all those.\n\nBut thank you for checking in!'); | ||||
|  | ||||
|             hooks[0](Array.from(wanted_files), nil_files, bad_files, hooks.slice(1)); | ||||
|         } | ||||
|  | ||||
|         function upload_all() { | ||||
|             hooks[0](good_files, nil_files, bad_files, hooks.slice(1)); | ||||
|         } | ||||
|  | ||||
|         var n_skip = good_files.length - wanted_files.size, | ||||
|             msg = `you added ${good_files.length} files; ${good_files.length == n_skip ? 'all' : n_skip} of them were skipped --\neither because we already have them,\nor because there is no youtube-ID in your filenames.\n\n<code>OK</code> / <code>Enter</code> = continue uploading just the ${wanted_files.size} files we definitely need\n\n<code>Cancel</code> / <code>ESC</code> = override the filter; upload ALL the files you added`; | ||||
|  | ||||
|         if (!n_skip) | ||||
|             upload_filtered(); | ||||
|         else | ||||
|             modal.confirm(msg, upload_filtered, upload_all); | ||||
|     }; | ||||
| } | ||||
|  | ||||
| up2k_hooks.push(function () { | ||||
|     up2k.gotallfiles.unshift(up2k_namefilter); | ||||
| }); | ||||
|  | ||||
| // persist/restore nickname field if present | ||||
| setInterval(function () { | ||||
|     var o = ebi('unick'); | ||||
|     if (!o || document.activeElement == o) | ||||
|         return; | ||||
|  | ||||
|     o.oninput = function () { | ||||
|         localStorage.setItem('unick', o.value); | ||||
|     }; | ||||
|     o.value = localStorage.getItem('unick') || ''; | ||||
| }, 1000); | ||||
							
								
								
									
										45
									
								
								contrib/plugins/up2k-hooks.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										45
									
								
								contrib/plugins/up2k-hooks.js
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,45 @@ | ||||
| // hooks into up2k | ||||
|  | ||||
| function up2k_namefilter(good_files, nil_files, bad_files, hooks) { | ||||
|     // is called when stuff is dropped into the browser, | ||||
|     // after iterating through the directory tree and discovering all files, | ||||
|     // before the upload confirmation dialogue is shown | ||||
|  | ||||
|     // good_files will successfully upload | ||||
|     // nil_files are empty files and will show an alert in the final hook | ||||
|     // bad_files are unreadable and cannot be uploaded | ||||
|     var file_lists = [good_files, nil_files, bad_files]; | ||||
|  | ||||
|     // build a list of filenames | ||||
|     var filenames = []; | ||||
|     for (var lst of file_lists) | ||||
|         for (var ent of lst) | ||||
|             filenames.push(ent[1]); | ||||
|  | ||||
|     toast.inf(5, "running database query..."); | ||||
|  | ||||
|     // simulate delay while passing the list to some api for checking | ||||
|     setTimeout(function () { | ||||
|  | ||||
|         // only keep webm files as an example | ||||
|         var new_lists = []; | ||||
|         for (var lst of file_lists) { | ||||
|             var keep = []; | ||||
|             new_lists.push(keep); | ||||
|  | ||||
|             for (var ent of lst) | ||||
|                 if (/\.webm$/.test(ent[1])) | ||||
|                     keep.push(ent); | ||||
|         } | ||||
|  | ||||
|         // finally, call the next hook in the chain | ||||
|         [good_files, nil_files, bad_files] = new_lists; | ||||
|         hooks[0](good_files, nil_files, bad_files, hooks.slice(1)); | ||||
|  | ||||
|     }, 1000); | ||||
| } | ||||
|  | ||||
| // register | ||||
| up2k_hooks.push(function () { | ||||
|     up2k.gotallfiles.unshift(up2k_namefilter); | ||||
| }); | ||||
							
								
								
									
										26
									
								
								contrib/systemd/cfssl.service
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										26
									
								
								contrib/systemd/cfssl.service
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,26 @@ | ||||
| # NOTE: this is now a built-in feature in copyparty | ||||
| # but you may still want this if you have specific needs | ||||
| # | ||||
| # systemd service which generates a new TLS certificate on each boot, | ||||
| # that way the one-year expiry time won't cause any issues -- | ||||
| # just have everyone trust the ca.pem once every 10 years | ||||
| # | ||||
| # assumptions/placeholder values: | ||||
| #  * this script and copyparty runs as user "cpp" | ||||
| #  * copyparty repo is at ~cpp/dev/copyparty | ||||
| #  * CA is named partylan | ||||
| #  * server IPs = 10.1.2.3 and 192.168.123.1 | ||||
| #  * server hostname = party.lan | ||||
|  | ||||
| [Unit] | ||||
| Description=copyparty certificate generator | ||||
| Before=copyparty.service | ||||
|  | ||||
| [Service] | ||||
| User=cpp | ||||
| Type=oneshot | ||||
| SyslogIdentifier=cpp-cert | ||||
| ExecStart=/bin/bash -c 'cd ~/dev/copyparty/contrib && ./cfssl.sh partylan 10.1.2.3,192.168.123.1,party.lan y' | ||||
|  | ||||
| [Install] | ||||
| WantedBy=multi-user.target | ||||
| @@ -2,18 +2,27 @@ | ||||
| # and share '/mnt' with anonymous read+write | ||||
| # | ||||
| # installation: | ||||
| #   cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty | ||||
| #   restorecon -vr /etc/systemd/system/copyparty.service | ||||
| #   firewall-cmd --permanent --add-port={80,443,3923}/tcp | ||||
| #   wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py | ||||
| #   cp -pv copyparty.service /etc/systemd/system/ | ||||
| #   restorecon -vr /etc/systemd/system/copyparty.service  # on fedora/rhel | ||||
| #   firewall-cmd --permanent --add-port={80,443,3923}/tcp  # --zone=libvirt | ||||
| #   firewall-cmd --reload | ||||
| #   systemctl daemon-reload && systemctl enable --now copyparty | ||||
| # | ||||
| # if it fails to start, first check this: systemctl status copyparty | ||||
| # then try starting it while viewing logs: journalctl -fan 100 | ||||
| # | ||||
| # you may want to: | ||||
| #   change "User=cpp" and "/home/cpp/" to another user | ||||
| #   remove the nft lines to only listen on port 3923 | ||||
| # and in the ExecStart= line: | ||||
| #   change '/usr/bin/python3' to another interpreter | ||||
| #   change '/mnt::rw' to another location or permission-set | ||||
| #   remove '-p 80,443,3923' to only listen on port 3923 | ||||
| #   add '-q' to disable logging on busy servers | ||||
| #   add '-i 127.0.0.1' to only allow local connections | ||||
| #   add '-e2dsa' to enable filesystem scanning + indexing | ||||
| #   add '-e2ts' to enable metadata indexing | ||||
| #   remove '--ansi' to disable colored logs | ||||
| # | ||||
| # with `Type=notify`, copyparty will signal systemd when it is ready to | ||||
| #   accept connections; correctly delaying units depending on copyparty. | ||||
| @@ -21,8 +30,8 @@ | ||||
| #   python disabling line-buffering, so messages are out-of-order: | ||||
| #   https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png | ||||
| # | ||||
| # if you remove -q to enable logging, you may also want to remove the | ||||
| #   following line to enable buffering (slightly better performance): | ||||
| # unless you add -q to disable logging, you may want to remove the | ||||
| #   following line to allow buffering (slightly better performance): | ||||
| #   Environment=PYTHONUNBUFFERED=x | ||||
| # | ||||
| # keep ExecStartPre before ExecStart, at least on rhel8 | ||||
| @@ -35,8 +44,23 @@ Type=notify | ||||
| SyslogIdentifier=copyparty | ||||
| Environment=PYTHONUNBUFFERED=x | ||||
| ExecReload=/bin/kill -s USR1 $MAINPID | ||||
| ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' | ||||
| ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -p 80,443,3923 -e2d -v /mnt::rw | ||||
|  | ||||
| # user to run as + where the TLS certificate is (if any) | ||||
| User=cpp | ||||
| Environment=XDG_CONFIG_HOME=/home/cpp/.config | ||||
|  | ||||
| # OPTIONAL: setup forwarding from ports 80 and 443 to port 3923 | ||||
| ExecStartPre=+/bin/bash -c 'nft -n -a list table nat | awk "/ to :3923 /{print\$NF}" | xargs -rL1 nft delete rule nat prerouting handle; true' | ||||
| ExecStartPre=+nft add table ip nat | ||||
| ExecStartPre=+nft -- add chain ip nat prerouting { type nat hook prerouting priority -100 \; } | ||||
| ExecStartPre=+nft add rule ip nat prerouting tcp dport 80 redirect to :3923 | ||||
| ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923 | ||||
|  | ||||
| # stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running | ||||
| ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' | ||||
|  | ||||
| # copyparty settings | ||||
| ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py --ansi -e2d -v /mnt::rw | ||||
|  | ||||
| [Install] | ||||
| WantedBy=multi-user.target | ||||
|   | ||||
| @@ -6,12 +6,17 @@ | ||||
| #   1) put copyparty-sfx.py and prisonparty.sh in /usr/local/bin | ||||
| #   2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty | ||||
| # | ||||
| # expose additional filesystem locations to copyparty | ||||
| #   by listing them between the last `1000` and `--` | ||||
| # | ||||
| # `1000 1000` = what user to run copyparty as | ||||
| # | ||||
| # you may want to: | ||||
| #   change '/mnt::rw' to another location or permission-set | ||||
| #    (remember to change the '/mnt' chroot arg too) | ||||
| # | ||||
| # enable line-buffering for realtime logging (slight performance cost): | ||||
| #   inside the [Service] block, add the following line: | ||||
| # unless you add -q to disable logging, you may want to remove the | ||||
| #   following line to allow buffering (slightly better performance): | ||||
| #   Environment=PYTHONUNBUFFERED=x | ||||
|  | ||||
| [Unit] | ||||
| @@ -19,7 +24,14 @@ Description=copyparty file server | ||||
|  | ||||
| [Service] | ||||
| SyslogIdentifier=prisonparty | ||||
| WorkingDirectory=/usr/local/bin | ||||
| Environment=PYTHONUNBUFFERED=x | ||||
| WorkingDirectory=/var/lib/copyparty-jail | ||||
| ExecReload=/bin/kill -s USR1 $MAINPID | ||||
|  | ||||
| # stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running | ||||
| ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' | ||||
|  | ||||
| # run copyparty | ||||
| ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \ | ||||
|   /usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw | ||||
|  | ||||
|   | ||||
							
								
								
									
										45
									
								
								contrib/webdav-cfg.bat
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										45
									
								
								contrib/webdav-cfg.bat
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,45 @@ | ||||
| @echo off | ||||
| rem removes the 47.6 MiB filesize limit when downloading from webdav | ||||
| rem + optionally allows/enables password-auth over plaintext http | ||||
| rem + optionally helps disable wpad, removing the 10sec latency | ||||
|  | ||||
| net session >nul 2>&1 | ||||
| if %errorlevel% neq 0 ( | ||||
|     echo sorry, you must run this as administrator | ||||
|     pause | ||||
|     exit /b | ||||
| ) | ||||
|  | ||||
| reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\WebClient\Parameters /v FileSizeLimitInBytes /t REG_DWORD /d 0xffffffff /f | ||||
| reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Services\WebClient\Parameters /v FsCtlRequestTimeoutInSec /t REG_DWORD /d 0xffffffff /f | ||||
|  | ||||
| echo( | ||||
| echo OK; | ||||
| echo allow webdav basic-auth over plaintext http? | ||||
| echo Y: login works, but the password will be visible in wireshark etc | ||||
| echo N: login will NOT work unless you use https and valid certificates | ||||
| choice | ||||
| if %errorlevel% equ 1 ( | ||||
|     reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\WebClient\Parameters /v BasicAuthLevel /t REG_DWORD /d 0x2 /f | ||||
|     rem default is 1 (require tls) | ||||
| ) | ||||
|  | ||||
| echo( | ||||
| echo OK; | ||||
| echo do you want to disable wpad? | ||||
| echo can give a HUGE speed boost depending on network settings | ||||
| choice | ||||
| if %errorlevel% equ 1 ( | ||||
|     echo( | ||||
|     echo i'm about to open the [Connections] tab in [Internet Properties] for you; | ||||
|     echo please click [LAN settings] and disable [Automatically detect settings] | ||||
|     echo( | ||||
|     pause | ||||
|     control inetcpl.cpl,,4 | ||||
| ) | ||||
|  | ||||
| net stop webclient | ||||
| net start webclient | ||||
| echo( | ||||
| echo OK; all done | ||||
| pause | ||||
| @@ -1,80 +1,62 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import platform | ||||
| import time | ||||
| import sys | ||||
| import os | ||||
| import platform | ||||
| import sys | ||||
| import time | ||||
|  | ||||
| PY2 = sys.version_info[0] == 2 | ||||
| if PY2: | ||||
|     sys.dont_write_bytecode = True | ||||
|     unicode = unicode | ||||
| # fmt: off | ||||
| _:tuple[int,int]=(0,0)  # _____________________________________________________________________  hey there! if you are reading this, your python is too old to run copyparty without some help. Please use https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py or the pypi package instead, or see https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building if you want to build it yourself :-)  ************************************************************************************************************************************************ | ||||
| # fmt: on | ||||
|  | ||||
| try: | ||||
|     from typing import TYPE_CHECKING | ||||
| except: | ||||
|     TYPE_CHECKING = False | ||||
|  | ||||
| if True: | ||||
|     from typing import Any, Callable | ||||
|  | ||||
| PY2 = sys.version_info < (3,) | ||||
| if not PY2: | ||||
|     unicode: Callable[[Any], str] = str | ||||
| else: | ||||
|     unicode = str | ||||
|     sys.dont_write_bytecode = True | ||||
|     unicode = unicode  # noqa: F821  # pylint: disable=undefined-variable,self-assigning-variable | ||||
|  | ||||
| WINDOWS = False | ||||
| if platform.system() == "Windows": | ||||
|     WINDOWS = [int(x) for x in platform.version().split(".")] | ||||
| WINDOWS: Any = ( | ||||
|     [int(x) for x in platform.version().split(".")] | ||||
|     if platform.system() == "Windows" | ||||
|     else False | ||||
| ) | ||||
|  | ||||
| VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393] | ||||
| VT100 = "--ansi" in sys.argv or ( | ||||
|     os.environ.get("NO_COLOR", "").lower() in ("", "0", "false") | ||||
|     and sys.stdout.isatty() | ||||
|     and "--no-ansi" not in sys.argv | ||||
|     and (not WINDOWS or WINDOWS >= [10, 0, 14393]) | ||||
| ) | ||||
| # introduced in anniversary update | ||||
|  | ||||
| ANYWIN = WINDOWS or sys.platform in ["msys"] | ||||
| ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"] | ||||
|  | ||||
| MACOS = platform.system() == "Darwin" | ||||
|  | ||||
| EXE = bool(getattr(sys, "frozen", False)) | ||||
|  | ||||
| def get_unixdir(): | ||||
|     paths = [ | ||||
|         (os.environ.get, "XDG_CONFIG_HOME"), | ||||
|         (os.path.expanduser, "~/.config"), | ||||
|         (os.environ.get, "TMPDIR"), | ||||
|         (os.environ.get, "TEMP"), | ||||
|         (os.environ.get, "TMP"), | ||||
|         (unicode, "/tmp"), | ||||
|     ] | ||||
|     for chk in [os.listdir, os.mkdir]: | ||||
|         for pf, pa in paths: | ||||
|             try: | ||||
|                 p = pf(pa) | ||||
|                 # print(chk.__name__, p, pa) | ||||
|                 if not p or p.startswith("~"): | ||||
|                     continue | ||||
|  | ||||
|                 p = os.path.normpath(p) | ||||
|                 chk(p) | ||||
|                 p = os.path.join(p, "copyparty") | ||||
|                 if not os.path.isdir(p): | ||||
|                     os.mkdir(p) | ||||
|  | ||||
|                 return p | ||||
|             except: | ||||
|                 pass | ||||
|  | ||||
|     raise Exception("could not find a writable path for config") | ||||
| try: | ||||
|     CORES = len(os.sched_getaffinity(0)) | ||||
| except: | ||||
|     CORES = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2 | ||||
|  | ||||
|  | ||||
| class EnvParams(object): | ||||
|     def __init__(self): | ||||
|     def __init__(self) -> None: | ||||
|         self.t0 = time.time() | ||||
|         self.mod = os.path.dirname(os.path.realpath(__file__)) | ||||
|         if self.mod.endswith("__init__"): | ||||
|             self.mod = os.path.dirname(self.mod) | ||||
|  | ||||
|         if sys.platform == "win32": | ||||
|             self.cfg = os.path.normpath(os.environ["APPDATA"] + "/copyparty") | ||||
|         elif sys.platform == "darwin": | ||||
|             self.cfg = os.path.expanduser("~/Library/Preferences/copyparty") | ||||
|         else: | ||||
|             self.cfg = get_unixdir() | ||||
|  | ||||
|         self.cfg = self.cfg.replace("\\", "/") | ||||
|         try: | ||||
|             os.makedirs(self.cfg) | ||||
|         except: | ||||
|             if not os.path.isdir(self.cfg): | ||||
|                 raise | ||||
|         self.mod = "" | ||||
|         self.cfg = "" | ||||
|         self.ox = getattr(sys, "oxidized", None) | ||||
|  | ||||
|  | ||||
| E = EnvParams() | ||||
|   | ||||
							
								
								
									
										1240
									
								
								copyparty/__main__.py
									
									
									
									
									
										
										
										Normal file → Executable file
									
								
							
							
						
						
									
										1240
									
								
								copyparty/__main__.py
									
									
									
									
									
										
										
										Normal file → Executable file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,8 +1,8 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| VERSION = (1, 2, 8) | ||||
| CODENAME = "ftp btw" | ||||
| BUILD_DT = (2022, 4, 30) | ||||
| VERSION = (1, 9, 5) | ||||
| CODENAME = "prometheable" | ||||
| BUILD_DT = (2023, 9, 9) | ||||
|  | ||||
| S_VERSION = ".".join(map(str, VERSION)) | ||||
| S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT) | ||||
|   | ||||
							
								
								
									
										1716
									
								
								copyparty/authsrv.py
									
									
									
									
									
								
							
							
						
						
									
										1716
									
								
								copyparty/authsrv.py
									
									
									
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -2,56 +2,70 @@ | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import os | ||||
| from ..util import fsenc, fsdec, SYMTIME | ||||
| from . import path | ||||
|  | ||||
| from ..util import SYMTIME, fsdec, fsenc | ||||
| from . import path as path | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Optional | ||||
|  | ||||
| _ = (path,) | ||||
| __all__ = ["path"] | ||||
|  | ||||
| # grep -hRiE '(^|[^a-zA-Z_\.-])os\.' . | gsed -r 's/ /\n/g;s/\(/(\n/g' | grep -hRiE '(^|[^a-zA-Z_\.-])os\.' | sort | uniq -c | ||||
| # printf 'os\.(%s)' "$(grep ^def bos/__init__.py | gsed -r 's/^def //;s/\(.*//' | tr '\n' '|' | gsed -r 's/.$//')" | ||||
|  | ||||
|  | ||||
| def chmod(p, mode): | ||||
| def chmod(p: str, mode: int) -> None: | ||||
|     return os.chmod(fsenc(p), mode) | ||||
|  | ||||
|  | ||||
| def listdir(p="."): | ||||
| def listdir(p: str = ".") -> list[str]: | ||||
|     return [fsdec(x) for x in os.listdir(fsenc(p))] | ||||
|  | ||||
|  | ||||
| def makedirs(name, mode=0o755, exist_ok=True): | ||||
| def makedirs(name: str, mode: int = 0o755, exist_ok: bool = True) -> bool: | ||||
|     bname = fsenc(name) | ||||
|     try: | ||||
|         os.makedirs(bname, mode) | ||||
|         return True | ||||
|     except: | ||||
|         if not exist_ok or not os.path.isdir(bname): | ||||
|             raise | ||||
|         return False | ||||
|  | ||||
|  | ||||
| def mkdir(p, mode=0o755): | ||||
| def mkdir(p: str, mode: int = 0o755) -> None: | ||||
|     return os.mkdir(fsenc(p), mode) | ||||
|  | ||||
|  | ||||
| def rename(src, dst): | ||||
| def open(p: str, *a, **ka) -> int: | ||||
|     return os.open(fsenc(p), *a, **ka) | ||||
|  | ||||
|  | ||||
| def rename(src: str, dst: str) -> None: | ||||
|     return os.rename(fsenc(src), fsenc(dst)) | ||||
|  | ||||
|  | ||||
| def replace(src, dst): | ||||
| def replace(src: str, dst: str) -> None: | ||||
|     return os.replace(fsenc(src), fsenc(dst)) | ||||
|  | ||||
|  | ||||
| def rmdir(p): | ||||
| def rmdir(p: str) -> None: | ||||
|     return os.rmdir(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def stat(p): | ||||
| def stat(p: str) -> os.stat_result: | ||||
|     return os.stat(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def unlink(p): | ||||
| def unlink(p: str) -> None: | ||||
|     return os.unlink(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def utime(p, times=None, follow_symlinks=True): | ||||
| def utime( | ||||
|     p: str, times: Optional[tuple[float, float]] = None, follow_symlinks: bool = True | ||||
| ) -> None: | ||||
|     if SYMTIME: | ||||
|         return os.utime(fsenc(p), times, follow_symlinks=follow_symlinks) | ||||
|     else: | ||||
| @@ -60,7 +74,7 @@ def utime(p, times=None, follow_symlinks=True): | ||||
|  | ||||
| if hasattr(os, "lstat"): | ||||
|  | ||||
|     def lstat(p): | ||||
|     def lstat(p: str) -> os.stat_result: | ||||
|         return os.lstat(fsenc(p)) | ||||
|  | ||||
| else: | ||||
|   | ||||
| @@ -2,43 +2,44 @@ | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import os | ||||
| from ..util import fsenc, fsdec, SYMTIME | ||||
|  | ||||
| from ..util import SYMTIME, fsdec, fsenc | ||||
|  | ||||
|  | ||||
| def abspath(p): | ||||
| def abspath(p: str) -> str: | ||||
|     return fsdec(os.path.abspath(fsenc(p))) | ||||
|  | ||||
|  | ||||
| def exists(p): | ||||
| def exists(p: str) -> bool: | ||||
|     return os.path.exists(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def getmtime(p, follow_symlinks=True): | ||||
| def getmtime(p: str, follow_symlinks: bool = True) -> float: | ||||
|     if not follow_symlinks and SYMTIME: | ||||
|         return os.lstat(fsenc(p)).st_mtime | ||||
|     else: | ||||
|         return os.path.getmtime(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def getsize(p): | ||||
| def getsize(p: str) -> int: | ||||
|     return os.path.getsize(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def isfile(p): | ||||
| def isfile(p: str) -> bool: | ||||
|     return os.path.isfile(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def isdir(p): | ||||
| def isdir(p: str) -> bool: | ||||
|     return os.path.isdir(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def islink(p): | ||||
| def islink(p: str) -> bool: | ||||
|     return os.path.islink(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def lexists(p): | ||||
| def lexists(p: str) -> bool: | ||||
|     return os.path.lexists(fsenc(p)) | ||||
|  | ||||
|  | ||||
| def realpath(p): | ||||
| def realpath(p: str) -> str: | ||||
|     return fsdec(os.path.realpath(fsenc(p))) | ||||
|   | ||||
| @@ -1,52 +1,64 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import time | ||||
| import threading | ||||
| import time | ||||
| import traceback | ||||
|  | ||||
| from .broker_util import try_exec | ||||
| import queue | ||||
|  | ||||
| from .__init__ import CORES, TYPE_CHECKING | ||||
| from .broker_mpw import MpWorker | ||||
| from .util import mp | ||||
| from .broker_util import ExceptionalQueue, try_exec | ||||
| from .util import Daemon, mp | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .svchub import SvcHub | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any | ||||
|  | ||||
|  | ||||
| class MProcess(mp.Process): | ||||
|     def __init__( | ||||
|         self, | ||||
|         q_pend: queue.Queue[tuple[int, str, list[Any]]], | ||||
|         q_yield: queue.Queue[tuple[int, str, list[Any]]], | ||||
|         target: Any, | ||||
|         args: Any, | ||||
|     ) -> None: | ||||
|         super(MProcess, self).__init__(target=target, args=args) | ||||
|         self.q_pend = q_pend | ||||
|         self.q_yield = q_yield | ||||
|  | ||||
|  | ||||
| class BrokerMp(object): | ||||
|     """external api; manages MpWorkers""" | ||||
|  | ||||
|     def __init__(self, hub): | ||||
|     def __init__(self, hub: "SvcHub") -> None: | ||||
|         self.hub = hub | ||||
|         self.log = hub.log | ||||
|         self.args = hub.args | ||||
|  | ||||
|         self.procs = [] | ||||
|         self.retpend = {} | ||||
|         self.retpend_mutex = threading.Lock() | ||||
|         self.mutex = threading.Lock() | ||||
|  | ||||
|         self.num_workers = self.args.j or mp.cpu_count() | ||||
|         self.num_workers = self.args.j or CORES | ||||
|         self.log("broker", "booting {} subprocesses".format(self.num_workers)) | ||||
|         for n in range(1, self.num_workers + 1): | ||||
|             q_pend = mp.Queue(1) | ||||
|             q_yield = mp.Queue(64) | ||||
|  | ||||
|             proc = mp.Process(target=MpWorker, args=(q_pend, q_yield, self.args, n)) | ||||
|             proc.q_pend = q_pend | ||||
|             proc.q_yield = q_yield | ||||
|             proc.clients = {} | ||||
|  | ||||
|             thr = threading.Thread( | ||||
|                 target=self.collector, args=(proc,), name="mp-sink-{}".format(n) | ||||
|             ) | ||||
|             thr.daemon = True | ||||
|             thr.start() | ||||
|             q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1) | ||||
|             q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64) | ||||
|  | ||||
|             proc = MProcess(q_pend, q_yield, MpWorker, (q_pend, q_yield, self.args, n)) | ||||
|             Daemon(self.collector, "mp-sink-{}".format(n), (proc,)) | ||||
|             self.procs.append(proc) | ||||
|             proc.start() | ||||
|  | ||||
|     def shutdown(self): | ||||
|     def shutdown(self) -> None: | ||||
|         self.log("broker", "shutting down") | ||||
|         for n, proc in enumerate(self.procs): | ||||
|             thr = threading.Thread( | ||||
|                 target=proc.q_pend.put([0, "shutdown", []]), | ||||
|                 target=proc.q_pend.put((0, "shutdown", [])), | ||||
|                 name="mp-shutdown-{}-{}".format(n, len(self.procs)), | ||||
|             ) | ||||
|             thr.start() | ||||
| @@ -62,12 +74,12 @@ class BrokerMp(object): | ||||
|  | ||||
|             procs.pop() | ||||
|  | ||||
|     def reload(self): | ||||
|     def reload(self) -> None: | ||||
|         self.log("broker", "reloading") | ||||
|         for _, proc in enumerate(self.procs): | ||||
|             proc.q_pend.put([0, "reload", []]) | ||||
|             proc.q_pend.put((0, "reload", [])) | ||||
|  | ||||
|     def collector(self, proc): | ||||
|     def collector(self, proc: MProcess) -> None: | ||||
|         """receive message from hub in other process""" | ||||
|         while True: | ||||
|             msg = proc.q_yield.get() | ||||
| @@ -78,24 +90,37 @@ class BrokerMp(object): | ||||
|  | ||||
|             elif dest == "retq": | ||||
|                 # response from previous ipc call | ||||
|                 with self.retpend_mutex: | ||||
|                     retq = self.retpend.pop(retq_id) | ||||
|  | ||||
|                 retq.put(args) | ||||
|                 raise Exception("invalid broker_mp usage") | ||||
|  | ||||
|             else: | ||||
|                 # new ipc invoking managed service in hub | ||||
|                 obj = self.hub | ||||
|                 for node in dest.split("."): | ||||
|                     obj = getattr(obj, node) | ||||
|                 try: | ||||
|                     obj = self.hub | ||||
|                     for node in dest.split("."): | ||||
|                         obj = getattr(obj, node) | ||||
|  | ||||
|                 # TODO will deadlock if dest performs another ipc | ||||
|                 rv = try_exec(retq_id, obj, *args) | ||||
|                     # TODO will deadlock if dest performs another ipc | ||||
|                     rv = try_exec(retq_id, obj, *args) | ||||
|                 except: | ||||
|                     rv = ["exception", "stack", traceback.format_exc()] | ||||
|  | ||||
|                 if retq_id: | ||||
|                     proc.q_pend.put([retq_id, "retq", rv]) | ||||
|                     proc.q_pend.put((retq_id, "retq", rv)) | ||||
|  | ||||
|     def put(self, want_retval, dest, *args): | ||||
|     def ask(self, dest: str, *args: Any) -> ExceptionalQueue: | ||||
|  | ||||
|         # new non-ipc invoking managed service in hub | ||||
|         obj = self.hub | ||||
|         for node in dest.split("."): | ||||
|             obj = getattr(obj, node) | ||||
|  | ||||
|         rv = try_exec(True, obj, *args) | ||||
|  | ||||
|         retq = ExceptionalQueue(1) | ||||
|         retq.put(rv) | ||||
|         return retq | ||||
|  | ||||
|     def say(self, dest: str, *args: Any) -> None: | ||||
|         """ | ||||
|         send message to non-hub component in other process, | ||||
|         returns a Queue object which eventually contains the response if want_retval | ||||
| @@ -103,7 +128,11 @@ class BrokerMp(object): | ||||
|         """ | ||||
|         if dest == "listen": | ||||
|             for p in self.procs: | ||||
|                 p.q_pend.put([0, dest, [args[0], len(self.procs)]]) | ||||
|                 p.q_pend.put((0, dest, [args[0], len(self.procs)])) | ||||
|  | ||||
|         elif dest == "set_netdevs": | ||||
|             for p in self.procs: | ||||
|                 p.q_pend.put((0, dest, list(args))) | ||||
|  | ||||
|         elif dest == "cb_httpsrv_up": | ||||
|             self.hub.cb_httpsrv_up() | ||||
|   | ||||
| @@ -1,20 +1,38 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import sys | ||||
| import argparse | ||||
| import os | ||||
| import signal | ||||
| import sys | ||||
| import threading | ||||
|  | ||||
| from .broker_util import ExceptionalQueue | ||||
| import queue | ||||
|  | ||||
| from .__init__ import ANYWIN | ||||
| from .authsrv import AuthSrv | ||||
| from .broker_util import BrokerCli, ExceptionalQueue | ||||
| from .httpsrv import HttpSrv | ||||
| from .util import FAKE_MP | ||||
| from copyparty.authsrv import AuthSrv | ||||
| from .util import FAKE_MP, Daemon, HMaccas | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from types import FrameType | ||||
|  | ||||
|     from typing import Any, Optional, Union | ||||
|  | ||||
|  | ||||
| class MpWorker(object): | ||||
| class MpWorker(BrokerCli): | ||||
|     """one single mp instance""" | ||||
|  | ||||
|     def __init__(self, q_pend, q_yield, args, n): | ||||
|     def __init__( | ||||
|         self, | ||||
|         q_pend: queue.Queue[tuple[int, str, list[Any]]], | ||||
|         q_yield: queue.Queue[tuple[int, str, list[Any]]], | ||||
|         args: argparse.Namespace, | ||||
|         n: int, | ||||
|     ) -> None: | ||||
|         super(MpWorker, self).__init__() | ||||
|  | ||||
|         self.q_pend = q_pend | ||||
|         self.q_yield = q_yield | ||||
|         self.args = args | ||||
| @@ -22,43 +40,45 @@ class MpWorker(object): | ||||
|  | ||||
|         self.log = self._log_disabled if args.q and not args.lo else self._log_enabled | ||||
|  | ||||
|         self.retpend = {} | ||||
|         self.retpend: dict[int, Any] = {} | ||||
|         self.retpend_mutex = threading.Lock() | ||||
|         self.mutex = threading.Lock() | ||||
|  | ||||
|         # we inherited signal_handler from parent, | ||||
|         # replace it with something harmless | ||||
|         if not FAKE_MP: | ||||
|             for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGUSR1]: | ||||
|             sigs = [signal.SIGINT, signal.SIGTERM] | ||||
|             if not ANYWIN: | ||||
|                 sigs.append(signal.SIGUSR1) | ||||
|  | ||||
|             for sig in sigs: | ||||
|                 signal.signal(sig, self.signal_handler) | ||||
|  | ||||
|         # starting to look like a good idea | ||||
|         self.asrv = AuthSrv(args, None, False) | ||||
|  | ||||
|         # instantiate all services here (TODO: inheritance?) | ||||
|         self.iphash = HMaccas(os.path.join(self.args.E.cfg, "iphash"), 8) | ||||
|         self.httpsrv = HttpSrv(self, n) | ||||
|  | ||||
|         # on winxp and some other platforms, | ||||
|         # use thr.join() to block all signals | ||||
|         thr = threading.Thread(target=self.main, name="mpw-main") | ||||
|         thr.daemon = True | ||||
|         thr.start() | ||||
|         thr.join() | ||||
|         Daemon(self.main, "mpw-main").join() | ||||
|  | ||||
|     def signal_handler(self, sig, frame): | ||||
|     def signal_handler(self, sig: Optional[int], frame: Optional[FrameType]) -> None: | ||||
|         # print('k') | ||||
|         pass | ||||
|  | ||||
|     def _log_enabled(self, src, msg, c=0): | ||||
|         self.q_yield.put([0, "log", [src, msg, c]]) | ||||
|     def _log_enabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None: | ||||
|         self.q_yield.put((0, "log", [src, msg, c])) | ||||
|  | ||||
|     def _log_disabled(self, src, msg, c=0): | ||||
|     def _log_disabled(self, src: str, msg: str, c: Union[int, str] = 0) -> None: | ||||
|         pass | ||||
|  | ||||
|     def logw(self, msg, c=0): | ||||
|     def logw(self, msg: str, c: Union[int, str] = 0) -> None: | ||||
|         self.log("mp{}".format(self.n), msg, c) | ||||
|  | ||||
|     def main(self): | ||||
|     def main(self) -> None: | ||||
|         while True: | ||||
|             retq_id, dest, args = self.q_pend.get() | ||||
|  | ||||
| @@ -77,6 +97,9 @@ class MpWorker(object): | ||||
|             elif dest == "listen": | ||||
|                 self.httpsrv.listen(args[0], args[1]) | ||||
|  | ||||
|             elif dest == "set_netdevs": | ||||
|                 self.httpsrv.set_netdevs(args[0]) | ||||
|  | ||||
|             elif dest == "retq": | ||||
|                 # response from previous ipc call | ||||
|                 with self.retpend_mutex: | ||||
| @@ -87,15 +110,14 @@ class MpWorker(object): | ||||
|             else: | ||||
|                 raise Exception("what is " + str(dest)) | ||||
|  | ||||
|     def put(self, want_retval, dest, *args): | ||||
|         if want_retval: | ||||
|             retq = ExceptionalQueue(1) | ||||
|             retq_id = id(retq) | ||||
|             with self.retpend_mutex: | ||||
|                 self.retpend[retq_id] = retq | ||||
|         else: | ||||
|             retq = None | ||||
|             retq_id = 0 | ||||
|     def ask(self, dest: str, *args: Any) -> ExceptionalQueue: | ||||
|         retq = ExceptionalQueue(1) | ||||
|         retq_id = id(retq) | ||||
|         with self.retpend_mutex: | ||||
|             self.retpend[retq_id] = retq | ||||
|  | ||||
|         self.q_yield.put([retq_id, dest, args]) | ||||
|         self.q_yield.put((retq_id, dest, list(args))) | ||||
|         return retq | ||||
|  | ||||
|     def say(self, dest: str, *args: Any) -> None: | ||||
|         self.q_yield.put((0, dest, list(args))) | ||||
|   | ||||
| @@ -1,16 +1,27 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import os | ||||
| import threading | ||||
|  | ||||
| from .__init__ import TYPE_CHECKING | ||||
| from .broker_util import BrokerCli, ExceptionalQueue, try_exec | ||||
| from .httpsrv import HttpSrv | ||||
| from .broker_util import ExceptionalQueue, try_exec | ||||
| from .util import HMaccas | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .svchub import SvcHub | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any | ||||
|  | ||||
|  | ||||
| class BrokerThr(object): | ||||
| class BrokerThr(BrokerCli): | ||||
|     """external api; behaves like BrokerMP but using plain threads""" | ||||
|  | ||||
|     def __init__(self, hub): | ||||
|     def __init__(self, hub: "SvcHub") -> None: | ||||
|         super(BrokerThr, self).__init__() | ||||
|  | ||||
|         self.hub = hub | ||||
|         self.log = hub.log | ||||
|         self.args = hub.args | ||||
| @@ -20,32 +31,43 @@ class BrokerThr(object): | ||||
|         self.num_workers = 1 | ||||
|  | ||||
|         # instantiate all services here (TODO: inheritance?) | ||||
|         self.iphash = HMaccas(os.path.join(self.args.E.cfg, "iphash"), 8) | ||||
|         self.httpsrv = HttpSrv(self, None) | ||||
|         self.reload = self.noop | ||||
|  | ||||
|     def shutdown(self): | ||||
|     def shutdown(self) -> None: | ||||
|         # self.log("broker", "shutting down") | ||||
|         self.httpsrv.shutdown() | ||||
|  | ||||
|     def noop(self): | ||||
|     def noop(self) -> None: | ||||
|         pass | ||||
|  | ||||
|     def put(self, want_retval, dest, *args): | ||||
|     def ask(self, dest: str, *args: Any) -> ExceptionalQueue: | ||||
|  | ||||
|         # new ipc invoking managed service in hub | ||||
|         obj = self.hub | ||||
|         for node in dest.split("."): | ||||
|             obj = getattr(obj, node) | ||||
|  | ||||
|         rv = try_exec(True, obj, *args) | ||||
|  | ||||
|         # pretend we're broker_mp | ||||
|         retq = ExceptionalQueue(1) | ||||
|         retq.put(rv) | ||||
|         return retq | ||||
|  | ||||
|     def say(self, dest: str, *args: Any) -> None: | ||||
|         if dest == "listen": | ||||
|             self.httpsrv.listen(args[0], 1) | ||||
|             return | ||||
|  | ||||
|         else: | ||||
|             # new ipc invoking managed service in hub | ||||
|             obj = self.hub | ||||
|             for node in dest.split("."): | ||||
|                 obj = getattr(obj, node) | ||||
|         if dest == "set_netdevs": | ||||
|             self.httpsrv.set_netdevs(args[0]) | ||||
|             return | ||||
|  | ||||
|             # TODO will deadlock if dest performs another ipc | ||||
|             rv = try_exec(want_retval, obj, *args) | ||||
|             if not want_retval: | ||||
|                 return | ||||
|         # new ipc invoking managed service in hub | ||||
|         obj = self.hub | ||||
|         for node in dest.split("."): | ||||
|             obj = getattr(obj, node) | ||||
|  | ||||
|             # pretend we're broker_mp | ||||
|             retq = ExceptionalQueue(1) | ||||
|             retq.put(rv) | ||||
|             return retq | ||||
|         try_exec(False, obj, *args) | ||||
|   | ||||
| @@ -1,17 +1,28 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
|  | ||||
| import argparse | ||||
| import traceback | ||||
|  | ||||
| from .util import Pebkac, Queue | ||||
| from queue import Queue | ||||
|  | ||||
| from .__init__ import TYPE_CHECKING | ||||
| from .authsrv import AuthSrv | ||||
| from .util import HMaccas, Pebkac | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Optional, Union | ||||
|  | ||||
|     from .util import RootLogger | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .httpsrv import HttpSrv | ||||
|  | ||||
|  | ||||
| class ExceptionalQueue(Queue, object): | ||||
|     def get(self, block=True, timeout=None): | ||||
|     def get(self, block: bool = True, timeout: Optional[float] = None) -> Any: | ||||
|         rv = super(ExceptionalQueue, self).get(block, timeout) | ||||
|  | ||||
|         # TODO: how expensive is this? | ||||
|         if isinstance(rv, list): | ||||
|             if rv[0] == "exception": | ||||
|                 if rv[1] == "pebkac": | ||||
| @@ -22,7 +33,29 @@ class ExceptionalQueue(Queue, object): | ||||
|         return rv | ||||
|  | ||||
|  | ||||
| def try_exec(want_retval, func, *args): | ||||
class BrokerCli(object):
    """
    helps mypy understand httpsrv.broker but still fails a few levels deeper,
    for example resolving httpconn.* in httpcli -- see lines tagged #mypy404
    """

    # attributes provided by concrete broker implementations
    # (e.g. BrokerThr, which subclasses this)
    log: "RootLogger"
    args: argparse.Namespace
    asrv: AuthSrv
    httpsrv: "HttpSrv"
    iphash: HMaccas

    def __init__(self) -> None:
        pass

    def ask(self, dest: str, *args: Any) -> ExceptionalQueue:
        # rpc with a reply: implementations return a queue which will
        # eventually hold the result of calling `dest` with `args`
        return ExceptionalQueue(1)

    def say(self, dest: str, *args: Any) -> None:
        # fire-and-forget rpc; no reply is produced
        pass
|  | ||||
|  | ||||
| def try_exec(want_retval: Union[bool, int], func: Any, *args: list[Any]) -> Any: | ||||
|     try: | ||||
|         return func(*args) | ||||
|  | ||||
|   | ||||
							
								
								
									
										226
									
								
								copyparty/cert.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										226
									
								
								copyparty/cert.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,226 @@ | ||||
| import calendar | ||||
| import errno | ||||
| import filecmp | ||||
| import json | ||||
| import os | ||||
| import shutil | ||||
| import time | ||||
|  | ||||
| from .util import Netdev, runcmd | ||||
|  | ||||
| HAVE_CFSSL = True | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from .util import RootLogger | ||||
|  | ||||
|  | ||||
def ensure_cert(log: "RootLogger", args) -> None:
    """
    the default cert (and the entire TLS support) is only here to enable the
    crypto.subtle javascript API, which is necessary due to the webkit guys
    being massive memers (https://www.chromium.org/blink/webcrypto)

    i feel awful about this and so should they
    """
    # args is the parsed argv namespace; must provide E.mod, E.cfg, cert
    cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
    cert_appdata = os.path.join(args.E.cfg, "cert.pem")
    if not os.path.isfile(args.cert):
        # only auto-provision the cert at the default appdata location;
        # a user-specified --cert path which doesn't exist is a config error
        if cert_appdata != args.cert:
            raise Exception("certificate file does not exist: " + args.cert)

        shutil.copy(cert_insec, args.cert)

    # sanity-check the pem layout: key first, then server cert
    with open(args.cert, "rb") as f:
        buf = f.read()
        o1 = buf.find(b" PRIVATE KEY-")
        o2 = buf.find(b" CERTIFICATE-")
        m = "unsupported certificate format: "
        if o1 < 0:
            raise Exception(m + "no private key inside pem")
        if o2 < 0:
            raise Exception(m + "no server certificate inside pem")
        if o1 > o2:
            raise Exception(m + "private key must appear before server certificate")

    try:
        # best-effort warning when the bundled insecure cert is in use;
        # comparison failures are deliberately ignored
        if filecmp.cmp(args.cert, cert_insec):
            t = "using default TLS certificate; https will be insecure:\033[36m {}"
            log("cert", t.format(args.cert), 3)
    except:
        pass

    # speaking of the default cert,
    # printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
|  | ||||
|  | ||||
def _read_crt(args, fn):
    """
    inspect cert file `fn` (relative to args.crt_dir) using cfssl-certinfo;
    returns (expiry-epoch, info-dict), or (0, {}) when missing or unreadable
    """
    try:
        if not os.path.exists(os.path.join(args.crt_dir, fn)):
            return 0, {}

        acmd = ["cfssl-certinfo", "-cert", fn]
        rc, so, se = runcmd(acmd, cwd=args.crt_dir)
        if rc:
            return 0, {}

        inf = json.loads(so)
        zs = inf["not_after"]
        # not_after is assumed to be a UTC timestamp ("...Z")
        expiry = calendar.timegm(time.strptime(zs, "%Y-%m-%dT%H:%M:%SZ"))
        return expiry, inf
    except OSError as ex:
        if ex.errno == errno.ENOENT:
            # presumably the cfssl-certinfo binary is not installed;
            # propagate so gencert can disable cfssl support entirely
            raise
        return 0, {}
    except:
        # any other parse/exec failure just means "no usable cert"
        return 0, {}
|  | ||||
|  | ||||
def _gen_ca(log: "RootLogger", args):
    """
    create a new self-signed CA in args.crt_dir (ca.pem + ca.key) unless the
    existing one still has more than 10% of its configured lifetime left
    """
    expiry = _read_crt(args, "ca.pem")[0]
    if time.time() + args.crt_cdays * 60 * 60 * 24 * 0.1 < expiry:
        return  # existing ca is still good

    backdate = "{}m".format(int(args.crt_back * 60))
    expiry = "{}m".format(int(args.crt_cdays * 60 * 24))
    cn = args.crt_cnc.replace("--crt-cn", args.crt_cn)
    algo, ksz = args.crt_alg.split("-")
    req = {
        "CN": cn,
        "CA": {"backdate": backdate, "expiry": expiry, "pathlen": 0},
        "key": {"algo": algo, "size": int(ksz)},
        "names": [{"O": cn}],
    }
    sin = json.dumps(req).encode("utf-8")
    log("cert", "creating new ca ...", 6)

    cmd = "cfssl gencert -initca -"
    rc, so, se = runcmd(cmd.split(), 30, sin=sin)
    if rc:
        # bugfix: a stray `3` (log-color arg) used to be passed into
        # Exception, making str(ex) render as a tuple in gencert's log
        raise Exception("failed to create ca-cert: {}, {}".format(rc, se))

    # cfssl emits json; cfssljson splits it into ca.pem / ca-key.pem / ca.csr
    cmd = "cfssljson -bare ca"
    sin = so.encode("utf-8")
    rc, so, se = runcmd(cmd.split(), 10, sin=sin, cwd=args.crt_dir)
    if rc:
        raise Exception("failed to translate ca-cert: {}, {}".format(rc, se))

    bname = os.path.join(args.crt_dir, "ca")
    os.rename(bname + "-key.pem", bname + ".key")
    os.unlink(bname + ".csr")

    log("cert", "new ca OK", 2)
|  | ||||
|  | ||||
def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
    """
    create a new server-cert (signed by the ca from _gen_ca) and write the
    combined key+cert+ca bundle to args.cert; skipped if the current cert
    already covers all required names and is not close to expiry
    """
    # build the SAN list: configured names, wildcards, local IPs, hostname
    names = args.crt_ns.split(",") if args.crt_ns else []
    if not args.crt_exact:
        for n in names[:]:
            names.append("*.{}".format(n))
    if not args.crt_noip:
        for ip in netdevs.keys():
            names.append(ip.split("/")[0])
    if args.crt_nolo:
        names = [x for x in names if x not in ("localhost", "127.0.0.1", "::1")]
    if not args.crt_nohn:
        names.append(args.name)
        names.append(args.name + ".local")
    if not names:
        names = ["127.0.0.1"]
    if "127.0.0.1" in names or "::1" in names:
        names.append("localhost")
    names = list({x: 1 for x in names}.keys())  # dedupe, keep insertion order

    try:
        # keep the current cert if it has all SANs, is not within 10% of
        # expiry, and is not the bundled insecure default; any failed check
        # raises, which falls through to regeneration below
        expiry, inf = _read_crt(args, "srv.pem")
        expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.1 > expiry
        cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
        for n in names:
            if n not in inf["sans"]:
                raise Exception("does not have {}".format(n))
        if expired:
            raise Exception("old server-cert has expired")
        if not filecmp.cmp(args.cert, cert_insec):
            return
    except Exception as ex:
        log("cert", "will create new server-cert; {}".format(ex))

    log("cert", "creating server-cert ...", 6)

    backdate = "{}m".format(int(args.crt_back * 60))
    expiry = "{}m".format(int(args.crt_sdays * 60 * 24))
    cfg = {
        "signing": {
            "default": {
                "backdate": backdate,
                "expiry": expiry,
                "usages": ["signing", "key encipherment", "server auth"],
            }
        }
    }
    with open(os.path.join(args.crt_dir, "cfssl.json"), "wb") as f:
        f.write(json.dumps(cfg).encode("utf-8"))

    cn = args.crt_cns.replace("--crt-cn", args.crt_cn)
    algo, ksz = args.crt_alg.split("-")
    req = {
        "key": {"algo": algo, "size": int(ksz)},
        "names": [{"O": cn}],
    }
    sin = json.dumps(req).encode("utf-8")

    cmd = "cfssl gencert -config=cfssl.json -ca ca.pem -ca-key ca.key -profile=www"
    acmd = cmd.split() + ["-hostname=" + ",".join(names), "-"]
    rc, so, se = runcmd(acmd, 30, sin=sin, cwd=args.crt_dir)
    if rc:
        raise Exception("failed to create cert: {}, {}".format(rc, se))

    # split cfssl's json output into srv.pem / srv-key.pem / srv.csr
    cmd = "cfssljson -bare srv"
    sin = so.encode("utf-8")
    rc, so, se = runcmd(cmd.split(), 10, sin=sin, cwd=args.crt_dir)
    if rc:
        raise Exception("failed to translate cert: {}, {}".format(rc, se))

    bname = os.path.join(args.crt_dir, "srv")
    try:
        os.unlink(bname + ".key")
    except:
        pass
    os.rename(bname + "-key.pem", bname + ".key")
    os.unlink(bname + ".csr")

    # assemble the final bundle: private key, server cert, then ca cert
    with open(os.path.join(args.crt_dir, "ca.pem"), "rb") as f:
        ca = f.read()

    with open(bname + ".key", "rb") as f:
        skey = f.read()

    with open(bname + ".pem", "rb") as f:
        scrt = f.read()

    with open(args.cert, "wb") as f:
        f.write(skey + scrt + ca)

    log("cert", "new server-cert OK", 2)
|  | ||||
|  | ||||
def gencert(log: "RootLogger", args, netdevs: dict[str, Netdev]):
    """
    create/refresh the TLS certificate with cfssl, falling back to the
    bundled default cert when cfssl is unavailable or generation fails
    """
    global HAVE_CFSSL

    if args.http_only:
        return  # TLS disabled entirely; no cert needed

    if args.no_crt or not HAVE_CFSSL:
        # autogen disabled (by config, or by an earlier failure);
        # just make sure some usable cert exists
        ensure_cert(log, args)
        return

    try:
        _gen_ca(log, args)
        _gen_srv(log, args, netdevs)
    except Exception as ex:
        # remember the failure so later calls skip straight to ensure_cert
        HAVE_CFSSL = False
        log("cert", "could not create TLS certificates: {}".format(ex), 3)
        if getattr(ex, "errno", 0) == errno.ENOENT:
            # ENOENT propagated by _read_crt: cfssl binaries not installed
            t = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest  (cfssl, cfssljson, cfssl-certinfo)"
            log("cert", t, 6)

        ensure_cert(log, args)
							
								
								
									
										172
									
								
								copyparty/cfg.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										172
									
								
								copyparty/cfg.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,172 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
# awk -F\" '/add_argument\("-[^-]/{print(substr($2,2))}' copyparty/__main__.py | sort | tr '\n' ' '
zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nw p q s ss sss v z zv"
# argv switches which take a single dash (generated by the awk one-liner above)
onedash = set(zs.split())
|  | ||||
|  | ||||
def vf_bmap() -> dict[str, str]:
    """argv-to-volflag: simple bools"""
    # argv names which map to a differently-spelled volflag
    mapping = {
        "never_symlink": "neversymlink",
        "no_dedup": "copydupes",
        "no_dupe": "nodupe",
        "no_forget": "noforget",
        "th_no_crop": "nocrop",
        "dav_auth": "davauth",
        "dav_rt": "davrt",
    }
    # argv names where the volflag is spelled identically
    same_name = (
        "dotsrch",
        "e2t",
        "e2ts",
        "e2tsr",
        "e2v",
        "e2vu",
        "e2vp",
        "grid",
        "hardlink",
        "magic",
        "no_sb_md",
        "no_sb_lg",
        "rand",
        "xdev",
        "xlink",
        "xvol",
    )
    mapping.update({flag: flag for flag in same_name})
    return mapping
|  | ||||
|  | ||||
def vf_vmap() -> dict[str, str]:
    """argv-to-volflag: simple values"""
    # two renamed flags, plus a handful of identically-named ones
    mapping = {"th_convt": "convt", "th_size": "thsize"}
    mapping.update({flag: flag for flag in ("dbd", "lg_sbf", "md_sbf", "nrand", "unlist")})
    return mapping
|  | ||||
|  | ||||
def vf_cmap() -> dict[str, str]:
    """argv-to-volflag: complex/lists"""
    # all complex flags keep their argv spelling
    return {flag: flag for flag in ("html_head", "mte", "mth")}
|  | ||||
|  | ||||
# permission-letter -> human-readable description
# (presumably shown in --help / admin UI -- confirm against caller)
permdescs = {
    "r": "read; list folder contents, download files",
    "w": 'write; upload files; need "r" to see the uploads',
    "m": 'move; move files and folders; need "w" at destination',
    "d": "delete; permanently delete files and folders",
    "g": "get; download files, but cannot see folder contents",
    "G": 'upget; same as "g" but can see filekeys of their own uploads',
    "h": 'html; same as "g" but folders return their index.html',
    "a": "admin; can see uploader IPs, config-reload",
}
|  | ||||
|  | ||||
# category -> {volflag-spec: description}; the spec may carry an "=example"
# suffix which is stripped when building flagdescs below
flagcats = {
    "uploads, general": {
        "nodupe": "rejects existing files (instead of symlinking them)",
        "hardlink": "does dedup with hardlinks instead of symlinks",
        "neversymlink": "disables symlink fallback; full copy instead",
        "copydupes": "disables dedup, always saves full copies of dupes",
        "daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
        "nosub": "forces all uploads into the top folder of the vfs",
        "magic": "enables filetype detection for nameless uploads",
        "gz": "allows server-side gzip of uploads with ?gz (also c,xz)",
        "pk": "forces server-side compression, optional arg: xz,9",
    },
    "upload rules": {
        "maxn=250,600": "max 250 uploads over 15min",
        "maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g, t)",
        "vmaxb=1g": "total volume size max 1 GiB (suffixes: b, k, m, g, t)",
        "vmaxn=4k": "max 4096 files in volume (suffixes: b, k, m, g, t)",
        "rand": "force randomized filenames, 9 chars long by default",
        "nrand=N": "randomized filenames are N chars long",
        "sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
        "df=1g": "ensure 1 GiB free disk space",
    },
    "upload rotation\n(moves all uploads into the specified folder structure)": {
        "rotn=100,3": "3 levels of subfolders with 100 entries in each",
        "rotf=%Y-%m/%d-%H": "date-formatted organizing",
        "lifetime=3600": "uploads are deleted after 1 hour",
    },
    "database, general": {
        "e2d": "enable database; makes files searchable + enables upload dedup",
        "e2ds": "scan writable folders for new files on startup; also sets -e2d",
        "e2dsa": "scans all folders for new files on startup; also sets -e2d",
        "e2t": "enable multimedia indexing; makes it possible to search for tags",
        "e2ts": "scan existing files for tags on startup; also sets -e2t",
        "e2tsa": "delete all metadata from DB (full rescan); also sets -e2ts",
        "d2ts": "disables metadata collection for existing files",
        "d2ds": "disables onboot indexing, overrides -e2ds*",
        "d2t": "disables metadata collection, overrides -e2t*",
        "d2v": "disables file verification, overrides -e2v*",
        "d2d": "disables all database stuff, overrides -e2*",
        "hist=/tmp/cdb": "puts thumbnails and indexes at that location",
        "scan=60": "scan for new files every 60sec, same as --re-maxage",
        "nohash=\\.iso$": "skips hashing file contents if path matches *.iso",
        "noidx=\\.iso$": "fully ignores the contents at paths matching *.iso",
        "noforget": "don't forget files when deleted from disk",
        "fat32": "avoid excessive reindexing on android sdcardfs",
        "dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
        "xlink": "cross-volume dupe detection / linking",
        "xdev": "do not descend into other filesystems",
        "xvol": "do not follow symlinks leaving the volume root",
        "dotsrch": "show dotfiles in search results",
        "nodotsrch": "hide dotfiles in search results (default)",
    },
    'database, audio tags\n"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...': {
        "mtp=.bpm=f,audio-bpm.py": 'uses the "audio-bpm.py" program to\ngenerate ".bpm" tags from uploads (f = overwrite tags)',
        "mtp=ahash,vhash=media-hash.py": "collects two tags at once",
    },
    "thumbnails": {
        "dthumb": "disables all thumbnails",
        "dvthumb": "disables video thumbnails",
        "dathumb": "disables audio thumbnails (spectrograms)",
        "dithumb": "disables image thumbnails",
        "thsize": "thumbnail res; WxH",
        "nocrop": "disable center-cropping",
        "convt": "conversion timeout in seconds",
    },
    "handlers\n(better explained in --help-handlers)": {
        "on404=PY": "handle 404s by executing PY file",
        "on403=PY": "handle 403s by executing PY file",
    },
    "event hooks\n(better explained in --help-hooks)": {
        "xbu=CMD": "execute CMD before a file upload starts",
        "xau=CMD": "execute CMD after  a file upload finishes",
        "xiu=CMD": "execute CMD after  all uploads finish and volume is idle",
        "xbr=CMD": "execute CMD before a file rename/move",
        "xar=CMD": "execute CMD after  a file rename/move",
        "xbd=CMD": "execute CMD before a file delete",
        "xad=CMD": "execute CMD after  a file delete",
        "xm=CMD": "execute CMD on message",
        "xban=CMD": "execute CMD if someone gets banned",
    },
    "client and ux": {
        "grid": "show grid/thumbnails by default",
        "unlist": "dont list files matching REGEX",
        "html_head=TXT": "includes TXT in the <head>",
        "robots": "allows indexing by search engines (default)",
        "norobots": "kindly asks search engines to leave",
        "no_sb_md": "disable js sandbox for markdown files",
        "no_sb_lg": "disable js sandbox for prologue/epilogue",
        "sb_md": "enable js sandbox for markdown files (default)",
        "sb_lg": "enable js sandbox for prologue/epilogue (default)",
        "md_sbf": "list of markdown-sandbox safeguards to disable",
        "lg_sbf": "list of *logue-sandbox safeguards to disable",
        "nohtml": "return html and markdown as text/html",
    },
    "others": {
        "fk=8": 'generates per-file accesskeys,\nwhich will then be required at the "g" permission',
        "davauth": "ask webdav clients to login for all folders",
        "davrt": "show lastmod time of symlink destination, not the link itself\n(note: this option is always enabled for recursive listings)",
    },
}


# flat lookup: bare flag name (spec with "=example" stripped) -> description
flagdescs = {k.split("=")[0]: v for tab in flagcats.values() for k, v in tab.items()}
							
								
								
									
										72
									
								
								copyparty/dxml.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										72
									
								
								copyparty/dxml.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,72 @@ | ||||
| import importlib | ||||
| import sys | ||||
| import xml.etree.ElementTree as ET | ||||
|  | ||||
| from .__init__ import PY2 | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Optional | ||||
|  | ||||
|  | ||||
def get_ET() -> ET.XMLParser:
    """
    return the pure-python XMLParser class even when the C accelerator
    (_elementtree) is installed; presumably needed so DXMLParser can reach
    the underlying expat parser object -- TODO confirm
    """
    pn = "xml.etree.ElementTree"
    cn = "_elementtree"

    cmod = sys.modules.pop(cn, None)
    if not cmod:
        # accelerator was never loaded; the imported class is already python
        return ET.XMLParser  # type: ignore

    # mask the accelerator (None blocks the import) and reimport the
    # package to obtain its pure-python definitions
    pmod = sys.modules.pop(pn)
    sys.modules[cn] = None  # type: ignore

    ret = importlib.import_module(pn)
    # restore the original module registry entries
    for name, mod in ((pn, pmod), (cn, cmod)):
        if mod:
            sys.modules[name] = mod
        else:
            sys.modules.pop(name, None)

    sys.modules["xml.etree"].ElementTree = pmod  # type: ignore
    # keep exception identity so `except ET.ParseError` still works
    ret.ParseError = ET.ParseError  # type: ignore
    return ret.XMLParser  # type: ignore
|  | ||||
|  | ||||
| XMLParser: ET.XMLParser = get_ET() | ||||
|  | ||||
|  | ||||
class DXMLParser(XMLParser):  # type: ignore
    """XMLParser which rejects doctypes and entity declarations (XXE hardening)"""

    def __init__(self) -> None:
        tb = ET.TreeBuilder()
        super(DXMLParser, self).__init__(target=tb)

        # hook the expat callbacks for every forbidden construct;
        # `parser` attr presumably only exists on the python impl (see get_ET)
        p = self._parser if PY2 else self.parser
        p.StartDoctypeDeclHandler = self.nope
        p.EntityDeclHandler = self.nope
        p.UnparsedEntityDeclHandler = self.nope
        p.ExternalEntityRefHandler = self.nope

    def nope(self, *a: Any, **ka: Any) -> None:
        # shared tripwire: any forbidden construct aborts the parse
        raise BadXML("{}, {}".format(a, ka))
|  | ||||
|  | ||||
class BadXML(Exception):
    """raised by DXMLParser when a forbidden XML construct is encountered"""
|  | ||||
|  | ||||
def parse_xml(txt: str) -> ET.Element:
    """parse `txt` with the hardened parser; raises BadXML on dangerous input"""
    p = DXMLParser()
    p.feed(txt)
    return p.close()  # type: ignore
|  | ||||
|  | ||||
def mktnod(name: str, text: str) -> ET.Element:
    """build an element with the given tag and text content"""
    node = ET.Element(name)
    node.text = text
    return node
|  | ||||
|  | ||||
def mkenod(name: str, sub_el: Optional[ET.Element] = None) -> ET.Element:
    """build an empty element, optionally wrapping a single child"""
    node = ET.Element(name)
    if sub_el is not None:
        node.append(sub_el)
    return node
							
								
								
									
										152
									
								
								copyparty/fsutil.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										152
									
								
								copyparty/fsutil.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,152 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import os | ||||
| import re | ||||
| import time | ||||
|  | ||||
| from .__init__ import ANYWIN, MACOS | ||||
| from .authsrv import AXS, VFS | ||||
| from .bos import bos | ||||
| from .util import chkcmd, min_ex | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Optional, Union | ||||
|  | ||||
|     from .util import RootLogger | ||||
|  | ||||
|  | ||||
| class Fstab(object): | ||||
    def __init__(self, log: "RootLogger"):
        # root logger; messages are tagged "fstab" (see self.log below)
        self.log_func = log

        # trusted = self.tab was built from actual `mount` output
        self.trusted = False
        self.tab: Optional[VFS] = None
        # path -> lowercased filesystem-name, reset when it grows too big
        self.cache: dict[str, str] = {}
        # time of the last cache reset (see get)
        self.age = 0.0
|  | ||||
    def log(self, msg: str, c: Union[int, str] = 0) -> None:
        # forward to the root logger with the "fstab" tag; c is the color/flag
        self.log_func("fstab", msg, c)
|  | ||||
    def get(self, path: str) -> str:
        """
        guess the filesystem type at `path`; returns a lowercase fs name,
        falling back to "ext4" ("vfat" on windows) on any failure
        """
        if len(self.cache) > 9000:
            # unbounded-growth guard: drop everything and start over
            self.age = time.time()
            self.tab = None
            self.cache = {}

        fs = "ext4"
        msg = "failed to determine filesystem at [{}]; assuming {}\n{}"

        if ANYWIN:
            fs = "vfat"
            try:
                # normalize to a volume-label cache key
                path = self._winpath(path)
            except:
                self.log(msg.format(path, fs, min_ex()), 3)
                return fs

        path = path.lstrip("/")
        try:
            return self.cache[path]
        except:
            pass

        try:
            fs = self.get_w32(path) if ANYWIN else self.get_unix(path)
        except:
            # best-effort: log and keep the default fs name
            self.log(msg.format(path, fs, min_ex()), 3)

        fs = fs.lower()
        self.cache[path] = fs
        self.log("found {} at {}".format(fs, path))
        return fs
|  | ||||
|     def _winpath(self, path: str) -> str: | ||||
|         # try to combine volume-label + st_dev (vsn) | ||||
|         path = path.replace("/", "\\") | ||||
|         vid = path.split(":", 1)[0].strip("\\").split("\\", 1)[0] | ||||
|         try: | ||||
|             return "{}*{}".format(vid, bos.stat(path).st_dev) | ||||
|         except: | ||||
|             return vid | ||||
|  | ||||
|     def build_fallback(self) -> None: | ||||
|         self.tab = VFS(self.log_func, "idk", "/", AXS(), {}) | ||||
|         self.trusted = False | ||||
|  | ||||
    def build_tab(self) -> None:
        """parse `mount` output into a VFS tree of mountpoint -> fs-name"""
        self.log("building tab")

        # "<dev> on <path> type <fs> (<opts>" on linux,
        # "<dev> on <path> (<fs>, <opts>" on macos
        sptn = r"^.*? on (.*) type ([^ ]+) \(.*"
        if MACOS:
            sptn = r"^.*? on (.*) \(([^ ]+), .*"

        ptn = re.compile(sptn)
        so, _ = chkcmd(["mount"])
        tab1: list[tuple[str, str]] = []
        for ln in so.split("\n"):
            m = ptn.match(ln)
            if not m:
                continue

            zs1, zs2 = m.groups()
            tab1.append((str(zs1), str(zs2)))

        # shortest path first so the filesystem root becomes the tree root
        tab1.sort(key=lambda x: (len(x[0]), x[0]))
        path1, fs1 = tab1[0]
        tab = VFS(self.log_func, fs1, path1, AXS(), {})
        for path, fs in tab1[1:]:
            tab.add(fs, path.lstrip("/"))

        self.tab = tab
|  | ||||
|     def relabel(self, path: str, nval: str) -> None: | ||||
|         assert self.tab | ||||
|         self.cache = {} | ||||
|         if ANYWIN: | ||||
|             path = self._winpath(path) | ||||
|  | ||||
|         path = path.lstrip("/") | ||||
|         ptn = re.compile(r"^[^\\/]*") | ||||
|         vn, rem = self.tab._find(path) | ||||
|         if not self.trusted: | ||||
|             # no mtab access; have to build as we go | ||||
|             if "/" in rem: | ||||
|                 self.tab.add("idk", os.path.join(vn.vpath, rem.split("/")[0])) | ||||
|             if rem: | ||||
|                 self.tab.add(nval, path) | ||||
|             else: | ||||
|                 vn.realpath = nval | ||||
|  | ||||
|             return | ||||
|  | ||||
|         visit = [vn] | ||||
|         while visit: | ||||
|             vn = visit.pop() | ||||
|             vn.realpath = ptn.sub(nval, vn.realpath) | ||||
|             visit.extend(list(vn.nodes.values())) | ||||
|  | ||||
|     def get_unix(self, path: str) -> str: | ||||
|         if not self.tab: | ||||
|             try: | ||||
|                 self.build_tab() | ||||
|                 self.trusted = True | ||||
|             except: | ||||
|                 # prisonparty or other restrictive environment | ||||
|                 self.log("failed to build tab:\n{}".format(min_ex()), 3) | ||||
|                 self.build_fallback() | ||||
|  | ||||
|         assert self.tab | ||||
|         ret = self.tab._find(path)[0] | ||||
|         if self.trusted or path == ret.vpath: | ||||
|             return ret.realpath.split("/")[0] | ||||
|         else: | ||||
|             return "idk" | ||||
|  | ||||
|     def get_w32(self, path: str) -> str: | ||||
|         if not self.tab: | ||||
|             self.build_fallback() | ||||
|  | ||||
|         assert self.tab | ||||
|         ret = self.tab._find(path)[0] | ||||
|         return ret.realpath | ||||
| @@ -1,152 +1,268 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| import stat | ||||
| import time | ||||
| import argparse | ||||
| import errno | ||||
| import logging | ||||
| import threading | ||||
| import os | ||||
| import stat | ||||
| import sys | ||||
| import time | ||||
|  | ||||
| from .__init__ import E, PY2 | ||||
| from .util import Pebkac, fsenc, exclude_dotfiles | ||||
| from .bos import bos | ||||
| from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E | ||||
|  | ||||
| try: | ||||
|     from pyftpdlib.ioloop import IOLoop | ||||
| except ImportError: | ||||
|     p = os.path.join(E.mod, "vend") | ||||
|     print("loading asynchat from " + p) | ||||
|     sys.path.append(p) | ||||
|     from pyftpdlib.ioloop import IOLoop | ||||
|     import asynchat | ||||
| except: | ||||
|     sys.path.append(os.path.join(E.mod, "vend")) | ||||
|  | ||||
| from pyftpdlib.authorizers import DummyAuthorizer, AuthenticationFailed | ||||
| from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer | ||||
| from pyftpdlib.filesystems import AbstractedFS, FilesystemError | ||||
| from pyftpdlib.handlers import FTPHandler | ||||
| from pyftpdlib.ioloop import IOLoop | ||||
| from pyftpdlib.servers import FTPServer | ||||
| from pyftpdlib.log import config_logging | ||||
|  | ||||
| from .authsrv import VFS | ||||
| from .bos import bos | ||||
| from .util import ( | ||||
|     Daemon, | ||||
|     Pebkac, | ||||
|     exclude_dotfiles, | ||||
|     fsenc, | ||||
|     ipnorm, | ||||
|     pybin, | ||||
|     relchk, | ||||
|     runhook, | ||||
|     sanitize_fn, | ||||
|     vjoin, | ||||
| ) | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .svchub import SvcHub | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     import typing | ||||
|     from typing import Any, Optional | ||||
|  | ||||
|  | ||||
| try: | ||||
|     from typing import TYPE_CHECKING | ||||
|  | ||||
|     if TYPE_CHECKING: | ||||
|         from .svchub import SvcHub | ||||
| except ImportError: | ||||
|     pass | ||||
| class FSE(FilesystemError): | ||||
|     def __init__(self, msg: str, severity: int = 0) -> None: | ||||
|         super(FilesystemError, self).__init__(msg) | ||||
|         self.severity = severity | ||||
|  | ||||
|  | ||||
| class FtpAuth(DummyAuthorizer): | ||||
|     def __init__(self): | ||||
|     def __init__(self, hub: "SvcHub") -> None: | ||||
|         super(FtpAuth, self).__init__() | ||||
|         self.hub = None  # type: SvcHub | ||||
|         self.hub = hub | ||||
|  | ||||
|     def validate_authentication( | ||||
|         self, username: str, password: str, handler: Any | ||||
|     ) -> None: | ||||
|         handler.username = "{}:{}".format(username, password) | ||||
|         handler.uname = "*" | ||||
|  | ||||
|         ip = handler.addr[0] | ||||
|         if ip.startswith("::ffff:"): | ||||
|             ip = ip[7:] | ||||
|  | ||||
|         ip = ipnorm(ip) | ||||
|         bans = self.hub.bans | ||||
|         if ip in bans: | ||||
|             rt = bans[ip] - time.time() | ||||
|             if rt < 0: | ||||
|                 logging.info("client unbanned") | ||||
|                 del bans[ip] | ||||
|             else: | ||||
|                 raise AuthenticationFailed("banned") | ||||
|  | ||||
|     def validate_authentication(self, username, password, handler): | ||||
|         asrv = self.hub.asrv | ||||
|         if username == "anonymous": | ||||
|             password = "" | ||||
|  | ||||
|         uname = "*" | ||||
|         if password: | ||||
|             uname = asrv.iacct.get(password, None) | ||||
|         if username != "anonymous": | ||||
|             for zs in (password, username): | ||||
|                 zs = asrv.iacct.get(asrv.ah.hash(zs), "") | ||||
|                 if zs: | ||||
|                     uname = zs | ||||
|                     break | ||||
|  | ||||
|         handler.username = uname | ||||
|         if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)): | ||||
|             g = self.hub.gpwd | ||||
|             if g.lim: | ||||
|                 bonk, ip = g.bonk(ip, handler.username) | ||||
|                 if bonk: | ||||
|                     logging.warning("client banned: invalid passwords") | ||||
|                     bans[ip] = bonk | ||||
|  | ||||
|         if password and not uname: | ||||
|             raise AuthenticationFailed("Authentication failed.") | ||||
|  | ||||
|     def get_home_dir(self, username): | ||||
|         handler.uname = handler.username = uname | ||||
|  | ||||
|     def get_home_dir(self, username: str) -> str: | ||||
|         return "/" | ||||
|  | ||||
|     def has_user(self, username): | ||||
|     def has_user(self, username: str) -> bool: | ||||
|         asrv = self.hub.asrv | ||||
|         return username in asrv.acct | ||||
|         return username in asrv.acct or username in asrv.iacct | ||||
|  | ||||
|     def has_perm(self, username, perm, path=None): | ||||
|     def has_perm(self, username: str, perm: int, path: Optional[str] = None) -> bool: | ||||
|         return True  # handled at filesystem layer | ||||
|  | ||||
|     def get_perms(self, username): | ||||
|     def get_perms(self, username: str) -> str: | ||||
|         return "elradfmwMT" | ||||
|  | ||||
|     def get_msg_login(self, username): | ||||
|     def get_msg_login(self, username: str) -> str: | ||||
|         return "sup {}".format(username) | ||||
|  | ||||
|     def get_msg_quit(self, username): | ||||
|     def get_msg_quit(self, username: str) -> str: | ||||
|         return "cya" | ||||
|  | ||||
|  | ||||
| class FtpFs(AbstractedFS): | ||||
|     def __init__(self, root, cmd_channel): | ||||
|         self.h = self.cmd_channel = cmd_channel  # type: FTPHandler | ||||
|         self.hub = cmd_channel.hub  # type: SvcHub | ||||
|     def __init__( | ||||
|         self, root: str, cmd_channel: Any | ||||
|     ) -> None:  # pylint: disable=super-init-not-called | ||||
|         self.h = cmd_channel  # type: FTPHandler | ||||
|         self.cmd_channel = cmd_channel  # type: FTPHandler | ||||
|         self.hub: "SvcHub" = cmd_channel.hub | ||||
|         self.args = cmd_channel.args | ||||
|  | ||||
|         self.uname = self.hub.asrv.iacct.get(cmd_channel.password, "*") | ||||
|         self.uname = cmd_channel.uname | ||||
|  | ||||
|         self.cwd = "/"  # pyftpdlib convention of leading slash | ||||
|         self.root = "/var/lib/empty" | ||||
|  | ||||
|         self.can_read = self.can_write = self.can_move = False | ||||
|         self.can_delete = self.can_get = self.can_upget = False | ||||
|         self.can_admin = False | ||||
|  | ||||
|         self.listdirinfo = self.listdir | ||||
|         self.chdir(".") | ||||
|  | ||||
|     def v2a(self, vpath, r=False, w=False, m=False, d=False): | ||||
|     def v2a( | ||||
|         self, | ||||
|         vpath: str, | ||||
|         r: bool = False, | ||||
|         w: bool = False, | ||||
|         m: bool = False, | ||||
|         d: bool = False, | ||||
|     ) -> tuple[str, VFS, str]: | ||||
|         try: | ||||
|             vpath = vpath.replace("\\", "/").lstrip("/") | ||||
|             vpath = vpath.replace("\\", "/").strip("/") | ||||
|             rd, fn = os.path.split(vpath) | ||||
|             if ANYWIN and relchk(rd): | ||||
|                 logging.warning("malicious vpath: %s", vpath) | ||||
|                 t = "Unsupported characters in [{}]" | ||||
|                 raise FSE(t.format(vpath), 1) | ||||
|  | ||||
|             fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"]) | ||||
|             vpath = vjoin(rd, fn) | ||||
|             vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d) | ||||
|             if not vfs.realpath: | ||||
|                 raise FilesystemError("no filesystem mounted at this path") | ||||
|                 t = "No filesystem mounted at [{}]" | ||||
|                 raise FSE(t.format(vpath)) | ||||
|  | ||||
|             return os.path.join(vfs.realpath, rem) | ||||
|             if "xdev" in vfs.flags or "xvol" in vfs.flags: | ||||
|                 ap = vfs.canonical(rem) | ||||
|                 avfs = vfs.chk_ap(ap) | ||||
|                 t = "Permission denied in [{}]" | ||||
|                 if not avfs: | ||||
|                     raise FSE(t.format(vpath), 1) | ||||
|  | ||||
|                 cr, cw, cm, cd, _, _, _ = avfs.can_access("", self.h.uname) | ||||
|                 if r and not cr or w and not cw or m and not cm or d and not cd: | ||||
|                     raise FSE(t.format(vpath), 1) | ||||
|  | ||||
|             return os.path.join(vfs.realpath, rem), vfs, rem | ||||
|         except Pebkac as ex: | ||||
|             raise FilesystemError(str(ex)) | ||||
|             raise FSE(str(ex)) | ||||
|  | ||||
|     def rv2a(self, vpath, r=False, w=False, m=False, d=False): | ||||
|     def rv2a( | ||||
|         self, | ||||
|         vpath: str, | ||||
|         r: bool = False, | ||||
|         w: bool = False, | ||||
|         m: bool = False, | ||||
|         d: bool = False, | ||||
|     ) -> tuple[str, VFS, str]: | ||||
|         return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d) | ||||
|  | ||||
|     def ftp2fs(self, ftppath): | ||||
|     def ftp2fs(self, ftppath: str) -> str: | ||||
|         # return self.v2a(ftppath) | ||||
|         return ftppath  # self.cwd must be vpath | ||||
|  | ||||
|     def fs2ftp(self, fspath): | ||||
|     def fs2ftp(self, fspath: str) -> str: | ||||
|         # raise NotImplementedError() | ||||
|         return fspath | ||||
|  | ||||
|     def validpath(self, path): | ||||
|     def validpath(self, path: str) -> bool: | ||||
|         if "/.hist/" in path: | ||||
|             if "/up2k." in path or path.endswith("/dir.txt"): | ||||
|                 raise FilesystemError("access to this file is forbidden") | ||||
|                 raise FSE("Access to this file is forbidden", 1) | ||||
|  | ||||
|         return True | ||||
|  | ||||
|     def open(self, filename, mode): | ||||
|     def open(self, filename: str, mode: str) -> typing.IO[Any]: | ||||
|         r = "r" in mode | ||||
|         w = "w" in mode or "a" in mode or "+" in mode | ||||
|  | ||||
|         ap = self.rv2a(filename, r, w) | ||||
|         if w and bos.path.exists(ap): | ||||
|             raise FilesystemError("cannot open existing file for writing") | ||||
|         ap = self.rv2a(filename, r, w)[0] | ||||
|         if w: | ||||
|             try: | ||||
|                 st = bos.stat(ap) | ||||
|                 td = time.time() - st.st_mtime | ||||
|             except: | ||||
|                 td = 0 | ||||
|  | ||||
|             if td < -1 or td > self.args.ftp_wt: | ||||
|                 raise FSE("Cannot open existing file for writing") | ||||
|  | ||||
|         self.validpath(ap) | ||||
|         return open(fsenc(ap), mode) | ||||
|  | ||||
|     def chdir(self, path): | ||||
|         self.cwd = join(self.cwd, path) | ||||
|         x = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username) | ||||
|         self.can_read, self.can_write, self.can_move, self.can_delete, self.can_get = x | ||||
|  | ||||
|     def mkdir(self, path): | ||||
|         ap = self.rv2a(path, w=True) | ||||
|         bos.mkdir(ap) | ||||
|  | ||||
|     def listdir(self, path): | ||||
|         vpath = join(self.cwd, path).lstrip("/") | ||||
|     def chdir(self, path: str) -> None: | ||||
|         nwd = join(self.cwd, path) | ||||
|         vfs, rem = self.hub.asrv.vfs.get(nwd, self.uname, False, False) | ||||
|         ap = vfs.canonical(rem) | ||||
|         try: | ||||
|             vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, True, False) | ||||
|             st = bos.stat(ap) | ||||
|             if not stat.S_ISDIR(st.st_mode): | ||||
|                 raise Exception() | ||||
|         except: | ||||
|             # returning 550 is library-default and suitable | ||||
|             raise FSE("No such file or directory") | ||||
|  | ||||
|             fsroot, vfs_ls, vfs_virt = vfs.ls( | ||||
|                 rem, self.uname, not self.args.no_scandir, [[True], [False, True]] | ||||
|         avfs = vfs.chk_ap(ap, st) | ||||
|         if not avfs: | ||||
|             raise FSE("Permission denied", 1) | ||||
|  | ||||
|         self.cwd = nwd | ||||
|         ( | ||||
|             self.can_read, | ||||
|             self.can_write, | ||||
|             self.can_move, | ||||
|             self.can_delete, | ||||
|             self.can_get, | ||||
|             self.can_upget, | ||||
|             self.can_admin, | ||||
|         ) = avfs.can_access("", self.h.uname) | ||||
|  | ||||
|     def mkdir(self, path: str) -> None: | ||||
|         ap = self.rv2a(path, w=True)[0] | ||||
|         bos.makedirs(ap)  # filezilla expects this | ||||
|  | ||||
|     def listdir(self, path: str) -> list[str]: | ||||
|         vpath = join(self.cwd, path) | ||||
|         try: | ||||
|             ap, vfs, rem = self.v2a(vpath, True, False) | ||||
|             if not bos.path.isdir(ap): | ||||
|                 raise FSE("No such file or directory", 1) | ||||
|  | ||||
|             fsroot, vfs_ls1, vfs_virt = vfs.ls( | ||||
|                 rem, | ||||
|                 self.uname, | ||||
|                 not self.args.no_scandir, | ||||
|                 [[True, False], [False, True]], | ||||
|             ) | ||||
|             vfs_ls = [x[0] for x in vfs_ls] | ||||
|             vfs_ls = [x[0] for x in vfs_ls1] | ||||
|             vfs_ls.extend(vfs_virt.keys()) | ||||
|  | ||||
|             if not self.args.ed: | ||||
| @@ -155,7 +271,11 @@ class FtpFs(AbstractedFS): | ||||
|             vfs_ls.sort() | ||||
|             return vfs_ls | ||||
|         except Exception as ex: | ||||
|             if vpath: | ||||
|             # panic on malicious names | ||||
|             if getattr(ex, "severity", 0): | ||||
|                 raise | ||||
|  | ||||
|             if vpath.strip("/"): | ||||
|                 # display write-only folders as empty | ||||
|                 return [] | ||||
|  | ||||
| @@ -163,138 +283,187 @@ class FtpFs(AbstractedFS): | ||||
|             r = {x.split("/")[0]: 1 for x in self.hub.asrv.vfs.all_vols.keys()} | ||||
|             return list(sorted(list(r.keys()))) | ||||
|  | ||||
|     def rmdir(self, path): | ||||
|         ap = self.rv2a(path, d=True) | ||||
|         bos.rmdir(ap) | ||||
|     def rmdir(self, path: str) -> None: | ||||
|         ap = self.rv2a(path, d=True)[0] | ||||
|         try: | ||||
|             bos.rmdir(ap) | ||||
|         except OSError as e: | ||||
|             if e.errno != errno.ENOENT: | ||||
|                 raise | ||||
|  | ||||
|     def remove(self, path): | ||||
|     def remove(self, path: str) -> None: | ||||
|         if self.args.no_del: | ||||
|             raise FilesystemError("the delete feature is disabled in server config") | ||||
|             raise FSE("The delete feature is disabled in server config") | ||||
|  | ||||
|         vp = join(self.cwd, path).lstrip("/") | ||||
|         x = self.hub.broker.put( | ||||
|             True, "up2k.handle_rm", self.uname, self.h.remote_ip, [vp] | ||||
|         ) | ||||
|  | ||||
|         try: | ||||
|             x.get() | ||||
|             self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [], False) | ||||
|         except Exception as ex: | ||||
|             raise FilesystemError(str(ex)) | ||||
|             raise FSE(str(ex)) | ||||
|  | ||||
|     def rename(self, src, dst): | ||||
|     def rename(self, src: str, dst: str) -> None: | ||||
|         if not self.can_move: | ||||
|             raise FilesystemError("not allowed for user " + self.h.username) | ||||
|             raise FSE("Not allowed for user " + self.h.uname) | ||||
|  | ||||
|         if self.args.no_mv: | ||||
|             m = "the rename/move feature is disabled in server config" | ||||
|             raise FilesystemError(m) | ||||
|             raise FSE("The rename/move feature is disabled in server config") | ||||
|  | ||||
|         svp = join(self.cwd, src).lstrip("/") | ||||
|         dvp = join(self.cwd, dst).lstrip("/") | ||||
|         x = self.hub.broker.put(True, "up2k.handle_mv", self.uname, svp, dvp) | ||||
|         try: | ||||
|             x.get() | ||||
|             self.hub.up2k.handle_mv(self.uname, svp, dvp) | ||||
|         except Exception as ex: | ||||
|             raise FilesystemError(str(ex)) | ||||
|             raise FSE(str(ex)) | ||||
|  | ||||
|     def chmod(self, path, mode): | ||||
|     def chmod(self, path: str, mode: str) -> None: | ||||
|         pass | ||||
|  | ||||
|     def stat(self, path): | ||||
|     def stat(self, path: str) -> os.stat_result: | ||||
|         try: | ||||
|             ap = self.rv2a(path, r=True) | ||||
|             ap = self.rv2a(path, r=True)[0] | ||||
|             return bos.stat(ap) | ||||
|         except: | ||||
|             ap = self.rv2a(path) | ||||
|         except FSE as ex: | ||||
|             if ex.severity: | ||||
|                 raise | ||||
|  | ||||
|             ap = self.rv2a(path)[0] | ||||
|             st = bos.stat(ap) | ||||
|             if not stat.S_ISDIR(st.st_mode): | ||||
|                 raise | ||||
|  | ||||
|             return st | ||||
|  | ||||
|     def utime(self, path, timeval): | ||||
|         ap = self.rv2a(path, w=True) | ||||
|     def utime(self, path: str, timeval: float) -> None: | ||||
|         ap = self.rv2a(path, w=True)[0] | ||||
|         return bos.utime(ap, (timeval, timeval)) | ||||
|  | ||||
|     def lstat(self, path): | ||||
|         ap = self.rv2a(path) | ||||
|         return bos.lstat(ap) | ||||
|     def lstat(self, path: str) -> os.stat_result: | ||||
|         ap = self.rv2a(path)[0] | ||||
|         return bos.stat(ap) | ||||
|  | ||||
|     def isfile(self, path): | ||||
|         st = self.stat(path) | ||||
|         return stat.S_ISREG(st.st_mode) | ||||
|     def isfile(self, path: str) -> bool: | ||||
|         try: | ||||
|             st = self.stat(path) | ||||
|             return stat.S_ISREG(st.st_mode) | ||||
|         except Exception as ex: | ||||
|             if getattr(ex, "severity", 0): | ||||
|                 raise | ||||
|  | ||||
|     def islink(self, path): | ||||
|         ap = self.rv2a(path) | ||||
|             return False  # expected for mojibake in ftp_SIZE() | ||||
|  | ||||
|     def islink(self, path: str) -> bool: | ||||
|         ap = self.rv2a(path)[0] | ||||
|         return bos.path.islink(ap) | ||||
|  | ||||
|     def isdir(self, path): | ||||
|     def isdir(self, path: str) -> bool: | ||||
|         try: | ||||
|             st = self.stat(path) | ||||
|             return stat.S_ISDIR(st.st_mode) | ||||
|         except: | ||||
|         except Exception as ex: | ||||
|             if getattr(ex, "severity", 0): | ||||
|                 raise | ||||
|  | ||||
|             return True | ||||
|  | ||||
|     def getsize(self, path): | ||||
|         ap = self.rv2a(path) | ||||
|     def getsize(self, path: str) -> int: | ||||
|         ap = self.rv2a(path)[0] | ||||
|         return bos.path.getsize(ap) | ||||
|  | ||||
|     def getmtime(self, path): | ||||
|         ap = self.rv2a(path) | ||||
|     def getmtime(self, path: str) -> float: | ||||
|         ap = self.rv2a(path)[0] | ||||
|         return bos.path.getmtime(ap) | ||||
|  | ||||
|     def realpath(self, path): | ||||
|     def realpath(self, path: str) -> str: | ||||
|         return path | ||||
|  | ||||
|     def lexists(self, path): | ||||
|         ap = self.rv2a(path) | ||||
|     def lexists(self, path: str) -> bool: | ||||
|         ap = self.rv2a(path)[0] | ||||
|         return bos.path.lexists(ap) | ||||
|  | ||||
|     def get_user_by_uid(self, uid): | ||||
|     def get_user_by_uid(self, uid: int) -> str: | ||||
|         return "root" | ||||
|  | ||||
|     def get_group_by_uid(self, gid): | ||||
|     def get_group_by_uid(self, gid: int) -> str: | ||||
|         return "root" | ||||
|  | ||||
|  | ||||
| class FtpHandler(FTPHandler): | ||||
|     abstracted_fs = FtpFs | ||||
|     hub: "SvcHub" | ||||
|     args: argparse.Namespace | ||||
|     uname: str | ||||
|  | ||||
|     def __init__(self, conn: Any, server: Any, ioloop: Any = None) -> None: | ||||
|         self.hub: "SvcHub" = FtpHandler.hub | ||||
|         self.args: argparse.Namespace = FtpHandler.args | ||||
|         self.uname = "*" | ||||
|  | ||||
|     def __init__(self, conn, server, ioloop=None): | ||||
|         if PY2: | ||||
|             FTPHandler.__init__(self, conn, server, ioloop) | ||||
|         else: | ||||
|             super(FtpHandler, self).__init__(conn, server, ioloop) | ||||
|  | ||||
|         # abspath->vpath mapping to resolve log_transfer paths | ||||
|         self.vfs_map = {} | ||||
|         cip = self.remote_ip | ||||
|         self.cli_ip = cip[7:] if cip.startswith("::ffff:") else cip | ||||
|  | ||||
|     def ftp_STOR(self, file, mode="w"): | ||||
|         # abspath->vpath mapping to resolve log_transfer paths | ||||
|         self.vfs_map: dict[str, str] = {} | ||||
|  | ||||
|         # reduce non-debug logging | ||||
|         self.log_cmds_list = [x for x in self.log_cmds_list if x not in ("CWD", "XCWD")] | ||||
|  | ||||
|     def ftp_STOR(self, file: str, mode: str = "w") -> Any: | ||||
|         # Optional[str] | ||||
|         vp = join(self.fs.cwd, file).lstrip("/") | ||||
|         ap = self.fs.v2a(vp) | ||||
|         ap, vfs, rem = self.fs.v2a(vp, w=True) | ||||
|         self.vfs_map[ap] = vp | ||||
|         xbu = vfs.flags.get("xbu") | ||||
|         if xbu and not runhook( | ||||
|             None, | ||||
|             xbu, | ||||
|             ap, | ||||
|             vfs.canonical(rem), | ||||
|             "", | ||||
|             self.uname, | ||||
|             0, | ||||
|             0, | ||||
|             self.cli_ip, | ||||
|             0, | ||||
|             "", | ||||
|         ): | ||||
|             raise FSE("Upload blocked by xbu server config") | ||||
|  | ||||
|         # print("ftp_STOR: {} {} => {}".format(vp, mode, ap)) | ||||
|         ret = FTPHandler.ftp_STOR(self, file, mode) | ||||
|         # print("ftp_STOR: {} {} OK".format(vp, mode)) | ||||
|         return ret | ||||
|  | ||||
|     def log_transfer(self, cmd, filename, receive, completed, elapsed, bytes): | ||||
|     def log_transfer( | ||||
|         self, | ||||
|         cmd: str, | ||||
|         filename: bytes, | ||||
|         receive: bool, | ||||
|         completed: bool, | ||||
|         elapsed: float, | ||||
|         bytes: int, | ||||
|     ) -> Any: | ||||
|         # None | ||||
|         ap = filename.decode("utf-8", "replace") | ||||
|         vp = self.vfs_map.pop(ap, None) | ||||
|         # print("xfer_end: {} => {}".format(ap, vp)) | ||||
|         if vp: | ||||
|             vp, fn = os.path.split(vp) | ||||
|             vfs, rem = self.hub.asrv.vfs.get(vp, self.username, False, True) | ||||
|             vfs, rem = self.hub.asrv.vfs.get(vp, self.uname, False, True) | ||||
|             vfs, rem = vfs.get_dbv(rem) | ||||
|             self.hub.broker.put( | ||||
|                 False, | ||||
|                 "up2k.hash_file", | ||||
|             self.hub.up2k.hash_file( | ||||
|                 vfs.realpath, | ||||
|                 vfs.vpath, | ||||
|                 vfs.flags, | ||||
|                 rem, | ||||
|                 fn, | ||||
|                 self.remote_ip, | ||||
|                 self.cli_ip, | ||||
|                 time.time(), | ||||
|                 self.uname, | ||||
|             ) | ||||
|  | ||||
|         return FTPHandler.log_transfer( | ||||
| @@ -313,7 +482,7 @@ except: | ||||
|  | ||||
|  | ||||
| class Ftpd(object): | ||||
|     def __init__(self, hub): | ||||
|     def __init__(self, hub: "SvcHub") -> None: | ||||
|         self.hub = hub | ||||
|         self.args = hub.args | ||||
|  | ||||
| @@ -322,24 +491,23 @@ class Ftpd(object): | ||||
|             hs.append([FtpHandler, self.args.ftp]) | ||||
|         if self.args.ftps: | ||||
|             try: | ||||
|                 h = SftpHandler | ||||
|                 h1 = SftpHandler | ||||
|             except: | ||||
|                 m = "\nftps requires pyopenssl;\nplease run the following:\n\n  {} -m pip install --user pyopenssl\n" | ||||
|                 print(m.format(sys.executable)) | ||||
|                 t = "\nftps requires pyopenssl;\nplease run the following:\n\n  {} -m pip install --user pyopenssl\n" | ||||
|                 print(t.format(pybin)) | ||||
|                 sys.exit(1) | ||||
|  | ||||
|             h.certfile = os.path.join(E.cfg, "cert.pem") | ||||
|             h.tls_control_required = True | ||||
|             h.tls_data_required = True | ||||
|             h1.certfile = self.args.cert | ||||
|             h1.tls_control_required = True | ||||
|             h1.tls_data_required = True | ||||
|  | ||||
|             hs.append([h, self.args.ftps]) | ||||
|             hs.append([h1, self.args.ftps]) | ||||
|  | ||||
|         for h in hs: | ||||
|             h, lp = h | ||||
|             h.hub = hub | ||||
|             h.args = hub.args | ||||
|             h.authorizer = FtpAuth() | ||||
|             h.authorizer.hub = hub | ||||
|         for h_lp in hs: | ||||
|             h2, lp = h_lp | ||||
|             FtpHandler.hub = h2.hub = hub | ||||
|             FtpHandler.args = h2.args = hub.args | ||||
|             FtpHandler.authorizer = h2.authorizer = FtpAuth(hub) | ||||
|  | ||||
|             if self.args.ftp_pr: | ||||
|                 p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")] | ||||
| @@ -351,24 +519,33 @@ class Ftpd(object): | ||||
|                     else: | ||||
|                         p1 += d + 1 | ||||
|  | ||||
|                 h.passive_ports = list(range(p1, p2 + 1)) | ||||
|                 h2.passive_ports = list(range(p1, p2 + 1)) | ||||
|  | ||||
|             if self.args.ftp_nat: | ||||
|                 h.masquerade_address = self.args.ftp_nat | ||||
|                 h2.masquerade_address = self.args.ftp_nat | ||||
|  | ||||
|         if self.args.ftp_dbg: | ||||
|             config_logging(level=logging.DEBUG) | ||||
|         lgr = logging.getLogger("pyftpdlib") | ||||
|         lgr.setLevel(logging.DEBUG if self.args.ftpv else logging.INFO) | ||||
|  | ||||
|         ips = self.args.i | ||||
|         if "::" in ips: | ||||
|             ips.append("0.0.0.0") | ||||
|  | ||||
|         if self.args.ftp4: | ||||
|             ips = [x for x in ips if ":" not in x] | ||||
|  | ||||
|         ioloop = IOLoop() | ||||
|         for ip in self.args.i: | ||||
|         for ip in ips: | ||||
|             for h, lp in hs: | ||||
|                 FTPServer((ip, int(lp)), h, ioloop) | ||||
|                 try: | ||||
|                     FTPServer((ip, int(lp)), h, ioloop) | ||||
|                 except: | ||||
|                     if ip != "0.0.0.0" or "::" not in ips: | ||||
|                         raise | ||||
|  | ||||
|         t = threading.Thread(target=ioloop.loop) | ||||
|         t.daemon = True | ||||
|         t.start() | ||||
|         Daemon(ioloop.loop, "ftp") | ||||
|  | ||||
|  | ||||
| def join(p1, p2): | ||||
| def join(p1: str, p2: str) -> str: | ||||
|     w = os.path.join(p1, p2.replace("\\", "/")) | ||||
|     return os.path.normpath(w).replace("\\", "/") | ||||
|   | ||||
							
								
								
									
										2892
									
								
								copyparty/httpcli.py
									
									
									
									
									
								
							
							
						
						
									
										2892
									
								
								copyparty/httpcli.py
									
									
									
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,24 +1,38 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import re | ||||
| import argparse  # typechk | ||||
| import os | ||||
| import time | ||||
| import re | ||||
| import socket | ||||
| import threading  # typechk | ||||
| import time | ||||
|  | ||||
| HAVE_SSL = True | ||||
| try: | ||||
|     HAVE_SSL = True | ||||
|     import ssl | ||||
| except: | ||||
|     HAVE_SSL = False | ||||
|  | ||||
| from .__init__ import E | ||||
| from .util import Unrecv | ||||
| from . import util as Util | ||||
| from .__init__ import TYPE_CHECKING, EnvParams | ||||
| from .authsrv import AuthSrv  # typechk | ||||
| from .httpcli import HttpCli | ||||
| from .u2idx import U2idx | ||||
| from .ico import Ico | ||||
| from .mtag import HAVE_FFMPEG | ||||
| from .th_cli import ThumbCli | ||||
| from .th_srv import HAVE_PIL, HAVE_VIPS | ||||
| from .ico import Ico | ||||
| from .u2idx import U2idx | ||||
| from .util import HMaccas, shut_socket | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Optional, Pattern, Union | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .httpsrv import HttpSrv | ||||
|  | ||||
|  | ||||
| PTN_HTTP = re.compile(br"[A-Z]{3}[A-Z ]") | ||||
|  | ||||
|  | ||||
| class HttpConn(object): | ||||
| @@ -27,39 +41,49 @@ class HttpConn(object): | ||||
|     creates an HttpCli for each request (Connection: Keep-Alive) | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, sck, addr, hsrv): | ||||
|     def __init__( | ||||
|         self, sck: socket.socket, addr: tuple[str, int], hsrv: "HttpSrv" | ||||
|     ) -> None: | ||||
|         self.s = sck | ||||
|         self.sr: Optional[Util._Unrecv] = None | ||||
|         self.cli: Optional[HttpCli] = None | ||||
|         self.addr = addr | ||||
|         self.hsrv = hsrv | ||||
|  | ||||
|         self.mutex = hsrv.mutex | ||||
|         self.args = hsrv.args | ||||
|         self.asrv = hsrv.asrv | ||||
|         self.cert_path = hsrv.cert_path | ||||
|         self.u2fh = hsrv.u2fh | ||||
|         self.mutex: threading.Lock = hsrv.mutex  # mypy404 | ||||
|         self.args: argparse.Namespace = hsrv.args  # mypy404 | ||||
|         self.E: EnvParams = self.args.E | ||||
|         self.asrv: AuthSrv = hsrv.asrv  # mypy404 | ||||
|         self.u2fh: Util.FHC = hsrv.u2fh  # mypy404 | ||||
|         self.iphash: HMaccas = hsrv.broker.iphash | ||||
|         self.bans: dict[str, int] = hsrv.bans | ||||
|         self.aclose: dict[str, int] = hsrv.aclose | ||||
|  | ||||
|         enth = (HAVE_PIL or HAVE_VIPS) and not self.args.no_thumb | ||||
|         self.thumbcli = ThumbCli(hsrv) if enth else None | ||||
|         self.ico = Ico(self.args) | ||||
|         enth = (HAVE_PIL or HAVE_VIPS or HAVE_FFMPEG) and not self.args.no_thumb | ||||
|         self.thumbcli: Optional[ThumbCli] = ThumbCli(hsrv) if enth else None  # mypy404 | ||||
|         self.ico: Ico = Ico(self.args)  # mypy404 | ||||
|  | ||||
|         self.t0 = time.time() | ||||
|         self.t0: float = time.time()  # mypy404 | ||||
|         self.freshen_pwd: float = 0.0 | ||||
|         self.stopping = False | ||||
|         self.nreq = 0 | ||||
|         self.nbyte = 0 | ||||
|         self.u2idx = None | ||||
|         self.log_func = hsrv.log | ||||
|         self.lf_url = re.compile(self.args.lf_url) if self.args.lf_url else None | ||||
|         self.nreq: int = -1  # mypy404 | ||||
|         self.nbyte: int = 0  # mypy404 | ||||
|         self.u2idx: Optional[U2idx] = None | ||||
|         self.log_func: "Util.RootLogger" = hsrv.log  # mypy404 | ||||
|         self.log_src: str = "httpconn"  # mypy404 | ||||
|         self.lf_url: Optional[Pattern[str]] = ( | ||||
|             re.compile(self.args.lf_url) if self.args.lf_url else None | ||||
|         )  # mypy404 | ||||
|         self.set_rproxy() | ||||
|  | ||||
|     def shutdown(self): | ||||
|     def shutdown(self) -> None: | ||||
|         self.stopping = True | ||||
|         try: | ||||
|             self.s.shutdown(socket.SHUT_RDWR) | ||||
|             self.s.close() | ||||
|             shut_socket(self.log, self.s, 1) | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
|     def set_rproxy(self, ip=None): | ||||
|     def set_rproxy(self, ip: Optional[str] = None) -> str: | ||||
|         if ip is None: | ||||
|             color = 36 | ||||
|             ip = self.addr[0] | ||||
| @@ -72,35 +96,38 @@ class HttpConn(object): | ||||
|         self.log_src = "{} \033[{}m{}".format(ip, color, self.addr[1]).ljust(26) | ||||
|         return self.log_src | ||||
|  | ||||
|     def respath(self, res_name): | ||||
|         return os.path.join(E.mod, "web", res_name) | ||||
|     def respath(self, res_name: str) -> str: | ||||
|         return os.path.join(self.E.mod, "web", res_name) | ||||
|  | ||||
|     def log(self, msg, c=0): | ||||
|     def log(self, msg: str, c: Union[int, str] = 0) -> None: | ||||
|         self.log_func(self.log_src, msg, c) | ||||
|  | ||||
|     def get_u2idx(self): | ||||
|     def get_u2idx(self) -> Optional[U2idx]: | ||||
|         # grab from a pool of u2idx instances; | ||||
|         # sqlite3 fully parallelizes under python threads | ||||
|         # but avoid running out of FDs by creating too many | ||||
|         if not self.u2idx: | ||||
|             self.u2idx = U2idx(self) | ||||
|             self.u2idx = self.hsrv.get_u2idx(str(self.addr)) | ||||
|  | ||||
|         return self.u2idx | ||||
|  | ||||
|     def _detect_https(self): | ||||
|     def _detect_https(self) -> bool: | ||||
|         method = None | ||||
|         if self.cert_path: | ||||
|         if True: | ||||
|             try: | ||||
|                 method = self.s.recv(4, socket.MSG_PEEK) | ||||
|             except socket.timeout: | ||||
|                 return | ||||
|                 return False | ||||
|             except AttributeError: | ||||
|                 # jython does not support msg_peek; forget about https | ||||
|                 method = self.s.recv(4) | ||||
|                 self.sr = Unrecv(self.s) | ||||
|                 self.sr = Util.Unrecv(self.s, self.log) | ||||
|                 self.sr.buf = method | ||||
|  | ||||
|                 # jython used to do this, they stopped since it's broken | ||||
|                 # but reimplementing sendall is out of scope for now | ||||
|                 if not getattr(self.s, "sendall", None): | ||||
|                     self.s.sendall = self.s.send | ||||
|                     self.s.sendall = self.s.send  # type: ignore | ||||
|  | ||||
|             if len(method) != 4: | ||||
|                 err = "need at least 4 bytes in the first packet; got {}".format( | ||||
| @@ -110,17 +137,20 @@ class HttpConn(object): | ||||
|                     self.log(err) | ||||
|  | ||||
|                 self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8")) | ||||
|                 return | ||||
|                 return False | ||||
|  | ||||
|         return method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"] | ||||
|         return not method or not bool(PTN_HTTP.match(method)) | ||||
|  | ||||
|     def run(self) -> None: | ||||
|         self.s.settimeout(10) | ||||
|  | ||||
|     def run(self): | ||||
|         self.sr = None | ||||
|         if self.args.https_only: | ||||
|             is_https = True | ||||
|         elif self.args.http_only or not HAVE_SSL: | ||||
|         elif self.args.http_only: | ||||
|             is_https = False | ||||
|         else: | ||||
|             # raise Exception("asdf") | ||||
|             is_https = self._detect_https() | ||||
|  | ||||
|         if is_https: | ||||
| @@ -131,7 +161,7 @@ class HttpConn(object): | ||||
|             self.log_src = self.log_src.replace("[36m", "[35m") | ||||
|             try: | ||||
|                 ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) | ||||
|                 ctx.load_cert_chain(self.cert_path) | ||||
|                 ctx.load_cert_chain(self.args.cert) | ||||
|                 if self.args.ssl_ver: | ||||
|                     ctx.options &= ~self.args.ssl_flags_en | ||||
|                     ctx.options |= self.args.ssl_flags_de | ||||
| @@ -149,14 +179,15 @@ class HttpConn(object): | ||||
|                 self.s = ctx.wrap_socket(self.s, server_side=True) | ||||
|                 msg = [ | ||||
|                     "\033[1;3{:d}m{}".format(c, s) | ||||
|                     for c, s in zip([0, 5, 0], self.s.cipher()) | ||||
|                     for c, s in zip([0, 5, 0], self.s.cipher())  # type: ignore | ||||
|                 ] | ||||
|                 self.log(" ".join(msg) + "\033[0m") | ||||
|  | ||||
|                 if self.args.ssl_dbg and hasattr(self.s, "shared_ciphers"): | ||||
|                     overlap = [y[::-1] for y in self.s.shared_ciphers()] | ||||
|                     lines = [str(x) for x in (["TLS cipher overlap:"] + overlap)] | ||||
|                     self.log("\n".join(lines)) | ||||
|                     ciphers = self.s.shared_ciphers() | ||||
|                     assert ciphers | ||||
|                     overlap = [str(y[::-1]) for y in ciphers] | ||||
|                     self.log("TLS cipher overlap:" + "\n".join(overlap)) | ||||
|                     for k, v in [ | ||||
|                         ["compression", self.s.compression()], | ||||
|                         ["ALPN proto", self.s.selected_alpn_protocol()], | ||||
| @@ -167,11 +198,7 @@ class HttpConn(object): | ||||
|             except Exception as ex: | ||||
|                 em = str(ex) | ||||
|  | ||||
|                 if "ALERT_BAD_CERTIFICATE" in em: | ||||
|                     # firefox-linux if there is no exception yet | ||||
|                     self.log("client rejected our certificate (nice)") | ||||
|  | ||||
|                 elif "ALERT_CERTIFICATE_UNKNOWN" in em: | ||||
|                 if "ALERT_CERTIFICATE_UNKNOWN" in em: | ||||
|                     # android-chrome keeps doing this | ||||
|                     pass | ||||
|  | ||||
| @@ -181,10 +208,14 @@ class HttpConn(object): | ||||
|                 return | ||||
|  | ||||
|         if not self.sr: | ||||
|             self.sr = Unrecv(self.s) | ||||
|             self.sr = Util.Unrecv(self.s, self.log) | ||||
|  | ||||
|         while not self.stopping: | ||||
|             self.nreq += 1 | ||||
|             cli = HttpCli(self) | ||||
|             if not cli.run(): | ||||
|             self.cli = HttpCli(self) | ||||
|             if not self.cli.run(): | ||||
|                 return | ||||
|  | ||||
|             if self.u2idx: | ||||
|                 self.hsrv.put_u2idx(str(self.addr), self.u2idx) | ||||
|                 self.u2idx = None | ||||
|   | ||||
| @@ -1,17 +1,30 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| import time | ||||
| import math | ||||
| import base64 | ||||
| import math | ||||
| import os | ||||
| import re | ||||
| import socket | ||||
| import sys | ||||
| import threading | ||||
| import time | ||||
|  | ||||
| import queue | ||||
|  | ||||
| from .__init__ import ANYWIN, CORES, EXE, MACOS, TYPE_CHECKING, EnvParams | ||||
|  | ||||
| try: | ||||
|     MNFE = ModuleNotFoundError | ||||
| except: | ||||
|     MNFE = ImportError | ||||
|  | ||||
| try: | ||||
|     import jinja2 | ||||
| except ImportError: | ||||
| except MNFE: | ||||
|     if EXE: | ||||
|         raise | ||||
|  | ||||
|     print( | ||||
|         """\033[1;31m | ||||
|   you do not have jinja2 installed,\033[33m | ||||
| @@ -21,20 +34,53 @@ except ImportError: | ||||
|    * (try another python version, if you have one) | ||||
|    * (try copyparty.sfx instead) | ||||
| """.format( | ||||
|             os.path.basename(sys.executable) | ||||
|             sys.executable | ||||
|         ) | ||||
|     ) | ||||
|     sys.exit(1) | ||||
| except SyntaxError: | ||||
|     if EXE: | ||||
|         raise | ||||
|  | ||||
|     print( | ||||
|         """\033[1;31m | ||||
|   your jinja2 version is incompatible with your python version;\033[33m | ||||
|   please try to replace it with an older version:\033[0m | ||||
|    * {} -m pip install --user jinja2==2.11.3 | ||||
|    * (try another python version, if you have one) | ||||
|    * (try copyparty.sfx instead) | ||||
| """.format( | ||||
|             sys.executable | ||||
|         ) | ||||
|     ) | ||||
|     sys.exit(1) | ||||
|  | ||||
| from .__init__ import E, PY2, MACOS | ||||
| from .util import FHC, spack, min_ex, start_stackmon, start_log_thrs | ||||
| from .bos import bos | ||||
| from .httpconn import HttpConn | ||||
| from .metrics import Metrics | ||||
| from .u2idx import U2idx | ||||
| from .util import ( | ||||
|     E_SCK, | ||||
|     FHC, | ||||
|     Daemon, | ||||
|     Garda, | ||||
|     Magician, | ||||
|     Netdev, | ||||
|     NetMap, | ||||
|     absreal, | ||||
|     ipnorm, | ||||
|     min_ex, | ||||
|     shut_socket, | ||||
|     spack, | ||||
|     start_log_thrs, | ||||
|     start_stackmon, | ||||
| ) | ||||
|  | ||||
| if PY2: | ||||
|     import Queue as queue | ||||
| else: | ||||
|     import queue | ||||
| if TYPE_CHECKING: | ||||
|     from .broker_util import BrokerCli | ||||
|     from .ssdp import SSDPr | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Optional | ||||
|  | ||||
|  | ||||
| class HttpSrv(object): | ||||
| @@ -43,52 +89,76 @@ class HttpSrv(object): | ||||
|     relying on MpSrv for performance (HttpSrv is just plain threads) | ||||
|     """ | ||||
|  | ||||
|     def __init__(self, broker, nid): | ||||
|     def __init__(self, broker: "BrokerCli", nid: Optional[int]) -> None: | ||||
|         self.broker = broker | ||||
|         self.nid = nid | ||||
|         self.args = broker.args | ||||
|         self.E: EnvParams = self.args.E | ||||
|         self.log = broker.log | ||||
|         self.asrv = broker.asrv | ||||
|  | ||||
|         nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else "" | ||||
|         # redefine in case of multiprocessing | ||||
|         socket.setdefaulttimeout(120) | ||||
|  | ||||
|         self.t0 = time.time() | ||||
|         nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else "" | ||||
|         self.magician = Magician() | ||||
|         self.nm = NetMap([], {}) | ||||
|         self.ssdp: Optional["SSDPr"] = None | ||||
|         self.gpwd = Garda(self.args.ban_pw) | ||||
|         self.g404 = Garda(self.args.ban_404) | ||||
|         self.g403 = Garda(self.args.ban_403) | ||||
|         self.g422 = Garda(self.args.ban_422, False) | ||||
|         self.gurl = Garda(self.args.ban_url) | ||||
|         self.bans: dict[str, int] = {} | ||||
|         self.aclose: dict[str, int] = {} | ||||
|  | ||||
|         self.bound: set[tuple[str, int]] = set() | ||||
|         self.name = "hsrv" + nsuf | ||||
|         self.mutex = threading.Lock() | ||||
|         self.stopping = False | ||||
|  | ||||
|         self.tp_nthr = 0  # actual | ||||
|         self.tp_ncli = 0  # fading | ||||
|         self.tp_time = None  # latest worker collect | ||||
|         self.tp_q = None if self.args.no_htp else queue.LifoQueue() | ||||
|         self.t_periodic = None | ||||
|         self.tp_time = 0.0  # latest worker collect | ||||
|         self.tp_q: Optional[queue.LifoQueue[Any]] = ( | ||||
|             None if self.args.no_htp else queue.LifoQueue() | ||||
|         ) | ||||
|         self.t_periodic: Optional[threading.Thread] = None | ||||
|  | ||||
|         self.u2fh = FHC() | ||||
|         self.srvs = [] | ||||
|         self.metrics = Metrics(self) | ||||
|         self.srvs: list[socket.socket] = [] | ||||
|         self.ncli = 0  # exact | ||||
|         self.clients = {}  # laggy | ||||
|         self.clients: set[HttpConn] = set()  # laggy | ||||
|         self.nclimax = 0 | ||||
|         self.cb_ts = 0 | ||||
|         self.cb_v = 0 | ||||
|         self.cb_ts = 0.0 | ||||
|         self.cb_v = "" | ||||
|  | ||||
|         try: | ||||
|             x = self.broker.put(True, "thumbsrv.getcfg") | ||||
|             self.th_cfg = x.get() | ||||
|         except: | ||||
|             pass | ||||
|         self.u2idx_free: dict[str, U2idx] = {} | ||||
|         self.u2idx_n = 0 | ||||
|  | ||||
|         env = jinja2.Environment() | ||||
|         env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web")) | ||||
|         self.j2 = { | ||||
|             x: env.get_template(x + ".html") | ||||
|             for x in ["splash", "browser", "browser2", "msg", "md", "mde"] | ||||
|         } | ||||
|         self.prism = os.path.exists(os.path.join(E.mod, "web", "deps", "prism.js.gz")) | ||||
|         env.loader = jinja2.FileSystemLoader(os.path.join(self.E.mod, "web")) | ||||
|         jn = ["splash", "svcs", "browser", "browser2", "msg", "md", "mde", "cf"] | ||||
|         self.j2 = {x: env.get_template(x + ".html") for x in jn} | ||||
|         zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz") | ||||
|         self.prism = os.path.exists(zs) | ||||
|  | ||||
|         cert_path = os.path.join(E.cfg, "cert.pem") | ||||
|         if bos.path.exists(cert_path): | ||||
|             self.cert_path = cert_path | ||||
|         else: | ||||
|             self.cert_path = None | ||||
|         self.statics: set[str] = set() | ||||
|         self._build_statics() | ||||
|  | ||||
|         self.ptn_cc = re.compile(r"[\x00-\x1f]") | ||||
|  | ||||
|         self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split() | ||||
|         if not self.args.no_dav: | ||||
|             zs = "PROPFIND PROPPATCH LOCK UNLOCK MKCOL COPY MOVE" | ||||
|             self.mallow += zs.split() | ||||
|  | ||||
|         if self.args.zs: | ||||
|             from .ssdp import SSDPr | ||||
|  | ||||
|             self.ssdp = SSDPr(broker) | ||||
|  | ||||
|         if self.tp_q: | ||||
|             self.start_threads(4) | ||||
| @@ -100,28 +170,49 @@ class HttpSrv(object): | ||||
|             if self.args.log_thrs: | ||||
|                 start_log_thrs(self.log, self.args.log_thrs, nid) | ||||
|  | ||||
|     def start_threads(self, n): | ||||
|         self.th_cfg: dict[str, Any] = {} | ||||
|         Daemon(self.post_init, "hsrv-init2") | ||||
|  | ||||
|     def post_init(self) -> None: | ||||
|         try: | ||||
|             x = self.broker.ask("thumbsrv.getcfg") | ||||
|             self.th_cfg = x.get() | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
|     def _build_statics(self) -> None: | ||||
|         for dp, _, df in os.walk(os.path.join(self.E.mod, "web")): | ||||
|             for fn in df: | ||||
|                 ap = absreal(os.path.join(dp, fn)) | ||||
|                 self.statics.add(ap) | ||||
|                 if ap.endswith(".gz") or ap.endswith(".br"): | ||||
|                     self.statics.add(ap[:-3]) | ||||
|  | ||||
|     def set_netdevs(self, netdevs: dict[str, Netdev]) -> None: | ||||
|         ips = set() | ||||
|         for ip, _ in self.bound: | ||||
|             ips.add(ip) | ||||
|  | ||||
|         self.nm = NetMap(list(ips), netdevs) | ||||
|  | ||||
|     def start_threads(self, n: int) -> None: | ||||
|         self.tp_nthr += n | ||||
|         if self.args.log_htp: | ||||
|             self.log(self.name, "workers += {} = {}".format(n, self.tp_nthr), 6) | ||||
|  | ||||
|         for _ in range(n): | ||||
|             thr = threading.Thread( | ||||
|                 target=self.thr_poolw, | ||||
|                 name=self.name + "-poolw", | ||||
|             ) | ||||
|             thr.daemon = True | ||||
|             thr.start() | ||||
|             Daemon(self.thr_poolw, self.name + "-poolw") | ||||
|  | ||||
|     def stop_threads(self, n): | ||||
|     def stop_threads(self, n: int) -> None: | ||||
|         self.tp_nthr -= n | ||||
|         if self.args.log_htp: | ||||
|             self.log(self.name, "workers -= {} = {}".format(n, self.tp_nthr), 6) | ||||
|  | ||||
|         assert self.tp_q | ||||
|         for _ in range(n): | ||||
|             self.tp_q.put(None) | ||||
|  | ||||
|     def periodic(self): | ||||
|     def periodic(self) -> None: | ||||
|         while True: | ||||
|             time.sleep(2 if self.tp_ncli or self.ncli else 10) | ||||
|             with self.mutex: | ||||
| @@ -135,65 +226,134 @@ class HttpSrv(object): | ||||
|                     self.t_periodic = None | ||||
|                     return | ||||
|  | ||||
|     def listen(self, sck, nlisteners): | ||||
|         ip, port = sck.getsockname() | ||||
|         self.srvs.append(sck) | ||||
|         self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners) | ||||
|         t = threading.Thread( | ||||
|             target=self.thr_listen, | ||||
|             args=(sck,), | ||||
|             name="httpsrv-n{}-listen-{}-{}".format(self.nid or "0", ip, port), | ||||
|         ) | ||||
|         t.daemon = True | ||||
|         t.start() | ||||
|     def listen(self, sck: socket.socket, nlisteners: int) -> None: | ||||
|         if self.args.j != 1: | ||||
|             # lost in the pickle; redefine | ||||
|             if not ANYWIN or self.args.reuseaddr: | ||||
|                 sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) | ||||
|  | ||||
|     def thr_listen(self, srv_sck): | ||||
|             sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) | ||||
|             sck.settimeout(None)  # < does not inherit, ^ opts above do | ||||
|  | ||||
|         ip, port = sck.getsockname()[:2] | ||||
|         self.srvs.append(sck) | ||||
|         self.bound.add((ip, port)) | ||||
|         self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners) | ||||
|         Daemon( | ||||
|             self.thr_listen, | ||||
|             "httpsrv-n{}-listen-{}-{}".format(self.nid or "0", ip, port), | ||||
|             (sck,), | ||||
|         ) | ||||
|  | ||||
|     def thr_listen(self, srv_sck: socket.socket) -> None: | ||||
|         """listens on a shared tcp server""" | ||||
|         ip, port = srv_sck.getsockname() | ||||
|         ip, port = srv_sck.getsockname()[:2] | ||||
|         fno = srv_sck.fileno() | ||||
|         msg = "subscribed @ {}:{}  f{}".format(ip, port, fno) | ||||
|         hip = "[{}]".format(ip) if ":" in ip else ip | ||||
|         msg = "subscribed @ {}:{}  f{} p{}".format(hip, port, fno, os.getpid()) | ||||
|         self.log(self.name, msg) | ||||
|  | ||||
|         def fun(): | ||||
|             self.broker.put(False, "cb_httpsrv_up") | ||||
|         def fun() -> None: | ||||
|             self.broker.say("cb_httpsrv_up") | ||||
|  | ||||
|         threading.Thread(target=fun).start() | ||||
|         threading.Thread(target=fun, name="sig-hsrv-up1").start() | ||||
|  | ||||
|         while not self.stopping: | ||||
|             if self.args.log_conn: | ||||
|                 self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30") | ||||
|                 self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="90") | ||||
|  | ||||
|             if self.ncli >= self.nclimax: | ||||
|                 self.log(self.name, "at connection limit; waiting", 3) | ||||
|                 while self.ncli >= self.nclimax: | ||||
|                     time.sleep(0.1) | ||||
|             spins = 0 | ||||
|             while self.ncli >= self.nclimax: | ||||
|                 if not spins: | ||||
|                     self.log(self.name, "at connection limit; waiting", 3) | ||||
|  | ||||
|                 spins += 1 | ||||
|                 time.sleep(0.1) | ||||
|                 if spins != 50 or not self.args.aclose: | ||||
|                     continue | ||||
|  | ||||
|                 ipfreq: dict[str, int] = {} | ||||
|                 with self.mutex: | ||||
|                     for c in self.clients: | ||||
|                         ip = ipnorm(c.ip) | ||||
|                         try: | ||||
|                             ipfreq[ip] += 1 | ||||
|                         except: | ||||
|                             ipfreq[ip] = 1 | ||||
|  | ||||
|                 ip, n = sorted(ipfreq.items(), key=lambda x: x[1], reverse=True)[0] | ||||
|                 if n < self.nclimax / 2: | ||||
|                     continue | ||||
|  | ||||
|                 self.aclose[ip] = int(time.time() + self.args.aclose * 60) | ||||
|                 nclose = 0 | ||||
|                 nloris = 0 | ||||
|                 nconn = 0 | ||||
|                 with self.mutex: | ||||
|                     for c in self.clients: | ||||
|                         cip = ipnorm(c.ip) | ||||
|                         if ip != cip: | ||||
|                             continue | ||||
|  | ||||
|                         nconn += 1 | ||||
|                         try: | ||||
|                             if ( | ||||
|                                 c.nreq >= 1 | ||||
|                                 or not c.cli | ||||
|                                 or c.cli.in_hdr_recv | ||||
|                                 or c.cli.keepalive | ||||
|                             ): | ||||
|                                 Daemon(c.shutdown) | ||||
|                                 nclose += 1 | ||||
|                                 if c.nreq <= 0 and (not c.cli or c.cli.in_hdr_recv): | ||||
|                                     nloris += 1 | ||||
|                         except: | ||||
|                             pass | ||||
|  | ||||
|                 t = "{} downgraded to connection:close for {} min; dropped {}/{} connections" | ||||
|                 self.log(self.name, t.format(ip, self.args.aclose, nclose, nconn), 1) | ||||
|  | ||||
|                 if nloris < nconn / 2: | ||||
|                     continue | ||||
|  | ||||
|                 t = "slowloris (idle-conn): {} banned for {} min" | ||||
|                 self.log(self.name, t.format(ip, self.args.loris, nclose), 1) | ||||
|                 self.bans[ip] = int(time.time() + self.args.loris * 60) | ||||
|  | ||||
|             if self.args.log_conn: | ||||
|                 self.log(self.name, "|%sC-acc1" % ("-" * 2,), c="1;30") | ||||
|                 self.log(self.name, "|%sC-acc1" % ("-" * 2,), c="90") | ||||
|  | ||||
|             try: | ||||
|                 sck, addr = srv_sck.accept() | ||||
|                 sck, saddr = srv_sck.accept() | ||||
|                 cip, cport = saddr[:2] | ||||
|                 if cip.startswith("::ffff:"): | ||||
|                     cip = cip[7:] | ||||
|  | ||||
|                 addr = (cip, cport) | ||||
|             except (OSError, socket.error) as ex: | ||||
|                 if self.stopping: | ||||
|                     break | ||||
|  | ||||
|                 self.log(self.name, "accept({}): {}".format(fno, ex), c=6) | ||||
|                 time.sleep(0.02) | ||||
|                 continue | ||||
|  | ||||
|             if self.args.log_conn: | ||||
|                 m = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format( | ||||
|                 t = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format( | ||||
|                     "-" * 3, ip, port % 8, port | ||||
|                 ) | ||||
|                 self.log("%s %s" % addr, m, c="1;30") | ||||
|                 self.log("%s %s" % addr, t, c="90") | ||||
|  | ||||
|             self.accept(sck, addr) | ||||
|  | ||||
|     def accept(self, sck, addr): | ||||
|     def accept(self, sck: socket.socket, addr: tuple[str, int]) -> None: | ||||
|         """takes an incoming tcp connection and creates a thread to handle it""" | ||||
|         now = time.time() | ||||
|  | ||||
|         if now - (self.tp_time or now) > 300: | ||||
|             m = "httpserver threadpool died: tpt {:.2f}, now {:.2f}, nthr {}, ncli {}" | ||||
|             self.log(self.name, m.format(self.tp_time, now, self.tp_nthr, self.ncli), 1) | ||||
|             self.tp_time = None | ||||
|             t = "httpserver threadpool died: tpt {:.2f}, now {:.2f}, nthr {}, ncli {}" | ||||
|             self.log(self.name, t.format(self.tp_time, now, self.tp_nthr, self.ncli), 1) | ||||
|             self.tp_time = 0 | ||||
|             self.tp_q = None | ||||
|  | ||||
|         with self.mutex: | ||||
| @@ -203,10 +363,7 @@ class HttpSrv(object): | ||||
|                 if self.nid: | ||||
|                     name += "-{}".format(self.nid) | ||||
|  | ||||
|                 t = threading.Thread(target=self.periodic, name=name) | ||||
|                 self.t_periodic = t | ||||
|                 t.daemon = True | ||||
|                 t.start() | ||||
|                 self.t_periodic = Daemon(self.periodic, name) | ||||
|  | ||||
|             if self.tp_q: | ||||
|                 self.tp_time = self.tp_time or now | ||||
| @@ -218,25 +375,24 @@ class HttpSrv(object): | ||||
|                 return | ||||
|  | ||||
|         if not self.args.no_htp: | ||||
|             m = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n" | ||||
|             self.log(self.name, m, 1) | ||||
|             t = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n" | ||||
|             self.log(self.name, t, 1) | ||||
|  | ||||
|         thr = threading.Thread( | ||||
|             target=self.thr_client, | ||||
|             args=(sck, addr), | ||||
|             name="httpconn-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]), | ||||
|         Daemon( | ||||
|             self.thr_client, | ||||
|             "httpconn-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]), | ||||
|             (sck, addr), | ||||
|         ) | ||||
|         thr.daemon = True | ||||
|         thr.start() | ||||
|  | ||||
|     def thr_poolw(self): | ||||
|     def thr_poolw(self) -> None: | ||||
|         assert self.tp_q | ||||
|         while True: | ||||
|             task = self.tp_q.get() | ||||
|             if not task: | ||||
|                 break | ||||
|  | ||||
|             with self.mutex: | ||||
|                 self.tp_time = None | ||||
|                 self.tp_time = 0 | ||||
|  | ||||
|             try: | ||||
|                 sck, addr = task | ||||
| @@ -246,10 +402,13 @@ class HttpSrv(object): | ||||
|                 ) | ||||
|                 self.thr_client(sck, addr) | ||||
|                 me.name = self.name + "-poolw" | ||||
|             except: | ||||
|                 self.log(self.name, "thr_client: " + min_ex(), 3) | ||||
|             except Exception as ex: | ||||
|                 if str(ex).startswith("client d/c "): | ||||
|                     self.log(self.name, "thr_client: " + str(ex), 6) | ||||
|                 else: | ||||
|                     self.log(self.name, "thr_client: " + min_ex(), 3) | ||||
|  | ||||
|     def shutdown(self): | ||||
|     def shutdown(self) -> None: | ||||
|         self.stopping = True | ||||
|         for srv in self.srvs: | ||||
|             try: | ||||
| @@ -257,12 +416,12 @@ class HttpSrv(object): | ||||
|             except: | ||||
|                 pass | ||||
|  | ||||
|         clients = list(self.clients.keys()) | ||||
|         thrs = [] | ||||
|         clients = list(self.clients) | ||||
|         for cli in clients: | ||||
|             try: | ||||
|                 cli.shutdown() | ||||
|             except: | ||||
|                 pass | ||||
|             t = threading.Thread(target=cli.shutdown) | ||||
|             thrs.append(t) | ||||
|             t.start() | ||||
|  | ||||
|         if self.tp_q: | ||||
|             self.stop_threads(self.tp_nthr) | ||||
| @@ -271,25 +430,27 @@ class HttpSrv(object): | ||||
|                 if self.tp_q.empty(): | ||||
|                     break | ||||
|  | ||||
|         for t in thrs: | ||||
|             t.join() | ||||
|  | ||||
|         self.log(self.name, "ok bye") | ||||
|  | ||||
|     def thr_client(self, sck, addr): | ||||
|     def thr_client(self, sck: socket.socket, addr: tuple[str, int]) -> None: | ||||
|         """thread managing one tcp client""" | ||||
|         sck.settimeout(120) | ||||
|  | ||||
|         cli = HttpConn(sck, addr, self) | ||||
|         with self.mutex: | ||||
|             self.clients[cli] = 0 | ||||
|             self.clients.add(cli) | ||||
|  | ||||
|         # print("{}\n".format(len(self.clients)), end="") | ||||
|         fno = sck.fileno() | ||||
|         try: | ||||
|             if self.args.log_conn: | ||||
|                 self.log("%s %s" % addr, "|%sC-crun" % ("-" * 4,), c="1;30") | ||||
|                 self.log("%s %s" % addr, "|%sC-crun" % ("-" * 4,), c="90") | ||||
|  | ||||
|             cli.run() | ||||
|  | ||||
|         except (OSError, socket.error) as ex: | ||||
|             if ex.errno not in [10038, 10054, 107, 57, 49, 9]: | ||||
|             if ex.errno not in E_SCK: | ||||
|                 self.log( | ||||
|                     "%s %s" % addr, | ||||
|                     "run({}): {}".format(fno, ex), | ||||
| @@ -299,33 +460,29 @@ class HttpSrv(object): | ||||
|         finally: | ||||
|             sck = cli.s | ||||
|             if self.args.log_conn: | ||||
|                 self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 5,), c="1;30") | ||||
|                 self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 5,), c="90") | ||||
|  | ||||
|             try: | ||||
|                 fno = sck.fileno() | ||||
|                 sck.shutdown(socket.SHUT_RDWR) | ||||
|                 sck.close() | ||||
|                 shut_socket(cli.log, sck) | ||||
|             except (OSError, socket.error) as ex: | ||||
|                 if not MACOS: | ||||
|                     self.log( | ||||
|                         "%s %s" % addr, | ||||
|                         "shut({}): {}".format(fno, ex), | ||||
|                         c="1;30", | ||||
|                         c="90", | ||||
|                     ) | ||||
|                 if ex.errno not in [10038, 10054, 107, 57, 49, 9]: | ||||
|                     # 10038 No longer considered a socket | ||||
|                     # 10054 Foribly closed by remote | ||||
|                     #   107 Transport endpoint not connected | ||||
|                     #    57 Socket is not connected | ||||
|                     #    49 Can't assign requested address (wifi down) | ||||
|                     #     9 Bad file descriptor | ||||
|                 if ex.errno not in E_SCK: | ||||
|                     raise | ||||
|             finally: | ||||
|                 with self.mutex: | ||||
|                     del self.clients[cli] | ||||
|                     self.clients.remove(cli) | ||||
|                     self.ncli -= 1 | ||||
|  | ||||
|     def cachebuster(self): | ||||
|                 if cli.u2idx: | ||||
|                     self.put_u2idx(str(addr), cli.u2idx) | ||||
|  | ||||
|     def cachebuster(self) -> str: | ||||
|         if time.time() - self.cb_ts < 1: | ||||
|             return self.cb_v | ||||
|  | ||||
| @@ -333,9 +490,9 @@ class HttpSrv(object): | ||||
|             if time.time() - self.cb_ts < 1: | ||||
|                 return self.cb_v | ||||
|  | ||||
|             v = E.t0 | ||||
|             v = self.E.t0 | ||||
|             try: | ||||
|                 with os.scandir(os.path.join(E.mod, "web")) as dh: | ||||
|                 with os.scandir(os.path.join(self.E.mod, "web")) as dh: | ||||
|                     for fh in dh: | ||||
|                         inf = fh.stat() | ||||
|                         v = max(v, inf.st_mtime) | ||||
| @@ -346,3 +503,31 @@ class HttpSrv(object): | ||||
|             self.cb_v = v.decode("ascii")[-4:] | ||||
|             self.cb_ts = time.time() | ||||
|             return self.cb_v | ||||
|  | ||||
|     def get_u2idx(self, ident: str) -> Optional[U2idx]: | ||||
|         utab = self.u2idx_free | ||||
|         for _ in range(100):  # 5/0.05 = 5sec | ||||
|             with self.mutex: | ||||
|                 if utab: | ||||
|                     if ident in utab: | ||||
|                         return utab.pop(ident) | ||||
|  | ||||
|                     return utab.pop(list(utab.keys())[0]) | ||||
|  | ||||
|                 if self.u2idx_n < CORES: | ||||
|                     self.u2idx_n += 1 | ||||
|                     return U2idx(self) | ||||
|  | ||||
|             time.sleep(0.05) | ||||
|             # not using conditional waits, on a hunch that | ||||
|             # average performance will be faster like this | ||||
|             # since most servers won't be fully saturated | ||||
|  | ||||
|         return None | ||||
|  | ||||
|     def put_u2idx(self, ident: str, u2idx: U2idx) -> None: | ||||
|         with self.mutex: | ||||
|             while ident in self.u2idx_free: | ||||
|                 ident += "a" | ||||
|  | ||||
|             self.u2idx_free[ident] = u2idx | ||||
|   | ||||
| @@ -1,33 +1,82 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import hashlib | ||||
| import argparse  # typechk | ||||
| import colorsys | ||||
| import hashlib | ||||
|  | ||||
| from .__init__ import PY2 | ||||
| from .th_srv import HAVE_PIL | ||||
| from .util import BytesIO | ||||
|  | ||||
|  | ||||
| class Ico(object): | ||||
|     def __init__(self, args): | ||||
|     def __init__(self, args: argparse.Namespace) -> None: | ||||
|         self.args = args | ||||
|  | ||||
|     def get(self, ext, as_thumb): | ||||
|     def get(self, ext: str, as_thumb: bool, chrome: bool) -> tuple[str, bytes]: | ||||
|         """placeholder to make thumbnails not break""" | ||||
|  | ||||
|         h = hashlib.md5(ext.encode("utf-8")).digest()[:2] | ||||
|         bext = ext.encode("ascii", "replace") | ||||
|         ext = bext.decode("utf-8") | ||||
|         zb = hashlib.sha1(bext).digest()[2:4] | ||||
|         if PY2: | ||||
|             h = [ord(x) for x in h] | ||||
|             zb = [ord(x) for x in zb] | ||||
|  | ||||
|         c1 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 0.3) | ||||
|         c2 = colorsys.hsv_to_rgb(h[0] / 256.0, 1, 1) | ||||
|         c = list(c1) + list(c2) | ||||
|         c = [int(x * 255) for x in c] | ||||
|         c = "".join(["{:02x}".format(x) for x in c]) | ||||
|         c1 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 0.3) | ||||
|         c2 = colorsys.hsv_to_rgb(zb[0] / 256.0, 1, 1) | ||||
|         ci = [int(x * 255) for x in list(c1) + list(c2)] | ||||
|         c = "".join(["{:02x}".format(x) for x in ci]) | ||||
|  | ||||
|         w = 100 | ||||
|         h = 30 | ||||
|         if not self.args.th_no_crop and as_thumb: | ||||
|             w, h = self.args.th_size.split("x") | ||||
|             h = int(100 / (float(w) / float(h))) | ||||
|             sw, sh = self.args.th_size.split("x") | ||||
|             h = int(100 / (float(sw) / float(sh))) | ||||
|             w = 100 | ||||
|  | ||||
|         if chrome: | ||||
|             # cannot handle more than ~2000 unique SVGs | ||||
|             if HAVE_PIL: | ||||
|                 # svg: 3s, cache: 6s, this: 8s | ||||
|                 from PIL import Image, ImageDraw | ||||
|  | ||||
|                 h = int(64 * h / w) | ||||
|                 w = 64 | ||||
|                 img = Image.new("RGB", (w, h), "#" + c[:6]) | ||||
|                 pb = ImageDraw.Draw(img) | ||||
|                 try: | ||||
|                     _, _, tw, th = pb.textbbox((0, 0), ext) | ||||
|                 except: | ||||
|                     tw, th = pb.textsize(ext) | ||||
|  | ||||
|                 tw += len(ext) | ||||
|                 cw = tw // len(ext) | ||||
|                 x = ((w - tw) // 2) - (cw * 2) // 3 | ||||
|                 fill = "#" + c[6:] | ||||
|                 for ch in ext: | ||||
|                     pb.text((x, (h - th) // 2), " %s " % (ch,), fill=fill) | ||||
|                     x += cw | ||||
|  | ||||
|                 img = img.resize((w * 3, h * 3), Image.NEAREST) | ||||
|  | ||||
|                 buf = BytesIO() | ||||
|                 img.save(buf, format="PNG", compress_level=1) | ||||
|                 return "image/png", buf.getvalue() | ||||
|  | ||||
|             elif False: | ||||
|                 # 48s, too slow | ||||
|                 import pyvips | ||||
|  | ||||
|                 h = int(192 * h / w) | ||||
|                 w = 192 | ||||
|                 img = pyvips.Image.text( | ||||
|                     ext, width=w, height=h, dpi=192, align=pyvips.Align.CENTRE | ||||
|                 ) | ||||
|                 img = img.ifthenelse(ci[3:], ci[:3], blend=True) | ||||
|                 # i = i.resize(3, kernel=pyvips.Kernel.NEAREST) | ||||
|                 buf = img.write_to_buffer(".png[compression=1]") | ||||
|                 return "image/png", buf | ||||
|  | ||||
|         svg = """\ | ||||
| <?xml version="1.0" encoding="UTF-8"?> | ||||
| @@ -37,6 +86,6 @@ class Ico(object): | ||||
|   fill="#{}" font-family="monospace" font-size="14px" style="letter-spacing:.5px">{}</text> | ||||
| </g></svg> | ||||
| """ | ||||
|         svg = svg.format(h, c[:6], c[6:], ext).encode("utf-8") | ||||
|         svg = svg.format(h, c[:6], c[6:], ext) | ||||
|  | ||||
|         return ["image/svg+xml", svg] | ||||
|         return "image/svg+xml", svg.encode("utf-8") | ||||
|   | ||||
							
								
								
									
										555
									
								
								copyparty/mdns.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										555
									
								
								copyparty/mdns.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,555 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import errno | ||||
| import random | ||||
| import select | ||||
| import socket | ||||
| import time | ||||
|  | ||||
| from ipaddress import IPv4Network, IPv6Network | ||||
|  | ||||
| from .__init__ import TYPE_CHECKING | ||||
| from .__init__ import unicode as U | ||||
| from .multicast import MC_Sck, MCast | ||||
| from .stolen.dnslib import AAAA | ||||
| from .stolen.dnslib import CLASS as DC | ||||
| from .stolen.dnslib import ( | ||||
|     NSEC, | ||||
|     PTR, | ||||
|     QTYPE, | ||||
|     RR, | ||||
|     SRV, | ||||
|     TXT, | ||||
|     A, | ||||
|     DNSHeader, | ||||
|     DNSQuestion, | ||||
|     DNSRecord, | ||||
| ) | ||||
| from .util import CachedSet, Daemon, Netdev, list_ips, min_ex | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .svchub import SvcHub | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Optional, Union | ||||
|  | ||||
|  | ||||
| MDNS4 = "224.0.0.251" | ||||
| MDNS6 = "ff02::fb" | ||||
|  | ||||
|  | ||||
class MDNS_Sck(MC_Sck):
    """one mdns server socket; carries the prebuilt reply packets for its nic"""

    def __init__(
        self,
        sck: socket.socket,
        nd: Netdev,
        grp: str,
        ip: str,
        net: Union[IPv4Network, IPv6Network],
    ) -> None:
        super(MDNS_Sck, self).__init__(sck, nd, grp, ip, net)

        # prebuilt packets, filled in by MDNS.build_replies:
        self.bp_probe = b""  # hostname probe
        self.bp_ip = b""  # a/aaaa reply
        self.bp_svc = b""  # full service reply
        self.bp_bye = b""  # goodbye (ttl=0) announce

        self.last_tx = 0.0  # when this socket last multicasted (see MDNS._tx)
        self.tx_ex = False  # a send has already failed once (log only once)
|  | ||||
|  | ||||
| class MDNS(MCast): | ||||
    def __init__(self, hub: "SvcHub", ngen: int) -> None:
        """mdns responder (generation `ngen`); announces the services enabled in hub.args"""
        al = hub.args
        # if one address family is explicitly requested, skip the other group
        grp4 = "" if al.zm6 else MDNS4
        grp6 = "" if al.zm4 else MDNS6
        super(MDNS, self).__init__(
            hub, MDNS_Sck, al.zm_on, al.zm_off, grp4, grp6, 5353, hub.args.zmv
        )
        self.srv: dict[socket.socket, MDNS_Sck] = {}
        self.logsrc = "mDNS-{}".format(ngen)
        self.ngen = ngen
        self.ttl = 300

        # mdns hostname: --name forced to ascii ("?" for replaced chars,
        # which are then stripped); random fallback if nothing remains
        zs = self.args.name + ".local."
        zs = zs.encode("ascii", "replace").decode("ascii", "replace")
        self.hn = "-".join(x for x in zs.split("?") if x) or (
            "vault-{}".format(random.randint(1, 255))
        )
        self.lhn = self.hn.lower()  # lowercase for case-insensitive compares

        # requester ip -> (response deadline, srv, body):
        self.q: dict[str, tuple[float, MDNS_Sck, bytes]] = {}
        self.rx4 = CachedSet(0.42)  # 3 probes @ 250..500..750 => 500ms span
        self.rx6 = CachedSet(0.42)
        self.svcs, self.sfqdns = self.build_svcs()
        self.lsvcs = {k.lower(): v for k, v in self.svcs.items()}
        self.lsfqdns = set([x.lower() for x in self.sfqdns])

        self.probing = 0.0  # deadline for the initial hostname probe; 0 = done
        self.unsolicited: list[float] = []  # scheduled announces on all nics
        self.defend: dict[MDNS_Sck, float] = {}  # server -> deadline
|  | ||||
    def log(self, msg: str, c: Union[int, str] = 0) -> None:
        """log `msg` with this responder's generation-tag as source; `c` = color/style"""
        self.log_func(self.logsrc, msg, c)
|  | ||||
|     def build_svcs(self) -> tuple[dict[str, dict[str, Any]], set[str]]: | ||||
|         zms = self.args.zms | ||||
|         http = {"port": 80 if 80 in self.args.p else self.args.p[0]} | ||||
|         https = {"port": 443 if 443 in self.args.p else self.args.p[0]} | ||||
|         webdav = http.copy() | ||||
|         webdavs = https.copy() | ||||
|         webdav["u"] = webdavs["u"] = "u"  # KDE requires username | ||||
|         ftp = {"port": (self.args.ftp if "f" in zms else self.args.ftps)} | ||||
|         smb = {"port": self.args.smb_port} | ||||
|  | ||||
|         # some gvfs require path | ||||
|         zs = self.args.zm_ld or "/" | ||||
|         if zs: | ||||
|             webdav["path"] = zs | ||||
|             webdavs["path"] = zs | ||||
|  | ||||
|         if self.args.zm_lh: | ||||
|             http["path"] = self.args.zm_lh | ||||
|             https["path"] = self.args.zm_lh | ||||
|  | ||||
|         if self.args.zm_lf: | ||||
|             ftp["path"] = self.args.zm_lf | ||||
|  | ||||
|         if self.args.zm_ls: | ||||
|             smb["path"] = self.args.zm_ls | ||||
|  | ||||
|         svcs: dict[str, dict[str, Any]] = {} | ||||
|  | ||||
|         if "d" in zms: | ||||
|             svcs["_webdav._tcp.local."] = webdav | ||||
|  | ||||
|         if "D" in zms: | ||||
|             svcs["_webdavs._tcp.local."] = webdavs | ||||
|  | ||||
|         if "h" in zms: | ||||
|             svcs["_http._tcp.local."] = http | ||||
|  | ||||
|         if "H" in zms: | ||||
|             svcs["_https._tcp.local."] = https | ||||
|  | ||||
|         if "f" in zms.lower(): | ||||
|             svcs["_ftp._tcp.local."] = ftp | ||||
|  | ||||
|         if "s" in zms.lower(): | ||||
|             svcs["_smb._tcp.local."] = smb | ||||
|  | ||||
|         sfqdns: set[str] = set() | ||||
|         for k, v in svcs.items(): | ||||
|             name = "{}-c-{}".format(self.args.name, k.split(".")[0][1:]) | ||||
|             v["name"] = name | ||||
|             sfqdns.add("{}.{}".format(name, k)) | ||||
|  | ||||
|         return svcs, sfqdns | ||||
|  | ||||
    def build_replies(self) -> None:
        """precompute the probe / announce / goodbye packets for each server socket"""
        for srv in self.srv.values():
            probe = DNSRecord(DNSHeader(0, 0), q=DNSQuestion(self.hn, QTYPE.ANY))
            areply = DNSRecord(DNSHeader(0, 0x8400))
            sreply = DNSRecord(DNSHeader(0, 0x8400))
            bye = DNSRecord(DNSHeader(0, 0x8400))

            # which address families exist on this nic (same srv.idx)
            have4 = have6 = False
            for s2 in self.srv.values():
                if srv.idx != s2.idx:
                    continue

                if s2.v6:
                    have6 = True
                else:
                    have4 = True

            for ip in srv.ips:
                if ":" in ip:
                    qt = QTYPE.AAAA
                    ar = {"rclass": DC.F_IN, "rdata": AAAA(ip)}
                else:
                    qt = QTYPE.A
                    ar = {"rclass": DC.F_IN, "rdata": A(ip)}

                r0 = RR(self.hn, qt, ttl=0, **ar)
                r120 = RR(self.hn, qt, ttl=120, **ar)
                # rfc-10:
                #   SHOULD rr ttl 120sec for A/AAAA/SRV
                #   (and recommend 75min for all others)

                probe.add_auth(r120)
                areply.add_answer(r120)
                sreply.add_answer(r120)
                bye.add_answer(r0)  # ttl=0 record retracts the announce

            for sclass, props in self.svcs.items():
                sname = props["name"]
                sport = props["port"]
                sfqdn = sname + "." + sclass

                # dns-sd service enumeration pointer
                k = "_services._dns-sd._udp.local."
                r = RR(k, QTYPE.PTR, DC.IN, 4500, PTR(sclass))
                sreply.add_answer(r)

                r = RR(sclass, QTYPE.PTR, DC.IN, 4500, PTR(sfqdn))
                sreply.add_answer(r)

                r = RR(sfqdn, QTYPE.SRV, DC.F_IN, 120, SRV(0, 0, sport, self.hn))
                sreply.add_answer(r)
                areply.add_answer(r)

                r = RR(sfqdn, QTYPE.SRV, DC.F_IN, 0, SRV(0, 0, sport, self.hn))
                bye.add_answer(r)

                txts = []
                for k in ("u", "path"):
                    if k not in props:
                        continue

                    zb = "{}={}".format(k, props[k]).encode("utf-8")
                    if len(zb) > 255:
                        t = "value too long for mdns: [{}]"
                        raise Exception(t.format(props[k]))

                    txts.append(zb)

                # gvfs really wants txt even if they're empty
                r = RR(sfqdn, QTYPE.TXT, DC.F_IN, 4500, TXT(txts))
                sreply.add_answer(r)

            # nsec advertises which address-family records exist on this nic
            if not (have4 and have6) and not self.args.zm_noneg:
                ns = NSEC(self.hn, ["AAAA" if have6 else "A"])
                r = RR(self.hn, QTYPE.NSEC, DC.F_IN, 120, ns)
                areply.add_ar(r)
                if len(sreply.pack()) < 1400:  # only if it still fits in one packet
                    sreply.add_ar(r)

            srv.bp_probe = probe.pack()
            srv.bp_ip = areply.pack()
            srv.bp_svc = sreply.pack()
            srv.bp_bye = bye.pack()

            # since all replies are small enough to fit in one packet,
            # always send full replies rather than just a/aaaa records
            srv.bp_ip = srv.bp_svc
|  | ||||
|     def send_probes(self) -> None: | ||||
|         slp = random.random() * 0.25 | ||||
|         for _ in range(3): | ||||
|             time.sleep(slp) | ||||
|             slp = 0.25 | ||||
|             if not self.running: | ||||
|                 break | ||||
|  | ||||
|             if self.args.zmv: | ||||
|                 self.log("sending hostname probe...") | ||||
|  | ||||
|             # ipv4: need to probe each ip (each server) | ||||
|             # ipv6: only need to probe each set of looped nics | ||||
|             probed6: set[str] = set() | ||||
|             for srv in self.srv.values(): | ||||
|                 if srv.ip in probed6: | ||||
|                     continue | ||||
|  | ||||
|                 try: | ||||
|                     srv.sck.sendto(srv.bp_probe, (srv.grp, 5353)) | ||||
|                     if srv.v6: | ||||
|                         for ip in srv.ips: | ||||
|                             probed6.add(ip) | ||||
|                 except Exception as ex: | ||||
|                     self.log("sendto failed: {} ({})".format(srv.ip, ex), "90") | ||||
|  | ||||
    def run(self) -> None:
        """entrypoint: bind sockets, probe for the hostname, then serve until stopped"""
        try:
            bound = self.create_servers()
        except:
            t = "no server IP matches the mdns config\n{}"
            self.log(t.format(min_ex()), 1)
            bound = []

        if not bound:
            self.log("failed to announce copyparty services on the network", 3)
            return

        self.build_replies()
        Daemon(self.send_probes)  # probe in the background while we listen
        zf = time.time() + 2
        self.probing = zf  # cant unicast so give everyone an extra sec
        self.unsolicited = [zf, zf + 1, zf + 3, zf + 7]  # rfc-8.3

        try:
            self.run2()
        except OSError as ex:
            if ex.errno != errno.EBADF:
                raise

            # EBADF: presumably our socket was closed during shutdown -- quiet exit
            self.log("stopping due to {}".format(ex), "90")

        self.log("stopped", 2)
|  | ||||
    def run2(self) -> None:
        """receive loop: eat incoming packets, answer queries, defend the hostname"""
        last_hop = time.time()
        ihop = self.args.mc_hop
        while self.running:
            # short randomized timeout while there is pending work (probing /
            # queued replies / defenses), otherwise sleep until the next
            # scheduled unsolicited announce
            timeout = (
                0.02 + random.random() * 0.07
                if self.probing or self.q or self.defend
                else max(0.05, self.unsolicited[0] - time.time())
                if self.unsolicited
                else (last_hop + ihop if ihop else 180)
            )
            # NOTE(review): when ihop is set, `last_hop + ihop` is an absolute
            # timestamp passed as a select() timeout (seconds), and last_hop is
            # never refreshed in this chunk -- confirm against upstream
            rdy = select.select(self.srv, [], [], timeout)
            rx: list[socket.socket] = rdy[0]  # type: ignore
            self.rx4.cln()
            self.rx6.cln()
            buf = b""
            addr = ("0", 0)
            for sck in rx:
                try:
                    buf, addr = sck.recvfrom(4096)
                    self.eat(buf, addr, sck)
                except:
                    if not self.running:
                        self.log("stopped", 2)
                        return

                    t = "{} {} \033[33m|{}| {}\n{}".format(
                        self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
                    )
                    self.log(t, 6)

            if not self.probing:
                self.process()
                continue

            # probe deadline passed without a conflict: claim the hostname
            if self.probing < time.time():
                t = "probe ok; announcing [{}]"
                self.log(t.format(self.hn[:-1]), 2)
                self.probing = 0
|  | ||||
    def stop(self, panic: bool = False) -> None:
        """stop serving; send goodbye packets, or just close the sockets if `panic`"""
        self.running = False
        for srv in self.srv.values():
            try:
                if panic:
                    srv.sck.close()
                else:
                    srv.sck.sendto(srv.bp_bye, (srv.grp, 5353))
            except:
                pass  # best-effort; sockets may already be dead

        self.srv = {}
|  | ||||
    def eat(self, buf: bytes, addr: tuple[str, int], sck: socket.socket) -> None:
        """parse one incoming packet and send / schedule the appropriate replies"""
        cip = addr[0]
        v6 = ":" in cip
        # only accept linklocal ipv6, and 169.254.* only when we are
        # linklocal ourselves (self.ll_ok)
        if (cip.startswith("169.254") and not self.ll_ok) or (
            v6 and not cip.startswith("fe80")
        ):
            return

        # drop packets seen recently on this address family (dedupe)
        cache = self.rx6 if v6 else self.rx4
        if buf in cache.c:
            return

        srv: Optional[MDNS_Sck] = self.srv[sck] if v6 else self.map_client(cip)  # type: ignore
        if not srv:
            return

        cache.add(buf)
        now = time.time()

        if self.args.zmv and cip != srv.ip and cip not in srv.ips:
            t = "{} [{}] \033[36m{} \033[0m|{}|"
            self.log(t.format(srv.name, srv.ip, cip, len(buf)), "90")

        p = DNSRecord.parse(buf)
        if self.args.zmvv:
            self.log(str(p))

        # check for incoming probes for our hostname
        cips = [U(x.rdata) for x in p.auth if U(x.rname).lower() == self.lhn]
        if cips and self.sips.isdisjoint(cips):
            if not [x for x in cips if x not in ("::1", "127.0.0.1")]:
                # avahi broadcasting 127.0.0.1-only packets
                return

            self.log("someone trying to steal our hostname: {}".format(cips), 3)
            # immediately unicast
            if not self.probing:
                srv.sck.sendto(srv.bp_ip, (cip, 5353))

            # and schedule multicast
            self.defend[srv] = self.defend.get(srv, now + 0.1)
            return

        # check for someone rejecting our probe / hijacking our hostname
        cips = [
            U(x.rdata)
            for x in p.rr
            if U(x.rname).lower() == self.lhn and x.rclass == DC.F_IN
        ]
        if cips and self.sips.isdisjoint(cips):
            if not [x for x in cips if x not in ("::1", "127.0.0.1")]:
                # avahi broadcasting 127.0.0.1-only packets
                return

            # check if we've been given additional IPs
            for ip in list_ips():
                if ip in cips:
                    self.sips.add(ip)

            if not self.sips.isdisjoint(cips):
                return

            t = "mdns zeroconf: "
            if self.probing:
                t += "Cannot start; hostname '{}' is occupied"
            else:
                t += "Emergency stop; hostname '{}' got stolen"

            t += " on {}! Use --name to set another hostname.\n\nName taken by {}\n\nYour IPs: {}\n"
            self.log(t.format(self.args.name, srv.name, cips, list(self.sips)), 1)
            self.stop(True)
            return

        # then rfc-6.7; dns pretending to be mdns (android...)
        if p.header.id or addr[1] != 5353:
            rsp: Optional[DNSRecord] = None
            for r in p.questions:
                try:
                    lhn = U(r.qname).lower()
                except:
                    self.log("invalid question: {}".format(r))
                    continue

                if lhn != self.lhn:
                    continue

                if p.header.id and r.qtype in (QTYPE.A, QTYPE.AAAA):
                    rsp = rsp or DNSRecord(DNSHeader(p.header.id, 0x8400))
                    rsp.add_question(r)
                    for ip in srv.ips:
                        qt = r.qtype
                        v6 = ":" in ip
                        if v6 == (qt == QTYPE.AAAA):
                            rd = AAAA(ip) if v6 else A(ip)
                            rr = RR(self.hn, qt, DC.IN, 10, rd)
                            rsp.add_answer(rr)
            if rsp:
                srv.sck.sendto(rsp.pack(), addr[:2])
                # but don't return in case it's a differently broken client

        # then a/aaaa records
        for r in p.questions:
            try:
                lhn = U(r.qname).lower()
            except:
                self.log("invalid question: {}".format(r))
                continue

            if lhn != self.lhn:
                continue

            # gvfs keeps repeating itself
            found = False
            unicast = False
            for rr in p.rr:
                try:
                    rname = U(rr.rname).lower()
                except:
                    self.log("invalid rr: {}".format(rr))
                    continue

                if rname == self.lhn:
                    if rr.ttl > 60:
                        found = True
                    if rr.rclass == DC.F_IN:
                        unicast = True

            if unicast:
                # spec-compliant mDNS-over-unicast
                srv.sck.sendto(srv.bp_ip, (cip, 5353))
            elif addr[1] != 5353:
                # just in case some clients use (and want us to use) invalid ports
                srv.sck.sendto(srv.bp_ip, addr[:2])

            if not found:
                # deadline 0 = reply on the next process() pass
                self.q[cip] = (0, srv, srv.bp_ip)
                return

        deadline = now + (0.5 if p.header.tc else 0.02)  # rfc-7.2

        # and service queries
        for r in p.questions:
            if not r or not r.qname:
                continue

            qname = U(r.qname).lower()
            if qname in self.lsvcs or qname == "_services._dns-sd._udp.local.":
                self.q[cip] = (deadline, srv, srv.bp_svc)
                break
        # heed rfc-7.1 if there was an announce in the past 12sec
        # (workaround gvfs race-condition where it occasionally
        #  doesn't read/decode the full response...)
        if now < srv.last_tx + 12:
            for rr in p.rr:
                if not rr.rdata:
                    continue

                rdata = U(rr.rdata).lower()
                if rdata in self.lsfqdns:
                    if rr.ttl > 2250:
                        self.q.pop(cip, None)
                    break
|  | ||||
    def process(self) -> None:
        """flush due work: unsolicited announces, hostname defenses, queued replies"""
        tx = set()  # servers that should send a full unsolicited announce
        now = time.time()
        cooldown = 0.9  # rfc-6: 1
        if self.unsolicited and self.unsolicited[0] < now:
            self.unsolicited.pop(0)
            cooldown = 0.1  # announces get a shorter cooldown
            for srv in self.srv.values():
                tx.add(srv)

            if not self.unsolicited and self.args.zm_spam:
                # reschedule periodic announces if configured
                zf = time.time() + self.args.zm_spam + random.random() * 0.07
                self.unsolicited.append(zf)

        for srv, deadline in list(self.defend.items()):
            if now < deadline:
                continue

            if self._tx(srv, srv.bp_ip, 0.02):  # rfc-6: 0.25
                self.defend.pop(srv)

        for cip, (deadline, srv, msg) in list(self.q.items()):
            if now < deadline:
                continue

            self.q.pop(cip)
            self._tx(srv, msg, cooldown)

        for srv in tx:
            self._tx(srv, srv.bp_svc, cooldown)
|  | ||||
|     def _tx(self, srv: MDNS_Sck, msg: bytes, cooldown: float) -> bool: | ||||
|         now = time.time() | ||||
|         if now < srv.last_tx + cooldown: | ||||
|             return False | ||||
|  | ||||
|         try: | ||||
|             srv.sck.sendto(msg, (srv.grp, 5353)) | ||||
|             srv.last_tx = now | ||||
|         except Exception as ex: | ||||
|             if srv.tx_ex: | ||||
|                 return True | ||||
|  | ||||
|             srv.tx_ex = True | ||||
|             t = "tx({},|{}|,{}): {}" | ||||
|             self.log(t.format(srv.ip, len(msg), cooldown, ex), 3) | ||||
|  | ||||
|         return True | ||||
							
								
								
									
										165
									
								
								copyparty/metrics.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										165
									
								
								copyparty/metrics.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,165 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import json | ||||
| import time | ||||
|  | ||||
| from .__init__ import TYPE_CHECKING | ||||
| from .util import Pebkac, get_df, unhumanize | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .httpcli import HttpCli | ||||
|     from .httpsrv import HttpSrv | ||||
|  | ||||
|  | ||||
class Metrics(object):
    """renders server statistics in the openmetrics text exposition format"""

    def __init__(self, hsrv: "HttpSrv") -> None:
        self.hsrv = hsrv

    def tx(self, cli: "HttpCli") -> bool:
        """
        build and send the metrics response to client `cli`;
        returns True after replying

        raises Pebkac(403) unless the user is admin in some volume
        and the stats feature is enabled in server config
        """
        if not cli.avol:
            raise Pebkac(403, "not allowed for user " + cli.uname)

        args = cli.args
        if not args.stats:
            raise Pebkac(403, "the stats feature is not enabled in server config")

        conn = cli.conn
        vfs = conn.asrv.vfs
        allvols = list(sorted(vfs.all_vols.items()))

        # db-backed metrics need a usable up2k index; p_end is taken as the
        # readiness marker here -- presumably set once indexing is possible
        # (NOTE(review): confirm against the u2idx implementation)
        idx = conn.get_u2idx()
        if not idx or not hasattr(idx, "p_end"):
            idx = None

        ret: list[str] = []  # output lines; joined with "\n" at the end

        def addc(k: str, unit: str, v: str, desc: str) -> None:
            # append a counter metric: TYPE (+ optional UNIT) + HELP headers,
            # then the <name>_created and <name>_total samples
            if unit:
                k += "_" + unit
                zs = "# TYPE %s counter\n# UNIT %s %s\n# HELP %s %s\n%s_created %s\n%s_total %s"
                ret.append(zs % (k, k, unit, k, desc, k, int(self.hsrv.t0), k, v))
            else:
                zs = "# TYPE %s counter\n# HELP %s %s\n%s_created %s\n%s_total %s"
                ret.append(zs % (k, k, desc, k, int(self.hsrv.t0), k, v))

        def addh(k: str, typ: str, desc: str) -> None:
            # append a TYPE + HELP header (no unit)
            zs = "# TYPE %s %s\n# HELP %s %s"
            ret.append(zs % (k, typ, k, desc))

        def addbh(k: str, desc: str) -> None:
            # append a gauge header for a byte-valued metric
            zs = "# TYPE %s gauge\n# UNIT %s bytes\n# HELP %s %s"
            ret.append(zs % (k, k, k, desc))

        def addv(k: str, v: str) -> None:
            # append a single sample line
            ret.append("%s %s" % (k, v))

        v = "{:.3f}".format(time.time() - self.hsrv.t0)
        addc("cpp_uptime", "seconds", v, "time since last server restart")

        v = str(len(conn.bans or []))
        addc("cpp_bans", "", v, "number of banned IPs")

        if not args.nos_hdd:
            # filesystem-level size/free per volume
            addbh("cpp_disk_size_bytes", "total HDD size of volume")
            addbh("cpp_disk_free_bytes", "free HDD space in volume")
            for vpath, vol in allvols:
                free, total = get_df(vol.realpath)
                addv('cpp_disk_size_bytes{vol="/%s"}' % (vpath), str(total))
                addv('cpp_disk_free_bytes{vol="/%s"}' % (vpath), str(free))

        if idx and not args.nos_vol:
            # indexed data per volume, and remaining quota where configured
            addbh("cpp_vol_bytes", "num bytes of data in volume")
            addh("cpp_vol_files", "gauge", "num files in volume")
            addbh("cpp_vol_free_bytes", "free space (vmaxb) in volume")
            addh("cpp_vol_free_files", "gauge", "free space (vmaxn) in volume")
            tnbytes = 0
            tnfiles = 0

            volsizes = []
            try:
                # (nbytes, nfiles) per volume, in the same order as allvols
                ptops = [x.realpath for _, x in allvols]
                x = self.hsrv.broker.ask("up2k.get_volsizes", ptops)
                volsizes = x.get()
            except Exception as ex:
                cli.log("tx_stats get_volsizes: {!r}".format(ex), 3)

            for (vpath, vol), (nbytes, nfiles) in zip(allvols, volsizes):
                tnbytes += nbytes
                tnfiles += nfiles
                addv('cpp_vol_bytes{vol="/%s"}' % (vpath), str(nbytes))
                addv('cpp_vol_files{vol="/%s"}' % (vpath), str(nfiles))

                # vmaxb/vmaxn are max-bytes/max-files quotas (volflags);
                # report remaining headroom when either is set
                if vol.flags.get("vmaxb") or vol.flags.get("vmaxn"):

                    zi = unhumanize(vol.flags.get("vmaxb") or "0")
                    if zi:
                        v = str(zi - nbytes)
                        addv('cpp_vol_free_bytes{vol="/%s"}' % (vpath), v)

                    zi = unhumanize(vol.flags.get("vmaxn") or "0")
                    if zi:
                        v = str(zi - nfiles)
                        addv('cpp_vol_free_files{vol="/%s"}' % (vpath), v)

            if volsizes:
                addv('cpp_vol_bytes{vol="total"}', str(tnbytes))
                addv('cpp_vol_files{vol="total"}', str(tnfiles))

        if idx and not args.nos_dup:
            # duplicate-file stats straight from each volume's sqlite db
            addbh("cpp_dupe_bytes", "num dupe bytes in volume")
            addh("cpp_dupe_files", "gauge", "num dupe files in volume")
            tnbytes = 0
            tnfiles = 0
            for vpath, vol in allvols:
                cur = idx.get_cur(vol.realpath)
                if not cur:
                    continue

                nbytes = 0
                nfiles = 0
                # count extra copies per hash (w); c = copies beyond the first
                q = "select sz, count(*)-1 c from up group by w having c"
                for sz, c in cur.execute(q):
                    nbytes += sz * c
                    nfiles += c

                tnbytes += nbytes
                tnfiles += nfiles
                addv('cpp_dupe_bytes{vol="/%s"}' % (vpath), str(nbytes))
                addv('cpp_dupe_files{vol="/%s"}' % (vpath), str(nfiles))

            addv('cpp_dupe_bytes{vol="total"}', str(tnbytes))
            addv('cpp_dupe_files{vol="total"}', str(tnfiles))

        if not args.nos_unf:
            # unfinished/incoming uploads, as reported by up2k (json keyed on ptop)
            addbh("cpp_unf_bytes", "incoming/unfinished uploads (num bytes)")
            addh("cpp_unf_files", "gauge", "incoming/unfinished uploads (num files)")
            tnbytes = 0
            tnfiles = 0
            try:
                x = self.hsrv.broker.ask("up2k.get_unfinished")
                xs = x.get()
                xj = json.loads(xs)
                for ptop, (nbytes, nfiles) in xj.items():
                    tnbytes += nbytes
                    tnfiles += nfiles
                    # map the filesystem path back to its volume for labeling
                    vol = next((x[1] for x in allvols if x[1].realpath == ptop), None)
                    if not vol:
                        t = "tx_stats get_unfinished: could not map {}"
                        cli.log(t.format(ptop), 3)
                        continue

                    addv('cpp_unf_bytes{vol="/%s"}' % (vol.vpath), str(nbytes))
                    addv('cpp_unf_files{vol="/%s"}' % (vol.vpath), str(nfiles))

                addv('cpp_unf_bytes{vol="total"}', str(tnbytes))
                addv('cpp_unf_files{vol="total"}', str(tnfiles))

            except Exception as ex:
                cli.log("tx_stats get_unfinished: {!r}".format(ex), 3)

        # openmetrics requires the EOF marker as the final line
        ret.append("# EOF")

        mime = "application/openmetrics-text; version=1.0.0; charset=utf-8"
        cli.reply("\n".join(ret).encode("utf-8"), mime=mime)
        return True
| @@ -1,28 +1,44 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| import argparse | ||||
| import json | ||||
| import os | ||||
| import shutil | ||||
| import subprocess as sp | ||||
| import sys | ||||
|  | ||||
| from .__init__ import PY2, WINDOWS, unicode | ||||
| from .util import fsenc, fsdec, uncyg, runcmd, REKOBO_LKEY | ||||
| from .__init__ import EXE, PY2, WINDOWS, E, unicode | ||||
| from .bos import bos | ||||
| from .util import ( | ||||
|     FFMPEG_URL, | ||||
|     REKOBO_LKEY, | ||||
|     fsenc, | ||||
|     min_ex, | ||||
|     pybin, | ||||
|     retchk, | ||||
|     runcmd, | ||||
|     sfsenc, | ||||
|     uncyg, | ||||
| ) | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Union | ||||
|  | ||||
|     from .util import RootLogger | ||||
|  | ||||
|  | ||||
| def have_ff(cmd): | ||||
| def have_ff(scmd: str) -> bool: | ||||
|     if PY2: | ||||
|         print("# checking {}".format(cmd)) | ||||
|         cmd = (cmd + " -version").encode("ascii").split(b" ") | ||||
|         print("# checking {}".format(scmd)) | ||||
|         acmd = (scmd + " -version").encode("ascii").split(b" ") | ||||
|         try: | ||||
|             sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE).communicate() | ||||
|             sp.Popen(acmd, stdout=sp.PIPE, stderr=sp.PIPE).communicate() | ||||
|             return True | ||||
|         except: | ||||
|             return False | ||||
|     else: | ||||
|         return bool(shutil.which(cmd)) | ||||
|         return bool(shutil.which(scmd)) | ||||
|  | ||||
|  | ||||
| HAVE_FFMPEG = have_ff("ffmpeg") | ||||
| @@ -30,13 +46,16 @@ HAVE_FFPROBE = have_ff("ffprobe") | ||||
|  | ||||
|  | ||||
| class MParser(object): | ||||
|     def __init__(self, cmdline): | ||||
|     def __init__(self, cmdline: str) -> None: | ||||
|         self.tag, args = cmdline.split("=", 1) | ||||
|         self.tags = self.tag.split(",") | ||||
|  | ||||
|         self.timeout = 30 | ||||
|         self.timeout = 60 | ||||
|         self.force = False | ||||
|         self.kill = "t"  # tree; all children recursively | ||||
|         self.capture = 3  # outputs to consume | ||||
|         self.audio = "y" | ||||
|         self.pri = 0  # priority; higher = later | ||||
|         self.ext = [] | ||||
|  | ||||
|         while True: | ||||
| @@ -58,6 +77,14 @@ class MParser(object): | ||||
|                 self.audio = arg[1:]  # [r]equire [n]ot [d]ontcare | ||||
|                 continue | ||||
|  | ||||
|             if arg.startswith("k"): | ||||
|                 self.kill = arg[1:]  # [t]ree [m]ain [n]one | ||||
|                 continue | ||||
|  | ||||
|             if arg.startswith("c"): | ||||
|                 self.capture = int(arg[1:])  # 0=none 1=stdout 2=stderr 3=both | ||||
|                 continue | ||||
|  | ||||
|             if arg == "f": | ||||
|                 self.force = True | ||||
|                 continue | ||||
| @@ -70,10 +97,16 @@ class MParser(object): | ||||
|                 self.ext.append(arg[1:]) | ||||
|                 continue | ||||
|  | ||||
|             if arg.startswith("p"): | ||||
|                 self.pri = int(arg[1:] or "1") | ||||
|                 continue | ||||
|  | ||||
|             raise Exception() | ||||
|  | ||||
|  | ||||
| def ffprobe(abspath, timeout=10): | ||||
| def ffprobe( | ||||
|     abspath: str, timeout: int = 60 | ||||
| ) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]: | ||||
|     cmd = [ | ||||
|         b"ffprobe", | ||||
|         b"-hide_banner", | ||||
| @@ -82,19 +115,20 @@ def ffprobe(abspath, timeout=10): | ||||
|         b"--", | ||||
|         fsenc(abspath), | ||||
|     ] | ||||
|     rc = runcmd(cmd, timeout=timeout) | ||||
|     return parse_ffprobe(rc[1]) | ||||
|     rc, so, se = runcmd(cmd, timeout=timeout) | ||||
|     retchk(rc, cmd, se) | ||||
|     return parse_ffprobe(so) | ||||
|  | ||||
|  | ||||
| def parse_ffprobe(txt): | ||||
| def parse_ffprobe(txt: str) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]: | ||||
|     """ffprobe -show_format -show_streams""" | ||||
|     streams = [] | ||||
|     fmt = {} | ||||
|     g = None | ||||
|     g = {} | ||||
|     for ln in [x.rstrip("\r") for x in txt.split("\n")]: | ||||
|         try: | ||||
|             k, v = ln.split("=", 1) | ||||
|             g[k] = v | ||||
|             sk, sv = ln.split("=", 1) | ||||
|             g[sk] = sv | ||||
|             continue | ||||
|         except: | ||||
|             pass | ||||
| @@ -108,8 +142,8 @@ def parse_ffprobe(txt): | ||||
|             fmt = g | ||||
|  | ||||
|     streams = [fmt] + streams | ||||
|     ret = {}  # processed | ||||
|     md = {}  # raw tags | ||||
|     ret: dict[str, Any] = {}  # processed | ||||
|     md: dict[str, list[Any]] = {}  # raw tags | ||||
|  | ||||
|     is_audio = fmt.get("format_name") in ["mp3", "ogg", "flac", "wav"] | ||||
|     if fmt.get("filename", "").split(".")[-1].lower() in ["m4a", "aac"]: | ||||
| @@ -157,52 +191,55 @@ def parse_ffprobe(txt): | ||||
|             ] | ||||
|  | ||||
|         if typ == "format": | ||||
|             kvm = [["duration", ".dur"], ["bit_rate", ".q"]] | ||||
|             kvm = [["duration", ".dur"], ["bit_rate", ".q"], ["format_name", "fmt"]] | ||||
|  | ||||
|         for sk, rk in kvm: | ||||
|             v = strm.get(sk) | ||||
|             if v is None: | ||||
|             v1 = strm.get(sk) | ||||
|             if v1 is None: | ||||
|                 continue | ||||
|  | ||||
|             if rk.startswith("."): | ||||
|                 try: | ||||
|                     v = float(v) | ||||
|                     zf = float(v1) | ||||
|                     v2 = ret.get(rk) | ||||
|                     if v2 is None or v > v2: | ||||
|                         ret[rk] = v | ||||
|                     if v2 is None or zf > v2: | ||||
|                         ret[rk] = zf | ||||
|                 except: | ||||
|                     # sqlite doesnt care but the code below does | ||||
|                     if v not in ["N/A"]: | ||||
|                         ret[rk] = v | ||||
|                     if v1 not in ["N/A"]: | ||||
|                         ret[rk] = v1 | ||||
|             else: | ||||
|                 ret[rk] = v | ||||
|                 ret[rk] = v1 | ||||
|  | ||||
|     if ret.get("vc") == "ansi":  # shellscript | ||||
|         return {}, {} | ||||
|  | ||||
|     for strm in streams: | ||||
|         for k, v in strm.items(): | ||||
|             if not k.startswith("TAG:"): | ||||
|         for sk, sv in strm.items(): | ||||
|             if not sk.startswith("TAG:"): | ||||
|                 continue | ||||
|  | ||||
|             k = k[4:].strip() | ||||
|             v = v.strip() | ||||
|             if k and v and k not in md: | ||||
|                 md[k] = [v] | ||||
|             sk = sk[4:].strip() | ||||
|             sv = sv.strip() | ||||
|             if sk and sv and sk not in md: | ||||
|                 md[sk] = [sv] | ||||
|  | ||||
|     for k in [".q", ".vq", ".aq"]: | ||||
|         if k in ret: | ||||
|             ret[k] /= 1000  # bit_rate=320000 | ||||
|     for sk in [".q", ".vq", ".aq"]: | ||||
|         if sk in ret: | ||||
|             ret[sk] /= 1000  # bit_rate=320000 | ||||
|  | ||||
|     for k in [".q", ".vq", ".aq", ".resw", ".resh"]: | ||||
|         if k in ret: | ||||
|             ret[k] = int(ret[k]) | ||||
|     for sk in [".q", ".vq", ".aq", ".resw", ".resh"]: | ||||
|         if sk in ret: | ||||
|             ret[sk] = int(ret[sk]) | ||||
|  | ||||
|     if ".fps" in ret: | ||||
|         fps = ret[".fps"] | ||||
|         if "/" in fps: | ||||
|             fa, fb = fps.split("/") | ||||
|             fps = int(fa) * 1.0 / int(fb) | ||||
|             try: | ||||
|                 fps = int(fa) * 1.0 / int(fb) | ||||
|             except: | ||||
|                 fps = 9001 | ||||
|  | ||||
|         if fps < 1000 and fmt.get("format_name") not in ["image2", "png_pipe"]: | ||||
|             ret[".fps"] = round(fps, 3) | ||||
| @@ -215,33 +252,34 @@ def parse_ffprobe(txt): | ||||
|             if ".q" in ret: | ||||
|                 del ret[".q"] | ||||
|  | ||||
|     if "fmt" in ret: | ||||
|         ret["fmt"] = ret["fmt"].split(",")[0] | ||||
|  | ||||
|     if ".resw" in ret and ".resh" in ret: | ||||
|         ret["res"] = "{}x{}".format(ret[".resw"], ret[".resh"]) | ||||
|  | ||||
|     ret = {k: [0, v] for k, v in ret.items()} | ||||
|     zd = {k: (0, v) for k, v in ret.items()} | ||||
|  | ||||
|     return ret, md | ||||
|     return zd, md | ||||
|  | ||||
|  | ||||
| class MTag(object): | ||||
|     def __init__(self, log_func, args): | ||||
|     def __init__(self, log_func: "RootLogger", args: argparse.Namespace) -> None: | ||||
|         self.log_func = log_func | ||||
|         self.args = args | ||||
|         self.usable = True | ||||
|         self.prefer_mt = not args.no_mtag_ff | ||||
|         self.backend = "ffprobe" if args.no_mutagen else "mutagen" | ||||
|         self.can_ffprobe = ( | ||||
|             HAVE_FFPROBE | ||||
|             and not args.no_mtag_ff | ||||
|             and (not WINDOWS or sys.version_info >= (3, 8)) | ||||
|         self.backend = ( | ||||
|             "ffprobe" if args.no_mutagen or (HAVE_FFPROBE and EXE) else "mutagen" | ||||
|         ) | ||||
|         self.can_ffprobe = HAVE_FFPROBE and not args.no_mtag_ff | ||||
|         mappings = args.mtm | ||||
|         or_ffprobe = " or FFprobe" | ||||
|  | ||||
|         if self.backend == "mutagen": | ||||
|             self.get = self.get_mutagen | ||||
|             try: | ||||
|                 import mutagen | ||||
|                 from mutagen import version  # noqa: F401 | ||||
|             except: | ||||
|                 self.log("could not load Mutagen, trying FFprobe instead", c=3) | ||||
|                 self.backend = "ffprobe" | ||||
| @@ -258,15 +296,15 @@ class MTag(object): | ||||
|                 msg = "found FFprobe but it was disabled by --no-mtag-ff" | ||||
|                 self.log(msg, c=3) | ||||
|  | ||||
|             elif WINDOWS and sys.version_info < (3, 8): | ||||
|                 or_ffprobe = " or python >= 3.8" | ||||
|                 msg = "found FFprobe but your python is too old; need 3.8 or newer" | ||||
|                 self.log(msg, c=1) | ||||
|  | ||||
|         if not self.usable: | ||||
|             if EXE: | ||||
|                 t = "copyparty.exe cannot use mutagen; need ffprobe.exe to read media tags: " | ||||
|                 self.log(t + FFMPEG_URL) | ||||
|                 return | ||||
|  | ||||
|             msg = "need Mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n" | ||||
|             pybin = os.path.basename(sys.executable) | ||||
|             self.log(msg.format(or_ffprobe, " " * 37, pybin), c=1) | ||||
|             pyname = os.path.basename(pybin) | ||||
|             self.log(msg.format(or_ffprobe, " " * 37, pyname), c=1) | ||||
|             return | ||||
|  | ||||
|         # https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html | ||||
| @@ -338,41 +376,49 @@ class MTag(object): | ||||
|         } | ||||
|         # self.get = self.compare | ||||
|  | ||||
|     def log(self, msg, c=0): | ||||
|     def log(self, msg: str, c: Union[int, str] = 0) -> None: | ||||
|         self.log_func("mtag", msg, c) | ||||
|  | ||||
|     def normalize_tags(self, ret, md): | ||||
|         for k, v in dict(md).items(): | ||||
|             if not v: | ||||
|     def normalize_tags( | ||||
|         self, parser_output: dict[str, tuple[int, Any]], md: dict[str, list[Any]] | ||||
|     ) -> dict[str, Union[str, float]]: | ||||
|         for sk, tv in dict(md).items(): | ||||
|             if not tv: | ||||
|                 continue | ||||
|  | ||||
|             k = k.lower().split("::")[0].strip() | ||||
|             mk = self.rmap.get(k) | ||||
|             if not mk: | ||||
|             sk = sk.lower().split("::")[0].strip() | ||||
|             key_mapping = self.rmap.get(sk) | ||||
|             if not key_mapping: | ||||
|                 continue | ||||
|  | ||||
|             pref, mk = mk | ||||
|             if mk not in ret or ret[mk][0] > pref: | ||||
|                 ret[mk] = [pref, v[0]] | ||||
|             priority, alias = key_mapping | ||||
|             if alias not in parser_output or parser_output[alias][0] > priority: | ||||
|                 parser_output[alias] = (priority, tv[0]) | ||||
|  | ||||
|         # take first value | ||||
|         ret = {k: unicode(v[1]).strip() for k, v in ret.items()} | ||||
|         # take first value (lowest priority / most preferred) | ||||
|         ret: dict[str, Union[str, float]] = { | ||||
|             sk: unicode(tv[1]).strip() for sk, tv in parser_output.items() | ||||
|         } | ||||
|  | ||||
|         # track 3/7 => track 3 | ||||
|         for k, v in ret.items(): | ||||
|             if k[0] == ".": | ||||
|                 v = v.split("/")[0].strip().lstrip("0") | ||||
|                 ret[k] = v or 0 | ||||
|         for sk, zv in ret.items(): | ||||
|             if sk[0] == ".": | ||||
|                 sv = str(zv).split("/")[0].strip().lstrip("0") | ||||
|                 ret[sk] = sv or 0 | ||||
|  | ||||
|         # normalize key notation to rkeobo | ||||
|         okey = ret.get("key") | ||||
|         if okey: | ||||
|             key = okey.replace(" ", "").replace("maj", "").replace("min", "m") | ||||
|             key = str(okey).replace(" ", "").replace("maj", "").replace("min", "m") | ||||
|             ret["key"] = REKOBO_LKEY.get(key.lower(), okey) | ||||
|  | ||||
|         if self.args.mtag_vv: | ||||
|             zl = " ".join("\033[36m{} \033[33m{}".format(k, v) for k, v in ret.items()) | ||||
|             self.log("norm: {}\033[0m".format(zl), "90") | ||||
|  | ||||
|         return ret | ||||
|  | ||||
|     def compare(self, abspath): | ||||
|     def compare(self, abspath: str) -> dict[str, Union[str, float]]: | ||||
|         if abspath.endswith(".au"): | ||||
|             return {} | ||||
|  | ||||
| @@ -410,21 +456,34 @@ class MTag(object): | ||||
|  | ||||
|         return r1 | ||||
|  | ||||
|     def get_mutagen(self, abspath): | ||||
|     def get_mutagen(self, abspath: str) -> dict[str, Union[str, float]]: | ||||
|         ret: dict[str, tuple[int, Any]] = {} | ||||
|  | ||||
|         if not bos.path.isfile(abspath): | ||||
|             return {} | ||||
|  | ||||
|         import mutagen | ||||
|         from mutagen import File | ||||
|  | ||||
|         try: | ||||
|             md = mutagen.File(fsenc(abspath), easy=True) | ||||
|             md = File(fsenc(abspath), easy=True) | ||||
|             assert md | ||||
|             if self.args.mtag_vv: | ||||
|                 for zd in (md.info.__dict__, dict(md.tags)): | ||||
|                     zl = ["\033[36m{} \033[33m{}".format(k, v) for k, v in zd.items()] | ||||
|                     self.log("mutagen: {}\033[0m".format(" ".join(zl)), "90") | ||||
|             if not md.info.length and not md.info.codec: | ||||
|                 raise Exception() | ||||
|         except Exception as ex: | ||||
|             if self.args.mtag_v: | ||||
|                 self.log("mutagen-err [{}] @ [{}]".format(ex, abspath), "90") | ||||
|  | ||||
|             return self.get_ffprobe(abspath) if self.can_ffprobe else {} | ||||
|  | ||||
|         sz = bos.path.getsize(abspath) | ||||
|         ret = {".q": [0, int((sz / md.info.length) / 128)]} | ||||
|         try: | ||||
|             ret[".q"] = (0, int((sz / md.info.length) / 128)) | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
|         for attr, k, norm in [ | ||||
|             ["codec", "ac", unicode], | ||||
| @@ -455,54 +514,83 @@ class MTag(object): | ||||
|             if k == "ac" and v.startswith("mp4a.40."): | ||||
|                 v = "aac" | ||||
|  | ||||
|             ret[k] = [0, norm(v)] | ||||
|             ret[k] = (0, norm(v)) | ||||
|  | ||||
|         return self.normalize_tags(ret, md) | ||||
|  | ||||
|     def get_ffprobe(self, abspath): | ||||
|     def get_ffprobe(self, abspath: str) -> dict[str, Union[str, float]]: | ||||
|         if not bos.path.isfile(abspath): | ||||
|             return {} | ||||
|  | ||||
|         ret, md = ffprobe(abspath) | ||||
|         ret, md = ffprobe(abspath, self.args.mtag_to) | ||||
|  | ||||
|         if self.args.mtag_vv: | ||||
|             for zd in (ret, dict(md)): | ||||
|                 zl = ["\033[36m{} \033[33m{}".format(k, v) for k, v in zd.items()] | ||||
|                 self.log("ffprobe: {}\033[0m".format(" ".join(zl)), "90") | ||||
|  | ||||
|         return self.normalize_tags(ret, md) | ||||
|  | ||||
|     def get_bin(self, parsers, abspath): | ||||
|     def get_bin( | ||||
|         self, parsers: dict[str, MParser], abspath: str, oth_tags: dict[str, Any] | ||||
|     ) -> dict[str, Any]: | ||||
|         if not bos.path.isfile(abspath): | ||||
|             return {} | ||||
|  | ||||
|         pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) | ||||
|         pypath = [str(pypath)] + [str(x) for x in sys.path if x] | ||||
|         pypath = str(os.pathsep.join(pypath)) | ||||
|         env = os.environ.copy() | ||||
|         env["PYTHONPATH"] = pypath | ||||
|         try: | ||||
|             if EXE: | ||||
|                 raise Exception() | ||||
|  | ||||
|         ret = {} | ||||
|         for tagname, parser in parsers.items(): | ||||
|             pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) | ||||
|             zsl = [str(pypath)] + [str(x) for x in sys.path if x] | ||||
|             pypath = str(os.pathsep.join(zsl)) | ||||
|             env["PYTHONPATH"] = pypath | ||||
|         except: | ||||
|             if not E.ox and not EXE: | ||||
|                 raise | ||||
|  | ||||
|         ret: dict[str, Any] = {} | ||||
|         for tagname, parser in sorted(parsers.items(), key=lambda x: (x[1].pri, x[0])): | ||||
|             try: | ||||
|                 cmd = [parser.bin, abspath] | ||||
|                 if parser.bin.endswith(".py"): | ||||
|                     cmd = [sys.executable] + cmd | ||||
|                     cmd = [pybin] + cmd | ||||
|  | ||||
|                 args = {"env": env, "timeout": parser.timeout} | ||||
|                 args = { | ||||
|                     "env": env, | ||||
|                     "timeout": parser.timeout, | ||||
|                     "kill": parser.kill, | ||||
|                     "capture": parser.capture, | ||||
|                 } | ||||
|  | ||||
|                 if parser.pri: | ||||
|                     zd = oth_tags.copy() | ||||
|                     zd.update(ret) | ||||
|                     args["sin"] = json.dumps(zd).encode("utf-8", "replace") | ||||
|  | ||||
|                 if WINDOWS: | ||||
|                     args["creationflags"] = 0x4000 | ||||
|                 else: | ||||
|                     cmd = ["nice"] + cmd | ||||
|  | ||||
|                 cmd = [fsenc(x) for x in cmd] | ||||
|                 v = sp.check_output(cmd, **args).strip() | ||||
|                 bcmd = [sfsenc(x) for x in cmd[:-1]] + [fsenc(cmd[-1])] | ||||
|                 rc, v, err = runcmd(bcmd, **args)  # type: ignore | ||||
|                 retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v) | ||||
|                 v = v.strip() | ||||
|                 if not v: | ||||
|                     continue | ||||
|  | ||||
|                 if "," not in tagname: | ||||
|                     ret[tagname] = v.decode("utf-8") | ||||
|                     ret[tagname] = v | ||||
|                 else: | ||||
|                     v = json.loads(v) | ||||
|                     zj = json.loads(v) | ||||
|                     for tag in tagname.split(","): | ||||
|                         if tag and tag in v: | ||||
|                             ret[tag] = v[tag] | ||||
|                         if tag and tag in zj: | ||||
|                             ret[tag] = zj[tag] | ||||
|             except: | ||||
|                 pass | ||||
|                 if self.args.mtag_v: | ||||
|                     t = "mtag error: tagname {}, parser {}, file {} => {}" | ||||
|                     self.log(t.format(tagname, parser.bin, abspath, min_ex())) | ||||
|  | ||||
|         return ret | ||||
|   | ||||
							
								
								
									
										395
									
								
								copyparty/multicast.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										395
									
								
								copyparty/multicast.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,395 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import socket | ||||
| import time | ||||
|  | ||||
| import ipaddress | ||||
| from ipaddress import ( | ||||
|     IPv4Address, | ||||
|     IPv4Network, | ||||
|     IPv6Address, | ||||
|     IPv6Network, | ||||
|     ip_address, | ||||
|     ip_network, | ||||
| ) | ||||
|  | ||||
| from .__init__ import MACOS, TYPE_CHECKING | ||||
| from .util import Daemon, Netdev, find_prefix, min_ex, spack | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .svchub import SvcHub | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Optional, Union | ||||
|  | ||||
| if not hasattr(socket, "IPPROTO_IPV6"): | ||||
|     setattr(socket, "IPPROTO_IPV6", 41) | ||||
|  | ||||
|  | ||||
class NoIPs(Exception):
    """no IPs available; see raising callers for context"""
|  | ||||
|  | ||||
class MC_Sck(object):
    """one instance per server ip; bundles the socket with its netdev info"""

    def __init__(
        self,
        sck: socket.socket,
        nd: Netdev,
        grp: str,
        ip: str,
        net: Union[IPv4Network, IPv6Network],
    ):
        is_v6 = ":" in ip
        self.sck = sck
        self.idx = nd.idx
        self.name = nd.name
        self.grp = grp
        self.mreq = b""  # membership request blob; empty until joined
        self.ip = ip
        self.net = net
        self.ips = {ip: net}
        self.v6 = is_v6
        self.have4 = not is_v6
        self.have6 = is_v6
|  | ||||
|  | ||||
class MCast(object):
    """
    generic multicast listener/announcer: creates one MC_Sck (via the Srv
    factory) per eligible NIC address, joins the multicast group on each,
    and maps incoming client IPs back to the socket that should respond
    """

    def __init__(
        self,
        hub: "SvcHub",
        Srv: type[MC_Sck],
        on: list[str],
        off: list[str],
        mc_grp_4: str,
        mc_grp_6: str,
        port: int,
        vinit: bool,
    ) -> None:
        """disable ipv%d by setting mc_grp_%d empty"""
        self.hub = hub
        self.Srv = Srv  # MC_Sck subclass to instantiate for each socket
        self.args = hub.args
        self.asrv = hub.asrv
        self.log_func = hub.log
        self.on = on  # nets/devices to include (empty = all)
        self.off = off  # nets/devices to exclude
        self.grp4 = mc_grp_4  # ipv4 multicast group addr ("" = ipv4 disabled)
        self.grp6 = mc_grp_6  # ipv6 multicast group addr ("" = ipv6 disabled)
        self.port = port
        self.vinit = vinit  # verbose-log each socket during setup

        self.srv: dict[socket.socket, MC_Sck] = {}  # listening sockets
        self.sips: set[str] = set()  # all listening ips (including failed attempts)
        self.ll_ok: set[str] = set()  # fallback linklocal IPv4 and IPv6 addresses
        self.b2srv: dict[bytes, MC_Sck] = {}  # binary-ip -> server socket
        self.b4: list[bytes] = []  # sorted list of binary-ips
        self.b6: list[bytes] = []  # sorted list of binary-ips
        self.cscache: dict[str, Optional[MC_Sck]] = {}  # client ip -> server cache

        self.running = True

    def log(self, msg: str, c: Union[int, str] = 0) -> None:
        """log under the "multicast" tag; c is the color/severity code"""
        self.log_func("multicast", msg, c)

    def create_servers(self) -> list[str]:
        """
        create and set up one multicast socket per selected NIC address;
        returns the list of IPs that were successfully bound
        """
        bound: list[str] = []
        netdevs = self.hub.tcpsrv.netdevs
        ips = [x[0] for x in self.hub.tcpsrv.bound]

        # expand wildcard binds into the concrete per-NIC addresses
        # ("::" also implies "0.0.0.0" on dual-stack binds)
        if "::" in ips:
            ips = [x for x in ips if x != "::"] + list(
                [x.split("/")[0] for x in netdevs if ":" in x]
            )
            ips.append("0.0.0.0")

        if "0.0.0.0" in ips:
            ips = [x for x in ips if x != "0.0.0.0"] + list(
                [x.split("/")[0] for x in netdevs if ":" not in x]
            )

        ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
        ips = find_prefix(ips, netdevs)  # ip -> "ip/prefix" netdev keys

        # expand each on/off arg into matching "ip/prefix" netdev keys;
        # an arg may be a subnet, a netdev index, or a netdev name
        on = self.on[:]
        off = self.off[:]
        for lst in (on, off):
            for av in list(lst):
                try:
                    arg_net = ip_network(av, False)
                except:
                    arg_net = None  # not a subnet; maybe an idx/name

                for sk, sv in netdevs.items():
                    if arg_net:
                        net_ip = ip_address(sk.split("/")[0])
                        if net_ip in arg_net and sk not in lst:
                            lst.append(sk)

                    if (av == str(sv.idx) or av == sv.name) and sk not in lst:
                        lst.append(sk)

        # allowlist wins over denylist
        if on:
            ips = [x for x in ips if x in on]
        elif off:
            ips = [x for x in ips if x not in off]

        # empty group addr = that ip-version is disabled
        if not self.grp4:
            ips = [x for x in ips if ":" in x]

        if not self.grp6:
            ips = [x for x in ips if ":" not in x]

        ips = list(set(ips))
        all_selected = ips[:]

        # discard non-linklocal ipv6
        ips = [x for x in ips if ":" not in x or x.startswith("fe80")]

        if not ips:
            raise NoIPs()

        for ip in ips:
            v6 = ":" in ip
            netdev = netdevs[ip]
            if not netdev.idx:
                # idx 0 means the OS picks the interface for the join
                t = "using INADDR_ANY for ip [{}], netdev [{}]"
                if not self.srv and ip not in ["::", "0.0.0.0"]:
                    self.log(t.format(ip, netdev), 3)

            ipv = socket.AF_INET6 if v6 else socket.AF_INET
            sck = socket.socket(ipv, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
            sck.settimeout(None)
            sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            try:
                # best-effort; not available on all platforms
                sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
            except:
                pass

            # most ipv6 clients expect multicast on linklocal ip only;
            # add a/aaaa records for the other nic IPs
            other_ips: set[str] = set()
            if v6:
                for nd in netdevs.values():
                    if nd.idx == netdev.idx and nd.ip in all_selected and ":" in nd.ip:
                        other_ips.add(nd.ip)

            net = ipaddress.ip_network(ip, False)
            ip = ip.split("/")[0]
            srv = self.Srv(sck, netdev, self.grp6 if ":" in ip else self.grp4, ip, net)
            for oth_ip in other_ips:
                srv.ips[oth_ip.split("/")[0]] = ipaddress.ip_network(oth_ip, False)

            # gvfs breaks if a linklocal ip appears in a dns reply
            ll = {
                k: v
                for k, v in srv.ips.items()
                if k.startswith("169.254") or k.startswith("fe80")
            }
            rt = {k: v for k, v in srv.ips.items() if k not in ll}

            # remember the linklocal addrs we may have to fall back to
            if self.args.ll or not rt:
                self.ll_ok.update(list(ll))

            # unless --ll, prefer routable addrs and drop linklocal ones
            if not self.args.ll:
                srv.ips = rt or ll

            if not srv.ips:
                self.log("no IPs on {}; skipping [{}]".format(netdev, ip), 3)
                continue

            try:
                self.setup_socket(srv)
                self.srv[sck] = srv
                bound.append(ip)
            except:
                t = "announce failed on {} [{}]:\n{}"
                self.log(t.format(netdev, ip, min_ex()), 3)

        if self.args.zm_msub:
            # share each socket's ip/net with every other socket
            # on the same netdev (multiple subnets per nic)
            for s1 in self.srv.values():
                for s2 in self.srv.values():
                    if s1.idx != s2.idx:
                        continue

                    if s1.ip not in s2.ips:
                        s2.ips[s1.ip] = s1.net

        if self.args.zm_mnic:
            # share ips between sockets whose subnets match
            # (multiple nics on the same subnet)
            for s1 in self.srv.values():
                for s2 in self.srv.values():
                    for ip1, net1 in list(s1.ips.items()):
                        for ip2, net2 in list(s2.ips.items()):
                            if net1 == net2 and ip1 != ip2:
                                s1.ips[ip2] = net2

        self.sips = set([x.split("/")[0] for x in all_selected])
        for srv in self.srv.values():
            assert srv.ip in self.sips

        Daemon(self.hopper, "mc-hop")
        return bound

    def setup_socket(self, srv: MC_Sck) -> None:
        """bind the socket, build its membership request, and join the group"""
        sck = srv.sck
        if srv.v6:
            if self.vinit:
                zsl = list(srv.ips.keys())
                self.log("v6({}) idx({}) {}".format(srv.ip, srv.idx, zsl), 6)

            # register all of this socket's ips for client-ip reverse lookup
            for ip in srv.ips:
                bip = socket.inet_pton(socket.AF_INET6, ip)
                self.b2srv[bip] = srv
                self.b6.append(bip)

            grp = self.grp6 if srv.idx else ""
            try:
                if MACOS:
                    raise Exception()

                # prefer binding the group addr itself (filters noise)
                sck.bind((grp, self.port, 0, srv.idx))
            except:
                # macos (and some platforms) only accept the wildcard
                sck.bind(("", self.port, 0, srv.idx))

            # ipv6_mreq: 16-byte group addr + native-endian interface index
            bgrp = socket.inet_pton(socket.AF_INET6, self.grp6)
            dev = spack(b"@I", srv.idx)
            srv.mreq = bgrp + dev
            if srv.idx != socket.INADDR_ANY:
                sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, dev)

            try:
                sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_HOPS, 255)
                sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_LOOP, 1)
            except:
                # macos
                t = "failed to set IPv6 TTL/LOOP; announcements may not survive multiple switches/routers"
                self.log(t, 3)
        else:
            if self.vinit:
                self.log("v4({}) idx({})".format(srv.ip, srv.idx), 6)

            bip = socket.inet_aton(srv.ip)
            self.b2srv[bip] = srv
            self.b4.append(bip)

            grp = self.grp4 if srv.idx else ""
            try:
                if MACOS:
                    raise Exception()

                sck.bind((grp, self.port))
            except:
                sck.bind(("", self.port))

            # ip_mreq: 4-byte group addr + 4-byte local interface addr
            bgrp = socket.inet_aton(self.grp4)
            dev = (
                spack(b"=I", socket.INADDR_ANY)
                if srv.idx == socket.INADDR_ANY
                else socket.inet_aton(srv.ip)
            )
            srv.mreq = bgrp + dev
            if srv.idx != socket.INADDR_ANY:
                sck.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF, dev)

            try:
                sck.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 255)
                sck.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
            except:
                # probably can't happen but dontcare if it does
                t = "failed to set IPv4 TTL/LOOP; announcements may not survive multiple switches/routers"
                self.log(t, 3)

        # a successful leave means a stale membership existed somehow;
        # give the kernel/switch a moment before rejoining
        if self.hop(srv, False):
            self.log("igmp was already joined?? chilling for a sec", 3)
            time.sleep(1.2)

        self.hop(srv, True)
        # keep the binary-ip lists sorted (descending) for map_client
        self.b4.sort(reverse=True)
        self.b6.sort(reverse=True)

    def hop(self, srv: MC_Sck, on: bool) -> bool:
        """rejoin to keepalive on routers/switches without igmp-snooping"""
        sck = srv.sck
        req = srv.mreq
        if ":" in srv.ip:
            if not on:
                try:
                    sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_LEAVE_GROUP, req)
                    return True
                except:
                    # not currently joined; nothing to leave
                    return False
            else:
                sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, req)
        else:
            if not on:
                try:
                    sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, req)
                    return True
                except:
                    return False
            else:
                # t = "joining {} from ip {} idx {} with mreq {}"
                # self.log(t.format(srv.grp, srv.ip, srv.idx, repr(srv.mreq)), 6)
                sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, req)

        return True

    def hopper(self):
        """background thread: periodically leave+rejoin all groups every --mc-hop sec"""
        while self.args.mc_hop and self.running:
            time.sleep(self.args.mc_hop)
            if not self.running:
                return

            for srv in self.srv.values():
                self.hop(srv, False)

            # linux does leaves/joins twice with 0.2~1.05s spacing
            time.sleep(1.2)
            if not self.running:
                return

            for srv in self.srv.values():
                self.hop(srv, True)

    def map_client(self, cip: str) -> Optional[MC_Sck]:
        """return the server socket whose subnet contains client ip cip (cached)"""
        try:
            return self.cscache[cip]
        except:
            pass

        ret: Optional[MC_Sck] = None
        v6 = ":" in cip
        ci = IPv6Address(cip) if v6 else IPv4Address(cip)
        # b4/b6 are sorted descending (see setup_socket); first subnet hit wins
        for x in self.b6 if v6 else self.b4:
            srv = self.b2srv[x]
            if any([x for x in srv.ips.values() if ci in x]):
                ret = srv
                break

        if not ret and cip in ("127.0.0.1", "::1"):
            # just give it something
            ret = list(self.srv.values())[0]

        if not ret and cip.startswith("169.254"):
            # idk how to map LL IPv4 msgs to nics;
            # just pick one and hope for the best
            lls = (
                x
                for x in self.srv.values()
                if next((y for y in x.ips if y in self.ll_ok), None)
            )
            ret = next(lls, None)

        if ret:
            t = "new client on {} ({}): {}"
            self.log(t.format(ret.name, ret.net, cip), 6)
        else:
            t = "could not map client {} to known subnet; maybe forwarded from another network?"
            self.log(t.format(cip), 3)

        if len(self.cscache) > 9000:
            # bound the cache; full reset is simpler than eviction
            self.cscache = {}

        self.cscache[cip] = ret
        return ret
							
								
								
									
										145
									
								
								copyparty/pwhash.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										145
									
								
								copyparty/pwhash.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,145 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import argparse | ||||
| import base64 | ||||
| import hashlib | ||||
| import sys | ||||
| import threading | ||||
|  | ||||
| from .__init__ import unicode | ||||
|  | ||||
|  | ||||
class PWHash(object):
    """
    hashes account passwords as configured by --ah-alg, which is
    "algo[,param1,param2,...]" with algo one of: argon2 scrypt sha2 none
    """

    def __init__(self, args: argparse.Namespace):
        """
        args must provide ah_alg (algorithm spec) and ah_salt (global salt);
        raises if the algorithm name is unknown
        """
        self.args = args

        # BUGFIX: this used to be `alg, ac = args.ah_alg.split(",")` which
        # made `ac` a plain string for one param (so self.ac[0] read only
        # its first CHARACTER; "sha2,100" ran 1 iteration instead of 100)
        # and raised for two or more params (silently discarding them all);
        # parse the params into a proper list instead
        zsl = args.ah_alg.split(",")
        alg = zsl[0]
        ac = zsl[1:]  # optional per-algorithm tuning params, as strings

        if alg == "none":
            alg = ""

        self.alg = alg
        self.ac = ac
        if not alg:
            # hashing disabled; hash() becomes the identity function
            self.on = False
            self.hash = unicode
            return

        self.on = True
        self.salt = args.ah_salt.encode("utf-8")
        self.cache: dict[str, str] = {}  # plaintext -> hash memo
        self.mutex = threading.Lock()
        self.hash = self._cache_hash

        if alg == "sha2":
            self._hash = self._gen_sha2
        elif alg == "scrypt":
            self._hash = self._gen_scrypt
        elif alg == "argon2":
            self._hash = self._gen_argon2
        else:
            t = "unsupported password hashing algorithm [{}], must be one of these: argon2 scrypt sha2 none"
            raise Exception(t.format(alg))

    def _cache_hash(self, plain: str) -> str:
        """hash `plain`, memoizing results; raises if len(plain) > 255"""
        with self.mutex:
            try:
                return self.cache[plain]
            except:
                pass

            if not plain:
                return ""

            if len(plain) > 255:
                raise Exception("password too long")

            if len(self.cache) > 9000:
                # bound the memo; full reset is simpler than eviction
                self.cache = {}

            ret = self._hash(plain)
            self.cache[plain] = ret
            return ret

    def _gen_sha2(self, plain: str) -> str:
        """iterated salted sha512; params: [iterations] (default 424242)"""
        its = int(self.ac[0]) if self.ac else 424242
        bplain = plain.encode("utf-8")
        ret = b"\n"
        for _ in range(its):
            ret = hashlib.sha512(self.salt + bplain + ret).digest()

        return "+" + base64.urlsafe_b64encode(ret[:24]).decode("utf-8")

    def _gen_scrypt(self, plain: str) -> str:
        """iterated scrypt; params: [log2-cost, iterations, blocksize, parallelism]"""
        cost = 2 << 13
        its = 2
        blksz = 8
        para = 4
        try:
            cost = 2 << int(self.ac[0])
            its = int(self.ac[1])
            blksz = int(self.ac[2])
            para = int(self.ac[3])
        except:
            pass  # keep defaults for any missing/invalid param

        ret = plain.encode("utf-8")
        for _ in range(its):
            ret = hashlib.scrypt(ret, salt=self.salt, n=cost, r=blksz, p=para, dklen=24)

        return "+" + base64.urlsafe_b64encode(ret).decode("utf-8")

    def _gen_argon2(self, plain: str) -> str:
        """argon2id via argon2-cffi; params: [time-cost, mem-cost-MiB, parallelism, version]"""
        from argon2.low_level import Type as ArgonType
        from argon2.low_level import hash_secret

        time_cost = 3
        mem_cost = 256
        parallelism = 4
        version = 19
        try:
            time_cost = int(self.ac[0])
            mem_cost = int(self.ac[1])
            parallelism = int(self.ac[2])
            version = int(self.ac[3])
        except:
            pass  # keep defaults for any missing/invalid param

        bplain = plain.encode("utf-8")

        bret = hash_secret(
            secret=bplain,
            salt=self.salt,
            time_cost=time_cost,
            memory_cost=mem_cost * 1024,
            parallelism=parallelism,
            hash_len=24,
            type=ArgonType.ID,
            version=version,
        )
        # keep only the raw hash field, converted to urlsafe-b64
        ret = bret.split(b"$")[-1].decode("utf-8")
        return "+" + ret.replace("/", "_").replace("+", "-")

    def stdin(self) -> None:
        """hash passwords from stdin, one per line, until an empty line"""
        while True:
            ln = sys.stdin.readline().strip()
            if not ln:
                break
            print(self.hash(ln))

    def cli(self) -> None:
        """interactively prompt for a password (with optional confirmation) and print its hash"""
        import getpass

        while True:
            p1 = getpass.getpass("password> ")
            p2 = getpass.getpass("again or just hit ENTER> ")
            if p2 and p1 != p2:
                print("\033[31minputs don't match; try again\033[0m", file=sys.stderr)
                continue
            print(self.hash(p1))
            print()
							
								
								
									
										0
									
								
								copyparty/res/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										0
									
								
								copyparty/res/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
								
								
									
										423
									
								
								copyparty/smbd.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										423
									
								
								copyparty/smbd.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,423 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| import inspect | ||||
| import logging | ||||
| import os | ||||
| import random | ||||
| import stat | ||||
| import sys | ||||
| import time | ||||
| from types import SimpleNamespace | ||||
|  | ||||
| from .__init__ import ANYWIN, EXE, TYPE_CHECKING | ||||
| from .authsrv import LEELOO_DALLAS, VFS | ||||
| from .bos import bos | ||||
| from .util import Daemon, min_ex, pybin, runhook | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Union | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .svchub import SvcHub | ||||
|  | ||||
|  | ||||
# module-level logger for the SMB shim; impacket logs through stdlib
# logging, so sharing the hierarchy keeps its output controllable too
lg = logging.getLogger("smb")
debug = lg.debug
info = lg.info
warning = lg.warning
error = lg.error
|  | ||||
|  | ||||
| class SMB(object): | ||||
    def __init__(self, hub: "SvcHub") -> None:
        """
        start an impacket-based SMB server bridged onto copyparty's VFS;
        requires the third-party `impacket` package at runtime
        """
        self.hub = hub
        self.args = hub.args
        self.asrv = hub.asrv
        self.log = hub.log
        # fd -> (open-time, vpath) for files opened with write intent
        self.files: dict[int, tuple[float, str]] = {}
        # --smba disables per-account permission checks (everyone = admin)
        self.noacc = self.args.smba
        self.accs = not self.args.smba

        # -smbvv / -smbvvv bump impacket's / our own log verbosity
        lg.setLevel(logging.DEBUG if self.args.smbvvv else logging.INFO)
        for x in ["impacket", "impacket.smbserver"]:
            lgr = logging.getLogger(x)
            lgr.setLevel(logging.DEBUG if self.args.smbvv else logging.INFO)

        try:
            from impacket import smbserver
            from impacket.ntlm import compute_lmhash, compute_nthash
        except ImportError:
            if EXE:
                print("copyparty.exe cannot do SMB")
                sys.exit(1)

            m = "\033[36m\n{}\033[31m\n\nERROR: need 'impacket'; please run this command:\033[33m\n {} -m pip install --user impacket\n\033[0m"
            print(m.format(min_ex(), pybin))
            sys.exit(1)

        # patch vfs into smbserver.os
        # (impacket does all filesystem access through its module-level
        # `os` reference, so replacing it reroutes every call into the VFS)
        fos = SimpleNamespace()
        for k in os.__dict__:
            try:
                setattr(fos, k, getattr(os, k))
            except:
                pass
        fos.close = self._close
        fos.listdir = self._listdir
        fos.mkdir = self._mkdir
        fos.open = self._open
        fos.remove = self._unlink
        fos.rename = self._rename
        fos.stat = self._stat
        fos.unlink = self._unlink
        fos.utime = self._utime
        smbserver.os = fos

        # ...and smbserver.os.path
        fop = SimpleNamespace()
        for k in os.path.__dict__:
            try:
                setattr(fop, k, getattr(os.path, k))
            except:
                pass
        fop.exists = self._p_exists
        fop.getsize = self._p_getsize
        fop.isdir = self._p_isdir
        smbserver.os.path = fop

        if not self.args.smb_nwa_2:
            fop.join = self._p_join

        # other patches
        smbserver.isInFileJail = self._is_in_file_jail
        self._disarm()

        # impacket only does IPv4; pick the first v4 addr we were told to bind
        ip = next((x for x in self.args.i if ":" not in x), None)
        if not ip:
            self.log("smb", "IPv6 not supported for SMB; listening on 0.0.0.0", 3)
            ip = "0.0.0.0"

        port = int(self.args.smb_port)
        srv = smbserver.SimpleSMBServer(listenAddress=ip, listenPort=port)
        try:
            if self.accs:
                # only newer impacket has setAuthCallback
                srv.setAuthCallback(self._auth_cb)
        except:
            self.accs = False
            self.noacc = True
            t = "impacket too old; access permissions will not work! all accounts are admin!"
            self.log("smb", t, 1)

        ro = "no" if self.args.smbw else "yes"  # (does nothing)
        srv.addShare("A", "/", readOnly=ro)
        srv.setSMB2Support(not self.args.smb1)

        # register each account twice: (user, password) and (password, "k")
        # so clients can also log in with just the password as username
        for name, pwd in self.asrv.acct.items():
            for u, p in ((name, pwd), (pwd, "k")):
                lmhash = compute_lmhash(p)
                nthash = compute_nthash(p)
                srv.addCredential(u, 0, lmhash, nthash)

        # random 8-byte NTLM challenge, hex-encoded
        chi = [random.randint(0, 255) for x in range(8)]
        cha = "".join(["{:02x}".format(x) for x in chi])
        srv.setSMBChallenge(cha)

        self.srv = srv
        self.stop = srv.stop
        self.log("smb", "listening @ {}:{}".format(ip, port))
|  | ||||
|     def nlog(self, msg: str, c: Union[int, str] = 0) -> None: | ||||
|         self.log("smb", msg, c) | ||||
|  | ||||
|     def start(self) -> None: | ||||
|         Daemon(self.srv.start) | ||||
|  | ||||
|     def _auth_cb(self, *a, **ka): | ||||
|         debug("auth-result: %s %s", a, ka) | ||||
|         conndata = ka["connData"] | ||||
|         auth_ok = conndata["Authenticated"] | ||||
|         uname = ka["user_name"] if auth_ok else "*" | ||||
|         uname = self.asrv.iacct.get(uname, uname) or "*" | ||||
|         oldname = conndata.get("partygoer", "*") or "*" | ||||
|         cli_ip = conndata["ClientIP"] | ||||
|         cli_hn = ka["host_name"] | ||||
|         if uname != "*": | ||||
|             conndata["partygoer"] = uname | ||||
|             info("client %s [%s] authed as %s", cli_ip, cli_hn, uname) | ||||
|         elif oldname != "*": | ||||
|             info("client %s [%s] keeping old auth as %s", cli_ip, cli_hn, oldname) | ||||
|         elif auth_ok: | ||||
|             info("client %s [%s] authed as [*] (anon)", cli_ip, cli_hn) | ||||
|         else: | ||||
|             info("client %s [%s] rejected", cli_ip, cli_hn) | ||||
|  | ||||
|     def _uname(self) -> str: | ||||
|         if self.noacc: | ||||
|             return LEELOO_DALLAS | ||||
|  | ||||
|         try: | ||||
|             # you found it! my single worst bit of code so far | ||||
|             # (if you can think of a better way to track users through impacket i'm all ears) | ||||
|             cf0 = inspect.currentframe().f_back.f_back | ||||
|             cf = cf0.f_back | ||||
|             for n in range(3): | ||||
|                 cl = cf.f_locals | ||||
|                 if "connData" in cl: | ||||
|                     return cl["connData"]["partygoer"] | ||||
|                 cf = cf.f_back | ||||
|         except: | ||||
|             warning( | ||||
|                 "nyoron... %s <<-- %s <<-- %s <<-- %s", | ||||
|                 cf0.f_code.co_name, | ||||
|                 cf0.f_back.f_code.co_name, | ||||
|                 cf0.f_back.f_back.f_code.co_name, | ||||
|                 cf0.f_back.f_back.f_back.f_code.co_name, | ||||
|             ) | ||||
|             return "*" | ||||
|  | ||||
|     def _v2a( | ||||
|         self, caller: str, vpath: str, *a: Any, uname="", perms=None | ||||
|     ) -> tuple[VFS, str]: | ||||
|         vpath = vpath.replace("\\", "/").lstrip("/") | ||||
|         # cf = inspect.currentframe().f_back | ||||
|         # c1 = cf.f_back.f_code.co_name | ||||
|         # c2 = cf.f_code.co_name | ||||
|         if not uname: | ||||
|             uname = self._uname() | ||||
|         if not perms: | ||||
|             perms = [True, True] | ||||
|  | ||||
|         debug('%s("%s", %s) %s @%s\033[K\033[0m', caller, vpath, str(a), perms, uname) | ||||
|         vfs, rem = self.asrv.vfs.get(vpath, uname, *perms) | ||||
|         return vfs, vfs.canonical(rem) | ||||
|  | ||||
|     def _listdir(self, vpath: str, *a: Any, **ka: Any) -> list[str]: | ||||
|         vpath = vpath.replace("\\", "/").lstrip("/") | ||||
|         # caller = inspect.currentframe().f_back.f_code.co_name | ||||
|         uname = self._uname() | ||||
|         # debug('listdir("%s", %s) @%s\033[K\033[0m', vpath, str(a), uname) | ||||
|         vfs, rem = self.asrv.vfs.get(vpath, uname, False, False) | ||||
|         _, vfs_ls, vfs_virt = vfs.ls( | ||||
|             rem, uname, not self.args.no_scandir, [[False, False]] | ||||
|         ) | ||||
|         dirs = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)] | ||||
|         fils = [x[0] for x in vfs_ls if x[0] not in dirs] | ||||
|         ls = list(vfs_virt.keys()) + dirs + fils | ||||
|         if self.args.smb_nwa_1: | ||||
|             return ls | ||||
|  | ||||
|         # clients crash somewhere around 65760 byte | ||||
|         ret = [] | ||||
|         sz = 112 * 2  # ['.', '..'] | ||||
|         for n, fn in enumerate(ls): | ||||
|             if sz >= 64000: | ||||
|                 t = "listing only %d of %d files (%d byte) in /%s; see impacket#1433" | ||||
|                 warning(t, n, len(ls), sz, vpath) | ||||
|                 break | ||||
|  | ||||
|             nsz = len(fn.encode("utf-16", "replace")) | ||||
|             nsz = ((nsz + 7) // 8) * 8 | ||||
|             sz += 104 + nsz | ||||
|             ret.append(fn) | ||||
|  | ||||
|         return ret | ||||
|  | ||||
    def _open(
        self, vpath: str, flags: int, *a: Any, chmod: int = 0o777, **ka: Any
    ) -> Any:
        """
        open a file through the VFS; anything other than a plain
        read-only open counts as write intent and is permission-checked.
        returns the os-level fd, which is also remembered in self.files
        (for writes) so _close can hand the file to the indexer
        """
        f_ro = os.O_RDONLY
        if ANYWIN:
            f_ro |= os.O_BINARY

        wr = flags != f_ro
        if wr and not self.args.smbw:
            yeet("blocked write (no --smbw): " + vpath)

        uname = self._uname()
        vfs, ap = self._v2a("open", vpath, *a, uname=uname, perms=[True, wr])
        if wr:
            if not vfs.axs.uwrite:
                t = "blocked write (no-write-acc %s): /%s @%s"
                yeet(t % (vfs.axs.uwrite, vpath, uname))

            # xbu: hook to run before upload; may deny the write
            # NOTE(review): "1.7.6.2" looks like a placeholder client-ip -- confirm
            xbu = vfs.flags.get("xbu")
            if xbu and not runhook(
                self.nlog, xbu, ap, vpath, "", "", 0, 0, "1.7.6.2", 0, ""
            ):
                yeet("blocked by xbu server config: " + vpath)

        ret = bos.open(ap, flags, *a, mode=chmod, **ka)
        if wr:
            now = time.time()
            nf = len(self.files)
            if nf > 9000:
                # bound the fd map: drop entries older than the midpoint
                # between the oldest tracked fd and now
                oldest = min([x[0] for x in self.files.values()])
                cutoff = oldest + (now - oldest) / 2
                self.files = {k: v for k, v in self.files.items() if v[0] > cutoff}
                info("was tracking %d files, now %d", nf, len(self.files))

            vpath = vpath.replace("\\", "/").lstrip("/")
            self.files[ret] = (now, vpath)

        return ret
|  | ||||
    def _close(self, fd: int) -> None:
        """
        close fd; if it was opened with write intent (tracked in
        self.files), hand the finished file to up2k for hashing/indexing
        """
        os.close(fd)
        if fd not in self.files:
            # read-only fd; nothing to index
            return

        _, vp = self.files.pop(fd)
        vp, fn = os.path.split(vp)
        vfs, rem = self.hub.asrv.vfs.get(vp, self._uname(), False, True)
        vfs, rem = vfs.get_dbv(rem)
        # NOTE(review): "1.7.6.2" looks like a placeholder client-ip -- confirm
        self.hub.up2k.hash_file(
            vfs.realpath,
            vfs.vpath,
            vfs.flags,
            rem,
            fn,
            "1.7.6.2",
            time.time(),
            "",
        )
|  | ||||
    def _rename(self, vp1: str, vp2: str) -> None:
        """
        move/rename vp1 to vp2 (virtual paths); requires --smbw,
        write-access on the destination, and move-access on the source
        """
        if not self.args.smbw:
            yeet("blocked rename (no --smbw): " + vp1)

        vp1 = vp1.lstrip("/")
        vp2 = vp2.lstrip("/")

        uname = self._uname()
        vfs2, ap2 = self._v2a("rename", vp2, vp1, uname=uname)
        if not vfs2.axs.uwrite:
            t = "blocked write (no-write-acc %s): /%s @%s"
            yeet(t % (vfs2.axs.uwrite, vp2, uname))

        vfs1, _ = self.asrv.vfs.get(vp1, uname, True, True, True)
        if not vfs1.axs.umove:
            t = "blocked rename (no-move-acc %s): /%s @%s"
            yeet(t % (vfs1.axs.umove, vp1, uname))

        self.hub.up2k.handle_mv(uname, vp1, vp2)
        try:
            # presumably recreates the source's parent dirs for clients
            # that expect the tree to persist -- TODO confirm intent
            bos.makedirs(ap2)
        except:
            pass
|  | ||||
|     def _mkdir(self, vpath: str) -> None: | ||||
|         if not self.args.smbw: | ||||
|             yeet("blocked mkdir (no --smbw): " + vpath) | ||||
|  | ||||
|         uname = self._uname() | ||||
|         vfs, ap = self._v2a("mkdir", vpath, uname=uname) | ||||
|         if not vfs.axs.uwrite: | ||||
|             t = "blocked mkdir (no-write-acc %s): /%s @%s" | ||||
|             yeet(t % (vfs.axs.uwrite, vpath, uname)) | ||||
|  | ||||
|         return bos.mkdir(ap) | ||||
|  | ||||
|     def _stat(self, vpath: str, *a: Any, **ka: Any) -> os.stat_result: | ||||
|         try: | ||||
|             ap = self._v2a("stat", vpath, *a, perms=[True, False])[1] | ||||
|             ret = bos.stat(ap, *a, **ka) | ||||
|             # debug(" `-stat:ok") | ||||
|             return ret | ||||
|         except: | ||||
|             # white lie: windows freaks out if we raise due to an offline volume | ||||
|             # debug(" `-stat:NOPE (faking a directory)") | ||||
|             ts = int(time.time()) | ||||
|             return os.stat_result((16877, -1, -1, 1, 1000, 1000, 8, ts, ts, ts)) | ||||
|  | ||||
|     def _unlink(self, vpath: str) -> None: | ||||
|         if not self.args.smbw: | ||||
|             yeet("blocked delete (no --smbw): " + vpath) | ||||
|  | ||||
|         # return bos.unlink(self._v2a("stat", vpath, *a)[1]) | ||||
|         uname = self._uname() | ||||
|         vfs, ap = self._v2a( | ||||
|             "delete", vpath, uname=uname, perms=[True, False, False, True] | ||||
|         ) | ||||
|         if not vfs.axs.udel: | ||||
|             yeet("blocked delete (no-del-acc): " + vpath) | ||||
|  | ||||
|         vpath = vpath.replace("\\", "/").lstrip("/") | ||||
|         self.hub.up2k.handle_rm(uname, "1.7.6.2", [vpath], [], False) | ||||
|  | ||||
|     def _utime(self, vpath: str, times: tuple[float, float]) -> None: | ||||
|         if not self.args.smbw: | ||||
|             yeet("blocked utime (no --smbw): " + vpath) | ||||
|  | ||||
|         uname = self._uname() | ||||
|         vfs, ap = self._v2a("utime", vpath, uname=uname) | ||||
|         if not vfs.axs.uwrite: | ||||
|             t = "blocked utime (no-write-acc %s): /%s @%s" | ||||
|             yeet(t % (vfs.axs.uwrite, vpath, uname)) | ||||
|  | ||||
|         return bos.utime(ap, times) | ||||
|  | ||||
|     def _p_exists(self, vpath: str) -> bool: | ||||
|         # ap = "?" | ||||
|         try: | ||||
|             ap = self._v2a("p.exists", vpath, perms=[True, False])[1] | ||||
|             bos.stat(ap) | ||||
|             # debug(" `-exists((%s)->(%s)):ok", vpath, ap) | ||||
|             return True | ||||
|         except: | ||||
|             # debug(" `-exists((%s)->(%s)):NOPE", vpath, ap) | ||||
|             return False | ||||
|  | ||||
|     def _p_getsize(self, vpath: str) -> int: | ||||
|         st = bos.stat(self._v2a("p.getsize", vpath, perms=[True, False])[1]) | ||||
|         return st.st_size | ||||
|  | ||||
|     def _p_isdir(self, vpath: str) -> bool: | ||||
|         try: | ||||
|             st = bos.stat(self._v2a("p.isdir", vpath, perms=[True, False])[1]) | ||||
|             ret = stat.S_ISDIR(st.st_mode) | ||||
|             # debug(" `-isdir:%s:%s", st.st_mode, ret) | ||||
|             return ret | ||||
|         except: | ||||
|             return False | ||||
|  | ||||
|     def _p_join(self, *a) -> str: | ||||
|         # impacket.smbserver reads globs from queryDirectoryRequest['Buffer'] | ||||
|         # where somehow `fds.*` becomes `fds"*` so lets fix that | ||||
|         ret = os.path.join(*a) | ||||
|         return ret.replace('"', ".")  # type: ignore | ||||
|  | ||||
|     def _hook(self, *a: Any, **ka: Any) -> None: | ||||
|         src = inspect.currentframe().f_back.f_code.co_name | ||||
|         error("\033[31m%s:hook(%s)\033[0m", src, a) | ||||
|         raise Exception("nope") | ||||
|  | ||||
|     def _disarm(self) -> None: | ||||
|         from impacket import smbserver | ||||
|  | ||||
|         smbserver.os.chmod = self._hook | ||||
|         smbserver.os.chown = self._hook | ||||
|         smbserver.os.ftruncate = self._hook | ||||
|         smbserver.os.lchown = self._hook | ||||
|         smbserver.os.link = self._hook | ||||
|         smbserver.os.lstat = self._hook | ||||
|         smbserver.os.replace = self._hook | ||||
|         smbserver.os.scandir = self._hook | ||||
|         smbserver.os.symlink = self._hook | ||||
|         smbserver.os.truncate = self._hook | ||||
|         smbserver.os.walk = self._hook | ||||
|  | ||||
|         smbserver.os.path.abspath = self._hook | ||||
|         smbserver.os.path.expanduser = self._hook | ||||
|         smbserver.os.path.getatime = self._hook | ||||
|         smbserver.os.path.getctime = self._hook | ||||
|         smbserver.os.path.getmtime = self._hook | ||||
|         smbserver.os.path.isabs = self._hook | ||||
|         smbserver.os.path.isfile = self._hook | ||||
|         smbserver.os.path.islink = self._hook | ||||
|         smbserver.os.path.realpath = self._hook | ||||
|  | ||||
|     def _is_in_file_jail(self, *a: Any) -> bool: | ||||
|         # handled by vfs | ||||
|         return True | ||||
|  | ||||
|  | ||||
def yeet(msg: str) -> None:
    """log a rejection message and abort the current smb operation"""
    info(msg)
    raise Exception(msg)
							
								
								
									
										220
									
								
								copyparty/ssdp.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										220
									
								
								copyparty/ssdp.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,220 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import errno | ||||
| import re | ||||
| import select | ||||
| import socket | ||||
| from email.utils import formatdate | ||||
|  | ||||
| from .__init__ import TYPE_CHECKING | ||||
| from .multicast import MC_Sck, MCast | ||||
| from .util import CachedSet, html_escape, min_ex | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .broker_util import BrokerCli | ||||
|     from .httpcli import HttpCli | ||||
|     from .svchub import SvcHub | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Optional, Union | ||||
|  | ||||
|  | ||||
| GRP = "239.255.255.250" | ||||
|  | ||||
|  | ||||
class SSDP_Sck(MC_Sck):
    """multicast socket with a slot for the http port announced on its ip"""

    def __init__(self, *a):
        super(SSDP_Sck, self).__init__(*a)
        # filled in by SSDPd.run once the matching tcp listener is known
        self.hport = 0
|  | ||||
|  | ||||
class SSDPr(object):
    """generates http responses for httpcli"""

    def __init__(self, broker: "BrokerCli") -> None:
        self.broker = broker
        self.args = broker.args

    def reply(self, hc: "HttpCli") -> bool:
        """route an ssdp-related http request; returns whether to keep the connection alive"""
        if hc.vpath.endswith("device.xml"):
            return self.tx_device(hc)

        hc.reply(b"unknown request", 400)
        return False

    def tx_device(self, hc: "HttpCli") -> bool:
        """reply with the upnp device-description xml for this server"""
        zs = """
<?xml version="1.0"?>
<root xmlns="urn:schemas-upnp-org:device-1-0">
    <specVersion>
        <major>1</major>
        <minor>0</minor>
    </specVersion>
    <URLBase>{}</URLBase>
    <device>
        <presentationURL>{}</presentationURL>
        <deviceType>urn:schemas-upnp-org:device:Basic:1</deviceType>
        <friendlyName>{}</friendlyName>
        <modelDescription>file server</modelDescription>
        <manufacturer>ed</manufacturer>
        <manufacturerURL>https://ocv.me/</manufacturerURL>
        <modelName>copyparty</modelName>
        <modelURL>https://github.com/9001/copyparty/</modelURL>
        <UDN>{}</UDN>
        <serviceList>
            <service>
                <serviceType>urn:schemas-upnp-org:device:Basic:1</serviceType>
                <serviceId>urn:schemas-upnp-org:device:Basic</serviceId>
                <controlURL>/.cpr/ssdp/services.xml</controlURL>
                <eventSubURL>/.cpr/ssdp/services.xml</eventSubURL>
                <SCPDURL>/.cpr/ssdp/services.xml</SCPDURL>
            </service>
        </serviceList>
    </device>
</root>"""

        c = html_escape
        sip, sport = hc.s.getsockname()[:2]
        sip = sip.replace("::ffff:", "")  # strip ipv6-mapped-ipv4 prefix
        proto = "https" if self.args.https_only else "http"
        ubase = "{}://{}:{}".format(proto, sip, sport)
        zsl = self.args.zsl
        # --zsl can be a full url or a path relative to the server root
        url = zsl if "://" in zsl else ubase + "/" + zsl.lstrip("/")
        name = self.args.doctitle
        zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
        hc.reply(zs.encode("utf-8", "replace"))
        return False  # close connection
|  | ||||
|  | ||||
class SSDPd(MCast):
    """communicates with ssdp clients over multicast"""

    def __init__(self, hub: "SvcHub", ngen: int) -> None:
        al = hub.args
        # verbose-init when ssdp-verbose (--zsv) is set without
        # mdns-verbose (--zmv) -- TODO confirm flag semantics
        vinit = al.zsv and not al.zmv
        super(SSDPd, self).__init__(
            hub, SSDP_Sck, al.zs_on, al.zs_off, GRP, "", 1900, vinit
        )
        # one listening socket per announced interface/ip
        self.srv: dict[socket.socket, SSDP_Sck] = {}
        self.logsrc = "SSDP-{}".format(ngen)
        self.ngen = ngen

        self.rxc = CachedSet(0.7)  # dedupe window for incoming packets
        self.txc = CachedSet(5)  # win10: every 3 sec
        # case-insensitive match for the only search-target we answer
        self.ptn_st = re.compile(b"\nst: *upnp:rootdevice", re.I)

    def log(self, msg: str, c: Union[int, str] = 0) -> None:
        # route through the hub logger, tagged with our generation
        self.log_func(self.logsrc, msg, c)

    def run(self) -> None:
        """bind multicast sockets, map each to an http port, then serve until stopped"""
        try:
            bound = self.create_servers()
        except:
            t = "no server IP matches the ssdp config\n{}"
            self.log(t.format(min_ex()), 1)
            bound = []

        if not bound:
            self.log("failed to announce copyparty services on the network", 3)
            return

        # find http port for this listening ip
        for srv in self.srv.values():
            tcps = self.hub.tcpsrv.bound
            # prefer an exact ip (or ipv4 wildcard) match, then the ipv6
            # wildcard, and as a last resort just take the first bound port
            hp = next((x[1] for x in tcps if x[0] in ("0.0.0.0", srv.ip)), 0)
            hp = hp or next((x[1] for x in tcps if x[0] == "::"), 0)
            if not hp:
                hp = tcps[0][1]
                self.log("assuming port {} for {}".format(hp, srv.ip), 3)
            srv.hport = hp

        self.log("listening")
        try:
            self.run2()
        except OSError as ex:
            if ex.errno != errno.EBADF:
                raise

            # EBADF: sockets were closed by stop(); clean shutdown
            self.log("stopping due to {}".format(ex), "90")

        self.log("stopped", 2)

    def run2(self) -> None:
        """main loop; receive and answer m-search queries"""
        while self.running:
            rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
            rx: list[socket.socket] = rdy[0]  # type: ignore
            self.rxc.cln()  # expire old entries in the packet-dedupe cache
            buf = b""
            addr = ("0", 0)
            for sck in rx:
                try:
                    buf, addr = sck.recvfrom(4096)
                    self.eat(buf, addr)
                except:
                    if not self.running:
                        break

                    # log the offending packet (best-effort; buf/addr keep
                    # their previous values if recvfrom itself failed)
                    t = "{} {} \033[33m|{}| {}\n{}".format(
                        self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
                    )
                    self.log(t, 6)

    def stop(self) -> None:
        """close all sockets; run2 then exits via EBADF"""
        self.running = False
        for srv in self.srv.values():
            try:
                srv.sck.close()
            except:
                pass

        self.srv = {}

    def eat(self, buf: bytes, addr: tuple[str, int]) -> None:
        """inspect one incoming packet; reply if it is a rootdevice m-search"""
        cip = addr[0]
        # ignore link-local senders unless link-local mode is enabled
        if cip.startswith("169.254") and not self.ll_ok:
            return

        # drop exact duplicates seen within the rxc window
        if buf in self.rxc.c:
            return

        srv: Optional[SSDP_Sck] = self.map_client(cip)  # type: ignore
        if not srv:
            return

        self.rxc.add(buf)
        if not buf.startswith(b"M-SEARCH * HTTP/1."):
            return

        if not self.ptn_st.search(buf):
            return

        if self.args.zsv:
            t = "{} [{}] \033[36m{} \033[0m|{}|"
            self.log(t.format(srv.name, srv.ip, cip, len(buf)), "90")

        zs = """
HTTP/1.1 200 OK
CACHE-CONTROL: max-age=1800
DATE: {0}
EXT:
LOCATION: http://{1}:{2}/.cpr/ssdp/device.xml
OPT: "http://schemas.upnp.org/upnp/1/0/"; ns=01
01-NLS: {3}
SERVER: UPnP/1.0
ST: upnp:rootdevice
USN: {3}::upnp:rootdevice
BOOTID.UPNP.ORG: 0
CONFIGID.UPNP.ORG: 1

"""
        v4 = srv.ip.replace("::ffff:", "")
        zs = zs.format(formatdate(usegmt=True), v4, srv.hport, self.args.zsid)
        # drop the leading newline; http wants crlf line endings
        zb = zs[1:].replace("\n", "\r\n").encode("utf-8", "replace")
        srv.sck.sendto(zb, addr[:2])

        # log each client at most once per txc window
        if cip not in self.txc.c:
            self.log("{} [{}] --> {}".format(srv.name, srv.ip, cip), 6)

        self.txc.add(cip)
        self.txc.cln()
| @@ -1,23 +1,31 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import re | ||||
| import stat | ||||
| import tarfile | ||||
| import threading | ||||
|  | ||||
| from .sutil import errdesc | ||||
| from .util import Queue, fsenc, min_ex | ||||
| from queue import Queue | ||||
|  | ||||
| from .bos import bos | ||||
| from .sutil import StreamArc, errdesc | ||||
| from .util import Daemon, fsenc, min_ex | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Generator, Optional | ||||
|  | ||||
|     from .util import NamedLogger | ||||
|  | ||||
|  | ||||
| class QFile(object): | ||||
| class QFile(object):  # inherit io.StringIO for painful typing | ||||
|     """file-like object which buffers writes into a queue""" | ||||
|  | ||||
|     def __init__(self): | ||||
|         self.q = Queue(64) | ||||
|         self.bq = [] | ||||
|     def __init__(self) -> None: | ||||
|         self.q: Queue[Optional[bytes]] = Queue(64) | ||||
|         self.bq: list[bytes] = [] | ||||
|         self.nq = 0 | ||||
|  | ||||
|     def write(self, buf): | ||||
|     def write(self, buf: Optional[bytes]) -> None: | ||||
|         if buf is None or self.nq >= 240 * 1024: | ||||
|             self.q.put(b"".join(self.bq)) | ||||
|             self.bq = [] | ||||
| @@ -30,44 +38,86 @@ class QFile(object): | ||||
|             self.nq += len(buf) | ||||
|  | ||||
|  | ||||
| class StreamTar(object): | ||||
| class StreamTar(StreamArc): | ||||
|     """construct in-memory tar file from the given path""" | ||||
|  | ||||
|     def __init__(self, log, fgen, **kwargs): | ||||
|     def __init__( | ||||
|         self, | ||||
|         log: "NamedLogger", | ||||
|         fgen: Generator[dict[str, Any], None, None], | ||||
|         cmp: str = "", | ||||
|         **kwargs: Any | ||||
|     ): | ||||
|         super(StreamTar, self).__init__(log, fgen) | ||||
|  | ||||
|         self.ci = 0 | ||||
|         self.co = 0 | ||||
|         self.qfile = QFile() | ||||
|         self.log = log | ||||
|         self.fgen = fgen | ||||
|         self.errf = None | ||||
|         self.errf: dict[str, Any] = {} | ||||
|  | ||||
|         # python 3.8 changed to PAX_FORMAT as default, | ||||
|         # waste of space and don't care about the new features | ||||
|         # python 3.8 changed to PAX_FORMAT as default; | ||||
|         # slower, bigger, and no particular advantage | ||||
|         fmt = tarfile.GNU_FORMAT | ||||
|         self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt) | ||||
|         if "pax" in cmp: | ||||
|             # unless a client asks for it (currently | ||||
|             # gnu-tar has wider support than pax-tar) | ||||
|             fmt = tarfile.PAX_FORMAT | ||||
|             cmp = re.sub(r"[^a-z0-9]*pax[^a-z0-9]*", "", cmp) | ||||
|  | ||||
|         w = threading.Thread(target=self._gen, name="star-gen") | ||||
|         w.daemon = True | ||||
|         w.start() | ||||
|         try: | ||||
|             cmp, lv = cmp.replace(":", ",").split(",") | ||||
|             lv = int(lv) | ||||
|         except: | ||||
|             lv = None | ||||
|  | ||||
|     def gen(self): | ||||
|         while True: | ||||
|             buf = self.qfile.q.get() | ||||
|             if not buf: | ||||
|                 break | ||||
|         arg = {"name": None, "fileobj": self.qfile, "mode": "w", "format": fmt} | ||||
|         if cmp == "gz": | ||||
|             fun = tarfile.TarFile.gzopen | ||||
|             arg["compresslevel"] = lv if lv is not None else 3 | ||||
|         elif cmp == "bz2": | ||||
|             fun = tarfile.TarFile.bz2open | ||||
|             arg["compresslevel"] = lv if lv is not None else 2 | ||||
|         elif cmp == "xz": | ||||
|             fun = tarfile.TarFile.xzopen | ||||
|             arg["preset"] = lv if lv is not None else 1 | ||||
|         else: | ||||
|             fun = tarfile.open | ||||
|             arg["mode"] = "w|" | ||||
|  | ||||
|             self.co += len(buf) | ||||
|             yield buf | ||||
|         self.tar = fun(**arg) | ||||
|  | ||||
|         yield None | ||||
|         if self.errf: | ||||
|             bos.unlink(self.errf["ap"]) | ||||
|         Daemon(self._gen, "star-gen") | ||||
|  | ||||
|     def ser(self, f): | ||||
|     def gen(self) -> Generator[Optional[bytes], None, None]: | ||||
|         buf = b"" | ||||
|         try: | ||||
|             while True: | ||||
|                 buf = self.qfile.q.get() | ||||
|                 if not buf: | ||||
|                     break | ||||
|  | ||||
|                 self.co += len(buf) | ||||
|                 yield buf | ||||
|  | ||||
|             yield None | ||||
|         finally: | ||||
|             while buf: | ||||
|                 try: | ||||
|                     buf = self.qfile.q.get() | ||||
|                 except: | ||||
|                     pass | ||||
|  | ||||
|             if self.errf: | ||||
|                 bos.unlink(self.errf["ap"]) | ||||
|  | ||||
|     def ser(self, f: dict[str, Any]) -> None: | ||||
|         name = f["vp"] | ||||
|         src = f["ap"] | ||||
|         fsi = f["st"] | ||||
|  | ||||
|         if stat.S_ISDIR(fsi.st_mode): | ||||
|             return | ||||
|  | ||||
|         inf = tarfile.TarInfo(name=name) | ||||
|         inf.mode = fsi.st_mode | ||||
|         inf.size = fsi.st_size | ||||
| @@ -76,21 +126,24 @@ class StreamTar(object): | ||||
|         inf.gid = 0 | ||||
|  | ||||
|         self.ci += inf.size | ||||
|         with open(fsenc(src), "rb", 512 * 1024) as f: | ||||
|             self.tar.addfile(inf, f) | ||||
|         with open(fsenc(src), "rb", 512 * 1024) as fo: | ||||
|             self.tar.addfile(inf, fo) | ||||
|  | ||||
|     def _gen(self): | ||||
|     def _gen(self) -> None: | ||||
|         errors = [] | ||||
|         for f in self.fgen: | ||||
|             if "err" in f: | ||||
|                 errors.append([f["vp"], f["err"]]) | ||||
|                 errors.append((f["vp"], f["err"])) | ||||
|                 continue | ||||
|  | ||||
|             if self.stopped: | ||||
|                 break | ||||
|  | ||||
|             try: | ||||
|                 self.ser(f) | ||||
|             except Exception: | ||||
|             except: | ||||
|                 ex = min_ex(5, True).replace("\n", "\n-- ") | ||||
|                 errors.append([f["vp"], ex]) | ||||
|                 errors.append((f["vp"], ex)) | ||||
|  | ||||
|         if errors: | ||||
|             self.errf, txt = errdesc(errors) | ||||
|   | ||||
							
								
								
									
										5
									
								
								copyparty/stolen/dnslib/README.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										5
									
								
								copyparty/stolen/dnslib/README.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,5 @@ | ||||
| `dnslib` but heavily simplified/feature-stripped | ||||
|  | ||||
| L: MIT | ||||
| Copyright (c) 2010 - 2017 Paul Chakravarti | ||||
| https://github.com/paulc/dnslib/ | ||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user