mirror of
				https://github.com/9001/copyparty.git
				synced 2025-10-31 03:53:31 +00:00 
			
		
		
		
	Compare commits
	
		
			640 Commits
		
	
	
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|  | 22cc22225a | ||
|  | 22dff4b0e5 | ||
|  | a00ff2b086 | ||
|  | e4acddc23b | ||
|  | 2b2d8e4e02 | ||
|  | 5501d49032 | ||
|  | fa54b2eec4 | ||
|  | cb0160021f | ||
|  | 93a723d588 | ||
|  | 8ebe1fb5e8 | ||
|  | 2acdf685b1 | ||
|  | 9f122ccd16 | ||
|  | 03be26fafc | ||
|  | df5d309d6e | ||
|  | c355f9bd91 | ||
|  | 9c28ba417e | ||
|  | 705b58c741 | ||
|  | 510302d667 | ||
|  | 025a537413 | ||
|  | 60a1ff0fc0 | ||
|  | f94a0b1bff | ||
|  | 4ccfeeb2cd | ||
|  | 2646f6a4f2 | ||
|  | b286ab539e | ||
|  | 2cca6e0922 | ||
|  | db51f1b063 | ||
|  | d979c47f50 | ||
|  | e64b87b99b | ||
|  | b985011a00 | ||
|  | c2ed2314c8 | ||
|  | cd496658c3 | ||
|  | deca082623 | ||
|  | 0ea8bb7c83 | ||
|  | 1fb251a4c2 | ||
|  | 4295923b76 | ||
|  | 572aa4b26c | ||
|  | b1359f039f | ||
|  | 867d8ee49e | ||
|  | 04c86e8a89 | ||
|  | bc0cb43ef9 | ||
|  | 769454fdce | ||
|  | 4ee81af8f6 | ||
|  | 8b0e66122f | ||
|  | 8a98efb929 | ||
|  | b6fd555038 | ||
|  | 7eb413ad51 | ||
|  | 4421d509eb | ||
|  | 793ffd7b01 | ||
|  | 1e22222c60 | ||
|  | 544e0549bc | ||
|  | 83178d0836 | ||
|  | c44f5f5701 | ||
|  | 138f5bc989 | ||
|  | e4759f86ef | ||
|  | d71416437a | ||
|  | a84c583b2c | ||
|  | cdacdccdb8 | ||
|  | d3ccd3f174 | ||
|  | cb6de0387d | ||
|  | abff40519d | ||
|  | 55c74ad164 | ||
|  | 673b4f7e23 | ||
|  | d11e02da49 | ||
|  | 8790f89e08 | ||
|  | 33442026b8 | ||
|  | 03193de6d0 | ||
|  | 8675ff40f3 | ||
|  | d88889d3fc | ||
|  | 6f244d4335 | ||
|  | cacca663b3 | ||
|  | d5109be559 | ||
|  | d999f06bb9 | ||
|  | a1a8a8c7b5 | ||
|  | fdd6f3b4a6 | ||
|  | f5191973df | ||
|  | ddbaebe779 | ||
|  | 42099baeff | ||
|  | 2459965ca8 | ||
|  | 6acf436573 | ||
|  | f217e1ce71 | ||
|  | 418000aee3 | ||
|  | dbbba9625b | ||
|  | 397bc92fbc | ||
|  | 6e615dcd03 | ||
|  | 9ac5908b33 | ||
|  | 50912480b9 | ||
|  | 24b9b8319d | ||
|  | b0f4f0b653 | ||
|  | 05bbd41c4b | ||
|  | 8f5f8a3cda | ||
|  | c8938fc033 | ||
|  | 1550350e05 | ||
|  | 5cc190c026 | ||
|  | d6a0a738ce | ||
|  | f5fe3678ee | ||
|  | f2a7925387 | ||
|  | fa953ced52 | ||
|  | f0000d9861 | ||
|  | 4e67516719 | ||
|  | 29db7a6270 | ||
|  | 852499e296 | ||
|  | f1775fd51c | ||
|  | 4bb306932a | ||
|  | 2a37e81bd8 | ||
|  | 6a312ca856 | ||
|  | e7f3e475a2 | ||
|  | 854ba0ec06 | ||
|  | 209b49d771 | ||
|  | 949baae539 | ||
|  | 5f4ea27586 | ||
|  | 099cc97247 | ||
|  | 592b7d6315 | ||
|  | 0880bf55a1 | ||
|  | 4cbffec0ec | ||
|  | cc355417d4 | ||
|  | e2bc573e61 | ||
|  | 41c0376177 | ||
|  | c01cad091e | ||
|  | eb349f339c | ||
|  | 24d8caaf3e | ||
|  | 5ac2c20959 | ||
|  | bb72e6bf30 | ||
|  | d8142e866a | ||
|  | 7b7979fd61 | ||
|  | 749616d09d | ||
|  | 5485c6d7ca | ||
|  | b7aea38d77 | ||
|  | 0ecd9f99e6 | ||
|  | ca04a00662 | ||
|  | 8a09601be8 | ||
|  | 1fe0d4693e | ||
|  | bba8a3c6bc | ||
|  | e3d7f0c7d5 | ||
|  | be7bb71bbc | ||
|  | e0c4829ec6 | ||
|  | 5af1575329 | ||
|  | 884f966b86 | ||
|  | f6c6fbc223 | ||
|  | b0cc396bca | ||
|  | ae463518f6 | ||
|  | 2be2e9a0d8 | ||
|  | e405fddf74 | ||
|  | c269b0dd91 | ||
|  | 8c3211263a | ||
|  | bf04e7c089 | ||
|  | c7c6e48b1a | ||
|  | 974ca773be | ||
|  | 9270c2df19 | ||
|  | b39ff92f34 | ||
|  | 7454167f78 | ||
|  | 5ceb3a962f | ||
|  | 52bd5642da | ||
|  | c39c93725f | ||
|  | d00f0b9fa7 | ||
|  | 01cfc70982 | ||
|  | e6aec189bd | ||
|  | c98fff1647 | ||
|  | 0009e31bd3 | ||
|  | db95e880b2 | ||
|  | e69fea4a59 | ||
|  | 4360800a6e | ||
|  | b179e2b031 | ||
|  | ecdec75b4e | ||
|  | 5cb2e33353 | ||
|  | 43ff2e531a | ||
|  | 1c2c9db8f0 | ||
|  | 7ea183baef | ||
|  | ab87fac6d8 | ||
|  | 1e3b7eee3b | ||
|  | 4de028fc3b | ||
|  | 604e5dfaaf | ||
|  | 05e0c2ec9e | ||
|  | 76bd005bdc | ||
|  | 5effaed352 | ||
|  | cedaf4809f | ||
|  | 6deaf5c268 | ||
|  | 9dc6a26472 | ||
|  | 14ad5916fc | ||
|  | 1a46738649 | ||
|  | 9e5e3b099a | ||
|  | 292ce75cc2 | ||
|  | ce7df7afd4 | ||
|  | e28e793f81 | ||
|  | 3e561976db | ||
|  | 273a4eb7d0 | ||
|  | 6175f85bb6 | ||
|  | a80579f63a | ||
|  | 96d6bcf26e | ||
|  | 49e8df25ac | ||
|  | 6a05850f21 | ||
|  | 5e7c3defe3 | ||
|  | 6c0987d4d0 | ||
|  | 6eba9feffe | ||
|  | 8adfcf5950 | ||
|  | 36d6fa512a | ||
|  | 79b6e9b393 | ||
|  | dc2e2cbd4b | ||
|  | 5c12dac30f | ||
|  | 641929191e | ||
|  | 617321631a | ||
|  | ddc0c899f8 | ||
|  | cdec42c1ae | ||
|  | c48f469e39 | ||
|  | 44909cc7b8 | ||
|  | 8f61e1568c | ||
|  | b7be7a0fd8 | ||
|  | 1526a4e084 | ||
|  | dbdb9574b1 | ||
|  | 853ae6386c | ||
|  | a4b56c74c7 | ||
|  | d7f1951e44 | ||
|  | 7e2ff9825e | ||
|  | 9b423396ec | ||
|  | 781146b2fb | ||
|  | 84937d1ce0 | ||
|  | 98cce66aa4 | ||
|  | 043c2d4858 | ||
|  | 99cc434779 | ||
|  | 5095d17e81 | ||
|  | 87d835ae37 | ||
|  | 6939ca768b | ||
|  | e3957e8239 | ||
|  | 4ad6e45216 | ||
|  | 76e5eeea3f | ||
|  | eb17f57761 | ||
|  | b0db14d8b0 | ||
|  | 2b644fa81b | ||
|  | 190ccee820 | ||
|  | 4e7dd32e78 | ||
|  | 5817fb66ae | ||
|  | 9cb04eef93 | ||
|  | 0019fe7f04 | ||
|  | 852c6f2de1 | ||
|  | c4191de2e7 | ||
|  | 4de61defc9 | ||
|  | 0aa88590d0 | ||
|  | 405f3ee5fe | ||
|  | bc339f774a | ||
|  | e67b695b23 | ||
|  | 4a7633ab99 | ||
|  | c58f2ef61f | ||
|  | 3866e6a3f2 | ||
|  | 381686fc66 | ||
|  | a918c285bf | ||
|  | 1e20eafbe0 | ||
|  | 39399934ee | ||
|  | b47635150a | ||
|  | 78d2f69ed5 | ||
|  | 7a98dc669e | ||
|  | 2f15bb5085 | ||
|  | 712a578e6c | ||
|  | d8dfc4ccb2 | ||
|  | e413007eb0 | ||
|  | 6d1d3e48d8 | ||
|  | 04966164ce | ||
|  | 8b62aa7cc7 | ||
|  | 1088e8c6a5 | ||
|  | 8c54c2226f | ||
|  | f74ac1f18b | ||
|  | 25931e62fd | ||
|  | 707a940399 | ||
|  | 87ef50d384 | ||
|  | dcadf2b11c | ||
|  | 37a690a4c3 | ||
|  | 87ad23fb93 | ||
|  | 5f54d534e3 | ||
|  | aecae552a4 | ||
|  | eaa6b3d0be | ||
|  | c2ace91e52 | ||
|  | 0bac87c36f | ||
|  | e650d05939 | ||
|  | 85a96e4446 | ||
|  | 2569005139 | ||
|  | c50cb66aef | ||
|  | d4c5fca15b | ||
|  | 75cea4f684 | ||
|  | 68c6794d33 | ||
|  | 82f98dd54d | ||
|  | 741d781c18 | ||
|  | 0be1e43451 | ||
|  | 5366bf22bb | ||
|  | bcd91b1809 | ||
|  | 9bd5738e6f | ||
|  | bab4aa4c0a | ||
|  | e965b9b9e2 | ||
|  | 31101427d3 | ||
|  | a083dc36ba | ||
|  | 9b7b9262aa | ||
|  | 660011fa6e | ||
|  | ead31b6823 | ||
|  | 4310580cd4 | ||
|  | b005acbfda | ||
|  | 460709e6f3 | ||
|  | a8768d05a9 | ||
|  | f8e3e87a52 | ||
|  | 70f1642d0d | ||
|  | 3fc7561da4 | ||
|  | 9065226c3d | ||
|  | b7e321fa47 | ||
|  | 664665b86b | ||
|  | f4f362b7a4 | ||
|  | 577d23f460 | ||
|  | 504e168486 | ||
|  | f2f9640371 | ||
|  | ee46f832b1 | ||
|  | b0e755d410 | ||
|  | cfd24604d5 | ||
|  | 264894e595 | ||
|  | 5bb9f56247 | ||
|  | 18942ed066 | ||
|  | 85321a6f31 | ||
|  | baf641396d | ||
|  | 17c91e7014 | ||
|  | 010770684d | ||
|  | b4c503657b | ||
|  | 71bd306268 | ||
|  | dd7fab1352 | ||
|  | dacca18863 | ||
|  | 53d92cc0a6 | ||
|  | 434823f6f0 | ||
|  | 2cb1f50370 | ||
|  | 03f53f6392 | ||
|  | a70ecd7af0 | ||
|  | 8b81e58205 | ||
|  | 4500c04edf | ||
|  | 6222ddd720 | ||
|  | 8a7135cf41 | ||
|  | b4c7282956 | ||
|  | 8491a40a04 | ||
|  | 343d38b693 | ||
|  | 6cf53d7364 | ||
|  | b070d44de7 | ||
|  | 79aa40fdea | ||
|  | dcaff2785f | ||
|  | 497f5b4307 | ||
|  | be32ad0da6 | ||
|  | 8ee2bf810b | ||
|  | 28232656a9 | ||
|  | fbc2424e8f | ||
|  | 94cd13e8b8 | ||
|  | 447ed5ab37 | ||
|  | af59808611 | ||
|  | e3406a9f86 | ||
|  | 7fd1d6a4e8 | ||
|  | 0ab2a665de | ||
|  | 3895575bc2 | ||
|  | 138c2bbcbb | ||
|  | bc7af1d1c8 | ||
|  | 19cd96e392 | ||
|  | db194ab519 | ||
|  | 02ad4bfab2 | ||
|  | 56b73dcc8a | ||
|  | 7704b9c8a2 | ||
|  | 999b7ae919 | ||
|  | 252b5a88b1 | ||
|  | 01e2681a07 | ||
|  | aa32f30202 | ||
|  | 195eb53995 | ||
|  | 06fa78f54a | ||
|  | 7a57c9dbf1 | ||
|  | bb657bfa85 | ||
|  | 87181726b0 | ||
|  | f1477a1c14 | ||
|  | 4f94a9e38b | ||
|  | fbed322d3b | ||
|  | 9b0f519e4e | ||
|  | 6cd6dadd06 | ||
|  | 9a28afcb48 | ||
|  | 45b701801d | ||
|  | 062246fb12 | ||
|  | 416ebfdd68 | ||
|  | 731eb92f33 | ||
|  | dbe2aec79c | ||
|  | cd9cafe3a1 | ||
|  | 067cc23346 | ||
|  | c573a780e9 | ||
|  | 8ef4a0aa71 | ||
|  | 89ba12065c | ||
|  | 99efc290df | ||
|  | 2fbdc0a85e | ||
|  | 4242422898 | ||
|  | 008d9b1834 | ||
|  | 7c76d08958 | ||
|  | 89c9f45fd0 | ||
|  | f107497a94 | ||
|  | b5dcf30e53 | ||
|  | 0cef062084 | ||
|  | 5c30148be4 | ||
|  | 3a800585bc | ||
|  | 29c212a60e | ||
|  | 2997baa7cb | ||
|  | dc6bde594d | ||
|  | e357aa546c | ||
|  | d3fe19c5aa | ||
|  | bd24bf9bae | ||
|  | ee141544aa | ||
|  | db6f6e6a23 | ||
|  | c7d950dd5e | ||
|  | 6a96c62fde | ||
|  | 36dc8cd686 | ||
|  | 7622601a77 | ||
|  | cfd41fcf41 | ||
|  | f39e370e2a | ||
|  | c1315a3b39 | ||
|  | 53b32f97e8 | ||
|  | 6c962ec7d3 | ||
|  | 6bc1bc542f | ||
|  | f0e78a6826 | ||
|  | e53531a9fb | ||
|  | 5cd9d11329 | ||
|  | 5a3e504ec4 | ||
|  | d6e09c3880 | ||
|  | 04f44c3c7c | ||
|  | ec587423e8 | ||
|  | f57b31146d | ||
|  | 35175fd685 | ||
|  | d326ba9723 | ||
|  | ab655a56af | ||
|  | d1eb113ea8 | ||
|  | 74effa9b8d | ||
|  | bba4b1c663 | ||
|  | 8709d4dba0 | ||
|  | 4ad4657774 | ||
|  | 5abe0c955c | ||
|  | 0cedaf4fa9 | ||
|  | 0aa7d12704 | ||
|  | a234aa1f7e | ||
|  | 9f68287846 | ||
|  | cd2513ec16 | ||
|  | 91d132c2b4 | ||
|  | 97ff0ebd06 | ||
|  | 8829f56d4c | ||
|  | 37c1cab726 | ||
|  | b3eb117e87 | ||
|  | fc0a941508 | ||
|  | c72753c5da | ||
|  | e442cb677a | ||
|  | 450121eac9 | ||
|  | b2ab8f971e | ||
|  | e9c6268568 | ||
|  | 2170ee8da4 | ||
|  | 357e7333cc | ||
|  | 8bb4f02601 | ||
|  | 4213efc7a6 | ||
|  | 67a744c3e8 | ||
|  | 98818e7d63 | ||
|  | 8650ce1295 | ||
|  | 9638267b4c | ||
|  | 304e053155 | ||
|  | 89d1f52235 | ||
|  | 3312c6f5bd | ||
|  | d4ba644d07 | ||
|  | b9a504fd3a | ||
|  | cebac523dc | ||
|  | c2f4090318 | ||
|  | d562956809 | ||
|  | 62499f9b71 | ||
|  | 89cf7608f9 | ||
|  | dd26b8f183 | ||
|  | 79303dac6d | ||
|  | 4203fc161b | ||
|  | f8a31cc24f | ||
|  | fc5bfe81a0 | ||
|  | aae14de796 | ||
|  | 54e1c8d261 | ||
|  | a0cc4ca4b7 | ||
|  | 2701108c5b | ||
|  | 73bd2df2c6 | ||
|  | 0063021012 | ||
|  | 1c3e4750b3 | ||
|  | edad3246e0 | ||
|  | 3411b0993f | ||
|  | 097b5609dc | ||
|  | a42af7655e | ||
|  | 69f78b86af | ||
|  | 5f60c509c6 | ||
|  | 75e5e53276 | ||
|  | 4b2b4ed52d | ||
|  | fb21bfd6d6 | ||
|  | f14369e038 | ||
|  | ff04b72f62 | ||
|  | 4535a81617 | ||
|  | cce57b700b | ||
|  | 5b6194d131 | ||
|  | 2701238cea | ||
|  | 835f8a20e6 | ||
|  | f3a501db30 | ||
|  | 4bcd30da6b | ||
|  | 947dbb6f8a | ||
|  | 1c2fedd2bf | ||
|  | 32e826efbc | ||
|  | 138b932c6a | ||
|  | 6da2f53aad | ||
|  | 20eeacaac3 | ||
|  | 81d896be9f | ||
|  | c003dfab03 | ||
|  | 20c6b82bec | ||
|  | 046b494b53 | ||
|  | f0e98d6e0d | ||
|  | fe57321853 | ||
|  | 8510804e57 | ||
|  | acd32abac5 | ||
|  | 2b47c96cf2 | ||
|  | 1027378bda | ||
|  | e979d30659 | ||
|  | 574db704cc | ||
|  | fdb969ea89 | ||
|  | 08977854b3 | ||
|  | cecac64b68 | ||
|  | 7dabdade2a | ||
|  | e788f098e2 | ||
|  | 69406d4344 | ||
|  | d16dd26c65 | ||
|  | 12219c1bea | ||
|  | 118bdcc26e | ||
|  | 78fa96f0f4 | ||
|  | c7deb63a04 | ||
|  | 4f811eb9e9 | ||
|  | 0b265bd673 | ||
|  | ee67fabbeb | ||
|  | b213de7e62 | ||
|  | 7c01505750 | ||
|  | ae28dfd020 | ||
|  | 2a5a4e785f | ||
|  | d8bddede6a | ||
|  | b8a93e74bf | ||
|  | e60ec94d35 | ||
|  | 84af5fd0a3 | ||
|  | dbb3edec77 | ||
|  | d284b46a3e | ||
|  | 9fcb4d222b | ||
|  | d0bb1ad141 | ||
|  | b299aaed93 | ||
|  | abb3224cc5 | ||
|  | 1c66d06702 | ||
|  | e00e80ae39 | ||
|  | 4f4f106c48 | ||
|  | a286cc9d55 | ||
|  | 53bb1c719b | ||
|  | 98d5aa17e2 | ||
|  | aaaa80e4b8 | ||
|  | e70e926a40 | ||
|  | e80c1f6d59 | ||
|  | 24de360325 | ||
|  | e0039bc1e6 | ||
|  | ae5c4a0109 | ||
|  | 1d367a0da0 | ||
|  | d285f7ee4a | ||
|  | 37c84021a2 | ||
|  | 8ee9de4291 | ||
|  | 249b63453b | ||
|  | 1c0017d763 | ||
|  | df51e23639 | ||
|  | 32e71a43b8 | ||
|  | 47a1e6ddfa | ||
|  | c5f41457bb | ||
|  | f1e0c44bdd | ||
|  | 9d2e390b6a | ||
|  | 75a58b435d | ||
|  | f5474d34ac | ||
|  | c962d2544f | ||
|  | 0b87a4a810 | ||
|  | 1882afb8b6 | ||
|  | 2270c8737a | ||
|  | d6794955a4 | ||
|  | f5520f45ef | ||
|  | 9401b5ae13 | ||
|  | df64a62a03 | ||
|  | 09cea66aa8 | ||
|  | 13cc33e0a5 | ||
|  | ab36c8c9de | ||
|  | f85d4ce82f | ||
|  | 6bec4c28ba | ||
|  | fad1449259 | ||
|  | 86b3b57137 | ||
|  | b235037dd3 | ||
|  | 3108139d51 | ||
|  | 2ae99ecfa0 | ||
|  | e8ab53c270 | ||
|  | 5e9bc1127d | ||
|  | 415e61c3c9 | ||
|  | 5152f37ec8 | ||
|  | 0dbeb010cf | ||
|  | 17c465bed7 | ||
|  | add04478e5 | ||
|  | 6db72d7166 | ||
|  | 868103a9c5 | ||
|  | 0f37718671 | ||
|  | fa1445df86 | ||
|  | a783e7071e | ||
|  | a9919df5af | ||
|  | b0af31ac35 | ||
|  | c4c964a685 | ||
|  | 348ec71398 | ||
|  | a257ccc8b3 | ||
|  | fcc4296040 | ||
|  | 1684d05d49 | ||
|  | 0006f933a2 | ||
|  | 0484f97c9c | ||
|  | e430b2567a | ||
|  | fbc8ee15da | ||
|  | 68a9c05947 | ||
|  | 0a81aba899 | ||
|  | d2ae822e15 | ||
|  | fac4b08526 | ||
|  | 3a7b43c663 | ||
|  | 8fcb2d1554 | ||
|  | 590c763659 | ||
|  | 11d1267f8c | ||
|  | 8f5bae95ce | ||
|  | e6b12ef14c | ||
|  | b65674618b | ||
|  | 20dca2bea5 | ||
|  | 059e93cdcf | ||
|  | 635ab25013 | ||
|  | 995cd10df8 | ||
|  | 50f3820a6d | ||
|  | 617f3ea861 | ||
|  | 788db47b95 | ||
|  | 5fa8aaabb9 | ||
|  | 89d1af7f33 | ||
|  | 799cf27c5d | ||
|  | c930d8f773 | ||
|  | a7f921abb9 | ||
|  | bc6234e032 | ||
|  | 558bfa4e1e | ||
|  | 5d19f23372 | ||
|  | 27f08cdbfa | ||
|  | 993213e2c0 | ||
|  | 49470c05fa | ||
|  | ee0a060b79 | ||
|  | 500e3157b9 | ||
|  | eba86b1d23 | ||
|  | b69a563fc2 | ||
|  | a900c36395 | ||
|  | 1d9b324d3e | ||
|  | 539e7b8efe | ||
|  | 50a477ee47 | ||
|  | 7000123a8b | ||
|  | d48a7d2398 | 
							
								
								
									
										2
									
								
								.github/pull_request_template.md
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										2
									
								
								.github/pull_request_template.md
									
									
									
									
										vendored
									
									
										Normal file
									
								
							| @@ -0,0 +1,2 @@ | ||||
| To show that your contribution is compatible with the MIT License, please include the following text somewhere in this PR description:   | ||||
| This PR complies with the DCO; https://developercertificate.org/   | ||||
							
								
								
									
										17
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										17
									
								
								.gitignore
									
									
									
									
										vendored
									
									
								
							| @@ -11,7 +11,7 @@ copyparty.egg-info/ | ||||
| /build/ | ||||
| /dist/ | ||||
| /py2/ | ||||
| /sfx/ | ||||
| /sfx* | ||||
| /unt/ | ||||
| /log/ | ||||
|  | ||||
| @@ -21,10 +21,23 @@ copyparty.egg-info/ | ||||
| # winmerge | ||||
| *.bak | ||||
|  | ||||
| # apple pls | ||||
| .DS_Store | ||||
|  | ||||
| # derived | ||||
| copyparty/res/COPYING.txt | ||||
| copyparty/web/deps/ | ||||
| srv/ | ||||
| scripts/docker/i/ | ||||
| contrib/package/arch/pkg/ | ||||
| contrib/package/arch/src/ | ||||
|  | ||||
| # state/logs | ||||
| up.*.txt | ||||
| .hist/ | ||||
| .hist/ | ||||
| scripts/docker/*.out | ||||
| scripts/docker/*.err | ||||
| /perf.* | ||||
|  | ||||
| # nix build output link | ||||
| result | ||||
|   | ||||
							
								
								
									
										1
									
								
								.vscode/launch.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										1
									
								
								.vscode/launch.json
									
									
									
									
										vendored
									
									
								
							| @@ -8,6 +8,7 @@ | ||||
|             "module": "copyparty", | ||||
|             "console": "integratedTerminal", | ||||
|             "cwd": "${workspaceFolder}", | ||||
|             "justMyCode": false, | ||||
|             "args": [ | ||||
|                 //"-nw", | ||||
|                 "-ed", | ||||
|   | ||||
							
								
								
									
										18
									
								
								.vscode/launch.py
									
									
									
									
										vendored
									
									
										
										
										Normal file → Executable file
									
								
							
							
						
						
									
										18
									
								
								.vscode/launch.py
									
									
									
									
										vendored
									
									
										
										
										Normal file → Executable file
									
								
							| @@ -1,3 +1,5 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| # takes arguments from launch.json | ||||
| # is used by no_dbg in tasks.json | ||||
| # launches 10x faster than mspython debugpy | ||||
| @@ -9,15 +11,15 @@ import sys | ||||
|  | ||||
| print(sys.executable) | ||||
|  | ||||
| import json5 | ||||
| import shlex | ||||
| import jstyleson | ||||
| import subprocess as sp | ||||
|  | ||||
|  | ||||
| with open(".vscode/launch.json", "r", encoding="utf-8") as f: | ||||
|     tj = f.read() | ||||
|  | ||||
| oj = jstyleson.loads(tj) | ||||
| oj = json5.loads(tj) | ||||
| argv = oj["configurations"][0]["args"] | ||||
|  | ||||
| try: | ||||
| @@ -28,7 +30,17 @@ except: | ||||
|  | ||||
| argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv] | ||||
|  | ||||
| if re.search(" -j ?[0-9]", " ".join(argv)): | ||||
| sfx = "" | ||||
| if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]): | ||||
|     sfx = sys.argv[1] | ||||
|     sys.argv = [sys.argv[0]] + sys.argv[2:] | ||||
|  | ||||
| argv += sys.argv[1:] | ||||
|  | ||||
| if sfx: | ||||
|     argv = [sys.executable, sfx] + argv | ||||
|     sp.check_call(argv) | ||||
| elif re.search(" -j ?[0-9]", " ".join(argv)): | ||||
|     argv = [sys.executable, "-m", "copyparty"] + argv | ||||
|     sp.check_call(argv) | ||||
| else: | ||||
|   | ||||
							
								
								
									
										31
									
								
								.vscode/settings.json
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										31
									
								
								.vscode/settings.json
									
									
									
									
										vendored
									
									
								
							| @@ -35,35 +35,22 @@ | ||||
|     "python.linting.flake8Enabled": true, | ||||
|     "python.linting.banditEnabled": true, | ||||
|     "python.linting.mypyEnabled": true, | ||||
|     "python.linting.mypyArgs": [ | ||||
|         "--ignore-missing-imports", | ||||
|         "--follow-imports=silent", | ||||
|         "--show-column-numbers", | ||||
|         "--strict" | ||||
|     ], | ||||
|     "python.linting.flake8Args": [ | ||||
|         "--max-line-length=120", | ||||
|         "--ignore=E722,F405,E203,W503,W293,E402,E501,E128", | ||||
|         "--ignore=E722,F405,E203,W503,W293,E402,E501,E128,E226", | ||||
|     ], | ||||
|     "python.linting.banditArgs": [ | ||||
|         "--ignore=B104" | ||||
|     ], | ||||
|     "python.linting.pylintArgs": [ | ||||
|         "--disable=missing-module-docstring", | ||||
|         "--disable=missing-class-docstring", | ||||
|         "--disable=missing-function-docstring", | ||||
|         "--disable=wrong-import-position", | ||||
|         "--disable=raise-missing-from", | ||||
|         "--disable=bare-except", | ||||
|         "--disable=invalid-name", | ||||
|         "--disable=line-too-long", | ||||
|         "--disable=consider-using-f-string" | ||||
|         "--ignore=B104,B110,B112" | ||||
|     ], | ||||
|     // python3 -m isort --py=27 --profile=black copyparty/ | ||||
|     "python.formatting.provider": "black", | ||||
|     "python.formatting.provider": "none", | ||||
|     "[python]": { | ||||
|         "editor.defaultFormatter": "ms-python.black-formatter" | ||||
|     }, | ||||
|     "editor.formatOnSave": true, | ||||
|     "[html]": { | ||||
|         "editor.formatOnSave": false, | ||||
|         "editor.autoIndent": "keep", | ||||
|     }, | ||||
|     "[css]": { | ||||
|         "editor.formatOnSave": false, | ||||
| @@ -71,10 +58,6 @@ | ||||
|     "files.associations": { | ||||
|         "*.makefile": "makefile" | ||||
|     }, | ||||
|     "python.formatting.blackArgs": [ | ||||
|         "-t", | ||||
|         "py27" | ||||
|     ], | ||||
|     "python.linting.enabled": true, | ||||
|     "python.pythonPath": "/usr/bin/python3" | ||||
| } | ||||
							
								
								
									
										9
									
								
								SECURITY.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										9
									
								
								SECURITY.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,9 @@ | ||||
| # Security Policy | ||||
|  | ||||
| if you hit something extra juicy pls let me know on either of the following | ||||
| * email -- `copyparty@ocv.ze` except `ze` should be `me` | ||||
| * [mastodon dm](https://layer8.space/@tripflag) -- `@tripflag@layer8.space` | ||||
| * [github private vulnerability report](https://github.com/9001/copyparty/security/advisories/new), wow that form is complicated | ||||
| * [twitter dm](https://twitter.com/tripflag) (if im somehow not banned yet) | ||||
|  | ||||
| no bug bounties sorry! all i can offer is greetz in the release notes | ||||
| @@ -1,7 +1,8 @@ | ||||
| # [`up2k.py`](up2k.py) | ||||
| # [`u2c.py`](u2c.py) | ||||
| * command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm) | ||||
| * file uploads, file-search, autoresume of aborted/broken uploads | ||||
| * faster than browsers | ||||
| * sync local folder to server | ||||
| * generally faster than browsers | ||||
| * if something breaks just restart it | ||||
|  | ||||
|  | ||||
| @@ -11,7 +12,7 @@ produces a chronological list of all uploads by collecting info from up2k databa | ||||
| * optional mapping from IP-addresses to nicknames | ||||
|  | ||||
|  | ||||
| # [`copyparty-fuse.py`](copyparty-fuse.py) | ||||
| # [`partyfuse.py`](partyfuse.py) | ||||
| * mount a copyparty server as a local filesystem (read-only) | ||||
| * **supports Windows!** -- expect `194 MiB/s` sequential read | ||||
| * **supports Linux** -- expect `117 MiB/s` sequential read | ||||
| @@ -30,19 +31,19 @@ also consider using [../docs/rclone.md](../docs/rclone.md) instead for 5x perfor | ||||
| * install [winfsp](https://github.com/billziss-gh/winfsp/releases/latest) and [python 3](https://www.python.org/downloads/) | ||||
|   * [x] add python 3.x to PATH (it asks during install) | ||||
| * `python -m pip install --user fusepy` | ||||
| * `python ./copyparty-fuse.py n: http://192.168.1.69:3923/` | ||||
| * `python ./partyfuse.py n: http://192.168.1.69:3923/` | ||||
|  | ||||
| 10% faster in [msys2](https://www.msys2.org/), 700% faster if debug prints are enabled: | ||||
| * `pacman -S mingw64/mingw-w64-x86_64-python{,-pip}` | ||||
| * `/mingw64/bin/python3 -m pip install --user fusepy` | ||||
| * `/mingw64/bin/python3 ./copyparty-fuse.py [...]` | ||||
| * `/mingw64/bin/python3 ./partyfuse.py [...]` | ||||
|  | ||||
| you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releases/latest), let me know if you [figure out how](https://github.com/dokan-dev/dokany/wiki/FUSE)   | ||||
| (winfsp's sshfs leaks, doesn't look like winfsp itself does, should be fine) | ||||
|  | ||||
|  | ||||
|  | ||||
| # [`copyparty-fuse🅱️.py`](copyparty-fuseb.py) | ||||
| # [`partyfuse2.py`](partyfuse2.py) | ||||
| * mount a copyparty server as a local filesystem (read-only) | ||||
| * does the same thing except more correct, `samba` approves | ||||
| * **supports Linux** -- expect `18 MiB/s` (wait what) | ||||
| @@ -50,7 +51,7 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas | ||||
|  | ||||
|  | ||||
|  | ||||
| # [`copyparty-fuse-streaming.py`](copyparty-fuse-streaming.py) | ||||
| # [`partyfuse-streaming.py`](partyfuse-streaming.py) | ||||
| * pretend this doesn't exist | ||||
|  | ||||
|  | ||||
|   | ||||
							
								
								
									
										29
									
								
								bin/hooks/README.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										29
									
								
								bin/hooks/README.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,29 @@ | ||||
| standalone programs which are executed by copyparty when an event happens (upload, file rename, delete, ...) | ||||
|  | ||||
| these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info | ||||
|  | ||||
| run copyparty with `--help-hooks` for usage details / hook type explanations (xbu/xau/xiu/xbr/xar/xbd/xad) | ||||
|  | ||||
| > **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead | ||||
|  | ||||
|  | ||||
| # after upload | ||||
| * [notify.py](notify.py) shows a desktop notification ([example](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png)) | ||||
|   * [notify2.py](notify2.py) uses the json API to show more context | ||||
| * [image-noexif.py](image-noexif.py) removes image exif by overwriting / directly editing the uploaded file | ||||
| * [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png)) | ||||
| * [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable | ||||
|  | ||||
|  | ||||
| # upload batches | ||||
| these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every single file), `xiu` hooks are given a list of recent uploads on STDIN after the server has gone idle for N seconds, reducing server load + providing more context | ||||
| * [xiu.py](xiu.py) is a "minimal" example showing a list of filenames + total filesize | ||||
| * [xiu-sha.py](xiu-sha.py) produces a sha512 checksum list in the volume root | ||||
|  | ||||
|  | ||||
| # before upload | ||||
| * [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions | ||||
|  | ||||
|  | ||||
| # on message | ||||
| * [wget.py](wget.py) lets you download files by POSTing URLs to copyparty | ||||
							
								
								
									
										68
									
								
								bin/hooks/discord-announce.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										68
									
								
								bin/hooks/discord-announce.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,68 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import sys | ||||
| import json | ||||
| import requests | ||||
| from copyparty.util import humansize, quotep | ||||
|  | ||||
|  | ||||
| _ = r""" | ||||
| announces a new upload on discord | ||||
|  | ||||
| example usage as global config: | ||||
|     --xau f,t5,j,bin/hooks/discord-announce.py | ||||
|  | ||||
| example usage as a volflag (per-volume config): | ||||
|     -v srv/inc:inc:r:rw,ed:c,xau=f,t5,j,bin/hooks/discord-announce.py | ||||
|                            ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | ||||
|  | ||||
|     (share filesystem-path srv/inc as volume /inc, | ||||
|      readable by everyone, read-write for user 'ed', | ||||
|      running this plugin on all uploads with the params listed below) | ||||
|  | ||||
| parameters explained, | ||||
|     xbu = execute after upload | ||||
|     f  = fork; don't wait for it to finish | ||||
|     t5 = timeout if it's still running after 5 sec | ||||
|     j  = provide upload information as json; not just the filename | ||||
|  | ||||
| replace "xau" with "xbu" to announce Before upload starts instead of After completion | ||||
|  | ||||
| # how to discord: | ||||
| first create the webhook url; https://support.discord.com/hc/en-us/articles/228383668-Intro-to-Webhooks | ||||
| then use this to design your message: https://discohook.org/ | ||||
| """ | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|     WEBHOOK = "https://discord.com/api/webhooks/1234/base64" | ||||
|     WEBHOOK = "https://discord.com/api/webhooks/1066830390280597718/M1TDD110hQA-meRLMRhdurych8iyG35LDoI1YhzbrjGP--BXNZodZFczNVwK4Ce7Yme5" | ||||
|  | ||||
|     # read info from copyparty | ||||
|     inf = json.loads(sys.argv[1]) | ||||
|     vpath = inf["vp"] | ||||
|     filename = vpath.split("/")[-1] | ||||
|     url = f"https://{inf['host']}/{quotep(vpath)}" | ||||
|  | ||||
|     # compose the message to discord | ||||
|     j = { | ||||
|         "title": filename, | ||||
|         "url": url, | ||||
|         "description": url.rsplit("/", 1)[0], | ||||
|         "color": 0x449900, | ||||
|         "fields": [ | ||||
|             {"name": "Size", "value": humansize(inf["sz"])}, | ||||
|             {"name": "User", "value": inf["user"]}, | ||||
|             {"name": "IP", "value": inf["ip"]}, | ||||
|         ], | ||||
|     } | ||||
|  | ||||
|     for v in j["fields"]: | ||||
|         v["inline"] = True | ||||
|  | ||||
|     r = requests.post(WEBHOOK, json={"embeds": [j]}) | ||||
|     print(f"discord: {r}\n", end="") | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
							
								
								
									
										72
									
								
								bin/hooks/image-noexif.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										72
									
								
								bin/hooks/image-noexif.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,72 @@ | ||||
#!/usr/bin/env python3

import os
import sys
import subprocess as sp


_ = r"""
remove exif tags from uploaded images; the eventhook edition of
https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/image-noexif.py

dependencies:
    exiftool / perl-Image-ExifTool

being an upload hook, this will take effect after upload completion
    but before copyparty has hashed/indexed the file, which means that
    copyparty will never index the original file, so deduplication will
    not work as expected... which is mostly OK but ehhh

note: modifies the file in-place, so don't set the `f` (fork) flag

example usages; either as global config (all volumes) or as volflag:
    --xau bin/hooks/image-noexif.py
    -v srv/inc:inc:r:rw,ed:c,xau=bin/hooks/image-noexif.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

explained:
    share fs-path srv/inc at /inc (readable by all, read-write for user ed)
    running this xau (execute-after-upload) plugin for all uploaded files
"""


# filetypes to process; ignores everything else
EXTS = ("jpg", "jpeg", "avif", "heif", "heic")


try:
    from copyparty.util import fsenc
except ImportError:
    # running standalone (outside copyparty); utf-8 is a sane default
    def fsenc(p):
        return p.encode("utf-8")


def main():
    """strip exif/iptc/xmp metadata in-place from the uploaded file (argv[1])"""
    fp = sys.argv[1]
    ext = fp.lower().split(".")[-1]
    if ext not in EXTS:
        return

    cwd, fn = os.path.split(fp)
    os.chdir(cwd)
    f1 = fsenc(fn)
    # -P preserves the file's modification time;
    # -overwrite_original avoids leaving a backup copy behind
    cmd = [
        b"exiftool",
        b"-exif:all=",
        b"-iptc:all=",
        b"-xmp:all=",
        b"-P",
        b"-overwrite_original",
        b"--",
        f1,
    ]
    sp.check_output(cmd)
    print("image-noexif: stripped")


if __name__ == "__main__":
    try:
        main()
    except Exception:
        # best-effort: never block the upload pipeline on a failed strip
        pass
							
								
								
									
										66
									
								
								bin/hooks/notify.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										66
									
								
								bin/hooks/notify.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,66 @@ | ||||
#!/usr/bin/env python3

import os
import sys
import subprocess as sp


_ = r"""
show os notification on upload; works on windows, linux, macos, android

dependencies:
    windows: python3 -m pip install --user -U plyer
    linux:   python3 -m pip install --user -U plyer
    macos:   python3 -m pip install --user -U plyer pyobjus
    android: just termux and termux-api

example usages; either as global config (all volumes) or as volflag:
    --xau f,bin/hooks/notify.py
    -v srv/inc:inc:r:rw,ed:c,xau=f,bin/hooks/notify.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on all uploads with the params listed below)

parameters explained,
    xau = execute after upload
    f   = fork so it doesn't block uploads
"""


try:
    from copyparty.util import humansize
except ImportError:
    # running standalone; just show the raw byte count
    def humansize(n):
        return n


def main():
    """show a desktop / termux notification for the uploaded file (argv[1])"""
    fp = sys.argv[1]
    dp, fn = os.path.split(fp)
    try:
        sz = humansize(os.path.getsize(fp))
    except OSError:
        sz = "?"

    msg = "{} ({})\n📁 {}".format(fn, sz, dp)
    title = "File received"

    if "com.termux" in sys.executable:
        # android: use termux-api instead of plyer
        sp.run(["termux-notification", "-t", title, "-c", msg])
        return

    # imported lazily so the termux path above works without plyer installed
    from plyer import notification

    icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""
    notification.notify(
        title=title,
        message=msg,
        app_icon=icon,
        timeout=10,
    )


if __name__ == "__main__":
    main()
							
								
								
									
										72
									
								
								bin/hooks/notify2.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										72
									
								
								bin/hooks/notify2.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,72 @@ | ||||
#!/usr/bin/env python3

import json
import os
import sys
import subprocess as sp
from datetime import datetime, timezone


_ = r"""
same as notify.py but with additional info (uploader, ...)
and also supports --xm (notify on 📟 message)

example usages; either as global config (all volumes) or as volflag:
    --xm  f,j,bin/hooks/notify2.py
    --xau f,j,bin/hooks/notify2.py
    -v srv/inc:inc:r:rw,ed:c,xm=f,j,bin/hooks/notify2.py
    -v srv/inc:inc:r:rw,ed:c,xau=f,j,bin/hooks/notify2.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on all uploads / msgs with the params listed below)

parameters explained,
    xau = execute after upload
    f   = fork so it doesn't block uploads
    j   = provide json instead of filepath list
"""


try:
    from copyparty.util import humansize
except ImportError:
    # running standalone; just show the raw byte count
    def humansize(n):
        return n


def main():
    """show a notification for the upload / pager-message described by json in argv[1]"""
    inf = json.loads(sys.argv[1])
    fp = inf["ap"]
    sz = humansize(inf["sz"])
    dp, fn = os.path.split(fp)
    # tz-aware replacement for the deprecated utcfromtimestamp;
    # produces the identical utc timestamp string
    mt = datetime.fromtimestamp(inf["mt"], timezone.utc).strftime("%Y-%m-%d %H:%M:%S")

    msg = f"{fn} ({sz})\n📁 {dp}"
    title = "File received"
    icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""

    if inf.get("txt"):
        # --xm: this is a pager message, not a file upload
        msg = inf["txt"]
        title = "Message received"
        icon = "mail-unread-symbolic" if sys.platform == "linux" else ""

    msg += f"\n👤 {inf['user']} ({inf['ip']})\n🕒 {mt}"

    if "com.termux" in sys.executable:
        # android: use termux-api instead of plyer
        sp.run(["termux-notification", "-t", title, "-c", msg])
        return

    # imported lazily so the termux path above works without plyer installed
    from plyer import notification

    notification.notify(
        title=title,
        message=msg,
        app_icon=icon,
        timeout=10,
    )


if __name__ == "__main__":
    main()
							
								
								
									
										35
									
								
								bin/hooks/reject-extension.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										35
									
								
								bin/hooks/reject-extension.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,35 @@ | ||||
#!/usr/bin/env python3

import sys


_ = r"""
reject file uploads by file extension

example usage as global config:
    --xbu c,bin/hooks/reject-extension.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:r:rw,ed:c,xbu=c,bin/hooks/reject-extension.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on all uploads with the params listed below)

parameters explained,
    xbu = execute before upload
    c   = check result, reject upload if error
"""


# file extensions to reject; set for O(1) lookup,
# compared case-insensitively (so "x.EXE" is caught too)
BAD_EXTS = set("exe scr com pif bat ps1 jar msi".split())


def is_bad(fp):
    """return True if the filepath has a rejected extension"""
    return fp.lower().split(".")[-1] in BAD_EXTS


def main():
    # exit code 1 + the "c" flag makes copyparty reject the upload
    sys.exit(1 if is_bad(sys.argv[1]) else 0)


if __name__ == "__main__":
    main()
							
								
								
									
										44
									
								
								bin/hooks/reject-mimetype.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										44
									
								
								bin/hooks/reject-mimetype.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,44 @@ | ||||
#!/usr/bin/env python3

import sys
import magic


_ = r"""
reject file uploads by mimetype

dependencies (linux, macos):
    python3 -m pip install --user -U python-magic

dependencies (windows):
    python3 -m pip install --user -U python-magic-bin

example usage as global config:
    --xau c,bin/hooks/reject-mimetype.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:r:rw,ed:c,xau=c,bin/hooks/reject-mimetype.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on all uploads with the params listed below)

parameters explained,
    xau = execute after upload
    c   = check result, reject upload if error
"""


def main():
    """sniff the uploaded file's mimetype; exit 1 (reject) unless allowlisted"""
    allowed = ["image/jpeg", "image/png"]

    mimetype = magic.from_file(sys.argv[1], mime=True)

    print(mimetype)

    # nonzero exit + the "c" flag makes copyparty reject the upload
    if mimetype in allowed:
        sys.exit(0)

    sys.exit(1)


if __name__ == "__main__":
    main()
							
								
								
									
										60
									
								
								bin/hooks/wget.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										60
									
								
								bin/hooks/wget.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,60 @@ | ||||
#!/usr/bin/env python3

import os
import sys
import json
import subprocess as sp


_ = r"""
use copyparty as a file downloader by POSTing URLs as
application/x-www-form-urlencoded (for example using the
message/pager function on the website)

example usage as global config:
    --xm f,j,t3600,bin/hooks/wget.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:r:rw,ed:c,xm=f,j,t3600,bin/hooks/wget.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on all messages with the params listed below)

parameters explained,
    xm = execute on message-to-server-log
    f = fork so it doesn't block uploads
    j = provide message information as json; not just the text
    c3 = mute all output
    t3600 = timeout and kill download after 1 hour
"""


def normalize_url(txt):
    """return the message text as a full url, defaulting to https"""
    return txt if "://" in txt else "https://" + txt


def url_filename(url):
    """best-effort guess of the filename a url will download as"""
    return url.split("?")[0].split("/")[-1]


def main():
    """download the url in the posted message into the message's folder"""
    inf = json.loads(sys.argv[1])
    url = normalize_url(inf["txt"])

    # download into the folder the message was posted to
    os.chdir(inf["ap"])

    name = url_filename(url)
    # drop a marker file so progress is visible in the file listing
    tfn = "-- DOWNLOADING " + name
    print(f"{tfn}\n", end="")
    open(tfn, "wb").close()

    # --trust-server-names: name the file from redirects / content-disposition
    cmd = ["wget", "--trust-server-names", "-nv", "--", url]

    try:
        sp.check_call(cmd)
    except Exception:
        # covers both a nonzero wget exit and wget missing entirely
        t = "-- FAILED TO DOWNLOAD " + name
        print(f"{t}\n", end="")
        open(t, "wb").close()

    os.unlink(tfn)


if __name__ == "__main__":
    main()
							
								
								
									
										108
									
								
								bin/hooks/xiu-sha.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										108
									
								
								bin/hooks/xiu-sha.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,108 @@ | ||||
#!/usr/bin/env python3

import hashlib
import json
import sys
from datetime import datetime


_ = r"""
this hook will produce a single sha512 file which
covers all recent uploads (plus metadata comments)

use this with --xiu, which makes copyparty buffer
uploads until server is idle, providing file infos
on stdin (filepaths or json)

example usage as global config:
    --xiu i5,j,bin/hooks/xiu-sha.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:r:rw,ed:c,xiu=i5,j,bin/hooks/xiu-sha.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on batches of uploads with the params listed below)

parameters explained,
    xiu = execute after uploads...
    i5  = ...after volume has been idle for 5sec
    j   = provide json instead of filepath list

note the "f" (fork) flag is not set, so this xiu
will block other xiu hooks while it's running
"""


try:
    from copyparty.util import fsenc
except:
    # running standalone (outside copyparty); pass paths through unchanged
    def fsenc(p):
        return p


def humantime(ts):
    """format a unix timestamp as a readable utc string"""
    return datetime.utcfromtimestamp(ts).strftime("%Y-%m-%d %H:%M:%S")


def find_files_root(inf):
    """return the length of the directory-path prefix shared by all uploads
    (an index into the absolute path, including the trailing separator)"""
    di = 9000
    for f1, f2 in zip(inf, inf[1:]):
        # parent directories of each consecutive pair of uploads
        p1 = f1["ap"].replace("\\", "/").rsplit("/", 1)[0]
        p2 = f2["ap"].replace("\\", "/").rsplit("/", 1)[0]
        di = min(len(p1), len(p2), di)
        # shrink the shared prefix to the first differing character, if any
        di = next((i for i in range(di) if p1[i] != p2[i]), di)

    return di + 1


def find_vol_root(inf):
    """return the length of the volume-root prefix of the absolute path
    (abspath length minus the volume-relative path length)"""
    return len(inf[0]["ap"][: -len(inf[0]["vp"])])


def main():
    # read the json list of upload-info dicts from copyparty (--xiu with "j")
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    inf = json.loads(zs)

    # root directory (where to put the sha512 file);
    # di = find_files_root(inf)  # next to the file closest to volume root
    di = find_vol_root(inf)  # top of the entire volume

    ret = []
    total_sz = 0
    for md in inf:
        ap = md["ap"]
        rp = ap[di:]  # path relative to the chosen root
        total_sz += md["sz"]
        fsize = "{:,}".format(md["sz"])
        mtime = humantime(md["mt"])
        up_ts = humantime(md["at"])

        # hash the file in 512 KiB chunks to keep memory use flat
        h = hashlib.sha512()
        with open(fsenc(md["ap"]), "rb", 512 * 1024) as f:
            while True:
                buf = f.read(512 * 1024)
                if not buf:
                    break

                h.update(buf)

        cksum = h.hexdigest()
        # one metadata comment + one "<hash> *<path>" line per file
        # (the hash lines are sha512sum -c compatible)
        meta = " | ".join([md["wark"], up_ts, mtime, fsize, md["ip"]])
        ret.append("# {}\n{} *{}".format(meta, cksum, rp))

    ret.append("# {} files, {} bytes total".format(len(inf), total_sz))
    ret.append("")
    ftime = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")
    # write the checksum file into the chosen root directory
    fp = "{}xfer-{}.sha512".format(inf[0]["ap"][:di], ftime)
    with open(fsenc(fp), "wb") as f:
        f.write("\n".join(ret).encode("utf-8", "replace"))

    print("wrote checksums to {}".format(fp))


if __name__ == "__main__":
    main()
							
								
								
									
										50
									
								
								bin/hooks/xiu.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										50
									
								
								bin/hooks/xiu.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,50 @@ | ||||
#!/usr/bin/env python3

import json
import sys


_ = r"""
this hook prints absolute filepaths + total size

use this with --xiu, which makes copyparty buffer
uploads until server is idle, providing file infos
on stdin (filepaths or json)

example usage as global config:
    --xiu i1,j,bin/hooks/xiu.py

example usage as a volflag (per-volume config):
    -v srv/inc:inc:r:rw,ed:c,xiu=i1,j,bin/hooks/xiu.py
                           ^^^^^^^^^^^^^^^^^^^^^^^^^^^

    (share filesystem-path srv/inc as volume /inc,
     readable by everyone, read-write for user 'ed',
     running this plugin on batches of uploads with the params listed below)

parameters explained,
    xiu = execute after uploads...
    i1  = ...after volume has been idle for 1sec
    j   = provide json instead of filepath list

note the "f" (fork) flag is not set, so this xiu
will block other xiu hooks while it's running
"""


def summarize(inf):
    """return the report lines for a list of upload-info dicts:
    one "<size> <abspath>" line per upload, then a totals line"""
    lines = []
    total_sz = 0
    for upload in inf:
        sz = upload["sz"]
        total_sz += sz
        lines.append("{:9} {}".format(sz, upload["ap"]))

    lines.append("{} files, {} bytes total".format(len(inf), total_sz))
    return lines


def main():
    # read the json list of upload-info dicts from copyparty (--xiu with "j")
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    inf = json.loads(zs)

    for ln in summarize(inf):
        print(ln)


if __name__ == "__main__":
    main()
| @@ -1,5 +1,9 @@ | ||||
| standalone programs which take an audio file as argument | ||||
|  | ||||
| you may want to forget about all this fancy complicated stuff and just use [event hooks](../hooks/) instead (which doesn't need `-e2ts` or ffmpeg)  | ||||
|  | ||||
| ---- | ||||
|  | ||||
| **NOTE:** these all require `-e2ts` to be functional, meaning you need to do at least one of these: `apt install ffmpeg` or `pip3 install mutagen` | ||||
|  | ||||
| some of these rely on libraries which are not MIT-compatible | ||||
| @@ -17,6 +21,7 @@ these do not have any problematic dependencies at all: | ||||
| * [cksum.py](./cksum.py) computes various checksums | ||||
| * [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser) | ||||
| * [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty | ||||
|   * also available as an [event hook](../hooks/wget.py) | ||||
|  | ||||
|  | ||||
| # dependencies | ||||
| @@ -26,7 +31,7 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ | ||||
| *alternatively* (or preferably) use packages from your distro instead, then you'll need at least these: | ||||
|  | ||||
| * from distro: `numpy vamp-plugin-sdk beatroot-vamp mixxx-keyfinder ffmpeg` | ||||
| * from pypy: `keyfinder vamp` | ||||
| * from pip: `keyfinder vamp` | ||||
|  | ||||
|  | ||||
| # usage from copyparty | ||||
|   | ||||
| @@ -16,6 +16,10 @@ dep: ffmpeg | ||||
| """ | ||||
|  | ||||
|  | ||||
| # save beat timestamps to ".beats/filename.txt" | ||||
| SAVE = False | ||||
|  | ||||
|  | ||||
| def det(tf): | ||||
|     # fmt: off | ||||
|     sp.check_call([ | ||||
| @@ -23,12 +27,11 @@ def det(tf): | ||||
|         b"-nostdin", | ||||
|         b"-hide_banner", | ||||
|         b"-v", b"fatal", | ||||
|         b"-ss", b"13", | ||||
|         b"-y", b"-i", fsenc(sys.argv[1]), | ||||
|         b"-map", b"0:a:0", | ||||
|         b"-ac", b"1", | ||||
|         b"-ar", b"22050", | ||||
|         b"-t", b"300", | ||||
|         b"-t", b"360", | ||||
|         b"-f", b"f32le", | ||||
|         fsenc(tf) | ||||
|     ]) | ||||
| @@ -47,10 +50,29 @@ def det(tf): | ||||
|             print(c["list"][0]["label"].split(" ")[0]) | ||||
|             return | ||||
|  | ||||
|         # throws if detection failed: | ||||
|         bpm = float(cl[-1]["timestamp"] - cl[1]["timestamp"]) | ||||
|         bpm = round(60 * ((len(cl) - 1) / bpm), 2) | ||||
|         print(f"{bpm:.2f}") | ||||
|     # throws if detection failed: | ||||
|     beats = [float(x["timestamp"]) for x in cl] | ||||
|     bds = [b - a for a, b in zip(beats, beats[1:])] | ||||
|     bds.sort() | ||||
|     n0 = int(len(bds) * 0.2) | ||||
|     n1 = int(len(bds) * 0.75) + 1 | ||||
|     bds = bds[n0:n1] | ||||
|     bpm = sum(bds) | ||||
|     bpm = round(60 * (len(bds) / bpm), 2) | ||||
|     print(f"{bpm:.2f}") | ||||
|  | ||||
|     if SAVE: | ||||
|         fdir, fname = os.path.split(sys.argv[1]) | ||||
|         bdir = os.path.join(fdir, ".beats") | ||||
|         try: | ||||
|             os.mkdir(fsenc(bdir)) | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
|         fp = os.path.join(bdir, fname) + ".txt" | ||||
|         with open(fsenc(fp), "wb") as f: | ||||
|             txt = "\n".join([f"{x:.2f}" for x in beats]) | ||||
|             f.write(txt.encode("utf-8")) | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|   | ||||
							
								
								
									
										61
									
								
								bin/mtag/guestbook-read.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										61
									
								
								bin/mtag/guestbook-read.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,61 @@ | ||||
#!/usr/bin/env python3

"""
fetch latest msg from guestbook and return as tag

example copyparty config to use this:
  --urlform save,get -vsrv/hello:hello:w:c,e2ts,mtp=guestbook=t10,ad,p,bin/mtag/guestbook-read.py:mte=+guestbook

explained:
  for realpath srv/hello (served at /hello), write-only for everyone,
  enable file analysis on upload (e2ts),
  use mtp plugin "bin/mtag/guestbook-read.py" to provide metadata tag "guestbook",
  do this on all uploads regardless of extension,
  t10 = 10 seconds timeout for each download,
  ad = parse file regardless if FFmpeg thinks it is audio or not
  p = request upload info as json on stdin (need ip)
  mte=+guestbook enabled indexing of that tag for this volume

PS: this requires e2ts to be functional,
  meaning you need to do at least one of these:
   * apt install ffmpeg
   * pip3 install mutagen
"""


import json
import os
import sqlite3
import sys


# NOTE: this script only reads; message retention / cleanup
# is handled by the writer (guestbook.py)


def read_last_msg(db, ip):
    """return the newest guestbook message posted from `ip`, or None"""
    t = "select msg from gb where ip = ? order by ts desc"
    r = db.execute(t, (ip,)).fetchone()
    return r[0] if r else None


def main():
    """print the latest guestbook message from the uploader's ip as the tag value"""
    fp = os.path.abspath(sys.argv[1])
    fdir = os.path.dirname(fp)

    # upload-info json on stdin (the "p" mtp flag; needed for the ip)
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    md = json.loads(zs)

    ip = md["up_ip"]

    # can put the database inside `fdir` if you'd like,
    # by default it saves to PWD:
    # os.chdir(fdir)

    db = sqlite3.connect("guestbook.db3")
    with db:
        msg = read_last_msg(db, ip)
        if msg is not None:
            print(msg)


if __name__ == "__main__":
    main()
							
								
								
									
										111
									
								
								bin/mtag/guestbook.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										111
									
								
								bin/mtag/guestbook.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,111 @@ | ||||
#!/usr/bin/env python3

"""
store messages from users in an sqlite database
which can be read from another mtp for example

takes input from application/x-www-form-urlencoded POSTs,
for example using the message/pager function on the website

example copyparty config to use this:
  --urlform save,get -vsrv/hello:hello:w:c,e2ts,mtp=xgb=ebin,t10,ad,p,bin/mtag/guestbook.py:mte=+xgb

explained:
  for realpath srv/hello (served at /hello), write-only for everyone,
  enable file analysis on upload (e2ts),
  use mtp plugin "bin/mtag/guestbook.py" to provide metadata tag "xgb",
  do this on all uploads with the file extension "bin",
  t10 = 10 seconds timeout for each download,
  ad = parse file regardless if FFmpeg thinks it is audio or not
  p = request upload info as json on stdin
  mte=+xgb enabled indexing of that tag for this volume

PS: this requires e2ts to be functional,
  meaning you need to do at least one of these:
   * apt install ffmpeg
   * pip3 install mutagen
"""


import json
import os
import sqlite3
import sys
from urllib.parse import unquote_to_bytes as unquote


# set 0 to allow infinite msgs from one IP,
# other values delete older messages to make space,
# so 1 only keeps latest msg
NUM_MSGS_TO_KEEP = 1


def main():
    """store the urlencoded message (argv[1]) in guestbook.db3 and print it"""
    fp = os.path.abspath(sys.argv[1])
    fdir = os.path.dirname(fp)
    fname = os.path.basename(fp)
    # urlform saves each posted message as put-*.bin; ignore anything else
    if not fname.startswith("put-") or not fname.endswith(".bin"):
        raise Exception("not a post file")

    # upload-info json on stdin (the "p" mtp flag)
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    md = json.loads(zs)

    # read the posted body; bail out early if it exceeds 4 KiB
    buf = b""
    with open(fp, "rb") as f:
        while True:
            b = f.read(4096)
            buf += b
            if len(buf) > 4096:
                raise Exception("too big")

            if not b:
                break

    if not buf:
        raise Exception("file is empty")

    # undo the application/x-www-form-urlencoded encoding
    buf = unquote(buf.replace(b"+", b" "))
    txt = buf.decode("utf-8")

    if not txt.startswith("msg="):
        raise Exception("does not start with msg=")

    ip = md["up_ip"]
    ts = md["up_at"]
    txt = txt[4:]  # drop the "msg=" prefix

    # can put the database inside `fdir` if you'd like,
    # by default it saves to PWD:
    # os.chdir(fdir)

    db = sqlite3.connect("guestbook.db3")
    # create the table + index on first use
    try:
        db.execute("select 1 from gb").fetchone()
    except:
        with db:
            db.execute("create table gb (ip text, ts real, msg text)")
            db.execute("create index gb_ip on gb(ip)")

    with db:
        if NUM_MSGS_TO_KEEP == 1:
            # common case: drop all previous messages from this ip
            t = "delete from gb where ip = ?"
            db.execute(t, (ip,))

        t = "insert into gb values (?,?,?)"
        db.execute(t, (ip, ts, txt))

        if NUM_MSGS_TO_KEEP > 1:
            # keep only the newest NUM_MSGS_TO_KEEP messages from this ip
            t = "select ts from gb where ip = ? order by ts desc"
            hits = db.execute(t, (ip,)).fetchall()

            if len(hits) > NUM_MSGS_TO_KEEP:
                lim = hits[NUM_MSGS_TO_KEEP][0]
                t = "delete from gb where ip = ? and ts <= ?"
                db.execute(t, (ip, lim))

    # the printed text becomes the "xgb" tag value
    print(txt)


if __name__ == "__main__":
    main()
| @@ -61,7 +61,7 @@ def main(): | ||||
|  | ||||
|     os.chdir(cwd) | ||||
|     f1 = fsenc(fn) | ||||
|     f2 = os.path.join(b"noexif", f1) | ||||
|     f2 = fsenc(os.path.join(b"noexif", fn)) | ||||
|     cmd = [ | ||||
|         b"exiftool", | ||||
|         b"-exif:all=", | ||||
|   | ||||
| @@ -6,6 +6,7 @@ set -e | ||||
| # | ||||
| # linux/alpine: requires gcc g++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-dev py3-{wheel,pip} py3-numpy{,-dev} | ||||
| # linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3,libsndfile1}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake | ||||
| # linux/fedora: requires gcc gcc-c++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-devel python3-numpy vamp-plugin-sdk qm-vamp-plugins | ||||
| # win64: requires msys2-mingw64 environment | ||||
| # macos: requires macports | ||||
| # | ||||
| @@ -56,6 +57,7 @@ hash -r | ||||
| 	command -v python3 && pybin=python3 || pybin=python | ||||
| } | ||||
|  | ||||
| $pybin -c 'import numpy' || | ||||
| $pybin -m pip install --user numpy | ||||
|  | ||||
|  | ||||
| @@ -160,12 +162,12 @@ install_keyfinder() { | ||||
| 	 | ||||
| 	h="$HOME" | ||||
| 	so="lib/libkeyfinder.so" | ||||
| 	memes=() | ||||
| 	memes=(-DBUILD_TESTING=OFF) | ||||
|  | ||||
| 	[ $win ] && | ||||
| 		so="bin/libkeyfinder.dll" && | ||||
| 		h="$(printf '%s\n' "$USERPROFILE" | tr '\\' '/')" && | ||||
| 		memes+=(-G "MinGW Makefiles" -DBUILD_TESTING=OFF) | ||||
| 		memes+=(-G "MinGW Makefiles") | ||||
| 	 | ||||
| 	[ $mac ] && | ||||
| 		so="lib/libkeyfinder.dylib" | ||||
| @@ -185,7 +187,7 @@ install_keyfinder() { | ||||
| 	} | ||||
| 	 | ||||
| 	# rm -rf /Users/ed/Library/Python/3.9/lib/python/site-packages/*keyfinder* | ||||
| 	CFLAGS="-I$h/pe/keyfinder/include -I/opt/local/include" \ | ||||
| 	CFLAGS="-I$h/pe/keyfinder/include -I/opt/local/include -I/usr/include/ffmpeg" \ | ||||
| 	LDFLAGS="-L$h/pe/keyfinder/lib -L$h/pe/keyfinder/lib64 -L/opt/local/lib" \ | ||||
| 	PKG_CONFIG_PATH=/c/msys64/mingw64/lib/pkgconfig \ | ||||
| 	$pybin -m pip install --user keyfinder | ||||
| @@ -223,7 +225,7 @@ install_vamp() { | ||||
| 	$pybin -m pip install --user vamp | ||||
|  | ||||
| 	cd "$td" | ||||
| 	echo '#include <vamp-sdk/Plugin.h>' | gcc -x c -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || { | ||||
| 	echo '#include <vamp-sdk/Plugin.h>' | g++ -x c++ -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || { | ||||
| 		printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n' | ||||
| 		(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/2588/vamp-plugin-sdk-2.9.0.tar.gz) | ||||
| 		sha512sum -c <( | ||||
|   | ||||
							
								
								
									
										38
									
								
								bin/mtag/mousepad.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										38
									
								
								bin/mtag/mousepad.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,38 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| import os | ||||
| import sys | ||||
| import subprocess as sp | ||||
|  | ||||
|  | ||||
| """ | ||||
| mtp test -- opens a texteditor | ||||
|  | ||||
| usage: | ||||
|   -vsrv/v1:v1:r:c,mte=+x1:c,mtp=x1=ad,p,bin/mtag/mousepad.py | ||||
|  | ||||
| explained: | ||||
|   c,mte: list of tags to index in this volume | ||||
|   c,mtp: add new tag provider | ||||
|      x1: dummy tag to provide | ||||
|      ad: dontcare if audio or not | ||||
|       p: priority 1 (run after initial tag-scan with ffprobe or mutagen) | ||||
| """ | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|     env = os.environ.copy() | ||||
|     env["DISPLAY"] = ":0.0" | ||||
|  | ||||
|     if False: | ||||
|         # open the uploaded file | ||||
|         fp = sys.argv[-1] | ||||
|     else: | ||||
|         # display stdin contents (`oth_tags`) | ||||
|         fp = "/dev/stdin" | ||||
|  | ||||
|     p = sp.Popen(["/usr/bin/mousepad", fp]) | ||||
|     p.communicate() | ||||
|  | ||||
|  | ||||
| main() | ||||
| @@ -16,7 +16,7 @@ goes without saying, but this is HELLA DANGEROUS, | ||||
|   GIVES RCE TO ANYONE WHO HAVE UPLOAD PERMISSIONS | ||||
|  | ||||
| example copyparty config to use this: | ||||
|   --urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,bin/mtag/very-bad-idea.py | ||||
|   --urlform save,get -v.::w:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,c0,bin/mtag/very-bad-idea.py | ||||
|  | ||||
| recommended deps: | ||||
|   apt install xdotool libnotify-bin | ||||
|   | ||||
| @@ -2,6 +2,7 @@ | ||||
|  | ||||
| import json | ||||
| import re | ||||
| import os | ||||
| import sys | ||||
| import subprocess as sp | ||||
|  | ||||
| @@ -36,14 +37,21 @@ FAST = True  # parse entire file at container level | ||||
|  | ||||
|  | ||||
| # warnings to ignore | ||||
| harmless = re.compile("^Unsupported codec with id ") | ||||
| harmless = re.compile( | ||||
|     r"Unsupported codec with id |Could not find codec parameters.*Attachment:|analyzeduration" | ||||
|     + r"|timescale not set" | ||||
| ) | ||||
|  | ||||
|  | ||||
| def wfilter(lines): | ||||
|     return [x for x in lines if not harmless.search(x)] | ||||
|     return [x for x in lines if x.strip() and not harmless.search(x)] | ||||
|  | ||||
|  | ||||
| def errchk(so, se, rc): | ||||
| def errchk(so, se, rc, dbg): | ||||
|     if dbg: | ||||
|         with open(dbg, "wb") as f: | ||||
|             f.write(b"so:\n" + so + b"\nse:\n" + se + b"\n") | ||||
|  | ||||
|     if rc: | ||||
|         err = (so + se).decode("utf-8", "replace").split("\n", 1) | ||||
|         err = wfilter(err) or err | ||||
| @@ -64,6 +72,11 @@ def main(): | ||||
|     zs = zb.decode("utf-8", "replace") | ||||
|     md = json.loads(zs) | ||||
|  | ||||
|     fdir = os.path.dirname(os.path.realpath(fp)) | ||||
|     flag = os.path.join(fdir, ".processed") | ||||
|     if os.path.exists(flag): | ||||
|         return "already processed" | ||||
|  | ||||
|     try: | ||||
|         w, h = [int(x) for x in md["res"].split("x")] | ||||
|         if not w + h: | ||||
| @@ -87,11 +100,11 @@ def main(): | ||||
|     with open(fsenc(f"{fp}.ff.json"), "wb") as f: | ||||
|         f.write(so) | ||||
|  | ||||
|     err = errchk(so, se, p.returncode) | ||||
|     err = errchk(so, se, p.returncode, f"{fp}.vidchk") | ||||
|     if err: | ||||
|         return err | ||||
|  | ||||
|     if min(w, h) < 1080: | ||||
|     if max(w, h) < 1280 and min(w, h) < 720: | ||||
|         return "resolution too small" | ||||
|  | ||||
|     zs = ( | ||||
| @@ -111,7 +124,7 @@ def main(): | ||||
|  | ||||
|     p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) | ||||
|     so, se = p.communicate() | ||||
|     return errchk(so, se, p.returncode) | ||||
|     return errchk(so, se, p.returncode, f"{fp}.vidchk") | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|   | ||||
| @@ -1,6 +1,11 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| """ | ||||
| DEPRECATED -- replaced by event hooks; | ||||
| https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py | ||||
|  | ||||
| --- | ||||
|  | ||||
| use copyparty as a file downloader by POSTing URLs as | ||||
| application/x-www-form-urlencoded (for example using the | ||||
| message/pager function on the website) | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| #!/usr/bin/env python3 | ||||
| from __future__ import print_function, unicode_literals | ||||
| 
 | ||||
| """copyparty-fuse-streaming: remote copyparty as a local filesystem""" | ||||
| """partyfuse-streaming: remote copyparty as a local filesystem""" | ||||
| __author__ = "ed <copyparty@ocv.me>" | ||||
| __copyright__ = 2020 | ||||
| __license__ = "MIT" | ||||
| @@ -12,7 +12,7 @@ __url__ = "https://github.com/9001/copyparty/" | ||||
| mount a copyparty server (local or remote) as a filesystem | ||||
| 
 | ||||
| usage: | ||||
|   python copyparty-fuse-streaming.py http://192.168.1.69:3923/  ./music | ||||
|   python partyfuse-streaming.py http://192.168.1.69:3923/  ./music | ||||
| 
 | ||||
| dependencies: | ||||
|   python3 -m pip install --user fusepy | ||||
| @@ -21,7 +21,7 @@ dependencies: | ||||
|   + on Windows: https://github.com/billziss-gh/winfsp/releases/latest | ||||
| 
 | ||||
| this was a mistake: | ||||
|   fork of copyparty-fuse.py with a streaming cache rather than readahead, | ||||
|   fork of partyfuse.py with a streaming cache rather than readahead, | ||||
|   thought this was gonna be way faster (and it kind of is) | ||||
|   except the overhead of reopening connections on trunc totally kills it | ||||
| """ | ||||
| @@ -62,12 +62,12 @@ except: | ||||
|     else: | ||||
|         libfuse = "apt install libfuse\n    modprobe fuse" | ||||
| 
 | ||||
|     print( | ||||
|         "\n  could not import fuse; these may help:" | ||||
|         + "\n    python3 -m pip install --user fusepy\n    " | ||||
|         + libfuse | ||||
|         + "\n" | ||||
|     ) | ||||
|     m = """\033[33m | ||||
|   could not import fuse; these may help: | ||||
|     {} -m pip install --user fusepy | ||||
|     {} | ||||
| \033[0m""" | ||||
|     print(m.format(sys.executable, libfuse)) | ||||
|     raise | ||||
| 
 | ||||
| 
 | ||||
| @@ -154,7 +154,7 @@ def dewin(txt): | ||||
| class RecentLog(object): | ||||
|     def __init__(self): | ||||
|         self.mtx = threading.Lock() | ||||
|         self.f = None  # open("copyparty-fuse.log", "wb") | ||||
|         self.f = None  # open("partyfuse.log", "wb") | ||||
|         self.q = [] | ||||
| 
 | ||||
|         thr = threading.Thread(target=self.printer) | ||||
| @@ -185,9 +185,9 @@ class RecentLog(object): | ||||
|             print("".join(q), end="") | ||||
| 
 | ||||
| 
 | ||||
| # [windows/cmd/cpy3]  python dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/cmd/msys2] C:\msys64\mingw64\bin\python3 dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/mty/msys2] /mingw64/bin/python3 /c/Users/ed/dev/copyparty/bin/copyparty-fuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/cmd/cpy3]  python dev\copyparty\bin\partyfuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/cmd/msys2] C:\msys64\mingw64\bin\python3 dev\copyparty\bin\partyfuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/mty/msys2] /mingw64/bin/python3 /c/Users/ed/dev/copyparty/bin/partyfuse.py q: http://192.168.1.159:1234/ | ||||
| # | ||||
| # [windows] find /q/music/albums/Phant*24bit -printf '%s %p\n' | sort -n | tail -n 8 | sed -r 's/^[0-9]+ //' | while IFS= read -r x; do dd if="$x" of=/dev/null bs=4k count=8192 & done | ||||
| # [alpine]  ll t; for x in t/2020_0724_16{2,3}*; do dd if="$x" of=/dev/null bs=4k count=10240 & done | ||||
| @@ -1,7 +1,7 @@ | ||||
| #!/usr/bin/env python3 | ||||
| from __future__ import print_function, unicode_literals | ||||
| 
 | ||||
| """copyparty-fuse: remote copyparty as a local filesystem""" | ||||
| """partyfuse: remote copyparty as a local filesystem""" | ||||
| __author__ = "ed <copyparty@ocv.me>" | ||||
| __copyright__ = 2019 | ||||
| __license__ = "MIT" | ||||
| @@ -12,7 +12,7 @@ __url__ = "https://github.com/9001/copyparty/" | ||||
| mount a copyparty server (local or remote) as a filesystem | ||||
| 
 | ||||
| usage: | ||||
|   python copyparty-fuse.py http://192.168.1.69:3923/  ./music | ||||
|   python partyfuse.py http://192.168.1.69:3923/  ./music | ||||
| 
 | ||||
| dependencies: | ||||
|   python3 -m pip install --user fusepy | ||||
| @@ -74,12 +74,12 @@ except: | ||||
|     else: | ||||
|         libfuse = "apt install libfuse3-3\n    modprobe fuse" | ||||
| 
 | ||||
|     print( | ||||
|         "\n  could not import fuse; these may help:" | ||||
|         + "\n    python3 -m pip install --user fusepy\n    " | ||||
|         + libfuse | ||||
|         + "\n" | ||||
|     ) | ||||
|     m = """\033[33m | ||||
|   could not import fuse; these may help: | ||||
|     {} -m pip install --user fusepy | ||||
|     {} | ||||
| \033[0m""" | ||||
|     print(m.format(sys.executable, libfuse)) | ||||
|     raise | ||||
| 
 | ||||
| 
 | ||||
| @@ -166,7 +166,7 @@ def dewin(txt): | ||||
| class RecentLog(object): | ||||
|     def __init__(self): | ||||
|         self.mtx = threading.Lock() | ||||
|         self.f = None  # open("copyparty-fuse.log", "wb") | ||||
|         self.f = None  # open("partyfuse.log", "wb") | ||||
|         self.q = [] | ||||
| 
 | ||||
|         thr = threading.Thread(target=self.printer) | ||||
| @@ -197,9 +197,9 @@ class RecentLog(object): | ||||
|             print("".join(q), end="") | ||||
| 
 | ||||
| 
 | ||||
| # [windows/cmd/cpy3]  python dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/cmd/msys2] C:\msys64\mingw64\bin\python3 dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/mty/msys2] /mingw64/bin/python3 /c/Users/ed/dev/copyparty/bin/copyparty-fuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/cmd/cpy3]  python dev\copyparty\bin\partyfuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/cmd/msys2] C:\msys64\mingw64\bin\python3 dev\copyparty\bin\partyfuse.py q: http://192.168.1.159:1234/ | ||||
| # [windows/mty/msys2] /mingw64/bin/python3 /c/Users/ed/dev/copyparty/bin/partyfuse.py q: http://192.168.1.159:1234/ | ||||
| # | ||||
| # [windows] find /q/music/albums/Phant*24bit -printf '%s %p\n' | sort -n | tail -n 8 | sed -r 's/^[0-9]+ //' | while IFS= read -r x; do dd if="$x" of=/dev/null bs=4k count=8192 & done | ||||
| # [alpine]  ll t; for x in t/2020_0724_16{2,3}*; do dd if="$x" of=/dev/null bs=4k count=10240 & done | ||||
| @@ -997,7 +997,7 @@ def main(): | ||||
|     ap.add_argument( | ||||
|         "-cf", metavar="NUM_BLOCKS", type=int, default=nf, help="file cache" | ||||
|     ) | ||||
|     ap.add_argument("-a", metavar="PASSWORD", help="password") | ||||
|     ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath") | ||||
|     ap.add_argument("-d", action="store_true", help="enable debug") | ||||
|     ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify") | ||||
|     ap.add_argument("-td", action="store_true", help="disable certificate check") | ||||
| @@ -1,7 +1,7 @@ | ||||
| #!/usr/bin/env python3 | ||||
| from __future__ import print_function, unicode_literals | ||||
| 
 | ||||
| """copyparty-fuseb: remote copyparty as a local filesystem""" | ||||
| """partyfuse2: remote copyparty as a local filesystem""" | ||||
| __author__ = "ed <copyparty@ocv.me>" | ||||
| __copyright__ = 2020 | ||||
| __license__ = "MIT" | ||||
| @@ -32,9 +32,19 @@ try: | ||||
|     if not hasattr(fuse, "__version__"): | ||||
|         raise Exception("your fuse-python is way old") | ||||
| except: | ||||
|     print( | ||||
|         "\n  could not import fuse; these may help:\n    python3 -m pip install --user fuse-python\n    apt install libfuse\n    modprobe fuse\n" | ||||
|     ) | ||||
|     if WINDOWS: | ||||
|         libfuse = "install https://github.com/billziss-gh/winfsp/releases/latest" | ||||
|     elif MACOS: | ||||
|         libfuse = "install https://osxfuse.github.io/" | ||||
|     else: | ||||
|         libfuse = "apt install libfuse\n    modprobe fuse" | ||||
| 
 | ||||
|     m = """\033[33m | ||||
|   could not import fuse; these may help: | ||||
|     {} -m pip install --user fuse-python | ||||
|     {} | ||||
| \033[0m""" | ||||
|     print(m.format(sys.executable, libfuse)) | ||||
|     raise | ||||
| 
 | ||||
| 
 | ||||
| @@ -42,13 +52,13 @@ except: | ||||
| mount a copyparty server (local or remote) as a filesystem | ||||
| 
 | ||||
| usage: | ||||
|   python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas | ||||
|   python ./partyfuse2.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas | ||||
| 
 | ||||
| dependencies: | ||||
|   sudo apk add fuse-dev python3-dev | ||||
|   python3 -m pip install --user fuse-python | ||||
| 
 | ||||
| fork of copyparty-fuse.py based on fuse-python which | ||||
| fork of partyfuse.py based on fuse-python which | ||||
|   appears to be more compliant than fusepy? since this works with samba | ||||
|     (probably just my garbage code tbh) | ||||
| """ | ||||
| @@ -639,7 +649,7 @@ def main(): | ||||
|         print("  need argument: mount-path") | ||||
|         print("example:") | ||||
|         print( | ||||
|             "  ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas" | ||||
|             "  ./partyfuse2.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas" | ||||
|         ) | ||||
|         sys.exit(1) | ||||
| 
 | ||||
| @@ -4,8 +4,9 @@ set -e | ||||
| # runs copyparty (or any other program really) in a chroot | ||||
| # | ||||
| # assumption: these directories, and everything within, are owned by root | ||||
| sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr ) | ||||
|  | ||||
| sysdirs=(); for v in /bin /lib /lib32 /lib64 /sbin /usr /etc/alternatives ; do | ||||
| 	[ -e $v ] && sysdirs+=($v) | ||||
| done | ||||
|  | ||||
| # error-handler | ||||
| help() { cat <<'EOF' | ||||
| @@ -38,7 +39,7 @@ while true; do | ||||
| 	v="$1"; shift | ||||
| 	[ "$v" = -- ] && break  # end of volumes | ||||
| 	[ "$#" -eq 0 ] && break  # invalid usage | ||||
| 	vols+=( "$(realpath "$v")" ) | ||||
| 	vols+=( "$(realpath "$v" || echo "$v")" ) | ||||
| done | ||||
| pybin="$1"; shift | ||||
| pybin="$(command -v "$pybin")" | ||||
| @@ -82,7 +83,7 @@ jail="${jail%/}" | ||||
| printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq | | ||||
| while IFS= read -r v; do | ||||
| 	[ -e "$v" ] || { | ||||
| 		# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v" | ||||
| 		printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v" | ||||
| 		continue | ||||
| 	} | ||||
| 	i1=$(stat -c%D.%i "$v"      2>/dev/null || echo a) | ||||
| @@ -97,9 +98,11 @@ done | ||||
|  | ||||
| cln() { | ||||
| 	rv=$? | ||||
| 	# cleanup if not in use | ||||
| 	lsof "$jail" | grep -qF "$jail" && | ||||
| 		echo "chroot is in use, will not cleanup" || | ||||
| 	wait -f -p rv $p || true | ||||
| 	cd / | ||||
| 	echo "stopping chroot..." | ||||
| 	lsof "$jail" | grep -F "$jail" && | ||||
| 		echo "chroot is in use; will not unmount" || | ||||
| 	{ | ||||
| 		mount | grep -F " on $jail" | | ||||
| 		awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' | | ||||
| @@ -115,6 +118,15 @@ mkdir -p "$jail/tmp" | ||||
| chmod 777 "$jail/tmp" | ||||
|  | ||||
|  | ||||
| # create a dev | ||||
| (cd $jail; mkdir -p dev; cd dev | ||||
| [ -e null ]    || mknod -m 666 null    c 1 3 | ||||
| [ -e zero ]    || mknod -m 666 zero    c 1 5 | ||||
| [ -e random ]  || mknod -m 444 random  c 1 8 | ||||
| [ -e urandom ] || mknod -m 444 urandom c 1 9 | ||||
| ) | ||||
|  | ||||
|  | ||||
| # run copyparty | ||||
| export HOME=$(getent passwd $uid | cut -d: -f6) | ||||
| export USER=$(getent passwd $uid | cut -d: -f1) | ||||
| @@ -124,5 +136,6 @@ export LOGNAME="$USER" | ||||
| #echo "cpp [$cpp]" | ||||
| chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" & | ||||
| p=$! | ||||
| trap 'kill -USR1 $p' USR1 | ||||
| trap 'kill $p' INT TERM | ||||
| wait | ||||
|   | ||||
| @@ -1,16 +1,17 @@ | ||||
| #!/usr/bin/env python3 | ||||
| from __future__ import print_function, unicode_literals | ||||
| 
 | ||||
| S_VERSION = "1.9" | ||||
| S_BUILD_DT = "2023-05-07" | ||||
| 
 | ||||
| """ | ||||
| up2k.py: upload to copyparty | ||||
| 2022-08-13, v0.18, ed <irc.rizon.net>, MIT-Licensed | ||||
| https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py | ||||
| u2c.py: upload to copyparty | ||||
| 2021, ed <irc.rizon.net>, MIT-Licensed | ||||
| https://github.com/9001/copyparty/blob/hovudstraum/bin/u2c.py | ||||
| 
 | ||||
| - dependencies: requests | ||||
| - supports python 2.6, 2.7, and 3.3 through 3.11 | ||||
| 
 | ||||
| - almost zero error-handling | ||||
| - but if something breaks just try again and it'll autoresume | ||||
| - supports python 2.6, 2.7, and 3.3 through 3.12 | ||||
| - if something breaks just try again and it'll autoresume | ||||
| """ | ||||
| 
 | ||||
| import os | ||||
| @@ -20,12 +21,15 @@ import math | ||||
| import time | ||||
| import atexit | ||||
| import signal | ||||
| import socket | ||||
| import base64 | ||||
| import hashlib | ||||
| import platform | ||||
| import threading | ||||
| import datetime | ||||
| 
 | ||||
| EXE = sys.executable.endswith("exe") | ||||
| 
 | ||||
| try: | ||||
|     import argparse | ||||
| except: | ||||
| @@ -35,24 +39,27 @@ except: | ||||
| 
 | ||||
| try: | ||||
|     import requests | ||||
| except: | ||||
|     if sys.version_info > (2, 7): | ||||
|         m = "\n  ERROR: need 'requests'; run this:\n   python -m pip install --user requests\n" | ||||
| except ImportError: | ||||
|     if EXE: | ||||
|         raise | ||||
|     elif sys.version_info > (2, 7): | ||||
|         m = "\nERROR: need 'requests'; please run this command:\n {0} -m pip install --user requests\n" | ||||
|     else: | ||||
|         m = "requests/2.18.4 urllib3/1.23 chardet/3.0.4 certifi/2020.4.5.1 idna/2.7" | ||||
|         m = ["   https://pypi.org/project/" + x + "/#files" for x in m.split()] | ||||
|         m = "\n  ERROR: need these:\n" + "\n".join(m) + "\n" | ||||
|         m += "\n  for f in *.whl; do unzip $f; done; rm -r *.dist-info\n" | ||||
| 
 | ||||
|     print(m) | ||||
|     raise | ||||
|     print(m.format(sys.executable)) | ||||
|     sys.exit(1) | ||||
| 
 | ||||
| 
 | ||||
| # from copyparty/__init__.py | ||||
| PY2 = sys.version_info[0] == 2 | ||||
| PY2 = sys.version_info < (3,) | ||||
| if PY2: | ||||
|     from Queue import Queue | ||||
|     from urllib import unquote | ||||
|     from urllib import quote | ||||
|     from urllib import quote, unquote | ||||
|     from urlparse import urlsplit, urlunsplit | ||||
| 
 | ||||
|     sys.dont_write_bytecode = True | ||||
|     bytes = str | ||||
| @@ -60,6 +67,7 @@ else: | ||||
|     from queue import Queue | ||||
|     from urllib.parse import unquote_to_bytes as unquote | ||||
|     from urllib.parse import quote_from_bytes as quote | ||||
|     from urllib.parse import urlsplit, urlunsplit | ||||
| 
 | ||||
|     unicode = str | ||||
| 
 | ||||
| @@ -69,6 +77,14 @@ VT100 = platform.system() != "Windows" | ||||
| req_ses = requests.Session() | ||||
| 
 | ||||
| 
 | ||||
| class Daemon(threading.Thread): | ||||
|     def __init__(self, target, name=None, a=None): | ||||
|         # type: (Any, Any, Any) -> None | ||||
|         threading.Thread.__init__(self, target=target, args=a or (), name=name) | ||||
|         self.daemon = True | ||||
|         self.start() | ||||
| 
 | ||||
| 
 | ||||
| class File(object): | ||||
|     """an up2k upload task; represents a single file""" | ||||
| 
 | ||||
| @@ -86,6 +102,7 @@ class File(object): | ||||
|         self.kchunks = {}  # type: dict[str, tuple[int, int]]  # hash: [ ofs, sz ] | ||||
| 
 | ||||
|         # set by handshake | ||||
|         self.recheck = False  # duplicate; redo handshake after all files done | ||||
|         self.ucids = []  # type: list[str]  # chunks which need to be uploaded | ||||
|         self.wark = None  # type: str | ||||
|         self.url = None  # type: str | ||||
| @@ -154,10 +171,7 @@ class MTHash(object): | ||||
|         self.done_q = Queue() | ||||
|         self.thrs = [] | ||||
|         for _ in range(cores): | ||||
|             t = threading.Thread(target=self.worker) | ||||
|             t.daemon = True | ||||
|             t.start() | ||||
|             self.thrs.append(t) | ||||
|             self.thrs.append(Daemon(self.worker)) | ||||
| 
 | ||||
|     def hash(self, f, fsz, chunksz, pcb=None, pcb_opaque=None): | ||||
|         with self.omutex: | ||||
| @@ -241,7 +255,13 @@ def eprint(*a, **ka): | ||||
| 
 | ||||
| 
 | ||||
| def flushing_print(*a, **ka): | ||||
|     _print(*a, **ka) | ||||
|     try: | ||||
|         _print(*a, **ka) | ||||
|     except: | ||||
|         v = " ".join(str(x) for x in a) | ||||
|         v = v.encode("ascii", "replace").decode("ascii") | ||||
|         _print(v, **ka) | ||||
| 
 | ||||
|     if "flush" not in ka: | ||||
|         sys.stdout.flush() | ||||
| 
 | ||||
| @@ -257,10 +277,10 @@ def termsize(): | ||||
|         try: | ||||
|             import fcntl, termios, struct | ||||
| 
 | ||||
|             cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234")) | ||||
|             r = struct.unpack(b"hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, b"AAAA")) | ||||
|             return r[::-1] | ||||
|         except: | ||||
|             return | ||||
|         return cr | ||||
|             return None | ||||
| 
 | ||||
|     cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) | ||||
|     if not cr: | ||||
| @@ -270,12 +290,11 @@ def termsize(): | ||||
|             os.close(fd) | ||||
|         except: | ||||
|             pass | ||||
|     if not cr: | ||||
|         try: | ||||
|             cr = (env["LINES"], env["COLUMNS"]) | ||||
|         except: | ||||
|             cr = (25, 80) | ||||
|     return int(cr[1]), int(cr[0]) | ||||
| 
 | ||||
|     try: | ||||
|         return cr or (int(env["COLUMNS"]), int(env["LINES"])) | ||||
|     except: | ||||
|         return 80, 25 | ||||
| 
 | ||||
| 
 | ||||
| class CTermsize(object): | ||||
| @@ -290,9 +309,7 @@ class CTermsize(object): | ||||
|         except: | ||||
|             return | ||||
| 
 | ||||
|         thr = threading.Thread(target=self.worker) | ||||
|         thr.daemon = True | ||||
|         thr.start() | ||||
|         Daemon(self.worker) | ||||
| 
 | ||||
|     def worker(self): | ||||
|         while True: | ||||
| @@ -323,6 +340,32 @@ class CTermsize(object): | ||||
| ss = CTermsize() | ||||
| 
 | ||||
| 
 | ||||
| def undns(url): | ||||
|     usp = urlsplit(url) | ||||
|     hn = usp.hostname | ||||
|     gai = None | ||||
|     eprint("resolving host [{0}] ...".format(hn), end="") | ||||
|     try: | ||||
|         gai = socket.getaddrinfo(hn, None) | ||||
|         hn = gai[0][4][0] | ||||
|     except KeyboardInterrupt: | ||||
|         raise | ||||
|     except: | ||||
|         t = "\n\033[31mfailed to resolve upload destination host;\033[0m\ngai={0}\n" | ||||
|         eprint(t.format(repr(gai))) | ||||
|         raise | ||||
| 
 | ||||
|     if usp.port: | ||||
|         hn = "{0}:{1}".format(hn, usp.port) | ||||
|     if usp.username or usp.password: | ||||
|         hn = "{0}:{1}@{2}".format(usp.username, usp.password, hn) | ||||
| 
 | ||||
|     usp = usp._replace(netloc=hn) | ||||
|     url = urlunsplit(usp) | ||||
|     eprint(" {0}".format(url)) | ||||
|     return url | ||||
| 
 | ||||
| 
 | ||||
| def _scd(err, top): | ||||
|     """non-recursive listing of directory contents, along with stat() info""" | ||||
|     with os.scandir(top) as dh: | ||||
| @@ -344,7 +387,7 @@ def _lsd(err, top): | ||||
|             err.append((abspath, str(ex))) | ||||
| 
 | ||||
| 
 | ||||
| if hasattr(os, "scandir"): | ||||
| if hasattr(os, "scandir") and sys.version_info > (3, 6): | ||||
|     statdir = _scd | ||||
| else: | ||||
|     statdir = _lsd | ||||
| @@ -359,26 +402,46 @@ def walkdir(err, top, seen): | ||||
| 
 | ||||
|     seen = seen[:] + [atop] | ||||
|     for ap, inf in sorted(statdir(err, top)): | ||||
|         yield ap, inf | ||||
|         if stat.S_ISDIR(inf.st_mode): | ||||
|             try: | ||||
|                 for x in walkdir(err, ap, seen): | ||||
|                     yield x | ||||
|             except Exception as ex: | ||||
|                 err.append((ap, str(ex))) | ||||
|         else: | ||||
|             yield ap, inf | ||||
| 
 | ||||
| 
 | ||||
| def walkdirs(err, tops): | ||||
|     """recursive statdir for a list of tops, yields [top, relpath, stat]""" | ||||
|     sep = "{0}".format(os.sep).encode("ascii") | ||||
|     if not VT100: | ||||
|         za = [] | ||||
|         for td in tops: | ||||
|             try: | ||||
|                 ap = os.path.abspath(os.path.realpath(td)) | ||||
|                 if td[-1:] in (b"\\", b"/"): | ||||
|                     ap += sep | ||||
|             except: | ||||
|                 # maybe cpython #88013 (ok) | ||||
|                 ap = td | ||||
| 
 | ||||
|             za.append(ap) | ||||
| 
 | ||||
|         za = [x if x.startswith(b"\\\\") else b"\\\\?\\" + x for x in za] | ||||
|         za = [x.replace(b"/", b"\\") for x in za] | ||||
|         tops = za | ||||
| 
 | ||||
|     for top in tops: | ||||
|         isdir = os.path.isdir(top) | ||||
|         if top[-1:] == sep: | ||||
|             stop = top.rstrip(sep) | ||||
|             yield stop, b"", os.stat(stop) | ||||
|         else: | ||||
|             stop = os.path.dirname(top) | ||||
|             stop, dn = os.path.split(top) | ||||
|             if isdir: | ||||
|                 yield stop, dn, os.stat(stop) | ||||
| 
 | ||||
|         if os.path.isdir(top): | ||||
|         if isdir: | ||||
|             for ap, inf in walkdir(err, top, []): | ||||
|                 yield stop, ap[len(stop) :].lstrip(sep), inf | ||||
|         else: | ||||
| @@ -420,7 +483,7 @@ def up2k_chunksize(filesize): | ||||
|     while True: | ||||
|         for mul in [1, 2]: | ||||
|             nchunks = math.ceil(filesize * 1.0 / chunksize) | ||||
|             if nchunks <= 256 or chunksize >= 32 * 1024 * 1024: | ||||
|             if nchunks <= 256 or (chunksize >= 32 * 1024 * 1024 and nchunks < 4096): | ||||
|                 return chunksize | ||||
| 
 | ||||
|             chunksize += stepsize | ||||
| @@ -469,14 +532,17 @@ def get_hashlist(file, pcb, mth): | ||||
|         file.kchunks[k] = [v1, v2] | ||||
| 
 | ||||
| 
 | ||||
| def handshake(req_ses, url, file, pw, search): | ||||
|     # type: (requests.Session, str, File, any, bool) -> list[str] | ||||
| def handshake(ar, file, search): | ||||
|     # type: (argparse.Namespace, File, bool) -> tuple[list[str], bool] | ||||
|     """ | ||||
|     performs a handshake with the server; reply is: | ||||
|       if search, a list of search results | ||||
|       otherwise, a list of chunks to upload | ||||
|     """ | ||||
| 
 | ||||
|     url = ar.url | ||||
|     pw = ar.a | ||||
| 
 | ||||
|     req = { | ||||
|         "hash": [x[0] for x in file.cids], | ||||
|         "name": file.name, | ||||
| @@ -485,22 +551,46 @@ def handshake(req_ses, url, file, pw, search): | ||||
|     } | ||||
|     if search: | ||||
|         req["srch"] = 1 | ||||
|     elif ar.dr: | ||||
|         req["replace"] = True | ||||
| 
 | ||||
|     headers = {"Content-Type": "text/plain"}  # wtf ed | ||||
|     headers = {"Content-Type": "text/plain"}  # <=1.5.1 compat | ||||
|     if pw: | ||||
|         headers["Cookie"] = "=".join(["cppwd", pw]) | ||||
| 
 | ||||
|     file.recheck = False | ||||
|     if file.url: | ||||
|         url = file.url | ||||
|     elif b"/" in file.rel: | ||||
|         url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace") | ||||
| 
 | ||||
|     while True: | ||||
|         sc = 600 | ||||
|         txt = "" | ||||
|         try: | ||||
|             r = req_ses.post(url, headers=headers, json=req) | ||||
|             break | ||||
|             sc = r.status_code | ||||
|             txt = r.text | ||||
|             if sc < 400: | ||||
|                 break | ||||
| 
 | ||||
|             raise Exception("http {0}: {1}".format(sc, txt)) | ||||
| 
 | ||||
|         except Exception as ex: | ||||
|             em = str(ex).split("SSLError(")[-1] | ||||
|             em = str(ex).split("SSLError(")[-1].split("\nURL: ")[0].strip() | ||||
| 
 | ||||
|             if ( | ||||
|                 sc == 422 | ||||
|                 or "<pre>partial upload exists at a different" in txt | ||||
|                 or "<pre>source file busy; please try again" in txt | ||||
|             ): | ||||
|                 file.recheck = True | ||||
|                 return [], False | ||||
|             elif sc == 409 or "<pre>upload rejected, file already exists" in txt: | ||||
|                 return [], False | ||||
|             elif "<pre>you don't have " in txt: | ||||
|                 raise | ||||
| 
 | ||||
|             eprint("handshake failed, retrying: {0}\n  {1}\n\n".format(file.name, em)) | ||||
|             time.sleep(1) | ||||
| 
 | ||||
| @@ -525,8 +615,8 @@ def handshake(req_ses, url, file, pw, search): | ||||
|     return r["hash"], r["sprs"] | ||||
| 
 | ||||
| 
 | ||||
| def upload(req_ses, file, cid, pw): | ||||
|     # type: (requests.Session, File, str, any) -> None | ||||
| def upload(file, cid, pw, stats): | ||||
|     # type: (File, str, str, str) -> None | ||||
|     """upload one specific chunk, `cid` (a chunk-hash)""" | ||||
| 
 | ||||
|     headers = { | ||||
| @@ -534,6 +624,10 @@ def upload(req_ses, file, cid, pw): | ||||
|         "X-Up2k-Wark": file.wark, | ||||
|         "Content-Type": "application/octet-stream", | ||||
|     } | ||||
| 
 | ||||
|     if stats: | ||||
|         headers["X-Up2k-Stat"] = stats | ||||
| 
 | ||||
|     if pw: | ||||
|         headers["Cookie"] = "=".join(["cppwd", pw]) | ||||
| 
 | ||||
| @@ -548,35 +642,22 @@ def upload(req_ses, file, cid, pw): | ||||
|         f.f.close() | ||||
| 
 | ||||
| 
 | ||||
| class Daemon(threading.Thread): | ||||
|     def __init__(self, *a, **ka): | ||||
|         threading.Thread.__init__(self, *a, **ka) | ||||
|         self.daemon = True | ||||
| 
 | ||||
| 
 | ||||
| class Ctl(object): | ||||
|     """ | ||||
|     this will be the coordinator which runs everything in parallel | ||||
|     (hashing, handshakes, uploads)  but right now it's p dumb | ||||
|     the coordinator which runs everything in parallel | ||||
|     (hashing, handshakes, uploads) | ||||
|     """ | ||||
| 
 | ||||
|     def __init__(self, ar): | ||||
|         self.ar = ar | ||||
|         ar.files = [ | ||||
|             os.path.abspath(os.path.realpath(x.encode("utf-8"))) | ||||
|             + (x[-1:] if x[-1:] == os.sep else "").encode("utf-8") | ||||
|             for x in ar.files | ||||
|         ] | ||||
|         ar.url = ar.url.rstrip("/") + "/" | ||||
|         if "://" not in ar.url: | ||||
|             ar.url = "http://" + ar.url | ||||
| 
 | ||||
|     def _scan(self): | ||||
|         ar = self.ar | ||||
|         eprint("\nscanning {0} locations\n".format(len(ar.files))) | ||||
| 
 | ||||
|         nfiles = 0 | ||||
|         nbytes = 0 | ||||
|         err = [] | ||||
|         for _, _, inf in walkdirs(err, ar.files): | ||||
|             if stat.S_ISDIR(inf.st_mode): | ||||
|                 continue | ||||
| 
 | ||||
|             nfiles += 1 | ||||
|             nbytes += inf.st_size | ||||
| 
 | ||||
| @@ -598,8 +679,16 @@ class Ctl(object): | ||||
|                 return | ||||
| 
 | ||||
|         eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes))) | ||||
|         self.nfiles = nfiles | ||||
|         self.nbytes = nbytes | ||||
|         return nfiles, nbytes | ||||
| 
 | ||||
|     def __init__(self, ar, stats=None): | ||||
|         self.ok = False | ||||
|         self.ar = ar | ||||
|         self.stats = stats or self._scan() | ||||
|         if not self.stats: | ||||
|             return | ||||
| 
 | ||||
|         self.nfiles, self.nbytes = self.stats | ||||
| 
 | ||||
|         if ar.td: | ||||
|             requests.packages.urllib3.disable_warnings() | ||||
| @@ -608,6 +697,8 @@ class Ctl(object): | ||||
|             req_ses.verify = ar.te | ||||
| 
 | ||||
|         self.filegen = walkdirs([], ar.files) | ||||
|         self.recheck = []  # type: list[File] | ||||
| 
 | ||||
|         if ar.safe: | ||||
|             self._safe() | ||||
|         else: | ||||
| @@ -626,10 +717,10 @@ class Ctl(object): | ||||
|             self.t0 = time.time() | ||||
|             self.t0_up = None | ||||
|             self.spd = None | ||||
|             self.eta = "99:99:99" | ||||
| 
 | ||||
|             self.mutex = threading.Lock() | ||||
|             self.q_handshake = Queue()  # type: Queue[File] | ||||
|             self.q_recheck = Queue()  # type: Queue[File]  # partial upload exists [...] | ||||
|             self.q_upload = Queue()  # type: Queue[tuple[File, str]] | ||||
| 
 | ||||
|             self.st_hash = [None, "(idle, starting...)"]  # type: tuple[File, int] | ||||
| @@ -639,10 +730,15 @@ class Ctl(object): | ||||
| 
 | ||||
|             self._fancy() | ||||
| 
 | ||||
|         self.ok = True | ||||
| 
 | ||||
|     def _safe(self): | ||||
|         """minimal basic slow boring fallback codepath""" | ||||
|         search = self.ar.s | ||||
|         for nf, (top, rel, inf) in enumerate(self.filegen): | ||||
|             if stat.S_ISDIR(inf.st_mode) or not rel: | ||||
|                 continue | ||||
| 
 | ||||
|             file = File(top, rel, inf.st_size, inf.st_mtime) | ||||
|             upath = file.abs.decode("utf-8", "replace") | ||||
| 
 | ||||
| @@ -652,7 +748,7 @@ class Ctl(object): | ||||
|             burl = self.ar.url[:12] + self.ar.url[8:].split("/")[0] + "/" | ||||
|             while True: | ||||
|                 print("  hs...") | ||||
|                 hs, _ = handshake(req_ses, self.ar.url, file, self.ar.a, search) | ||||
|                 hs, _ = handshake(self.ar, file, search) | ||||
|                 if search: | ||||
|                     if hs: | ||||
|                         for hit in hs: | ||||
| @@ -669,19 +765,29 @@ class Ctl(object): | ||||
|                 ncs = len(hs) | ||||
|                 for nc, cid in enumerate(hs): | ||||
|                     print("  {0} up {1}".format(ncs - nc, cid)) | ||||
|                     upload(req_ses, file, cid, self.ar.a) | ||||
|                     stats = "{0}/0/0/{1}".format(nf, self.nfiles - nf) | ||||
|                     upload(file, cid, self.ar.a, stats) | ||||
| 
 | ||||
|             print("  ok!") | ||||
|             if file.recheck: | ||||
|                 self.recheck.append(file) | ||||
| 
 | ||||
|         if not self.recheck: | ||||
|             return | ||||
| 
 | ||||
|         eprint("finalizing {0} duplicate files".format(len(self.recheck))) | ||||
|         for file in self.recheck: | ||||
|             handshake(self.ar, file, search) | ||||
| 
 | ||||
|     def _fancy(self): | ||||
|         if VT100: | ||||
|         if VT100 and not self.ar.ns: | ||||
|             atexit.register(self.cleanup_vt100) | ||||
|             ss.scroll_region(3) | ||||
| 
 | ||||
|         Daemon(target=self.hasher).start() | ||||
|         Daemon(self.hasher) | ||||
|         for _ in range(self.ar.j): | ||||
|             Daemon(target=self.handshaker).start() | ||||
|             Daemon(target=self.uploader).start() | ||||
|             Daemon(self.handshaker) | ||||
|             Daemon(self.uploader) | ||||
| 
 | ||||
|         idles = 0 | ||||
|         while idles < 3: | ||||
| @@ -698,7 +804,7 @@ class Ctl(object): | ||||
|                 else: | ||||
|                     idles = 0 | ||||
| 
 | ||||
|             if VT100: | ||||
|             if VT100 and not self.ar.ns: | ||||
|                 maxlen = ss.w - len(str(self.nfiles)) - 14 | ||||
|                 txt = "\033[s\033[{0}H".format(ss.g) | ||||
|                 for y, k, st, f in [ | ||||
| @@ -735,14 +841,21 @@ class Ctl(object): | ||||
|                 eta = (self.nbytes - self.up_b) / (spd + 1) | ||||
| 
 | ||||
|             spd = humansize(spd) | ||||
|             eta = str(datetime.timedelta(seconds=int(eta))) | ||||
|             self.eta = str(datetime.timedelta(seconds=int(eta))) | ||||
|             sleft = humansize(self.nbytes - self.up_b) | ||||
|             nleft = self.nfiles - self.up_f | ||||
|             tail = "\033[K\033[u" if VT100 else "\r" | ||||
|             tail = "\033[K\033[u" if VT100 and not self.ar.ns else "\r" | ||||
| 
 | ||||
|             t = "{0} eta @ {1}/s, {2}, {3}# left".format(eta, spd, sleft, nleft) | ||||
|             t = "{0} eta @ {1}/s, {2}, {3}# left".format(self.eta, spd, sleft, nleft) | ||||
|             eprint(txt + "\033]0;{0}\033\\\r{0}{1}".format(t, tail)) | ||||
| 
 | ||||
|         if not self.recheck: | ||||
|             return | ||||
| 
 | ||||
|         eprint("finalizing {0} duplicate files".format(len(self.recheck))) | ||||
|         for file in self.recheck: | ||||
|             handshake(self.ar, file, False) | ||||
| 
 | ||||
|     def cleanup_vt100(self): | ||||
|         ss.scroll_region(None) | ||||
|         eprint("\033[J\033]0;\033\\") | ||||
| @@ -754,8 +867,10 @@ class Ctl(object): | ||||
|         prd = None | ||||
|         ls = {} | ||||
|         for top, rel, inf in self.filegen: | ||||
|             if self.ar.z: | ||||
|                 rd = os.path.dirname(rel) | ||||
|             isdir = stat.S_ISDIR(inf.st_mode) | ||||
|             if self.ar.z or self.ar.drd: | ||||
|                 rd = rel if isdir else os.path.dirname(rel) | ||||
|                 srd = rd.decode("utf-8", "replace").replace("\\", "/") | ||||
|                 if prd != rd: | ||||
|                     prd = rd | ||||
|                     headers = {} | ||||
| @@ -764,19 +879,37 @@ class Ctl(object): | ||||
| 
 | ||||
|                     ls = {} | ||||
|                     try: | ||||
|                         print("      ls ~{0}".format(rd.decode("utf-8", "replace"))) | ||||
|                         r = req_ses.get( | ||||
|                             self.ar.url.encode("utf-8") + quotep(rd) + b"?ls", | ||||
|                             headers=headers, | ||||
|                         ) | ||||
|                         for f in r.json()["files"]: | ||||
|                             rfn = f["href"].split("?")[0].encode("utf-8", "replace") | ||||
|                             ls[unquote(rfn)] = f | ||||
|                     except: | ||||
|                         print("   mkdir ~{0}".format(rd.decode("utf-8", "replace"))) | ||||
|                         print("      ls ~{0}".format(srd)) | ||||
|                         zb = self.ar.url.encode("utf-8") | ||||
|                         zb += quotep(rd.replace(b"\\", b"/")) | ||||
|                         r = req_ses.get(zb + b"?ls<&dots", headers=headers) | ||||
|                         if not r: | ||||
|                             raise Exception("HTTP {0}".format(r.status_code)) | ||||
| 
 | ||||
|                         j = r.json() | ||||
|                         for f in j["dirs"] + j["files"]: | ||||
|                             rfn = f["href"].split("?")[0].rstrip("/") | ||||
|                             ls[unquote(rfn.encode("utf-8", "replace"))] = f | ||||
|                     except Exception as ex: | ||||
|                         print("   mkdir ~{0}  ({1})".format(srd, ex)) | ||||
| 
 | ||||
|                     if self.ar.drd: | ||||
|                         dp = os.path.join(top, rd) | ||||
|                         lnodes = set(os.listdir(dp)) | ||||
|                         bnames = [x for x in ls if x not in lnodes] | ||||
|                         if bnames: | ||||
|                             vpath = self.ar.url.split("://")[-1].split("/", 1)[-1] | ||||
|                             names = [x.decode("utf-8", "replace") for x in bnames] | ||||
|                             locs = [vpath + srd + "/" + x for x in names] | ||||
|                             print("DELETING ~{0}/#{1}".format(srd, len(names))) | ||||
|                             req_ses.post(self.ar.url + "?delete", json=locs) | ||||
| 
 | ||||
|             if isdir: | ||||
|                 continue | ||||
| 
 | ||||
|             if self.ar.z: | ||||
|                 rf = ls.get(os.path.basename(rel), None) | ||||
|                 if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 1: | ||||
|                 if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 2: | ||||
|                     self.nfiles -= 1 | ||||
|                     self.nbytes -= inf.st_size | ||||
|                     continue | ||||
| @@ -785,15 +918,17 @@ class Ctl(object): | ||||
|             while True: | ||||
|                 with self.mutex: | ||||
|                     if ( | ||||
|                         self.hash_b - self.up_b < 1024 * 1024 * 128 | ||||
|                         and self.hash_c - self.up_c < 64 | ||||
|                         and ( | ||||
|                             not self.ar.nh | ||||
|                             or ( | ||||
|                                 self.q_upload.empty() | ||||
|                                 and self.q_handshake.empty() | ||||
|                                 and not self.uploader_busy | ||||
|                             ) | ||||
|                         self.hash_f - self.up_f == 1 | ||||
|                         or ( | ||||
|                             self.hash_b - self.up_b < 1024 * 1024 * 1024 | ||||
|                             and self.hash_c - self.up_c < 512 | ||||
|                         ) | ||||
|                     ) and ( | ||||
|                         not self.ar.nh | ||||
|                         or ( | ||||
|                             self.q_upload.empty() | ||||
|                             and self.q_handshake.empty() | ||||
|                             and not self.uploader_busy | ||||
|                         ) | ||||
|                     ): | ||||
|                         break | ||||
| @@ -813,16 +948,10 @@ class Ctl(object): | ||||
| 
 | ||||
|     def handshaker(self): | ||||
|         search = self.ar.s | ||||
|         q = self.q_handshake | ||||
|         burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/" | ||||
|         while True: | ||||
|             file = q.get() | ||||
|             file = self.q_handshake.get() | ||||
|             if not file: | ||||
|                 if q == self.q_handshake: | ||||
|                     q = self.q_recheck | ||||
|                     q.put(None) | ||||
|                     continue | ||||
| 
 | ||||
|                 self.q_upload.put(None) | ||||
|                 break | ||||
| 
 | ||||
| @@ -830,16 +959,10 @@ class Ctl(object): | ||||
|                 self.handshaker_busy += 1 | ||||
| 
 | ||||
|             upath = file.abs.decode("utf-8", "replace") | ||||
|             if not VT100: | ||||
|                 upath = upath.lstrip("\\?") | ||||
| 
 | ||||
|             try: | ||||
|                 hs, sprs = handshake(req_ses, self.ar.url, file, self.ar.a, search) | ||||
|             except Exception as ex: | ||||
|                 if q == self.q_handshake and "<pre>partial upload exists" in str(ex): | ||||
|                     self.q_recheck.put(file) | ||||
|                     hs = [] | ||||
|                 else: | ||||
|                     raise | ||||
| 
 | ||||
|             hs, sprs = handshake(self.ar, file, search) | ||||
|             if search: | ||||
|                 if hs: | ||||
|                     for hit in hs: | ||||
| @@ -856,8 +979,11 @@ class Ctl(object): | ||||
| 
 | ||||
|                 continue | ||||
| 
 | ||||
|             if file.recheck: | ||||
|                 self.recheck.append(file) | ||||
| 
 | ||||
|             with self.mutex: | ||||
|                 if not sprs and not self.serialized: | ||||
|                 if hs and not sprs and not self.serialized: | ||||
|                     t = "server filesystem does not support sparse files; serializing uploads\n" | ||||
|                     eprint(t) | ||||
|                     self.serialized = True | ||||
| @@ -869,6 +995,9 @@ class Ctl(object): | ||||
|                     self.up_c += len(file.cids) - file.up_c | ||||
|                     self.up_b += file.size - file.up_b | ||||
| 
 | ||||
|                     if not file.recheck: | ||||
|                         self.up_done(file) | ||||
| 
 | ||||
|                 if hs and file.up_c: | ||||
|                     # some chunks failed | ||||
|                     self.up_c -= len(hs) | ||||
| @@ -898,12 +1027,24 @@ class Ctl(object): | ||||
|                 self.uploader_busy += 1 | ||||
|                 self.t0_up = self.t0_up or time.time() | ||||
| 
 | ||||
|             zs = "{0}/{1}/{2}/{3} {4}/{5} {6}" | ||||
|             stats = zs.format( | ||||
|                 self.up_f, | ||||
|                 len(self.recheck), | ||||
|                 self.uploader_busy, | ||||
|                 self.nfiles - self.up_f, | ||||
|                 int(self.nbytes / (1024 * 1024)), | ||||
|                 int((self.nbytes - self.up_b) / (1024 * 1024)), | ||||
|                 self.eta, | ||||
|             ) | ||||
| 
 | ||||
|             file, cid = task | ||||
|             try: | ||||
|                 upload(req_ses, file, cid, self.ar.a) | ||||
|             except: | ||||
|                 eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8])) | ||||
|                 pass  # handshake will fix it | ||||
|                 upload(file, cid, self.ar.a, stats) | ||||
|             except Exception as ex: | ||||
|                 t = "upload failed, retrying: {0} #{1} ({2})\n" | ||||
|                 eprint(t.format(file.name, cid[:8], ex)) | ||||
|                 # handshake will fix it | ||||
| 
 | ||||
|             with self.mutex: | ||||
|                 sz = file.kchunks[cid][1] | ||||
| @@ -919,6 +1060,10 @@ class Ctl(object): | ||||
|                 self.up_c += 1 | ||||
|                 self.uploader_busy -= 1 | ||||
| 
 | ||||
|     def up_done(self, file): | ||||
|         if self.ar.dl: | ||||
|             os.unlink(file.abs) | ||||
| 
 | ||||
| 
 | ||||
| class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter): | ||||
|     pass | ||||
| @@ -929,11 +1074,18 @@ def main(): | ||||
|     if not VT100: | ||||
|         os.system("rem")  # enables colors | ||||
| 
 | ||||
|     cores = os.cpu_count() if hasattr(os, "cpu_count") else 4 | ||||
|     cores = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2 | ||||
|     hcores = min(cores, 3)  # 4% faster than 4+ on py3.9 @ r5-4500U | ||||
| 
 | ||||
|     ver = "{0}, v{1}".format(S_BUILD_DT, S_VERSION) | ||||
|     if "--version" in sys.argv: | ||||
|         print(ver) | ||||
|         return | ||||
| 
 | ||||
|     sys.argv = [x for x in sys.argv if x != "--ws"] | ||||
| 
 | ||||
|     # fmt: off | ||||
|     ap = app = argparse.ArgumentParser(formatter_class=APF, epilog=""" | ||||
|     ap = app = argparse.ArgumentParser(formatter_class=APF, description="copyparty up2k uploader / filesearch tool, " + ver, epilog=""" | ||||
| NOTE: | ||||
| source file/folder selection uses rsync syntax, meaning that: | ||||
|   "foo" uploads the entire folder to URL/foo/ | ||||
| @@ -943,21 +1095,92 @@ source file/folder selection uses rsync syntax, meaning that: | ||||
|     ap.add_argument("url", type=unicode, help="server url, including destination folder") | ||||
|     ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process") | ||||
|     ap.add_argument("-v", action="store_true", help="verbose") | ||||
|     ap.add_argument("-a", metavar="PASSWORD", help="password") | ||||
|     ap.add_argument("-a", metavar="PASSWORD", help="password or $filepath") | ||||
|     ap.add_argument("-s", action="store_true", help="file-search (disables upload)") | ||||
|     ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible") | ||||
|     ap.add_argument("--version", action="store_true", help="show version and exit") | ||||
| 
 | ||||
|     ap = app.add_argument_group("compatibility") | ||||
|     ap.add_argument("--cls", action="store_true", help="clear screen before start") | ||||
|     ap.add_argument("--rh", type=int, metavar="TRIES", default=0, help="resolve server hostname before upload (good for buggy networks, but TLS certs will break)") | ||||
| 
 | ||||
|     ap = app.add_argument_group("folder sync") | ||||
|     ap.add_argument("--dl", action="store_true", help="delete local files after uploading") | ||||
|     ap.add_argument("--dr", action="store_true", help="delete remote files which don't exist locally") | ||||
|     ap.add_argument("--drd", action="store_true", help="delete remote files during upload instead of afterwards; reduces peak disk space usage, but will reupload instead of detecting renames") | ||||
| 
 | ||||
|     ap = app.add_argument_group("performance tweaks") | ||||
|     ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections") | ||||
|     ap.add_argument("-J", type=int, metavar="THREADS", default=hcores, help="num cpu-cores to use for hashing; set 0 or 1 for single-core hashing") | ||||
|     ap.add_argument("-nh", action="store_true", help="disable hashing while uploading") | ||||
|     ap.add_argument("-ns", action="store_true", help="no status panel (for slow consoles)") | ||||
|     ap.add_argument("--safe", action="store_true", help="use simple fallback approach") | ||||
|     ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)") | ||||
| 
 | ||||
|     ap = app.add_argument_group("tls") | ||||
|     ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify") | ||||
|     ap.add_argument("-td", action="store_true", help="disable certificate check") | ||||
|     # fmt: on | ||||
| 
 | ||||
|     Ctl(app.parse_args()) | ||||
|     try: | ||||
|         ar = app.parse_args() | ||||
|     finally: | ||||
|         if EXE and not sys.argv[1:]: | ||||
|             eprint("*** hit enter to exit ***") | ||||
|             try: | ||||
|                 input() | ||||
|             except: | ||||
|                 pass | ||||
| 
 | ||||
|     if ar.drd: | ||||
|         ar.dr = True | ||||
| 
 | ||||
|     for k in "dl dr drd".split(): | ||||
|         errs = [] | ||||
|         if ar.safe and getattr(ar, k): | ||||
|             errs.append(k) | ||||
| 
 | ||||
|         if errs: | ||||
|             raise Exception("--safe is incompatible with " + str(errs)) | ||||
| 
 | ||||
|     ar.files = [ | ||||
|         os.path.abspath(os.path.realpath(x.encode("utf-8"))) | ||||
|         + (x[-1:] if x[-1:] in ("\\", "/") else "").encode("utf-8") | ||||
|         for x in ar.files | ||||
|     ] | ||||
| 
 | ||||
|     ar.url = ar.url.rstrip("/") + "/" | ||||
|     if "://" not in ar.url: | ||||
|         ar.url = "http://" + ar.url | ||||
| 
 | ||||
|     if ar.a and ar.a.startswith("$"): | ||||
|         fn = ar.a[1:] | ||||
|         print("reading password from file [{0}]".format(fn)) | ||||
|         with open(fn, "rb") as f: | ||||
|             ar.a = f.read().decode("utf-8").strip() | ||||
| 
 | ||||
|     for n in range(ar.rh): | ||||
|         try: | ||||
|             ar.url = undns(ar.url) | ||||
|             break | ||||
|         except KeyboardInterrupt: | ||||
|             raise | ||||
|         except: | ||||
|             if n > ar.rh - 2: | ||||
|                 raise | ||||
| 
 | ||||
|     if ar.cls: | ||||
|         eprint("\x1b\x5b\x48\x1b\x5b\x32\x4a\x1b\x5b\x33\x4a", end="") | ||||
| 
 | ||||
|     ctl = Ctl(ar) | ||||
| 
 | ||||
|     if ar.dr and not ar.drd and ctl.ok: | ||||
|         print("\npass 2/2: delete") | ||||
|         ar.drd = True | ||||
|         ar.z = True | ||||
|         ctl = Ctl(ar, ctl.stats) | ||||
| 
 | ||||
|     sys.exit(0 if ctl.ok else 1) | ||||
| 
 | ||||
| 
 | ||||
| if __name__ == "__main__": | ||||
							
								
								
									
										99
									
								
								bin/unforget.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										99
									
								
								bin/unforget.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,99 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| """ | ||||
| unforget.py: rebuild db from logfiles | ||||
| 2022-09-07, v0.1, ed <irc.rizon.net>, MIT-Licensed | ||||
| https://github.com/9001/copyparty/blob/hovudstraum/bin/unforget.py | ||||
|  | ||||
| only makes sense if running copyparty with --no-forget | ||||
| (e.g. immediately shifting uploads to other storage) | ||||
|  | ||||
| usage: | ||||
|   xz -d < log | ./unforget.py .hist/up2k.db | ||||
|  | ||||
| """ | ||||
|  | ||||
| import re | ||||
| import sys | ||||
| import json | ||||
| import base64 | ||||
| import sqlite3 | ||||
| import argparse | ||||
|  | ||||
|  | ||||
| FS_ENCODING = sys.getfilesystemencoding() | ||||
|  | ||||
|  | ||||
| class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter): | ||||
|     pass | ||||
|  | ||||
|  | ||||
| mem_cur = sqlite3.connect(":memory:").cursor() | ||||
| mem_cur.execute(r"create table a (b text)") | ||||
|  | ||||
|  | ||||
| def s3enc(rd: str, fn: str) -> tuple[str, str]: | ||||
|     ret: list[str] = [] | ||||
|     for v in [rd, fn]: | ||||
|         try: | ||||
|             mem_cur.execute("select * from a where b = ?", (v,)) | ||||
|             ret.append(v) | ||||
|         except: | ||||
|             wtf8 = v.encode(FS_ENCODING, "surrogateescape") | ||||
|             ret.append("//" + base64.urlsafe_b64encode(wtf8).decode("ascii")) | ||||
|  | ||||
|     return ret[0], ret[1] | ||||
|  | ||||
|  | ||||
| def main(): | ||||
|     ap = argparse.ArgumentParser() | ||||
|     ap.add_argument("db") | ||||
|     ar = ap.parse_args() | ||||
|  | ||||
|     db = sqlite3.connect(ar.db).cursor() | ||||
|     ptn_times = re.compile(r"no more chunks, setting times \(([0-9]+)") | ||||
|     at = 0 | ||||
|     ctr = 0 | ||||
|  | ||||
|     for ln in [x.decode("utf-8", "replace").rstrip() for x in sys.stdin.buffer]: | ||||
|         if "no more chunks, setting times (" in ln: | ||||
|             m = ptn_times.search(ln) | ||||
|             if m: | ||||
|                 at = int(m.group(1)) | ||||
|  | ||||
|         if '"hash": []' in ln: | ||||
|             try: | ||||
|                 ofs = ln.find("{") | ||||
|                 j = json.loads(ln[ofs:]) | ||||
|             except: | ||||
|                 pass | ||||
|  | ||||
|             w = j["wark"] | ||||
|             if db.execute("select w from up where w = ?", (w,)).fetchone(): | ||||
|                 continue | ||||
|  | ||||
|             # PYTHONPATH=/home/ed/dev/copyparty/ python3 -m copyparty -e2dsa  -v foo:foo:rwmd,ed -aed:wark --no-forget | ||||
|             # 05:34:43.845 127.0.0.1 42496       no more chunks, setting times (1662528883, 1658001882) | ||||
|             # 05:34:43.863 127.0.0.1 42496       {"name": "f\"2", "purl": "/foo/bar/baz/", "size": 1674, "lmod": 1658001882, "sprs": true, "hash": [], "wark": "LKIWpp2jEAh9dH3fu-DobuURFGEKlODXDGTpZ1otMhUg"} | ||||
|             # |                      w                       |     mt     |  sz  |   rd    | fn  |    ip     |     at     | | ||||
|             # | LKIWpp2jEAh9dH3fu-DobuURFGEKlODXDGTpZ1otMhUg | 1658001882 | 1674 | bar/baz | f"2 | 127.0.0.1 | 1662528883 | | ||||
|  | ||||
|             rd, fn = s3enc(j["purl"].strip("/"), j["name"]) | ||||
|             ip = ln.split(" ")[1].split("m")[-1] | ||||
|  | ||||
|             q = "insert into up values (?,?,?,?,?,?,?)" | ||||
|             v = (w, int(j["lmod"]), int(j["size"]), rd, fn, ip, at) | ||||
|             db.execute(q, v) | ||||
|             ctr += 1 | ||||
|             if ctr % 1024 == 1023: | ||||
|                 print(f"{ctr} commit...") | ||||
|                 db.connection.commit() | ||||
|  | ||||
|     if ctr: | ||||
|         db.connection.commit() | ||||
|  | ||||
|     print(f"unforgot {ctr} files") | ||||
|  | ||||
|  | ||||
| if __name__ == "__main__": | ||||
|     main() | ||||
| @@ -27,7 +27,13 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share | ||||
|  | ||||
| ### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg) | ||||
| * disables thumbnails and folder-type detection in windows explorer | ||||
| * makes it way faster (especially for slow/networked locations (such as copyparty-fuse)) | ||||
| * makes it way faster (especially for slow/networked locations (such as partyfuse)) | ||||
|  | ||||
| ### [`webdav-cfg.reg`](webdav-cfg.bat) | ||||
| * improves the native webdav support in windows; | ||||
|   * removes the 47.6 MiB filesize limit when downloading from webdav | ||||
|   * optionally enables webdav basic-auth over plaintext http | ||||
|   * optionally helps disable wpad, removing the 10sec latency | ||||
|  | ||||
| ### [`cfssl.sh`](cfssl.sh) | ||||
| * creates CA and server certificates using cfssl | ||||
|   | ||||
							
								
								
									
										14
									
								
								contrib/apache/copyparty.conf
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										14
									
								
								contrib/apache/copyparty.conf
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,14 @@ | ||||
| # when running copyparty behind a reverse proxy, | ||||
| # the following arguments are recommended: | ||||
| # | ||||
| #   -i 127.0.0.1    only accept connections from nginx | ||||
| # | ||||
| # if you are doing location-based proxying (such as `/stuff` below) | ||||
| # you must run copyparty with --rp-loc=stuff | ||||
| # | ||||
| # on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1 | ||||
|  | ||||
| LoadModule proxy_module modules/mod_proxy.so | ||||
| ProxyPass "/stuff" "http://127.0.0.1:3923/stuff" | ||||
| # do not specify ProxyPassReverse | ||||
| RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME} | ||||
| @@ -1,14 +1,44 @@ | ||||
| #!/bin/bash | ||||
| set -e | ||||
|  | ||||
| cat >/dev/null <<'EOF' | ||||
|  | ||||
| NOTE: copyparty is now able to do this automatically; | ||||
| however you may wish to use this script instead if | ||||
| you have specific needs (or if copyparty breaks) | ||||
|  | ||||
| this script generates a new self-signed TLS certificate and | ||||
| replaces the default insecure one that comes with copyparty | ||||
|  | ||||
| as it is trivial to impersonate a copyparty server using the | ||||
| default certificate, it is highly recommended to do this | ||||
|  | ||||
| this will create a self-signed CA, and a Server certificate | ||||
| which gets signed by that CA -- you can run it multiple times | ||||
| with different server-FQDNs / IPs to create additional certs | ||||
| for all your different servers / (non-)copyparty services | ||||
|  | ||||
| EOF | ||||
|  | ||||
|  | ||||
| # ca-name and server-fqdn | ||||
| ca_name="$1" | ||||
| srv_fqdn="$2" | ||||
|  | ||||
| [ -z "$srv_fqdn" ] && { | ||||
| 	echo "need arg 1: ca name" | ||||
| 	echo "need arg 2: server fqdn and/or IPs, comma-separated" | ||||
| 	echo "optional arg 3: if set, write cert into copyparty cfg" | ||||
| [ -z "$srv_fqdn" ] && { cat <<'EOF' | ||||
| need arg 1: ca name | ||||
| need arg 2: server fqdn and/or IPs, comma-separated | ||||
| optional arg 3: if set, write cert into copyparty cfg | ||||
|  | ||||
| example: | ||||
|   ./cfssl.sh PartyCo partybox.local y | ||||
| EOF | ||||
| 	exit 1 | ||||
| } | ||||
|  | ||||
|  | ||||
| command -v cfssljson 2>/dev/null || { | ||||
| 	echo please install cfssl and try again | ||||
| 	exit 1 | ||||
| } | ||||
|  | ||||
| @@ -59,12 +89,14 @@ show() { | ||||
| } | ||||
| show ca.pem | ||||
| show "$srv_fqdn.pem" | ||||
|  | ||||
| echo | ||||
| echo "successfully generated new certificates" | ||||
|  | ||||
| # write cert into copyparty config | ||||
| [ -z "$3" ] || { | ||||
| 	mkdir -p ~/.config/copyparty | ||||
| 	cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem  | ||||
| 	echo "successfully replaced copyparty certificate" | ||||
| } | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -3,7 +3,7 @@ | ||||
|  | ||||
| <head> | ||||
| 	<meta charset="utf-8"> | ||||
| 	<title>⇆🎉 redirect</title> | ||||
| 	<title>💾🎉 redirect</title> | ||||
| 	<meta http-equiv="X-UA-Compatible" content="IE=edge"> | ||||
| 	<style> | ||||
|  | ||||
|   | ||||
							
								
								
									
										
											BIN
										
									
								
								contrib/ios/upload-to-copyparty.shortcut
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								contrib/ios/upload-to-copyparty.shortcut
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| @@ -1,15 +1,16 @@ | ||||
| # when running copyparty behind a reverse proxy, | ||||
| # the following arguments are recommended: | ||||
| # | ||||
| #   -nc 512         important, see next paragraph | ||||
| #   --http-only     lower latency on initial connection | ||||
| #   -i 127.0.0.1    only accept connections from nginx | ||||
| # | ||||
| # -nc must match or exceed the webserver's max number of concurrent clients; | ||||
| # copyparty default is 1024 if OS permits it (see "max clients:" on startup), | ||||
| # nginx default is 512  (worker_processes 1, worker_connections 512) | ||||
| # | ||||
| # you may also consider adding -j0 for CPU-intensive configurations | ||||
| # (not that i can really think of any good examples) | ||||
| # (5'000 requests per second, or 20gbps upload/download in parallel) | ||||
| # | ||||
| # on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1 | ||||
|  | ||||
| upstream cpp { | ||||
| 	server 127.0.0.1:3923; | ||||
| @@ -37,3 +38,9 @@ server { | ||||
| 		proxy_set_header   Connection        "Keep-Alive"; | ||||
| 	} | ||||
| } | ||||
|  | ||||
| # default client_max_body_size (1M) blocks uploads larger than 256 MiB | ||||
| client_max_body_size 1024M; | ||||
| client_header_timeout 610m; | ||||
| client_body_timeout 610m; | ||||
| send_timeout 610m; | ||||
|   | ||||
							
								
								
									
										281
									
								
								contrib/nixos/modules/copyparty.nix
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										281
									
								
								contrib/nixos/modules/copyparty.nix
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,281 @@ | ||||
| { config, pkgs, lib, ... }: | ||||
|  | ||||
| with lib; | ||||
|  | ||||
| let | ||||
|   mkKeyValue = key: value: | ||||
|     if value == true then | ||||
|     # sets with a true boolean value are coerced to just the key name | ||||
|       key | ||||
|     else if value == false then | ||||
|     # or omitted completely when false | ||||
|       "" | ||||
|     else | ||||
|       (generators.mkKeyValueDefault { inherit mkValueString; } ": " key value); | ||||
|  | ||||
|   mkAttrsString = value: (generators.toKeyValue { inherit mkKeyValue; } value); | ||||
|  | ||||
|   mkValueString = value: | ||||
|     if isList value then | ||||
|       (concatStringsSep ", " (map mkValueString value)) | ||||
|     else if isAttrs value then | ||||
|       "\n" + (mkAttrsString value) | ||||
|     else | ||||
|       (generators.mkValueStringDefault { } value); | ||||
|  | ||||
|   mkSectionName = value: "[" + (escape [ "[" "]" ] value) + "]"; | ||||
|  | ||||
|   mkSection = name: attrs: '' | ||||
|     ${mkSectionName name} | ||||
|     ${mkAttrsString attrs} | ||||
|   ''; | ||||
|  | ||||
|   mkVolume = name: attrs: '' | ||||
|     ${mkSectionName name} | ||||
|     ${attrs.path} | ||||
|     ${mkAttrsString { | ||||
|       accs = attrs.access; | ||||
|       flags = attrs.flags; | ||||
|     }} | ||||
|   ''; | ||||
|  | ||||
|   passwordPlaceholder = name: "{{password-${name}}}"; | ||||
|  | ||||
|   accountsWithPlaceholders = mapAttrs (name: attrs: passwordPlaceholder name); | ||||
|  | ||||
|   configStr = '' | ||||
|     ${mkSection "global" cfg.settings} | ||||
|     ${mkSection "accounts" (accountsWithPlaceholders cfg.accounts)} | ||||
|     ${concatStringsSep "\n" (mapAttrsToList mkVolume cfg.volumes)} | ||||
|   ''; | ||||
|  | ||||
|   name = "copyparty"; | ||||
|   cfg = config.services.copyparty; | ||||
|   configFile = pkgs.writeText "${name}.conf" configStr; | ||||
|   runtimeConfigPath = "/run/${name}/${name}.conf"; | ||||
|   home = "/var/lib/${name}"; | ||||
|   defaultShareDir = "${home}/data"; | ||||
| in { | ||||
|   options.services.copyparty = { | ||||
|     enable = mkEnableOption "web-based file manager"; | ||||
|  | ||||
|     package = mkOption { | ||||
|       type = types.package; | ||||
|       default = pkgs.copyparty; | ||||
|       defaultText = "pkgs.copyparty"; | ||||
|       description = '' | ||||
|         Package of the application to run, exposed for overriding purposes. | ||||
|       ''; | ||||
|     }; | ||||
|  | ||||
|     openFilesLimit = mkOption { | ||||
|       default = 4096; | ||||
|       type = types.either types.int types.str; | ||||
|       description = "Number of files to allow copyparty to open."; | ||||
|     }; | ||||
|  | ||||
|     settings = mkOption { | ||||
|       type = types.attrs; | ||||
|       description = '' | ||||
|         Global settings to apply. | ||||
|         Directly maps to values in the [global] section of the copyparty config. | ||||
|         See `${getExe cfg.package} --help` for more details. | ||||
|       ''; | ||||
|       default = { | ||||
|         i = "127.0.0.1"; | ||||
|         no-reload = true; | ||||
|       }; | ||||
|       example = literalExpression '' | ||||
|         { | ||||
|           i = "0.0.0.0"; | ||||
|           no-reload = true; | ||||
|         } | ||||
|       ''; | ||||
|     }; | ||||
|  | ||||
|     accounts = mkOption { | ||||
|       type = types.attrsOf (types.submodule ({ ... }: { | ||||
|         options = { | ||||
|           passwordFile = mkOption { | ||||
|             type = types.str; | ||||
|             description = '' | ||||
|               Runtime file path to a file containing the user password. | ||||
|               Must be readable by the copyparty user. | ||||
|             ''; | ||||
|             example = "/run/keys/copyparty/ed"; | ||||
|           }; | ||||
|         }; | ||||
|       })); | ||||
|       description = '' | ||||
|         A set of copyparty accounts to create. | ||||
|       ''; | ||||
|       default = { }; | ||||
|       example = literalExpression '' | ||||
|         { | ||||
|           ed.passwordFile = "/run/keys/copyparty/ed"; | ||||
|         }; | ||||
|       ''; | ||||
|     }; | ||||
|  | ||||
|     volumes = mkOption { | ||||
|       type = types.attrsOf (types.submodule ({ ... }: { | ||||
|         options = { | ||||
|           path = mkOption { | ||||
|             type = types.str; | ||||
|             description = '' | ||||
|               Path of a directory to share. | ||||
|             ''; | ||||
|           }; | ||||
|           access = mkOption { | ||||
|             type = types.attrs; | ||||
|             description = '' | ||||
|               Attribute list of permissions and the users to apply them to. | ||||
|  | ||||
|               The key must be a string containing any combination of allowed permission: | ||||
|                 "r" (read):   list folder contents, download files | ||||
|                 "w" (write):  upload files; need "r" to see the uploads | ||||
|                 "m" (move):   move files and folders; need "w" at destination | ||||
|                 "d" (delete): permanently delete files and folders | ||||
|                 "g" (get):    download files, but cannot see folder contents | ||||
|                 "G" (upget):  "get", but can see filekeys of their own uploads | ||||
|  | ||||
|               For example: "rwmd" | ||||
|  | ||||
|               The value must be one of: | ||||
|                 an account name, defined in `accounts` | ||||
|                 a list of account names | ||||
|                 "*", which means "any account" | ||||
|             ''; | ||||
|             example = literalExpression '' | ||||
|               { | ||||
|                 # wG = write-upget = see your own uploads only | ||||
|                 wG = "*"; | ||||
|                 # read-write-modify-delete for users "ed" and "k" | ||||
|                 rwmd = ["ed" "k"]; | ||||
|               }; | ||||
|             ''; | ||||
|           }; | ||||
|           flags = mkOption { | ||||
|             type = types.attrs; | ||||
|             description = '' | ||||
|               Attribute list of volume flags to apply. | ||||
|               See `${getExe cfg.package} --help-flags` for more details. | ||||
|             ''; | ||||
|             example = literalExpression '' | ||||
|               { | ||||
|                 # "fk" enables filekeys (necessary for upget permission) (4 chars long) | ||||
|                 fk = 4; | ||||
|                 # scan for new files every 60sec | ||||
|                 scan = 60; | ||||
|                 # volflag "e2d" enables the uploads database | ||||
|                 e2d = true; | ||||
|                 # "d2t" disables multimedia parsers (in case the uploads are malicious) | ||||
|                 d2t = true; | ||||
|                 # skips hashing file contents if path matches *.iso | ||||
|                 nohash = "\.iso$"; | ||||
|               }; | ||||
|             ''; | ||||
|             default = { }; | ||||
|           }; | ||||
|         }; | ||||
|       })); | ||||
|       description = "A set of copyparty volumes to create"; | ||||
|       default = { | ||||
|         "/" = { | ||||
|           path = defaultShareDir; | ||||
|           access = { r = "*"; }; | ||||
|         }; | ||||
|       }; | ||||
|       example = literalExpression '' | ||||
|         { | ||||
|           "/" = { | ||||
|             path = ${defaultShareDir}; | ||||
|             access = { | ||||
|               # wG = write-upget = see your own uploads only | ||||
|               wG = "*"; | ||||
|               # read-write-modify-delete for users "ed" and "k" | ||||
|               rwmd = ["ed" "k"]; | ||||
|             }; | ||||
|           }; | ||||
|         }; | ||||
|       ''; | ||||
|     }; | ||||
|   }; | ||||
|  | ||||
|   config = mkIf cfg.enable { | ||||
|     systemd.services.copyparty = { | ||||
|       description = "http file sharing hub"; | ||||
|       wantedBy = [ "multi-user.target" ]; | ||||
|  | ||||
|       environment = { | ||||
|         PYTHONUNBUFFERED = "true"; | ||||
|         XDG_CONFIG_HOME = "${home}/.config"; | ||||
|       }; | ||||
|  | ||||
|       preStart = let | ||||
|         replaceSecretCommand = name: attrs: | ||||
|           "${getExe pkgs.replace-secret} '${ | ||||
|             passwordPlaceholder name | ||||
|           }' '${attrs.passwordFile}' ${runtimeConfigPath}"; | ||||
|       in '' | ||||
|         set -euo pipefail | ||||
|         install -m 600 ${configFile} ${runtimeConfigPath} | ||||
|         ${concatStringsSep "\n" | ||||
|         (mapAttrsToList replaceSecretCommand cfg.accounts)} | ||||
|       ''; | ||||
|  | ||||
|       serviceConfig = { | ||||
|         Type = "simple"; | ||||
|         ExecStart = "${getExe cfg.package} -c ${runtimeConfigPath}"; | ||||
|  | ||||
|         # Hardening options | ||||
|         User = "copyparty"; | ||||
|         Group = "copyparty"; | ||||
|         RuntimeDirectory = name; | ||||
|         RuntimeDirectoryMode = "0700"; | ||||
|         StateDirectory = [ name "${name}/data" "${name}/.config" ]; | ||||
|         StateDirectoryMode = "0700"; | ||||
|         WorkingDirectory = home; | ||||
|         TemporaryFileSystem = "/:ro"; | ||||
|         BindReadOnlyPaths = [ | ||||
|           "/nix/store" | ||||
|           "-/etc/resolv.conf" | ||||
|           "-/etc/nsswitch.conf" | ||||
|           "-/etc/hosts" | ||||
|           "-/etc/localtime" | ||||
|         ] ++ (mapAttrsToList (k: v: "-${v.passwordFile}") cfg.accounts); | ||||
|         BindPaths = [ home ] ++ (mapAttrsToList (k: v: v.path) cfg.volumes); | ||||
|         # Would re-mount paths ignored by temporary root | ||||
|         #ProtectSystem = "strict"; | ||||
|         ProtectHome = true; | ||||
|         PrivateTmp = true; | ||||
|         PrivateDevices = true; | ||||
|         ProtectKernelTunables = true; | ||||
|         ProtectControlGroups = true; | ||||
|         RestrictSUIDSGID = true; | ||||
|         PrivateMounts = true; | ||||
|         ProtectKernelModules = true; | ||||
|         ProtectKernelLogs = true; | ||||
|         ProtectHostname = true; | ||||
|         ProtectClock = true; | ||||
|         ProtectProc = "invisible"; | ||||
|         ProcSubset = "pid"; | ||||
|         RestrictNamespaces = true; | ||||
|         RemoveIPC = true; | ||||
|         UMask = "0077"; | ||||
|         LimitNOFILE = cfg.openFilesLimit; | ||||
|         NoNewPrivileges = true; | ||||
|         LockPersonality = true; | ||||
|         RestrictRealtime = true; | ||||
|       }; | ||||
|     }; | ||||
|  | ||||
|     users.groups.copyparty = { }; | ||||
|     users.users.copyparty = { | ||||
|       description = "Service user for copyparty"; | ||||
|       group = "copyparty"; | ||||
|       home = home; | ||||
|       isSystemUser = true; | ||||
|     }; | ||||
|   }; | ||||
| } | ||||
| @@ -14,5 +14,5 @@ name="$SVCNAME" | ||||
| command_background=true | ||||
| pidfile="/var/run/$SVCNAME.pid" | ||||
|  | ||||
| command="/usr/bin/python /usr/local/bin/copyparty-sfx.py" | ||||
| command="/usr/bin/python3 /usr/local/bin/copyparty-sfx.py" | ||||
| command_args="-q -v /mnt::rw" | ||||
|   | ||||
							
								
								
									
										54
									
								
								contrib/package/arch/PKGBUILD
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										54
									
								
								contrib/package/arch/PKGBUILD
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,54 @@ | ||||
| # Maintainer: icxes <dev.null@need.moe> | ||||
| pkgname=copyparty | ||||
| pkgver="1.7.4" | ||||
| pkgrel=1 | ||||
| pkgdesc="Portable file sharing hub" | ||||
| arch=("any") | ||||
| url="https://github.com/9001/${pkgname}" | ||||
| license=('MIT') | ||||
| depends=("python" "lsof" "python-jinja") | ||||
| makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz") | ||||
| optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags" | ||||
|             "python-mutagen: music tags (alternative)"  | ||||
|             "python-pillow: thumbnails for images"  | ||||
|             "python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"  | ||||
|             "libkeyfinder-git: detection of musical keys"  | ||||
|             "qm-vamp-plugins: BPM detection"  | ||||
|             "python-pyopenssl: ftps functionality"  | ||||
|             "python-impacket-git: smb support (bad idea)" | ||||
| ) | ||||
| source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz") | ||||
| backup=("etc/${pkgname}.d/init" ) | ||||
| sha256sums=("ae304043e806e3c39cd39c8919a514c2b2ec0f35b4cddb114eaf0e66b6826265") | ||||
|  | ||||
build() {
    # work inside the unpacked release tarball
    cd "${srcdir}/${pkgname}-${pkgver}"

    # build the minified web assets first, then drop the Makefile
    # so it does not get shipped inside the wheel
    pushd copyparty/web
    make -j$(nproc)
    rm Makefile
    popd

    # PEP 517 build; -w = wheel only, -n = no build isolation
    python3 -m build -wn
}
|  | ||||
package() {
    cd "${srcdir}/${pkgname}-${pkgver}"
    # install the wheel produced by build() into the package root
    python3 -m installer -d "$pkgdir" dist/*.whl

    install -dm755 "${pkgdir}/etc/${pkgname}.d"
    install -Dm755 "bin/prisonparty.sh" "${pkgdir}/usr/bin/prisonparty"
    install -Dm644 "contrib/package/arch/${pkgname}.conf" "${pkgdir}/etc/${pkgname}.d/init"
    install -Dm644 "contrib/package/arch/${pkgname}.service" "${pkgdir}/usr/lib/systemd/system/${pkgname}.service"
    install -Dm644 "contrib/package/arch/prisonparty.service" "${pkgdir}/usr/lib/systemd/system/prisonparty.service"
    install -Dm644 "contrib/package/arch/index.md" "${pkgdir}/var/lib/${pkgname}-jail/README.md"
    install -Dm644 "LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"

    # suppress the first-install hint when *.conf files already exist
    # NOTE(review): this inspects /etc on the machine running makepkg,
    # not the install target -- presumably meant for an install scriptlet;
    # confirm intent before relying on it
    find /etc/${pkgname}.d -iname '*.conf' 2>/dev/null | grep -qE . && return
    echo "┏━━━━━━━━━━━━━━━──-"
    echo "┃ Configure ${pkgname} by adding .conf files into /etc/${pkgname}.d/"
    echo "┃ and maybe copy+edit one of the following to /etc/systemd/system/:"
    echo "┣━♦ /usr/lib/systemd/system/${pkgname}.service   (standard)"
    echo "┣━♦ /usr/lib/systemd/system/prisonparty.service (chroot)"
    echo "┗━━━━━━━━━━━━━━━──-"
}
							
								
								
									
										7
									
								
								contrib/package/arch/copyparty.conf
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										7
									
								
								contrib/package/arch/copyparty.conf
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,7 @@ | ||||
| ## import all *.conf files from the current folder (/etc/copyparty.d) | ||||
| % ./ | ||||
|  | ||||
| # add additional .conf files to this folder; | ||||
| # see example config files for reference: | ||||
| # https://github.com/9001/copyparty/blob/hovudstraum/docs/example.conf | ||||
| # https://github.com/9001/copyparty/tree/hovudstraum/docs/copyparty.d | ||||
							
								
								
									
										32
									
								
								contrib/package/arch/copyparty.service
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										32
									
								
								contrib/package/arch/copyparty.service
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,32 @@ | ||||
| # this will start `/usr/bin/copyparty` | ||||
| # and read config from `/etc/copyparty.d/*.conf` | ||||
| # | ||||
| # you probably want to: | ||||
| #   change "User=cpp" and "/home/cpp/" to another user | ||||
| # | ||||
| # unless you add -q to disable logging, you may want to remove the | ||||
| #   following line to allow buffering (slightly better performance): | ||||
| #   Environment=PYTHONUNBUFFERED=x | ||||
|  | ||||
| [Unit] | ||||
| Description=copyparty file server | ||||
|  | ||||
| [Service] | ||||
| Type=notify | ||||
| SyslogIdentifier=copyparty | ||||
| Environment=PYTHONUNBUFFERED=x | ||||
| WorkingDirectory=/var/lib/copyparty-jail | ||||
| ExecReload=/bin/kill -s USR1 $MAINPID | ||||
|  | ||||
| # user to run as + where the TLS certificate is (if any) | ||||
| User=cpp | ||||
| Environment=XDG_CONFIG_HOME=/home/cpp/.config | ||||
|  | ||||
| # stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running | ||||
| ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' | ||||
|  | ||||
| # run copyparty | ||||
| ExecStart=/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init | ||||
|  | ||||
| [Install] | ||||
| WantedBy=multi-user.target | ||||
							
								
								
									
										3
									
								
								contrib/package/arch/index.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										3
									
								
								contrib/package/arch/index.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,3 @@ | ||||
| this is `/var/lib/copyparty-jail`, the fallback webroot when copyparty has not yet been configured | ||||
|  | ||||
| please add some `*.conf` files to `/etc/copyparty.d/` | ||||
							
								
								
									
										31
									
								
								contrib/package/arch/prisonparty.service
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										31
									
								
								contrib/package/arch/prisonparty.service
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,31 @@ | ||||
| # this will start `/usr/bin/copyparty` | ||||
| # in a chroot, preventing accidental access elsewhere | ||||
| # and read config from `/etc/copyparty.d/*.conf` | ||||
| # | ||||
| # expose additional filesystem locations to copyparty | ||||
| #   by listing them between the last `1000` and `--` | ||||
| # | ||||
| # `1000 1000` = what user to run copyparty as | ||||
| # | ||||
| # unless you add -q to disable logging, you may want to remove the | ||||
| #   following line to allow buffering (slightly better performance): | ||||
| #   Environment=PYTHONUNBUFFERED=x | ||||
|  | ||||
| [Unit] | ||||
| Description=copyparty file server | ||||
|  | ||||
| [Service] | ||||
| SyslogIdentifier=prisonparty | ||||
| Environment=PYTHONUNBUFFERED=x | ||||
| WorkingDirectory=/var/lib/copyparty-jail | ||||
| ExecReload=/bin/kill -s USR1 $MAINPID | ||||
|  | ||||
| # stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running | ||||
| ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' | ||||
|  | ||||
| # run copyparty | ||||
| ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail 1000 1000 /etc/copyparty.d -- \ | ||||
|   /usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init | ||||
|  | ||||
| [Install] | ||||
| WantedBy=multi-user.target | ||||
							
								
								
									
										55
									
								
								contrib/package/nix/copyparty/default.nix
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										55
									
								
								contrib/package/nix/copyparty/default.nix
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,55 @@ | ||||
| { lib, stdenv, makeWrapper, fetchurl, utillinux, python, jinja2, impacket, pyftpdlib, pyopenssl, pillow, pyvips, ffmpeg, mutagen, | ||||
|  | ||||
| # create thumbnails with Pillow; faster than FFmpeg / MediaProcessing | ||||
| withThumbnails ? true, | ||||
|  | ||||
| # create thumbnails with PyVIPS; even faster, uses more memory | ||||
| # -- can be combined with Pillow to support more filetypes | ||||
| withFastThumbnails ? false, | ||||
|  | ||||
| # enable FFmpeg; thumbnails for most filetypes (also video and audio), extract audio metadata, transcode audio to opus | ||||
| # -- possibly dangerous if you allow anonymous uploads, since FFmpeg has a huge attack surface | ||||
| # -- can be combined with Thumbnails and/or FastThumbnails, since FFmpeg is slower than both | ||||
| withMediaProcessing ? true, | ||||
|  | ||||
| # if MediaProcessing is not enabled, you probably want this instead (less accurate, but much safer and faster) | ||||
| withBasicAudioMetadata ? false, | ||||
|  | ||||
| # enable FTPS support in the FTP server | ||||
| withFTPS ? false, | ||||
|  | ||||
| # samba/cifs server; dangerous and buggy, enable if you really need it | ||||
| withSMB ? false, | ||||
|  | ||||
| }: | ||||
|  | ||||
| let | ||||
|   pinData = lib.importJSON ./pin.json; | ||||
|   pyEnv = python.withPackages (ps: | ||||
|     with ps; [ | ||||
|       jinja2 | ||||
|     ] | ||||
|     ++ lib.optional withSMB impacket | ||||
|     ++ lib.optional withFTPS pyopenssl | ||||
|     ++ lib.optional withThumbnails pillow | ||||
|     ++ lib.optional withFastThumbnails pyvips | ||||
|     ++ lib.optional withMediaProcessing ffmpeg | ||||
|     ++ lib.optional withBasicAudioMetadata mutagen | ||||
|     ); | ||||
| in stdenv.mkDerivation { | ||||
|   pname = "copyparty"; | ||||
|   version = pinData.version; | ||||
|   src = fetchurl { | ||||
|     url = pinData.url; | ||||
|     hash = pinData.hash; | ||||
|   }; | ||||
|   buildInputs = [ makeWrapper ]; | ||||
|   dontUnpack = true; | ||||
|   dontBuild = true; | ||||
|   installPhase = '' | ||||
|     install -Dm755 $src $out/share/copyparty-sfx.py | ||||
|     makeWrapper ${pyEnv.interpreter} $out/bin/copyparty \ | ||||
|       --set PATH '${lib.makeBinPath ([ utillinux ] ++ lib.optional withMediaProcessing ffmpeg)}:$PATH' \ | ||||
|       --add-flags "$out/share/copyparty-sfx.py" | ||||
|   ''; | ||||
| } | ||||
							
								
								
									
										5
									
								
								contrib/package/nix/copyparty/pin.json
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										5
									
								
								contrib/package/nix/copyparty/pin.json
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,5 @@ | ||||
| { | ||||
|     "url": "https://github.com/9001/copyparty/releases/download/v1.7.4/copyparty-sfx.py", | ||||
|     "version": "1.7.4", | ||||
|     "hash": "sha256-du7LYr28gzGy1zV2sTaUDHJfQ4dFFeyQS/TZCX42J5Q=" | ||||
| } | ||||
							
								
								
									
										77
									
								
								contrib/package/nix/copyparty/update.py
									
									
									
									
									
										Executable file
									
								
							
							
						
						
									
										77
									
								
								contrib/package/nix/copyparty/update.py
									
									
									
									
									
										Executable file
									
								
							| @@ -0,0 +1,77 @@ | ||||
| #!/usr/bin/env python3 | ||||
|  | ||||
| # Update the Nix package pin | ||||
| # | ||||
| # Usage: ./update.py [PATH] | ||||
| # When the [PATH] is not set, it will fetch the latest release from the repo. | ||||
| # With [PATH] set, it will hash the given file and generate the URL, | ||||
| # based on the version contained within the file | ||||
|  | ||||
| import base64 | ||||
| import json | ||||
| import hashlib | ||||
| import sys | ||||
| import re | ||||
| from pathlib import Path | ||||
|  | ||||
# where the pin is written, which release asset it tracks, and how it is hashed
OUTPUT_FILE = Path("pin.json")
TARGET_ASSET = "copyparty-sfx.py"
HASH_TYPE = "sha256"
LATEST_RELEASE_URL = "https://api.github.com/repos/9001/copyparty/releases/latest"


def DOWNLOAD_URL(version):
    """Return the release download URL of TARGET_ASSET for *version*.

    Kept as an UPPER_CASE callable for backward compatibility with the
    original lambda-style constant (PEP 8 E731: prefer ``def`` over
    assigning a lambda to a name).
    """
    return f"https://github.com/9001/copyparty/releases/download/v{version}/{TARGET_ASSET}"


def get_formatted_hash(binary):
    """Return the SRI-style hash ("sha256-<base64>") of *binary*.

    This is the format Nix expects in pin.json.
    """
    # derive the algorithm from HASH_TYPE so the prefix in the returned
    # string can never disagree with the digest actually computed
    hasher = hashlib.new(HASH_TYPE)
    hasher.update(binary)
    encoded_hash = base64.b64encode(hasher.digest()).decode("ascii")
    return f"{HASH_TYPE}-{encoded_hash}"
|  | ||||
|  | ||||
def version_from_sfx(binary):
    """Extract the copyparty version string from the bytes of an sfx file.

    The sfx embeds a line of the form ``VER = "x.y.z"``.
    Raises ValueError if no such line is found.
    """
    result = re.search(rb'^VER = "(.*)"$', binary, re.MULTILINE)
    if result:
        # .group(1) is the captured version; the original .groups(1)[0]
        # only worked by accident (1 is the *default* argument of groups(),
        # not a group index)
        return result.group(1).decode("ascii")

    raise ValueError("version not found in provided file")
|  | ||||
|  | ||||
def remote_release_pin():
    """Build a pin dict for the newest GitHub release.

    Queries the GitHub API for the latest release, downloads the sfx
    asset, and returns ``{"url", "version", "hash"}``.
    """
    import requests

    release = requests.get(LATEST_RELEASE_URL).json()
    version = release["tag_name"].lstrip("v")
    # same IndexError as before when the asset is absent from the release
    asset_info = [a for a in release["assets"] if a["name"] == TARGET_ASSET][0]
    download_url = asset_info["browser_download_url"]
    payload = requests.get(download_url).content
    return {
        "url": download_url,
        "version": version,
        "hash": get_formatted_hash(payload),
    }
|  | ||||
|  | ||||
def local_release_pin(path):
    """Build a pin dict for a local sfx file.

    Reads *path*, derives the version from its contents, and returns
    ``{"url", "version", "hash"}`` with the URL reconstructed from the
    version.
    """
    blob = path.read_bytes()
    ver = version_from_sfx(blob)
    return {
        "url": DOWNLOAD_URL(ver),
        "version": ver,
        "hash": get_formatted_hash(blob),
    }
|  | ||||
|  | ||||
def main():
    """Pin from a local sfx (argv[1]) or from the latest remote release,
    then print the pin and write it to pin.json."""
    argv = sys.argv
    result = (
        local_release_pin(Path(argv[1])) if len(argv) > 1 else remote_release_pin()
    )

    print(result)
    OUTPUT_FILE.write_text(json.dumps(result, indent=4))
|  | ||||
|  | ||||
# allow importing this module (e.g. for testing) without side effects
if __name__ == "__main__":
    main()
| @@ -1,13 +1,22 @@ | ||||
| <!-- | ||||
|   NOTE: DEPRECATED; please use the javascript version instead: | ||||
|   https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/minimal-up2k.js | ||||
|  | ||||
|   ---- | ||||
|  | ||||
|   save this as .epilogue.html inside a write-only folder to declutter the UI,  makes it look like | ||||
|   https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png | ||||
|  | ||||
|   only works if you disable the prologue/epilogue sandbox with --no-sb-lg | ||||
|   which should probably be combined with --no-dot-ren to prevent damage | ||||
|   (`no_sb_lg` can also be set per-volume with volflags) | ||||
| --> | ||||
|  | ||||
| <style> | ||||
|  | ||||
|     /* make the up2k ui REALLY minimal by hiding a bunch of stuff: */ | ||||
|  | ||||
|     #ops, #tree, #path, #epi+h2,  /* main tabs and navigators (tree/breadcrumbs) */ | ||||
|     #ops, #tree, #path, #wfp,  /* main tabs and navigators (tree/breadcrumbs) */ | ||||
|  | ||||
|     #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw),  /* most of the config options */ | ||||
|  | ||||
|   | ||||
| @@ -17,7 +17,7 @@ almost the same as minimal-up2k.html except this one...: | ||||
| var u2min = ` | ||||
| <style> | ||||
|  | ||||
| #ops, #path, #tree, #files, #epi+div+h2, | ||||
| #ops, #path, #tree, #files, #wfp, | ||||
| #u2conf td.c+.c, #u2cards, #srch_dz, #srch_zd { | ||||
|   display: none !important; | ||||
| } | ||||
| @@ -55,5 +55,5 @@ var u2min = ` | ||||
| if (!has(perms, 'read')) { | ||||
|   var e2 = mknod('div'); | ||||
|   e2.innerHTML = u2min; | ||||
|   ebi('wrap').insertBefore(e2, QS('#epi+h2')); | ||||
|   ebi('wrap').insertBefore(e2, QS('#wfp')); | ||||
| } | ||||
|   | ||||
							
								
								
									
										208
									
								
								contrib/plugins/rave.js
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										208
									
								
								contrib/plugins/rave.js
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,208 @@ | ||||
| /* untz untz untz untz */ | ||||
|  | ||||
| (function () { | ||||
|  | ||||
|     var can, ctx, W, H, fft, buf, bars, barw, pv, | ||||
|         hue = 0, | ||||
|         ibeat = 0, | ||||
|         beats = [9001], | ||||
|         beats_url = '', | ||||
|         uofs = 0, | ||||
|         ops = ebi('ops'), | ||||
|         raving = false, | ||||
|         recalc = 0, | ||||
|         cdown = 0, | ||||
|         FC = 0.9, | ||||
|         css = `<style> | ||||
|  | ||||
| #fft { | ||||
|     position: fixed; | ||||
|     top: 0; | ||||
|     left: 0; | ||||
|     z-index: -1; | ||||
| } | ||||
| body { | ||||
|     box-shadow: inset 0 0 0 white; | ||||
| } | ||||
| #ops>a, | ||||
| #path>a { | ||||
|     display: inline-block; | ||||
| } | ||||
| /* | ||||
| body.untz { | ||||
|     animation: untz-body 200ms ease-out; | ||||
| } | ||||
| @keyframes untz-body { | ||||
| 	0% {inset 0 0 20em white} | ||||
| 	100% {inset 0 0 0 white} | ||||
| } | ||||
| */ | ||||
| :root, html.a, html.b, html.c, html.d, html.e { | ||||
|     --row-alt: rgba(48,52,78,0.2); | ||||
| } | ||||
| #files td { | ||||
|     background: none; | ||||
| } | ||||
|  | ||||
| </style>`; | ||||
|  | ||||
|     QS('body').appendChild(mknod('div', null, css)); | ||||
|  | ||||
    // (re)create the spectrum canvas and hook an AnalyserNode into the
    // audio filter chain; invoked when a new track starts playing
    function rave_load() {
        console.log('rave_load');
        can = mknod('canvas', 'fft');
        QS('body').appendChild(can);
        ctx = can.getContext('2d');

        // NOTE(review): actx / afilt / mp / mknod / QS are globals from
        // copyparty's player scripts -- confirm against the main webui
        fft = new AnalyserNode(actx, {
            "fftSize": 2048,
            "maxDecibels": 0,
            "smoothingTimeConstant": 0.7,
        });
        ibeat = 0;
        beats = [9001];  // sentinel value until a beatmap is loaded
        buf = new Uint8Array(fft.frequencyBinCount);
        bars = buf.length * FC;
        afilt.filters.push(fft);
        if (!raving) {
            raving = true;
            raver();  // start the render loop exactly once
        }
        // beatmap sidecar: /folder/.beats/<songname>.txt (query string stripped)
        beats_url = mp.au.src.split('?')[0].replace(/(.*\/)(.*)/, '$1.beats/$2.txt');
        console.log("reading beats from", beats_url);
        var xhr = new XHR();
        xhr.open('GET', beats_url, true);
        xhr.onload = readbeats;
        xhr.url = beats_url;  // tag the request so stale replies can be dropped
        xhr.send();
    }
|  | ||||
|     function rave_unload() { | ||||
|         qsr('#fft'); | ||||
|         can = null; | ||||
|     } | ||||
|  | ||||
|     function readbeats() { | ||||
|         if (this.url != beats_url) | ||||
|             return console.log('old beats??', this.url, beats_url); | ||||
|  | ||||
|         var sbeats = this.responseText.replace(/\r/g, '').split(/\n/g); | ||||
|         if (sbeats.length < 3) | ||||
|             return; | ||||
|  | ||||
|         beats = []; | ||||
|         for (var a = 0; a < sbeats.length; a++) | ||||
|             beats.push(parseFloat(sbeats[a])); | ||||
|  | ||||
|         var end = beats.slice(-2), | ||||
|             t = end[1], | ||||
|             d = t - end[0]; | ||||
|  | ||||
|         while (d > 0.1 && t < 1200) | ||||
|             beats.push(t += d); | ||||
|     } | ||||
|  | ||||
|     function hrand() { | ||||
|         return Math.random() - 0.5; | ||||
|     } | ||||
|  | ||||
|     function raver() { | ||||
|         if (!can) { | ||||
|             raving = false; | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         requestAnimationFrame(raver); | ||||
|         if (!mp || !mp.au || mp.au.paused) | ||||
|             return; | ||||
|  | ||||
|         if (--uofs >= 0) { | ||||
|             document.body.style.marginLeft = hrand() * uofs + 'px'; | ||||
|             ebi('tree').style.marginLeft = hrand() * uofs + 'px'; | ||||
|             for (var a of QSA('#ops>a, #path>a, #pctl>a')) | ||||
|                 a.style.transform = 'translate(' + hrand() * uofs * 1 + 'px, ' + hrand() * uofs * 0.7 + 'px) rotate(' + Math.random() * uofs * 0.7 + 'deg)' | ||||
|         } | ||||
|  | ||||
|         if (--recalc < 0) { | ||||
|             recalc = 60; | ||||
|             var tree = ebi('tree'), | ||||
|                 x = tree.style.display == 'none' ? 0 : tree.offsetWidth; | ||||
|  | ||||
|             //W = can.width = window.innerWidth - x; | ||||
|             //H = can.height = window.innerHeight; | ||||
|             //H = ebi('widget').offsetTop; | ||||
|             W = can.width = bars; | ||||
|             H = can.height = 512; | ||||
|             barw = 1; //parseInt(0.8 + W / bars); | ||||
|             can.style.left = x + 'px'; | ||||
|             can.style.width = (window.innerWidth - x) + 'px'; | ||||
|             can.style.height = ebi('widget').offsetTop + 'px'; | ||||
|         } | ||||
|  | ||||
|         //if (--cdown == 1) | ||||
|         //    clmod(ops, 'untz'); | ||||
|  | ||||
|         fft.getByteFrequencyData(buf); | ||||
|  | ||||
|         var imax = 0, vmax = 0; | ||||
|         for (var a = 10; a < 50; a++) | ||||
|             if (vmax < buf[a]) { | ||||
|                 vmax = buf[a]; | ||||
|                 imax = a; | ||||
|             } | ||||
|  | ||||
|         hue = hue * 0.93 + imax * 0.07; | ||||
|  | ||||
|         ctx.fillStyle = 'rgba(0,0,0,0)'; | ||||
|         ctx.fillRect(0, 0, W, H); | ||||
|         ctx.clearRect(0, 0, W, H); | ||||
|         ctx.fillStyle = 'hsla(' + (hue * 2.5) + ',100%,50%,0.7)'; | ||||
|  | ||||
|         var x = 0, mul = (H / 256) * 0.5; | ||||
|         for (var a = 0; a < buf.length * FC; a++) { | ||||
|             var v = buf[a] * mul * (1 + 0.69 * a / buf.length); | ||||
|             ctx.fillRect(x, H - v, barw, v); | ||||
|             x += barw; | ||||
|         } | ||||
|  | ||||
|         var t = mp.au.currentTime + 0.05; | ||||
|  | ||||
|         if (ibeat >= beats.length || beats[ibeat] > t) | ||||
|             return; | ||||
|  | ||||
|         while (ibeat < beats.length && beats[ibeat++] < t) | ||||
|             continue; | ||||
|  | ||||
|         return untz(); | ||||
|  | ||||
|         var cv = 0; | ||||
|         for (var a = 0; a < 128; a++) | ||||
|             cv += buf[a]; | ||||
|  | ||||
|         if (cv - pv > 1000) { | ||||
|             console.log(pv, cv, cv - pv); | ||||
|             if (cdown < 0) { | ||||
|                 clmod(ops, 'untz', 1); | ||||
|                 cdown = 20; | ||||
|             } | ||||
|         } | ||||
|         pv = cv; | ||||
|     } | ||||
|  | ||||
|     function untz() { | ||||
|         console.log('untz'); | ||||
|         uofs = 14; | ||||
|         document.body.animate([ | ||||
|             { boxShadow: 'inset 0 0 1em #f0c' }, | ||||
|             { boxShadow: 'inset 0 0 20em #f0c', offset: 0.2 }, | ||||
|             { boxShadow: 'inset 0 0 0 #f0c' }, | ||||
|         ], { duration: 200, iterations: 1 }); | ||||
|     } | ||||
|  | ||||
|     afilt.plugs.push({ | ||||
|         "en": true, | ||||
|         "load": rave_load, | ||||
|         "unload": rave_unload | ||||
|     }); | ||||
|  | ||||
| })(); | ||||
| @@ -14,6 +14,8 @@ function up2k_namefilter(good_files, nil_files, bad_files, hooks) { | ||||
|     a_up2k_namefilter(good_files, nil_files, bad_files, hooks).then(() => { }); | ||||
| } | ||||
|  | ||||
| // ebi('op_up2k').appendChild(mknod('input','unick')); | ||||
|  | ||||
| function bstrpos(buf, ptn) { | ||||
|     var ofs = 0, | ||||
|         ch0 = ptn[0], | ||||
| @@ -44,13 +46,19 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) { | ||||
|         md_only = [],  // `${id} ${fn}` where ID was only found in metadata | ||||
|         mofs = 0, | ||||
|         mnchk = 0, | ||||
|         mfile = ''; | ||||
|         mfile = '', | ||||
|         myid = localStorage.getItem('ytid_t0'); | ||||
|  | ||||
|     if (!myid) | ||||
|         localStorage.setItem('ytid_t0', myid = Date.now()); | ||||
|  | ||||
|     for (var a = 0; a < good_files.length; a++) { | ||||
|         var [fobj, name] = good_files[a], | ||||
|             cname = name,  // will clobber | ||||
|             sz = fobj.size, | ||||
|             ids = [], | ||||
|             fn_ids = [], | ||||
|             md_ids = [], | ||||
|             id_ok = false, | ||||
|             m; | ||||
|  | ||||
| @@ -71,7 +79,7 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) { | ||||
|  | ||||
|             cname = cname.replace(m[1], ''); | ||||
|             yt_ids.add(m[1]); | ||||
|             ids.push(m[1]); | ||||
|             fn_ids.unshift(m[1]); | ||||
|         } | ||||
|  | ||||
|         // look for IDs in video metadata, | ||||
| @@ -85,6 +93,8 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) { | ||||
|                 aspan = id_ok ? 128 : 512;  // MiB | ||||
|  | ||||
|             aspan = parseInt(Math.min(sz / 2, aspan * 1024 * 1024) / chunksz) * chunksz; | ||||
|             if (!aspan) | ||||
|                 aspan = Math.min(sz, chunksz); | ||||
|  | ||||
|             for (var side = 0; side < 2; side++) { | ||||
|                 var ofs = side ? Math.max(0, sz - aspan) : 0, | ||||
| @@ -110,10 +120,13 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) { | ||||
|  | ||||
|                         console.log(`found ${m} @${bofs}, ${name} `); | ||||
|                         yt_ids.add(m); | ||||
|                         if (!has(ids, m)) { | ||||
|                             ids.push(m); | ||||
|                         if (!has(fn_ids, m) && !has(md_ids, m)) { | ||||
|                             md_ids.push(m); | ||||
|                             md_only.push(`${m} ${name}`); | ||||
|                         } | ||||
|                         else | ||||
|                             // id appears several times; make it preferred | ||||
|                             md_ids.unshift(m); | ||||
|  | ||||
|                         // bail after next iteration | ||||
|                         chunk = nchunks - 1; | ||||
| @@ -130,6 +143,13 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) { | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         for (var yi of md_ids) | ||||
|             ids.push(yi); | ||||
|  | ||||
|         for (var yi of fn_ids) | ||||
|             if (!has(ids, yi)) | ||||
|                 ids.push(yi); | ||||
|     } | ||||
|  | ||||
|     if (md_only.length) | ||||
| @@ -149,6 +169,16 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) { | ||||
|         return hooks[0]([], [], [], hooks.slice(1)); | ||||
|     } | ||||
|  | ||||
|     var el = ebi('unick'), unick = el ? el.value : ''; | ||||
|     if (unick) { | ||||
|         console.log(`sending uploader nickname [${unick}]`); | ||||
|         fetch(document.location, { | ||||
|             method: 'POST', | ||||
|             headers: { 'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8' }, | ||||
|             body: 'msg=' + encodeURIComponent(unick) | ||||
|         }); | ||||
|     } | ||||
|  | ||||
|     toast.inf(5, `running query for ${yt_ids.size} youtube-IDs...`); | ||||
|  | ||||
|     var xhr = new XHR(); | ||||
| @@ -164,18 +194,31 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) { | ||||
|  | ||||
|     function process_id_list(txt) { | ||||
|         var wanted_ids = new Set(txt.trim().split('\n')), | ||||
|             wanted_names = new Set(),  // basenames with a wanted ID | ||||
|             name_id = {}, | ||||
|             wanted_names = new Set(),  // basenames with a wanted ID -- not including relpath | ||||
|             wanted_names_scoped = {},  // basenames with a wanted ID -> list of dirs to search under | ||||
|             wanted_files = new Set();  // filedrops | ||||
|  | ||||
|         for (var a = 0; a < good_files.length; a++) { | ||||
|             var name = good_files[a][1]; | ||||
|             for (var b = 0; b < file_ids[a].length; b++) | ||||
|                 if (wanted_ids.has(file_ids[a][b])) { | ||||
|                     wanted_files.add(good_files[a]); | ||||
|                     // let the next stage handle this to prevent dupes | ||||
|                     //wanted_files.add(good_files[a]); | ||||
|  | ||||
|                     var m = /(.*)\.(mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name); | ||||
|                     if (m) | ||||
|                         wanted_names.add(m[1]); | ||||
|                     if (!m) | ||||
|                         continue; | ||||
|  | ||||
|                     var [rd, fn] = vsplit(m[1]); | ||||
|  | ||||
|                     if (fn in wanted_names_scoped) | ||||
|                         wanted_names_scoped[fn].push(rd); | ||||
|                     else | ||||
|                         wanted_names_scoped[fn] = [rd]; | ||||
|  | ||||
|                     wanted_names.add(fn); | ||||
|                     name_id[m[1]] = file_ids[a][b]; | ||||
|  | ||||
|                     break; | ||||
|                 } | ||||
| @@ -184,13 +227,35 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) { | ||||
|         // add all files with the same basename as each explicitly wanted file | ||||
|         // (infojson/chatlog/etc when ID was discovered from metadata) | ||||
|         for (var a = 0; a < good_files.length; a++) { | ||||
|             var name = good_files[a][1]; | ||||
|             var [rd, name] = vsplit(good_files[a][1]); | ||||
|             for (var b = 0; b < 3; b++) { | ||||
|                 name = name.replace(/\.[^\.]+$/, ''); | ||||
|                 if (wanted_names.has(name)) { | ||||
|                     wanted_files.add(good_files[a]); | ||||
|                 if (!wanted_names.has(name)) | ||||
|                     continue; | ||||
|  | ||||
|                 var vid_fp = false; | ||||
|                 for (var c of wanted_names_scoped[name]) | ||||
|                     if (rd.startsWith(c)) | ||||
|                         vid_fp = c + name; | ||||
|  | ||||
|                 if (!vid_fp) | ||||
|                     continue; | ||||
|  | ||||
|                 var subdir = name_id[vid_fp]; | ||||
|                 subdir = `v${subdir.slice(0, 1)}/${subdir}-${myid}`; | ||||
|                 var newpath = subdir + '/' + good_files[a][1].split(/\//g).pop(); | ||||
|  | ||||
|                 // check if this file is a dupe | ||||
|                 for (var c of good_files) | ||||
|                     if (c[1] == newpath) | ||||
|                         newpath = null; | ||||
|  | ||||
|                 if (!newpath) | ||||
|                     break; | ||||
|                 } | ||||
|  | ||||
|                 good_files[a][1] = newpath; | ||||
|                 wanted_files.add(good_files[a]); | ||||
|                 break; | ||||
|             } | ||||
|         } | ||||
|  | ||||
| @@ -218,3 +283,15 @@ async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) { | ||||
| up2k_hooks.push(function () { | ||||
|     up2k.gotallfiles.unshift(up2k_namefilter); | ||||
| }); | ||||
|  | ||||
| // persist/restore nickname field if present | ||||
| setInterval(function () { | ||||
|     var o = ebi('unick'); | ||||
|     if (!o || document.activeElement == o) | ||||
|         return; | ||||
|  | ||||
|     o.oninput = function () { | ||||
|         localStorage.setItem('unick', o.value); | ||||
|     }; | ||||
|     o.value = localStorage.getItem('unick') || ''; | ||||
| }, 1000); | ||||
|   | ||||
| @@ -1,3 +1,6 @@ | ||||
| # NOTE: this is now a built-in feature in copyparty | ||||
| # but you may still want this if you have specific needs | ||||
| # | ||||
| # systemd service which generates a new TLS certificate on each boot, | ||||
| # that way the one-year expiry time won't cause any issues -- | ||||
| # just have everyone trust the ca.pem once every 10 years | ||||
|   | ||||
| @@ -2,12 +2,16 @@ | ||||
| # and share '/mnt' with anonymous read+write | ||||
| # | ||||
| # installation: | ||||
| #   cp -pv copyparty.service /etc/systemd/system | ||||
| #   restorecon -vr /etc/systemd/system/copyparty.service | ||||
| #   wget https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py -O /usr/local/bin/copyparty-sfx.py | ||||
| #   cp -pv copyparty.service /etc/systemd/system/ | ||||
| #   restorecon -vr /etc/systemd/system/copyparty.service  # on fedora/rhel | ||||
| #   firewall-cmd --permanent --add-port={80,443,3923}/tcp  # --zone=libvirt | ||||
| #   firewall-cmd --reload | ||||
| #   systemctl daemon-reload && systemctl enable --now copyparty | ||||
| # | ||||
| # if it fails to start, first check this: systemctl status copyparty | ||||
| # then try starting it while viewing logs: journalctl -fan 100 | ||||
| # | ||||
| # you may want to: | ||||
| #   change "User=cpp" and "/home/cpp/" to another user | ||||
| #   remove the nft lines to only listen on port 3923 | ||||
| @@ -18,6 +22,7 @@ | ||||
| #   add '-i 127.0.0.1' to only allow local connections | ||||
| #   add '-e2dsa' to enable filesystem scanning + indexing | ||||
| #   add '-e2ts' to enable metadata indexing | ||||
| #   remove '--ansi' to disable colored logs | ||||
| # | ||||
| # with `Type=notify`, copyparty will signal systemd when it is ready to | ||||
| #   accept connections; correctly delaying units depending on copyparty. | ||||
| @@ -44,7 +49,7 @@ ExecReload=/bin/kill -s USR1 $MAINPID | ||||
| User=cpp | ||||
| Environment=XDG_CONFIG_HOME=/home/cpp/.config | ||||
|  | ||||
| # setup forwarding from ports 80 and 443 to port 3923 | ||||
| # OPTIONAL: setup forwarding from ports 80 and 443 to port 3923 | ||||
| ExecStartPre=+/bin/bash -c 'nft -n -a list table nat | awk "/ to :3923 /{print\$NF}" | xargs -rL1 nft delete rule nat prerouting handle; true' | ||||
| ExecStartPre=+nft add table ip nat | ||||
| ExecStartPre=+nft -- add chain ip nat prerouting { type nat hook prerouting priority -100 \; } | ||||
| @@ -55,7 +60,7 @@ ExecStartPre=+nft add rule ip nat prerouting tcp dport 443 redirect to :3923 | ||||
| ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' | ||||
|  | ||||
| # copyparty settings | ||||
| ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -e2d -v /mnt::rw | ||||
| ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py --ansi -e2d -v /mnt::rw | ||||
|  | ||||
| [Install] | ||||
| WantedBy=multi-user.target | ||||
|   | ||||
| @@ -6,12 +6,17 @@ | ||||
| #   1) put copyparty-sfx.py and prisonparty.sh in /usr/local/bin | ||||
| #   2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty | ||||
| # | ||||
| # expose additional filesystem locations to copyparty | ||||
| #   by listing them between the last `1000` and `--` | ||||
| # | ||||
| # `1000 1000` = what user to run copyparty as | ||||
| # | ||||
| # you may want to: | ||||
| #   change '/mnt::rw' to another location or permission-set | ||||
| #    (remember to change the '/mnt' chroot arg too) | ||||
| # | ||||
| # enable line-buffering for realtime logging (slight performance cost): | ||||
| #   inside the [Service] block, add the following line: | ||||
| # unless you add -q to disable logging, you may want to remove the | ||||
| #   following line to allow buffering (slightly better performance): | ||||
| #   Environment=PYTHONUNBUFFERED=x | ||||
|  | ||||
| [Unit] | ||||
| @@ -19,7 +24,14 @@ Description=copyparty file server | ||||
|  | ||||
| [Service] | ||||
| SyslogIdentifier=prisonparty | ||||
| WorkingDirectory=/usr/local/bin | ||||
| Environment=PYTHONUNBUFFERED=x | ||||
| WorkingDirectory=/var/lib/copyparty-jail | ||||
| ExecReload=/bin/kill -s USR1 $MAINPID | ||||
|  | ||||
| # stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running | ||||
| ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf' | ||||
|  | ||||
| # run copyparty | ||||
| ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \ | ||||
|   /usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw | ||||
|  | ||||
|   | ||||
							
								
								
									
										45
									
								
								contrib/webdav-cfg.bat
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										45
									
								
								contrib/webdav-cfg.bat
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,45 @@ | ||||
| @echo off | ||||
| rem removes the 47.6 MiB filesize limit when downloading from webdav | ||||
| rem + optionally allows/enables password-auth over plaintext http | ||||
| rem + optionally helps disable wpad, removing the 10sec latency | ||||
|  | ||||
| net session >nul 2>&1 | ||||
| if %errorlevel% neq 0 ( | ||||
|     echo sorry, you must run this as administrator | ||||
|     pause | ||||
|     exit /b | ||||
| ) | ||||
|  | ||||
| reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\WebClient\Parameters /v FileSizeLimitInBytes /t REG_DWORD /d 0xffffffff /f | ||||
| reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Services\WebClient\Parameters /v FsCtlRequestTimeoutInSec /t REG_DWORD /d 0xffffffff /f | ||||
|  | ||||
| echo( | ||||
| echo OK; | ||||
| echo allow webdav basic-auth over plaintext http? | ||||
| echo Y: login works, but the password will be visible in wireshark etc | ||||
| echo N: login will NOT work unless you use https and valid certificates | ||||
| choice | ||||
| if %errorlevel% equ 1 ( | ||||
|     reg add HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\services\WebClient\Parameters /v BasicAuthLevel /t REG_DWORD /d 0x2 /f | ||||
|     rem default is 1 (require tls) | ||||
| ) | ||||
|  | ||||
| echo( | ||||
| echo OK; | ||||
| echo do you want to disable wpad? | ||||
| echo can give a HUGE speed boost depending on network settings | ||||
| choice | ||||
| if %errorlevel% equ 1 ( | ||||
|     echo( | ||||
|     echo i'm about to open the [Connections] tab in [Internet Properties] for you; | ||||
|     echo please click [LAN settings] and disable [Automatically detect settings] | ||||
|     echo( | ||||
|     pause | ||||
|     control inetcpl.cpl,,4 | ||||
| ) | ||||
|  | ||||
| net stop webclient | ||||
| net start webclient | ||||
| echo( | ||||
| echo OK; all done | ||||
| pause | ||||
| @@ -6,19 +6,24 @@ import platform | ||||
| import sys | ||||
| import time | ||||
|  | ||||
| try: | ||||
|     from collections.abc import Callable | ||||
| # fmt: off | ||||
| _:tuple[int,int]=(0,0)  # _____________________________________________________________________  hey there! if you are reading this, your python is too old to run copyparty without some help. Please use https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py or the pypi package instead, or see https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#building if you want to build it yourself :-)  ************************************************************************************************************************************************ | ||||
| # fmt: on | ||||
|  | ||||
|     from typing import TYPE_CHECKING, Any | ||||
| try: | ||||
|     from typing import TYPE_CHECKING | ||||
| except: | ||||
|     TYPE_CHECKING = False | ||||
|  | ||||
| PY2 = sys.version_info[0] == 2 | ||||
| if PY2: | ||||
| if True: | ||||
|     from typing import Any, Callable | ||||
|  | ||||
| PY2 = sys.version_info < (3,) | ||||
| if not PY2: | ||||
|     unicode: Callable[[Any], str] = str | ||||
| else: | ||||
|     sys.dont_write_bytecode = True | ||||
|     unicode = unicode  # noqa: F821  # pylint: disable=undefined-variable,self-assigning-variable | ||||
| else: | ||||
|     unicode = str | ||||
|  | ||||
| WINDOWS: Any = ( | ||||
|     [int(x) for x in platform.version().split(".")] | ||||
| @@ -26,64 +31,32 @@ WINDOWS: Any = ( | ||||
|     else False | ||||
| ) | ||||
|  | ||||
| VT100 = not WINDOWS or WINDOWS >= [10, 0, 14393] | ||||
| VT100 = "--ansi" in sys.argv or ( | ||||
|     os.environ.get("NO_COLOR", "").lower() in ("", "0", "false") | ||||
|     and sys.stdout.isatty() | ||||
|     and "--no-ansi" not in sys.argv | ||||
|     and (not WINDOWS or WINDOWS >= [10, 0, 14393]) | ||||
| ) | ||||
| # introduced in anniversary update | ||||
|  | ||||
| ANYWIN = WINDOWS or sys.platform in ["msys", "cygwin"] | ||||
|  | ||||
| MACOS = platform.system() == "Darwin" | ||||
|  | ||||
| EXE = bool(getattr(sys, "frozen", False)) | ||||
|  | ||||
| def get_unixdir() -> str: | ||||
|     paths: list[tuple[Callable[..., str], str]] = [ | ||||
|         (os.environ.get, "XDG_CONFIG_HOME"), | ||||
|         (os.path.expanduser, "~/.config"), | ||||
|         (os.environ.get, "TMPDIR"), | ||||
|         (os.environ.get, "TEMP"), | ||||
|         (os.environ.get, "TMP"), | ||||
|         (unicode, "/tmp"), | ||||
|     ] | ||||
|     for chk in [os.listdir, os.mkdir]: | ||||
|         for pf, pa in paths: | ||||
|             try: | ||||
|                 p = pf(pa) | ||||
|                 # print(chk.__name__, p, pa) | ||||
|                 if not p or p.startswith("~"): | ||||
|                     continue | ||||
|  | ||||
|                 p = os.path.normpath(p) | ||||
|                 chk(p)  # type: ignore | ||||
|                 p = os.path.join(p, "copyparty") | ||||
|                 if not os.path.isdir(p): | ||||
|                     os.mkdir(p) | ||||
|  | ||||
|                 return p | ||||
|             except: | ||||
|                 pass | ||||
|  | ||||
|     raise Exception("could not find a writable path for config") | ||||
| try: | ||||
|     CORES = len(os.sched_getaffinity(0)) | ||||
| except: | ||||
|     CORES = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2 | ||||
|  | ||||
|  | ||||
| class EnvParams(object): | ||||
|     def __init__(self) -> None: | ||||
|         self.t0 = time.time() | ||||
|         self.mod = os.path.dirname(os.path.realpath(__file__)) | ||||
|         if self.mod.endswith("__init__"): | ||||
|             self.mod = os.path.dirname(self.mod) | ||||
|  | ||||
|         if sys.platform == "win32": | ||||
|             self.cfg = os.path.normpath(os.environ["APPDATA"] + "/copyparty") | ||||
|         elif sys.platform == "darwin": | ||||
|             self.cfg = os.path.expanduser("~/Library/Preferences/copyparty") | ||||
|         else: | ||||
|             self.cfg = get_unixdir() | ||||
|  | ||||
|         self.cfg = self.cfg.replace("\\", "/") | ||||
|         try: | ||||
|             os.makedirs(self.cfg) | ||||
|         except: | ||||
|             if not os.path.isdir(self.cfg): | ||||
|                 raise | ||||
|         self.mod = "" | ||||
|         self.cfg = "" | ||||
|         self.ox = getattr(sys, "oxidized", None) | ||||
|  | ||||
|  | ||||
| E = EnvParams() | ||||
|   | ||||
							
								
								
									
										849
									
								
								copyparty/__main__.py
									
									
									
									
									
										
										
										Normal file → Executable file
									
								
							
							
						
						
									
										849
									
								
								copyparty/__main__.py
									
									
									
									
									
										
										
										Normal file → Executable file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -1,8 +1,8 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| VERSION = (1, 3, 13) | ||||
| CODENAME = "god dag" | ||||
| BUILD_DT = (2022, 8, 15) | ||||
| VERSION = (1, 7, 5) | ||||
| CODENAME = "unlinked" | ||||
| BUILD_DT = (2023, 6, 11) | ||||
|  | ||||
| S_VERSION = ".".join(map(str, VERSION)) | ||||
| S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT) | ||||
|   | ||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -4,14 +4,13 @@ from __future__ import print_function, unicode_literals | ||||
| import os | ||||
|  | ||||
| from ..util import SYMTIME, fsdec, fsenc | ||||
| from . import path | ||||
| from . import path as path | ||||
|  | ||||
| try: | ||||
|     from typing import Optional | ||||
| except: | ||||
|     pass | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Optional | ||||
|  | ||||
| _ = (path,) | ||||
| __all__ = ["path"] | ||||
|  | ||||
| # grep -hRiE '(^|[^a-zA-Z_\.-])os\.' . | gsed -r 's/ /\n/g;s/\(/(\n/g' | grep -hRiE '(^|[^a-zA-Z_\.-])os\.' | sort | uniq -c | ||||
| # printf 'os\.(%s)' "$(grep ^def bos/__init__.py | gsed -r 's/^def //;s/\(.*//' | tr '\n' '|' | gsed -r 's/.$//')" | ||||
| @@ -25,19 +24,25 @@ def listdir(p: str = ".") -> list[str]: | ||||
|     return [fsdec(x) for x in os.listdir(fsenc(p))] | ||||
|  | ||||
|  | ||||
| def makedirs(name: str, mode: int = 0o755, exist_ok: bool = True) -> None: | ||||
| def makedirs(name: str, mode: int = 0o755, exist_ok: bool = True) -> bool: | ||||
|     bname = fsenc(name) | ||||
|     try: | ||||
|         os.makedirs(bname, mode) | ||||
|         return True | ||||
|     except: | ||||
|         if not exist_ok or not os.path.isdir(bname): | ||||
|             raise | ||||
|         return False | ||||
|  | ||||
|  | ||||
| def mkdir(p: str, mode: int = 0o755) -> None: | ||||
|     return os.mkdir(fsenc(p), mode) | ||||
|  | ||||
|  | ||||
| def open(p: str, *a, **ka) -> int: | ||||
|     return os.open(fsenc(p), *a, **ka) | ||||
|  | ||||
|  | ||||
| def rename(src: str, dst: str) -> None: | ||||
|     return os.rename(fsenc(src), fsenc(dst)) | ||||
|  | ||||
|   | ||||
| @@ -3,21 +3,20 @@ from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import threading | ||||
| import time | ||||
| import traceback | ||||
|  | ||||
| import queue | ||||
|  | ||||
| from .__init__ import TYPE_CHECKING | ||||
| from .__init__ import CORES, TYPE_CHECKING | ||||
| from .broker_mpw import MpWorker | ||||
| from .broker_util import try_exec | ||||
| from .util import mp | ||||
| from .util import Daemon, mp | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .svchub import SvcHub | ||||
|  | ||||
| try: | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any | ||||
| except: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class MProcess(mp.Process): | ||||
| @@ -44,20 +43,14 @@ class BrokerMp(object): | ||||
|         self.procs = [] | ||||
|         self.mutex = threading.Lock() | ||||
|  | ||||
|         self.num_workers = self.args.j or mp.cpu_count() | ||||
|         self.num_workers = self.args.j or CORES | ||||
|         self.log("broker", "booting {} subprocesses".format(self.num_workers)) | ||||
|         for n in range(1, self.num_workers + 1): | ||||
|             q_pend: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(1) | ||||
|             q_yield: queue.Queue[tuple[int, str, list[Any]]] = mp.Queue(64) | ||||
|  | ||||
|             proc = MProcess(q_pend, q_yield, MpWorker, (q_pend, q_yield, self.args, n)) | ||||
|  | ||||
|             thr = threading.Thread( | ||||
|                 target=self.collector, args=(proc,), name="mp-sink-{}".format(n) | ||||
|             ) | ||||
|             thr.daemon = True | ||||
|             thr.start() | ||||
|  | ||||
|             Daemon(self.collector, "mp-sink-{}".format(n), (proc,)) | ||||
|             self.procs.append(proc) | ||||
|             proc.start() | ||||
|  | ||||
| @@ -101,12 +94,15 @@ class BrokerMp(object): | ||||
|  | ||||
|             else: | ||||
|                 # new ipc invoking managed service in hub | ||||
|                 obj = self.hub | ||||
|                 for node in dest.split("."): | ||||
|                     obj = getattr(obj, node) | ||||
|                 try: | ||||
|                     obj = self.hub | ||||
|                     for node in dest.split("."): | ||||
|                         obj = getattr(obj, node) | ||||
|  | ||||
|                 # TODO will deadlock if dest performs another ipc | ||||
|                 rv = try_exec(retq_id, obj, *args) | ||||
|                     # TODO will deadlock if dest performs another ipc | ||||
|                     rv = try_exec(retq_id, obj, *args) | ||||
|                 except: | ||||
|                     rv = ["exception", "stack", traceback.format_exc()] | ||||
|  | ||||
|                 if retq_id: | ||||
|                     proc.q_pend.put((retq_id, "retq", rv)) | ||||
| @@ -121,6 +117,10 @@ class BrokerMp(object): | ||||
|             for p in self.procs: | ||||
|                 p.q_pend.put((0, dest, [args[0], len(self.procs)])) | ||||
|  | ||||
|         elif dest == "set_netdevs": | ||||
|             for p in self.procs: | ||||
|                 p.q_pend.put((0, dest, list(args))) | ||||
|  | ||||
|         elif dest == "cb_httpsrv_up": | ||||
|             self.hub.cb_httpsrv_up() | ||||
|  | ||||
|   | ||||
| @@ -2,23 +2,23 @@ | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import argparse | ||||
| import os | ||||
| import signal | ||||
| import sys | ||||
| import threading | ||||
|  | ||||
| import queue | ||||
|  | ||||
| from .__init__ import ANYWIN | ||||
| from .authsrv import AuthSrv | ||||
| from .broker_util import BrokerCli, ExceptionalQueue | ||||
| from .httpsrv import HttpSrv | ||||
| from .util import FAKE_MP | ||||
| from .util import FAKE_MP, Daemon, HMaccas | ||||
|  | ||||
| try: | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from types import FrameType | ||||
|  | ||||
|     from typing import Any, Optional, Union | ||||
| except: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class MpWorker(BrokerCli): | ||||
| @@ -47,21 +47,23 @@ class MpWorker(BrokerCli): | ||||
|         # we inherited signal_handler from parent, | ||||
|         # replace it with something harmless | ||||
|         if not FAKE_MP: | ||||
|             for sig in [signal.SIGINT, signal.SIGTERM, signal.SIGUSR1]: | ||||
|             sigs = [signal.SIGINT, signal.SIGTERM] | ||||
|             if not ANYWIN: | ||||
|                 sigs.append(signal.SIGUSR1) | ||||
|  | ||||
|             for sig in sigs: | ||||
|                 signal.signal(sig, self.signal_handler) | ||||
|  | ||||
|         # starting to look like a good idea | ||||
|         self.asrv = AuthSrv(args, None, False) | ||||
|  | ||||
|         # instantiate all services here (TODO: inheritance?) | ||||
|         self.iphash = HMaccas(os.path.join(self.args.E.cfg, "iphash"), 8) | ||||
|         self.httpsrv = HttpSrv(self, n) | ||||
|  | ||||
|         # on winxp and some other platforms, | ||||
|         # use thr.join() to block all signals | ||||
|         thr = threading.Thread(target=self.main, name="mpw-main") | ||||
|         thr.daemon = True | ||||
|         thr.start() | ||||
|         thr.join() | ||||
|         Daemon(self.main, "mpw-main").join() | ||||
|  | ||||
|     def signal_handler(self, sig: Optional[int], frame: Optional[FrameType]) -> None: | ||||
|         # print('k') | ||||
| @@ -95,6 +97,9 @@ class MpWorker(BrokerCli): | ||||
|             elif dest == "listen": | ||||
|                 self.httpsrv.listen(args[0], args[1]) | ||||
|  | ||||
|             elif dest == "set_netdevs": | ||||
|                 self.httpsrv.set_netdevs(args[0]) | ||||
|  | ||||
|             elif dest == "retq": | ||||
|                 # response from previous ipc call | ||||
|                 with self.retpend_mutex: | ||||
|   | ||||
| @@ -1,19 +1,19 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import os | ||||
| import threading | ||||
|  | ||||
| from .__init__ import TYPE_CHECKING | ||||
| from .broker_util import BrokerCli, ExceptionalQueue, try_exec | ||||
| from .httpsrv import HttpSrv | ||||
| from .util import HMaccas | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .svchub import SvcHub | ||||
|  | ||||
| try: | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any | ||||
| except: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class BrokerThr(BrokerCli): | ||||
| @@ -31,6 +31,7 @@ class BrokerThr(BrokerCli): | ||||
|         self.num_workers = 1 | ||||
|  | ||||
|         # instantiate all services here (TODO: inheritance?) | ||||
|         self.iphash = HMaccas(os.path.join(self.args.E.cfg, "iphash"), 8) | ||||
|         self.httpsrv = HttpSrv(self, None) | ||||
|         self.reload = self.noop | ||||
|  | ||||
| @@ -60,6 +61,10 @@ class BrokerThr(BrokerCli): | ||||
|             self.httpsrv.listen(args[0], 1) | ||||
|             return | ||||
|  | ||||
|         if dest == "set_netdevs": | ||||
|             self.httpsrv.set_netdevs(args[0]) | ||||
|             return | ||||
|  | ||||
|         # new ipc invoking managed service in hub | ||||
|         obj = self.hub | ||||
|         for node in dest.split("."): | ||||
|   | ||||
| @@ -8,14 +8,12 @@ from queue import Queue | ||||
|  | ||||
| from .__init__ import TYPE_CHECKING | ||||
| from .authsrv import AuthSrv | ||||
| from .util import Pebkac | ||||
| from .util import HMaccas, Pebkac | ||||
|  | ||||
| try: | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Optional, Union | ||||
|  | ||||
|     from .util import RootLogger | ||||
| except: | ||||
|     pass | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .httpsrv import HttpSrv | ||||
| @@ -41,11 +39,14 @@ class BrokerCli(object): | ||||
|     for example resolving httpconn.* in httpcli -- see lines tagged #mypy404 | ||||
|     """ | ||||
|  | ||||
|     log: "RootLogger" | ||||
|     args: argparse.Namespace | ||||
|     asrv: AuthSrv | ||||
|     httpsrv: "HttpSrv" | ||||
|     iphash: HMaccas | ||||
|  | ||||
|     def __init__(self) -> None: | ||||
|         self.log: "RootLogger" = None | ||||
|         self.args: argparse.Namespace = None | ||||
|         self.asrv: AuthSrv = None | ||||
|         self.httpsrv: "HttpSrv" = None | ||||
|         pass | ||||
|  | ||||
|     def ask(self, dest: str, *args: Any) -> ExceptionalQueue: | ||||
|         return ExceptionalQueue(1) | ||||
|   | ||||
							
								
								
									
										217
									
								
								copyparty/cert.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										217
									
								
								copyparty/cert.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,217 @@ | ||||
| import os | ||||
| import errno | ||||
| import time | ||||
| import json | ||||
| import shutil | ||||
| import filecmp | ||||
| import calendar | ||||
|  | ||||
| from .util import runcmd, Netdev | ||||
|  | ||||
|  | ||||
| HAVE_CFSSL = True | ||||
|  | ||||
|  | ||||
def ensure_cert(log: "RootLogger", args) -> None:
    """
    make sure args.cert points at a usable pem (key + cert, in that order),
    seeding the default location with the bundled self-signed cert if needed

    the default cert (and the entire TLS support) is only here to enable the
    crypto.subtle javascript API, which is necessary due to the webkit guys
    being massive memers (https://www.chromium.org/blink/webcrypto)

    i feel awful about this and so should they
    """
    cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
    cert_appdata = os.path.join(args.E.cfg, "cert.pem")
    if not os.path.isfile(args.cert):
        if cert_appdata != args.cert:
            # user explicitly pointed --cert somewhere else; don't invent a file there
            raise Exception("certificate file does not exist: " + args.cert)

        # default location; seed it with the bundled insecure cert
        shutil.copy(cert_insec, args.cert)

    # sanity-check the pem layout; key must come before the certificate
    with open(args.cert, "rb") as f:
        buf = f.read()
        o1 = buf.find(b" PRIVATE KEY-")
        o2 = buf.find(b" CERTIFICATE-")
        m = "unsupported certificate format: "
        if o1 < 0:
            raise Exception(m + "no private key inside pem")
        if o2 < 0:
            raise Exception(m + "no server certificate inside pem")
        if o1 > o2:
            raise Exception(m + "private key must appear before server certificate")

    # warn (best-effort; comparison may fail e.g. if cert_insec is missing)
    try:
        if filecmp.cmp(args.cert, cert_insec):
            t = "using default TLS certificate; https will be insecure:\033[36m {}"
            log("cert", t.format(args.cert), 3)
    except:
        pass

    # speaking of the default cert,
    # printf 'NO\n.\n.\n.\n.\ncopyparty-insecure\n.\n' | faketime '2000-01-01 00:00:00' openssl req -x509 -sha256 -newkey rsa:2048 -keyout insecure.pem -out insecure.pem -days $((($(printf %d 0x7fffffff)-$(date +%s --date=2000-01-01T00:00:00Z))/(60*60*24))) -nodes && ls -al insecure.pem && openssl x509 -in insecure.pem -text -noout
|  | ||||
|  | ||||
def _read_crt(args, fn):
    """
    inspect certificate *fn* (relative to args.crt_dir) with cfssl-certinfo;
    returns (expiry-unixtime, info-dict), or (0, {}) if missing or unreadable
    """
    try:
        if not os.path.exists(os.path.join(args.crt_dir, fn)):
            return 0, {}

        acmd = ["cfssl-certinfo", "-cert", fn]
        rc, so, se = runcmd(acmd, cwd=args.crt_dir)
        if rc:
            return 0, {}

        inf = json.loads(so)
        zs = inf["not_after"]
        # cfssl prints timestamps as UTC iso8601; convert to epoch seconds
        expiry = calendar.timegm(time.strptime(zs, "%Y-%m-%dT%H:%M:%SZ"))
        return expiry, inf
    except OSError as ex:
        # cfssl-certinfo not installed; propagate ENOENT so the caller
        # (gencert) can disable cfssl and print install instructions
        if ex.errno == errno.ENOENT:
            raise
        return 0, {}
    except:
        return 0, {}
|  | ||||
|  | ||||
def _gen_ca(log: "RootLogger", args):
    """create (or keep) the self-signed CA which signs the server certs"""
    # keep the existing CA while more than 10% of its lifetime remains
    expiry = _read_crt(args, "ca.pem")[0]
    if time.time() + args.crt_cdays * 60 * 60 * 24 * 0.1 < expiry:
        return

    backdate = "{}m".format(int(args.crt_back * 60))
    expiry = "{}m".format(int(args.crt_cdays * 60 * 24))
    cn = args.crt_cnc.replace("--crt-cn", args.crt_cn)
    algo, ksz = args.crt_alg.split("-")
    req = {
        "CN": cn,
        "CA": {"backdate": backdate, "expiry": expiry, "pathlen": 0},
        "key": {"algo": algo, "size": int(ksz)},
        "names": [{"O": cn}],
    }
    sin = json.dumps(req).encode("utf-8")
    log("cert", "creating new ca ...", 6)

    cmd = "cfssl gencert -initca -"
    rc, so, se = runcmd(cmd.split(), 30, sin=sin)
    if rc:
        # bugfix: a stray ", 3" (log-color leftover) was previously passed as a
        # second Exception arg, making str(ex) render as a tuple
        raise Exception("failed to create ca-cert: {}, {}".format(rc, se))

    # split cfssl's json output into ca.pem / ca-key.pem / ca.csr
    cmd = "cfssljson -bare ca"
    sin = so.encode("utf-8")
    rc, so, se = runcmd(cmd.split(), 10, sin=sin, cwd=args.crt_dir)
    if rc:
        raise Exception("failed to translate ca-cert: {}, {}".format(rc, se))

    bname = os.path.join(args.crt_dir, "ca")
    os.rename(bname + "-key.pem", bname + ".key")  # our preferred filename
    os.unlink(bname + ".csr")  # signing-request not needed once the ca exists

    log("cert", "new ca OK", 2)
|  | ||||
|  | ||||
def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
    """create (or keep) the server certificate, signed by the local CA"""
    # collect the hostnames/IPs this cert should be valid for
    names = args.crt_ns.split(",") if args.crt_ns else []
    if not args.crt_exact:
        for n in names[:]:
            names.append("*.{}".format(n))  # also cover subdomains
    if not args.crt_noip:
        for ip in netdevs.keys():
            names.append(ip.split("/")[0])  # strip any cidr suffix
    if args.crt_nolo:
        names = [x for x in names if x not in ("localhost", "127.0.0.1", "::1")]
    if not names:
        names = ["127.0.0.1"]
    if "127.0.0.1" in names or "::1" in names:
        names.append("localhost")
    names = list({x: 1 for x in names}.keys())  # dedupe, keep order

    # reuse the current srv.pem unless it misses a name, is (nearly) expired,
    # or the active cert is still the bundled insecure one
    try:
        expiry, inf = _read_crt(args, "srv.pem")
        # "expired" when less than 10% of the configured lifetime remains
        expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.1 > expiry
        cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
        for n in names:
            if n not in inf["sans"]:
                raise Exception("does not have {}".format(n))
        if expired:
            raise Exception("old server-cert has expired")
        if not filecmp.cmp(args.cert, cert_insec):
            return
    except Exception as ex:
        log("cert", "will create new server-cert; {}".format(ex))

    log("cert", "creating server-cert ...", 6)

    backdate = "{}m".format(int(args.crt_back * 60))
    expiry = "{}m".format(int(args.crt_sdays * 60 * 24))
    cfg = {
        "signing": {
            "default": {
                "backdate": backdate,
                "expiry": expiry,
                "usages": ["signing", "key encipherment", "server auth"],
            }
        }
    }
    with open(os.path.join(args.crt_dir, "cfssl.json"), "wb") as f:
        f.write(json.dumps(cfg).encode("utf-8"))

    cn = args.crt_cns.replace("--crt-cn", args.crt_cn)
    algo, ksz = args.crt_alg.split("-")
    req = {
        "key": {"algo": algo, "size": int(ksz)},
        "names": [{"O": cn}],
    }
    sin = json.dumps(req).encode("utf-8")

    # sign a cert for all collected hostnames with the local ca
    cmd = "cfssl gencert -config=cfssl.json -ca ca.pem -ca-key ca.key -profile=www"
    acmd = cmd.split() + ["-hostname=" + ",".join(names), "-"]
    rc, so, se = runcmd(acmd, 30, sin=sin, cwd=args.crt_dir)
    if rc:
        raise Exception("failed to create cert: {}, {}".format(rc, se))

    # split cfssl's json output into srv.pem / srv-key.pem / srv.csr
    cmd = "cfssljson -bare srv"
    sin = so.encode("utf-8")
    rc, so, se = runcmd(cmd.split(), 10, sin=sin, cwd=args.crt_dir)
    if rc:
        raise Exception("failed to translate cert: {}, {}".format(rc, se))

    bname = os.path.join(args.crt_dir, "srv")
    os.rename(bname + "-key.pem", bname + ".key")
    os.unlink(bname + ".csr")

    # assemble the pem expected by ensure_cert: key, then cert, then ca
    with open(os.path.join(args.crt_dir, "ca.pem"), "rb") as f:
        ca = f.read()

    with open(bname + ".key", "rb") as f:
        skey = f.read()

    with open(bname + ".pem", "rb") as f:
        scrt = f.read()

    with open(args.cert, "wb") as f:
        f.write(skey + scrt + ca)

    log("cert", "new server-cert OK", 2)
|  | ||||
|  | ||||
def gencert(log: "RootLogger", args, netdevs: dict[str, Netdev]):
    """entrypoint: build TLS certs with cfssl, falling back to ensure_cert"""
    global HAVE_CFSSL

    if args.http_only:
        return  # TLS is disabled entirely; nothing to do

    if args.no_crt or not HAVE_CFSSL:
        # cert generation disabled, or cfssl already known to be missing;
        # just verify/seed the configured cert file
        ensure_cert(log, args)
        return

    try:
        _gen_ca(log, args)
        _gen_srv(log, args, netdevs)
    except Exception as ex:
        HAVE_CFSSL = False  # don't retry cfssl on subsequent calls
        log("cert", "could not create TLS certificates: {}".format(ex), 3)
        if getattr(ex, "errno", 0) == errno.ENOENT:
            t = "install cfssl if you want to fix this; https://github.com/cloudflare/cfssl/releases/latest"
            log("cert", t, 6)

        ensure_cert(log, args)
							
								
								
									
										157
									
								
								copyparty/cfg.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										157
									
								
								copyparty/cfg.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,157 @@ | ||||
# coding: utf-8
from __future__ import print_function, unicode_literals

# short options (flags taking a single dash); the list is generated with:
# awk -F\" '/add_argument\("-[^-]/{print(substr($2,2))}' copyparty/__main__.py | sort | tr '\n' ' '
zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nw p q s ss sss v z zv"
onedash = {flag for flag in zs.split()}
|  | ||||
|  | ||||
def vf_bmap() -> dict[str, str]:
    """argv-to-volflag: simple bools"""
    # flags whose volflag name differs from the argv name
    mapping = {
        "never_symlink": "neversymlink",
        "no_dedup": "copydupes",
        "no_dupe": "nodupe",
        "no_forget": "noforget",
        "dav_auth": "davauth",
        "dav_rt": "davrt",
    }
    # flags which keep the same name in both worlds
    same = (
        "dotsrch",
        "e2t",
        "e2ts",
        "e2tsr",
        "e2v",
        "e2vu",
        "e2vp",
        "grid",
        "hardlink",
        "magic",
        "no_sb_md",
        "no_sb_lg",
        "rand",
        "xdev",
        "xlink",
        "xvol",
    )
    mapping.update({name: name for name in same})
    return mapping
|  | ||||
|  | ||||
def vf_vmap() -> dict[str, str]:
    """argv-to-volflag: simple values"""
    # these keep the same name in both worlds
    return {name: name for name in ("lg_sbf", "md_sbf", "unlist")}
|  | ||||
|  | ||||
def vf_cmap() -> dict[str, str]:
    """argv-to-volflag: complex/lists"""
    # these keep the same name in both worlds
    return {name: name for name in ("dbd", "html_head", "mte", "mth", "nrand")}
|  | ||||
|  | ||||
# user-facing descriptions of the per-volume permission letters
permdescs = {
    "r": "read; list folder contents, download files",
    "w": 'write; upload files; need "r" to see the uploads',
    "m": 'move; move files and folders; need "w" at destination',
    "d": "delete; permanently delete files and folders",
    "g": "get; download files, but cannot see folder contents",
    "G": 'upget; same as "g" but can see filekeys of their own uploads',
}
|  | ||||
|  | ||||
# volflag help-text grouped by category; keys written as "flag=value"
# also document the expected value syntax
flagcats = {
    "uploads, general": {
        "nodupe": "rejects existing files (instead of symlinking them)",
        "hardlink": "does dedup with hardlinks instead of symlinks",
        "neversymlink": "disables symlink fallback; full copy instead",
        "copydupes": "disables dedup, always saves full copies of dupes",
        "daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
        "nosub": "forces all uploads into the top folder of the vfs",
        "magic": "enables filetype detection for nameless uploads",
        "gz": "allows server-side gzip of uploads with ?gz (also c,xz)",
        "pk": "forces server-side compression, optional arg: xz,9",
    },
    "upload rules": {
        "maxn=250,600": "max 250 uploads over 15min",
        "maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g)",
        "rand": "force randomized filenames, 9 chars long by default",
        "nrand=N": "randomized filenames are N chars long",
        "sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
        "df=1g": "ensure 1 GiB free disk space",
    },
    "upload rotation\n(moves all uploads into the specified folder structure)": {
        "rotn=100,3": "3 levels of subfolders with 100 entries in each",
        "rotf=%Y-%m/%d-%H": "date-formatted organizing",
        "lifetime=3600": "uploads are deleted after 1 hour",
    },
    "database, general": {
        "e2d": "enable database; makes files searchable + enables upload dedup",
        "e2ds": "scan writable folders for new files on startup; also sets -e2d",
        "e2dsa": "scans all folders for new files on startup; also sets -e2d",
        "e2t": "enable multimedia indexing; makes it possible to search for tags",
        "e2ts": "scan existing files for tags on startup; also sets -e2t",
        "e2tsa": "delete all metadata from DB (full rescan); also sets -e2ts",
        "d2ts": "disables metadata collection for existing files",
        "d2ds": "disables onboot indexing, overrides -e2ds*",
        "d2t": "disables metadata collection, overrides -e2t*",
        "d2v": "disables file verification, overrides -e2v*",
        "d2d": "disables all database stuff, overrides -e2*",
        "hist=/tmp/cdb": "puts thumbnails and indexes at that location",
        "scan=60": "scan for new files every 60sec, same as --re-maxage",
        "nohash=\\.iso$": "skips hashing file contents if path matches *.iso",
        "noidx=\\.iso$": "fully ignores the contents at paths matching *.iso",
        "noforget": "don't forget files when deleted from disk",
        "dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
        "xlink": "cross-volume dupe detection / linking",
        "xdev": "do not descend into other filesystems",
        "xvol": "do not follow symlinks leaving the volume root",
        "dotsrch": "show dotfiles in search results",
        "nodotsrch": "hide dotfiles in search results (default)",
    },
    'database, audio tags\n"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...': {
        "mtp=.bpm=f,audio-bpm.py": 'uses the "audio-bpm.py" program to\ngenerate ".bpm" tags from uploads (f = overwrite tags)',
        "mtp=ahash,vhash=media-hash.py": "collects two tags at once",
    },
    "thumbnails": {
        "dthumb": "disables all thumbnails",
        "dvthumb": "disables video thumbnails",
        "dathumb": "disables audio thumbnails (spectrograms)",
        "dithumb": "disables image thumbnails",
    },
    "event hooks\n(better explained in --help-hooks)": {
        "xbu=CMD": "execute CMD before a file upload starts",
        "xau=CMD": "execute CMD after  a file upload finishes",
        "xiu=CMD": "execute CMD after  all uploads finish and volume is idle",
        "xbr=CMD": "execute CMD before a file rename/move",
        "xar=CMD": "execute CMD after  a file rename/move",
        "xbd=CMD": "execute CMD before a file delete",
        "xad=CMD": "execute CMD after  a file delete",
        "xm=CMD": "execute CMD on message",
    },
    "client and ux": {
        "grid": "show grid/thumbnails by default",
        "unlist": "dont list files matching REGEX",
        "html_head=TXT": "includes TXT in the <head>",
        "robots": "allows indexing by search engines (default)",
        "norobots": "kindly asks search engines to leave",
        "no_sb_md": "disable js sandbox for markdown files",
        "no_sb_lg": "disable js sandbox for prologue/epilogue",
        "sb_md": "enable js sandbox for markdown files (default)",
        "sb_lg": "enable js sandbox for prologue/epilogue (default)",
        "md_sbf": "list of markdown-sandbox safeguards to disable",
        "lg_sbf": "list of *logue-sandbox safeguards to disable",
    },
    "others": {
        "fk=8": 'generates per-file accesskeys,\nwhich will then be required at the "g" permission',
        "davauth": "ask webdav clients to login for all folders",
        "davrt": "show lastmod time of symlink destination, not the link itself\n(note: this option is always enabled for recursive listings)",
    },
}
|  | ||||
|  | ||||
# flat {flagname: description} lookup; the "=value" suffix is stripped from keys
flagdescs = {k.split("=")[0]: v for tab in flagcats.values() for k, v in tab.items()}
							
								
								
									
										72
									
								
								copyparty/dxml.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										72
									
								
								copyparty/dxml.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,72 @@ | ||||
| import importlib | ||||
| import sys | ||||
| import xml.etree.ElementTree as ET | ||||
|  | ||||
| from .__init__ import PY2 | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Optional | ||||
|  | ||||
|  | ||||
def get_ET() -> ET.XMLParser:
    """
    return the pure-python XMLParser class; presumably needed because the
    C accelerator (_elementtree) does not expose the expat handlers that
    DXMLParser overrides -- TODO confirm against the stdlib version in use
    """
    pn = "xml.etree.ElementTree"
    cn = "_elementtree"

    # if the C accelerator was never imported there is nothing to dodge
    cmod = sys.modules.pop(cn, None)
    if not cmod:
        return ET.XMLParser  # type: ignore

    # temporarily blank out the C module and reimport the package,
    # forcing its pure-python fallback to be used
    pmod = sys.modules.pop(pn)
    sys.modules[cn] = None  # type: ignore

    ret = importlib.import_module(pn)
    # restore the original modules so the rest of the process is unaffected
    for name, mod in ((pn, pmod), (cn, cmod)):
        if mod:
            sys.modules[name] = mod
        else:
            sys.modules.pop(name, None)

    sys.modules["xml.etree"].ElementTree = pmod  # type: ignore
    ret.ParseError = ET.ParseError  # keep exception identity with the real module  # type: ignore
    return ret.XMLParser  # type: ignore
|  | ||||
|  | ||||
# the parser class DXMLParser derives from (pure-python when available)
XMLParser: ET.XMLParser = get_ET()
|  | ||||
|  | ||||
class DXMLParser(XMLParser):  # type: ignore
    """hardened XML parser; rejects doctypes and entity declarations"""

    def __init__(self) -> None:
        builder = ET.TreeBuilder()
        super(DXMLParser, self).__init__(target=builder)

        # disable every expat feature that enables XXE-style attacks
        xp = self._parser if PY2 else self.parser
        for handler in (
            "StartDoctypeDeclHandler",
            "EntityDeclHandler",
            "UnparsedEntityDeclHandler",
            "ExternalEntityRefHandler",
        ):
            setattr(xp, handler, self.nope)

    def nope(self, *a: Any, **ka: Any) -> None:
        raise BadXML("{}, {}".format(a, ka))
|  | ||||
|  | ||||
class BadXML(Exception):
    """raised by DXMLParser when a document uses a forbidden construct"""

    pass
|  | ||||
|  | ||||
def parse_xml(txt: str) -> ET.Element:
    """parse *txt* with the hardened parser; returns the document root"""
    p = DXMLParser()
    p.feed(txt)
    root = p.close()
    return root  # type: ignore
|  | ||||
|  | ||||
def mktnod(name: str, text: str) -> ET.Element:
    """build a leaf element tagged *name* whose body is *text*"""
    node = ET.Element(name)
    node.text = text
    return node
|  | ||||
|  | ||||
def mkenod(name: str, sub_el: Optional[ET.Element] = None) -> ET.Element:
    """build an element tagged *name*, optionally adopting *sub_el* as its only child"""
    node = ET.Element(name)
    node.extend([] if sub_el is None else [sub_el])
    return node
| @@ -10,12 +10,10 @@ from .authsrv import AXS, VFS | ||||
| from .bos import bos | ||||
| from .util import chkcmd, min_ex | ||||
|  | ||||
| try: | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Optional, Union | ||||
|  | ||||
|     from .util import RootLogger | ||||
| except: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class Fstab(object): | ||||
| @@ -28,7 +26,7 @@ class Fstab(object): | ||||
|         self.age = 0.0 | ||||
|  | ||||
|     def log(self, msg: str, c: Union[int, str] = 0) -> None: | ||||
|         self.log_func("fstab", msg + "\033[K", c) | ||||
|         self.log_func("fstab", msg, c) | ||||
|  | ||||
|     def get(self, path: str) -> str: | ||||
|         if len(self.cache) > 9000: | ||||
|   | ||||
| @@ -2,22 +2,33 @@ | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import argparse | ||||
| import errno | ||||
| import logging | ||||
| import os | ||||
| import stat | ||||
| import sys | ||||
| import threading | ||||
| import time | ||||
|  | ||||
| from pyftpdlib.authorizers import AuthenticationFailed, DummyAuthorizer | ||||
| from pyftpdlib.filesystems import AbstractedFS, FilesystemError | ||||
| from pyftpdlib.handlers import FTPHandler | ||||
| from pyftpdlib.log import config_logging | ||||
| from pyftpdlib.servers import FTPServer | ||||
|  | ||||
| from .__init__ import PY2, TYPE_CHECKING, E | ||||
| from .__init__ import ANYWIN, PY2, TYPE_CHECKING, E | ||||
| from .authsrv import VFS | ||||
| from .bos import bos | ||||
| from .util import Pebkac, exclude_dotfiles, fsenc | ||||
| from .util import ( | ||||
|     Daemon, | ||||
|     Pebkac, | ||||
|     exclude_dotfiles, | ||||
|     fsenc, | ||||
|     ipnorm, | ||||
|     pybin, | ||||
|     relchk, | ||||
|     runhook, | ||||
|     sanitize_fn, | ||||
|     vjoin, | ||||
| ) | ||||
|  | ||||
| try: | ||||
|     from pyftpdlib.ioloop import IOLoop | ||||
| @@ -31,11 +42,15 @@ except ImportError: | ||||
| if TYPE_CHECKING: | ||||
|     from .svchub import SvcHub | ||||
|  | ||||
| try: | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     import typing | ||||
|     from typing import Any, Optional | ||||
| except: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class FSE(FilesystemError): | ||||
|     def __init__(self, msg: str, severity: int = 0) -> None: | ||||
|         super(FilesystemError, self).__init__(msg) | ||||
|         self.severity = severity | ||||
|  | ||||
|  | ||||
| class FtpAuth(DummyAuthorizer): | ||||
| @@ -46,25 +61,47 @@ class FtpAuth(DummyAuthorizer): | ||||
|     def validate_authentication( | ||||
|         self, username: str, password: str, handler: Any | ||||
|     ) -> None: | ||||
|         handler.username = "{}:{}".format(username, password) | ||||
|         handler.uname = "*" | ||||
|  | ||||
|         ip = handler.addr[0] | ||||
|         if ip.startswith("::ffff:"): | ||||
|             ip = ip[7:] | ||||
|  | ||||
|         ip = ipnorm(ip) | ||||
|         bans = self.hub.bans | ||||
|         if ip in bans: | ||||
|             rt = bans[ip] - time.time() | ||||
|             if rt < 0: | ||||
|                 logging.info("client unbanned") | ||||
|                 del bans[ip] | ||||
|             else: | ||||
|                 raise AuthenticationFailed("banned") | ||||
|  | ||||
|         asrv = self.hub.asrv | ||||
|         if username == "anonymous": | ||||
|             password = "" | ||||
|             uname = "*" | ||||
|         else: | ||||
|             uname = asrv.iacct.get(password, "") or asrv.iacct.get(username, "") or "*" | ||||
|  | ||||
|         uname = "*" | ||||
|         if password: | ||||
|             uname = asrv.iacct.get(password, "") | ||||
|         if not uname or not (asrv.vfs.aread.get(uname) or asrv.vfs.awrite.get(uname)): | ||||
|             g = self.hub.gpwd | ||||
|             if g.lim: | ||||
|                 bonk, ip = g.bonk(ip, handler.username) | ||||
|                 if bonk: | ||||
|                     logging.warning("client banned: invalid passwords") | ||||
|                     bans[ip] = bonk | ||||
|  | ||||
|         handler.username = uname | ||||
|  | ||||
|         if password and not uname: | ||||
|             raise AuthenticationFailed("Authentication failed.") | ||||
|  | ||||
|         handler.uname = handler.username = uname | ||||
|  | ||||
|     def get_home_dir(self, username: str) -> str: | ||||
|         return "/" | ||||
|  | ||||
|     def has_user(self, username: str) -> bool: | ||||
|         asrv = self.hub.asrv | ||||
|         return username in asrv.acct | ||||
|         return username in asrv.acct or username in asrv.iacct | ||||
|  | ||||
|     def has_perm(self, username: str, perm: int, path: Optional[str] = None) -> bool: | ||||
|         return True  # handled at filesystem layer | ||||
| @@ -83,15 +120,18 @@ class FtpFs(AbstractedFS): | ||||
|     def __init__( | ||||
|         self, root: str, cmd_channel: Any | ||||
|     ) -> None:  # pylint: disable=super-init-not-called | ||||
|         self.h = self.cmd_channel = cmd_channel  # type: FTPHandler | ||||
|         self.h = cmd_channel  # type: FTPHandler | ||||
|         self.cmd_channel = cmd_channel  # type: FTPHandler | ||||
|         self.hub: "SvcHub" = cmd_channel.hub | ||||
|         self.args = cmd_channel.args | ||||
|  | ||||
|         self.uname = self.hub.asrv.iacct.get(cmd_channel.password, "*") | ||||
|         self.uname = cmd_channel.uname | ||||
|  | ||||
|         self.cwd = "/"  # pyftpdlib convention of leading slash | ||||
|         self.root = "/var/lib/empty" | ||||
|  | ||||
|         self.can_read = self.can_write = self.can_move = False | ||||
|         self.can_delete = self.can_get = self.can_upget = False | ||||
|  | ||||
|         self.listdirinfo = self.listdir | ||||
|         self.chdir(".") | ||||
|  | ||||
| @@ -102,16 +142,36 @@ class FtpFs(AbstractedFS): | ||||
|         w: bool = False, | ||||
|         m: bool = False, | ||||
|         d: bool = False, | ||||
|     ) -> str: | ||||
|     ) -> tuple[str, VFS, str]: | ||||
|         try: | ||||
|             vpath = vpath.replace("\\", "/").lstrip("/") | ||||
|             vpath = vpath.replace("\\", "/").strip("/") | ||||
|             rd, fn = os.path.split(vpath) | ||||
|             if ANYWIN and relchk(rd): | ||||
|                 logging.warning("malicious vpath: %s", vpath) | ||||
|                 t = "Unsupported characters in [{}]" | ||||
|                 raise FSE(t.format(vpath), 1) | ||||
|  | ||||
|             fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"]) | ||||
|             vpath = vjoin(rd, fn) | ||||
|             vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d) | ||||
|             if not vfs.realpath: | ||||
|                 raise FilesystemError("no filesystem mounted at this path") | ||||
|                 t = "No filesystem mounted at [{}]" | ||||
|                 raise FSE(t.format(vpath)) | ||||
|  | ||||
|             return os.path.join(vfs.realpath, rem) | ||||
|             if "xdev" in vfs.flags or "xvol" in vfs.flags: | ||||
|                 ap = vfs.canonical(rem) | ||||
|                 avfs = vfs.chk_ap(ap) | ||||
|                 t = "Permission denied in [{}]" | ||||
|                 if not avfs: | ||||
|                     raise FSE(t.format(vpath), 1) | ||||
|  | ||||
|                 cr, cw, cm, cd, _, _ = avfs.can_access("", self.h.uname) | ||||
|                 if r and not cr or w and not cw or m and not cm or d and not cd: | ||||
|                     raise FSE(t.format(vpath), 1) | ||||
|  | ||||
|             return os.path.join(vfs.realpath, rem), vfs, rem | ||||
|         except Pebkac as ex: | ||||
|             raise FilesystemError(str(ex)) | ||||
|             raise FSE(str(ex)) | ||||
|  | ||||
|     def rv2a( | ||||
|         self, | ||||
| @@ -120,7 +180,7 @@ class FtpFs(AbstractedFS): | ||||
|         w: bool = False, | ||||
|         m: bool = False, | ||||
|         d: bool = False, | ||||
|     ) -> str: | ||||
|     ) -> tuple[str, VFS, str]: | ||||
|         return self.v2a(os.path.join(self.cwd, vpath), r, w, m, d) | ||||
|  | ||||
|     def ftp2fs(self, ftppath: str) -> str: | ||||
| @@ -134,7 +194,7 @@ class FtpFs(AbstractedFS): | ||||
|     def validpath(self, path: str) -> bool: | ||||
|         if "/.hist/" in path: | ||||
|             if "/up2k." in path or path.endswith("/dir.txt"): | ||||
|                 raise FilesystemError("access to this file is forbidden") | ||||
|                 raise FSE("Access to this file is forbidden", 1) | ||||
|  | ||||
|         return True | ||||
|  | ||||
| @@ -142,29 +202,62 @@ class FtpFs(AbstractedFS): | ||||
|         r = "r" in mode | ||||
|         w = "w" in mode or "a" in mode or "+" in mode | ||||
|  | ||||
|         ap = self.rv2a(filename, r, w) | ||||
|         if w and bos.path.exists(ap): | ||||
|             raise FilesystemError("cannot open existing file for writing") | ||||
|         ap = self.rv2a(filename, r, w)[0] | ||||
|         if w: | ||||
|             try: | ||||
|                 st = bos.stat(ap) | ||||
|                 td = time.time() - st.st_mtime | ||||
|             except: | ||||
|                 td = 0 | ||||
|  | ||||
|             if td < -1 or td > self.args.ftp_wt: | ||||
|                 raise FSE("Cannot open existing file for writing") | ||||
|  | ||||
|         self.validpath(ap) | ||||
|         return open(fsenc(ap), mode) | ||||
|  | ||||
|     def chdir(self, path: str) -> None: | ||||
|         self.cwd = join(self.cwd, path) | ||||
|         x = self.hub.asrv.vfs.can_access(self.cwd.lstrip("/"), self.h.username) | ||||
|         self.can_read, self.can_write, self.can_move, self.can_delete, self.can_get = x | ||||
|         nwd = join(self.cwd, path) | ||||
|         vfs, rem = self.hub.asrv.vfs.get(nwd, self.uname, False, False) | ||||
|         ap = vfs.canonical(rem) | ||||
|         try: | ||||
|             st = bos.stat(ap) | ||||
|             if not stat.S_ISDIR(st.st_mode): | ||||
|                 raise Exception() | ||||
|         except: | ||||
|             # returning 550 is library-default and suitable | ||||
|             raise FSE("No such file or directory") | ||||
|  | ||||
|         avfs = vfs.chk_ap(ap, st) | ||||
|         if not avfs: | ||||
|             raise FSE("Permission denied", 1) | ||||
|  | ||||
|         self.cwd = nwd | ||||
|         ( | ||||
|             self.can_read, | ||||
|             self.can_write, | ||||
|             self.can_move, | ||||
|             self.can_delete, | ||||
|             self.can_get, | ||||
|             self.can_upget, | ||||
|         ) = avfs.can_access("", self.h.uname) | ||||
|  | ||||
|     def mkdir(self, path: str) -> None: | ||||
|         ap = self.rv2a(path, w=True) | ||||
|         bos.mkdir(ap) | ||||
|         ap = self.rv2a(path, w=True)[0] | ||||
|         bos.makedirs(ap)  # filezilla expects this | ||||
|  | ||||
|     def listdir(self, path: str) -> list[str]: | ||||
|         vpath = join(self.cwd, path).lstrip("/") | ||||
|         vpath = join(self.cwd, path) | ||||
|         try: | ||||
|             vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, True, False) | ||||
|             ap, vfs, rem = self.v2a(vpath, True, False) | ||||
|             if not bos.path.isdir(ap): | ||||
|                 raise FSE("No such file or directory", 1) | ||||
|  | ||||
|             fsroot, vfs_ls1, vfs_virt = vfs.ls( | ||||
|                 rem, self.uname, not self.args.no_scandir, [[True], [False, True]] | ||||
|                 rem, | ||||
|                 self.uname, | ||||
|                 not self.args.no_scandir, | ||||
|                 [[True, False], [False, True]], | ||||
|             ) | ||||
|             vfs_ls = [x[0] for x in vfs_ls1] | ||||
|             vfs_ls.extend(vfs_virt.keys()) | ||||
| @@ -174,8 +267,12 @@ class FtpFs(AbstractedFS): | ||||
|  | ||||
|             vfs_ls.sort() | ||||
|             return vfs_ls | ||||
|         except: | ||||
|             if vpath: | ||||
|         except Exception as ex: | ||||
|             # panic on malicious names | ||||
|             if getattr(ex, "severity", 0): | ||||
|                 raise | ||||
|  | ||||
|             if vpath.strip("/"): | ||||
|                 # display write-only folders as empty | ||||
|                 return [] | ||||
|  | ||||
| @@ -184,43 +281,49 @@ class FtpFs(AbstractedFS): | ||||
|             return list(sorted(list(r.keys()))) | ||||
|  | ||||
|     def rmdir(self, path: str) -> None: | ||||
|         ap = self.rv2a(path, d=True) | ||||
|         bos.rmdir(ap) | ||||
|         ap = self.rv2a(path, d=True)[0] | ||||
|         try: | ||||
|             bos.rmdir(ap) | ||||
|         except OSError as e: | ||||
|             if e.errno != errno.ENOENT: | ||||
|                 raise | ||||
|  | ||||
|     def remove(self, path: str) -> None: | ||||
|         if self.args.no_del: | ||||
|             raise FilesystemError("the delete feature is disabled in server config") | ||||
|             raise FSE("The delete feature is disabled in server config") | ||||
|  | ||||
|         vp = join(self.cwd, path).lstrip("/") | ||||
|         try: | ||||
|             self.hub.up2k.handle_rm(self.uname, self.h.remote_ip, [vp]) | ||||
|             self.hub.up2k.handle_rm(self.uname, self.h.cli_ip, [vp], [], False) | ||||
|         except Exception as ex: | ||||
|             raise FilesystemError(str(ex)) | ||||
|             raise FSE(str(ex)) | ||||
|  | ||||
|     def rename(self, src: str, dst: str) -> None: | ||||
|         if not self.can_move: | ||||
|             raise FilesystemError("not allowed for user " + self.h.username) | ||||
|             raise FSE("Not allowed for user " + self.h.uname) | ||||
|  | ||||
|         if self.args.no_mv: | ||||
|             t = "the rename/move feature is disabled in server config" | ||||
|             raise FilesystemError(t) | ||||
|             raise FSE("The rename/move feature is disabled in server config") | ||||
|  | ||||
|         svp = join(self.cwd, src).lstrip("/") | ||||
|         dvp = join(self.cwd, dst).lstrip("/") | ||||
|         try: | ||||
|             self.hub.up2k.handle_mv(self.uname, svp, dvp) | ||||
|         except Exception as ex: | ||||
|             raise FilesystemError(str(ex)) | ||||
|             raise FSE(str(ex)) | ||||
|  | ||||
|     def chmod(self, path: str, mode: str) -> None: | ||||
|         pass | ||||
|  | ||||
|     def stat(self, path: str) -> os.stat_result: | ||||
|         try: | ||||
|             ap = self.rv2a(path, r=True) | ||||
|             ap = self.rv2a(path, r=True)[0] | ||||
|             return bos.stat(ap) | ||||
|         except: | ||||
|             ap = self.rv2a(path) | ||||
|         except FSE as ex: | ||||
|             if ex.severity: | ||||
|                 raise | ||||
|  | ||||
|             ap = self.rv2a(path)[0] | ||||
|             st = bos.stat(ap) | ||||
|             if not stat.S_ISDIR(st.st_mode): | ||||
|                 raise | ||||
| @@ -228,41 +331,50 @@ class FtpFs(AbstractedFS): | ||||
|             return st | ||||
|  | ||||
|     def utime(self, path: str, timeval: float) -> None: | ||||
|         ap = self.rv2a(path, w=True) | ||||
|         ap = self.rv2a(path, w=True)[0] | ||||
|         return bos.utime(ap, (timeval, timeval)) | ||||
|  | ||||
|     def lstat(self, path: str) -> os.stat_result: | ||||
|         ap = self.rv2a(path) | ||||
|         return bos.lstat(ap) | ||||
|         ap = self.rv2a(path)[0] | ||||
|         return bos.stat(ap) | ||||
|  | ||||
|     def isfile(self, path: str) -> bool: | ||||
|         st = self.stat(path) | ||||
|         return stat.S_ISREG(st.st_mode) | ||||
|         try: | ||||
|             st = self.stat(path) | ||||
|             return stat.S_ISREG(st.st_mode) | ||||
|         except Exception as ex: | ||||
|             if getattr(ex, "severity", 0): | ||||
|                 raise | ||||
|  | ||||
|             return False  # expected for mojibake in ftp_SIZE() | ||||
|  | ||||
|     def islink(self, path: str) -> bool: | ||||
|         ap = self.rv2a(path) | ||||
|         ap = self.rv2a(path)[0] | ||||
|         return bos.path.islink(ap) | ||||
|  | ||||
|     def isdir(self, path: str) -> bool: | ||||
|         try: | ||||
|             st = self.stat(path) | ||||
|             return stat.S_ISDIR(st.st_mode) | ||||
|         except: | ||||
|         except Exception as ex: | ||||
|             if getattr(ex, "severity", 0): | ||||
|                 raise | ||||
|  | ||||
|             return True | ||||
|  | ||||
|     def getsize(self, path: str) -> int: | ||||
|         ap = self.rv2a(path) | ||||
|         ap = self.rv2a(path)[0] | ||||
|         return bos.path.getsize(ap) | ||||
|  | ||||
|     def getmtime(self, path: str) -> float: | ||||
|         ap = self.rv2a(path) | ||||
|         ap = self.rv2a(path)[0] | ||||
|         return bos.path.getmtime(ap) | ||||
|  | ||||
|     def realpath(self, path: str) -> str: | ||||
|         return path | ||||
|  | ||||
|     def lexists(self, path: str) -> bool: | ||||
|         ap = self.rv2a(path) | ||||
|         ap = self.rv2a(path)[0] | ||||
|         return bos.path.lexists(ap) | ||||
|  | ||||
|     def get_user_by_uid(self, uid: int) -> str: | ||||
| @@ -274,26 +386,50 @@ class FtpFs(AbstractedFS): | ||||
|  | ||||
| class FtpHandler(FTPHandler): | ||||
|     abstracted_fs = FtpFs | ||||
|     hub: "SvcHub" = None | ||||
|     args: argparse.Namespace = None | ||||
|     hub: "SvcHub" | ||||
|     args: argparse.Namespace | ||||
|     uname: str | ||||
|  | ||||
|     def __init__(self, conn: Any, server: Any, ioloop: Any = None) -> None: | ||||
|         self.hub: "SvcHub" = FtpHandler.hub | ||||
|         self.args: argparse.Namespace = FtpHandler.args | ||||
|         self.uname = "*" | ||||
|  | ||||
|         if PY2: | ||||
|             FTPHandler.__init__(self, conn, server, ioloop) | ||||
|         else: | ||||
|             super(FtpHandler, self).__init__(conn, server, ioloop) | ||||
|  | ||||
|         cip = self.remote_ip | ||||
|         self.cli_ip = cip[7:] if cip.startswith("::ffff:") else cip | ||||
|  | ||||
|         # abspath->vpath mapping to resolve log_transfer paths | ||||
|         self.vfs_map: dict[str, str] = {} | ||||
|  | ||||
|         # reduce non-debug logging | ||||
|         self.log_cmds_list = [x for x in self.log_cmds_list if x not in ("CWD", "XCWD")] | ||||
|  | ||||
|     def ftp_STOR(self, file: str, mode: str = "w") -> Any: | ||||
|         # Optional[str] | ||||
|         vp = join(self.fs.cwd, file).lstrip("/") | ||||
|         ap = self.fs.v2a(vp) | ||||
|         ap, vfs, rem = self.fs.v2a(vp, w=True) | ||||
|         self.vfs_map[ap] = vp | ||||
|         xbu = vfs.flags.get("xbu") | ||||
|         if xbu and not runhook( | ||||
|             None, | ||||
|             xbu, | ||||
|             ap, | ||||
|             vfs.canonical(rem), | ||||
|             "", | ||||
|             self.uname, | ||||
|             0, | ||||
|             0, | ||||
|             self.cli_ip, | ||||
|             0, | ||||
|             "", | ||||
|         ): | ||||
|             raise FSE("Upload blocked by xbu server config") | ||||
|  | ||||
|         # print("ftp_STOR: {} {} => {}".format(vp, mode, ap)) | ||||
|         ret = FTPHandler.ftp_STOR(self, file, mode) | ||||
|         # print("ftp_STOR: {} {} OK".format(vp, mode)) | ||||
| @@ -314,15 +450,17 @@ class FtpHandler(FTPHandler): | ||||
|         # print("xfer_end: {} => {}".format(ap, vp)) | ||||
|         if vp: | ||||
|             vp, fn = os.path.split(vp) | ||||
|             vfs, rem = self.hub.asrv.vfs.get(vp, self.username, False, True) | ||||
|             vfs, rem = self.hub.asrv.vfs.get(vp, self.uname, False, True) | ||||
|             vfs, rem = vfs.get_dbv(rem) | ||||
|             self.hub.up2k.hash_file( | ||||
|                 vfs.realpath, | ||||
|                 vfs.vpath, | ||||
|                 vfs.flags, | ||||
|                 rem, | ||||
|                 fn, | ||||
|                 self.remote_ip, | ||||
|                 self.cli_ip, | ||||
|                 time.time(), | ||||
|                 self.uname, | ||||
|             ) | ||||
|  | ||||
|         return FTPHandler.log_transfer( | ||||
| @@ -353,10 +491,10 @@ class Ftpd(object): | ||||
|                 h1 = SftpHandler | ||||
|             except: | ||||
|                 t = "\nftps requires pyopenssl;\nplease run the following:\n\n  {} -m pip install --user pyopenssl\n" | ||||
|                 print(t.format(sys.executable)) | ||||
|                 print(t.format(pybin)) | ||||
|                 sys.exit(1) | ||||
|  | ||||
|             h1.certfile = os.path.join(E.cfg, "cert.pem") | ||||
|             h1.certfile = self.args.cert | ||||
|             h1.tls_control_required = True | ||||
|             h1.tls_data_required = True | ||||
|  | ||||
| @@ -364,9 +502,9 @@ class Ftpd(object): | ||||
|  | ||||
|         for h_lp in hs: | ||||
|             h2, lp = h_lp | ||||
|             h2.hub = hub | ||||
|             h2.args = hub.args | ||||
|             h2.authorizer = FtpAuth(hub) | ||||
|             FtpHandler.hub = h2.hub = hub | ||||
|             FtpHandler.args = h2.args = hub.args | ||||
|             FtpHandler.authorizer = h2.authorizer = FtpAuth(hub) | ||||
|  | ||||
|             if self.args.ftp_pr: | ||||
|                 p1, p2 = [int(x) for x in self.args.ftp_pr.split("-")] | ||||
| @@ -383,17 +521,26 @@ class Ftpd(object): | ||||
|             if self.args.ftp_nat: | ||||
|                 h2.masquerade_address = self.args.ftp_nat | ||||
|  | ||||
|         if self.args.ftp_dbg: | ||||
|             config_logging(level=logging.DEBUG) | ||||
|         lgr = logging.getLogger("pyftpdlib") | ||||
|         lgr.setLevel(logging.DEBUG if self.args.ftpv else logging.INFO) | ||||
|  | ||||
|         ips = self.args.i | ||||
|         if "::" in ips: | ||||
|             ips.append("0.0.0.0") | ||||
|  | ||||
|         if self.args.ftp4: | ||||
|             ips = [x for x in ips if ":" not in x] | ||||
|  | ||||
|         ioloop = IOLoop() | ||||
|         for ip in self.args.i: | ||||
|         for ip in ips: | ||||
|             for h, lp in hs: | ||||
|                 FTPServer((ip, int(lp)), h, ioloop) | ||||
|                 try: | ||||
|                     FTPServer((ip, int(lp)), h, ioloop) | ||||
|                 except: | ||||
|                     if ip != "0.0.0.0" or "::" not in ips: | ||||
|                         raise | ||||
|  | ||||
|         thr = threading.Thread(target=ioloop.loop, name="ftp") | ||||
|         thr.daemon = True | ||||
|         thr.start() | ||||
|         Daemon(ioloop.loop, "ftp") | ||||
|  | ||||
|  | ||||
| def join(p1: str, p2: str) -> str: | ||||
|   | ||||
							
								
								
									
										1641
									
								
								copyparty/httpcli.py
									
									
									
									
									
								
							
							
						
						
									
										1641
									
								
								copyparty/httpcli.py
									
									
									
									
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| @@ -15,7 +15,7 @@ except: | ||||
|     HAVE_SSL = False | ||||
|  | ||||
| from . import util as Util | ||||
| from .__init__ import TYPE_CHECKING, E | ||||
| from .__init__ import TYPE_CHECKING, EnvParams | ||||
| from .authsrv import AuthSrv  # typechk | ||||
| from .httpcli import HttpCli | ||||
| from .ico import Ico | ||||
| @@ -23,16 +23,18 @@ from .mtag import HAVE_FFMPEG | ||||
| from .th_cli import ThumbCli | ||||
| from .th_srv import HAVE_PIL, HAVE_VIPS | ||||
| from .u2idx import U2idx | ||||
| from .util import HMaccas, shut_socket | ||||
|  | ||||
| try: | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Optional, Pattern, Union | ||||
| except: | ||||
|     pass | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .httpsrv import HttpSrv | ||||
|  | ||||
|  | ||||
| PTN_HTTP = re.compile(br"[A-Z]{3}[A-Z ]") | ||||
|  | ||||
|  | ||||
| class HttpConn(object): | ||||
|     """ | ||||
|     spawned by HttpSrv to handle an incoming client connection, | ||||
| @@ -44,22 +46,27 @@ class HttpConn(object): | ||||
|     ) -> None: | ||||
|         self.s = sck | ||||
|         self.sr: Optional[Util._Unrecv] = None | ||||
|         self.cli: Optional[HttpCli] = None | ||||
|         self.addr = addr | ||||
|         self.hsrv = hsrv | ||||
|  | ||||
|         self.mutex: threading.Lock = hsrv.mutex  # mypy404 | ||||
|         self.args: argparse.Namespace = hsrv.args  # mypy404 | ||||
|         self.E: EnvParams = self.args.E | ||||
|         self.asrv: AuthSrv = hsrv.asrv  # mypy404 | ||||
|         self.cert_path = hsrv.cert_path | ||||
|         self.u2fh: Util.FHC = hsrv.u2fh  # mypy404 | ||||
|         self.iphash: HMaccas = hsrv.broker.iphash | ||||
|         self.bans: dict[str, int] = hsrv.bans | ||||
|         self.aclose: dict[str, int] = hsrv.aclose | ||||
|  | ||||
|         enth = (HAVE_PIL or HAVE_VIPS or HAVE_FFMPEG) and not self.args.no_thumb | ||||
|         self.thumbcli: Optional[ThumbCli] = ThumbCli(hsrv) if enth else None  # mypy404 | ||||
|         self.ico: Ico = Ico(self.args)  # mypy404 | ||||
|  | ||||
|         self.t0: float = time.time()  # mypy404 | ||||
|         self.freshen_pwd: float = 0.0 | ||||
|         self.stopping = False | ||||
|         self.nreq: int = 0  # mypy404 | ||||
|         self.nreq: int = -1  # mypy404 | ||||
|         self.nbyte: int = 0  # mypy404 | ||||
|         self.u2idx: Optional[U2idx] = None | ||||
|         self.log_func: "Util.RootLogger" = hsrv.log  # mypy404 | ||||
| @@ -72,8 +79,7 @@ class HttpConn(object): | ||||
|     def shutdown(self) -> None: | ||||
|         self.stopping = True | ||||
|         try: | ||||
|             self.s.shutdown(socket.SHUT_RDWR) | ||||
|             self.s.close() | ||||
|             shut_socket(self.log, self.s, 1) | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
| @@ -91,22 +97,23 @@ class HttpConn(object): | ||||
|         return self.log_src | ||||
|  | ||||
|     def respath(self, res_name: str) -> str: | ||||
|         return os.path.join(E.mod, "web", res_name) | ||||
|         return os.path.join(self.E.mod, "web", res_name) | ||||
|  | ||||
|     def log(self, msg: str, c: Union[int, str] = 0) -> None: | ||||
|         self.log_func(self.log_src, msg, c) | ||||
|  | ||||
|     def get_u2idx(self) -> U2idx: | ||||
|         # one u2idx per tcp connection; | ||||
|     def get_u2idx(self) -> Optional[U2idx]: | ||||
|         # grab from a pool of u2idx instances; | ||||
|         # sqlite3 fully parallelizes under python threads | ||||
|         # but avoid running out of FDs by creating too many | ||||
|         if not self.u2idx: | ||||
|             self.u2idx = U2idx(self) | ||||
|             self.u2idx = self.hsrv.get_u2idx(str(self.addr)) | ||||
|  | ||||
|         return self.u2idx | ||||
|  | ||||
|     def _detect_https(self) -> bool: | ||||
|         method = None | ||||
|         if self.cert_path: | ||||
|         if True: | ||||
|             try: | ||||
|                 method = self.s.recv(4, socket.MSG_PEEK) | ||||
|             except socket.timeout: | ||||
| @@ -132,13 +139,15 @@ class HttpConn(object): | ||||
|                 self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8")) | ||||
|                 return False | ||||
|  | ||||
|         return method not in [None, b"GET ", b"HEAD", b"POST", b"PUT ", b"OPTI"] | ||||
|         return not method or not bool(PTN_HTTP.match(method)) | ||||
|  | ||||
|     def run(self) -> None: | ||||
|         self.s.settimeout(10) | ||||
|  | ||||
|         self.sr = None | ||||
|         if self.args.https_only: | ||||
|             is_https = True | ||||
|         elif self.args.http_only or not HAVE_SSL: | ||||
|         elif self.args.http_only: | ||||
|             is_https = False | ||||
|         else: | ||||
|             # raise Exception("asdf") | ||||
| @@ -152,7 +161,7 @@ class HttpConn(object): | ||||
|             self.log_src = self.log_src.replace("[36m", "[35m") | ||||
|             try: | ||||
|                 ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) | ||||
|                 ctx.load_cert_chain(self.cert_path) | ||||
|                 ctx.load_cert_chain(self.args.cert) | ||||
|                 if self.args.ssl_ver: | ||||
|                     ctx.options &= ~self.args.ssl_flags_en | ||||
|                     ctx.options |= self.args.ssl_flags_de | ||||
| @@ -189,11 +198,7 @@ class HttpConn(object): | ||||
|             except Exception as ex: | ||||
|                 em = str(ex) | ||||
|  | ||||
|                 if "ALERT_BAD_CERTIFICATE" in em: | ||||
|                     # firefox-linux if there is no exception yet | ||||
|                     self.log("client rejected our certificate (nice)") | ||||
|  | ||||
|                 elif "ALERT_CERTIFICATE_UNKNOWN" in em: | ||||
|                 if "ALERT_CERTIFICATE_UNKNOWN" in em: | ||||
|                     # android-chrome keeps doing this | ||||
|                     pass | ||||
|  | ||||
| @@ -207,6 +212,10 @@ class HttpConn(object): | ||||
|  | ||||
|         while not self.stopping: | ||||
|             self.nreq += 1 | ||||
|             cli = HttpCli(self) | ||||
|             if not cli.run(): | ||||
|             self.cli = HttpCli(self) | ||||
|             if not self.cli.run(): | ||||
|                 return | ||||
|  | ||||
|             if self.u2idx: | ||||
|                 self.hsrv.put_u2idx(str(self.addr), self.u2idx) | ||||
|                 self.u2idx = None | ||||
|   | ||||
| @@ -11,9 +11,19 @@ import time | ||||
|  | ||||
| import queue | ||||
|  | ||||
| from .__init__ import ANYWIN, CORES, EXE, MACOS, TYPE_CHECKING, EnvParams | ||||
|  | ||||
| try: | ||||
|     MNFE = ModuleNotFoundError | ||||
| except: | ||||
|     MNFE = ImportError | ||||
|  | ||||
| try: | ||||
|     import jinja2 | ||||
| except ImportError: | ||||
| except MNFE: | ||||
|     if EXE: | ||||
|         raise | ||||
|  | ||||
|     print( | ||||
|         """\033[1;31m | ||||
|   you do not have jinja2 installed,\033[33m | ||||
| @@ -23,23 +33,52 @@ except ImportError: | ||||
|    * (try another python version, if you have one) | ||||
|    * (try copyparty.sfx instead) | ||||
| """.format( | ||||
|             os.path.basename(sys.executable) | ||||
|             sys.executable | ||||
|         ) | ||||
|     ) | ||||
|     sys.exit(1) | ||||
| except SyntaxError: | ||||
|     if EXE: | ||||
|         raise | ||||
|  | ||||
|     print( | ||||
|         """\033[1;31m | ||||
|   your jinja2 version is incompatible with your python version;\033[33m | ||||
|   please try to replace it with an older version:\033[0m | ||||
|    * {} -m pip install --user jinja2==2.11.3 | ||||
|    * (try another python version, if you have one) | ||||
|    * (try copyparty.sfx instead) | ||||
| """.format( | ||||
|             sys.executable | ||||
|         ) | ||||
|     ) | ||||
|     sys.exit(1) | ||||
|  | ||||
| from .__init__ import MACOS, TYPE_CHECKING, E | ||||
| from .bos import bos | ||||
| from .httpconn import HttpConn | ||||
| from .util import FHC, min_ex, spack, start_log_thrs, start_stackmon | ||||
| from .u2idx import U2idx | ||||
| from .util import ( | ||||
|     E_SCK, | ||||
|     FHC, | ||||
|     Daemon, | ||||
|     Garda, | ||||
|     Magician, | ||||
|     Netdev, | ||||
|     NetMap, | ||||
|     ipnorm, | ||||
|     min_ex, | ||||
|     shut_socket, | ||||
|     spack, | ||||
|     start_log_thrs, | ||||
|     start_stackmon, | ||||
| ) | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .broker_util import BrokerCli | ||||
|     from .ssdp import SSDPr | ||||
|  | ||||
| try: | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Optional | ||||
| except: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class HttpSrv(object): | ||||
| @@ -52,11 +91,23 @@ class HttpSrv(object): | ||||
|         self.broker = broker | ||||
|         self.nid = nid | ||||
|         self.args = broker.args | ||||
|         self.E: EnvParams = self.args.E | ||||
|         self.log = broker.log | ||||
|         self.asrv = broker.asrv | ||||
|  | ||||
|         nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else "" | ||||
|         # redefine in case of multiprocessing | ||||
|         socket.setdefaulttimeout(120) | ||||
|  | ||||
|         nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else "" | ||||
|         self.magician = Magician() | ||||
|         self.nm = NetMap([], {}) | ||||
|         self.ssdp: Optional["SSDPr"] = None | ||||
|         self.gpwd = Garda(self.args.ban_pw) | ||||
|         self.g404 = Garda(self.args.ban_404) | ||||
|         self.bans: dict[str, int] = {} | ||||
|         self.aclose: dict[str, int] = {} | ||||
|  | ||||
|         self.bound: set[tuple[str, int]] = set() | ||||
|         self.name = "hsrv" + nsuf | ||||
|         self.mutex = threading.Lock() | ||||
|         self.stopping = False | ||||
| @@ -77,19 +128,25 @@ class HttpSrv(object): | ||||
|         self.cb_ts = 0.0 | ||||
|         self.cb_v = "" | ||||
|  | ||||
|         env = jinja2.Environment() | ||||
|         env.loader = jinja2.FileSystemLoader(os.path.join(E.mod, "web")) | ||||
|         self.j2 = { | ||||
|             x: env.get_template(x + ".html") | ||||
|             for x in ["splash", "browser", "browser2", "msg", "md", "mde", "cf"] | ||||
|         } | ||||
|         self.prism = os.path.exists(os.path.join(E.mod, "web", "deps", "prism.js.gz")) | ||||
|         self.u2idx_free: dict[str, U2idx] = {} | ||||
|         self.u2idx_n = 0 | ||||
|  | ||||
|         cert_path = os.path.join(E.cfg, "cert.pem") | ||||
|         if bos.path.exists(cert_path): | ||||
|             self.cert_path = cert_path | ||||
|         else: | ||||
|             self.cert_path = "" | ||||
|         env = jinja2.Environment() | ||||
|         env.loader = jinja2.FileSystemLoader(os.path.join(self.E.mod, "web")) | ||||
|         jn = ["splash", "svcs", "browser", "browser2", "msg", "md", "mde", "cf"] | ||||
|         self.j2 = {x: env.get_template(x + ".html") for x in jn} | ||||
|         zs = os.path.join(self.E.mod, "web", "deps", "prism.js.gz") | ||||
|         self.prism = os.path.exists(zs) | ||||
|  | ||||
|         self.mallow = "GET HEAD POST PUT DELETE OPTIONS".split() | ||||
|         if not self.args.no_dav: | ||||
|             zs = "PROPFIND PROPPATCH LOCK UNLOCK MKCOL COPY MOVE" | ||||
|             self.mallow += zs.split() | ||||
|  | ||||
|         if self.args.zs: | ||||
|             from .ssdp import SSDPr | ||||
|  | ||||
|             self.ssdp = SSDPr(broker) | ||||
|  | ||||
|         if self.tp_q: | ||||
|             self.start_threads(4) | ||||
| @@ -102,9 +159,7 @@ class HttpSrv(object): | ||||
|                 start_log_thrs(self.log, self.args.log_thrs, nid) | ||||
|  | ||||
|         self.th_cfg: dict[str, Any] = {} | ||||
|         t = threading.Thread(target=self.post_init, name="hsrv-init2") | ||||
|         t.daemon = True | ||||
|         t.start() | ||||
|         Daemon(self.post_init, "hsrv-init2") | ||||
|  | ||||
|     def post_init(self) -> None: | ||||
|         try: | ||||
| @@ -113,18 +168,20 @@ class HttpSrv(object): | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
|     def set_netdevs(self, netdevs: dict[str, Netdev]) -> None: | ||||
|         ips = set() | ||||
|         for ip, _ in self.bound: | ||||
|             ips.add(ip) | ||||
|  | ||||
|         self.nm = NetMap(list(ips), netdevs) | ||||
|  | ||||
|     def start_threads(self, n: int) -> None: | ||||
|         self.tp_nthr += n | ||||
|         if self.args.log_htp: | ||||
|             self.log(self.name, "workers += {} = {}".format(n, self.tp_nthr), 6) | ||||
|  | ||||
|         for _ in range(n): | ||||
|             thr = threading.Thread( | ||||
|                 target=self.thr_poolw, | ||||
|                 name=self.name + "-poolw", | ||||
|             ) | ||||
|             thr.daemon = True | ||||
|             thr.start() | ||||
|             Daemon(self.thr_poolw, self.name + "-poolw") | ||||
|  | ||||
|     def stop_threads(self, n: int) -> None: | ||||
|         self.tp_nthr -= n | ||||
| @@ -150,22 +207,30 @@ class HttpSrv(object): | ||||
|                     return | ||||
|  | ||||
|     def listen(self, sck: socket.socket, nlisteners: int) -> None: | ||||
|         ip, port = sck.getsockname() | ||||
|         if self.args.j != 1: | ||||
|             # lost in the pickle; redefine | ||||
|             if not ANYWIN or self.args.reuseaddr: | ||||
|                 sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) | ||||
|  | ||||
|             sck.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) | ||||
|             sck.settimeout(None)  # < does not inherit, ^ opts above do | ||||
|  | ||||
|         ip, port = sck.getsockname()[:2] | ||||
|         self.srvs.append(sck) | ||||
|         self.bound.add((ip, port)) | ||||
|         self.nclimax = math.ceil(self.args.nc * 1.0 / nlisteners) | ||||
|         t = threading.Thread( | ||||
|             target=self.thr_listen, | ||||
|             args=(sck,), | ||||
|             name="httpsrv-n{}-listen-{}-{}".format(self.nid or "0", ip, port), | ||||
|         Daemon( | ||||
|             self.thr_listen, | ||||
|             "httpsrv-n{}-listen-{}-{}".format(self.nid or "0", ip, port), | ||||
|             (sck,), | ||||
|         ) | ||||
|         t.daemon = True | ||||
|         t.start() | ||||
|  | ||||
|     def thr_listen(self, srv_sck: socket.socket) -> None: | ||||
|         """listens on a shared tcp server""" | ||||
|         ip, port = srv_sck.getsockname() | ||||
|         ip, port = srv_sck.getsockname()[:2] | ||||
|         fno = srv_sck.fileno() | ||||
|         msg = "subscribed @ {}:{}  f{} p{}".format(ip, port, fno, os.getpid()) | ||||
|         hip = "[{}]".format(ip) if ":" in ip else ip | ||||
|         msg = "subscribed @ {}:{}  f{} p{}".format(hip, port, fno, os.getpid()) | ||||
|         self.log(self.name, msg) | ||||
|  | ||||
|         def fun() -> None: | ||||
| @@ -175,19 +240,80 @@ class HttpSrv(object): | ||||
|  | ||||
|         while not self.stopping: | ||||
|             if self.args.log_conn: | ||||
|                 self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30") | ||||
|                 self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="90") | ||||
|  | ||||
|             if self.ncli >= self.nclimax: | ||||
|                 self.log(self.name, "at connection limit; waiting", 3) | ||||
|                 while self.ncli >= self.nclimax: | ||||
|                     time.sleep(0.1) | ||||
|             spins = 0 | ||||
|             while self.ncli >= self.nclimax: | ||||
|                 if not spins: | ||||
|                     self.log(self.name, "at connection limit; waiting", 3) | ||||
|  | ||||
|                 spins += 1 | ||||
|                 time.sleep(0.1) | ||||
|                 if spins != 50 or not self.args.aclose: | ||||
|                     continue | ||||
|  | ||||
|                 ipfreq: dict[str, int] = {} | ||||
|                 with self.mutex: | ||||
|                     for c in self.clients: | ||||
|                         ip = ipnorm(c.ip) | ||||
|                         try: | ||||
|                             ipfreq[ip] += 1 | ||||
|                         except: | ||||
|                             ipfreq[ip] = 1 | ||||
|  | ||||
|                 ip, n = sorted(ipfreq.items(), key=lambda x: x[1], reverse=True)[0] | ||||
|                 if n < self.nclimax / 2: | ||||
|                     continue | ||||
|  | ||||
|                 self.aclose[ip] = int(time.time() + self.args.aclose * 60) | ||||
|                 nclose = 0 | ||||
|                 nloris = 0 | ||||
|                 nconn = 0 | ||||
|                 with self.mutex: | ||||
|                     for c in self.clients: | ||||
|                         cip = ipnorm(c.ip) | ||||
|                         if ip != cip: | ||||
|                             continue | ||||
|  | ||||
|                         nconn += 1 | ||||
|                         try: | ||||
|                             if ( | ||||
|                                 c.nreq >= 1 | ||||
|                                 or not c.cli | ||||
|                                 or c.cli.in_hdr_recv | ||||
|                                 or c.cli.keepalive | ||||
|                             ): | ||||
|                                 Daemon(c.shutdown) | ||||
|                                 nclose += 1 | ||||
|                                 if c.nreq <= 0 and (not c.cli or c.cli.in_hdr_recv): | ||||
|                                     nloris += 1 | ||||
|                         except: | ||||
|                             pass | ||||
|  | ||||
|                 t = "{} downgraded to connection:close for {} min; dropped {}/{} connections" | ||||
|                 self.log(self.name, t.format(ip, self.args.aclose, nclose, nconn), 1) | ||||
|  | ||||
|                 if nloris < nconn / 2: | ||||
|                     continue | ||||
|  | ||||
|                 t = "slowloris (idle-conn): {} banned for {} min" | ||||
|                 self.log(self.name, t.format(ip, self.args.loris, nclose), 1) | ||||
|                 self.bans[ip] = int(time.time() + self.args.loris * 60) | ||||
|  | ||||
|             if self.args.log_conn: | ||||
|                 self.log(self.name, "|%sC-acc1" % ("-" * 2,), c="1;30") | ||||
|                 self.log(self.name, "|%sC-acc1" % ("-" * 2,), c="90") | ||||
|  | ||||
|             try: | ||||
|                 sck, addr = srv_sck.accept() | ||||
|                 sck, saddr = srv_sck.accept() | ||||
|                 cip, cport = saddr[:2] | ||||
|                 if cip.startswith("::ffff:"): | ||||
|                     cip = cip[7:] | ||||
|  | ||||
|                 addr = (cip, cport) | ||||
|             except (OSError, socket.error) as ex: | ||||
|                 if self.stopping: | ||||
|                     break | ||||
|  | ||||
|                 self.log(self.name, "accept({}): {}".format(fno, ex), c=6) | ||||
|                 time.sleep(0.02) | ||||
|                 continue | ||||
| @@ -196,7 +322,7 @@ class HttpSrv(object): | ||||
|                 t = "|{}C-acc2 \033[0;36m{} \033[3{}m{}".format( | ||||
|                     "-" * 3, ip, port % 8, port | ||||
|                 ) | ||||
|                 self.log("%s %s" % addr, t, c="1;30") | ||||
|                 self.log("%s %s" % addr, t, c="90") | ||||
|  | ||||
|             self.accept(sck, addr) | ||||
|  | ||||
| @@ -217,10 +343,7 @@ class HttpSrv(object): | ||||
|                 if self.nid: | ||||
|                     name += "-{}".format(self.nid) | ||||
|  | ||||
|                 thr = threading.Thread(target=self.periodic, name=name) | ||||
|                 self.t_periodic = thr | ||||
|                 thr.daemon = True | ||||
|                 thr.start() | ||||
|                 self.t_periodic = Daemon(self.periodic, name) | ||||
|  | ||||
|             if self.tp_q: | ||||
|                 self.tp_time = self.tp_time or now | ||||
| @@ -235,13 +358,11 @@ class HttpSrv(object): | ||||
|             t = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n" | ||||
|             self.log(self.name, t, 1) | ||||
|  | ||||
|         thr = threading.Thread( | ||||
|             target=self.thr_client, | ||||
|             args=(sck, addr), | ||||
|             name="httpconn-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]), | ||||
|         Daemon( | ||||
|             self.thr_client, | ||||
|             "httpconn-{}-{}".format(addr[0].split(".", 2)[-1][-6:], addr[1]), | ||||
|             (sck, addr), | ||||
|         ) | ||||
|         thr.daemon = True | ||||
|         thr.start() | ||||
|  | ||||
|     def thr_poolw(self) -> None: | ||||
|         assert self.tp_q | ||||
| @@ -275,12 +396,12 @@ class HttpSrv(object): | ||||
|             except: | ||||
|                 pass | ||||
|  | ||||
|         thrs = [] | ||||
|         clients = list(self.clients) | ||||
|         for cli in clients: | ||||
|             try: | ||||
|                 cli.shutdown() | ||||
|             except: | ||||
|                 pass | ||||
|             t = threading.Thread(target=cli.shutdown) | ||||
|             thrs.append(t) | ||||
|             t.start() | ||||
|  | ||||
|         if self.tp_q: | ||||
|             self.stop_threads(self.tp_nthr) | ||||
| @@ -289,25 +410,27 @@ class HttpSrv(object): | ||||
|                 if self.tp_q.empty(): | ||||
|                     break | ||||
|  | ||||
|         for t in thrs: | ||||
|             t.join() | ||||
|  | ||||
|         self.log(self.name, "ok bye") | ||||
|  | ||||
|     def thr_client(self, sck: socket.socket, addr: tuple[str, int]) -> None: | ||||
|         """thread managing one tcp client""" | ||||
|         sck.settimeout(120) | ||||
|  | ||||
|         cli = HttpConn(sck, addr, self) | ||||
|         with self.mutex: | ||||
|             self.clients.add(cli) | ||||
|  | ||||
|         # print("{}\n".format(len(self.clients)), end="") | ||||
|         fno = sck.fileno() | ||||
|         try: | ||||
|             if self.args.log_conn: | ||||
|                 self.log("%s %s" % addr, "|%sC-crun" % ("-" * 4,), c="1;30") | ||||
|                 self.log("%s %s" % addr, "|%sC-crun" % ("-" * 4,), c="90") | ||||
|  | ||||
|             cli.run() | ||||
|  | ||||
|         except (OSError, socket.error) as ex: | ||||
|             if ex.errno not in [10038, 10054, 107, 57, 49, 9]: | ||||
|             if ex.errno not in E_SCK: | ||||
|                 self.log( | ||||
|                     "%s %s" % addr, | ||||
|                     "run({}): {}".format(fno, ex), | ||||
| @@ -317,32 +440,28 @@ class HttpSrv(object): | ||||
|         finally: | ||||
|             sck = cli.s | ||||
|             if self.args.log_conn: | ||||
|                 self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 5,), c="1;30") | ||||
|                 self.log("%s %s" % addr, "|%sC-cdone" % ("-" * 5,), c="90") | ||||
|  | ||||
|             try: | ||||
|                 fno = sck.fileno() | ||||
|                 sck.shutdown(socket.SHUT_RDWR) | ||||
|                 sck.close() | ||||
|                 shut_socket(cli.log, sck) | ||||
|             except (OSError, socket.error) as ex: | ||||
|                 if not MACOS: | ||||
|                     self.log( | ||||
|                         "%s %s" % addr, | ||||
|                         "shut({}): {}".format(fno, ex), | ||||
|                         c="1;30", | ||||
|                         c="90", | ||||
|                     ) | ||||
|                 if ex.errno not in [10038, 10054, 107, 57, 49, 9]: | ||||
|                     # 10038 No longer considered a socket | ||||
|                     # 10054 Foribly closed by remote | ||||
|                     #   107 Transport endpoint not connected | ||||
|                     #    57 Socket is not connected | ||||
|                     #    49 Can't assign requested address (wifi down) | ||||
|                     #     9 Bad file descriptor | ||||
|                 if ex.errno not in E_SCK: | ||||
|                     raise | ||||
|             finally: | ||||
|                 with self.mutex: | ||||
|                     self.clients.remove(cli) | ||||
|                     self.ncli -= 1 | ||||
|  | ||||
|                 if cli.u2idx: | ||||
|                     self.put_u2idx(str(addr), cli.u2idx) | ||||
|  | ||||
|     def cachebuster(self) -> str: | ||||
|         if time.time() - self.cb_ts < 1: | ||||
|             return self.cb_v | ||||
| @@ -351,9 +470,9 @@ class HttpSrv(object): | ||||
|             if time.time() - self.cb_ts < 1: | ||||
|                 return self.cb_v | ||||
|  | ||||
|             v = E.t0 | ||||
|             v = self.E.t0 | ||||
|             try: | ||||
|                 with os.scandir(os.path.join(E.mod, "web")) as dh: | ||||
|                 with os.scandir(os.path.join(self.E.mod, "web")) as dh: | ||||
|                     for fh in dh: | ||||
|                         inf = fh.stat() | ||||
|                         v = max(v, inf.st_mtime) | ||||
| @@ -364,3 +483,31 @@ class HttpSrv(object): | ||||
|             self.cb_v = v.decode("ascii")[-4:] | ||||
|             self.cb_ts = time.time() | ||||
|             return self.cb_v | ||||
|  | ||||
|     def get_u2idx(self, ident: str) -> Optional[U2idx]: | ||||
|         utab = self.u2idx_free | ||||
|         for _ in range(100):  # 5/0.05 = 5sec | ||||
|             with self.mutex: | ||||
|                 if utab: | ||||
|                     if ident in utab: | ||||
|                         return utab.pop(ident) | ||||
|  | ||||
|                     return utab.pop(list(utab.keys())[0]) | ||||
|  | ||||
|                 if self.u2idx_n < CORES: | ||||
|                     self.u2idx_n += 1 | ||||
|                     return U2idx(self) | ||||
|  | ||||
|             time.sleep(0.05) | ||||
|             # not using conditional waits, on a hunch that | ||||
|             # average performance will be faster like this | ||||
|             # since most servers won't be fully saturated | ||||
|  | ||||
|         return None | ||||
|  | ||||
|     def put_u2idx(self, ident: str, u2idx: U2idx) -> None: | ||||
|         with self.mutex: | ||||
|             while ident in self.u2idx_free: | ||||
|                 ident += "a" | ||||
|  | ||||
|             self.u2idx_free[ident] = u2idx | ||||
|   | ||||
| @@ -6,13 +6,15 @@ import colorsys | ||||
| import hashlib | ||||
|  | ||||
| from .__init__ import PY2 | ||||
| from .th_srv import HAVE_PIL | ||||
| from .util import BytesIO | ||||
|  | ||||
|  | ||||
| class Ico(object): | ||||
|     def __init__(self, args: argparse.Namespace) -> None: | ||||
|         self.args = args | ||||
|  | ||||
|     def get(self, ext: str, as_thumb: bool) -> tuple[str, bytes]: | ||||
|     def get(self, ext: str, as_thumb: bool, chrome: bool) -> tuple[str, bytes]: | ||||
|         """placeholder to make thumbnails not break""" | ||||
|  | ||||
|         zb = hashlib.sha1(ext.encode("utf-8")).digest()[2:4] | ||||
| @@ -24,10 +26,44 @@ class Ico(object): | ||||
|         ci = [int(x * 255) for x in list(c1) + list(c2)] | ||||
|         c = "".join(["{:02x}".format(x) for x in ci]) | ||||
|  | ||||
|         w = 100 | ||||
|         h = 30 | ||||
|         if not self.args.th_no_crop and as_thumb: | ||||
|             w, h = self.args.th_size.split("x") | ||||
|             h = int(100 / (float(w) / float(h))) | ||||
|             sw, sh = self.args.th_size.split("x") | ||||
|             h = int(100 / (float(sw) / float(sh))) | ||||
|             w = 100 | ||||
|  | ||||
|         if chrome and as_thumb: | ||||
|             # cannot handle more than ~2000 unique SVGs | ||||
|             if HAVE_PIL: | ||||
|                 # svg: 3s, cache: 6s, this: 8s | ||||
|                 from PIL import Image, ImageDraw | ||||
|  | ||||
|                 h = int(64 * h / w) | ||||
|                 w = 64 | ||||
|                 img = Image.new("RGB", (w, h), "#" + c[:6]) | ||||
|                 pb = ImageDraw.Draw(img) | ||||
|                 tw, th = pb.textsize(ext) | ||||
|                 pb.text(((w - tw) // 2, (h - th) // 2), ext, fill="#" + c[6:]) | ||||
|                 img = img.resize((w * 3, h * 3), Image.NEAREST) | ||||
|  | ||||
|                 buf = BytesIO() | ||||
|                 img.save(buf, format="PNG", compress_level=1) | ||||
|                 return "image/png", buf.getvalue() | ||||
|  | ||||
|             elif False: | ||||
|                 # 48s, too slow | ||||
|                 import pyvips | ||||
|  | ||||
|                 h = int(192 * h / w) | ||||
|                 w = 192 | ||||
|                 img = pyvips.Image.text( | ||||
|                     ext, width=w, height=h, dpi=192, align=pyvips.Align.CENTRE | ||||
|                 ) | ||||
|                 img = img.ifthenelse(ci[3:], ci[:3], blend=True) | ||||
|                 # i = i.resize(3, kernel=pyvips.Kernel.NEAREST) | ||||
|                 buf = img.write_to_buffer(".png[compression=1]") | ||||
|                 return "image/png", buf | ||||
|  | ||||
|         svg = """\ | ||||
| <?xml version="1.0" encoding="UTF-8"?> | ||||
|   | ||||
							
								
								
									
										549
									
								
								copyparty/mdns.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										549
									
								
								copyparty/mdns.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,549 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import errno | ||||
| import random | ||||
| import select | ||||
| import socket | ||||
| import time | ||||
|  | ||||
| from ipaddress import IPv4Network, IPv6Network | ||||
|  | ||||
| from .__init__ import TYPE_CHECKING | ||||
| from .__init__ import unicode as U | ||||
| from .multicast import MC_Sck, MCast | ||||
| from .stolen.dnslib import AAAA | ||||
| from .stolen.dnslib import CLASS as DC | ||||
| from .stolen.dnslib import ( | ||||
|     NSEC, | ||||
|     PTR, | ||||
|     QTYPE, | ||||
|     RR, | ||||
|     SRV, | ||||
|     TXT, | ||||
|     A, | ||||
|     DNSHeader, | ||||
|     DNSQuestion, | ||||
|     DNSRecord, | ||||
| ) | ||||
| from .util import CachedSet, Daemon, Netdev, list_ips, min_ex | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .svchub import SvcHub | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Optional, Union | ||||
|  | ||||
|  | ||||
| MDNS4 = "224.0.0.251" | ||||
| MDNS6 = "ff02::fb" | ||||
|  | ||||
|  | ||||
class MDNS_Sck(MC_Sck):
    """one multicast socket, extended with mdns-specific tx state"""

    def __init__(
        self,
        sck: socket.socket,
        nd: Netdev,
        grp: str,
        ip: str,
        net: Union[IPv4Network, IPv6Network],
    ):
        super(MDNS_Sck, self).__init__(sck, nd, grp, ip, net)

        # prebuilt wire-format packets; populated by MDNS.build_replies
        # (probe / address-reply / service-reply / goodbye)
        self.bp_probe = self.bp_ip = self.bp_svc = self.bp_bye = b""

        # timestamp of the most recent transmit on this socket
        self.last_tx = 0.0
        # NOTE(review): presumably "a tx raised an exception" flag,
        # set by code outside this view -- confirm at the call sites
        self.tx_ex = False
|  | ||||
|  | ||||
| class MDNS(MCast): | ||||
|     def __init__(self, hub: "SvcHub", ngen: int) -> None: | ||||
|         al = hub.args | ||||
|         grp4 = "" if al.zm6 else MDNS4 | ||||
|         grp6 = "" if al.zm4 else MDNS6 | ||||
|         super(MDNS, self).__init__( | ||||
|             hub, MDNS_Sck, al.zm_on, al.zm_off, grp4, grp6, 5353, hub.args.zmv | ||||
|         ) | ||||
|         self.srv: dict[socket.socket, MDNS_Sck] = {} | ||||
|         self.logsrc = "mDNS-{}".format(ngen) | ||||
|         self.ngen = ngen | ||||
|         self.ttl = 300 | ||||
|  | ||||
|         zs = self.args.name + ".local." | ||||
|         zs = zs.encode("ascii", "replace").decode("ascii", "replace") | ||||
|         self.hn = "-".join(x for x in zs.split("?") if x) or ( | ||||
|             "vault-{}".format(random.randint(1, 255)) | ||||
|         ) | ||||
|         self.lhn = self.hn.lower() | ||||
|  | ||||
|         # requester ip -> (response deadline, srv, body): | ||||
|         self.q: dict[str, tuple[float, MDNS_Sck, bytes]] = {} | ||||
|         self.rx4 = CachedSet(0.42)  # 3 probes @ 250..500..750 => 500ms span | ||||
|         self.rx6 = CachedSet(0.42) | ||||
|         self.svcs, self.sfqdns = self.build_svcs() | ||||
|         self.lsvcs = {k.lower(): v for k, v in self.svcs.items()} | ||||
|         self.lsfqdns = set([x.lower() for x in self.sfqdns]) | ||||
|  | ||||
|         self.probing = 0.0 | ||||
|         self.unsolicited: list[float] = []  # scheduled announces on all nics | ||||
|         self.defend: dict[MDNS_Sck, float] = {}  # server -> deadline | ||||
|  | ||||
    def log(self, msg: str, c: Union[int, str] = 0) -> None:
        """forward msg to the hub logger, tagged with this mdns generation ("mDNS-<ngen>")"""
        self.log_func(self.logsrc, msg, c)
|  | ||||
|     def build_svcs(self) -> tuple[dict[str, dict[str, Any]], set[str]]: | ||||
|         zms = self.args.zms | ||||
|         http = {"port": 80 if 80 in self.args.p else self.args.p[0]} | ||||
|         https = {"port": 443 if 443 in self.args.p else self.args.p[0]} | ||||
|         webdav = http.copy() | ||||
|         webdavs = https.copy() | ||||
|         webdav["u"] = webdavs["u"] = "u"  # KDE requires username | ||||
|         ftp = {"port": (self.args.ftp if "f" in zms else self.args.ftps)} | ||||
|         smb = {"port": self.args.smb_port} | ||||
|  | ||||
|         # some gvfs require path | ||||
|         zs = self.args.zm_ld or "/" | ||||
|         if zs: | ||||
|             webdav["path"] = zs | ||||
|             webdavs["path"] = zs | ||||
|  | ||||
|         if self.args.zm_lh: | ||||
|             http["path"] = self.args.zm_lh | ||||
|             https["path"] = self.args.zm_lh | ||||
|  | ||||
|         if self.args.zm_lf: | ||||
|             ftp["path"] = self.args.zm_lf | ||||
|  | ||||
|         if self.args.zm_ls: | ||||
|             smb["path"] = self.args.zm_ls | ||||
|  | ||||
|         svcs: dict[str, dict[str, Any]] = {} | ||||
|  | ||||
|         if "d" in zms: | ||||
|             svcs["_webdav._tcp.local."] = webdav | ||||
|  | ||||
|         if "D" in zms: | ||||
|             svcs["_webdavs._tcp.local."] = webdavs | ||||
|  | ||||
|         if "h" in zms: | ||||
|             svcs["_http._tcp.local."] = http | ||||
|  | ||||
|         if "H" in zms: | ||||
|             svcs["_https._tcp.local."] = https | ||||
|  | ||||
|         if "f" in zms.lower(): | ||||
|             svcs["_ftp._tcp.local."] = ftp | ||||
|  | ||||
|         if "s" in zms.lower(): | ||||
|             svcs["_smb._tcp.local."] = smb | ||||
|  | ||||
|         sfqdns: set[str] = set() | ||||
|         for k, v in svcs.items(): | ||||
|             name = "{}-c-{}".format(self.args.name, k.split(".")[0][1:]) | ||||
|             v["name"] = name | ||||
|             sfqdns.add("{}.{}".format(name, k)) | ||||
|  | ||||
|         return svcs, sfqdns | ||||
|  | ||||
    def build_replies(self) -> None:
        """prebuild the wire-format mdns packets for each server socket:
        hostname probe, address reply, full service reply, and goodbye"""
        for srv in self.srv.values():
            # probe: ask for our own hostname, proposing our records as authority
            probe = DNSRecord(DNSHeader(0, 0), q=DNSQuestion(self.hn, QTYPE.ANY))
            areply = DNSRecord(DNSHeader(0, 0x8400))  # 0x8400 = authoritative response
            sreply = DNSRecord(DNSHeader(0, 0x8400))
            bye = DNSRecord(DNSHeader(0, 0x8400))

            # which address families exist on this nic (same srv.idx)?
            have4 = have6 = False
            for s2 in self.srv.values():
                if srv.idx != s2.idx:
                    continue

                if s2.v6:
                    have6 = True
                else:
                    have4 = True

            for ip in srv.ips:
                if ":" in ip:
                    qt = QTYPE.AAAA
                    ar = {"rclass": DC.F_IN, "rdata": AAAA(ip)}
                else:
                    qt = QTYPE.A
                    ar = {"rclass": DC.F_IN, "rdata": A(ip)}

                r0 = RR(self.hn, qt, ttl=0, **ar)  # ttl=0 revokes the record (goodbye)
                r120 = RR(self.hn, qt, ttl=120, **ar)
                # rfc-10:
                #   SHOULD rr ttl 120sec for A/AAAA/SRV
                #   (and recommend 75min for all others)

                probe.add_auth(r120)
                areply.add_answer(r120)
                sreply.add_answer(r120)
                bye.add_answer(r0)

            for sclass, props in self.svcs.items():
                sname = props["name"]
                sport = props["port"]
                sfqdn = sname + "." + sclass

                # dns-sd service enumeration (what clients browse for)
                k = "_services._dns-sd._udp.local."
                r = RR(k, QTYPE.PTR, DC.IN, 4500, PTR(sclass))
                sreply.add_answer(r)

                # service-class -> our instance of it
                r = RR(sclass, QTYPE.PTR, DC.IN, 4500, PTR(sfqdn))
                sreply.add_answer(r)

                # instance -> hostname + port
                r = RR(sfqdn, QTYPE.SRV, DC.F_IN, 120, SRV(0, 0, sport, self.hn))
                sreply.add_answer(r)
                areply.add_answer(r)

                r = RR(sfqdn, QTYPE.SRV, DC.F_IN, 0, SRV(0, 0, sport, self.hn))
                bye.add_answer(r)

                txts = []
                for k in ("u", "path"):
                    if k not in props:
                        continue

                    zb = "{}={}".format(k, props[k]).encode("utf-8")
                    if len(zb) > 255:
                        # single TXT strings are length-prefixed with one byte
                        t = "value too long for mdns: [{}]"
                        raise Exception(t.format(props[k]))

                    txts.append(zb)

                # gvfs really wants txt even if they're empty
                r = RR(sfqdn, QTYPE.TXT, DC.F_IN, 4500, TXT(txts))
                sreply.add_answer(r)

            # NSEC advertises which address family we do NOT have,
            # unless both exist or negative responses are disabled
            if not (have4 and have6) and not self.args.zm_noneg:
                ns = NSEC(self.hn, ["AAAA" if have6 else "A"])
                r = RR(self.hn, QTYPE.NSEC, DC.F_IN, 120, ns)
                areply.add_ar(r)
                if len(sreply.pack()) < 1400:  # keep the packet below typical mtu
                    sreply.add_ar(r)

            srv.bp_probe = probe.pack()
            srv.bp_ip = areply.pack()
            srv.bp_svc = sreply.pack()
            srv.bp_bye = bye.pack()

            # since all replies are small enough to fit in one packet,
            # always send full replies rather than just a/aaaa records
            srv.bp_ip = srv.bp_svc
|  | ||||
|     def send_probes(self) -> None: | ||||
|         slp = random.random() * 0.25 | ||||
|         for _ in range(3): | ||||
|             time.sleep(slp) | ||||
|             slp = 0.25 | ||||
|             if not self.running: | ||||
|                 break | ||||
|  | ||||
|             if self.args.zmv: | ||||
|                 self.log("sending hostname probe...") | ||||
|  | ||||
|             # ipv4: need to probe each ip (each server) | ||||
|             # ipv6: only need to probe each set of looped nics | ||||
|             probed6: set[str] = set() | ||||
|             for srv in self.srv.values(): | ||||
|                 if srv.ip in probed6: | ||||
|                     continue | ||||
|  | ||||
|                 try: | ||||
|                     srv.sck.sendto(srv.bp_probe, (srv.grp, 5353)) | ||||
|                     if srv.v6: | ||||
|                         for ip in srv.ips: | ||||
|                             probed6.add(ip) | ||||
|                 except Exception as ex: | ||||
|                     self.log("sendto failed: {} ({})".format(srv.ip, ex), "90") | ||||
|  | ||||
|     def run(self) -> None: | ||||
|         try: | ||||
|             bound = self.create_servers() | ||||
|         except: | ||||
|             t = "no server IP matches the mdns config\n{}" | ||||
|             self.log(t.format(min_ex()), 1) | ||||
|             bound = [] | ||||
|  | ||||
|         if not bound: | ||||
|             self.log("failed to announce copyparty services on the network", 3) | ||||
|             return | ||||
|  | ||||
|         self.build_replies() | ||||
|         Daemon(self.send_probes) | ||||
|         zf = time.time() + 2 | ||||
|         self.probing = zf  # cant unicast so give everyone an extra sec | ||||
|         self.unsolicited = [zf, zf + 1, zf + 3, zf + 7]  # rfc-8.3 | ||||
|  | ||||
|         try: | ||||
|             self.run2() | ||||
|         except OSError as ex: | ||||
|             if ex.errno != errno.EBADF: | ||||
|                 raise | ||||
|  | ||||
|             self.log("stopping due to {}".format(ex), "90") | ||||
|  | ||||
|         self.log("stopped", 2) | ||||
|  | ||||
    def run2(self) -> None:
        """main loop: wait for incoming mdns packets and process scheduled work"""
        last_hop = time.time()
        ihop = self.args.mc_hop
        while self.running:
            # short timeout while work is pending (probing / queued replies /
            # defends / scheduled announces), otherwise idle for a long time
            timeout = (
                0.02 + random.random() * 0.07
                if self.probing or self.q or self.defend or self.unsolicited
                else (last_hop + ihop if ihop else 180)
            )
            # NOTE(review): "last_hop + ihop" is an absolute timestamp passed
            # as a select() timeout (which is a duration in seconds), and
            # last_hop is never updated inside this loop; looks like it should
            # be "last_hop + ihop - time.time()" -- confirm before relying
            # on the --mc-hop feature
            rdy = select.select(self.srv, [], [], timeout)
            rx: list[socket.socket] = rdy[0]  # type: ignore
            self.rx4.cln()  # expire old entries from the dedup caches
            self.rx6.cln()
            buf = b""
            addr = ("0", 0)
            for sck in rx:
                try:
                    buf, addr = sck.recvfrom(4096)
                    self.eat(buf, addr, sck)
                except:
                    if not self.running:
                        self.log("stopped", 2)
                        return

                    # log the offending packet and keep serving
                    t = "{} {} \033[33m|{}| {}\n{}".format(
                        self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
                    )
                    self.log(t, 6)

            if not self.probing:
                self.process()
                continue

            # probe window expired with no conflict; the hostname is ours
            if self.probing < time.time():
                t = "probe ok; announcing [{}]"
                self.log(t.format(self.hn[:-1]), 2)
                self.probing = 0
|  | ||||
    def stop(self, panic: bool = False) -> None:
        """shut down; send goodbye packets unless panic (eg. hostname got stolen)"""
        self.running = False
        for srv in self.srv.values():
            try:
                if panic:
                    # announce nothing; just drop the sockets immediately
                    srv.sck.close()
                else:
                    # bp_bye has ttl=0 records so clients forget us right away
                    srv.sck.sendto(srv.bp_bye, (srv.grp, 5353))
            except:
                pass

        self.srv = {}
|  | ||||
|     def eat(self, buf: bytes, addr: tuple[str, int], sck: socket.socket) -> None: | ||||
|         cip = addr[0] | ||||
|         v6 = ":" in cip | ||||
|         if (cip.startswith("169.254") and not self.ll_ok) or ( | ||||
|             v6 and not cip.startswith("fe80") | ||||
|         ): | ||||
|             return | ||||
|  | ||||
|         cache = self.rx6 if v6 else self.rx4 | ||||
|         if buf in cache.c: | ||||
|             return | ||||
|  | ||||
|         srv: Optional[MDNS_Sck] = self.srv[sck] if v6 else self.map_client(cip)  # type: ignore | ||||
|         if not srv: | ||||
|             return | ||||
|  | ||||
|         cache.add(buf) | ||||
|         now = time.time() | ||||
|  | ||||
|         if self.args.zmv and cip != srv.ip and cip not in srv.ips: | ||||
|             t = "{} [{}] \033[36m{} \033[0m|{}|" | ||||
|             self.log(t.format(srv.name, srv.ip, cip, len(buf)), "90") | ||||
|  | ||||
|         p = DNSRecord.parse(buf) | ||||
|         if self.args.zmvv: | ||||
|             self.log(str(p)) | ||||
|  | ||||
|         # check for incoming probes for our hostname | ||||
|         cips = [U(x.rdata) for x in p.auth if U(x.rname).lower() == self.lhn] | ||||
|         if cips and self.sips.isdisjoint(cips): | ||||
|             if not [x for x in cips if x not in ("::1", "127.0.0.1")]: | ||||
|                 # avahi broadcasting 127.0.0.1-only packets | ||||
|                 return | ||||
|  | ||||
|             self.log("someone trying to steal our hostname: {}".format(cips), 3) | ||||
|             # immediately unicast | ||||
|             if not self.probing: | ||||
|                 srv.sck.sendto(srv.bp_ip, (cip, 5353)) | ||||
|  | ||||
|             # and schedule multicast | ||||
|             self.defend[srv] = self.defend.get(srv, now + 0.1) | ||||
|             return | ||||
|  | ||||
|         # check for someone rejecting our probe / hijacking our hostname | ||||
|         cips = [ | ||||
|             U(x.rdata) | ||||
|             for x in p.rr | ||||
|             if U(x.rname).lower() == self.lhn and x.rclass == DC.F_IN | ||||
|         ] | ||||
|         if cips and self.sips.isdisjoint(cips): | ||||
|             if not [x for x in cips if x not in ("::1", "127.0.0.1")]: | ||||
|                 # avahi broadcasting 127.0.0.1-only packets | ||||
|                 return | ||||
|  | ||||
|             # check if we've been given additional IPs | ||||
|             for ip in list_ips(): | ||||
|                 if ip in cips: | ||||
|                     self.sips.add(ip) | ||||
|  | ||||
|             if not self.sips.isdisjoint(cips): | ||||
|                 return | ||||
|  | ||||
|             t = "mdns zeroconf: " | ||||
|             if self.probing: | ||||
|                 t += "Cannot start; hostname '{}' is occupied" | ||||
|             else: | ||||
|                 t += "Emergency stop; hostname '{}' got stolen" | ||||
|  | ||||
|             t += " on {}! Use --name to set another hostname.\n\nName taken by {}\n\nYour IPs: {}\n" | ||||
|             self.log(t.format(self.args.name, srv.name, cips, list(self.sips)), 1) | ||||
|             self.stop(True) | ||||
|             return | ||||
|  | ||||
|         # then rfc-6.7; dns pretending to be mdns (android...) | ||||
|         if p.header.id or addr[1] != 5353: | ||||
|             rsp: Optional[DNSRecord] = None | ||||
|             for r in p.questions: | ||||
|                 try: | ||||
|                     lhn = U(r.qname).lower() | ||||
|                 except: | ||||
|                     self.log("invalid question: {}".format(r)) | ||||
|                     continue | ||||
|  | ||||
|                 if lhn != self.lhn: | ||||
|                     continue | ||||
|  | ||||
|                 if p.header.id and r.qtype in (QTYPE.A, QTYPE.AAAA): | ||||
|                     rsp = rsp or DNSRecord(DNSHeader(p.header.id, 0x8400)) | ||||
|                     rsp.add_question(r) | ||||
|                     for ip in srv.ips: | ||||
|                         qt = r.qtype | ||||
|                         v6 = ":" in ip | ||||
|                         if v6 == (qt == QTYPE.AAAA): | ||||
|                             rd = AAAA(ip) if v6 else A(ip) | ||||
|                             rr = RR(self.hn, qt, DC.IN, 10, rd) | ||||
|                             rsp.add_answer(rr) | ||||
|             if rsp: | ||||
|                 srv.sck.sendto(rsp.pack(), addr[:2]) | ||||
|                 # but don't return in case it's a differently broken client | ||||
|  | ||||
|         # then a/aaaa records | ||||
|         for r in p.questions: | ||||
|             try: | ||||
|                 lhn = U(r.qname).lower() | ||||
|             except: | ||||
|                 self.log("invalid question: {}".format(r)) | ||||
|                 continue | ||||
|  | ||||
|             if lhn != self.lhn: | ||||
|                 continue | ||||
|  | ||||
|             # gvfs keeps repeating itself | ||||
|             found = False | ||||
|             unicast = False | ||||
|             for rr in p.rr: | ||||
|                 try: | ||||
|                     rname = U(rr.rname).lower() | ||||
|                 except: | ||||
|                     self.log("invalid rr: {}".format(rr)) | ||||
|                     continue | ||||
|  | ||||
|                 if rname == self.lhn: | ||||
|                     if rr.ttl > 60: | ||||
|                         found = True | ||||
|                     if rr.rclass == DC.F_IN: | ||||
|                         unicast = True | ||||
|  | ||||
|             if unicast: | ||||
|                 # spec-compliant mDNS-over-unicast | ||||
|                 srv.sck.sendto(srv.bp_ip, (cip, 5353)) | ||||
|             elif addr[1] != 5353: | ||||
|                 # just in case some clients use (and want us to use) invalid ports | ||||
|                 srv.sck.sendto(srv.bp_ip, addr[:2]) | ||||
|  | ||||
|             if not found: | ||||
|                 self.q[cip] = (0, srv, srv.bp_ip) | ||||
|                 return | ||||
|  | ||||
|         deadline = now + (0.5 if p.header.tc else 0.02)  # rfc-7.2 | ||||
|  | ||||
|         # and service queries | ||||
|         for r in p.questions: | ||||
|             if not r or not r.qname: | ||||
|                 continue | ||||
|  | ||||
|             qname = U(r.qname).lower() | ||||
|             if qname in self.lsvcs or qname == "_services._dns-sd._udp.local.": | ||||
|                 self.q[cip] = (deadline, srv, srv.bp_svc) | ||||
|                 break | ||||
|         # heed rfc-7.1 if there was an announce in the past 12sec | ||||
|         # (workaround gvfs race-condition where it occasionally | ||||
|         #  doesn't read/decode the full response...) | ||||
|         if now < srv.last_tx + 12: | ||||
|             for rr in p.rr: | ||||
|                 if not rr.rdata: | ||||
|                     continue | ||||
|  | ||||
|                 rdata = U(rr.rdata).lower() | ||||
|                 if rdata in self.lsfqdns: | ||||
|                     if rr.ttl > 2250: | ||||
|                         self.q.pop(cip, None) | ||||
|                     break | ||||
|  | ||||
|     def process(self) -> None: | ||||
|         tx = set() | ||||
|         now = time.time() | ||||
|         cooldown = 0.9  # rfc-6: 1 | ||||
|         if self.unsolicited and self.unsolicited[0] < now: | ||||
|             self.unsolicited.pop(0) | ||||
|             cooldown = 0.1 | ||||
|             for srv in self.srv.values(): | ||||
|                 tx.add(srv) | ||||
|  | ||||
|         for srv, deadline in list(self.defend.items()): | ||||
|             if now < deadline: | ||||
|                 continue | ||||
|  | ||||
|             if self._tx(srv, srv.bp_ip, 0.02):  # rfc-6: 0.25 | ||||
|                 self.defend.pop(srv) | ||||
|  | ||||
|         for cip, (deadline, srv, msg) in list(self.q.items()): | ||||
|             if now < deadline: | ||||
|                 continue | ||||
|  | ||||
|             self.q.pop(cip) | ||||
|             self._tx(srv, msg, cooldown) | ||||
|  | ||||
|         for srv in tx: | ||||
|             self._tx(srv, srv.bp_svc, cooldown) | ||||
|  | ||||
|     def _tx(self, srv: MDNS_Sck, msg: bytes, cooldown: float) -> bool: | ||||
|         now = time.time() | ||||
|         if now < srv.last_tx + cooldown: | ||||
|             return False | ||||
|  | ||||
|         try: | ||||
|             srv.sck.sendto(msg, (srv.grp, 5353)) | ||||
|             srv.last_tx = now | ||||
|         except Exception as ex: | ||||
|             if srv.tx_ex: | ||||
|                 return True | ||||
|  | ||||
|             srv.tx_ex = True | ||||
|             t = "tx({},|{}|,{}): {}" | ||||
|             self.log(t.format(srv.ip, len(msg), cooldown, ex), 3) | ||||
|  | ||||
|         return True | ||||
| @@ -8,29 +8,37 @@ import shutil | ||||
| import subprocess as sp | ||||
| import sys | ||||
|  | ||||
| from .__init__ import PY2, WINDOWS, unicode | ||||
| from .__init__ import EXE, PY2, WINDOWS, E, unicode | ||||
| from .bos import bos | ||||
| from .util import REKOBO_LKEY, fsenc, min_ex, retchk, runcmd, uncyg | ||||
| from .util import ( | ||||
|     FFMPEG_URL, | ||||
|     REKOBO_LKEY, | ||||
|     fsenc, | ||||
|     min_ex, | ||||
|     pybin, | ||||
|     retchk, | ||||
|     runcmd, | ||||
|     sfsenc, | ||||
|     uncyg, | ||||
| ) | ||||
|  | ||||
| try: | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Union | ||||
|  | ||||
|     from .util import RootLogger | ||||
| except: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| def have_ff(cmd: str) -> bool: | ||||
| def have_ff(scmd: str) -> bool: | ||||
|     if PY2: | ||||
|         print("# checking {}".format(cmd)) | ||||
|         cmd = (cmd + " -version").encode("ascii").split(b" ") | ||||
|         print("# checking {}".format(scmd)) | ||||
|         acmd = (scmd + " -version").encode("ascii").split(b" ") | ||||
|         try: | ||||
|             sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE).communicate() | ||||
|             sp.Popen(acmd, stdout=sp.PIPE, stderr=sp.PIPE).communicate() | ||||
|             return True | ||||
|         except: | ||||
|             return False | ||||
|     else: | ||||
|         return bool(shutil.which(cmd)) | ||||
|         return bool(shutil.which(scmd)) | ||||
|  | ||||
|  | ||||
| HAVE_FFMPEG = have_ff("ffmpeg") | ||||
| @@ -42,9 +50,10 @@ class MParser(object): | ||||
|         self.tag, args = cmdline.split("=", 1) | ||||
|         self.tags = self.tag.split(",") | ||||
|  | ||||
|         self.timeout = 30 | ||||
|         self.timeout = 60 | ||||
|         self.force = False | ||||
|         self.kill = "t"  # tree; all children recursively | ||||
|         self.capture = 3  # outputs to consume | ||||
|         self.audio = "y" | ||||
|         self.pri = 0  # priority; higher = later | ||||
|         self.ext = [] | ||||
| @@ -72,6 +81,10 @@ class MParser(object): | ||||
|                 self.kill = arg[1:]  # [t]ree [m]ain [n]one | ||||
|                 continue | ||||
|  | ||||
|             if arg.startswith("c"): | ||||
|                 self.capture = int(arg[1:])  # 0=none 1=stdout 2=stderr 3=both | ||||
|                 continue | ||||
|  | ||||
|             if arg == "f": | ||||
|                 self.force = True | ||||
|                 continue | ||||
| @@ -92,7 +105,7 @@ class MParser(object): | ||||
|  | ||||
|  | ||||
| def ffprobe( | ||||
|     abspath: str, timeout: int = 10 | ||||
|     abspath: str, timeout: int = 60 | ||||
| ) -> tuple[dict[str, tuple[int, Any]], dict[str, list[Any]]]: | ||||
|     cmd = [ | ||||
|         b"ffprobe", | ||||
| @@ -256,19 +269,17 @@ class MTag(object): | ||||
|         self.args = args | ||||
|         self.usable = True | ||||
|         self.prefer_mt = not args.no_mtag_ff | ||||
|         self.backend = "ffprobe" if args.no_mutagen else "mutagen" | ||||
|         self.can_ffprobe = ( | ||||
|             HAVE_FFPROBE | ||||
|             and not args.no_mtag_ff | ||||
|             and (not WINDOWS or sys.version_info >= (3, 8)) | ||||
|         self.backend = ( | ||||
|             "ffprobe" if args.no_mutagen or (HAVE_FFPROBE and EXE) else "mutagen" | ||||
|         ) | ||||
|         self.can_ffprobe = HAVE_FFPROBE and not args.no_mtag_ff | ||||
|         mappings = args.mtm | ||||
|         or_ffprobe = " or FFprobe" | ||||
|  | ||||
|         if self.backend == "mutagen": | ||||
|             self.get = self.get_mutagen | ||||
|             try: | ||||
|                 import mutagen  # noqa: F401  # pylint: disable=unused-import,import-outside-toplevel | ||||
|                 from mutagen import version  # noqa: F401 | ||||
|             except: | ||||
|                 self.log("could not load Mutagen, trying FFprobe instead", c=3) | ||||
|                 self.backend = "ffprobe" | ||||
| @@ -285,15 +296,15 @@ class MTag(object): | ||||
|                 msg = "found FFprobe but it was disabled by --no-mtag-ff" | ||||
|                 self.log(msg, c=3) | ||||
|  | ||||
|             elif WINDOWS and sys.version_info < (3, 8): | ||||
|                 or_ffprobe = " or python >= 3.8" | ||||
|                 msg = "found FFprobe but your python is too old; need 3.8 or newer" | ||||
|                 self.log(msg, c=1) | ||||
|  | ||||
|         if not self.usable: | ||||
|             if EXE: | ||||
|                 t = "copyparty.exe cannot use mutagen; need ffprobe.exe to read media tags: " | ||||
|                 self.log(t + FFMPEG_URL) | ||||
|                 return | ||||
|  | ||||
|             msg = "need Mutagen{} to read media tags so please run this:\n{}{} -m pip install --user mutagen\n" | ||||
|             pybin = os.path.basename(sys.executable) | ||||
|             self.log(msg.format(or_ffprobe, " " * 37, pybin), c=1) | ||||
|             pyname = os.path.basename(pybin) | ||||
|             self.log(msg.format(or_ffprobe, " " * 37, pyname), c=1) | ||||
|             return | ||||
|  | ||||
|         # https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html | ||||
| @@ -313,7 +324,6 @@ class MTag(object): | ||||
|                 "tope", | ||||
|             ], | ||||
|             "title": ["title", "tit2", "\u00a9nam"], | ||||
|             "comment": ["comment"], | ||||
|             "circle": [ | ||||
|                 "album-artist", | ||||
|                 "tpe2", | ||||
| @@ -386,20 +396,26 @@ class MTag(object): | ||||
|                 parser_output[alias] = (priority, tv[0]) | ||||
|  | ||||
|         # take first value (lowest priority / most preferred) | ||||
|         ret = {sk: unicode(tv[1]).strip() for sk, tv in parser_output.items()} | ||||
|         ret: dict[str, Union[str, float]] = { | ||||
|             sk: unicode(tv[1]).strip() for sk, tv in parser_output.items() | ||||
|         } | ||||
|  | ||||
|         # track 3/7 => track 3 | ||||
|         for sk, tv in ret.items(): | ||||
|         for sk, zv in ret.items(): | ||||
|             if sk[0] == ".": | ||||
|                 sv = str(tv).split("/")[0].strip().lstrip("0") | ||||
|                 sv = str(zv).split("/")[0].strip().lstrip("0") | ||||
|                 ret[sk] = sv or 0 | ||||
|  | ||||
|         # normalize key notation to rekobo | ||||
|         okey = ret.get("key") | ||||
|         if okey: | ||||
|             key = okey.replace(" ", "").replace("maj", "").replace("min", "m") | ||||
|             key = str(okey).replace(" ", "").replace("maj", "").replace("min", "m") | ||||
|             ret["key"] = REKOBO_LKEY.get(key.lower(), okey) | ||||
|  | ||||
|         if self.args.mtag_vv: | ||||
|             zl = " ".join("\033[36m{} \033[33m{}".format(k, v) for k, v in ret.items()) | ||||
|             self.log("norm: {}\033[0m".format(zl), "90") | ||||
|  | ||||
|         return ret | ||||
|  | ||||
|     def compare(self, abspath: str) -> dict[str, Union[str, float]]: | ||||
| @@ -446,13 +462,21 @@ class MTag(object): | ||||
|         if not bos.path.isfile(abspath): | ||||
|             return {} | ||||
|  | ||||
|         import mutagen | ||||
|         from mutagen import File | ||||
|  | ||||
|         try: | ||||
|             md = mutagen.File(fsenc(abspath), easy=True) | ||||
|             md = File(fsenc(abspath), easy=True) | ||||
|             assert md | ||||
|             if self.args.mtag_vv: | ||||
|                 for zd in (md.info.__dict__, dict(md.tags)): | ||||
|                     zl = ["\033[36m{} \033[33m{}".format(k, v) for k, v in zd.items()] | ||||
|                     self.log("mutagen: {}\033[0m".format(" ".join(zl)), "90") | ||||
|             if not md.info.length and not md.info.codec: | ||||
|                 raise Exception() | ||||
|         except: | ||||
|         except Exception as ex: | ||||
|             if self.args.mtag_v: | ||||
|                 self.log("mutagen-err [{}] @ [{}]".format(ex, abspath), "90") | ||||
|  | ||||
|             return self.get_ffprobe(abspath) if self.can_ffprobe else {} | ||||
|  | ||||
|         sz = bos.path.getsize(abspath) | ||||
| @@ -498,7 +522,13 @@ class MTag(object): | ||||
|         if not bos.path.isfile(abspath): | ||||
|             return {} | ||||
|  | ||||
|         ret, md = ffprobe(abspath) | ||||
|         ret, md = ffprobe(abspath, self.args.mtag_to) | ||||
|  | ||||
|         if self.args.mtag_vv: | ||||
|             for zd in (ret, dict(md)): | ||||
|                 zl = ["\033[36m{} \033[33m{}".format(k, v) for k, v in zd.items()] | ||||
|                 self.log("ffprobe: {}\033[0m".format(" ".join(zl)), "90") | ||||
|  | ||||
|         return self.normalize_tags(ret, md) | ||||
|  | ||||
|     def get_bin( | ||||
| @@ -507,20 +537,32 @@ class MTag(object): | ||||
|         if not bos.path.isfile(abspath): | ||||
|             return {} | ||||
|  | ||||
|         pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) | ||||
|         zsl = [str(pypath)] + [str(x) for x in sys.path if x] | ||||
|         pypath = str(os.pathsep.join(zsl)) | ||||
|         env = os.environ.copy() | ||||
|         env["PYTHONPATH"] = pypath | ||||
|         try: | ||||
|             if EXE: | ||||
|                 raise Exception() | ||||
|  | ||||
|             pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) | ||||
|             zsl = [str(pypath)] + [str(x) for x in sys.path if x] | ||||
|             pypath = str(os.pathsep.join(zsl)) | ||||
|             env["PYTHONPATH"] = pypath | ||||
|         except: | ||||
|             if not E.ox and not EXE: | ||||
|                 raise | ||||
|  | ||||
|         ret: dict[str, Any] = {} | ||||
|         for tagname, parser in sorted(parsers.items(), key=lambda x: (x[1].pri, x[0])): | ||||
|             try: | ||||
|                 cmd = [parser.bin, abspath] | ||||
|                 if parser.bin.endswith(".py"): | ||||
|                     cmd = [sys.executable] + cmd | ||||
|                     cmd = [pybin] + cmd | ||||
|  | ||||
|                 args = {"env": env, "timeout": parser.timeout, "kill": parser.kill} | ||||
|                 args = { | ||||
|                     "env": env, | ||||
|                     "timeout": parser.timeout, | ||||
|                     "kill": parser.kill, | ||||
|                     "capture": parser.capture, | ||||
|                 } | ||||
|  | ||||
|                 if parser.pri: | ||||
|                     zd = oth_tags.copy() | ||||
| @@ -532,7 +574,7 @@ class MTag(object): | ||||
|                 else: | ||||
|                     cmd = ["nice"] + cmd | ||||
|  | ||||
|                 bcmd = [fsenc(x) for x in cmd] | ||||
|                 bcmd = [sfsenc(x) for x in cmd[:-1]] + [fsenc(cmd[-1])] | ||||
|                 rc, v, err = runcmd(bcmd, **args)  # type: ignore | ||||
|                 retchk(rc, bcmd, err, self.log, 5, self.args.mtag_v) | ||||
|                 v = v.strip() | ||||
|   | ||||
							
								
								
									
										370
									
								
								copyparty/multicast.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										370
									
								
								copyparty/multicast.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,370 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import socket | ||||
| import time | ||||
|  | ||||
| import ipaddress | ||||
| from ipaddress import ( | ||||
|     IPv4Address, | ||||
|     IPv4Network, | ||||
|     IPv6Address, | ||||
|     IPv6Network, | ||||
|     ip_address, | ||||
|     ip_network, | ||||
| ) | ||||
|  | ||||
| from .__init__ import MACOS, TYPE_CHECKING | ||||
| from .util import Netdev, find_prefix, min_ex, spack | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .svchub import SvcHub | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Optional, Union | ||||
|  | ||||
| if not hasattr(socket, "IPPROTO_IPV6"): | ||||
|     setattr(socket, "IPPROTO_IPV6", 41) | ||||
|  | ||||
|  | ||||
class NoIPs(Exception):
    """raised by MCast.create_servers when no usable ip could be selected"""

    pass
|  | ||||
|  | ||||
class MC_Sck(object):
    """wraps one multicast socket, bound to a single server ip"""

    def __init__(
        self,
        sck: socket.socket,
        nd: Netdev,
        grp: str,
        ip: str,
        net: Union[IPv4Network, IPv6Network],
    ):
        is6 = ":" in ip
        self.sck = sck
        self.idx = nd.idx  # nic index, 0 means INADDR_ANY
        self.name = nd.name
        self.grp = grp  # multicast group address to send to
        self.mreq = b""  # group-membership request; filled in during setup
        self.ip = ip
        self.net = net
        self.ips = {ip: net}  # all ips announced on this socket
        self.v6 = is6
        self.have4 = not is6
        self.have6 = is6
|  | ||||
|  | ||||
class MCast(object):
    """base class for multicast announcers (mdns/ssdp); owns one MC_Sck per server ip"""

    def __init__(
        self,
        hub: "SvcHub",
        Srv: type[MC_Sck],
        on: list[str],
        off: list[str],
        mc_grp_4: str,
        mc_grp_6: str,
        port: int,
        vinit: bool,
    ) -> None:
        """disable ipv%d by setting mc_grp_%d empty"""
        self.hub = hub
        self.Srv = Srv  # socket-wrapper class to instantiate per server ip
        self.args = hub.args
        self.asrv = hub.asrv
        self.log_func = hub.log
        self.on = on  # nics/subnets to announce on (empty = all)
        self.off = off  # nics/subnets to never announce on
        self.grp4 = mc_grp_4  # ipv4 multicast group ("" disables ipv4)
        self.grp6 = mc_grp_6  # ipv6 multicast group ("" disables ipv6)
        self.port = port
        self.vinit = vinit  # verbose logging during socket setup

        self.srv: dict[socket.socket, MC_Sck] = {}  # listening sockets
        self.sips: set[str] = set()  # all listening ips (including failed attempts)
        self.ll_ok: set[str] = set()  # fallback linklocal IPv4 and IPv6 addresses
        self.b2srv: dict[bytes, MC_Sck] = {}  # binary-ip -> server socket
        self.b4: list[bytes] = []  # sorted list of binary-ips
        self.b6: list[bytes] = []  # sorted list of binary-ips
        self.cscache: dict[str, Optional[MC_Sck]] = {}  # client ip -> server cache

        self.running = True

    def log(self, msg: str, c: Union[int, str] = 0) -> None:
        """log with a fixed 'multicast' source tag; c is the color/severity code"""
        self.log_func("multicast", msg, c)

    def create_servers(self) -> list[str]:
        """create and set up one multicast socket per selected server ip;
        returns the list of ips that were successfully bound
        """
        bound: list[str] = []
        netdevs = self.hub.tcpsrv.netdevs
        ips = [x[0] for x in self.hub.tcpsrv.bound]

        # expand wildcard binds ("::" / "0.0.0.0") into each nic's concrete ip
        if "::" in ips:
            ips = [x for x in ips if x != "::"] + list(
                [x.split("/")[0] for x in netdevs if ":" in x]
            )
            ips.append("0.0.0.0")

        if "0.0.0.0" in ips:
            ips = [x for x in ips if x != "0.0.0.0"] + list(
                [x.split("/")[0] for x in netdevs if ":" not in x]
            )

        ips = [x for x in ips if x not in ("::1", "127.0.0.1")]
        # rewrite each ip to ip/prefix form (see util.find_prefix);
        # the prefix is stripped again with split("/") further down
        ips = find_prefix(ips, netdevs)

        # expand the on/off filter args; each entry may be an
        # ip/subnet, a netdev index, or a netdev name
        on = self.on[:]
        off = self.off[:]
        for lst in (on, off):
            for av in list(lst):
                try:
                    arg_net = ip_network(av, False)
                except:
                    arg_net = None

                for sk, sv in netdevs.items():
                    if arg_net:
                        net_ip = ip_address(sk.split("/")[0])
                        if net_ip in arg_net and sk not in lst:
                            lst.append(sk)

                    if (av == str(sv.idx) or av == sv.name) and sk not in lst:
                        lst.append(sk)

        # on-list wins over off-list if both are given
        if on:
            ips = [x for x in ips if x in on]
        elif off:
            ips = [x for x in ips if x not in off]

        # empty group address disables that ip family entirely
        if not self.grp4:
            ips = [x for x in ips if ":" in x]

        if not self.grp6:
            ips = [x for x in ips if ":" not in x]

        ips = list(set(ips))
        all_selected = ips[:]

        # discard non-linklocal ipv6
        ips = [x for x in ips if ":" not in x or x.startswith("fe80")]

        if not ips:
            raise NoIPs()

        for ip in ips:
            v6 = ":" in ip
            netdev = netdevs[ip]
            if not netdev.idx:
                # idx 0 means we cannot pin the socket to a specific nic
                t = "using INADDR_ANY for ip [{}], netdev [{}]"
                if not self.srv and ip not in ["::", "0.0.0.0"]:
                    self.log(t.format(ip, netdev), 3)

            ipv = socket.AF_INET6 if v6 else socket.AF_INET
            sck = socket.socket(ipv, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
            sck.settimeout(None)
            sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            try:
                # not available on all platforms; best-effort
                sck.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
            except:
                pass

            # most ipv6 clients expect multicast on linklocal ip only;
            # add a/aaaa records for the other nic IPs
            other_ips: set[str] = set()
            if v6:
                for nd in netdevs.values():
                    if nd.idx == netdev.idx and nd.ip in all_selected and ":" in nd.ip:
                        other_ips.add(nd.ip)

            net = ipaddress.ip_network(ip, False)
            ip = ip.split("/")[0]
            srv = self.Srv(sck, netdev, self.grp6 if ":" in ip else self.grp4, ip, net)
            for oth_ip in other_ips:
                srv.ips[oth_ip.split("/")[0]] = ipaddress.ip_network(oth_ip, False)

            # gvfs breaks if a linklocal ip appears in a dns reply
            ll = {
                k: v
                for k, v in srv.ips.items()
                if k.startswith("169.254") or k.startswith("fe80")
            }
            rt = {k: v for k, v in srv.ips.items() if k not in ll}

            if self.args.ll or not rt:
                self.ll_ok.update(list(ll))

            if not self.args.ll:
                # prefer routable ips; keep linklocal only as a last resort
                srv.ips = rt or ll

            if not srv.ips:
                self.log("no IPs on {}; skipping [{}]".format(netdev, ip), 3)
                continue

            try:
                self.setup_socket(srv)
                self.srv[sck] = srv
                bound.append(ip)
            except:
                t = "announce failed on {} [{}]:\n{}"
                self.log(t.format(netdev, ip, min_ex()), 3)

        if self.args.zm_msub:
            # announce every subnet's ip on all sockets of the same nic
            for s1 in self.srv.values():
                for s2 in self.srv.values():
                    if s1.idx != s2.idx:
                        continue

                    if s1.ip not in s2.ips:
                        s2.ips[s1.ip] = s1.net

        if self.args.zm_mnic:
            # share ips between sockets whose subnets are identical
            for s1 in self.srv.values():
                for s2 in self.srv.values():
                    for ip1, net1 in list(s1.ips.items()):
                        for ip2, net2 in list(s2.ips.items()):
                            if net1 == net2 and ip1 != ip2:
                                s1.ips[ip2] = net2

        self.sips = set([x.split("/")[0] for x in all_selected])
        for srv in self.srv.values():
            assert srv.ip in self.sips

        return bound

    def setup_socket(self, srv: MC_Sck) -> None:
        """bind srv's socket, register its ips in the b2srv/b4/b6 lookup
        tables, and join the multicast group on its nic
        """
        sck = srv.sck
        if srv.v6:
            if self.vinit:
                zsl = list(srv.ips.keys())
                self.log("v6({}) idx({}) {}".format(srv.ip, srv.idx, zsl), 6)

            for ip in srv.ips:
                bip = socket.inet_pton(socket.AF_INET6, ip)
                self.b2srv[bip] = srv
                self.b6.append(bip)

            grp = self.grp6 if srv.idx else ""
            try:
                # prefer binding the group address; macos only accepts the wildcard
                if MACOS:
                    raise Exception()

                sck.bind((grp, self.port, 0, srv.idx))
            except:
                sck.bind(("", self.port, 0, srv.idx))

            # membership request = group address + nic index
            bgrp = socket.inet_pton(socket.AF_INET6, self.grp6)
            dev = spack(b"@I", srv.idx)
            srv.mreq = bgrp + dev
            if srv.idx != socket.INADDR_ANY:
                sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, dev)

            try:
                sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_HOPS, 255)
                sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_LOOP, 1)
            except:
                # macos
                t = "failed to set IPv6 TTL/LOOP; announcements may not survive multiple switches/routers"
                self.log(t, 3)
        else:
            if self.vinit:
                self.log("v4({}) idx({})".format(srv.ip, srv.idx), 6)

            bip = socket.inet_aton(srv.ip)
            self.b2srv[bip] = srv
            self.b4.append(bip)

            grp = self.grp4 if srv.idx else ""
            try:
                # prefer binding the group address; macos only accepts the wildcard
                if MACOS:
                    raise Exception()

                sck.bind((grp, self.port))
            except:
                sck.bind(("", self.port))

            # membership request = group address + nic's own ipv4 address
            bgrp = socket.inet_aton(self.grp4)
            dev = (
                spack(b"=I", socket.INADDR_ANY)
                if srv.idx == socket.INADDR_ANY
                else socket.inet_aton(srv.ip)
            )
            srv.mreq = bgrp + dev
            if srv.idx != socket.INADDR_ANY:
                sck.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF, dev)

            try:
                sck.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 255)
                sck.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
            except:
                # probably can't happen but dontcare if it does
                t = "failed to set IPv4 TTL/LOOP; announcements may not survive multiple switches/routers"
                self.log(t, 3)

        self.hop(srv)
        self.b4.sort(reverse=True)
        self.b6.sort(reverse=True)

    def hop(self, srv: MC_Sck) -> None:
        """rejoin to keepalive on routers/switches without igmp-snooping"""
        sck = srv.sck
        req = srv.mreq
        if ":" in srv.ip:
            try:
                sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_LEAVE_GROUP, req)
                # linux does leaves/joins twice with 0.2~1.05s spacing
                time.sleep(1.2)
            except:
                pass

            sck.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, req)
        else:
            try:
                sck.setsockopt(socket.IPPROTO_IP, socket.IP_DROP_MEMBERSHIP, req)
                time.sleep(1.2)
            except:
                pass

            # t = "joining {} from ip {} idx {} with mreq {}"
            # self.log(t.format(srv.grp, srv.ip, srv.idx, repr(srv.mreq)), 6)
            sck.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, req)

    def map_client(self, cip: str) -> Optional[MC_Sck]:
        """map a client ip to the server socket whose subnet contains it;
        results (including misses as None) are memoized in cscache
        """
        try:
            return self.cscache[cip]
        except:
            pass

        ret: Optional[MC_Sck] = None
        v6 = ":" in cip
        ci = IPv6Address(cip) if v6 else IPv4Address(cip)
        for x in self.b6 if v6 else self.b4:
            srv = self.b2srv[x]
            if any([x for x in srv.ips.values() if ci in x]):
                ret = srv
                break

        if not ret and cip in ("127.0.0.1", "::1"):
            # just give it something
            ret = list(self.srv.values())[0]

        if not ret and cip.startswith("169.254"):
            # idk how to map LL IPv4 msgs to nics;
            # just pick one and hope for the best
            lls = (
                x
                for x in self.srv.values()
                if next((y for y in x.ips if y in self.ll_ok), None)
            )
            ret = next(lls, None)

        if ret:
            t = "new client on {} ({}): {}"
            self.log(t.format(ret.name, ret.net, cip), 6)
        else:
            t = "could not map client {} to known subnet; maybe forwarded from another network?"
            self.log(t.format(cip), 3)

        # crude size cap: flush the whole cache instead of tracking LRU
        if len(self.cscache) > 9000:
            self.cscache = {}

        self.cscache[cip] = ret
        return ret
							
								
								
									
										0
									
								
								copyparty/res/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										0
									
								
								copyparty/res/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
								
								
									
										337
									
								
								copyparty/smbd.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										337
									
								
								copyparty/smbd.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,337 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| import inspect | ||||
| import logging | ||||
| import os | ||||
| import random | ||||
| import stat | ||||
| import sys | ||||
| import time | ||||
| from types import SimpleNamespace | ||||
|  | ||||
| from .__init__ import ANYWIN, EXE, TYPE_CHECKING | ||||
| from .authsrv import LEELOO_DALLAS, VFS | ||||
| from .bos import bos | ||||
| from .util import Daemon, min_ex, pybin, runhook | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Union | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .svchub import SvcHub | ||||
|  | ||||
|  | ||||
# module-level logger for the smb subsystem, plus short aliases
# used throughout this file (debug/info/warning/error)
lg = logging.getLogger("smb")
debug, info, warning, error = (lg.debug, lg.info, lg.warning, lg.error)
|  | ||||
|  | ||||
class SMB(object):
    """SMB/CIFS frontend: runs impacket's SimpleSMBServer with its
    os / os.path modules monkeypatched so that every filesystem call
    is routed through copyparty's VFS (permission checks, event hooks,
    up2k indexing) instead of touching the real filesystem directly."""

    def __init__(self, hub: "SvcHub") -> None:
        self.hub = hub
        self.args = hub.args
        self.asrv = hub.asrv
        self.log = hub.log
        # fd -> (time-opened, vpath) for files opened writable,
        # so _close() can hand them to up2k for hashing/indexing
        self.files: dict[int, tuple[float, str]] = {}

        # --smbvvv enables our own debug logging; --smbvv impacket's too
        lg.setLevel(logging.DEBUG if self.args.smbvvv else logging.INFO)
        for x in ["impacket", "impacket.smbserver"]:
            lgr = logging.getLogger(x)
            lgr.setLevel(logging.DEBUG if self.args.smbvv else logging.INFO)

        try:
            from impacket import smbserver
            from impacket.ntlm import compute_lmhash, compute_nthash
        except ImportError:
            if EXE:
                print("copyparty.exe cannot do SMB")
                sys.exit(1)

            m = "\033[36m\n{}\033[31m\n\nERROR: need 'impacket'; please run this command:\033[33m\n {} -m pip install --user impacket\n\033[0m"
            print(m.format(min_ex(), pybin))
            sys.exit(1)

        # patch vfs into smbserver.os
        fos = SimpleNamespace()
        for k in os.__dict__:
            try:
                setattr(fos, k, getattr(os, k))
            except:
                pass
        fos.close = self._close
        fos.listdir = self._listdir
        fos.mkdir = self._mkdir
        fos.open = self._open
        fos.remove = self._unlink
        fos.rename = self._rename
        fos.stat = self._stat
        fos.unlink = self._unlink
        fos.utime = self._utime
        smbserver.os = fos

        # ...and smbserver.os.path
        fop = SimpleNamespace()
        for k in os.path.__dict__:
            try:
                setattr(fop, k, getattr(os.path, k))
            except:
                pass
        fop.exists = self._p_exists
        fop.getsize = self._p_getsize
        fop.isdir = self._p_isdir
        smbserver.os.path = fop

        if not self.args.smb_nwa_2:
            fop.join = self._p_join

        # other patches
        smbserver.isInFileJail = self._is_in_file_jail
        self._disarm()

        # impacket has no ipv6 support; pick the first v4 addr (or any)
        ip = next((x for x in self.args.i if ":" not in x), None)
        if not ip:
            self.log("smb", "IPv6 not supported for SMB; listening on 0.0.0.0", 3)
            ip = "0.0.0.0"

        port = int(self.args.smb_port)
        srv = smbserver.SimpleSMBServer(listenAddress=ip, listenPort=port)

        ro = "no" if self.args.smbw else "yes"  # (does nothing)
        srv.addShare("A", "/", readOnly=ro)
        srv.setSMB2Support(not self.args.smb1)

        # register each account, and additionally the password alone as
        # a username (with dummy password "k")
        for name, pwd in self.asrv.acct.items():
            for u, p in ((name, pwd), (pwd, "k")):
                lmhash = compute_lmhash(p)
                nthash = compute_nthash(p)
                srv.addCredential(u, 0, lmhash, nthash)

        # randomized server challenge, 8 bytes as hex
        chi = [random.randint(0, 255) for x in range(8)]
        cha = "".join(["{:02x}".format(x) for x in chi])
        srv.setSMBChallenge(cha)

        self.srv = srv
        self.stop = srv.stop
        self.log("smb", "listening @ {}:{}".format(ip, port))

    def nlog(self, msg: str, c: Union[int, str] = 0) -> None:
        """named logger passed to runhook()"""
        self.log("smb", msg, c)

    def start(self) -> None:
        """serve forever on a background daemon thread"""
        Daemon(self.srv.start)

    def _v2a(self, caller: str, vpath: str, *a: Any) -> tuple[VFS, str]:
        """resolve an smb path to its vfs node and absolute fs path"""
        vpath = vpath.replace("\\", "/").lstrip("/")
        # cf = inspect.currentframe().f_back
        # c1 = cf.f_back.f_code.co_name
        # c2 = cf.f_code.co_name
        debug('%s("%s", %s)\033[K\033[0m', caller, vpath, str(a))

        # TODO find a way to grab `identity` in smbComSessionSetupAndX and smb2SessionSetup
        vfs, rem = self.asrv.vfs.get(vpath, LEELOO_DALLAS, True, True)
        return vfs, vfs.canonical(rem)

    def _listdir(self, vpath: str, *a: Any, **ka: Any) -> list[str]:
        """list virtual entries, subdirs and files of a directory;
        truncated to fit the client unless --smb-nwa-1"""
        vpath = vpath.replace("\\", "/").lstrip("/")
        # caller = inspect.currentframe().f_back.f_code.co_name
        debug('listdir("%s", %s)\033[K\033[0m', vpath, str(a))
        vfs, rem = self.asrv.vfs.get(vpath, LEELOO_DALLAS, False, False)
        _, vfs_ls, vfs_virt = vfs.ls(
            rem, LEELOO_DALLAS, not self.args.no_scandir, [[False, False]]
        )
        dirs = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
        fils = [x[0] for x in vfs_ls if x[0] not in dirs]
        ls = list(vfs_virt.keys()) + dirs + fils
        if self.args.smb_nwa_1:
            return ls

        # clients crash somewhere around 65760 byte
        ret = []
        sz = 112 * 2  # ['.', '..']
        for n, fn in enumerate(ls):
            if sz >= 64000:
                t = "listing only %d of %d files (%d byte); see impacket#1433"
                warning(t, n, len(ls), sz)
                break

            # estimated wire size: fixed header + utf-16 name, 8-aligned
            nsz = len(fn.encode("utf-16", "replace"))
            nsz = ((nsz + 7) // 8) * 8
            sz += 104 + nsz
            ret.append(fn)

        return ret

    def _open(
        self, vpath: str, flags: int, *a: Any, chmod: int = 0o777, **ka: Any
    ) -> Any:
        """open a file through the vfs; write-opens are gated by --smbw,
        volume write-access and xbu hooks, and tracked for indexing"""
        f_ro = os.O_RDONLY
        if ANYWIN:
            f_ro |= os.O_BINARY

        wr = flags != f_ro
        if wr and not self.args.smbw:
            yeet("blocked write (no --smbw): " + vpath)

        vfs, ap = self._v2a("open", vpath, *a)
        if wr:
            if not vfs.axs.uwrite:
                yeet("blocked write (no-write-acc): " + vpath)

            # NOTE(review): "1.7.6.2" looks like a placeholder client-ip
            # for the hook api -- confirm
            xbu = vfs.flags.get("xbu")
            if xbu and not runhook(
                self.nlog, xbu, ap, vpath, "", "", 0, 0, "1.7.6.2", 0, ""
            ):
                yeet("blocked by xbu server config: " + vpath)

        ret = bos.open(ap, flags, *a, mode=chmod, **ka)
        if wr:
            # cap the tracked-files dict; drop the older half when huge
            now = time.time()
            nf = len(self.files)
            if nf > 9000:
                oldest = min([x[0] for x in self.files.values()])
                cutoff = oldest + (now - oldest) / 2
                self.files = {k: v for k, v in self.files.items() if v[0] > cutoff}
                info("was tracking %d files, now %d", nf, len(self.files))

            vpath = vpath.replace("\\", "/").lstrip("/")
            self.files[ret] = (now, vpath)

        return ret

    def _close(self, fd: int) -> None:
        """close the fd; if it was a tracked write, submit the file
        to up2k for hashing/indexing"""
        os.close(fd)
        if fd not in self.files:
            return

        _, vp = self.files.pop(fd)
        vp, fn = os.path.split(vp)
        vfs, rem = self.hub.asrv.vfs.get(vp, LEELOO_DALLAS, False, True)
        vfs, rem = vfs.get_dbv(rem)
        self.hub.up2k.hash_file(
            vfs.realpath,
            vfs.vpath,
            vfs.flags,
            rem,
            fn,
            "1.7.6.2",
            time.time(),
            "",
        )

    def _rename(self, vp1: str, vp2: str) -> None:
        """move vp1 to vp2; needs --smbw, move-access on the source
        and write-access on the destination"""
        if not self.args.smbw:
            yeet("blocked rename (no --smbw): " + vp1)

        vp1 = vp1.lstrip("/")
        vp2 = vp2.lstrip("/")

        vfs2, ap2 = self._v2a("rename", vp2, vp1)
        if not vfs2.axs.uwrite:
            yeet("blocked rename (no-write-acc): " + vp2)

        vfs1, _ = self.asrv.vfs.get(vp1, LEELOO_DALLAS, True, True)
        if not vfs1.axs.umove:
            yeet("blocked rename (no-move-acc): " + vp1)

        self.hub.up2k.handle_mv(LEELOO_DALLAS, vp1, vp2)
        # best-effort; NOTE(review): this creates ap2 itself, not just
        # its parents -- intent unclear, confirm
        try:
            bos.makedirs(ap2)
        except:
            pass

    def _mkdir(self, vpath: str) -> None:
        """create a directory; needs --smbw and write-access"""
        if not self.args.smbw:
            yeet("blocked mkdir (no --smbw): " + vpath)

        vfs, ap = self._v2a("mkdir", vpath)
        if not vfs.axs.uwrite:
            yeet("blocked mkdir (no-write-acc): " + vpath)

        return bos.mkdir(ap)

    def _stat(self, vpath: str, *a: Any, **ka: Any) -> os.stat_result:
        """stat through the vfs"""
        return bos.stat(self._v2a("stat", vpath, *a)[1], *a, **ka)

    def _unlink(self, vpath: str) -> None:
        """delete a file through up2k; needs --smbw and delete-access"""
        if not self.args.smbw:
            yeet("blocked delete (no --smbw): " + vpath)

        # return bos.unlink(self._v2a("stat", vpath, *a)[1])
        vfs, ap = self._v2a("delete", vpath)
        if not vfs.axs.udel:
            yeet("blocked delete (no-del-acc): " + vpath)

        vpath = vpath.replace("\\", "/").lstrip("/")
        self.hub.up2k.handle_rm(LEELOO_DALLAS, "1.7.6.2", [vpath], [], False)

    def _utime(self, vpath: str, times: tuple[float, float]) -> None:
        """set atime/mtime; needs --smbw and write-access"""
        if not self.args.smbw:
            yeet("blocked utime (no --smbw): " + vpath)

        vfs, ap = self._v2a("utime", vpath)
        if not vfs.axs.uwrite:
            yeet("blocked utime (no-write-acc): " + vpath)

        return bos.utime(ap, times)

    def _p_exists(self, vpath: str) -> bool:
        """os.path.exists through the vfs"""
        try:
            bos.stat(self._v2a("p.exists", vpath)[1])
            return True
        except:
            return False

    def _p_getsize(self, vpath: str) -> int:
        """os.path.getsize through the vfs"""
        st = bos.stat(self._v2a("p.getsize", vpath)[1])
        return st.st_size

    def _p_isdir(self, vpath: str) -> bool:
        """os.path.isdir through the vfs (False if missing)"""
        try:
            st = bos.stat(self._v2a("p.isdir", vpath)[1])
            return stat.S_ISDIR(st.st_mode)
        except:
            return False

    def _p_join(self, *a) -> str:
        # impacket.smbserver reads globs from queryDirectoryRequest['Buffer']
        # where somehow `fds.*` becomes `fds"*` so lets fix that
        ret = os.path.join(*a)
        return ret.replace('"', ".")  # type: ignore

    def _hook(self, *a: Any, **ka: Any) -> None:
        """tripwire: log and refuse any unpatched smbserver os-call"""
        src = inspect.currentframe().f_back.f_code.co_name
        error("\033[31m%s:hook(%s)\033[0m", src, a)
        raise Exception("nope")

    def _disarm(self) -> None:
        """replace every os / os.path function we did not explicitly
        route through the vfs with the tripwire above"""
        from impacket import smbserver

        smbserver.os.chmod = self._hook
        smbserver.os.chown = self._hook
        smbserver.os.ftruncate = self._hook
        smbserver.os.lchown = self._hook
        smbserver.os.link = self._hook
        smbserver.os.lstat = self._hook
        smbserver.os.replace = self._hook
        smbserver.os.scandir = self._hook
        smbserver.os.symlink = self._hook
        smbserver.os.truncate = self._hook
        smbserver.os.walk = self._hook

        smbserver.os.path.abspath = self._hook
        smbserver.os.path.expanduser = self._hook
        smbserver.os.path.getatime = self._hook
        smbserver.os.path.getctime = self._hook
        smbserver.os.path.getmtime = self._hook
        smbserver.os.path.isabs = self._hook
        smbserver.os.path.isfile = self._hook
        smbserver.os.path.islink = self._hook
        smbserver.os.path.realpath = self._hook

    def _is_in_file_jail(self, *a: Any) -> bool:
        # handled by vfs
        return True
|  | ||||
|  | ||||
def yeet(msg: str) -> None:
    """Log *msg* at info level, then abort the current smb operation
    by raising an Exception carrying the same message."""
    info(msg)
    raise Exception(msg)
							
								
								
									
										220
									
								
								copyparty/ssdp.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										220
									
								
								copyparty/ssdp.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,220 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import errno | ||||
| import re | ||||
| import select | ||||
| import socket | ||||
| from email.utils import formatdate | ||||
|  | ||||
| from .__init__ import TYPE_CHECKING | ||||
| from .multicast import MC_Sck, MCast | ||||
| from .util import CachedSet, html_escape, min_ex | ||||
|  | ||||
| if TYPE_CHECKING: | ||||
|     from .broker_util import BrokerCli | ||||
|     from .httpcli import HttpCli | ||||
|     from .svchub import SvcHub | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Optional, Union | ||||
|  | ||||
|  | ||||
| GRP = "239.255.255.250" | ||||
|  | ||||
|  | ||||
class SSDP_Sck(MC_Sck):
    """multicast socket which also remembers its http port"""

    def __init__(self, *a):
        super().__init__(*a)
        # http port to announce to clients; assigned later by SSDPd.run
        self.hport = 0
|  | ||||
|  | ||||
class SSDPr(object):
    """generates http responses for httpcli"""

    def __init__(self, broker: "BrokerCli") -> None:
        self.broker = broker
        self.args = broker.args

    def reply(self, hc: "HttpCli") -> bool:
        """route an ssdp http request; only device.xml is recognized"""
        if hc.vpath.endswith("device.xml"):
            return self.tx_device(hc)

        hc.reply(b"unknown request", 400)
        return False

    def tx_device(self, hc: "HttpCli") -> bool:
        """send the UPnP root-device description for this copyparty"""
        zs = """
<?xml version="1.0"?>
<root xmlns="urn:schemas-upnp-org:device-1-0">
    <specVersion>
        <major>1</major>
        <minor>0</minor>
    </specVersion>
    <URLBase>{}</URLBase>
    <device>
        <presentationURL>{}</presentationURL>
        <deviceType>urn:schemas-upnp-org:device:Basic:1</deviceType>
        <friendlyName>{}</friendlyName>
        <modelDescription>file server</modelDescription>
        <manufacturer>ed</manufacturer>
        <manufacturerURL>https://ocv.me/</manufacturerURL>
        <modelName>copyparty</modelName>
        <modelURL>https://github.com/9001/copyparty/</modelURL>
        <UDN>{}</UDN>
        <serviceList>
            <service>
                <serviceType>urn:schemas-upnp-org:device:Basic:1</serviceType>
                <serviceId>urn:schemas-upnp-org:device:Basic</serviceId>
                <controlURL>/.cpr/ssdp/services.xml</controlURL>
                <eventSubURL>/.cpr/ssdp/services.xml</eventSubURL>
                <SCPDURL>/.cpr/ssdp/services.xml</SCPDURL>
            </service>
        </serviceList>
    </device>
</root>"""

        c = html_escape
        sip, sport = hc.s.getsockname()[:2]
        sip = sip.replace("::ffff:", "")  # strip the v6-mapped-v4 prefix
        proto = "https" if self.args.https_only else "http"
        ubase = "{}://{}:{}".format(proto, sip, sport)
        # --zsl may be a full url or a path relative to this server
        zsl = self.args.zsl
        url = zsl if "://" in zsl else ubase + "/" + zsl.lstrip("/")
        name = "{} @ {}".format(self.args.doctitle, self.args.name)
        zs = zs.strip().format(c(ubase), c(url), c(name), c(self.args.zsid))
        hc.reply(zs.encode("utf-8", "replace"))
        return False  # close connection
|  | ||||
|  | ||||
class SSDPd(MCast):
    """communicates with ssdp clients over multicast"""

    def __init__(self, hub: "SvcHub", ngen: int) -> None:
        al = hub.args
        # verbose if --zsv given, unless --zmv already covers it
        vinit = al.zsv and not al.zmv
        super(SSDPd, self).__init__(
            hub, SSDP_Sck, al.zs_on, al.zs_off, GRP, "", 1900, vinit
        )
        self.srv: dict[socket.socket, SSDP_Sck] = {}
        self.logsrc = "SSDP-{}".format(ngen)
        self.ngen = ngen  # generation counter; used in the log source tag

        self.rxc = CachedSet(0.7)  # dedupes recently-seen request payloads
        self.txc = CachedSet(5)  # win10: every 3 sec
        self.ptn_st = re.compile(b"\nst: *upnp:rootdevice", re.I)

    def log(self, msg: str, c: Union[int, str] = 0) -> None:
        self.log_func(self.logsrc, msg, c)

    def run(self) -> None:
        """bind the multicast sockets, map each one to an http port,
        then serve until stopped"""
        try:
            bound = self.create_servers()
        except:
            t = "no server IP matches the ssdp config\n{}"
            self.log(t.format(min_ex()), 1)
            bound = []

        if not bound:
            self.log("failed to announce copyparty services on the network", 3)
            return

        # find http port for this listening ip
        for srv in self.srv.values():
            tcps = self.hub.tcpsrv.bound
            hp = next((x[1] for x in tcps if x[0] in ("0.0.0.0", srv.ip)), 0)
            hp = hp or next((x[1] for x in tcps if x[0] == "::"), 0)
            if not hp:
                hp = tcps[0][1]
                self.log("assuming port {} for {}".format(hp, srv.ip), 3)
            srv.hport = hp

        self.log("listening")
        try:
            self.run2()
        except OSError as ex:
            if ex.errno != errno.EBADF:
                raise

            # EBADF is expected when stop() closes our sockets
            self.log("stopping due to {}".format(ex), "90")

        self.log("stopped", 2)

    def run2(self) -> None:
        """main loop: receive and answer M-SEARCH requests"""
        while self.running:
            rdy = select.select(self.srv, [], [], self.args.z_chk or 180)
            rx: list[socket.socket] = rdy[0]  # type: ignore
            self.rxc.cln()
            buf = b""
            addr = ("0", 0)
            for sck in rx:
                try:
                    buf, addr = sck.recvfrom(4096)
                    self.eat(buf, addr)
                except:
                    if not self.running:
                        break

                    t = "{} {} \033[33m|{}| {}\n{}".format(
                        self.srv[sck].name, addr, len(buf), repr(buf)[2:-1], min_ex()
                    )
                    self.log(t, 6)

    def stop(self) -> None:
        """shut down; closing the sockets unblocks run2's select"""
        self.running = False
        for srv in self.srv.values():
            try:
                srv.sck.close()
            except:
                pass

        self.srv = {}

    def eat(self, buf: bytes, addr: tuple[str, int]) -> None:
        """parse one incoming packet; reply if it is a rootdevice M-SEARCH"""
        cip = addr[0]
        if cip.startswith("169.254") and not self.ll_ok:
            return

        if buf in self.rxc.c:
            return

        srv: Optional[SSDP_Sck] = self.map_client(cip)  # type: ignore
        if not srv:
            return

        self.rxc.add(buf)
        if not buf.startswith(b"M-SEARCH * HTTP/1."):
            return

        if not self.ptn_st.search(buf):
            return

        if self.args.zsv:
            t = "{} [{}] \033[36m{} \033[0m|{}|"
            self.log(t.format(srv.name, srv.ip, cip, len(buf)), "90")

        # reply template; [1:] below strips the leading newline, and the
        # trailing blank line becomes the header-terminating CRLF
        zs = """
HTTP/1.1 200 OK
CACHE-CONTROL: max-age=1800
DATE: {0}
EXT:
LOCATION: http://{1}:{2}/.cpr/ssdp/device.xml
OPT: "http://schemas.upnp.org/upnp/1/0/"; ns=01
01-NLS: {3}
SERVER: UPnP/1.0
ST: upnp:rootdevice
USN: {3}::upnp:rootdevice
BOOTID.UPNP.ORG: 0
CONFIGID.UPNP.ORG: 1

"""
        v4 = srv.ip.replace("::ffff:", "")  # strip the v6-mapped-v4 prefix
        zs = zs.format(formatdate(usegmt=True), v4, srv.hport, self.args.zsid)
        zb = zs[1:].replace("\n", "\r\n").encode("utf-8", "replace")
        srv.sck.sendto(zb, addr[:2])

        if cip not in self.txc.c:
            self.log("{} [{}] --> {}".format(srv.name, srv.ip, cip), "6")

        self.txc.add(cip)
        self.txc.cln()
| @@ -1,21 +1,19 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import stat | ||||
| import tarfile | ||||
| import threading | ||||
|  | ||||
| from queue import Queue | ||||
|  | ||||
| from .bos import bos | ||||
| from .sutil import StreamArc, errdesc | ||||
| from .util import fsenc, min_ex | ||||
| from .util import Daemon, fsenc, min_ex | ||||
|  | ||||
| try: | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Generator, Optional | ||||
|  | ||||
|     from .util import NamedLogger | ||||
| except: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class QFile(object):  # inherit io.StringIO for painful typing | ||||
| @@ -60,9 +58,7 @@ class StreamTar(StreamArc): | ||||
|         fmt = tarfile.GNU_FORMAT | ||||
|         self.tar = tarfile.open(fileobj=self.qfile, mode="w|", format=fmt)  # type: ignore | ||||
|  | ||||
|         w = threading.Thread(target=self._gen, name="star-gen") | ||||
|         w.daemon = True | ||||
|         w.start() | ||||
|         Daemon(self._gen, "star-gen") | ||||
|  | ||||
|     def gen(self) -> Generator[Optional[bytes], None, None]: | ||||
|         try: | ||||
| @@ -84,6 +80,9 @@ class StreamTar(StreamArc): | ||||
|         src = f["ap"] | ||||
|         fsi = f["st"] | ||||
|  | ||||
|         if stat.S_ISDIR(fsi.st_mode): | ||||
|             return | ||||
|  | ||||
|         inf = tarfile.TarInfo(name=name) | ||||
|         inf.mode = fsi.st_mode | ||||
|         inf.size = fsi.st_size | ||||
|   | ||||
							
								
								
									
										5
									
								
								copyparty/stolen/dnslib/README.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										5
									
								
								copyparty/stolen/dnslib/README.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,5 @@ | ||||
| `dnslib` but heavily simplified/feature-stripped | ||||
|  | ||||
| L: MIT | ||||
| Copyright (c) 2010 - 2017 Paul Chakravarti | ||||
| https://github.com/paulc/dnslib/ | ||||
							
								
								
									
										11
									
								
								copyparty/stolen/dnslib/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										11
									
								
								copyparty/stolen/dnslib/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,11 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| """ | ||||
| L: MIT | ||||
| Copyright (c) 2010 - 2017 Paul Chakravarti | ||||
| https://github.com/paulc/dnslib/tree/0.9.23 | ||||
| """ | ||||
|  | ||||
| from .dns import * | ||||
|  | ||||
| version = "0.9.23" | ||||
							
								
								
									
										41
									
								
								copyparty/stolen/dnslib/bimap.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										41
									
								
								copyparty/stolen/dnslib/bimap.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,41 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| import types | ||||
|  | ||||
|  | ||||
class BimapError(Exception):
    """exception type available for failed Bimap lookups"""
|  | ||||
|  | ||||
class Bimap(object):
    """Two-way mapping between codes and their names.

    Forward (code -> name) lookups use indexing or .get();
    reverse (name -> code) lookups use attribute access.
    Misses are handed to *error*: either an exception class to raise,
    or a plain function called as error(name, key, forward).
    """

    def __init__(self, name, forward, error=AttributeError):
        self.name = name
        self.error = error
        self.forward = forward.copy()
        self.reverse = {v: k for k, v in forward.items()}

    def get(self, k, default=None):
        """forward lookup with a soft fallback: *default* or str(k)"""
        try:
            return self.forward[k]
        except KeyError:
            return default or str(k)

    def _miss(self, k, forward):
        # dispatch a failed lookup to the configured error handler
        if isinstance(self.error, types.FunctionType):
            return self.error(self.name, k, forward)
        if forward:
            raise self.error("%s: Invalid forward lookup: [%s]" % (self.name, k))
        raise self.error("%s: Invalid reverse lookup: [%s]" % (self.name, k))

    def __getitem__(self, k):
        try:
            return self.forward[k]
        except KeyError:
            return self._miss(k, True)

    def __getattr__(self, k):
        if k == "__wrapped__":
            # keep introspection helpers from triggering reverse lookups
            raise AttributeError()
        try:
            return self.reverse[k]
        except KeyError:
            return self._miss(k, False)
							
								
								
									
										15
									
								
								copyparty/stolen/dnslib/bit.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										15
									
								
								copyparty/stolen/dnslib/bit.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,15 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| from __future__ import print_function | ||||
|  | ||||
|  | ||||
def get_bits(data, offset, bits=1):
    """Extract a *bits*-wide field from *data*, starting at bit *offset*."""
    window = (1 << bits) - 1
    return (data >> offset) & window
|  | ||||
|  | ||||
def set_bits(data, value, offset, bits=1):
    """Return *data* with the *bits*-wide field at bit *offset* replaced
    by *value*; only the low 16 bits of *data* are kept (header words)."""
    field = ((1 << bits) - 1) << offset
    return (data & (0xFFFF ^ field)) | ((value << offset) & field)
							
								
								
									
										56
									
								
								copyparty/stolen/dnslib/buffer.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										56
									
								
								copyparty/stolen/dnslib/buffer.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,56 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| import binascii | ||||
| import struct | ||||
|  | ||||
|  | ||||
class BufferError(Exception):
    """raised on short reads or failed struct unpacks in Buffer
    (NOTE: shadows the builtin BufferError)"""
|  | ||||
|  | ||||
| class Buffer(object): | ||||
|     def __init__(self, data=b""): | ||||
|         self.data = bytearray(data) | ||||
|         self.offset = 0 | ||||
|  | ||||
|     def remaining(self): | ||||
|         return len(self.data) - self.offset | ||||
|  | ||||
|     def get(self, length): | ||||
|         if length > self.remaining(): | ||||
|             raise BufferError( | ||||
|                 "Not enough bytes [offset=%d,remaining=%d,requested=%d]" | ||||
|                 % (self.offset, self.remaining(), length) | ||||
|             ) | ||||
|         start = self.offset | ||||
|         end = self.offset + length | ||||
|         self.offset += length | ||||
|         return bytes(self.data[start:end]) | ||||
|  | ||||
|     def hex(self): | ||||
|         return binascii.hexlify(self.data) | ||||
|  | ||||
|     def pack(self, fmt, *args): | ||||
|         self.offset += struct.calcsize(fmt) | ||||
|         self.data += struct.pack(fmt, *args) | ||||
|  | ||||
|     def append(self, s): | ||||
|         self.offset += len(s) | ||||
|         self.data += s | ||||
|  | ||||
|     def update(self, ptr, fmt, *args): | ||||
|         s = struct.pack(fmt, *args) | ||||
|         self.data[ptr : ptr + len(s)] = s | ||||
|  | ||||
|     def unpack(self, fmt): | ||||
|         try: | ||||
|             data = self.get(struct.calcsize(fmt)) | ||||
|             return struct.unpack(fmt, data) | ||||
|         except struct.error: | ||||
|             raise BufferError( | ||||
|                 "Error unpacking struct '%s' <%s>" | ||||
|                 % (fmt, binascii.hexlify(data).decode()) | ||||
|             ) | ||||
|  | ||||
|     def __len__(self): | ||||
|         return len(self.data) | ||||
							
								
								
									
										775
									
								
								copyparty/stolen/dnslib/dns.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										775
									
								
								copyparty/stolen/dnslib/dns.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,775 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| from __future__ import print_function | ||||
|  | ||||
| import binascii | ||||
| from itertools import chain | ||||
|  | ||||
| from .bimap import Bimap, BimapError | ||||
| from .bit import get_bits, set_bits | ||||
| from .buffer import BufferError | ||||
| from .label import DNSBuffer, DNSLabel | ||||
| from .ranges import IP4, IP6, H, I, check_bytes | ||||
|  | ||||
|  | ||||
class DNSError(Exception):
    """Raised for any DNS encode/decode failure in this module."""

    pass
|  | ||||
|  | ||||
def unknown_qtype(name, key, forward):
    """Bimap fallback for QTYPE values that have no symbolic name.

    forward (int -> str): render the numeric type as "TYPEnnn".
    reverse (str -> int): parse a "TYPEnnn" string back to its number.
    Raises DNSError for anything else.
    """
    if forward:
        try:
            return "TYPE%d" % (key,)
        except TypeError:  # narrowed from bare except: only %d formatting can fail
            raise DNSError("%s: Invalid forward lookup: [%s]" % (name, key))
    else:
        if key.startswith("TYPE"):
            try:
                return int(key[4:])
            except ValueError:  # narrowed from bare except: non-numeric suffix
                pass
        raise DNSError("%s: Invalid reverse lookup: [%s]" % (name, key))
|  | ||||
|  | ||||
# record-type number <-> name map; unknown types round-trip as "TYPEnnn"
QTYPE = Bimap(
    "QTYPE",
    {1: "A", 12: "PTR", 16: "TXT", 28: "AAAA", 33: "SRV", 47: "NSEC", 255: "ANY"},
    unknown_qtype,
)

# NOTE(review): 0x8001 appears to be IN with the mDNS cache-flush bit set -- confirm
CLASS = Bimap("CLASS", {1: "IN", 254: "None", 255: "*", 0x8001: "F_IN"}, DNSError)

# query/response flag (header bit 15)
QR = Bimap("QR", {0: "QUERY", 1: "RESPONSE"}, DNSError)

# response codes (header bits 0-3)
RCODE = Bimap(
    "RCODE",
    {
        0: "NOERROR",
        1: "FORMERR",
        2: "SERVFAIL",
        3: "NXDOMAIN",
        4: "NOTIMP",
        5: "REFUSED",
        6: "YXDOMAIN",
        7: "YXRRSET",
        8: "NXRRSET",
        9: "NOTAUTH",
        10: "NOTZONE",
    },
    DNSError,
)

# operation codes (header bits 11-14)
OPCODE = Bimap(
    "OPCODE", {0: "QUERY", 1: "IQUERY", 2: "STATUS", 4: "NOTIFY", 5: "UPDATE"}, DNSError
)
|  | ||||
|  | ||||
def label(label, origin=None):
    """Return *label* as a DNSLabel: absolute if it ends with ".",
    otherwise appended to *origin*."""
    if label.endswith("."):
        return DNSLabel(label)
    root = origin if isinstance(origin, DNSLabel) else DNSLabel(origin)
    return root.add(label)
|  | ||||
|  | ||||
class DNSRecord(object):
    """A complete DNS message: header plus the question, answer,
    authority and additional sections."""

    @classmethod
    def parse(cls, packet) -> "DNSRecord":
        """Parse raw packet bytes into a DNSRecord; raises DNSError on
        malformed input."""
        buffer = DNSBuffer(packet)
        try:
            header = DNSHeader.parse(buffer)
            questions = []
            rr = []
            auth = []
            ar = []
            # section order on the wire: questions, answers, authority, additional
            for i in range(header.q):
                questions.append(DNSQuestion.parse(buffer))
            for i in range(header.a):
                rr.append(RR.parse(buffer))
            for i in range(header.auth):
                auth.append(RR.parse(buffer))
            for i in range(header.ar):
                ar.append(RR.parse(buffer))
            return cls(header, questions, rr, auth=auth, ar=ar)
        except (BufferError, BimapError) as e:
            raise DNSError(
                "Error unpacking DNSRecord [offset=%d]: %s" % (buffer.offset, e)
            )

    @classmethod
    def question(cls, qname, qtype="A", qclass="IN"):
        """Convenience constructor: a query with a single question."""
        return DNSRecord(
            q=DNSQuestion(qname, getattr(QTYPE, qtype), getattr(CLASS, qclass))
        )

    def __init__(
        self, header=None, questions=None, rr=None, q=None, a=None, auth=None, ar=None
    ) -> None:
        """Build a record from section lists; ``q``/``a`` append a single
        question/answer on top of ``questions``/``rr``."""
        self.header = header or DNSHeader()
        self.questions: list[DNSQuestion] = questions or []
        self.rr: list[RR] = rr or []
        self.auth: list[RR] = auth or []
        self.ar: list[RR] = ar or []

        if q:
            self.questions.append(q)
        if a:
            self.rr.append(a)
        # keep the header's section counters in sync with the lists
        self.set_header_qa()

    def reply(self, ra=1, aa=1):
        """Create a response skeleton echoing this record's id, bitmap
        and first question."""
        return DNSRecord(
            DNSHeader(id=self.header.id, bitmap=self.header.bitmap, qr=1, ra=ra, aa=aa),
            q=self.q,
        )

    def add_question(self, *q) -> None:
        """Append question(s) and refresh the header counters."""
        self.questions.extend(q)
        self.set_header_qa()

    def add_answer(self, *rr) -> None:
        """Append answer RR(s) and refresh the header counters."""
        self.rr.extend(rr)
        self.set_header_qa()

    def add_auth(self, *auth) -> None:
        """Append authority RR(s) and refresh the header counters."""
        self.auth.extend(auth)
        self.set_header_qa()

    def add_ar(self, *ar) -> None:
        """Append additional RR(s) and refresh the header counters."""
        self.ar.extend(ar)
        self.set_header_qa()

    def set_header_qa(self) -> None:
        """Sync the header's q/a/auth/ar counts with the section lists."""
        self.header.q = len(self.questions)
        self.header.a = len(self.rr)
        self.header.auth = len(self.auth)
        self.header.ar = len(self.ar)

    def get_q(self):
        # first question, or an empty placeholder when there is none
        return self.questions[0] if self.questions else DNSQuestion()

    q = property(get_q)

    def get_a(self):
        # first answer, or an empty placeholder when there is none
        return self.rr[0] if self.rr else RR()

    a = property(get_a)

    def pack(self) -> bytes:
        """Serialize the whole message to wire-format bytes."""
        self.set_header_qa()
        buffer = DNSBuffer()
        self.header.pack(buffer)
        for q in self.questions:
            q.pack(buffer)
        for rr in self.rr:
            rr.pack(buffer)
        for auth in self.auth:
            auth.pack(buffer)
        for ar in self.ar:
            ar.pack(buffer)
        return buffer.data

    def truncate(self):
        """Return an empty response with the TC (truncated) flag set."""
        return DNSRecord(DNSHeader(id=self.header.id, bitmap=self.header.bitmap, tc=1))

    def format(self, prefix="", sort=False):
        """Multi-line dump of all sections, optionally sorted per section."""
        s = sorted if sort else lambda x: x
        sections = [repr(self.header)]
        sections.extend(s([repr(q) for q in self.questions]))
        sections.extend(s([repr(rr) for rr in self.rr]))
        sections.extend(s([repr(rr) for rr in self.auth]))
        sections.extend(s([repr(rr) for rr in self.ar]))
        return prefix + ("\n" + prefix).join(sections)

    short = format

    def __repr__(self):
        return self.format()

    __str__ = __repr__
|  | ||||
|  | ||||
class DNSHeader(object):
    """The fixed 12-byte DNS header.

    All flags and the rcode live in the 16-bit ``bitmap`` field; the
    property pairs below expose the individual bit ranges:
    qr=15, opcode=11-14, aa=10, tc=9, rd=8, ra=7, z=6, ad=5, cd=4,
    rcode=0-3.
    """

    # range-checked 16-bit field descriptors
    id = H("id")
    bitmap = H("bitmap")
    q = H("q")
    a = H("a")
    auth = H("auth")
    ar = H("ar")

    @classmethod
    def parse(cls, buffer):
        """Read the 12-byte header from *buffer*."""
        try:
            (id, bitmap, q, a, auth, ar) = buffer.unpack("!HHHHHH")
            return cls(id, bitmap, q, a, auth, ar)
        except (BufferError, BimapError) as e:
            raise DNSError(
                "Error unpacking DNSHeader [offset=%d]: %s" % (buffer.offset, e)
            )

    def __init__(self, id=None, bitmap=None, q=0, a=0, auth=0, ar=0, **args) -> None:
        """Extra keyword args (qr, opcode, aa, tc, rd, ra, z, ad, cd,
        rcode; case-insensitive) set the corresponding bits in ``bitmap``."""
        self.id = id if id else 0
        if bitmap is None:
            self.bitmap = 0
        else:
            self.bitmap = bitmap
        self.q = q
        self.a = a
        self.auth = auth
        self.ar = ar
        for k, v in args.items():
            if k.lower() == "qr":
                self.qr = v
            elif k.lower() == "opcode":
                self.opcode = v
            elif k.lower() == "aa":
                self.aa = v
            elif k.lower() == "tc":
                self.tc = v
            elif k.lower() == "rd":
                self.rd = v
            elif k.lower() == "ra":
                self.ra = v
            elif k.lower() == "z":
                self.z = v
            elif k.lower() == "ad":
                self.ad = v
            elif k.lower() == "cd":
                self.cd = v
            elif k.lower() == "rcode":
                self.rcode = v

    # --- bitmap accessors: each property reads/writes its bit range ---

    def get_qr(self):
        return get_bits(self.bitmap, 15)

    def set_qr(self, val):
        self.bitmap = set_bits(self.bitmap, val, 15)

    qr = property(get_qr, set_qr)

    def get_opcode(self):
        return get_bits(self.bitmap, 11, 4)

    def set_opcode(self, val):
        self.bitmap = set_bits(self.bitmap, val, 11, 4)

    opcode = property(get_opcode, set_opcode)

    def get_aa(self):
        return get_bits(self.bitmap, 10)

    def set_aa(self, val):
        self.bitmap = set_bits(self.bitmap, val, 10)

    aa = property(get_aa, set_aa)

    def get_tc(self):
        return get_bits(self.bitmap, 9)

    def set_tc(self, val):
        self.bitmap = set_bits(self.bitmap, val, 9)

    tc = property(get_tc, set_tc)

    def get_rd(self):
        return get_bits(self.bitmap, 8)

    def set_rd(self, val):
        self.bitmap = set_bits(self.bitmap, val, 8)

    rd = property(get_rd, set_rd)

    def get_ra(self):
        return get_bits(self.bitmap, 7)

    def set_ra(self, val):
        self.bitmap = set_bits(self.bitmap, val, 7)

    ra = property(get_ra, set_ra)

    def get_z(self):
        return get_bits(self.bitmap, 6)

    def set_z(self, val):
        self.bitmap = set_bits(self.bitmap, val, 6)

    z = property(get_z, set_z)

    def get_ad(self):
        return get_bits(self.bitmap, 5)

    def set_ad(self, val):
        self.bitmap = set_bits(self.bitmap, val, 5)

    ad = property(get_ad, set_ad)

    def get_cd(self):
        return get_bits(self.bitmap, 4)

    def set_cd(self, val):
        self.bitmap = set_bits(self.bitmap, val, 4)

    cd = property(get_cd, set_cd)

    def get_rcode(self):
        return get_bits(self.bitmap, 0, 4)

    def set_rcode(self, val):
        self.bitmap = set_bits(self.bitmap, val, 0, 4)

    rcode = property(get_rcode, set_rcode)

    def pack(self, buffer):
        """Write the 12-byte header into *buffer*."""
        buffer.pack("!HHHHHH", self.id, self.bitmap, self.q, self.a, self.auth, self.ar)

    def __repr__(self):
        # set flags rendered by name; UPDATE messages use different
        # section names (zone/prereq/update/additional) per its spec
        f = [
            self.aa and "AA",
            self.tc and "TC",
            self.rd and "RD",
            self.ra and "RA",
            self.z and "Z",
            self.ad and "AD",
            self.cd and "CD",
        ]
        if OPCODE.get(self.opcode) == "UPDATE":
            f1 = "zo"
            f2 = "pr"
            f3 = "up"
            f4 = "ad"
        else:
            f1 = "q"
            f2 = "a"
            f3 = "ns"
            f4 = "ar"
        return (
            "<DNS Header: id=0x%x type=%s opcode=%s flags=%s "
            "rcode='%s' %s=%d %s=%d %s=%d %s=%d>"
            % (
                self.id,
                QR.get(self.qr),
                OPCODE.get(self.opcode),
                ",".join(filter(None, f)),
                RCODE.get(self.rcode),
                f1,
                self.q,
                f2,
                self.a,
                f3,
                self.auth,
                f4,
                self.ar,
            )
        )

    __str__ = __repr__
|  | ||||
|  | ||||
class DNSQuestion(object):
    """One question-section entry: (qname, qtype, qclass)."""

    @classmethod
    def parse(cls, buffer):
        """Read a single question from *buffer*."""
        try:
            qname = buffer.decode_name()
            qtype, qclass = buffer.unpack("!HH")
            return cls(qname, qtype, qclass)
        except (BufferError, BimapError) as e:
            raise DNSError(
                "Error unpacking DNSQuestion [offset=%d]: %s" % (buffer.offset, e)
            )

    def __init__(self, qname=None, qtype=1, qclass=1) -> None:
        self.qname = qname
        self.qtype = qtype
        self.qclass = qclass

    def get_qname(self):
        return self._qname

    def set_qname(self, qname):
        # normalize anything assigned to qname into a DNSLabel
        self._qname = qname if isinstance(qname, DNSLabel) else DNSLabel(qname)

    qname = property(get_qname, set_qname)

    def pack(self, buffer):
        """Serialize this question into *buffer*."""
        buffer.encode_name(self.qname)
        buffer.pack("!HH", self.qtype, self.qclass)

    def __repr__(self):
        return "<DNS Question: '%s' qtype=%s qclass=%s>" % (
            self.qname,
            QTYPE.get(self.qtype),
            CLASS.get(self.qclass),
        )

    __str__ = __repr__
|  | ||||
|  | ||||
class RR(object):
    """A resource record: name, type, class, TTL and typed rdata."""

    # range-checked field descriptors (16-bit / 32-bit)
    rtype = H("rtype")
    rclass = H("rclass")
    ttl = I("ttl")
    rdlength = H("rdlength")

    @classmethod
    def parse(cls, buffer):
        """Read one RR from *buffer*; rdata is dispatched to the class
        registered in RDMAP for its type (falling back to generic RD)."""
        try:
            rname = buffer.decode_name()
            rtype, rclass, ttl, rdlength = buffer.unpack("!HHIH")
            if rdlength:
                rdata = RDMAP.get(QTYPE.get(rtype), RD).parse(buffer, rdlength)
            else:
                rdata = ""
            return cls(rname, rtype, rclass, ttl, rdata)
        except (BufferError, BimapError) as e:
            raise DNSError("Error unpacking RR [offset=%d]: %s" % (buffer.offset, e))

    def __init__(self, rname=None, rtype=1, rclass=1, ttl=0, rdata=None) -> None:
        self.rname = rname
        self.rtype = rtype
        self.rclass = rclass
        self.ttl = ttl
        self.rdata = rdata

    def set_rname(self, rname):
        # normalize anything assigned to rname into a DNSLabel
        if isinstance(rname, DNSLabel):
            self._rname = rname
        else:
            self._rname = DNSLabel(rname)

    def get_rname(self):
        return self._rname

    rname = property(get_rname, set_rname)

    def pack(self, buffer):
        """Serialize this RR; rdlength is written as 0 first and
        back-patched once the rdata length is known."""
        buffer.encode_name(self.rname)
        buffer.pack("!HHI", self.rtype, self.rclass, self.ttl)
        rdlength_ptr = buffer.offset
        buffer.pack("!H", 0)
        start = buffer.offset
        self.rdata.pack(buffer)
        end = buffer.offset
        buffer.update(rdlength_ptr, "!H", end - start)

    def __repr__(self):
        return "<DNS RR: '%s' rtype=%s rclass=%s ttl=%d rdata='%s'>" % (
            self.rname,
            QTYPE.get(self.rtype),
            CLASS.get(self.rclass),
            self.ttl,
            self.rdata,
        )

    __str__ = __repr__
|  | ||||
|  | ||||
class RD(object):
    """Generic opaque rdata: raw bytes with no further structure.
    Base class for all typed rdata implementations."""

    @classmethod
    def parse(cls, buffer, length):
        """Read ``length`` raw bytes from *buffer*."""
        try:
            return cls(buffer.get(length))
        except (BufferError, BimapError) as e:
            raise DNSError("Error unpacking RD [offset=%d]: %s" % (buffer.offset, e))

    def __init__(self, data=b"") -> None:
        check_bytes("data", data)
        self.data = bytes(data)

    def pack(self, buffer):
        """Append the raw bytes to *buffer*."""
        buffer.append(self.data)

    def __repr__(self):
        # RFC 3597 "unknown rdata" presentation format
        if not self.data:
            return "\\# 0"
        dump = binascii.hexlify(self.data).decode().upper()
        return "\\# %d %s" % (len(self.data), dump)

    attrs = ("data",)
|  | ||||
|  | ||||
| def _force_bytes(x): | ||||
|     if isinstance(x, bytes): | ||||
|         return x | ||||
|     else: | ||||
|         return x.encode() | ||||
|  | ||||
|  | ||||
class TXT(RD):
    """TXT rdata: a list of character-strings, each at most 255 bytes,
    stored on the wire as (length byte + data) chunks."""

    @classmethod
    def parse(cls, buffer, length):
        """Read character-strings until ``length`` bytes are consumed,
        rejecting any chunk that would overrun the rdata boundary."""
        try:
            data = list()
            start_bo = buffer.offset
            now_length = 0
            while buffer.offset < start_bo + length:
                (txtlength,) = buffer.unpack("!B")

                # strict "<" is safe: the length bytes themselves also
                # count toward `length`, so a valid chunk always fits
                if now_length + txtlength < length:
                    now_length += txtlength
                    data.append(buffer.get(txtlength))
                else:
                    raise DNSError(
                        "Invalid TXT record: len(%d) > RD len(%d)" % (txtlength, length)
                    )
            return cls(data)
        except (BufferError, BimapError) as e:
            raise DNSError("Error unpacking TXT [offset=%d]: %s" % (buffer.offset, e))

    def __init__(self, data) -> None:
        # accept a single string/bytes or a sequence of them
        if type(data) in (tuple, list):
            self.data = [_force_bytes(x) for x in data]
        else:
            self.data = [_force_bytes(data)]
        if any([len(x) > 255 for x in self.data]):
            raise DNSError("TXT record too long: %s" % self.data)

    def pack(self, buffer):
        """Write each string as a length byte followed by its data."""
        for ditem in self.data:
            if len(ditem) > 255:
                raise DNSError("TXT record too long: %s" % ditem)
            buffer.pack("!B", len(ditem))
            buffer.append(ditem)

    def __repr__(self):
        return ",".join([repr(x) for x in self.data])
|  | ||||
|  | ||||
class A(RD):
    """IPv4 address rdata: four octets."""

    data = IP4("data")

    @classmethod
    def parse(cls, buffer, length):
        """Read four octets from *buffer*."""
        try:
            return cls(buffer.unpack("!BBBB"))
        except (BufferError, BimapError) as e:
            raise DNSError("Error unpacking A [offset=%d]: %s" % (buffer.offset, e))

    def __init__(self, data) -> None:
        # accept a 4-tuple/list of ints or a dotted-quad string
        if type(data) in (tuple, list):
            self.data = tuple(data)
        else:
            octets = data.rstrip(".").split(".")
            self.data = tuple(map(int, octets))

    def pack(self, buffer):
        """Write the four octets into *buffer*."""
        buffer.pack("!BBBB", *self.data)

    def __repr__(self):
        return "%d.%d.%d.%d" % self.data
|  | ||||
|  | ||||
| def _parse_ipv6(a): | ||||
|     l, _, r = a.partition("::") | ||||
|     l_groups = list(chain(*[divmod(int(x, 16), 256) for x in l.split(":") if x])) | ||||
|     r_groups = list(chain(*[divmod(int(x, 16), 256) for x in r.split(":") if x])) | ||||
|     zeros = [0] * (16 - len(l_groups) - len(r_groups)) | ||||
|     return tuple(l_groups + zeros + r_groups) | ||||
|  | ||||
|  | ||||
| def _format_ipv6(a): | ||||
|     left = [] | ||||
|     right = [] | ||||
|     current = "left" | ||||
|     for i in range(0, 16, 2): | ||||
|         group = (a[i] << 8) + a[i + 1] | ||||
|         if current == "left": | ||||
|             if group == 0 and i < 14: | ||||
|                 if (a[i + 2] << 8) + a[i + 3] == 0: | ||||
|                     current = "right" | ||||
|                 else: | ||||
|                     left.append("0") | ||||
|             else: | ||||
|                 left.append("%x" % group) | ||||
|         else: | ||||
|             if group == 0 and len(right) == 0: | ||||
|                 pass | ||||
|             else: | ||||
|                 right.append("%x" % group) | ||||
|     if len(left) < 8: | ||||
|         return ":".join(left) + "::" + ":".join(right) | ||||
|     else: | ||||
|         return ":".join(left) | ||||
|  | ||||
|  | ||||
class AAAA(RD):
    """IPv6 address rdata: sixteen octets."""

    data = IP6("data")

    @classmethod
    def parse(cls, buffer, length):
        """Read sixteen octets from *buffer*."""
        try:
            return cls(buffer.unpack("!16B"))
        except (BufferError, BimapError) as e:
            raise DNSError("Error unpacking AAAA [offset=%d]: %s" % (buffer.offset, e))

    def __init__(self, data) -> None:
        # accept a 16-tuple/list of ints or an IPv6 address string
        if type(data) in (tuple, list):
            self.data = tuple(data)
        else:
            self.data = _parse_ipv6(data)

    def pack(self, buffer):
        """Write the sixteen octets into *buffer*."""
        buffer.pack("!16B", *self.data)

    def __repr__(self):
        return _format_ipv6(self.data)
|  | ||||
|  | ||||
class CNAME(RD):
    """Domain-name rdata (a single name); also the base class for PTR."""

    @classmethod
    def parse(cls, buffer, length):
        """Read one (possibly compressed) domain name from *buffer*."""
        try:
            return cls(buffer.decode_name())
        except (BufferError, BimapError) as e:
            raise DNSError("Error unpacking CNAME [offset=%d]: %s" % (buffer.offset, e))

    def __init__(self, label=None) -> None:
        self.label = label

    def get_label(self):
        return self._label

    def set_label(self, label):
        # normalize anything assigned to label into a DNSLabel
        self._label = label if isinstance(label, DNSLabel) else DNSLabel(label)

    label = property(get_label, set_label)

    def pack(self, buffer):
        """Serialize the name into *buffer* (with compression)."""
        buffer.encode_name(self.label)

    def __repr__(self):
        return "%s" % (self.label)

    attrs = ("label",)
|  | ||||
|  | ||||
class PTR(CNAME):
    """PTR rdata: identical wire format to CNAME (a single domain name)."""

    pass
|  | ||||
|  | ||||
class SRV(RD):
    """SRV rdata: priority, weight, port and target host name."""

    # range-checked 16-bit field descriptors
    priority = H("priority")
    weight = H("weight")
    port = H("port")

    @classmethod
    def parse(cls, buffer, length):
        """Read priority/weight/port then the target name from *buffer*."""
        try:
            priority, weight, port = buffer.unpack("!HHH")
            target = buffer.decode_name()
            return cls(priority, weight, port, target)
        except (BufferError, BimapError) as e:
            raise DNSError("Error unpacking SRV [offset=%d]: %s" % (buffer.offset, e))

    def __init__(self, priority=0, weight=0, port=0, target=None) -> None:
        self.priority = priority
        self.weight = weight
        self.port = port
        self.target = target

    def set_target(self, target):
        # normalize anything assigned to target into a DNSLabel
        if isinstance(target, DNSLabel):
            self._target = target
        else:
            self._target = DNSLabel(target)

    def get_target(self):
        return self._target

    target = property(get_target, set_target)

    def pack(self, buffer):
        """Serialize the fixed fields then the target name."""
        buffer.pack("!HHH", self.priority, self.weight, self.port)
        buffer.encode_name(self.target)

    def __repr__(self):
        return "%d %d %d %s" % (self.priority, self.weight, self.port, self.target)

    attrs = ("priority", "weight", "port", "target")
|  | ||||
|  | ||||
def decode_type_bitmap(type_bitmap):
    """Expand an NSEC type bitmap (RFC 4034 window format) into a list
    of QTYPE names."""
    rrlist = []
    buf = DNSBuffer(type_bitmap)
    while buf.remaining():
        winnum, winlen = buf.unpack("BB")
        window = bytearray(buf.get(winlen))
        base = 256 * winnum
        for pos, value in enumerate(window):
            # high bit first: bit i of this byte is rrtype base + 8*pos + i
            for bit in range(8):
                if value & (0x80 >> bit):
                    rrlist.append(QTYPE[base + 8 * pos + bit])
    return rrlist
|  | ||||
|  | ||||
def encode_type_bitmap(rrlist):
    """Encode a list of QTYPE names as an NSEC type bitmap (RFC 4034):
    sorted rrtypes are grouped into 256-wide windows, each emitted as
    (window number, bitmap length, bitmap with trailing zeros stripped)."""
    rrlist = sorted([getattr(QTYPE, rr) for rr in rrlist])
    buf = DNSBuffer()
    curWindow = rrlist[0] // 256
    bitmap = bytearray(32)
    n = len(rrlist) - 1
    for i, rr in enumerate(rrlist):
        # set the bit for this rrtype within the current window (MSB first)
        v = rr - curWindow * 256
        bitmap[v // 8] |= 1 << (7 - v % 8)

        # flush when this is the last rrtype or the next one starts a new window
        if i == n or rrlist[i + 1] >= (curWindow + 1) * 256:
            # strip trailing zero bytes (at least one bit is set, so this terminates)
            while bitmap[-1] == 0:
                bitmap = bitmap[:-1]
            buf.pack("BB", curWindow, len(bitmap))
            buf.append(bitmap)

            if i != n:
                curWindow = rrlist[i + 1] // 256
                bitmap = bytearray(32)

    return buf.data
|  | ||||
|  | ||||
class NSEC(RD):
    """NSEC rdata: the next owner name plus a bitmap of rrtypes that
    exist at this name."""

    @classmethod
    def parse(cls, buffer, length):
        """Read the next-domain name, then treat the rest of the rdata
        as the type bitmap."""
        try:
            end = buffer.offset + length
            name = buffer.decode_name()
            rrlist = decode_type_bitmap(buffer.get(end - buffer.offset))
            return cls(name, rrlist)
        except (BufferError, BimapError) as e:
            raise DNSError("Error unpacking NSEC [offset=%d]: %s" % (buffer.offset, e))

    def __init__(self, label, rrlist) -> None:
        self.label = label
        self.rrlist = rrlist

    def set_label(self, label):
        # normalize anything assigned to label into a DNSLabel
        if isinstance(label, DNSLabel):
            self._label = label
        else:
            self._label = DNSLabel(label)

    def get_label(self):
        return self._label

    label = property(get_label, set_label)

    def pack(self, buffer):
        """Serialize the name followed by the encoded type bitmap."""
        buffer.encode_name(self.label)
        buffer.append(encode_type_bitmap(self.rrlist))

    def __repr__(self):
        return "%s %s" % (self.label, " ".join(self.rrlist))

    attrs = ("label", "rrlist")
|  | ||||
|  | ||||
# rrtype name -> rdata class; unlisted types fall back to generic RD
RDMAP = {"A": A, "AAAA": AAAA, "TXT": TXT, "PTR": PTR, "SRV": SRV, "NSEC": NSEC}
							
								
								
									
										154
									
								
								copyparty/stolen/dnslib/label.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										154
									
								
								copyparty/stolen/dnslib/label.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,154 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| from __future__ import print_function | ||||
|  | ||||
| import re | ||||
|  | ||||
| from .bit import get_bits, set_bits | ||||
| from .buffer import Buffer, BufferError | ||||
|  | ||||
# printable ASCII (no space): bytes that may appear unescaped in a label
LDH = set(range(33, 127))
# matches \DDD decimal escapes in presentation-format labels
ESCAPE = re.compile(r"\\([0-9][0-9][0-9])")
|  | ||||
|  | ||||
class DNSLabelError(Exception):
    """Raised when a domain name or label component exceeds DNS limits."""

    pass
|  | ||||
|  | ||||
class DNSLabel(object):
    """A DNS domain name held as a tuple of byte-string components
    (no trailing root component)."""

    def __init__(self, label):
        # accepts another DNSLabel, a list/tuple of components, or a
        # (byte)string in presentation format
        if type(label) == DNSLabel:
            self.label = label.label
        elif type(label) in (list, tuple):
            self.label = tuple(label)
        else:
            if not label or label in (b".", "."):
                self.label = ()
            elif type(label) is not bytes:
                if type("") != type(b""):
                    # py3 str: expand \DDD escapes before IDNA-encoding
                    label = ESCAPE.sub(lambda m: chr(int(m[1])), label)
                self.label = tuple(label.encode("idna").rstrip(b".").split(b"."))
            else:
                if type("") == type(b""):
                    # py2 str (== bytes): expand \DDD escapes in place
                    label = ESCAPE.sub(lambda m: chr(int(m.groups()[0])), label)
                self.label = tuple(label.rstrip(b".").split(b"."))

    def add(self, name):
        """Return a new label with *name* prepended to this one."""
        new = DNSLabel(name)
        if self.label:
            new.label += self.label
        return new

    def idna(self):
        """Unicode (IDNA-decoded) form, with trailing dot."""
        return ".".join([s.decode("idna") for s in self.label]) + "."

    def _decode(self, s):
        # render one component: plain ascii directly, anything else as \DDD
        if set(s).issubset(LDH):

            return s.decode()
        else:

            return "".join([(chr(c) if (c in LDH) else "\\%03d" % c) for c in s])

    def __str__(self):
        return ".".join([self._decode(bytearray(s)) for s in self.label]) + "."

    def __repr__(self):
        return "<DNSLabel: '%s'>" % str(self)

    def __hash__(self):
        # case-insensitive, matching __eq__
        return hash(tuple(map(lambda x: x.lower(), self.label)))

    def __ne__(self, other):
        return not self == other

    def __eq__(self, other):
        # case-insensitive comparison; coerces non-DNSLabel operands
        if type(other) != DNSLabel:
            return self.__eq__(DNSLabel(other))
        else:
            return [l.lower() for l in self.label] == [l.lower() for l in other.label]

    def __len__(self):
        # wire-ish length: component bytes plus separating dots
        return len(b".".join(self.label))
|  | ||||
|  | ||||
| class DNSBuffer(Buffer): | ||||
    def __init__(self, data=b""):
        super(DNSBuffer, self).__init__(data)
        # name-suffix -> offset cache used for label compression on encode
        self.names = {}
|  | ||||
    def decode_name(self, last=-1):
        """Decode a (possibly compression-pointer) DNS name at the current
        offset; ``last`` tracks the offset a pointer was followed from, to
        detect pointer loops."""
        label = []
        done = False
        while not done:
            (length,) = self.unpack("!B")
            if get_bits(length, 6, 2) == 3:
                # top two bits set: this is a 14-bit compression pointer
                self.offset -= 1
                pointer = get_bits(self.unpack("!H")[0], 0, 14)
                save = self.offset
                if last == save:
                    raise BufferError(
                        "Recursive pointer in DNSLabel [offset=%d,pointer=%d,length=%d]"
                        % (self.offset, pointer, len(self.data))
                    )
                if pointer < self.offset:
                    self.offset = pointer
                else:
                    # pointers may only reference earlier positions
                    raise BufferError(
                        "Invalid pointer in DNSLabel [offset=%d,pointer=%d,length=%d]"
                        % (self.offset, pointer, len(self.data))
                    )
                label.extend(self.decode_name(save).label)
                self.offset = save
                done = True
            else:
                if length > 0:
                    l = self.get(length)
                    try:
                        l.decode()
                    except UnicodeDecodeError:
                        raise BufferError("Invalid label <%s>" % l)
                    label.append(l)
                else:
                    # zero length byte terminates the name
                    done = True
        return DNSLabel(label)
|  | ||||
|     def encode_name(self, name): | ||||
|         if not isinstance(name, DNSLabel): | ||||
|             name = DNSLabel(name) | ||||
|         if len(name) > 253: | ||||
|             raise DNSLabelError("Domain label too long: %r" % name) | ||||
|         name = list(name.label) | ||||
|         while name: | ||||
|             if tuple(name) in self.names: | ||||
|  | ||||
|                 pointer = self.names[tuple(name)] | ||||
|                 pointer = set_bits(pointer, 3, 14, 2) | ||||
|                 self.pack("!H", pointer) | ||||
|                 return | ||||
|             else: | ||||
|                 self.names[tuple(name)] = self.offset | ||||
|                 element = name.pop(0) | ||||
|                 if len(element) > 63: | ||||
|                     raise DNSLabelError("Label component too long: %r" % element) | ||||
|                 self.pack("!B", len(element)) | ||||
|                 self.append(element) | ||||
|         self.append(b"\x00") | ||||
|  | ||||
|     def encode_name_nocompress(self, name): | ||||
|         if not isinstance(name, DNSLabel): | ||||
|             name = DNSLabel(name) | ||||
|         if len(name) > 253: | ||||
|             raise DNSLabelError("Domain label too long: %r" % name) | ||||
|         name = list(name.label) | ||||
|         while name: | ||||
|             element = name.pop(0) | ||||
|             if len(element) > 63: | ||||
|                 raise DNSLabelError("Label component too long: %r" % element) | ||||
|             self.pack("!B", len(element)) | ||||
|             self.append(element) | ||||
|         self.append(b"\x00") | ||||
							
								
								
									
										105
									
								
								copyparty/stolen/dnslib/lex.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										105
									
								
								copyparty/stolen/dnslib/lex.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,105 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| from __future__ import print_function | ||||
|  | ||||
| import collections | ||||
|  | ||||
| try: | ||||
|     from StringIO import StringIO | ||||
| except ImportError: | ||||
|     from io import StringIO | ||||
|  | ||||
|  | ||||
class Lexer(object):
    """
    Base class for a simple state-machine tokenizer over a string,
    bytes, or file-like source.

    Subclasses implement lexer states as zero-argument methods that
    return a ``(token, next_state)`` pair; this base class provides
    buffered reading with pushback and backslash-escape decoding.
    """

    escape_chars = "\\"
    escape = {"n": "\n", "t": "\t", "r": "\r"}

    def __init__(self, f, debug=False):
        """Wrap *f* (file-like, str, or bytes; bytes are decoded)."""
        if hasattr(f, "read"):
            self.f = f
        elif type(f) == str:
            self.f = StringIO(f)
        elif type(f) == bytes:
            self.f = StringIO(f.decode())
        else:
            raise ValueError("Invalid input")
        self.debug = debug
        self.q = collections.deque()  # pushback queue of single chars
        self.state = self.lexStart
        self.escaped = False
        self.eof = False

    def __iter__(self):
        return self.parse()

    def next_token(self):
        """Run the current state; it returns (token-or-None, next-state)."""
        if self.debug:
            print("STATE", self.state)
        (tok, self.state) = self.state()
        return tok

    def parse(self):
        """Yield tokens until a state returns None or input is exhausted."""
        while self.state is not None and not self.eof:
            tok = self.next_token()
            if tok:
                yield tok

    def read(self, n=1):
        """Consume up to *n* chars, draining the pushback queue first."""
        s = ""
        while self.q and n > 0:
            s += self.q.popleft()
            n -= 1
        s += self.f.read(n)
        if s == "":
            self.eof = True
        if self.debug:
            print("Read: >%s<" % repr(s))
        return s

    def peek(self, n=1):
        """Look ahead up to *n* chars without consuming them."""
        s = ""
        i = 0
        while len(self.q) > i and n > 0:
            s += self.q[i]
            i += 1
            n -= 1
        r = self.f.read(n)
        if n > 0 and r == "":
            self.eof = True
        self.q.extend(r)  # keep freshly-read chars available for read()
        if self.debug:
            print("Peek : >%s<" % repr(s + r))
        return s + r

    def pushback(self, s):
        """Return chars to the front of the input."""
        p = collections.deque(s)
        p.extend(self.q)
        self.q = p

    def readescaped(self):
        """
        Read one char, decoding backslash escapes: \\ooo (octal),
        \\xhh (hex), \\n / \\t / \\r, or the literal next char.
        Sets self.escaped so callers can tell "\\n" from a raw newline.
        """
        c = self.read(1)
        if c in self.escape_chars:
            self.escaped = True
            n = self.peek(3)
            if n.isdigit():
                n = self.read(3)
                if self.debug:
                    print("Escape: >%s<" % n)
                return chr(int(n, 8))
            elif n.startswith("x"):
                # bugfix: was `n[0] in "x"`, which raised IndexError when
                # the input ended at a backslash (peek returned "");
                # startswith is safe on the empty string
                x = self.read(3)
                if self.debug:
                    print("Escape: >%s<" % x)
                return chr(int(x[1:], 16))
            else:
                # also reached at EOF (read returns ""), yielding ""
                c = self.read(1)
                if self.debug:
                    print("Escape: >%s<" % c)
                return self.escape.get(c, c)
        else:
            self.escaped = False
            return c

    def lexStart(self):
        # initial no-op state; subclasses override
        return (None, None)
							
								
								
									
										81
									
								
								copyparty/stolen/dnslib/ranges.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										81
									
								
								copyparty/stolen/dnslib/ranges.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,81 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| import sys | ||||
|  | ||||
# py2 has `long` for big ints and no distinct `bytes` type; choose the
# (int, bytes-like) type tuples that the validators below accept
if sys.version_info < (3,):
    int_types = (
        int,
        long,
    )
    byte_types = (str, bytearray)
else:
    int_types = (int,)
    byte_types = (bytes, bytearray)
|  | ||||
|  | ||||
def check_instance(name, val, types):
    """Raise ValueError unless *val* is an instance of *types*."""
    if isinstance(val, types):
        return
    t = "Attribute '%s' must be instance of %s [%s]"
    raise ValueError(t % (name, types, type(val)))
|  | ||||
|  | ||||
def check_bytes(name, val):
    """Validate that *val* is a bytes-like value (see byte_types)."""
    return check_instance(name, val, byte_types)
|  | ||||
|  | ||||
def range_property(attr, min, max):
    """Build a property that stores *attr* only for ints in [min, max]."""
    key = "_%s" % attr

    def getter(obj):
        return getattr(obj, key)

    def setter(obj, val):
        if not (isinstance(val, int_types) and min <= val <= max):
            raise ValueError(
                "Attribute '%s' must be between %d-%d [%s]" % (attr, min, max, val)
            )
        setattr(obj, key, val)

    return property(getter, setter)
|  | ||||
|  | ||||
def B(attr):
    """Unsigned 8-bit range property (0..255)."""
    return range_property(attr, 0, 0xFF)
|  | ||||
|  | ||||
def H(attr):
    """Unsigned 16-bit range property (0..65535)."""
    return range_property(attr, 0, 0xFFFF)
|  | ||||
|  | ||||
def I(attr):
    """Unsigned 32-bit range property (0..4294967295)."""
    return range_property(attr, 0, 0xFFFFFFFF)
|  | ||||
|  | ||||
def ntuple_range(attr, n, min, max):
    """Property for an n-tuple whose elements are ints within [min, max]."""
    key = "_%s" % attr

    def in_range(x):
        return isinstance(x, int_types) and min <= x <= max

    def getter(obj):
        return getattr(obj, key)

    def setter(obj, val):
        if len(val) != n:
            raise ValueError(
                "Attribute '%s' must be tuple with %d elements [%s]" % (attr, n, val)
            )
        if not all(map(in_range, val)):
            raise ValueError(
                "Attribute '%s' elements must be between %d-%d [%s]"
                % (attr, min, max, val)
            )
        setattr(obj, key, val)

    return property(getter, setter)
|  | ||||
|  | ||||
def IP4(attr):
    """Property holding a 4-tuple of octets (an IPv4 address)."""
    return ntuple_range(attr, 4, 0, 255)
|  | ||||
|  | ||||
def IP6(attr):
    """Property holding a 16-tuple of octets (an IPv6 address)."""
    return ntuple_range(attr, 16, 0, 255)
							
								
								
									
										5
									
								
								copyparty/stolen/ifaddr/README.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										5
									
								
								copyparty/stolen/ifaddr/README.md
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,5 @@ | ||||
`ifaddr` with py2.7 support, enabled by make-sfx.sh, which strips the py3 type hints using strip_hints and removes the `^if True:` blocks
|  | ||||
| L: BSD-2-Clause | ||||
| Copyright (c) 2014 Stefan C. Mueller | ||||
| https://github.com/pydron/ifaddr/ | ||||
							
								
								
									
										21
									
								
								copyparty/stolen/ifaddr/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										21
									
								
								copyparty/stolen/ifaddr/__init__.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,21 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| """ | ||||
| L: BSD-2-Clause | ||||
| Copyright (c) 2014 Stefan C. Mueller | ||||
| https://github.com/pydron/ifaddr/tree/0.2.0 | ||||
| """ | ||||
|  | ||||
| import os | ||||
|  | ||||
| from ._shared import IP, Adapter | ||||
|  | ||||
# select the platform backend at import time; any other OS is unsupported
if os.name == "nt":
    from ._win32 import get_adapters
elif os.name == "posix":
    from ._posix import get_adapters
else:
    raise RuntimeError("Unsupported Operating System: %s" % os.name)

__all__ = ["Adapter", "IP", "get_adapters"]
							
								
								
									
										84
									
								
								copyparty/stolen/ifaddr/_posix.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										84
									
								
								copyparty/stolen/ifaddr/_posix.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,84 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import collections | ||||
| import ctypes.util | ||||
| import os | ||||
| import socket | ||||
|  | ||||
| import ipaddress | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Iterable, Optional | ||||
|  | ||||
| from . import _shared as shared | ||||
| from ._shared import U | ||||
|  | ||||
|  | ||||
class ifaddrs(ctypes.Structure):
    # libc `struct ifaddrs`; fields are assigned below because the
    # struct is self-referential through ifa_next
    pass


# NOTE(review): only a leading subset of the platform struct is declared;
# fine for reading these fields through pointers, but do not instantiate
# or copy whole structs by value — confirm if extending
ifaddrs._fields_ = [
    ("ifa_next", ctypes.POINTER(ifaddrs)),
    ("ifa_name", ctypes.c_char_p),
    ("ifa_flags", ctypes.c_uint),
    ("ifa_addr", ctypes.POINTER(shared.sockaddr)),
    ("ifa_netmask", ctypes.POINTER(shared.sockaddr)),
]
|  | ||||
# Solaris keeps getifaddrs in libsocket; everywhere else it lives in libc
libc = ctypes.CDLL(ctypes.util.find_library("socket" if os.uname()[0] == "SunOS" else "c"), use_errno=True)  # type: ignore
|  | ||||
|  | ||||
def get_adapters(include_unconfigured: bool = False) -> Iterable[shared.Adapter]:
    """
    Enumerate network adapters via libc getifaddrs(3).

    Returns one Adapter per interface name, each holding the IPs found;
    interfaces without any configured address are included only when
    *include_unconfigured* is set.  Raises OSError if getifaddrs fails.
    """
    addr0 = addr = ctypes.POINTER(ifaddrs)()
    retval = libc.getifaddrs(ctypes.byref(addr))
    if retval != 0:
        eno = ctypes.get_errno()
        raise OSError(eno, os.strerror(eno))

    ips = collections.OrderedDict()

    def add_ip(adapter_name: str, ip: Optional[shared.IP]) -> None:
        # create the Adapter on first sight of the name, then attach ip
        if adapter_name not in ips:
            index = None  # type: Optional[int]
            try:
                # Mypy errors on this when the Windows CI runs:
                #     error: Module has no attribute "if_nametoindex"
                index = socket.if_nametoindex(adapter_name)  # type: ignore
            except (OSError, AttributeError):
                pass
            ips[adapter_name] = shared.Adapter(
                adapter_name, adapter_name, [], index=index
            )
        if ip is not None:
            ips[adapter_name].ips.append(ip)

    # walk the linked list of ifaddrs entries
    while addr:
        name = addr[0].ifa_name.decode(encoding="UTF-8")
        ip_addr = shared.sockaddr_to_ip(addr[0].ifa_addr)
        if ip_addr:
            # some entries leave the netmask family zeroed; borrow it from
            # the address so sockaddr_to_ip can parse the netmask
            if addr[0].ifa_netmask and not addr[0].ifa_netmask[0].sa_familiy:
                addr[0].ifa_netmask[0].sa_familiy = addr[0].ifa_addr[0].sa_familiy
            netmask = shared.sockaddr_to_ip(addr[0].ifa_netmask)
            if isinstance(netmask, tuple):
                # IPv6: netmask is an (ip, flowinfo, scope_id) tuple
                netmaskStr = U(netmask[0])
                prefixlen = shared.ipv6_prefixlength(ipaddress.IPv6Address(netmaskStr))
            else:
                if netmask is None:
                    t = "sockaddr_to_ip({}) returned None"
                    raise Exception(t.format(addr[0].ifa_netmask))

                # IPv4: let ipaddress derive the prefix from "0.0.0.0/<mask>"
                netmaskStr = U("0.0.0.0/" + netmask)
                prefixlen = ipaddress.IPv4Network(netmaskStr).prefixlen
            ip = shared.IP(ip_addr, prefixlen, name)
            add_ip(name, ip)
        else:
            if include_unconfigured:
                add_ip(name, None)
        addr = addr[0].ifa_next

    # addr was advanced during traversal; addr0 still holds the list head
    libc.freeifaddrs(addr0)

    return ips.values()
							
								
								
									
										203
									
								
								copyparty/stolen/ifaddr/_shared.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										203
									
								
								copyparty/stolen/ifaddr/_shared.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,203 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import ctypes | ||||
| import platform | ||||
| import socket | ||||
| import sys | ||||
|  | ||||
| import ipaddress | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Callable, List, Optional, Union | ||||
|  | ||||
|  | ||||
PY2 = sys.version_info < (3,)
if not PY2:
    # text-coercion helper; identity on py3, `unicode` on py2
    U: Callable[[str], str] = str
else:
    U = unicode  # noqa: F821  # pylint: disable=undefined-variable,self-assigning-variable
|  | ||||
|  | ||||
class Adapter(object):
    """
    A network interface device controller (NIC) as seen by the OS;
    one adapter may carry several IPs.

    Linux models IP aliasing (multiple IPs on one physical NIC) as
    extra 'virtual' adapters such as `eth0:1`, each represented by its
    own instance; every instance can hold both an IPv4 and an IPv6
    address.
    """

    def __init__(
        self, name: str, nice_name: str, ips: List["IP"], index: Optional[int] = None
    ) -> None:
        # unique system name: `eth0` / `eth0:1` on Linux,
        # a `{UUID}` string on Windows
        self.name = name
        # human-readable device name; same as `name` on Linux,
        # the device description on Windows
        self.nice_name = nice_name
        # ifaddr.IP instances in the order the system reported them
        self.ips = ips
        # adapter index used by some APIs (e.g. IPv6 multicast join)
        self.index = index

    def __repr__(self) -> str:
        return "Adapter(name=%r, nice_name=%r, ips=%r, index=%r)" % (
            self.name,
            self.nice_name,
            self.ips,
            self.index,
        )
|  | ||||
|  | ||||
if True:
    # Type of an IPv4 address (a string in "xxx.xxx.xxx.xxx" format)
    _IPv4Address = str

    # Type of an IPv6 address (a three-tuple `(ip, flowinfo, scope_id)`)
    # NOTE(review): `tuple[...]` subscription needs py3.9+ at runtime;
    # presumably the sfx build strips this for older pythons — confirm
    _IPv6Address = tuple[str, int, int]
|  | ||||
|  | ||||
class IP(object):
    """
    One IP address bound to an adapter.

    IPv4 addresses are stored as dotted-quad strings; IPv6 addresses as
    `(ip, flowinfo, scope_id)` tuples where `ip` is the colon-separated
    hex form.
    """

    def __init__(
        self, ip: Union[_IPv4Address, _IPv6Address], network_prefix: int, nice_name: str
    ) -> None:
        # the address itself (str for v4, 3-tuple for v6; see class doc)
        self.ip = ip
        # number of leading network bits (24 for a 255.255.255.0 mask)
        self.network_prefix = network_prefix
        # adapter name on Linux; on Windows the network-connection name
        # from the control panel
        self.nice_name = nice_name

    @property
    def is_IPv4(self) -> bool:
        """True when this is an IPv4 address (v6 is stored as a tuple)."""
        return not isinstance(self.ip, tuple)

    @property
    def is_IPv6(self) -> bool:
        """True when this is an IPv6 address (stored as a 3-tuple)."""
        return isinstance(self.ip, tuple)

    def __repr__(self) -> str:
        return "IP(ip=%r, network_prefix=%r, nice_name=%r)" % (
            self.ip,
            self.network_prefix,
            self.nice_name,
        )
|  | ||||
|  | ||||
if platform.system() == "Darwin" or "BSD" in platform.system():

    # BSD derived systems use marginally different structures
    # than either Linux or Windows.
    # I still keep it in `shared` since we can use
    # both structures equally.

    # NOTE: "sa_familiy" is a consistent misspelling of sa_family,
    # kept as-is since every access site uses the same spelling

    class sockaddr(ctypes.Structure):
        # generic socket-address header; BSD adds the sa_len byte
        _fields_ = [
            ("sa_len", ctypes.c_uint8),
            ("sa_familiy", ctypes.c_uint8),
            ("sa_data", ctypes.c_uint8 * 14),
        ]

    class sockaddr_in(ctypes.Structure):
        # AF_INET (IPv4) address
        _fields_ = [
            ("sa_len", ctypes.c_uint8),
            ("sa_familiy", ctypes.c_uint8),
            ("sin_port", ctypes.c_uint16),
            ("sin_addr", ctypes.c_uint8 * 4),
            ("sin_zero", ctypes.c_uint8 * 8),
        ]

    class sockaddr_in6(ctypes.Structure):
        # AF_INET6 (IPv6) address
        _fields_ = [
            ("sa_len", ctypes.c_uint8),
            ("sa_familiy", ctypes.c_uint8),
            ("sin6_port", ctypes.c_uint16),
            ("sin6_flowinfo", ctypes.c_uint32),
            ("sin6_addr", ctypes.c_uint8 * 16),
            ("sin6_scope_id", ctypes.c_uint32),
        ]

else:

    class sockaddr(ctypes.Structure):  # type: ignore
        # generic socket-address header; 2-byte family, no length byte
        _fields_ = [("sa_familiy", ctypes.c_uint16), ("sa_data", ctypes.c_uint8 * 14)]

    class sockaddr_in(ctypes.Structure):  # type: ignore
        # AF_INET (IPv4) address
        _fields_ = [
            ("sin_familiy", ctypes.c_uint16),
            ("sin_port", ctypes.c_uint16),
            ("sin_addr", ctypes.c_uint8 * 4),
            ("sin_zero", ctypes.c_uint8 * 8),
        ]

    class sockaddr_in6(ctypes.Structure):  # type: ignore
        # AF_INET6 (IPv6) address
        _fields_ = [
            ("sin6_familiy", ctypes.c_uint16),
            ("sin6_port", ctypes.c_uint16),
            ("sin6_flowinfo", ctypes.c_uint32),
            ("sin6_addr", ctypes.c_uint8 * 16),
            ("sin6_scope_id", ctypes.c_uint32),
        ]
|  | ||||
|  | ||||
def sockaddr_to_ip(
    sockaddr_ptr: "ctypes.pointer[sockaddr]",
) -> Optional[Union[_IPv4Address, _IPv6Address]]:
    """
    Convert a sockaddr pointer into either an IPv4 string, an IPv6
    (ip, flowinfo, scope_id) tuple, or None for a null pointer or an
    unsupported address family.
    """
    if sockaddr_ptr:
        if sockaddr_ptr[0].sa_familiy == socket.AF_INET:
            # reinterpret as sockaddr_in and render the packed 4 bytes
            ipv4 = ctypes.cast(sockaddr_ptr, ctypes.POINTER(sockaddr_in))
            ippacked = bytes(bytearray(ipv4[0].sin_addr))
            ip = U(ipaddress.ip_address(ippacked))
            return ip
        elif sockaddr_ptr[0].sa_familiy == socket.AF_INET6:
            # reinterpret as sockaddr_in6; keep flowinfo/scope_id alongside
            ipv6 = ctypes.cast(sockaddr_ptr, ctypes.POINTER(sockaddr_in6))
            flowinfo = ipv6[0].sin6_flowinfo
            ippacked = bytes(bytearray(ipv6[0].sin6_addr))
            ip = U(ipaddress.ip_address(ippacked))
            scope_id = ipv6[0].sin6_scope_id
            return (ip, flowinfo, scope_id)
    return None
|  | ||||
|  | ||||
def ipv6_prefixlength(address: ipaddress.IPv6Address) -> int:
    """Number of set bits in *address* (= prefix length for a netmask)."""
    return bin(int(address)).count("1")
							
								
								
									
										135
									
								
								copyparty/stolen/ifaddr/_win32.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										135
									
								
								copyparty/stolen/ifaddr/_win32.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,135 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import ctypes | ||||
| from ctypes import wintypes | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Iterable, List | ||||
|  | ||||
| from . import _shared as shared | ||||
|  | ||||
| NO_ERROR = 0 | ||||
| ERROR_BUFFER_OVERFLOW = 111 | ||||
| MAX_ADAPTER_NAME_LENGTH = 256 | ||||
| MAX_ADAPTER_DESCRIPTION_LENGTH = 128 | ||||
| MAX_ADAPTER_ADDRESS_LENGTH = 8 | ||||
| AF_UNSPEC = 0 | ||||
|  | ||||
|  | ||||
class SOCKET_ADDRESS(ctypes.Structure):
    # win32 SOCKET_ADDRESS: pointer to a sockaddr plus its byte length
    _fields_ = [
        ("lpSockaddr", ctypes.POINTER(shared.sockaddr)),
        ("iSockaddrLength", wintypes.INT),
    ]
|  | ||||
|  | ||||
class IP_ADAPTER_UNICAST_ADDRESS(ctypes.Structure):
    # fields assigned below; the struct is self-referential via Next
    pass


IP_ADAPTER_UNICAST_ADDRESS._fields_ = [
    ("Length", wintypes.ULONG),
    ("Flags", wintypes.DWORD),
    ("Next", ctypes.POINTER(IP_ADAPTER_UNICAST_ADDRESS)),
    ("Address", SOCKET_ADDRESS),
    ("PrefixOrigin", ctypes.c_uint),
    ("SuffixOrigin", ctypes.c_uint),
    ("DadState", ctypes.c_uint),
    ("ValidLifetime", wintypes.ULONG),
    ("PreferredLifetime", wintypes.ULONG),
    ("LeaseLifetime", wintypes.ULONG),
    ("OnLinkPrefixLength", ctypes.c_uint8),
]
|  | ||||
|  | ||||
class IP_ADAPTER_ADDRESSES(ctypes.Structure):
    # fields assigned below; the struct is self-referential via Next
    pass


# NOTE(review): only a leading subset of the full win32 struct is
# declared; safe for reading these fields from the API buffer, but
# confirm field order/alignment before adding members
IP_ADAPTER_ADDRESSES._fields_ = [
    ("Length", wintypes.ULONG),
    ("IfIndex", wintypes.DWORD),
    ("Next", ctypes.POINTER(IP_ADAPTER_ADDRESSES)),
    ("AdapterName", ctypes.c_char_p),
    ("FirstUnicastAddress", ctypes.POINTER(IP_ADAPTER_UNICAST_ADDRESS)),
    ("FirstAnycastAddress", ctypes.c_void_p),
    ("FirstMulticastAddress", ctypes.c_void_p),
    ("FirstDnsServerAddress", ctypes.c_void_p),
    ("DnsSuffix", ctypes.c_wchar_p),
    ("Description", ctypes.c_wchar_p),
    ("FriendlyName", ctypes.c_wchar_p),
]
|  | ||||
|  | ||||
# win32 IP Helper API; provides GetAdaptersAddresses
iphlpapi = ctypes.windll.LoadLibrary("Iphlpapi")  # type: ignore
|  | ||||
|  | ||||
def enumerate_interfaces_of_adapter(
    nice_name: str, address: IP_ADAPTER_UNICAST_ADDRESS
) -> Iterable[shared.IP]:
    """Yield one shared.IP for every unicast address linked from *address*."""
    # Iterate through linked list and fill list
    addresses = []  # type: List[IP_ADAPTER_UNICAST_ADDRESS]
    while True:
        addresses.append(address)
        if not address.Next:
            break
        address = address.Next[0]

    for address in addresses:
        ip = shared.sockaddr_to_ip(address.Address.lpSockaddr)
        if ip is None:
            t = "sockaddr_to_ip({}) returned None"
            raise Exception(t.format(address.Address.lpSockaddr))

        # OnLinkPrefixLength is the CIDR prefix for both v4 and v6
        network_prefix = address.OnLinkPrefixLength
        yield shared.IP(ip, network_prefix, nice_name)
|  | ||||
|  | ||||
def get_adapters(include_unconfigured: bool = False) -> Iterable[shared.Adapter]:
    """
    Enumerate adapters via win32 GetAdaptersAddresses(), growing the
    output buffer while the API reports overflow.  Adapters without a
    unicast address are included only when *include_unconfigured* is set.
    """
    # Call GetAdaptersAddresses() with error and buffer size handling

    addressbuffersize = wintypes.ULONG(15 * 1024)
    retval = ERROR_BUFFER_OVERFLOW
    while retval == ERROR_BUFFER_OVERFLOW:
        addressbuffer = ctypes.create_string_buffer(addressbuffersize.value)
        retval = iphlpapi.GetAdaptersAddresses(
            wintypes.ULONG(AF_UNSPEC),
            wintypes.ULONG(0),
            None,
            ctypes.byref(addressbuffer),
            ctypes.byref(addressbuffersize),
        )
    if retval != NO_ERROR:
        raise ctypes.WinError()  # type: ignore

    # Iterate through adapters fill array
    address_infos = []  # type: List[IP_ADAPTER_ADDRESSES]
    address_info = IP_ADAPTER_ADDRESSES.from_buffer(addressbuffer)
    while True:
        address_infos.append(address_info)
        if not address_info.Next:
            break
        address_info = address_info.Next[0]

    # Iterate through unicast addresses
    result = []  # type: List[shared.Adapter]
    for adapter_info in address_infos:

        # We don't expect non-ascii characters here, so encoding shouldn't matter
        name = adapter_info.AdapterName.decode()
        nice_name = adapter_info.Description
        index = adapter_info.IfIndex

        # NOTE(review): the Adapter gets Description as nice_name while
        # its IPs get FriendlyName — looks intentional, confirm upstream
        if adapter_info.FirstUnicastAddress:
            ips = enumerate_interfaces_of_adapter(
                adapter_info.FriendlyName, adapter_info.FirstUnicastAddress[0]
            )
            ips = list(ips)
            result.append(shared.Adapter(name, nice_name, ips, index=index))
        elif include_unconfigured:
            result.append(shared.Adapter(name, nice_name, [], index=index))

    return result
							
								
								
									
										591
									
								
								copyparty/stolen/qrcodegen.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										591
									
								
								copyparty/stolen/qrcodegen.py
									
									
									
									
									
										Normal file
									
								
							| @@ -0,0 +1,591 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| # modified copy of Project Nayuki's qrcodegen (MIT-licensed); | ||||
| # https://github.com/nayuki/QR-Code-generator/blob/daa3114/python/qrcodegen.py | ||||
| # the original ^ is extremely well commented so refer to that for explanations | ||||
|  | ||||
| # hacks: binary-only, auto-ecc, render, py2-compat | ||||
|  | ||||
| from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import collections | ||||
| import itertools | ||||
|  | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from collections.abc import Sequence | ||||
|  | ||||
|     from typing import Callable, List, Optional, Tuple, Union | ||||
|  | ||||
|  | ||||
def num_char_count_bits(ver: int) -> int:
    """Bit width of the byte-mode character-count field at QR version *ver*."""
    if (ver + 7) // 17:
        return 16
    return 8
|  | ||||
|  | ||||
class Ecc(object):
    """
    QR error-correction level.  `ordinal` indexes lookup tables;
    `formatbits` is the 2-bit value placed in the format information.
    """

    ordinal: int
    formatbits: int

    def __init__(self, i: int, fb: int) -> None:
        self.ordinal = i
        self.formatbits = fb

    # the four standard levels, assigned right after the class body
    LOW: "Ecc"
    MEDIUM: "Ecc"
    QUARTILE: "Ecc"
    HIGH: "Ecc"
|  | ||||
|  | ||||
# the four standard error-correction levels: (ordinal, formatbits)
Ecc.LOW = Ecc(0, 1)
Ecc.MEDIUM = Ecc(1, 0)
Ecc.QUARTILE = Ecc(2, 3)
Ecc.HIGH = Ecc(3, 2)
|  | ||||
|  | ||||
class QrSegment(object):
    """A byte-mode data segment: its character count plus raw data bits."""

    @staticmethod
    def make_seg(data: Union[bytes, Sequence[int]]) -> "QrSegment":
        """Wrap raw bytes as a byte-mode segment (8 bits per byte)."""
        bb = _BitBuffer()
        for b in data:
            bb.append_bits(b, 8)
        return QrSegment(len(data), bb)

    numchars: int  # number of bytes; not the same as the bit length
    bitdata: List[int]  # the data bits of this segment

    def __init__(self, numch: int, bitdata: Sequence[int]) -> None:
        if numch < 0:
            raise ValueError()
        self.numchars = numch
        self.bitdata = list(bitdata)

    @staticmethod
    def get_total_bits(segs: Sequence["QrSegment"], ver: int) -> Optional[int]:
        """
        Total encoded size of *segs* in bits at version *ver*, or None
        when some segment's length overflows its character-count field.
        """
        total = 0
        for seg in segs:
            ccbits: int = num_char_count_bits(ver)
            if seg.numchars >= (1 << ccbits):
                # segment length doesn't fit the count field at this version
                return None
            total += 4 + ccbits + len(seg.bitdata)
        return total
|  | ||||
|  | ||||
class QrCode(object):
    """An immutable square grid of dark/light QR modules."""

    @staticmethod
    def encode_binary(data: Union[bytes, Sequence[int]]) -> "QrCode":
        """Encode raw bytes as a single byte-mode segment QR code."""
        seg = QrSegment.make_seg(data)
        return QrCode.encode_segments([seg])
|  | ||||
|     @staticmethod | ||||
|     def encode_segments( | ||||
|         segs: Sequence[QrSegment], | ||||
|         ecl: Ecc = Ecc.LOW, | ||||
|         minver: int = 2, | ||||
|         maxver: int = 40, | ||||
|         mask: int = -1, | ||||
|     ) -> "QrCode": | ||||
|         for ver in range(minver, maxver + 1): | ||||
|             datacapacitybits: int = QrCode._get_num_data_codewords(ver, ecl) * 8 | ||||
|             datausedbits: Optional[int] = QrSegment.get_total_bits(segs, ver) | ||||
|             if (datausedbits is not None) and (datausedbits <= datacapacitybits): | ||||
|                 break | ||||
|  | ||||
|         assert datausedbits | ||||
|  | ||||
|         for newecl in ( | ||||
|             Ecc.MEDIUM, | ||||
|             Ecc.QUARTILE, | ||||
|             Ecc.HIGH, | ||||
|         ): | ||||
|             if datausedbits <= QrCode._get_num_data_codewords(ver, newecl) * 8: | ||||
|                 ecl = newecl | ||||
|  | ||||
|         # Concatenate all segments to create the data bit string | ||||
|         bb = _BitBuffer() | ||||
|         for seg in segs: | ||||
|             bb.append_bits(4, 4) | ||||
|             bb.append_bits(seg.numchars, num_char_count_bits(ver)) | ||||
|             bb.extend(seg.bitdata) | ||||
|         assert len(bb) == datausedbits | ||||
|  | ||||
|         # Add terminator and pad up to a byte if applicable | ||||
|         datacapacitybits = QrCode._get_num_data_codewords(ver, ecl) * 8 | ||||
|         assert len(bb) <= datacapacitybits | ||||
|         bb.append_bits(0, min(4, datacapacitybits - len(bb))) | ||||
|         bb.append_bits(0, -len(bb) % 8) | ||||
|         assert len(bb) % 8 == 0 | ||||
|  | ||||
|         # Pad with alternating bytes until data capacity is reached | ||||
|         for padbyte in itertools.cycle((0xEC, 0x11)): | ||||
|             if len(bb) >= datacapacitybits: | ||||
|                 break | ||||
|             bb.append_bits(padbyte, 8) | ||||
|  | ||||
|         # Pack bits into bytes in big endian | ||||
|         datacodewords = bytearray([0] * (len(bb) // 8)) | ||||
|         for (i, bit) in enumerate(bb): | ||||
|             datacodewords[i >> 3] |= bit << (7 - (i & 7)) | ||||
|  | ||||
|         return QrCode(ver, ecl, datacodewords, mask) | ||||
|  | ||||
    ver: int  # QR version, 1..40
    size: int  # w/h; 21..177 (ver * 4 + 17)
    ecclvl: Ecc  # error-correction level actually in effect
    mask: int  # 0..7
    modules: List[List[bool]]  # module grid, [row][col]; True = dark
    unmaskable: List[List[bool]]  # True where function patterns forbid masking

    def __init__(
        self,
        ver: int,
        ecclvl: Ecc,
        datacodewords: Union[bytes, Sequence[int]],
        msk: int,
    ) -> None:
        """Build the symbol: draw function patterns, interleave data+ECC,
        then pick (msk == -1) or apply the requested mask."""
        self.ver = ver
        self.size = ver * 4 + 17
        self.ecclvl = ecclvl

        self.modules = [[False] * self.size for _ in range(self.size)]
        self.unmaskable = [[False] * self.size for _ in range(self.size)]

        # Compute ECC, draw modules
        self._draw_function_patterns()
        allcodewords: bytes = self._add_ecc_and_interleave(bytearray(datacodewords))
        self._draw_codewords(allcodewords)

        if msk == -1:  # automask: try all 8 masks, keep the lowest-penalty one
            minpenalty: int = 1 << 32  # sentinel larger than any possible score
            for i in range(8):
                self._apply_mask(i)
                self._draw_format_bits(i)  # format bits participate in the score
                penalty = self._get_penalty_score()
                if penalty < minpenalty:
                    msk = i
                    minpenalty = penalty
                self._apply_mask(i)  # xor/undo (mask application is an involution)

        assert 0 <= msk <= 7
        self.mask = msk
        self._apply_mask(msk)  # Apply the final choice of mask
        self._draw_format_bits(msk)  # Overwrite old format bits
|  | ||||
|     def render(self, zoom=1, pad=4) -> str: | ||||
|         tab = self.modules | ||||
|         sz = self.size | ||||
|         if sz % 2 and zoom == 1: | ||||
|             tab.append([False] * sz) | ||||
|  | ||||
|         tab = [[False] * sz] * pad + tab + [[False] * sz] * pad | ||||
|         tab = [[False] * pad + x + [False] * pad for x in tab] | ||||
|  | ||||
|         rows: list[str] = [] | ||||
|         if zoom == 1: | ||||
|             for y in range(0, len(tab), 2): | ||||
|                 row = "" | ||||
|                 for x in range(len(tab[y])): | ||||
|                     v = 2 if tab[y][x] else 0 | ||||
|                     v += 1 if tab[y + 1][x] else 0 | ||||
|                     row += " ▄▀█"[v] | ||||
|                 rows.append(row) | ||||
|         else: | ||||
|             for tr in tab: | ||||
|                 row = "" | ||||
|                 for zb in tr: | ||||
|                     row += " █"[int(zb)] * 2 | ||||
|                 rows.append(row) | ||||
|  | ||||
|         return "\n".join(rows) | ||||
|  | ||||
|     def _draw_function_patterns(self) -> None: | ||||
|         # Draw horizontal and vertical timing patterns | ||||
|         for i in range(self.size): | ||||
|             self._set_function_module(6, i, i % 2 == 0) | ||||
|             self._set_function_module(i, 6, i % 2 == 0) | ||||
|  | ||||
|         # Draw 3 finder patterns (all corners except bottom right; overwrites some timing modules) | ||||
|         self._draw_finder_pattern(3, 3) | ||||
|         self._draw_finder_pattern(self.size - 4, 3) | ||||
|         self._draw_finder_pattern(3, self.size - 4) | ||||
|  | ||||
|         # Draw numerous alignment patterns | ||||
|         alignpatpos: List[int] = self._get_alignment_pattern_positions() | ||||
|         numalign: int = len(alignpatpos) | ||||
|         skips: Sequence[Tuple[int, int]] = ( | ||||
|             (0, 0), | ||||
|             (0, numalign - 1), | ||||
|             (numalign - 1, 0), | ||||
|         ) | ||||
|         for i in range(numalign): | ||||
|             for j in range(numalign): | ||||
|                 if (i, j) not in skips:  # avoid finder corners | ||||
|                     self._draw_alignment_pattern(alignpatpos[i], alignpatpos[j]) | ||||
|  | ||||
|         # draw config data with dummy mask value; ctor overwrites it | ||||
|         self._draw_format_bits(0) | ||||
|         self._draw_ver() | ||||
|  | ||||
    def _draw_format_bits(self, mask: int) -> None:
        """Draw both copies of the 15-bit format information encoding the
        current ECC level and the given mask (0..7)."""
        # Calculate error correction code and pack bits; ecclvl is uint2, mask is uint3
        data: int = self.ecclvl.formatbits << 3 | mask
        rem: int = data
        for _ in range(10):
            # BCH(15,5) remainder, generator polynomial 0x537
            rem = (rem << 1) ^ ((rem >> 9) * 0x537)
        bits: int = (data << 10 | rem) ^ 0x5412  # uint15; fixed XOR mask per spec
        assert bits >> 15 == 0

        # first copy: wrapped around the top-left finder pattern
        for i in range(0, 6):
            self._set_function_module(8, i, _get_bit(bits, i))
        self._set_function_module(8, 7, _get_bit(bits, 6))
        self._set_function_module(8, 8, _get_bit(bits, 7))
        self._set_function_module(7, 8, _get_bit(bits, 8))
        for i in range(9, 15):
            self._set_function_module(14 - i, 8, _get_bit(bits, i))

        # second copy: split between the other two finder patterns
        for i in range(0, 8):
            self._set_function_module(self.size - 1 - i, 8, _get_bit(bits, i))
        for i in range(8, 15):
            self._set_function_module(8, self.size - 15 + i, _get_bit(bits, i))
        self._set_function_module(8, self.size - 8, True)  # Always dark
|  | ||||
|     def _draw_ver(self) -> None: | ||||
|         if self.ver < 7: | ||||
|             return | ||||
|  | ||||
|         # Calculate error correction code and pack bits | ||||
|         rem: int = self.ver  # ver is uint6, 7..40 | ||||
|         for _ in range(12): | ||||
|             rem = (rem << 1) ^ ((rem >> 11) * 0x1F25) | ||||
|         bits: int = self.ver << 12 | rem  # uint18 | ||||
|         assert bits >> 18 == 0 | ||||
|  | ||||
|         # Draw two copies | ||||
|         for i in range(18): | ||||
|             bit: bool = _get_bit(bits, i) | ||||
|             a: int = self.size - 11 + i % 3 | ||||
|             b: int = i // 3 | ||||
|             self._set_function_module(a, b, bit) | ||||
|             self._set_function_module(b, a, bit) | ||||
|  | ||||
|     def _draw_finder_pattern(self, x: int, y: int) -> None: | ||||
|         for dy in range(-4, 5): | ||||
|             for dx in range(-4, 5): | ||||
|                 xx, yy = x + dx, y + dy | ||||
|                 if (0 <= xx < self.size) and (0 <= yy < self.size): | ||||
|                     # Chebyshev/infinity norm | ||||
|                     self._set_function_module( | ||||
|                         xx, yy, max(abs(dx), abs(dy)) not in (2, 4) | ||||
|                     ) | ||||
|  | ||||
|     def _draw_alignment_pattern(self, x: int, y: int) -> None: | ||||
|         for dy in range(-2, 3): | ||||
|             for dx in range(-2, 3): | ||||
|                 self._set_function_module(x + dx, y + dy, max(abs(dx), abs(dy)) != 1) | ||||
|  | ||||
|     def _set_function_module(self, x: int, y: int, isdark: bool) -> None: | ||||
|         self.modules[y][x] = isdark | ||||
|         self.unmaskable[y][x] = True | ||||
|  | ||||
    def _add_ecc_and_interleave(self, data: bytearray) -> bytes:
        """Split *data* into blocks, append Reed-Solomon ECC to each, and
        interleave all blocks' bytes into the final codeword sequence."""
        ver: int = self.ver
        assert len(data) == QrCode._get_num_data_codewords(ver, self.ecclvl)

        # Calculate parameter numbers
        numblocks: int = QrCode._NUM_ERROR_CORRECTION_BLOCKS[self.ecclvl.ordinal][ver]
        blockecclen: int = QrCode._ECC_CODEWORDS_PER_BLOCK[self.ecclvl.ordinal][ver]
        rawcodewords: int = QrCode._get_num_raw_data_modules(ver) // 8
        numshortblocks: int = numblocks - rawcodewords % numblocks
        shortblocklen: int = rawcodewords // numblocks

        # Split data into blocks and append ECC to each block;
        # the first numshortblocks blocks carry one data byte less.
        blocks: List[bytes] = []
        rsdiv: bytes = QrCode._reed_solomon_compute_divisor(blockecclen)
        k: int = 0
        for i in range(numblocks):
            dat: bytearray = data[
                k : k + shortblocklen - blockecclen + (0 if i < numshortblocks else 1)
            ]
            k += len(dat)
            ecc: bytes = QrCode._reed_solomon_compute_remainder(dat, rsdiv)
            if i < numshortblocks:
                dat.append(0)  # pad so all blocks have equal length for interleaving
            blocks.append(dat + ecc)
        assert k == len(data)

        # Interleave (not concatenate) the bytes from every block into a single sequence
        result = bytearray()
        for i in range(len(blocks[0])):
            for (j, blk) in enumerate(blocks):
                # Skip the padding byte in short blocks
                if (i != shortblocklen - blockecclen) or (j >= numshortblocks):
                    result.append(blk[i])
        assert len(result) == rawcodewords
        return result
|  | ||||
    def _draw_codewords(self, data: bytes) -> None:
        """Place the codeword bits in the standard zigzag order: column
        pairs scanned right-to-left, alternating upward and downward."""
        assert len(data) == QrCode._get_num_raw_data_modules(self.ver) // 8

        i: int = 0  # Bit index into the data
        for right in range(self.size - 1, 0, -2):
            # idx of right column in each column pair
            if right <= 6:
                right -= 1  # shift left past the vertical timing pattern (column 6)
            for vert in range(self.size):  # Vertical counter
                for j in range(2):
                    x: int = right - j
                    upward: bool = (right + 1) & 2 == 0  # scan direction alternates per pair
                    y: int = (self.size - 1 - vert) if upward else vert
                    if (not self.unmaskable[y][x]) and (i < len(data) * 8):
                        self.modules[y][x] = _get_bit(data[i >> 3], 7 - (i & 7))
                        i += 1
                    # any remainder bits (0..7) were set 0/false/light by ctor

        assert i == len(data) * 8
|  | ||||
|     def _apply_mask(self, mask: int) -> None: | ||||
|         masker: Callable[[int, int], int] = QrCode._MASK_PATTERNS[mask] | ||||
|         for y in range(self.size): | ||||
|             for x in range(self.size): | ||||
|                 self.modules[y][x] ^= (masker(x, y) == 0) and ( | ||||
|                     not self.unmaskable[y][x] | ||||
|                 ) | ||||
|  | ||||
    def _get_penalty_score(self) -> int:
        """Score the current grid against the four QR penalty rules
        (same-color runs, 2x2 blocks, finder-lookalikes, dark/light
        balance); lower is better.  Used by the ctor to pick a mask."""
        result: int = 0
        size: int = self.size
        modules: List[List[bool]] = self.modules

        # Adjacent modules in row having same color, and finder-like patterns
        for y in range(size):
            runcolor: bool = False
            runx: int = 0
            runhistory = collections.deque([0] * 7, 7)
            for x in range(size):
                if modules[y][x] == runcolor:
                    runx += 1
                    if runx == 5:
                        result += QrCode._PENALTY_N1
                    elif runx > 5:
                        result += 1  # each module past 5 costs one more point
                else:
                    self._finder_penalty_add_history(runx, runhistory)
                    if not runcolor:
                        # a light run just ended: check for 1:1:3:1:1 patterns
                        result += (
                            self._finder_penalty_count_patterns(runhistory)
                            * QrCode._PENALTY_N3
                        )
                    runcolor = modules[y][x]
                    runx = 1
            result += (
                self._finder_penalty_terminate_and_count(runcolor, runx, runhistory)
                * QrCode._PENALTY_N3
            )

        # Adjacent modules in column having same color, and finder-like patterns
        for x in range(size):
            runcolor = False
            runy = 0
            runhistory = collections.deque([0] * 7, 7)
            for y in range(size):
                if modules[y][x] == runcolor:
                    runy += 1
                    if runy == 5:
                        result += QrCode._PENALTY_N1
                    elif runy > 5:
                        result += 1
                else:
                    self._finder_penalty_add_history(runy, runhistory)
                    if not runcolor:
                        result += (
                            self._finder_penalty_count_patterns(runhistory)
                            * QrCode._PENALTY_N3
                        )
                    runcolor = modules[y][x]
                    runy = 1
            result += (
                self._finder_penalty_terminate_and_count(runcolor, runy, runhistory)
                * QrCode._PENALTY_N3
            )

        # 2*2 blocks of modules having same color
        for y in range(size - 1):
            for x in range(size - 1):
                if (
                    modules[y][x]
                    == modules[y][x + 1]
                    == modules[y + 1][x]
                    == modules[y + 1][x + 1]
                ):
                    result += QrCode._PENALTY_N2

        # Balance of dark and light modules
        dark: int = sum((1 if cell else 0) for row in modules for cell in row)
        total: int = size ** 2  # Note that size is odd, so dark/total != 1/2

        # Compute the smallest integer k >= 0 such that (45-5k)% <= dark/total <= (55+5k)%
        k: int = (abs(dark * 20 - total * 10) + total - 1) // total - 1
        assert 0 <= k <= 9
        result += k * QrCode._PENALTY_N4
        assert 0 <= result <= 2568888
        # ^ Non-tight upper bound based on default values of PENALTY_N1, ..., N4

        return result
|  | ||||
|     def _get_alignment_pattern_positions(self) -> List[int]: | ||||
|         ver: int = self.ver | ||||
|         if ver == 1: | ||||
|             return [] | ||||
|  | ||||
|         numalign: int = ver // 7 + 2 | ||||
|         step: int = ( | ||||
|             26 | ||||
|             if (ver == 32) | ||||
|             else (ver * 4 + numalign * 2 + 1) // (numalign * 2 - 2) * 2 | ||||
|         ) | ||||
|         result: List[int] = [ | ||||
|             (self.size - 7 - i * step) for i in range(numalign - 1) | ||||
|         ] + [6] | ||||
|         return list(reversed(result)) | ||||
|  | ||||
|     @staticmethod | ||||
|     def _get_num_raw_data_modules(ver: int) -> int: | ||||
|         result: int = (16 * ver + 128) * ver + 64 | ||||
|         if ver >= 2: | ||||
|             numalign: int = ver // 7 + 2 | ||||
|             result -= (25 * numalign - 10) * numalign - 55 | ||||
|             if ver >= 7: | ||||
|                 result -= 36 | ||||
|         assert 208 <= result <= 29648 | ||||
|         return result | ||||
|  | ||||
|     @staticmethod | ||||
|     def _get_num_data_codewords(ver: int, ecl: Ecc) -> int: | ||||
|         return ( | ||||
|             QrCode._get_num_raw_data_modules(ver) // 8 | ||||
|             - QrCode._ECC_CODEWORDS_PER_BLOCK[ecl.ordinal][ver] | ||||
|             * QrCode._NUM_ERROR_CORRECTION_BLOCKS[ecl.ordinal][ver] | ||||
|         ) | ||||
|  | ||||
|     @staticmethod | ||||
|     def _reed_solomon_compute_divisor(degree: int) -> bytes: | ||||
|         if not (1 <= degree <= 255): | ||||
|             raise ValueError("Degree out of range") | ||||
|  | ||||
|         # Polynomial coefficients are stored from highest to lowest power, excluding the leading term which is always 1. | ||||
|         # For example the polynomial x^3 + 255x^2 + 8x + 93 is stored as the uint8 array [255, 8, 93]. | ||||
|         result = bytearray([0] * (degree - 1) + [1])  # start with monomial x^0 | ||||
|  | ||||
|         # Compute the product polynomial (x - r^0) * (x - r^1) * (x - r^2) * ... * (x - r^{degree-1}), | ||||
|         # and drop the highest monomial term which is always 1x^degree. | ||||
|         # Note that r = 0x02, which is a generator element of this field GF(2^8/0x11D). | ||||
|         root: int = 1 | ||||
|         for _ in range(degree): | ||||
|             # Multiply the current product by (x - r^i) | ||||
|             for j in range(degree): | ||||
|                 result[j] = QrCode._reed_solomon_multiply(result[j], root) | ||||
|                 if j + 1 < degree: | ||||
|                     result[j] ^= result[j + 1] | ||||
|             root = QrCode._reed_solomon_multiply(root, 0x02) | ||||
|  | ||||
|         return result | ||||
|  | ||||
|     @staticmethod | ||||
|     def _reed_solomon_compute_remainder(data: bytes, divisor: bytes) -> bytes: | ||||
|         result = bytearray([0] * len(divisor)) | ||||
|         for b in data:  # Polynomial division | ||||
|             factor: int = b ^ result.pop(0) | ||||
|             result.append(0) | ||||
|             for (i, coef) in enumerate(divisor): | ||||
|                 result[i] ^= QrCode._reed_solomon_multiply(coef, factor) | ||||
|  | ||||
|         return result | ||||
|  | ||||
|     @staticmethod | ||||
|     def _reed_solomon_multiply(x: int, y: int) -> int: | ||||
|         if (x >> 8 != 0) or (y >> 8 != 0): | ||||
|             raise ValueError("Byte out of range") | ||||
|         z: int = 0  # Russian peasant multiplication | ||||
|         for i in reversed(range(8)): | ||||
|             z = (z << 1) ^ ((z >> 7) * 0x11D) | ||||
|             z ^= ((y >> i) & 1) * x | ||||
|         assert z >> 8 == 0 | ||||
|         return z | ||||
|  | ||||
|     def _finder_penalty_count_patterns(self, runhistory: collections.deque[int]) -> int: | ||||
|         n: int = runhistory[1] | ||||
|         assert n <= self.size * 3 | ||||
|         core: bool = ( | ||||
|             n > 0 | ||||
|             and (runhistory[2] == runhistory[4] == runhistory[5] == n) | ||||
|             and runhistory[3] == n * 3 | ||||
|         ) | ||||
|         return ( | ||||
|             1 if (core and runhistory[0] >= n * 4 and runhistory[6] >= n) else 0 | ||||
|         ) + (1 if (core and runhistory[6] >= n * 4 and runhistory[0] >= n) else 0) | ||||
|  | ||||
|     def _finder_penalty_terminate_and_count( | ||||
|         self, | ||||
|         currentruncolor: bool, | ||||
|         currentrunlength: int, | ||||
|         runhistory: collections.deque[int], | ||||
|     ) -> int: | ||||
|         if currentruncolor:  # Terminate dark run | ||||
|             self._finder_penalty_add_history(currentrunlength, runhistory) | ||||
|             currentrunlength = 0 | ||||
|         currentrunlength += self.size  # Add light border to final run | ||||
|         self._finder_penalty_add_history(currentrunlength, runhistory) | ||||
|         return self._finder_penalty_count_patterns(runhistory) | ||||
|  | ||||
|     def _finder_penalty_add_history( | ||||
|         self, currentrunlength: int, runhistory: collections.deque[int] | ||||
|     ) -> None: | ||||
|         if runhistory[0] == 0: | ||||
|             currentrunlength += self.size  # Add light border to initial run | ||||
|  | ||||
|         runhistory.appendleft(currentrunlength) | ||||
|  | ||||
    # Penalty weights for mask evaluation (rules N1..N4 of the QR spec).
    _PENALTY_N1: int = 3
    _PENALTY_N2: int = 3
    _PENALTY_N3: int = 40
    _PENALTY_N4: int = 10

    # Tables indexed as [ecc.ordinal][version]; index 0 holds a -1
    # placeholder so versions 1..40 can be used as direct indices.
    # fmt: off
    _ECC_CODEWORDS_PER_BLOCK: Sequence[Sequence[int]] = (
        (-1,  7, 10, 15, 20, 26, 18, 20, 24, 30, 18, 20, 24, 26, 30, 22, 24, 28, 30, 28, 28, 28, 28, 30, 30, 26, 28, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30),  # noqa: E241  # L
        (-1, 10, 16, 26, 18, 24, 16, 18, 22, 22, 26, 30, 22, 22, 24, 24, 28, 28, 26, 26, 26, 26, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28, 28),  # noqa: E241  # M
        (-1, 13, 22, 18, 26, 18, 24, 18, 22, 20, 24, 28, 26, 24, 20, 30, 24, 28, 28, 26, 30, 28, 30, 30, 30, 30, 28, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30),  # noqa: E241  # Q
        (-1, 17, 28, 22, 16, 22, 28, 26, 26, 24, 28, 24, 28, 22, 24, 24, 30, 28, 28, 26, 28, 30, 24, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30, 30))  # noqa: E241  # H

    _NUM_ERROR_CORRECTION_BLOCKS: Sequence[Sequence[int]] = (
        (-1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 4,  4,  4,  4,  4,  6,  6,  6,  6,  7,  8,  8,  9,  9, 10, 12, 12, 12, 13, 14, 15, 16, 17, 18, 19, 19, 20, 21, 22, 24, 25),  # noqa: E241  # L
        (-1, 1, 1, 1, 2, 2, 4, 4, 4, 5, 5,  5,  8,  9,  9, 10, 10, 11, 13, 14, 16, 17, 17, 18, 20, 21, 23, 25, 26, 28, 29, 31, 33, 35, 37, 38, 40, 43, 45, 47, 49),  # noqa: E241  # M
        (-1, 1, 1, 2, 2, 4, 4, 6, 6, 8, 8,  8, 10, 12, 16, 12, 17, 16, 18, 21, 20, 23, 23, 25, 27, 29, 34, 34, 35, 38, 40, 43, 45, 48, 51, 53, 56, 59, 62, 65, 68),  # noqa: E241  # Q
        (-1, 1, 1, 2, 4, 4, 4, 5, 6, 8, 8, 11, 11, 16, 16, 18, 16, 19, 21, 25, 25, 25, 34, 30, 32, 35, 37, 40, 42, 45, 48, 51, 54, 57, 60, 63, 66, 70, 74, 77, 81))  # noqa: E241  # H
    # fmt: on

    # The eight mask predicates; a non-function module at (x, y) is
    # inverted when its pattern evaluates to 0.
    _MASK_PATTERNS: Sequence[Callable[[int, int], int]] = (
        (lambda x, y: (x + y) % 2),
        (lambda x, y: y % 2),
        (lambda x, y: x % 3),
        (lambda x, y: (x + y) % 3),
        (lambda x, y: (x // 3 + y // 2) % 2),
        (lambda x, y: x * y % 2 + x * y % 3),
        (lambda x, y: (x * y % 2 + x * y % 3) % 2),
        (lambda x, y: ((x + y) % 2 + x * y % 3) % 2),
    )
|  | ||||
|  | ||||
| class _BitBuffer(list):  # type: ignore | ||||
|     def append_bits(self, val: int, n: int) -> None: | ||||
|         if (n < 0) or (val >> n != 0): | ||||
|             raise ValueError("Value out of range") | ||||
|  | ||||
|         self.extend(((val >> i) & 1) for i in reversed(range(n))) | ||||
|  | ||||
|  | ||||
| def _get_bit(x: int, i: int) -> bool: | ||||
|     return (x >> i) & 1 != 0 | ||||
|  | ||||
|  | ||||
class DataTooLongError(ValueError):
    """Raised when the payload cannot fit any permitted QR version."""

    pass
| @@ -16,28 +16,12 @@ import codecs | ||||
| import platform | ||||
| import sys | ||||
|  | ||||
| PY3 = sys.version_info[0] > 2 | ||||
| PY3 = sys.version_info > (3,) | ||||
| WINDOWS = platform.system() == "Windows" | ||||
| FS_ERRORS = "surrogateescape" | ||||
|  | ||||
| try: | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any | ||||
| except: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| def u(text: Any) -> str: | ||||
|     if PY3: | ||||
|         return text | ||||
|     else: | ||||
|         return text.decode("unicode_escape") | ||||
|  | ||||
|  | ||||
| def b(data: Any) -> bytes: | ||||
|     if PY3: | ||||
|         return data.encode("latin1") | ||||
|     else: | ||||
|         return data | ||||
|  | ||||
|  | ||||
| if PY3: | ||||
| @@ -171,9 +155,6 @@ def decodefilename(fn: bytes) -> str: | ||||
|  | ||||
|  | ||||
| FS_ENCODING = sys.getfilesystemencoding() | ||||
| # FS_ENCODING = "ascii"; fn = b("[abc\xff]"); encoded = u("[abc\udcff]") | ||||
| # FS_ENCODING = 'cp932'; fn = b('[abc\x81\x00]'); encoded = u('[abc\udc81\x00]') | ||||
| # FS_ENCODING = 'UTF-8'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]') | ||||
|  | ||||
|  | ||||
| if WINDOWS and not PY3: | ||||
|   | ||||
| @@ -6,12 +6,10 @@ from datetime import datetime | ||||
|  | ||||
| from .bos import bos | ||||
|  | ||||
| try: | ||||
| if True:  # pylint: disable=using-constant-test | ||||
|     from typing import Any, Generator, Optional | ||||
|  | ||||
|     from .util import NamedLogger | ||||
| except: | ||||
|     pass | ||||
|  | ||||
|  | ||||
| class StreamArc(object): | ||||
| @@ -25,7 +23,7 @@ class StreamArc(object): | ||||
|         self.fgen = fgen | ||||
|  | ||||
|     def gen(self) -> Generator[Optional[bytes], None, None]: | ||||
|         pass | ||||
|         raise Exception("override me") | ||||
|  | ||||
|  | ||||
| def errdesc(errors: list[tuple[str, str]]) -> tuple[dict[str, Any], list[str]]: | ||||
|   | ||||
Some files were not shown because too many files have changed in this diff Show More
		Reference in New Issue
	
	Block a user