mirror of https://github.com/9001/copyparty.git (synced 2025-10-24 08:33:58 +00:00)

Compare commits (402 commits)
(commit list: 402 entries, from c9b385db4b through 0a25a88a34; only the SHA1 column survived this export — the author, date, and commit-message columns were not captured, so the full table is omitted here)
.github/ISSUE_TEMPLATE/bug_report.md (vendored, new file, 40 lines)
@@ -0,0 +1,40 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: '9001'

---

NOTE:
all of the below are optional, consider them as inspiration, delete and rewrite at will, thx md


**Describe the bug**
a description of what the bug is

**To Reproduce**
List of steps to reproduce the issue, or, if it's hard to reproduce, then at least a detailed explanation of what you did to run into it

**Expected behavior**
a description of what you expected to happen

**Screenshots**
if applicable, add screenshots to help explain your problem, such as the kickass crashpage :^)

**Server details**
if the issue is possibly on the server-side, then mention some of the following:
* server OS / version:
* python version:
* copyparty arguments:
* filesystem (`lsblk -f` on linux):

**Client details**
if the issue is possibly on the client-side, then mention some of the following:
* the device type and model:
* OS version:
* browser version:

**Additional context**
any other context about the problem here
.github/ISSUE_TEMPLATE/feature_request.md (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: '9001'

---

all of the below are optional, consider them as inspiration, delete and rewrite at will

**is your feature request related to a problem? Please describe.**
a description of what the problem is, for example, `I'm always frustrated when [...]` or `Why is it not possible to [...]`

**Describe the idea / solution you'd like**
a description of what you want to happen

**Describe any alternatives you've considered**
a description of any alternative solutions or features you've considered

**Additional context**
add any other context or screenshots about the feature request here
.github/ISSUE_TEMPLATE/something-else.md (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
---
name: Something else
about: "┐(゚∀゚)┌"
title: ''
labels: ''
assignees: ''

---
.github/branch-rename.md (vendored, new file, 7 lines)
@@ -0,0 +1,7 @@
modernize your local checkout of the repo like so,
```sh
git branch -m master hovudstraum
git fetch origin
git branch -u origin/hovudstraum hovudstraum
git remote set-head origin -a
```
.gitignore (vendored; 5 lines changed)
@@ -9,6 +9,7 @@ buildenv/
build/
dist/
sfx/
py2/
.venv/

# ide
@@ -20,3 +21,7 @@ sfx/
# derived
copyparty/web/deps/
srv/

# state/logs
up.*.txt
.hist/
.vscode/launch.json (vendored; 2 lines changed)
@@ -17,7 +17,7 @@
"-mtp",
".bpm=f,bin/mtag/audio-bpm.py",
"-aed:wark",
"-vsrv::r:aed:cnodupe",
"-vsrv::r:rw,ed:c,dupe",
"-vdist:dist:r"
]
},
.vscode/settings.json (vendored; 1 line changed)
@@ -55,4 +55,5 @@
"py27"
],
"python.linting.enabled": true,
"python.pythonPath": "/usr/bin/python3"
}
CODE_OF_CONDUCT.md (new file, 24 lines)
@@ -0,0 +1,24 @@
in the words of Abraham Lincoln:

> Be excellent to each other... and... PARTY ON, DUDES!

more specifically I'll paraphrase some examples from a german automotive corporation as they cover all the bases without being too wordy

## Examples of unacceptable behavior
* intimidation, harassment, trolling
* insulting, derogatory, harmful or prejudicial comments
* posting private information without permission
* political or personal attacks

## Examples of expected behavior
* being nice, friendly, welcoming, inclusive, mindful and empathetic
* acting considerate, modest, respectful
* using polite and inclusive language
* criticize constructively and accept constructive criticism
* respect different points of view

## finally and even more specifically,
* parse opinions and feedback objectively without prejudice
* it's the message that matters, not who said it

aaand that's how you say `be nice` in a way that fills half a floppy w
CONTRIBUTING.md (new file, 3 lines)
@@ -0,0 +1,3 @@
* do something cool

really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight
@@ -1,3 +1,11 @@
# [`up2k.py`](up2k.py)
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
* file uploads, file-search, autoresume of aborted/broken uploads
* faster than browsers
* early beta, if something breaks just restart it


# [`copyparty-fuse.py`](copyparty-fuse.py)
* mount a copyparty server as a local filesystem (read-only)
* **supports Windows!** -- expect `194 MiB/s` sequential read
@@ -47,6 +55,7 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
* copyparty can Popen programs like these during file indexing to collect additional metadata


# [`dbtool.py`](dbtool.py)
upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty is incompatible with the old DB and automatically rebuilds the DB from scratch, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db

@@ -61,3 +70,9 @@ cd /mnt/nas/music/.hist
~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy key
~/src/copyparty/bin/dbtool.py -src up2k.*.v3 up2k.db -rm-mtp-flag -copy .bpm -vac
```


# [`prisonparty.sh`](prisonparty.sh)
* run copyparty in a chroot, preventing any accidental file access
* creates bindmounts for /bin, /lib, and so on, see `sysdirs=`
@@ -22,7 +22,7 @@ dependencies:

note:
you probably want to run this on windows clients:
https://github.com/9001/copyparty/blob/master/contrib/explorer-nothumbs-nofoldertypes.reg
https://github.com/9001/copyparty/blob/hovudstraum/contrib/explorer-nothumbs-nofoldertypes.reg

get server cert:
awk '/-BEGIN CERTIFICATE-/ {a=1} a; /-END CERTIFICATE-/{exit}' <(openssl s_client -connect 127.0.0.1:3923 </dev/null 2>/dev/null) >cert.pem
@@ -71,7 +71,7 @@ except:
elif MACOS:
libfuse = "install https://osxfuse.github.io/"
else:
libfuse = "apt install libfuse\n modprobe fuse"
libfuse = "apt install libfuse3-3\n modprobe fuse"

print(
"\n could not import fuse; these may help:"
@@ -393,15 +393,16 @@ class Gateway(object):

rsp = json.loads(rsp.decode("utf-8"))
ret = []
for is_dir, nodes in [[True, rsp["dirs"]], [False, rsp["files"]]]:
for statfun, nodes in [
[self.stat_dir, rsp["dirs"]],
[self.stat_file, rsp["files"]],
]:
for n in nodes:
fname = unquote(n["href"]).rstrip(b"/")
fname = fname.decode("wtf-8")
fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8")
if bad_good:
fname = enwin(fname)

fun = self.stat_dir if is_dir else self.stat_file
ret.append([fname, fun(n["ts"], n["sz"]), 0])
ret.append([fname, statfun(n["ts"], n["sz"]), 0])

return ret
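For context, the listing JSON that this hunk parses carries separate `dirs` and `files` arrays, each entry holding at least `href`, `ts`, and `sz` (the same shape the up2k client reads back from `?ls` later in this diff). A minimal sketch of consuming such a response outside the FUSE client, assuming a reachable server; the URL and volume name are hypothetical, and the field names are taken from the hunks in this diff rather than verified against the server:

```python
import json
from urllib.request import urlopen
from urllib.parse import unquote

# hypothetical server/volume; the ?ls parameter and the dirs/files/href/ts/sz
# fields are assumed from the hunk above and from bin/up2k.py in this diff
url = "http://127.0.0.1:3923/music/?ls"

with urlopen(url) as resp:
    rsp = json.load(resp)

entries = []
for is_dir, nodes in [(True, rsp["dirs"]), (False, rsp["files"])]:
    for n in nodes:
        # strip any query string, then url-decode, like the fuse client does
        name = unquote(n["href"].split("?")[0]).rstrip("/")
        entries.append((name, is_dir, n["ts"], n["sz"]))

for name, is_dir, ts, sz in entries:
    print("{} {:>12} {}".format("d" if is_dir else "-", sz, name))
```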
@@ -1,9 +1,18 @@
standalone programs which take an audio file as argument

**NOTE:** these all require `-e2ts` to be functional, meaning you need to do at least one of these: `apt install ffmpeg` or `pip3 install mutagen`

some of these rely on libraries which are not MIT-compatible

* [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
* [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3
* [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)

these do not have any problematic dependencies:

* [cksum.py](./cksum.py) computes various checksums
* [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
* [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty


# dependencies
@@ -18,7 +27,10 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ

# usage from copyparty

`copyparty -e2dsa -e2ts -mtp key=f,audio-key.py -mtp .bpm=f,audio-bpm.py`
`copyparty -e2dsa -e2ts` followed by any combination of these:
* `-mtp key=f,audio-key.py`
* `-mtp .bpm=f,audio-bpm.py`
* `-mtp ahash,vhash=f,media-hash.py`

* `f,` makes the detected value replace any existing values
* the `.` in `.bpm` indicates numeric value
@@ -29,6 +41,9 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ
## usage with volume-flags

instead of affecting all volumes, you can set the options for just one volume like so:
```
copyparty -v /mnt/nas/music:/music:r:cmtp=key=f,audio-key.py:cmtp=.bpm=f,audio-bpm.py:ce2dsa:ce2ts
```

`copyparty -v /mnt/nas/music:/music:r:c,e2dsa:c,e2ts` immediately followed by any combination of these:

* `:c,mtp=key=f,audio-key.py`
* `:c,mtp=.bpm=f,audio-bpm.py`
* `:c,mtp=ahash,vhash=f,media-hash.py`
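The parsers listed above appear to share one contract: copyparty runs them with the media file as the only argument and reads the detected tags from stdout, with multi-tag parsers printing a JSON object (this is how cksum.py and media-hash.py in this same diff behave). A minimal sketch of a custom parser following that pattern; the tag name `mylen` and the script name are made up for illustration:

```python
#!/usr/bin/env python3
# minimal -mtp parser sketch; copyparty invokes it as `mylen.py <file>` and
# reads a JSON object of tags from stdout, matching the multi-tag examples
# (cksum.py, media-hash.py) elsewhere in this folder

import os
import sys
import json


def main():
    fpath = sys.argv[1]  # the file being indexed

    # "mylen" is a hypothetical tag: the file size in bytes
    tags = {"mylen": os.path.getsize(fpath)}

    print(json.dumps(tags, indent=4))


if __name__ == "__main__":
    main()
```

such a parser would presumably be wired up like the examples above, e.g. `-mtp mylen=bin/mtag/mylen.py` globally or `:c,mtp=mylen=bin/mtag/mylen.py` per volume.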
@@ -25,6 +25,7 @@ def det(tf):
"-v", "fatal",
"-ss", "13",
"-y", "-i", fsenc(sys.argv[1]),
"-map", "0:a:0",
"-ac", "1",
"-ar", "22050",
"-t", "300",

@@ -28,6 +28,7 @@ def det(tf):
"-hide_banner",
"-v", "fatal",
"-y", "-i", fsenc(sys.argv[1]),
"-map", "0:a:0",
"-t", "300",
"-sample_fmt", "s16",
tf
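Both hunks adjust the ffmpeg invocation these detectors use to decode a uniform snippet of audio before analysis. A standalone sketch of the same decode step, assuming ffmpeg is on PATH; the function name and temp-file handling are illustrative, while the arguments mirror those visible in the first hunk:

```python
import sys
import tempfile
import subprocess as sp


def decode_snippet(media_path):
    """decode the first audio stream to a short mono 22.05 kHz wav for analysis"""
    tf = tempfile.NamedTemporaryFile(suffix=".wav", delete=False).name
    cmd = [
        "ffmpeg",
        "-nostdin", "-hide_banner",
        "-v", "fatal",
        "-ss", "13",              # skip ahead 13 seconds, as in the first hunk
        "-y", "-i", media_path,
        "-map", "0:a:0",          # first audio stream only
        "-ac", "1",               # mono
        "-ar", "22050",           # 22.05 kHz
        "-t", "300",              # at most 300 seconds
        tf,
    ]
    sp.check_call(cmd)
    return tf


if __name__ == "__main__":
    print(decode_snippet(sys.argv[1]))
```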
bin/mtag/cksum.py (new executable file, 89 lines)
@@ -0,0 +1,89 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import json
|
||||
import zlib
|
||||
import struct
|
||||
import base64
|
||||
import hashlib
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p
|
||||
|
||||
|
||||
"""
|
||||
calculates various checksums for uploads,
|
||||
usage: -mtp crc32,md5,sha1,sha256b=bin/mtag/cksum.py
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
config = "crc32 md5 md5b sha1 sha1b sha256 sha256b sha512/240 sha512b/240"
|
||||
# b suffix = base64 encoded
|
||||
# slash = truncate to n bits
|
||||
|
||||
known = {
|
||||
"md5": hashlib.md5,
|
||||
"sha1": hashlib.sha1,
|
||||
"sha256": hashlib.sha256,
|
||||
"sha512": hashlib.sha512,
|
||||
}
|
||||
config = config.split()
|
||||
hashers = {
|
||||
k: v()
|
||||
for k, v in known.items()
|
||||
if k in [x.split("/")[0].rstrip("b") for x in known]
|
||||
}
|
||||
crc32 = 0 if "crc32" in config else None
|
||||
|
||||
with open(fsenc(sys.argv[1]), "rb", 512 * 1024) as f:
|
||||
while True:
|
||||
buf = f.read(64 * 1024)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
for x in hashers.values():
|
||||
x.update(buf)
|
||||
|
||||
if crc32 is not None:
|
||||
crc32 = zlib.crc32(buf, crc32)
|
||||
|
||||
ret = {}
|
||||
for s in config:
|
||||
alg = s.split("/")[0]
|
||||
b64 = alg.endswith("b")
|
||||
alg = alg.rstrip("b")
|
||||
if alg in hashers:
|
||||
v = hashers[alg].digest()
|
||||
elif alg == "crc32":
|
||||
v = crc32
|
||||
if v < 0:
|
||||
v &= 2 ** 32 - 1
|
||||
v = struct.pack(">L", v)
|
||||
else:
|
||||
raise Exception("what is {}".format(s))
|
||||
|
||||
if "/" in s:
|
||||
v = v[: int(int(s.split("/")[1]) / 8)]
|
||||
|
||||
if b64:
|
||||
v = base64.b64encode(v).decode("ascii").rstrip("=")
|
||||
else:
|
||||
try:
|
||||
v = v.hex()
|
||||
except:
|
||||
import binascii
|
||||
|
||||
v = binascii.hexlify(v)
|
||||
|
||||
ret[s] = v
|
||||
|
||||
print(json.dumps(ret, indent=4))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -4,7 +4,8 @@ set -e

# install dependencies for audio-*.py
#
# linux: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf
# linux/alpine: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf cmake
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
# win64: requires msys2-mingw64 environment
# macos: requires macports
#
bin/mtag/media-hash.py (new file, 73 lines)
@@ -0,0 +1,73 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import re
|
||||
import sys
|
||||
import json
|
||||
import time
|
||||
import base64
|
||||
import hashlib
|
||||
import subprocess as sp
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p
|
||||
|
||||
|
||||
"""
|
||||
dep: ffmpeg
|
||||
"""
|
||||
|
||||
|
||||
def det():
|
||||
# fmt: off
|
||||
cmd = [
|
||||
"ffmpeg",
|
||||
"-nostdin",
|
||||
"-hide_banner",
|
||||
"-v", "fatal",
|
||||
"-i", fsenc(sys.argv[1]),
|
||||
"-f", "framemd5",
|
||||
"-"
|
||||
]
|
||||
# fmt: on
|
||||
|
||||
p = sp.Popen(cmd, stdout=sp.PIPE)
|
||||
# ps = io.TextIOWrapper(p.stdout, encoding="utf-8")
|
||||
ps = p.stdout
|
||||
|
||||
chans = {}
|
||||
for ln in ps:
|
||||
if ln.startswith(b"#stream#"):
|
||||
break
|
||||
|
||||
m = re.match(r"^#media_type ([0-9]): ([a-zA-Z])", ln.decode("utf-8"))
|
||||
if m:
|
||||
chans[m.group(1)] = m.group(2)
|
||||
|
||||
hashers = [hashlib.sha512(), hashlib.sha512()]
|
||||
for ln in ps:
|
||||
n = int(ln[:1])
|
||||
v = ln.rsplit(b",", 1)[-1].strip()
|
||||
hashers[n].update(v)
|
||||
|
||||
r = {}
|
||||
for k, v in chans.items():
|
||||
dg = hashers[int(k)].digest()[:12]
|
||||
dg = base64.urlsafe_b64encode(dg).decode("ascii")
|
||||
r[v[0].lower() + "hash"] = dg
|
||||
|
||||
print(json.dumps(r, indent=4))
|
||||
|
||||
|
||||
def main():
|
||||
try:
|
||||
det()
|
||||
except:
|
||||
pass # mute
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
bin/mtag/res/yt-ipr.conf (new file, 39 lines)
@@ -0,0 +1,39 @@
# example config file to use copyparty as a youtube manifest collector,
# use with copyparty like: python copyparty.py -c yt-ipr.conf
#
# see docs/example.conf for a better explanation of the syntax, but
# newlines are block separators, so adding blank lines inside a volume definition is bad
# (use comments as separators instead)


# create user ed, password wark
u ed:wark


# create a volume at /ytm which stores files at ./srv/ytm
./srv/ytm
/ytm
# write-only, but read-write for user ed
w
rw ed
# rescan the volume on startup
c e2dsa
# collect tags from all new files since last scan
c e2ts
# optionally enable compression to make the files 50% smaller
c pk
# only allow uploads which are between 16k and 1m large
c sz=16k-1m
# allow up to 10 uploads over 5 minutes from each ip
c maxn=10,300
# move uploads into subfolders: YEAR-MONTH / DAY-HOUR / <upload>
c rotf=%Y-%m/%d-%H
# delete uploads when they are 24 hours old
c lifetime=86400
# add the parser and tell copyparty what tags it can expect from it
c mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
# decide which tags we want to index and in what order
c mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires


# create any other volumes you'd like down here, or merge this with an existing config file
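The companion userscript (the next file in this diff) captures the player-response JSON in the browser and uploads it to the /ytm volume with a plain `fetch(..., { method: "PUT" })`. The same upload can be done from Python; a rough sketch, reusing the volume and password from the config above and assuming the server is reachable over plain http (the userscript itself targets https), with the saved-file path being purely illustrative:

```python
import json
import time
import requests

# volume URL and password from yt-ipr.conf above; the ?pw= parameter and the
# cache-busting &_= timestamp mirror what the userscript sends
SERVER = "http://127.0.0.1:3923/ytm?pw=wark"


def send_player_response(pd):
    """upload one captured initial-player-response (a dict) to copyparty"""
    body = json.dumps(pd).encode("utf-8")
    url = "{}&_={}".format(SERVER, int(time.time() * 1000))
    r = requests.put(url, data=body)
    r.raise_for_status()
    print("uploaded {} bytes".format(len(body)))


if __name__ == "__main__":
    # hypothetical example: replay a previously saved player-response from disk
    with open("player-response.json", "r", encoding="utf-8") as f:
        send_player_response(json.load(f))
```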
bin/mtag/res/yt-ipr.user.js (new file, 47 lines)
@@ -0,0 +1,47 @@
|
||||
// ==UserScript==
|
||||
// @name youtube-playerdata-hub
|
||||
// @match https://youtube.com/*
|
||||
// @match https://*.youtube.com/*
|
||||
// @version 1.0
|
||||
// @grant GM_addStyle
|
||||
// ==/UserScript==
|
||||
|
||||
function main() {
|
||||
var server = 'https://127.0.0.1:3923/ytm?pw=wark',
|
||||
interval = 60; // sec
|
||||
|
||||
var sent = {};
|
||||
function send(txt, mf_url, desc) {
|
||||
if (sent[mf_url])
|
||||
return;
|
||||
|
||||
fetch(server + '&_=' + Date.now(), { method: "PUT", body: txt });
|
||||
console.log('[yt-pdh] yeet %d bytes, %s', txt.length, desc);
|
||||
sent[mf_url] = 1;
|
||||
}
|
||||
|
||||
function collect() {
|
||||
try {
|
||||
var pd = document.querySelector('ytd-watch-flexy');
|
||||
if (!pd)
|
||||
return console.log('[yt-pdh] no video found');
|
||||
|
||||
pd = pd.playerData;
|
||||
var mu = pd.streamingData.dashManifestUrl || pd.streamingData.hlsManifestUrl;
|
||||
if (!mu || !mu.length)
|
||||
return console.log('[yt-pdh] no manifest found');
|
||||
|
||||
var desc = pd.videoDetails.videoId + ', ' + pd.videoDetails.title;
|
||||
send(JSON.stringify(pd), mu, desc);
|
||||
}
|
||||
catch (ex) {
|
||||
console.log("[yt-pdh]", ex);
|
||||
}
|
||||
}
|
||||
setInterval(collect, interval * 1000);
|
||||
}
|
||||
|
||||
var scr = document.createElement('script');
|
||||
scr.textContent = '(' + main.toString() + ')();';
|
||||
(document.head || document.getElementsByTagName('head')[0]).appendChild(scr);
|
||||
console.log('[yt-pdh] a');
|
||||
bin/mtag/wget.py (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
use copyparty as a file downloader by POSTing URLs as
|
||||
application/x-www-form-urlencoded (for example using the
|
||||
message/pager function on the website)
|
||||
|
||||
example copyparty config to use this:
|
||||
--urlform save,get -vsrv/wget:wget:rwmd,ed:c,e2ts,mtp=title=ebin,t300,ad,bin/mtag/wget.py
|
||||
|
||||
explained:
|
||||
for realpath srv/wget (served at /wget) with read-write-modify-delete for ed,
|
||||
enable file analysis on upload (e2ts),
|
||||
use mtp plugin "bin/mtag/wget.py" to provide metadata tag "title",
|
||||
do this on all uploads with the file extension "bin",
|
||||
t300 = 300 seconds timeout for each dwonload,
|
||||
ad = parse file regardless if FFmpeg thinks it is audio or not
|
||||
|
||||
PS: this requires e2ts to be functional,
|
||||
meaning you need to do at least one of these:
|
||||
* apt install ffmpeg
|
||||
* pip3 install mutagen
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
def main():
|
||||
fp = os.path.abspath(sys.argv[1])
|
||||
fdir = os.path.dirname(fp)
|
||||
fname = os.path.basename(fp)
|
||||
if not fname.startswith("put-") or not fname.endswith(".bin"):
|
||||
raise Exception("not a post file")
|
||||
|
||||
buf = b""
|
||||
with open(fp, "rb") as f:
|
||||
while True:
|
||||
b = f.read(4096)
|
||||
buf += b
|
||||
if len(buf) > 4096:
|
||||
raise Exception("too big")
|
||||
|
||||
if not b:
|
||||
break
|
||||
|
||||
if not buf:
|
||||
raise Exception("file is empty")
|
||||
|
||||
buf = unquote(buf.replace(b"+", b" "))
|
||||
url = buf.decode("utf-8")
|
||||
|
||||
if not url.startswith("msg="):
|
||||
raise Exception("does not start with msg=")
|
||||
|
||||
url = url[4:]
|
||||
if "://" not in url:
|
||||
url = "https://" + url
|
||||
|
||||
os.chdir(fdir)
|
||||
|
||||
name = url.split("?")[0].split("/")[-1]
|
||||
tfn = "-- DOWNLOADING " + name
|
||||
open(tfn, "wb").close()
|
||||
|
||||
cmd = ["wget", "--trust-server-names", "--", url]
|
||||
|
||||
try:
|
||||
sp.check_call(cmd)
|
||||
|
||||
# OPTIONAL:
|
||||
# on success, delete the .bin file which contains the URL
|
||||
os.unlink(fp)
|
||||
except:
|
||||
open("-- FAILED TO DONWLOAD " + name, "wb").close()
|
||||
|
||||
os.unlink(tfn)
|
||||
print(url)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
bin/mtag/yt-ipr.py (new file, 198 lines)
@@ -0,0 +1,198 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import gzip
|
||||
import json
|
||||
import base64
|
||||
import string
|
||||
import urllib.request
|
||||
from datetime import datetime
|
||||
|
||||
"""
|
||||
youtube initial player response
|
||||
|
||||
it's probably best to use this through a config file; see res/yt-ipr.conf
|
||||
|
||||
but if you want to use plain arguments instead then:
|
||||
-v srv/ytm:ytm:w:rw,ed
|
||||
:c,e2ts,e2dsa
|
||||
:c,sz=16k-1m:c,maxn=10,300:c,rotf=%Y-%m/%d-%H
|
||||
:c,mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
|
||||
:c,mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires
|
||||
|
||||
see res/yt-ipr.user.js for the example userscript to go with this
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
try:
|
||||
with gzip.open(sys.argv[1], "rt", encoding="utf-8", errors="replace") as f:
|
||||
txt = f.read()
|
||||
except:
|
||||
with open(sys.argv[1], "r", encoding="utf-8", errors="replace") as f:
|
||||
txt = f.read()
|
||||
|
||||
txt = "{" + txt.split("{", 1)[1]
|
||||
|
||||
try:
|
||||
pd = json.loads(txt)
|
||||
except json.decoder.JSONDecodeError as ex:
|
||||
pd = json.loads(txt[: ex.pos])
|
||||
|
||||
# print(json.dumps(pd, indent=2))
|
||||
|
||||
if "videoDetails" in pd:
|
||||
parse_youtube(pd)
|
||||
else:
|
||||
parse_freg(pd)
|
||||
|
||||
|
||||
def get_expiration(url):
|
||||
et = re.search(r"[?&]expire=([0-9]+)", url).group(1)
|
||||
et = datetime.utcfromtimestamp(int(et))
|
||||
return et.strftime("%Y-%m-%d, %H:%M")
|
||||
|
||||
|
||||
def parse_youtube(pd):
|
||||
vd = pd["videoDetails"]
|
||||
sd = pd["streamingData"]
|
||||
|
||||
et = sd["adaptiveFormats"][0]["url"]
|
||||
et = get_expiration(et)
|
||||
|
||||
mf = []
|
||||
if "dashManifestUrl" in sd:
|
||||
mf.append("dash")
|
||||
if "hlsManifestUrl" in sd:
|
||||
mf.append("hls")
|
||||
|
||||
r = {
|
||||
"yt-id": vd["videoId"],
|
||||
"yt-title": vd["title"],
|
||||
"yt-author": vd["author"],
|
||||
"yt-channel": vd["channelId"],
|
||||
"yt-views": vd["viewCount"],
|
||||
"yt-private": vd["isPrivate"],
|
||||
# "yt-expires": sd["expiresInSeconds"],
|
||||
"yt-manifest": ",".join(mf),
|
||||
"yt-expires": et,
|
||||
}
|
||||
print(json.dumps(r))
|
||||
|
||||
freg_conv(pd)
|
||||
|
||||
|
||||
def parse_freg(pd):
|
||||
md = pd["metadata"]
|
||||
r = {
|
||||
"yt-id": md["id"],
|
||||
"yt-title": md["title"],
|
||||
"yt-author": md["channelName"],
|
||||
"yt-channel": md["channelURL"].strip("/").split("/")[-1],
|
||||
"yt-expires": get_expiration(list(pd["video"].values())[0]),
|
||||
}
|
||||
print(json.dumps(r))
|
||||
|
||||
|
||||
def freg_conv(pd):
|
||||
# based on getURLs.js v1.5 (2021-08-07)
|
||||
# fmt: off
|
||||
priority = {
|
||||
"video": [
|
||||
337, 315, 266, 138, # 2160p60
|
||||
313, 336, # 2160p
|
||||
308, # 1440p60
|
||||
271, 264, # 1440p
|
||||
335, 303, 299, # 1080p60
|
||||
248, 169, 137, # 1080p
|
||||
334, 302, 298, # 720p60
|
||||
247, 136 # 720p
|
||||
],
|
||||
"audio": [
|
||||
251, 141, 171, 140, 250, 249, 139
|
||||
]
|
||||
}
|
||||
|
||||
vid_id = pd["videoDetails"]["videoId"]
|
||||
chan_id = pd["videoDetails"]["channelId"]
|
||||
|
||||
try:
|
||||
thumb_url = pd["microformat"]["playerMicroformatRenderer"]["thumbnail"]["thumbnails"][0]["url"]
|
||||
start_ts = pd["microformat"]["playerMicroformatRenderer"]["liveBroadcastDetails"]["startTimestamp"]
|
||||
except:
|
||||
thumb_url = f"https://img.youtube.com/vi/{vid_id}/maxresdefault.jpg"
|
||||
start_ts = ""
|
||||
|
||||
# fmt: on
|
||||
|
||||
metadata = {
|
||||
"title": pd["videoDetails"]["title"],
|
||||
"id": vid_id,
|
||||
"channelName": pd["videoDetails"]["author"],
|
||||
"channelURL": "https://www.youtube.com/channel/" + chan_id,
|
||||
"description": pd["videoDetails"]["shortDescription"],
|
||||
"thumbnailUrl": thumb_url,
|
||||
"startTimestamp": start_ts,
|
||||
}
|
||||
|
||||
if [x for x in vid_id if x not in string.ascii_letters + string.digits + "_-"]:
|
||||
print(f"malicious json", file=sys.stderr)
|
||||
return
|
||||
|
||||
basepath = os.path.dirname(sys.argv[1])
|
||||
|
||||
thumb_fn = f"{basepath}/{vid_id}.jpg"
|
||||
tmp_fn = f"{thumb_fn}.{os.getpid()}"
|
||||
if not os.path.exists(thumb_fn) and (
|
||||
thumb_url.startswith("https://img.youtube.com/vi/")
|
||||
or thumb_url.startswith("https://i.ytimg.com/vi/")
|
||||
):
|
||||
try:
|
||||
with urllib.request.urlopen(thumb_url) as fi:
|
||||
with open(tmp_fn, "wb") as fo:
|
||||
fo.write(fi.read())
|
||||
|
||||
os.rename(tmp_fn, thumb_fn)
|
||||
except:
|
||||
if os.path.exists(tmp_fn):
|
||||
os.unlink(tmp_fn)
|
||||
|
||||
try:
|
||||
with open(thumb_fn, "rb") as f:
|
||||
thumb = base64.b64encode(f.read()).decode("ascii")
|
||||
except:
|
||||
thumb = "/9j/4AAQSkZJRgABAQEASABIAAD/2wBDAAMCAgICAgMCAgIDAwMDBAYEBAQEBAgGBgUGCQgKCgkICQkKDA8MCgsOCwkJDRENDg8QEBEQCgwSExIQEw8QEBD/yQALCAABAAEBAREA/8wABgAQEAX/2gAIAQEAAD8A0s8g/9k="
|
||||
|
||||
metadata["thumbnail"] = "data:image/jpeg;base64," + thumb
|
||||
|
||||
ret = {
|
||||
"metadata": metadata,
|
||||
"version": "1.5",
|
||||
"createTime": datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%SZ"),
|
||||
}
|
||||
|
||||
for stream, itags in priority.items():
|
||||
for itag in itags:
|
||||
url = None
|
||||
for afmt in pd["streamingData"]["adaptiveFormats"]:
|
||||
if itag == afmt["itag"]:
|
||||
url = afmt["url"]
|
||||
break
|
||||
|
||||
if url:
|
||||
ret[stream] = {itag: url}
|
||||
break
|
||||
|
||||
fn = f"{basepath}/{vid_id}.urls.json"
|
||||
with open(fn, "w", encoding="utf-8", errors="replace") as f:
|
||||
f.write(json.dumps(ret, indent=4))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except:
|
||||
# raise
|
||||
pass
|
||||
bin/prisonparty.sh (new file, 99 lines)
@@ -0,0 +1,99 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# runs copyparty (or any other program really) in a chroot
|
||||
#
|
||||
# assumption: these directories, and everything within, are owned by root
|
||||
sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr )
|
||||
|
||||
|
||||
# error-handler
|
||||
help() { cat <<'EOF'
|
||||
|
||||
usage:
|
||||
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- copyparty-sfx.py [...]"
|
||||
|
||||
example:
|
||||
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- copyparty-sfx.py -v /mnt/nas/music::rwmd"
|
||||
|
||||
EOF
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
# read arguments
|
||||
trap help EXIT
|
||||
jail="$(realpath "$1")"; shift
|
||||
uid="$1"; shift
|
||||
gid="$1"; shift
|
||||
|
||||
vols=()
|
||||
while true; do
|
||||
v="$1"; shift
|
||||
[ "$v" = -- ] && break # end of volumes
|
||||
[ "$#" -eq 0 ] && break # invalid usage
|
||||
vols+=( "$(realpath "$v")" )
|
||||
done
|
||||
pybin="$1"; shift
|
||||
pybin="$(realpath "$pybin")"
|
||||
cpp="$1"; shift
|
||||
cpp="$(realpath "$cpp")"
|
||||
cppdir="$(dirname "$cpp")"
|
||||
trap - EXIT
|
||||
|
||||
|
||||
# debug/vis
|
||||
echo
|
||||
echo "chroot-dir = $jail"
|
||||
echo "user:group = $uid:$gid"
|
||||
echo " copyparty = $cpp"
|
||||
echo
|
||||
printf '\033[33m%s\033[0m\n' "copyparty can access these folders and all their subdirectories:"
|
||||
for v in "${vols[@]}"; do
|
||||
printf '\033[36m ├─\033[0m %s \033[36m ── added by (You)\033[0m\n' "$v"
|
||||
done
|
||||
printf '\033[36m ├─\033[0m %s \033[36m ── where the copyparty binary is\033[0m\n' "$cppdir"
|
||||
printf '\033[36m ╰─\033[0m %s \033[36m ── the folder you are currently in\033[0m\n' "$PWD"
|
||||
vols+=("$cppdir" "$PWD")
|
||||
echo
|
||||
|
||||
|
||||
# remove any trailing slashes
|
||||
jail="${jail%/}"
|
||||
cppdir="${cppdir%/}"
|
||||
|
||||
|
||||
# bind-mount system directories and volumes
|
||||
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | LC_ALL=C sort |
|
||||
while IFS= read -r v; do
|
||||
[ -e "$v" ] || {
|
||||
# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v"
|
||||
continue
|
||||
}
|
||||
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
|
||||
i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b)
|
||||
[ $i1 = $i2 ] && continue
|
||||
|
||||
mkdir -p "$jail$v"
|
||||
mount --bind "$v" "$jail$v"
|
||||
done
|
||||
|
||||
|
||||
# create a tmp
|
||||
mkdir -p "$jail/tmp"
|
||||
chmod 777 "$jail/tmp"
|
||||
|
||||
|
||||
# run copyparty
|
||||
/sbin/chroot --userspec=$uid:$gid "$jail" "$pybin" "$cpp" "$@" && rv=0 || rv=$?
|
||||
|
||||
|
||||
# cleanup if not in use
|
||||
lsof "$jail" | grep -qF "$jail" &&
|
||||
echo "chroot is in use, will not cleanup" ||
|
||||
{
|
||||
mount | grep -qF " on $jail" |
|
||||
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
||||
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
|
||||
}
|
||||
exit $rv
|
||||
bin/up2k.py (new executable file, 799 lines)
@@ -0,0 +1,799 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
"""
|
||||
up2k.py: upload to copyparty
|
||||
2021-10-29, v0.10, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
|
||||
|
||||
- dependencies: requests
|
||||
- supports python 2.6, 2.7, and 3.3 through 3.10
|
||||
|
||||
- almost zero error-handling
|
||||
- but if something breaks just try again and it'll autoresume
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
import math
|
||||
import time
|
||||
import atexit
|
||||
import signal
|
||||
import base64
|
||||
import hashlib
|
||||
import argparse
|
||||
import platform
|
||||
import threading
|
||||
import requests
|
||||
import datetime
|
||||
|
||||
|
||||
# from copyparty/__init__.py
|
||||
PY2 = sys.version_info[0] == 2
|
||||
if PY2:
|
||||
from Queue import Queue
|
||||
from urllib import unquote
|
||||
from urllib import quote
|
||||
|
||||
sys.dont_write_bytecode = True
|
||||
bytes = str
|
||||
else:
|
||||
from queue import Queue
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
|
||||
unicode = str
|
||||
|
||||
VT100 = platform.system() != "Windows"
|
||||
|
||||
|
||||
req_ses = requests.Session()
|
||||
|
||||
|
||||
class File(object):
|
||||
"""an up2k upload task; represents a single file"""
|
||||
|
||||
def __init__(self, top, rel, size, lmod):
|
||||
self.top = top # type: bytes
|
||||
self.rel = rel.replace(b"\\", b"/") # type: bytes
|
||||
self.size = size # type: int
|
||||
self.lmod = lmod # type: float
|
||||
|
||||
self.abs = os.path.join(top, rel) # type: bytes
|
||||
self.name = self.rel.split(b"/")[-1].decode("utf-8", "replace") # type: str
|
||||
|
||||
# set by get_hashlist
|
||||
self.cids = [] # type: list[tuple[str, int, int]] # [ hash, ofs, sz ]
|
||||
self.kchunks = {} # type: dict[str, tuple[int, int]] # hash: [ ofs, sz ]
|
||||
|
||||
# set by handshake
|
||||
self.ucids = [] # type: list[str] # chunks which need to be uploaded
|
||||
self.wark = None # type: str
|
||||
self.url = None # type: str
|
||||
|
||||
# set by upload
|
||||
self.up_b = 0 # type: int
|
||||
self.up_c = 0 # type: int
|
||||
|
||||
# m = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
||||
# eprint(m.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
||||
|
||||
|
||||
class FileSlice(object):
|
||||
"""file-like object providing a fixed window into a file"""
|
||||
|
||||
def __init__(self, file, cid):
|
||||
# type: (File, str) -> FileSlice
|
||||
|
||||
self.car, self.len = file.kchunks[cid]
|
||||
self.cdr = self.car + self.len
|
||||
self.ofs = 0 # type: int
|
||||
self.f = open(file.abs, "rb", 512 * 1024)
|
||||
self.f.seek(self.car)
|
||||
|
||||
# https://stackoverflow.com/questions/4359495/what-is-exactly-a-file-like-object-in-python
|
||||
# IOBase, RawIOBase, BufferedIOBase
|
||||
funs = "close closed __enter__ __exit__ __iter__ isatty __next__ readable seekable writable"
|
||||
try:
|
||||
for fun in funs.split():
|
||||
setattr(self, fun, getattr(self.f, fun))
|
||||
except:
|
||||
pass # py27 probably
|
||||
|
||||
def tell(self):
|
||||
return self.ofs
|
||||
|
||||
def seek(self, ofs, wh=0):
|
||||
if wh == 1:
|
||||
ofs = self.ofs + ofs
|
||||
elif wh == 2:
|
||||
ofs = self.len + ofs # provided ofs is negative
|
||||
|
||||
if ofs < 0:
|
||||
ofs = 0
|
||||
elif ofs >= self.len:
|
||||
ofs = self.len - 1
|
||||
|
||||
self.ofs = ofs
|
||||
self.f.seek(self.car + ofs)
|
||||
|
||||
def read(self, sz):
|
||||
sz = min(sz, self.len - self.ofs)
|
||||
ret = self.f.read(sz)
|
||||
self.ofs += len(ret)
|
||||
return ret
|
||||
|
||||
|
||||
_print = print
|
||||
|
||||
|
||||
def eprint(*a, **ka):
|
||||
ka["file"] = sys.stderr
|
||||
ka["end"] = ""
|
||||
if not PY2:
|
||||
ka["flush"] = True
|
||||
|
||||
_print(*a, **ka)
|
||||
if PY2 or not VT100:
|
||||
sys.stderr.flush()
|
||||
|
||||
|
||||
def flushing_print(*a, **ka):
|
||||
_print(*a, **ka)
|
||||
if "flush" not in ka:
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
if not VT100:
|
||||
print = flushing_print
|
||||
|
||||
|
||||
def termsize():
|
||||
import os
|
||||
|
||||
env = os.environ
|
||||
|
||||
def ioctl_GWINSZ(fd):
|
||||
try:
|
||||
import fcntl, termios, struct, os
|
||||
|
||||
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
|
||||
except:
|
||||
return
|
||||
return cr
|
||||
|
||||
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
|
||||
if not cr:
|
||||
try:
|
||||
fd = os.open(os.ctermid(), os.O_RDONLY)
|
||||
cr = ioctl_GWINSZ(fd)
|
||||
os.close(fd)
|
||||
except:
|
||||
pass
|
||||
if not cr:
|
||||
try:
|
||||
cr = (env["LINES"], env["COLUMNS"])
|
||||
except:
|
||||
cr = (25, 80)
|
||||
return int(cr[1]), int(cr[0])
|
||||
|
||||
|
||||
class CTermsize(object):
|
||||
def __init__(self):
|
||||
self.ev = False
|
||||
self.margin = None
|
||||
self.g = None
|
||||
self.w, self.h = termsize()
|
||||
|
||||
try:
|
||||
signal.signal(signal.SIGWINCH, self.ev_sig)
|
||||
except:
|
||||
return
|
||||
|
||||
thr = threading.Thread(target=self.worker)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def worker(self):
|
||||
while True:
|
||||
time.sleep(0.5)
|
||||
if not self.ev:
|
||||
continue
|
||||
|
||||
self.ev = False
|
||||
self.w, self.h = termsize()
|
||||
|
||||
if self.margin is not None:
|
||||
self.scroll_region(self.margin)
|
||||
|
||||
def ev_sig(self, *a, **ka):
|
||||
self.ev = True
|
||||
|
||||
def scroll_region(self, margin):
|
||||
self.margin = margin
|
||||
if margin is None:
|
||||
self.g = None
|
||||
eprint("\033[s\033[r\033[u")
|
||||
else:
|
||||
self.g = 1 + self.h - margin
|
||||
m = "{0}\033[{1}A".format("\n" * margin, margin)
|
||||
eprint("{0}\033[s\033[1;{1}r\033[u".format(m, self.g - 1))
|
||||
|
||||
|
||||
ss = CTermsize()
|
||||
|
||||
|
||||
def statdir(top):
|
||||
"""non-recursive listing of directory contents, along with stat() info"""
|
||||
if hasattr(os, "scandir"):
|
||||
with os.scandir(top) as dh:
|
||||
for fh in dh:
|
||||
yield [os.path.join(top, fh.name), fh.stat()]
|
||||
else:
|
||||
for name in os.listdir(top):
|
||||
abspath = os.path.join(top, name)
|
||||
yield [abspath, os.stat(abspath)]
|
||||
|
||||
|
||||
def walkdir(top):
|
||||
"""recursive statdir"""
|
||||
for ap, inf in sorted(statdir(top)):
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
for x in walkdir(ap):
|
||||
yield x
|
||||
else:
|
||||
yield ap, inf
|
||||
|
||||
|
||||
def walkdirs(tops):
|
||||
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
|
||||
sep = "{0}".format(os.sep).encode("ascii")
|
||||
for top in tops:
|
||||
if top[-1:] == sep:
|
||||
stop = top.rstrip(sep)
|
||||
else:
|
||||
stop = os.path.dirname(top)
|
||||
|
||||
if os.path.isdir(top):
|
||||
for ap, inf in walkdir(top):
|
||||
yield stop, ap[len(stop) :].lstrip(sep), inf
|
||||
else:
|
||||
d, n = top.rsplit(sep, 1)
|
||||
yield d, n, os.stat(top)
|
||||
|
||||
|
||||
# mostly from copyparty/util.py
|
||||
def quotep(btxt):
|
||||
quot1 = quote(btxt, safe=b"/")
|
||||
if not PY2:
|
||||
quot1 = quot1.encode("ascii")
|
||||
|
||||
return quot1.replace(b" ", b"+")
|
||||
|
||||
|
||||
# from copyparty/util.py
|
||||
def humansize(sz, terse=False):
|
||||
"""picks a sensible unit for the given extent"""
|
||||
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
|
||||
if sz < 1024:
|
||||
break
|
||||
|
||||
sz /= 1024.0
|
||||
|
||||
ret = " ".join([str(sz)[:4].rstrip("."), unit])
|
||||
|
||||
if not terse:
|
||||
return ret
|
||||
|
||||
return ret.replace("iB", "").replace(" ", "")
|
||||
|
||||
|
||||
# from copyparty/up2k.py
|
||||
def up2k_chunksize(filesize):
|
||||
"""gives The correct chunksize for up2k hashing"""
|
||||
chunksize = 1024 * 1024
|
||||
stepsize = 512 * 1024
|
||||
while True:
|
||||
for mul in [1, 2]:
|
||||
nchunks = math.ceil(filesize * 1.0 / chunksize)
|
||||
if nchunks <= 256 or chunksize >= 32 * 1024 * 1024:
|
||||
return chunksize
|
||||
|
||||
chunksize += stepsize
|
||||
stepsize *= mul
|
||||
|
||||
|
||||
# mostly from copyparty/up2k.py
|
||||
def get_hashlist(file, pcb):
|
||||
# type: (File, any) -> None
|
||||
"""generates the up2k hashlist from file contents, inserts it into `file`"""
|
||||
|
||||
chunk_sz = up2k_chunksize(file.size)
|
||||
file_rem = file.size
|
||||
file_ofs = 0
|
||||
ret = []
|
||||
with open(file.abs, "rb", 512 * 1024) as f:
|
||||
while file_rem > 0:
|
||||
hashobj = hashlib.sha512()
|
||||
chunk_sz = chunk_rem = min(chunk_sz, file_rem)
|
||||
while chunk_rem > 0:
|
||||
buf = f.read(min(chunk_rem, 64 * 1024))
|
||||
if not buf:
|
||||
raise Exception("EOF at " + str(f.tell()))
|
||||
|
||||
hashobj.update(buf)
|
||||
chunk_rem -= len(buf)
|
||||
|
||||
digest = hashobj.digest()[:33]
|
||||
digest = base64.urlsafe_b64encode(digest).decode("utf-8")
|
||||
|
||||
ret.append([digest, file_ofs, chunk_sz])
|
||||
file_ofs += chunk_sz
|
||||
file_rem -= chunk_sz
|
||||
|
||||
if pcb:
|
||||
pcb(file, file_ofs)
|
||||
|
||||
file.cids = ret
|
||||
file.kchunks = {}
|
||||
for k, v1, v2 in ret:
|
||||
file.kchunks[k] = [v1, v2]
|
||||
|
||||
|
||||
def handshake(req_ses, url, file, pw, search):
|
||||
# type: (requests.Session, str, File, any, bool) -> List[str]
|
||||
"""
|
||||
performs a handshake with the server; reply is:
|
||||
if search, a list of search results
|
||||
otherwise, a list of chunks to upload
|
||||
"""
|
||||
|
||||
req = {
|
||||
"hash": [x[0] for x in file.cids],
|
||||
"name": file.name,
|
||||
"lmod": file.lmod,
|
||||
"size": file.size,
|
||||
}
|
||||
if search:
|
||||
req["srch"] = 1
|
||||
|
||||
headers = {"Content-Type": "text/plain"} # wtf ed
|
||||
if pw:
|
||||
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||
|
||||
if file.url:
|
||||
url = file.url
|
||||
elif b"/" in file.rel:
|
||||
url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")
|
||||
|
||||
while True:
|
||||
try:
|
||||
r = req_ses.post(url, headers=headers, json=req)
|
||||
break
|
||||
except:
|
||||
eprint("handshake failed, retry...\n")
|
||||
time.sleep(1)
|
||||
|
||||
try:
|
||||
r = r.json()
|
||||
except:
|
||||
raise Exception(r.text)
|
||||
|
||||
if search:
|
||||
return r["hits"]
|
||||
|
||||
try:
|
||||
pre, url = url.split("://")
|
||||
pre += "://"
|
||||
except:
|
||||
pre = ""
|
||||
|
||||
file.url = pre + url.split("/")[0] + r["purl"]
|
||||
file.name = r["name"]
|
||||
file.wark = r["wark"]
|
||||
|
||||
return r["hash"]
|
||||
|
||||
|
||||
def upload(req_ses, file, cid, pw):
|
||||
# type: (requests.Session, File, str, any) -> None
|
||||
"""upload one specific chunk, `cid` (a chunk-hash)"""
|
||||
|
||||
headers = {
|
||||
"X-Up2k-Hash": cid,
|
||||
"X-Up2k-Wark": file.wark,
|
||||
"Content-Type": "application/octet-stream",
|
||||
}
|
||||
if pw:
|
||||
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||
|
||||
f = FileSlice(file, cid)
|
||||
try:
|
||||
r = req_ses.post(file.url, headers=headers, data=f)
|
||||
if not r:
|
||||
raise Exception(repr(r))
|
||||
|
||||
_ = r.content
|
||||
finally:
|
||||
f.f.close()
|
||||
|
||||
|
||||
class Daemon(threading.Thread):
|
||||
def __init__(self, *a, **ka):
|
||||
threading.Thread.__init__(self, *a, **ka)
|
||||
self.daemon = True
|
||||
|
||||
|
||||
class Ctl(object):
|
||||
"""
|
||||
this will be the coordinator which runs everything in parallel
|
||||
(hashing, handshakes, uploads) but right now it's p dumb
|
||||
"""
|
||||
|
||||
def __init__(self, ar):
|
||||
self.ar = ar
|
||||
ar.files = [
|
||||
os.path.abspath(os.path.realpath(x.encode("utf-8")))
|
||||
+ (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
|
||||
for x in ar.files
|
||||
]
|
||||
ar.url = ar.url.rstrip("/") + "/"
|
||||
if "://" not in ar.url:
|
||||
ar.url = "http://" + ar.url
|
||||
|
||||
eprint("\nscanning {0} locations\n".format(len(ar.files)))
|
||||
|
||||
nfiles = 0
|
||||
nbytes = 0
|
||||
for _, _, inf in walkdirs(ar.files):
|
||||
nfiles += 1
|
||||
nbytes += inf.st_size
|
||||
|
||||
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
|
||||
self.nfiles = nfiles
|
||||
self.nbytes = nbytes
|
||||
|
||||
if ar.td:
|
||||
req_ses.verify = False
|
||||
if ar.te:
|
||||
req_ses.verify = ar.te
|
||||
|
||||
self.filegen = walkdirs(ar.files)
|
||||
if ar.safe:
|
||||
self.safe()
|
||||
else:
|
||||
self.fancy()
|
||||
|
||||
def safe(self):
|
||||
"""minimal basic slow boring fallback codepath"""
|
||||
search = self.ar.s
|
||||
for nf, (top, rel, inf) in enumerate(self.filegen):
|
||||
file = File(top, rel, inf.st_size, inf.st_mtime)
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
|
||||
print("{0} {1}\n hash...".format(self.nfiles - nf, upath))
|
||||
get_hashlist(file, None)
|
||||
|
||||
burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
|
||||
while True:
|
||||
print(" hs...")
|
||||
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
|
||||
if search:
|
||||
if hs:
|
||||
for hit in hs:
|
||||
print(" found: {0}{1}".format(burl, hit["rp"]))
|
||||
else:
|
||||
print(" NOT found")
|
||||
break
|
||||
|
||||
file.ucids = hs
|
||||
if not hs:
|
||||
break
|
||||
|
||||
print("{0} {1}".format(self.nfiles - nf, upath))
|
||||
ncs = len(hs)
|
||||
for nc, cid in enumerate(hs):
|
||||
print(" {0} up {1}".format(ncs - nc, cid))
|
||||
upload(req_ses, file, cid, self.ar.a)
|
||||
|
||||
print(" ok!")
|
||||
|
||||
def fancy(self):
|
||||
self.hash_f = 0
|
||||
self.hash_c = 0
|
||||
self.hash_b = 0
|
||||
self.up_f = 0
|
||||
self.up_c = 0
|
||||
self.up_b = 0
|
||||
self.up_br = 0
|
||||
self.hasher_busy = 1
|
||||
self.handshaker_busy = 0
|
||||
self.uploader_busy = 0
|
||||
|
||||
self.t0 = time.time()
|
||||
self.t0_up = None
|
||||
self.spd = None
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.q_handshake = Queue() # type: Queue[File]
|
||||
self.q_recheck = Queue() # type: Queue[File] # partial upload exists [...]
|
||||
self.q_upload = Queue() # type: Queue[tuple[File, str]]
|
||||
|
||||
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
if VT100:
|
||||
atexit.register(self.cleanup_vt100)
|
||||
ss.scroll_region(3)
|
||||
|
||||
Daemon(target=self.hasher).start()
|
||||
for _ in range(self.ar.j):
|
||||
Daemon(target=self.handshaker).start()
|
||||
Daemon(target=self.uploader).start()
|
||||
|
||||
idles = 0
|
||||
while idles < 3:
|
||||
time.sleep(0.07)
|
||||
with self.mutex:
|
||||
if (
|
||||
self.q_handshake.empty()
|
||||
and self.q_upload.empty()
|
||||
and not self.hasher_busy
|
||||
and not self.handshaker_busy
|
||||
and not self.uploader_busy
|
||||
):
|
||||
idles += 1
|
||||
            else:
                idles = 0

            if VT100:
                maxlen = ss.w - len(str(self.nfiles)) - 14
                txt = "\033[s\033[{0}H".format(ss.g)
                for y, k, st, f in [
                    [0, "hash", self.st_hash, self.hash_f],
                    [1, "send", self.st_up, self.up_f],
                ]:
                    txt += "\033[{0}H{1}:".format(ss.g + y, k)
                    file, arg = st
                    if not file:
                        txt += " {0}\033[K".format(arg)
                    else:
                        if y:
                            p = 100 * file.up_b / file.size
                        else:
                            p = 100 * arg / file.size

                        name = file.abs.decode("utf-8", "replace")[-maxlen:]
                        if "/" in name:
                            name = "\033[36m{0}\033[0m/{1}".format(*name.rsplit("/", 1))

                        m = "{0:6.1f}% {1} {2}\033[K"
                        txt += m.format(p, self.nfiles - f, name)

                txt += "\033[{0}H ".format(ss.g + 2)
            else:
                txt = " "

            if not self.up_br:
                spd = self.hash_b / (time.time() - self.t0)
                eta = (self.nbytes - self.hash_b) / (spd + 1)
            else:
                spd = self.up_br / (time.time() - self.t0_up)
                spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
                eta = (self.nbytes - self.up_b) / (spd + 1)

            spd = humansize(spd)
            eta = str(datetime.timedelta(seconds=int(eta)))
            left = humansize(self.nbytes - self.up_b)
            tail = "\033[K\033[u" if VT100 else "\r"

            m = "eta: {0} @ {1}/s, {2} left".format(eta, spd, left)
            eprint(txt + "\033]0;{0}\033\\\r{1}{2}".format(m, m, tail))

    def cleanup_vt100(self):
        ss.scroll_region(None)
        eprint("\033[J\033]0;\033\\")

    def cb_hasher(self, file, ofs):
        self.st_hash = [file, ofs]

    def hasher(self):
        prd = None
        ls = {}
        for top, rel, inf in self.filegen:
            if self.ar.z:
                rd = os.path.dirname(rel)
                if prd != rd:
                    prd = rd
                    headers = {}
                    if self.ar.a:
                        headers["Cookie"] = "=".join(["cppwd", self.ar.a])

                    ls = {}
                    try:
                        print(" ls ~{0}".format(rd.decode("utf-8", "replace")))
                        r = req_ses.get(
                            self.ar.url.encode("utf-8") + quotep(rd) + b"?ls",
                            headers=headers,
                        )
                        for f in r.json()["files"]:
                            rfn = f["href"].split("?")[0].encode("utf-8", "replace")
                            ls[unquote(rfn)] = f
                    except:
                        print(" mkdir ~{0}".format(rd.decode("utf-8", "replace")))

                rf = ls.get(os.path.basename(rel), None)
                if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 1:
                    self.nfiles -= 1
                    self.nbytes -= inf.st_size
                    continue

            file = File(top, rel, inf.st_size, inf.st_mtime)
            while True:
                with self.mutex:
                    if (
                        self.hash_b - self.up_b < 1024 * 1024 * 128
                        and self.hash_c - self.up_c < 64
                        and (
                            not self.ar.nh
                            or (
                                self.q_upload.empty()
                                and self.q_handshake.empty()
                                and not self.uploader_busy
                            )
                        )
                    ):
                        break

                time.sleep(0.05)

            get_hashlist(file, self.cb_hasher)
            with self.mutex:
                self.hash_f += 1
                self.hash_c += len(file.cids)
                self.hash_b += file.size

            self.q_handshake.put(file)

        self.hasher_busy = 0
        self.st_hash = [None, "(finished)"]

    def handshaker(self):
        search = self.ar.s
        q = self.q_handshake
        burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
        while True:
            file = q.get()
            if not file:
                if q == self.q_handshake:
                    q = self.q_recheck
                    q.put(None)
                    continue

                self.q_upload.put(None)
                break

            with self.mutex:
                self.handshaker_busy += 1

            upath = file.abs.decode("utf-8", "replace")

            try:
                hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
            except Exception as ex:
                if q == self.q_handshake and "<pre>partial upload exists" in str(ex):
                    self.q_recheck.put(file)
                    hs = []
                else:
                    raise

            if search:
                if hs:
                    for hit in hs:
                        m = "found: {0}\n {1}{2}\n"
                        print(m.format(upath, burl, hit["rp"]), end="")
                else:
                    print("NOT found: {0}\n".format(upath), end="")

                with self.mutex:
                    self.up_f += 1
                    self.up_c += len(file.cids)
                    self.up_b += file.size
                    self.handshaker_busy -= 1

                continue

            with self.mutex:
                if not hs:
                    # all chunks done
                    self.up_f += 1
                    self.up_c += len(file.cids) - file.up_c
                    self.up_b += file.size - file.up_b

                if hs and file.up_c:
                    # some chunks failed
                    self.up_c -= len(hs)
                    file.up_c -= len(hs)
                    for cid in hs:
                        sz = file.kchunks[cid][1]
                        self.up_b -= sz
                        file.up_b -= sz

                file.ucids = hs
                self.handshaker_busy -= 1

            if not hs:
                kw = "uploaded" if file.up_b else " found"
                print("{0} {1}".format(kw, upath))
            for cid in hs:
                self.q_upload.put([file, cid])

    def uploader(self):
        while True:
            task = self.q_upload.get()
            if not task:
                self.st_up = [None, "(finished)"]
                break

            with self.mutex:
                self.uploader_busy += 1
                self.t0_up = self.t0_up or time.time()

            file, cid = task
            try:
                upload(req_ses, file, cid, self.ar.a)
            except:
                eprint("upload failed, retry...\n")
                pass  # handshake will fix it

            with self.mutex:
                sz = file.kchunks[cid][1]
                file.ucids = [x for x in file.ucids if x != cid]
                if not file.ucids:
                    self.q_handshake.put(file)

                self.st_up = [file, cid]
                file.up_b += sz
                self.up_b += sz
                self.up_br += sz
                file.up_c += 1
                self.up_c += 1
                self.uploader_busy -= 1


class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
    pass


def main():
    time.strptime("19970815", "%Y%m%d")  # python#7980
    if not VT100:
        os.system("rem")  # enables colors

    # fmt: off
    ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
NOTE:
source file/folder selection uses rsync syntax, meaning that:
  "foo" uploads the entire folder to URL/foo/
  "foo/" uploads the CONTENTS of the folder into URL/
""")

    ap.add_argument("url", type=unicode, help="server url, including destination folder")
    ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
    ap.add_argument("-a", metavar="PASSWORD", help="password")
    ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
    ap = app.add_argument_group("performance tweaks")
    ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
    ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
    ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
    ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
    ap = app.add_argument_group("tls")
    ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
    ap.add_argument("-td", action="store_true", help="disable certificate check")
    # fmt: on

    Ctl(app.parse_args())


if __name__ == "__main__":
    main()
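A minimal, self-contained sketch (not copyparty's own code; the names and the fake file dict are made up for illustration) of the three-stage pipeline that hasher(), handshaker() and uploader() above implement: a hasher feeds a handshake queue, the handshaker turns each file into chunk tasks for the upload queue, and a None sentinel shuts each stage down in order.

import queue
import threading

q_handshake = queue.Queue()
q_upload = queue.Queue()

def hasher(files):
    for f in files:
        q_handshake.put(f)          # pretend we hashed it
    q_handshake.put(None)           # no more files

def handshaker():
    while True:
        f = q_handshake.get()
        if f is None:
            q_upload.put(None)      # propagate shutdown to the uploader
            break
        for cid in f["chunks"]:     # a real server would return only missing chunks
            q_upload.put((f, cid))

def uploader():
    while True:
        task = q_upload.get()
        if task is None:
            break
        f, cid = task
        print("uploading", f["name"], cid)

files = [{"name": "a.bin", "chunks": ["c1", "c2"]}]
threads = [threading.Thread(target=t) for t in (handshaker, uploader)]
for t in threads:
    t.start()
hasher(files)
for t in threads:
    t.join()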
bin/up2k.sh (24 lines changed; Executable file → Normal file)
@@ -8,7 +8,7 @@ set -e
|
||||
##
|
||||
## config
|
||||
|
||||
datalen=$((2*1024*1024*1024))
|
||||
datalen=$((128*1024*1024))
|
||||
target=127.0.0.1
|
||||
posturl=/inc
|
||||
passwd=wark
|
||||
@@ -37,10 +37,10 @@ gendata() {
|
||||
# pipe a chunk, get the base64 checksum
|
||||
gethash() {
|
||||
printf $(
|
||||
sha512sum | cut -c-64 |
|
||||
sha512sum | cut -c-66 |
|
||||
sed -r 's/ .*//;s/(..)/\\x\1/g'
|
||||
) |
|
||||
base64 -w0 | cut -c-43 |
|
||||
base64 -w0 | cut -c-44 |
|
||||
tr '+/' '-_'
|
||||
}
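For reference, a Python equivalent of the gethash pipeline above (a sketch, assuming the newer 66-hex-char variant is the intended one): SHA-512 of the chunk, keep the first 33 bytes (66 hex characters), base64-encode and keep 44 characters with '+/' mapped to '-_'.

import base64
import hashlib

def chunk_hash(buf):
    digest = hashlib.sha512(buf).digest()[:33]   # 66 hex chars = 33 bytes
    b64 = base64.urlsafe_b64encode(digest).decode("ascii")  # '+/' -> '-_'
    return b64[:44]                              # 33 bytes encode to exactly 44 chars

print(chunk_hash(b"hello world"))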
|
||||
|
||||
@@ -123,7 +123,7 @@ printf '\033[36m'
|
||||
{
|
||||
{
|
||||
cat <<EOF
|
||||
POST $posturl/handshake.php HTTP/1.1
|
||||
POST $posturl/ HTTP/1.1
|
||||
Connection: Close
|
||||
Cookie: cppwd=$passwd
|
||||
Content-Type: text/plain;charset=UTF-8
|
||||
@@ -145,14 +145,16 @@ printf '\033[0m\nwark: %s\n' $wark
|
||||
##
|
||||
## wait for signal to continue
|
||||
|
||||
w8=/dev/shm/$salt.w8
|
||||
touch $w8
|
||||
true || {
|
||||
w8=/dev/shm/$salt.w8
|
||||
touch $w8
|
||||
|
||||
echo "ready; rm -f $w8"
|
||||
echo "ready; rm -f $w8"
|
||||
|
||||
while [ -e $w8 ]; do
|
||||
sleep 0.2
|
||||
done
|
||||
while [ -e $w8 ]; do
|
||||
sleep 0.2
|
||||
done
|
||||
}
|
||||
|
||||
|
||||
##
|
||||
@@ -175,7 +177,7 @@ while [ $remains -gt 0 ]; do
|
||||
|
||||
{
|
||||
cat <<EOF
|
||||
POST $posturl/chunkpit.php HTTP/1.1
|
||||
POST $posturl/ HTTP/1.1
|
||||
Connection: Keep-Alive
|
||||
Cookie: cppwd=$passwd
|
||||
Content-Type: application/octet-stream
|
||||
|
||||
@@ -29,7 +29,8 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
|
||||
|
||||
# OS integration
|
||||
init-scripts to start copyparty as a service
|
||||
* [`systemd/copyparty.service`](systemd/copyparty.service)
|
||||
* [`systemd/copyparty.service`](systemd/copyparty.service) runs the sfx normally
|
||||
* [`systemd/prisonparty.service`](systemd/prisonparty.service) runs the sfx in a chroot
|
||||
* [`openrc/copyparty`](openrc/copyparty)
|
||||
|
||||
# Reverse-proxy
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# ca-name and server-name
|
||||
# ca-name and server-fqdn
|
||||
ca_name="$1"
|
||||
srv_name="$2"
|
||||
srv_fqdn="$2"
|
||||
|
||||
[ -z "$srv_name" ] && {
|
||||
[ -z "$srv_fqdn" ] && {
|
||||
echo "need arg 1: ca name"
|
||||
echo "need arg 2: server name"
|
||||
echo "need arg 2: server fqdn"
|
||||
echo "optional arg 3: if set, write cert into copyparty cfg"
|
||||
exit 1
|
||||
}
|
||||
|
||||
@@ -31,15 +32,15 @@ EOF
|
||||
gen_srv() {
|
||||
(tee /dev/stderr <<EOF
|
||||
{"key": {"algo":"rsa", "size":4096},
|
||||
"names": [{"O":"$ca_name - $srv_name"}]}
|
||||
"names": [{"O":"$ca_name - $srv_fqdn"}]}
|
||||
EOF
|
||||
)|
|
||||
cfssl gencert -ca ca.pem -ca-key ca.key \
|
||||
-profile=www -hostname="$srv_name.$ca_name" - |
|
||||
cfssljson -bare "$srv_name"
|
||||
-profile=www -hostname="$srv_fqdn" - |
|
||||
cfssljson -bare "$srv_fqdn"
|
||||
|
||||
mv "$srv_name-key.pem" "$srv_name.key"
|
||||
rm "$srv_name.csr"
|
||||
mv "$srv_fqdn-key.pem" "$srv_fqdn.key"
|
||||
rm "$srv_fqdn.csr"
|
||||
}
|
||||
|
||||
|
||||
@@ -57,13 +58,13 @@ show() {
|
||||
awk '!o; {o=0} /[0-9a-f:]{16}/{o=1}'
|
||||
}
|
||||
show ca.pem
|
||||
show "$srv_name.pem"
|
||||
show "$srv_fqdn.pem"
|
||||
|
||||
|
||||
# write cert into copyparty config
|
||||
[ -z "$3" ] || {
|
||||
mkdir -p ~/.config/copyparty
|
||||
cat "$srv_name".{key,pem} ca.pem >~/.config/copyparty/cert.pem
|
||||
cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -8,11 +8,11 @@
|
||||
#
|
||||
# you may want to:
|
||||
# change '/usr/bin/python' to another interpreter
|
||||
# change '/mnt::a' to another location or permission-set
|
||||
# change '/mnt::rw' to another location or permission-set
|
||||
|
||||
name="$SVCNAME"
|
||||
command_background=true
|
||||
pidfile="/var/run/$SVCNAME.pid"
|
||||
|
||||
command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
|
||||
command_args="-q -v /mnt::a"
|
||||
command_args="-q -v /mnt::rw"
|
||||
|
||||
@@ -3,10 +3,16 @@
|
||||
#
|
||||
# installation:
|
||||
# cp -pv copyparty.service /etc/systemd/system && systemctl enable --now copyparty
|
||||
# restorecon -vr /etc/systemd/system/copyparty.service
|
||||
# firewall-cmd --permanent --add-port={80,443,3923}/tcp
|
||||
# firewall-cmd --reload
|
||||
#
|
||||
# you may want to:
|
||||
# change '/usr/bin/python' to another interpreter
|
||||
# change '/mnt::a' to another location or permission-set
|
||||
# change '/usr/bin/python3' to another interpreter
|
||||
# change '/mnt::rw' to another location or permission-set
|
||||
# remove '-p 80,443,3923' to only listen on port 3923
|
||||
# add '-i 127.0.0.1' to only allow local connections
|
||||
# add '--use-fpool' if uploading into nfs locations
|
||||
#
|
||||
# with `Type=notify`, copyparty will signal systemd when it is ready to
|
||||
# accept connections; correctly delaying units depending on copyparty.
|
||||
@@ -15,8 +21,11 @@
|
||||
# https://user-images.githubusercontent.com/241032/126040249-cb535cc7-c599-4931-a796-a5d9af691bad.png
|
||||
#
|
||||
# enable line-buffering for realtime logging (slight performance cost):
|
||||
# modify ExecStart and prefix it with `/bin/stdbuf -oL` like so:
|
||||
# ExecStart=/bin/stdbuf -oL /usr/bin/python3 [...]
|
||||
# modify ExecStart and prefix it with `/usr/bin/stdbuf -oL` like so:
|
||||
# ExecStart=/usr/bin/stdbuf -oL /usr/bin/python3 [...]
|
||||
# but some systemd versions require this instead (higher performance cost):
|
||||
# inside the [Service] block, add the following line:
|
||||
# Environment=PYTHONUNBUFFERED=x
|
||||
|
||||
[Unit]
|
||||
Description=copyparty file server
|
||||
@@ -24,7 +33,7 @@ Description=copyparty file server
|
||||
[Service]
|
||||
Type=notify
|
||||
SyslogIdentifier=copyparty
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::a
|
||||
ExecStart=/usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -p 80,443,3923 -v /mnt::rw
|
||||
ExecStartPre=/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
[Install]
|
||||
|
||||
contrib/systemd/prisonparty.service (new file, 27 lines)
@@ -0,0 +1,27 @@
# this will start `/usr/local/bin/copyparty-sfx.py`
# in a chroot, preventing accidental access elsewhere
# and share '/mnt' with anonymous read+write
#
# installation:
#   1) put copyparty-sfx.py and prisonparty.sh in /usr/local/bin
#   2) cp -pv prisonparty.service /etc/systemd/system && systemctl enable --now prisonparty
#
# you may want to:
#   change '/mnt::rw' to another location or permission-set
#   (remember to change the '/mnt' chroot arg too)
#
# enable line-buffering for realtime logging (slight performance cost):
#   inside the [Service] block, add the following line:
#   Environment=PYTHONUNBUFFERED=x

[Unit]
Description=copyparty file server

[Service]
SyslogIdentifier=prisonparty
WorkingDirectory=/usr/local/bin
ExecStart=/bin/bash /usr/local/bin/prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt -- \
        /usr/bin/python3 /usr/local/bin/copyparty-sfx.py -q -v /mnt::rw

[Install]
WantedBy=multi-user.target
@@ -25,6 +25,28 @@ ANYWIN = WINDOWS or sys.platform in ["msys"]
|
||||
MACOS = platform.system() == "Darwin"
|
||||
|
||||
|
||||
def get_unix_home():
|
||||
try:
|
||||
v = os.environ["XDG_CONFIG_HOME"]
|
||||
if not v:
|
||||
raise Exception()
|
||||
ret = os.path.normpath(v)
|
||||
os.listdir(ret)
|
||||
return ret
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
v = os.path.expanduser("~/.config")
|
||||
if v.startswith("~"):
|
||||
raise Exception()
|
||||
ret = os.path.normpath(v)
|
||||
os.listdir(ret)
|
||||
return ret
|
||||
except:
|
||||
return "/tmp"
|
||||
|
||||
|
||||
class EnvParams(object):
|
||||
def __init__(self):
|
||||
self.t0 = time.time()
|
||||
@@ -37,10 +59,7 @@ class EnvParams(object):
|
||||
elif sys.platform == "darwin":
|
||||
self.cfg = os.path.expanduser("~/Library/Preferences/copyparty")
|
||||
else:
|
||||
self.cfg = os.path.normpath(
|
||||
os.getenv("XDG_CONFIG_HOME", os.path.expanduser("~/.config"))
|
||||
+ "/copyparty"
|
||||
)
|
||||
self.cfg = get_unix_home() + "/copyparty"
|
||||
|
||||
self.cfg = self.cfg.replace("\\", "/")
|
||||
try:
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/env python
|
||||
#!/usr/bin/env python3
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
@@ -20,10 +20,11 @@ import threading
|
||||
import traceback
|
||||
from textwrap import dedent
|
||||
|
||||
from .__init__ import E, WINDOWS, VT100, PY2, unicode
|
||||
from .__init__ import E, WINDOWS, ANYWIN, VT100, PY2, unicode
|
||||
from .__version__ import S_VERSION, S_BUILD_DT, CODENAME
|
||||
from .svchub import SvcHub
|
||||
from .util import py_desc, align_tab, IMPLICATIONS
|
||||
from .util import py_desc, align_tab, IMPLICATIONS, ansi_re
|
||||
from .authsrv import re_vol
|
||||
|
||||
HAVE_SSL = True
|
||||
try:
|
||||
@@ -66,8 +67,12 @@ class Dodge11874(RiceFormatter):
|
||||
def lprint(*a, **ka):
|
||||
global printed
|
||||
|
||||
printed += " ".join(unicode(x) for x in a) + ka.get("end", "\n")
|
||||
print(*a, **ka)
|
||||
txt = " ".join(unicode(x) for x in a) + ka.get("end", "\n")
|
||||
printed += txt
|
||||
if not VT100:
|
||||
txt = ansi_re.sub("", txt)
|
||||
|
||||
print(txt, **ka)
|
||||
|
||||
|
||||
def warn(msg):
|
||||
@@ -99,7 +104,7 @@ def ensure_cert():
|
||||
cert_insec = os.path.join(E.mod, "res/insecure.pem")
|
||||
cert_cfg = os.path.join(E.cfg, "cert.pem")
|
||||
if not os.path.exists(cert_cfg):
|
||||
shutil.copy2(cert_insec, cert_cfg)
|
||||
shutil.copy(cert_insec, cert_cfg)
|
||||
|
||||
try:
|
||||
if filecmp.cmp(cert_cfg, cert_insec):
|
||||
@@ -196,24 +201,34 @@ def run_argparse(argv, formatter):
|
||||
formatter_class=formatter,
|
||||
prog="copyparty",
|
||||
description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
|
||||
epilog=dedent(
|
||||
"""
|
||||
)
|
||||
|
||||
try:
|
||||
fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
|
||||
except:
|
||||
fk_salt = "hunter2"
|
||||
|
||||
cores = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
|
||||
sects = [
|
||||
[
|
||||
"accounts",
|
||||
"accounts and volumes",
|
||||
dedent(
|
||||
"""
|
||||
-a takes username:password,
|
||||
-v takes src:dst:perm1:perm2:permN:cflag1:cflag2:cflagN:...
|
||||
where "perm" is "accesslevels,username1,username2,..."
|
||||
and "cflag" is config flags to set on this volume
|
||||
-v takes src:dst:perm1:perm2:permN:volflag1:volflag2:volflagN:...
|
||||
where "perm" is "permissions,username1,username2,..."
|
||||
and "volflag" is config flags to set on this volume
|
||||
|
||||
list of accesslevels:
|
||||
list of permissions:
|
||||
"r" (read): list folder contents, download files
|
||||
"w" (write): upload files; need "r" to see the uploads
|
||||
"m" (move): move files and folders; need "w" at destination
|
||||
"d" (delete): permanently delete files and folders
|
||||
"g" (get): download files, but cannot see folder contents
|
||||
|
||||
list of cflags:
|
||||
"c,nodupe" rejects existing files (instead of symlinking them)
|
||||
"c,e2d" sets -e2d (all -e2* args can be set using ce2* cflags)
|
||||
"c,d2t" disables metadata collection, overrides -e2t*
|
||||
"c,d2d" disables all database stuff, overrides -e2*
|
||||
too many volflags to list here, see the other sections
|
||||
|
||||
example:\033[35m
|
||||
-a ed:hunter2 -v .::r:rw,ed -v ../inc:dump:w:rw,ed:c,nodupe \033[36m
|
||||
@@ -230,35 +245,97 @@ def run_argparse(argv, formatter):
|
||||
|
||||
consider the config file for more flexible account/volume management,
|
||||
including dynamic reload at runtime (and being more readable w)
|
||||
"""
|
||||
),
|
||||
],
|
||||
[
|
||||
"flags",
|
||||
"list of volflags",
|
||||
dedent(
|
||||
"""
|
||||
volflags are appended to volume definitions, for example,
|
||||
to create a write-only volume with the \033[33mnodupe\033[0m and \033[32mnosub\033[0m flags:
|
||||
\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub
|
||||
|
||||
\033[0muploads, general:
|
||||
\033[36mnodupe\033[35m rejects existing files (instead of symlinking them)
|
||||
\033[36mnosub\033[35m forces all uploads into the top folder of the vfs
|
||||
\033[36mgz\033[35m allows server-side gzip of uploads with ?gz (also c,xz)
|
||||
\033[36mpk\033[35m forces server-side compression, optional arg: xz,9
|
||||
|
||||
\033[0mupload rules:
|
||||
\033[36mmaxn=250,600\033[35m max 250 uploads over 15min
|
||||
\033[36mmaxb=1g,300\033[35m max 1 GiB over 5min (suffixes: b, k, m, g)
|
||||
\033[36msz=1k-3m\033[35m allow filesizes between 1 KiB and 3MiB
|
||||
|
||||
\033[0mupload rotation:
|
||||
(moves all uploads into the specified folder structure)
|
||||
\033[36mrotn=100,3\033[35m 3 levels of subfolders with 100 entries in each
|
||||
\033[36mrotf=%Y-%m/%d-%H\033[35m date-formatted organizing
|
||||
\033[36mlifetime=3600\033[35m uploads are deleted after 1 hour
|
||||
|
||||
\033[0mdatabase, general:
|
||||
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
|
||||
\033[36md2t\033[35m disables metadata collection, overrides -e2t*
|
||||
\033[36md2d\033[35m disables all database stuff, overrides -e2*
|
||||
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
|
||||
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
|
||||
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
|
||||
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
|
||||
|
||||
\033[0mdatabase, audio tags:
|
||||
"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...
|
||||
\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
|
||||
generate ".bpm" tags from uploads (f = overwrite tags)
|
||||
\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
|
||||
|
||||
\033[0mothers:
|
||||
\033[36mfk=8\033[35m generates per-file accesskeys,
|
||||
which will then be required at the "g" permission
|
||||
\033[0m"""
|
||||
),
|
||||
],
|
||||
[
|
||||
"urlform",
|
||||
"",
|
||||
dedent(
|
||||
"""
|
||||
values for --urlform:
|
||||
"stash" dumps the data to file and returns length + checksum
|
||||
"save,get" dumps to file and returns the page like a GET
|
||||
"print,get" prints the data in the log and returns GET
|
||||
\033[36mstash\033[35m dumps the data to file and returns length + checksum
|
||||
\033[36msave,get\033[35m dumps to file and returns the page like a GET
|
||||
\033[36mprint,get\033[35m prints the data in the log and returns GET
|
||||
(leave out the ",get" to return an error instead)
|
||||
|
||||
values for --ls:
|
||||
"USR" is a user to browse as; * is anonymous, ** is all users
|
||||
"VOL" is a single volume to scan, default is * (all vols)
|
||||
"FLAG" is flags;
|
||||
"v" in addition to realpaths, print usernames and vpaths
|
||||
"ln" only prints symlinks leaving the volume mountpoint
|
||||
"p" exits 1 if any such symlinks are found
|
||||
"r" resumes startup after the listing
|
||||
"""
|
||||
),
|
||||
],
|
||||
[
|
||||
"ls",
|
||||
"volume inspection",
|
||||
dedent(
|
||||
"""
|
||||
\033[35m--ls USR,VOL,FLAGS
|
||||
\033[36mUSR\033[0m is a user to browse as; * is anonymous, ** is all users
|
||||
\033[36mVOL\033[0m is a single volume to scan, default is * (all vols)
|
||||
\033[36mFLAG\033[0m is flags;
|
||||
\033[36mv\033[0m in addition to realpaths, print usernames and vpaths
|
||||
\033[36mln\033[0m only prints symlinks leaving the volume mountpoint
|
||||
\033[36mp\033[0m exits 1 if any such symlinks are found
|
||||
\033[36mr\033[0m resumes startup after the listing
|
||||
examples:
|
||||
--ls '**' # list all files which are possible to read
|
||||
--ls '**,*,ln' # check for dangerous symlinks
|
||||
--ls '**,*,ln,p,r' # check, then start normally if safe
|
||||
\033[0m
|
||||
"""
|
||||
),
|
||||
)
|
||||
),
|
||||
],
|
||||
]
|
||||
|
||||
# fmt: off
|
||||
u = unicode
|
||||
ap2 = ap.add_argument_group('general options')
|
||||
ap2.add_argument("-c", metavar="PATH", type=u, action="append", help="add config file")
|
||||
ap2.add_argument("-nc", metavar="NUM", type=int, default=64, help="max num clients")
|
||||
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores")
|
||||
ap2.add_argument("-j", metavar="CORES", type=int, default=1, help="max num cpu cores, 0=all")
|
||||
ap2.add_argument("-a", metavar="ACCT", type=u, action="append", help="add account, USER:PASS; example [ed:wark")
|
||||
ap2.add_argument("-v", metavar="VOL", type=u, action="append", help="add volume, SRC:DST:FLAG; example [.::r], [/mnt/nas/music:/music:r:aed")
|
||||
ap2.add_argument("-ed", action="store_true", help="enable ?dots")
|
||||
@@ -270,6 +347,9 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
|
||||
ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
|
||||
ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
|
||||
ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload")
|
||||
ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without")
|
||||
ap2.add_argument("--no-symlink", action="store_true", help="duplicate file contents instead")
|
||||
|
||||
ap2 = ap.add_argument_group('network options')
|
||||
ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
|
||||
@@ -291,10 +371,21 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("-nih", action="store_true", help="no info hostname")
|
||||
ap2.add_argument("-nid", action="store_true", help="no info disk-usage")
|
||||
ap2.add_argument("--no-zip", action="store_true", help="disable download as zip/tar")
|
||||
ap2.add_argument("--no-lifetime", action="store_true", help="disable automatic deletion of uploads after a certain time (lifetime volflag)")
|
||||
|
||||
ap2 = ap.add_argument_group('safety options')
|
||||
ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
|
||||
ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt")
|
||||
ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt")
|
||||
ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
|
||||
ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
|
||||
ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
|
||||
ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
|
||||
ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")
|
||||
|
||||
ap2 = ap.add_argument_group('yolo options')
|
||||
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
|
||||
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
|
||||
|
||||
ap2 = ap.add_argument_group('logging options')
|
||||
ap2.add_argument("-q", action="store_true", help="quiet")
|
||||
@@ -313,6 +404,7 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("--no-thumb", action="store_true", help="disable all thumbnails")
|
||||
ap2.add_argument("--no-vthumb", action="store_true", help="disable video thumbnails")
|
||||
ap2.add_argument("--th-size", metavar="WxH", default="320x256", help="thumbnail res")
|
||||
ap2.add_argument("--th-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for generating thumbnails")
|
||||
ap2.add_argument("--th-no-crop", action="store_true", help="dynamic height; show full image")
|
||||
ap2.add_argument("--th-no-jpg", action="store_true", help="disable jpg output")
|
||||
ap2.add_argument("--th-no-webp", action="store_true", help="disable webp output")
|
||||
@@ -326,10 +418,10 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
|
||||
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
|
||||
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
|
||||
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume state")
|
||||
ap2.add_argument("--no-hash", action="store_true", help="disable hashing during e2ds folder scans")
|
||||
ap2.add_argument("--re-int", metavar="SEC", type=int, default=30, help="disk rescan check interval")
|
||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval (0=off)")
|
||||
ap2.add_argument("--hist", metavar="PATH", type=u, help="where to store volume data (db, thumbs)")
|
||||
ap2.add_argument("--no-hash", metavar="PTN", type=u, help="regex: disable hashing of matching paths during e2ds folder scans")
|
||||
ap2.add_argument("--no-idx", metavar="PTN", type=u, help="regex: disable indexing of matching paths during e2ds folder scans")
|
||||
ap2.add_argument("--re-maxage", metavar="SEC", type=int, default=0, help="disk rescan volume interval, 0=off, can be set per-volume with the 'scan' volflag")
|
||||
ap2.add_argument("--srch-time", metavar="SEC", type=int, default=30, help="search deadline")
|
||||
|
||||
ap2 = ap.add_argument_group('metadata db options')
|
||||
@@ -337,14 +429,17 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
|
||||
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
|
||||
ap2.add_argument("--no-mutagen", action="store_true", help="use FFprobe for tags instead")
|
||||
ap2.add_argument("--no-mtag-mt", action="store_true", help="disable tag-read parallelism")
|
||||
ap2.add_argument("--no-mtag-ff", action="store_true", help="never use FFprobe as tag reader")
|
||||
ap2.add_argument("--mtag-mt", metavar="CORES", type=int, default=cores, help="num cpu cores to use for tag scanning")
|
||||
ap2.add_argument("-mtm", metavar="M=t,t,t", type=u, action="append", help="add/replace metadata mapping")
|
||||
ap2.add_argument("-mte", metavar="M,M,M", type=u, help="tags to index/display (comma-sep.)",
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,ac,vc,res,.fps")
|
||||
default="circle,album,.tn,artist,title,.bpm,key,.dur,.q,.vq,.aq,vc,ac,res,.fps,ahash,vhash")
|
||||
ap2.add_argument("-mth", metavar="M,M,M", type=u, help="tags to hide by default (comma-sep.)",
|
||||
default=".vq,.aq,vc,ac,res,.fps")
|
||||
ap2.add_argument("-mtp", metavar="M=[f,]bin", type=u, action="append", help="read tag M using bin")
|
||||
|
||||
ap2 = ap.add_argument_group('appearance options')
|
||||
ap2 = ap.add_argument_group('ui options')
|
||||
ap2.add_argument("--js-browser", metavar="L", type=u, help="URL to additional JS to include")
|
||||
ap2.add_argument("--css-browser", metavar="L", type=u, help="URL to additional CSS to include")
|
||||
|
||||
ap2 = ap.add_argument_group('debug options')
|
||||
@@ -354,10 +449,22 @@ def run_argparse(argv, formatter):
|
||||
ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
|
||||
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second")
|
||||
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")
|
||||
|
||||
return ap.parse_args(args=argv[1:])
|
||||
# fmt: on
|
||||
|
||||
ap2 = ap.add_argument_group("help sections")
|
||||
for k, h, _ in sects:
|
||||
ap2.add_argument("--help-" + k, action="store_true", help=h)
|
||||
|
||||
ret = ap.parse_args(args=argv[1:])
|
||||
for k, h, t in sects:
|
||||
k2 = "help_" + k.replace("-", "_")
|
||||
if vars(ret)[k2]:
|
||||
lprint("# {} help page".format(k))
|
||||
lprint(t + "\033[0m")
|
||||
sys.exit(0)
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
@@ -388,6 +495,12 @@ def main(argv=None):
|
||||
argv[idx] = nk
|
||||
time.sleep(2)
|
||||
|
||||
try:
|
||||
if len(argv) == 1 and (ANYWIN or not os.geteuid()):
|
||||
argv.extend(["-p80,443,3923", "--ign-ebind"])
|
||||
except:
|
||||
pass
|
||||
|
||||
try:
|
||||
al = run_argparse(argv, RiceFormatter)
|
||||
except AssertionError:
|
||||
@@ -396,14 +509,20 @@ def main(argv=None):
|
||||
nstrs = []
|
||||
anymod = False
|
||||
for ostr in al.v or []:
|
||||
m = re_vol.match(ostr)
|
||||
if not m:
|
||||
# not our problem
|
||||
nstrs.append(ostr)
|
||||
continue
|
||||
|
||||
src, dst, perms = m.groups()
|
||||
na = [src, dst]
|
||||
mod = False
|
||||
oa = ostr.split(":")
|
||||
na = oa[:2]
|
||||
for opt in oa[2:]:
|
||||
for opt in perms.split(":"):
|
||||
if re.match("c[^,]", opt):
|
||||
mod = True
|
||||
na.append("c," + opt[1:])
|
||||
elif re.sub("^[rwmd]*", "", opt) and "," not in opt:
|
||||
elif re.sub("^[rwmdg]*", "", opt) and "," not in opt:
|
||||
mod = True
|
||||
perm = opt[0]
|
||||
if perm == "a":
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
# coding: utf-8
|
||||
|
||||
VERSION = (0, 12, 7)
|
||||
CODENAME = "fil\033[33med"
|
||||
BUILD_DT = (2021, 7, 31)
|
||||
VERSION = (1, 0, 14)
|
||||
CODENAME = "sufficient"
|
||||
BUILD_DT = (2021, 10, 30)
|
||||
|
||||
S_VERSION = ".".join(map(str, VERSION))
|
||||
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
|
||||
|
||||
@@ -5,31 +5,196 @@ import re
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
import time
|
||||
import base64
|
||||
import hashlib
|
||||
import threading
|
||||
from datetime import datetime
|
||||
|
||||
from .__init__ import WINDOWS
|
||||
from .util import IMPLICATIONS, uncyg, undot, absreal, Pebkac, fsdec, fsenc, statdir
|
||||
from .util import (
|
||||
IMPLICATIONS,
|
||||
uncyg,
|
||||
undot,
|
||||
unhumanize,
|
||||
absreal,
|
||||
Pebkac,
|
||||
fsenc,
|
||||
statdir,
|
||||
)
|
||||
from .bos import bos
|
||||
|
||||
|
||||
LEELOO_DALLAS = "leeloo_dallas"
|
||||
|
||||
|
||||
class AXS(object):
|
||||
def __init__(self, uread=None, uwrite=None, umove=None, udel=None):
|
||||
def __init__(self, uread=None, uwrite=None, umove=None, udel=None, uget=None):
|
||||
self.uread = {} if uread is None else {k: 1 for k in uread}
|
||||
self.uwrite = {} if uwrite is None else {k: 1 for k in uwrite}
|
||||
self.umove = {} if umove is None else {k: 1 for k in umove}
|
||||
self.udel = {} if udel is None else {k: 1 for k in udel}
|
||||
self.uget = {} if uget is None else {k: 1 for k in uget}
|
||||
|
||||
def __repr__(self):
|
||||
return "AXS({})".format(
|
||||
", ".join(
|
||||
"{}={!r}".format(k, self.__dict__[k])
|
||||
for k in "uread uwrite umove udel".split()
|
||||
for k in "uread uwrite umove udel uget".split()
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class Lim(object):
|
||||
def __init__(self):
|
||||
self.nups = {} # num tracker
|
||||
self.bups = {} # byte tracker list
|
||||
self.bupc = {} # byte tracker cache
|
||||
|
||||
self.nosub = False # disallow subdirectories
|
||||
|
||||
self.smin = None # filesize min
|
||||
self.smax = None # filesize max
|
||||
|
||||
self.bwin = None # bytes window
|
||||
self.bmax = None # bytes max
|
||||
self.nwin = None # num window
|
||||
self.nmax = None # num max
|
||||
|
||||
self.rotn = None # rot num files
|
||||
self.rotl = None # rot depth
|
||||
self.rotf = None # rot datefmt
|
||||
self.rot_re = None # rotf check
|
||||
|
||||
def set_rotf(self, fmt):
|
||||
self.rotf = fmt
|
||||
r = re.escape(fmt).replace("%Y", "[0-9]{4}").replace("%j", "[0-9]{3}")
|
||||
r = re.sub("%[mdHMSWU]", "[0-9]{2}", r)
|
||||
self.rot_re = re.compile("(^|/)" + r + "$")
|
||||
|
||||
def all(self, ip, rem, sz, abspath):
|
||||
self.chk_nup(ip)
|
||||
self.chk_bup(ip)
|
||||
self.chk_rem(rem)
|
||||
if sz != -1:
|
||||
self.chk_sz(sz)
|
||||
|
||||
ap2, vp2 = self.rot(abspath)
|
||||
if abspath == ap2:
|
||||
return ap2, rem
|
||||
|
||||
return ap2, ("{}/{}".format(rem, vp2) if rem else vp2)
|
||||
|
||||
def chk_sz(self, sz):
|
||||
if self.smin is not None and sz < self.smin:
|
||||
raise Pebkac(400, "file too small")
|
||||
|
||||
if self.smax is not None and sz > self.smax:
|
||||
raise Pebkac(400, "file too big")
|
||||
|
||||
def chk_rem(self, rem):
|
||||
if self.nosub and rem:
|
||||
raise Pebkac(500, "no subdirectories allowed")
|
||||
|
||||
def rot(self, path):
|
||||
if not self.rotf and not self.rotn:
|
||||
return path, ""
|
||||
|
||||
if self.rotf:
|
||||
path = path.rstrip("/\\")
|
||||
if self.rot_re.search(path.replace("\\", "/")):
|
||||
return path, ""
|
||||
|
||||
suf = datetime.utcnow().strftime(self.rotf)
|
||||
if path:
|
||||
path += "/"
|
||||
|
||||
return path + suf, suf
|
||||
|
||||
ret = self.dive(path, self.rotl)
|
||||
if not ret:
|
||||
raise Pebkac(500, "no available slots in volume")
|
||||
|
||||
d = ret[len(path) :].strip("/\\").replace("\\", "/")
|
||||
return ret, d
|
||||
|
||||
def dive(self, path, lvs):
|
||||
items = bos.listdir(path)
|
||||
|
||||
if not lvs:
|
||||
# at leaf level
|
||||
return None if len(items) >= self.rotn else ""
|
||||
|
||||
dirs = [int(x) for x in items if x and all(y in "1234567890" for y in x)]
|
||||
dirs.sort()
|
||||
|
||||
if not dirs:
|
||||
# no branches yet; make one
|
||||
sub = os.path.join(path, "0")
|
||||
bos.mkdir(sub)
|
||||
else:
|
||||
# try newest branch only
|
||||
sub = os.path.join(path, str(dirs[-1]))
|
||||
|
||||
ret = self.dive(sub, lvs - 1)
|
||||
if ret is not None:
|
||||
return os.path.join(sub, ret)
|
||||
|
||||
if len(dirs) >= self.rotn:
|
||||
# full branch or root
|
||||
return None
|
||||
|
||||
# make a branch
|
||||
sub = os.path.join(path, str(dirs[-1] + 1))
|
||||
bos.mkdir(sub)
|
||||
ret = self.dive(sub, lvs - 1)
|
||||
if ret is None:
|
||||
raise Pebkac(500, "rotation bug")
|
||||
|
||||
return os.path.join(sub, ret)
|
||||
|
||||
def nup(self, ip):
|
||||
try:
|
||||
self.nups[ip].append(time.time())
|
||||
except:
|
||||
self.nups[ip] = [time.time()]
|
||||
|
||||
def bup(self, ip, nbytes):
|
||||
v = [time.time(), nbytes]
|
||||
try:
|
||||
self.bups[ip].append(v)
|
||||
self.bupc[ip] += nbytes
|
||||
except:
|
||||
self.bups[ip] = [v]
|
||||
self.bupc[ip] = nbytes
|
||||
|
||||
def chk_nup(self, ip):
|
||||
if not self.nmax or ip not in self.nups:
|
||||
return
|
||||
|
||||
nups = self.nups[ip]
|
||||
cutoff = time.time() - self.nwin
|
||||
while nups and nups[0] < cutoff:
|
||||
nups.pop(0)
|
||||
|
||||
if len(nups) >= self.nmax:
|
||||
raise Pebkac(429, "too many uploads")
|
||||
|
||||
def chk_bup(self, ip):
|
||||
if not self.bmax or ip not in self.bups:
|
||||
return
|
||||
|
||||
bups = self.bups[ip]
|
||||
cutoff = time.time() - self.bwin
|
||||
mark = self.bupc[ip]
|
||||
while bups and bups[0][0] < cutoff:
|
||||
mark -= bups.pop(0)[1]
|
||||
|
||||
self.bupc[ip] = mark
|
||||
if mark >= self.bmax:
|
||||
raise Pebkac(429, "ingress saturated")
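A standalone sketch of the rotation-format idea from Lim.set_rotf() / Lim.rot() above (the helper name and sample paths are mine, for illustration): the strftime "rotf" volflag is compiled into a regex so that a destination path which already ends in the date-formatted suffix is recognized and not nested a second time.

import re
from datetime import datetime

def rotf_regex(fmt):
    # escape the literal parts, then swap the strftime fields for digit groups
    r = re.escape(fmt).replace("%Y", "[0-9]{4}").replace("%j", "[0-9]{3}")
    r = re.sub("%[mdHMSWU]", "[0-9]{2}", r)
    return re.compile("(^|/)" + r + "$")

fmt = "%Y-%m/%d-%H"
rx = rotf_regex(fmt)
suf = datetime.utcnow().strftime(fmt)        # e.g. "2021-10/30-13"
print(bool(rx.search("/srv/inc/" + suf)))    # True: already rotated, keep as-is
print(bool(rx.search("/srv/inc")))           # False: needs the date suffix appended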
|
||||
|
||||
|
||||
class VFS(object):
|
||||
"""single level in the virtual fs"""
|
||||
|
||||
@@ -42,6 +207,7 @@ class VFS(object):
|
||||
self.nodes = {} # child nodes
|
||||
self.histtab = None # all realpath->histpath
|
||||
self.dbv = None # closest full/non-jump parent
|
||||
self.lim = None # type: Lim # upload limits; only set for dbv
|
||||
|
||||
if realpath:
|
||||
self.histpath = os.path.join(realpath, ".hist") # db / thumbcache
|
||||
@@ -50,6 +216,7 @@ class VFS(object):
|
||||
self.awrite = {}
|
||||
self.amove = {}
|
||||
self.adel = {}
|
||||
self.aget = {}
|
||||
else:
|
||||
self.histpath = None
|
||||
self.all_vols = None
|
||||
@@ -57,6 +224,7 @@ class VFS(object):
|
||||
self.awrite = None
|
||||
self.amove = None
|
||||
self.adel = None
|
||||
self.aget = None
|
||||
|
||||
def __repr__(self):
|
||||
return "VFS({})".format(
|
||||
@@ -143,7 +311,7 @@ class VFS(object):
|
||||
|
||||
def can_access(self, vpath, uname):
|
||||
# type: (str, str) -> tuple[bool, bool, bool, bool]
|
||||
"""can Read,Write,Move,Delete"""
|
||||
"""can Read,Write,Move,Delete,Get"""
|
||||
vn, _ = self._find(vpath)
|
||||
c = vn.axs
|
||||
return [
|
||||
@@ -151,10 +319,20 @@ class VFS(object):
|
||||
uname in c.uwrite or "*" in c.uwrite,
|
||||
uname in c.umove or "*" in c.umove,
|
||||
uname in c.udel or "*" in c.udel,
|
||||
uname in c.uget or "*" in c.uget,
|
||||
]
|
||||
|
||||
def get(self, vpath, uname, will_read, will_write, will_move=False, will_del=False):
|
||||
# type: (str, str, bool, bool, bool, bool) -> tuple[VFS, str]
|
||||
def get(
|
||||
self,
|
||||
vpath,
|
||||
uname,
|
||||
will_read,
|
||||
will_write,
|
||||
will_move=False,
|
||||
will_del=False,
|
||||
will_get=False,
|
||||
):
|
||||
# type: (str, str, bool, bool, bool, bool, bool) -> tuple[VFS, str]
|
||||
"""returns [vfsnode,fs_remainder] if user has the requested permissions"""
|
||||
vn, rem = self._find(vpath)
|
||||
c = vn.axs
|
||||
@@ -164,19 +342,21 @@ class VFS(object):
|
||||
[will_write, c.uwrite, "write"],
|
||||
[will_move, c.umove, "move"],
|
||||
[will_del, c.udel, "delete"],
|
||||
[will_get, c.uget, "get"],
|
||||
]:
|
||||
if req and (uname not in d and "*" not in d):
|
||||
if req and (uname not in d and "*" not in d) and uname != LEELOO_DALLAS:
|
||||
m = "you don't have {}-access for this location"
|
||||
raise Pebkac(403, m.format(msg))
|
||||
|
||||
return vn, rem
|
||||
|
||||
def get_dbv(self, vrem):
|
||||
# type: (str) -> tuple[VFS, str]
|
||||
dbv = self.dbv
|
||||
if not dbv:
|
||||
return self, vrem
|
||||
|
||||
vrem = [self.vpath[len(dbv.vpath) + 1 :], vrem]
|
||||
vrem = [self.vpath[len(dbv.vpath) :].lstrip("/"), vrem]
|
||||
vrem = "/".join([x for x in vrem if x])
|
||||
return dbv, vrem
|
||||
|
||||
@@ -202,7 +382,7 @@ class VFS(object):
|
||||
for name, vn2 in sorted(self.nodes.items()):
|
||||
ok = False
|
||||
axs = vn2.axs
|
||||
axs = [axs.uread, axs.uwrite, axs.umove, axs.udel]
|
||||
axs = [axs.uread, axs.uwrite, axs.umove, axs.udel, axs.uget]
|
||||
for pset in permsets:
|
||||
ok = True
|
||||
for req, lst in zip(pset, axs):
|
||||
@@ -268,7 +448,11 @@ class VFS(object):
|
||||
f2a = os.sep + "dir.txt"
|
||||
f2b = "{0}.hist{0}".format(os.sep)
|
||||
|
||||
g = self.walk("", vrem, [], uname, [[True]], dots, scandir, False)
|
||||
# if multiselect: add all items to archive root
|
||||
# if single folder: the folder itself is the top-level item
|
||||
folder = "" if flt else (vrem.split("/")[-1] or "top")
|
||||
|
||||
g = self.walk(folder, vrem, [], uname, [[True]], dots, scandir, False)
|
||||
for _, _, vpath, apath, files, rd, vd in g:
|
||||
if flt:
|
||||
files = [x for x in files if x[0] in flt]
|
||||
@@ -310,6 +494,12 @@ class VFS(object):
|
||||
yield f
|
||||
|
||||
|
||||
if WINDOWS:
|
||||
re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
|
||||
else:
|
||||
re_vol = re.compile(r"^([^:]*):([^:]*):(.*)$")
|
||||
|
||||
|
||||
class AuthSrv(object):
|
||||
"""verifies users against given paths"""
|
||||
|
||||
@@ -319,11 +509,6 @@ class AuthSrv(object):
|
||||
self.warn_anonwrite = warn_anonwrite
|
||||
self.line_ctr = 0
|
||||
|
||||
if WINDOWS:
|
||||
self.re_vol = re.compile(r"^([a-zA-Z]:[\\/][^:]*|[^:]*):([^:]*):(.*)$")
|
||||
else:
|
||||
self.re_vol = re.compile(r"^([^:]*):([^:]*):(.*)$")
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.reload()
|
||||
|
||||
@@ -341,6 +526,24 @@ class AuthSrv(object):
|
||||
|
||||
yield prev, True
|
||||
|
||||
def _map_volume(self, src, dst, mount, daxs, mflags):
|
||||
if dst in mount:
|
||||
m = "multiple filesystem-paths mounted at [/{}]:\n [{}]\n [{}]"
|
||||
self.log(m.format(dst, mount[dst], src), c=1)
|
||||
raise Exception("invalid config")
|
||||
|
||||
if src in mount.values():
|
||||
m = "warning: filesystem-path [{}] mounted in multiple locations:"
|
||||
m = m.format(src)
|
||||
for v in [k for k, v in mount.items() if v == src] + [dst]:
|
||||
m += "\n /{}".format(v)
|
||||
|
||||
self.log(m, c=3)
|
||||
|
||||
mount[dst] = src
|
||||
daxs[dst] = AXS()
|
||||
mflags[dst] = {}
|
||||
|
||||
def _parse_config_file(self, fd, acct, daxs, mflags, mount):
|
||||
# type: (any, str, dict[str, AXS], any, str) -> None
|
||||
vol_src = None
|
||||
@@ -371,9 +574,7 @@ class AuthSrv(object):
|
||||
# cfg files override arguments and previous files
|
||||
vol_src = bos.path.abspath(vol_src)
|
||||
vol_dst = vol_dst.strip("/")
|
||||
mount[vol_dst] = vol_src
|
||||
daxs[vol_dst] = AXS()
|
||||
mflags[vol_dst] = {}
|
||||
self._map_volume(vol_src, vol_dst, mount, daxs, mflags)
|
||||
continue
|
||||
|
||||
try:
|
||||
@@ -390,10 +591,21 @@ class AuthSrv(object):
|
||||
|
||||
def _read_vol_str(self, lvl, uname, axs, flags):
|
||||
# type: (str, str, AXS, any) -> None
|
||||
if lvl.strip("crwmdg"):
|
||||
raise Exception("invalid volume flag: {},{}".format(lvl, uname))
|
||||
|
||||
if lvl == "c":
|
||||
cval = True
|
||||
if "=" in uname:
|
||||
try:
|
||||
# volume flag with arguments, possibly with a preceding list of bools
|
||||
uname, cval = uname.split("=", 1)
|
||||
except:
|
||||
# just one or more bools
|
||||
cval = True
|
||||
|
||||
while "," in uname:
|
||||
# one or more bools before the final flag; eat them
|
||||
n1, uname = uname.split(",", 1)
|
||||
self._read_volflag(flags, n1, True, False)
|
||||
|
||||
self._read_volflag(flags, uname, cval, False)
|
||||
return
|
||||
@@ -414,6 +626,9 @@ class AuthSrv(object):
|
||||
if "d" in lvl:
|
||||
axs.udel[un] = 1
|
||||
|
||||
if "g" in lvl:
|
||||
axs.uget[un] = 1
|
||||
|
||||
def _read_volflag(self, flags, name, value, is_list):
|
||||
if name not in ["mtp"]:
|
||||
flags[name] = value
|
||||
@@ -451,9 +666,9 @@ class AuthSrv(object):
|
||||
|
||||
if self.args.v:
|
||||
# list of src:dst:permset:permset:...
|
||||
# permset is <rwmd>[,username][,username] or <c>,<flag>[=args]
|
||||
# permset is <rwmdg>[,username][,username] or <c>,<flag>[=args]
|
||||
for v_str in self.args.v:
|
||||
m = self.re_vol.match(v_str)
|
||||
m = re_vol.match(v_str)
|
||||
if not m:
|
||||
raise Exception("invalid -v argument: [{}]".format(v_str))
|
||||
|
||||
@@ -464,9 +679,7 @@ class AuthSrv(object):
|
||||
# print("\n".join([src, dst, perms]))
|
||||
src = bos.path.abspath(src)
|
||||
dst = dst.strip("/")
|
||||
mount[dst] = src
|
||||
daxs[dst] = AXS()
|
||||
mflags[dst] = {}
|
||||
self._map_volume(src, dst, mount, daxs, mflags)
|
||||
|
||||
for x in perms.split(":"):
|
||||
lvl, uname = x.split(",", 1) if "," in x else [x, ""]
|
||||
@@ -518,20 +731,22 @@ class AuthSrv(object):
|
||||
vfs.all_vols = {}
|
||||
vfs.get_all_vols(vfs.all_vols)
|
||||
|
||||
for perm in "read write move del".split():
|
||||
for perm in "read write move del get".split():
|
||||
axs_key = "u" + perm
|
||||
unames = ["*"] + list(acct.keys())
|
||||
umap = {x: [] for x in unames}
|
||||
for usr in unames:
|
||||
for mp, vol in vfs.all_vols.items():
|
||||
if usr in getattr(vol.axs, axs_key):
|
||||
axs = getattr(vol.axs, axs_key)
|
||||
if usr in axs or "*" in axs:
|
||||
umap[usr].append(mp)
|
||||
umap[usr].sort()
|
||||
setattr(vfs, "a" + perm, umap)
|
||||
|
||||
all_users = {}
|
||||
missing_users = {}
|
||||
for axs in daxs.values():
|
||||
for d in [axs.uread, axs.uwrite, axs.umove, axs.udel]:
|
||||
for d in [axs.uread, axs.uwrite, axs.umove, axs.udel, axs.uget]:
|
||||
for usr in d.keys():
|
||||
all_users[usr] = 1
|
||||
if usr != "*" and usr not in acct:
|
||||
@@ -545,6 +760,9 @@ class AuthSrv(object):
|
||||
)
|
||||
raise Exception("invalid config")
|
||||
|
||||
if LEELOO_DALLAS in all_users:
|
||||
raise Exception("sorry, reserved username: " + LEELOO_DALLAS)
|
||||
|
||||
promote = []
|
||||
demote = []
|
||||
for vol in vfs.all_vols.values():
|
||||
@@ -603,6 +821,56 @@ class AuthSrv(object):
|
||||
|
||||
vfs.histtab = {v.realpath: v.histpath for v in vfs.all_vols.values()}
|
||||
|
||||
for vol in vfs.all_vols.values():
|
||||
lim = Lim()
|
||||
use = False
|
||||
|
||||
if vol.flags.get("nosub"):
|
||||
use = True
|
||||
lim.nosub = True
|
||||
|
||||
v = vol.flags.get("sz")
|
||||
if v:
|
||||
use = True
|
||||
lim.smin, lim.smax = [unhumanize(x) for x in v.split("-")]
|
||||
|
||||
v = vol.flags.get("rotn")
|
||||
if v:
|
||||
use = True
|
||||
lim.rotn, lim.rotl = [int(x) for x in v.split(",")]
|
||||
|
||||
v = vol.flags.get("rotf")
|
||||
if v:
|
||||
use = True
|
||||
lim.set_rotf(v)
|
||||
|
||||
v = vol.flags.get("maxn")
|
||||
if v:
|
||||
use = True
|
||||
lim.nmax, lim.nwin = [int(x) for x in v.split(",")]
|
||||
|
||||
v = vol.flags.get("maxb")
|
||||
if v:
|
||||
use = True
|
||||
lim.bmax, lim.bwin = [unhumanize(x) for x in v.split(",")]
|
||||
|
||||
if use:
|
||||
vol.lim = lim
|
||||
|
||||
for vol in vfs.all_vols.values():
|
||||
fk = vol.flags.get("fk")
|
||||
if fk:
|
||||
vol.flags["fk"] = int(fk) if fk is not True else 8
|
||||
|
||||
for vol in vfs.all_vols.values():
|
||||
if "pk" in vol.flags and "gz" not in vol.flags and "xz" not in vol.flags:
|
||||
vol.flags["gz"] = False # def.pk
|
||||
|
||||
if "scan" in vol.flags:
|
||||
vol.flags["scan"] = int(vol.flags["scan"])
|
||||
elif self.args.re_maxage:
|
||||
vol.flags["scan"] = self.args.re_maxage
|
||||
|
||||
all_mte = {}
|
||||
errors = False
|
||||
for vol in vfs.all_vols.values():
|
||||
@@ -612,9 +880,14 @@ class AuthSrv(object):
|
||||
if self.args.e2d or "e2ds" in vol.flags:
|
||||
vol.flags["e2d"] = True
|
||||
|
||||
if self.args.no_hash:
|
||||
if "ehash" not in vol.flags:
|
||||
vol.flags["dhash"] = True
|
||||
for ga, vf in [["no_hash", "nohash"], ["no_idx", "noidx"]]:
|
||||
if vf in vol.flags:
|
||||
ptn = vol.flags.pop(vf)
|
||||
else:
|
||||
ptn = getattr(self.args, ga)
|
||||
|
||||
if ptn:
|
||||
vol.flags[vf] = re.compile(ptn)
|
||||
|
||||
for k in ["e2t", "e2ts", "e2tsr"]:
|
||||
if getattr(self.args, k):
|
||||
@@ -624,9 +897,15 @@ class AuthSrv(object):
|
||||
if k1 in vol.flags:
|
||||
vol.flags[k2] = True
|
||||
|
||||
# default tag-list if unset
|
||||
# default tag cfgs if unset
|
||||
if "mte" not in vol.flags:
|
||||
vol.flags["mte"] = self.args.mte
|
||||
elif vol.flags["mte"].startswith("+"):
|
||||
vol.flags["mte"] = ",".join(
|
||||
x for x in [self.args.mte, vol.flags["mte"][1:]] if x
|
||||
)
|
||||
if "mth" not in vol.flags:
|
||||
vol.flags["mth"] = self.args.mth
|
||||
|
||||
# append parsers from argv to volume-flags
|
||||
self._read_volflag(vol.flags, "mtp", self.args.mtp, True)
|
||||
@@ -706,6 +985,7 @@ class AuthSrv(object):
|
||||
[" write", "uwrite"],
|
||||
[" move", "umove"],
|
||||
["delete", "udel"],
|
||||
[" get", "uget"],
|
||||
]:
|
||||
u = list(sorted(getattr(v.axs, attr).keys()))
|
||||
u = ", ".join("\033[35meverybody\033[0m" if x == "*" else x for x in u)
|
||||
@@ -720,7 +1000,7 @@ class AuthSrv(object):
|
||||
v, _ = vfs.get("/", "*", False, True)
|
||||
if self.warn_anonwrite and os.getcwd() == v.realpath:
|
||||
self.warn_anonwrite = False
|
||||
msg = "anyone can read/write the current directory: {}"
|
||||
msg = "anyone can read/write the current directory: {}\n"
|
||||
self.log(msg.format(v.realpath), c=1)
|
||||
except Pebkac:
|
||||
self.warn_anonwrite = True
|
||||
@@ -773,10 +1053,10 @@ class AuthSrv(object):
|
||||
raise Exception("volume not found: " + v)
|
||||
|
||||
self.log({"users": users, "vols": vols, "flags": flags})
|
||||
m = "/{}: read({}) write({}) move({}) del({})"
|
||||
m = "/{}: read({}) write({}) move({}) del({}) get({})"
|
||||
for k, v in self.vfs.all_vols.items():
|
||||
vc = v.axs
|
||||
self.log(m.format(k, vc.uread, vc.uwrite, vc.umove, vc.udel))
|
||||
self.log(m.format(k, vc.uread, vc.uwrite, vc.umove, vc.udel, vc.uget))
|
||||
|
||||
flag_v = "v" in flags
|
||||
flag_ln = "ln" in flags
|
||||
@@ -790,7 +1070,7 @@ class AuthSrv(object):
|
||||
for u in users:
|
||||
self.log("checking /{} as {}".format(v, u))
|
||||
try:
|
||||
vn, _ = self.vfs.get(v, u, True, False, False, False)
|
||||
vn, _ = self.vfs.get(v, u, True, False, False, False, False)
|
||||
except:
|
||||
continue
|
||||
|
||||
|
||||
@@ -25,14 +25,14 @@ def lstat(p):
|
||||
def makedirs(name, mode=0o755, exist_ok=True):
|
||||
bname = fsenc(name)
|
||||
try:
|
||||
os.makedirs(bname, mode=mode)
|
||||
os.makedirs(bname, mode)
|
||||
except:
|
||||
if not exist_ok or not os.path.isdir(bname):
|
||||
raise
|
||||
|
||||
|
||||
def mkdir(p, mode=0o755):
|
||||
return os.mkdir(fsenc(p), mode=mode)
|
||||
return os.mkdir(fsenc(p), mode)
|
||||
|
||||
|
||||
def rename(src, dst):
|
||||
|
||||
@@ -21,6 +21,10 @@ def getsize(p):
|
||||
return os.path.getsize(fsenc(p))
|
||||
|
||||
|
||||
def isfile(p):
|
||||
return os.path.isfile(fsenc(p))
|
||||
|
||||
|
||||
def isdir(p):
|
||||
return os.path.isdir(fsenc(p))
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
from copyparty.authsrv import AuthSrv
|
||||
|
||||
import sys
|
||||
import signal
|
||||
@@ -9,6 +8,7 @@ import threading
|
||||
from .broker_util import ExceptionalQueue
|
||||
from .httpsrv import HttpSrv
|
||||
from .util import FAKE_MP
|
||||
from copyparty.authsrv import AuthSrv
|
||||
|
||||
|
||||
class MpWorker(object):
|
||||
|
||||
File diff suppressed because it is too large
@@ -32,9 +32,11 @@ class HttpConn(object):
|
||||
self.addr = addr
|
||||
self.hsrv = hsrv
|
||||
|
||||
self.mutex = hsrv.mutex
|
||||
self.args = hsrv.args
|
||||
self.asrv = hsrv.asrv
|
||||
self.cert_path = hsrv.cert_path
|
||||
self.u2fh = hsrv.u2fh
|
||||
|
||||
enth = HAVE_PIL and not self.args.no_thumb
|
||||
self.thumbcli = ThumbCli(hsrv.broker) if enth else None
|
||||
|
||||
@@ -27,7 +27,7 @@ except ImportError:
|
||||
sys.exit(1)
|
||||
|
||||
from .__init__ import E, PY2, MACOS
|
||||
from .util import spack, min_ex, start_stackmon, start_log_thrs
|
||||
from .util import FHC, spack, min_ex, start_stackmon, start_log_thrs
|
||||
from .bos import bos
|
||||
from .httpconn import HttpConn
|
||||
|
||||
@@ -50,7 +50,9 @@ class HttpSrv(object):
|
||||
self.log = broker.log
|
||||
self.asrv = broker.asrv
|
||||
|
||||
self.name = "httpsrv" + ("-n{}-i{:x}".format(nid, os.getpid()) if nid else "")
|
||||
nsuf = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
|
||||
|
||||
self.name = "hsrv" + nsuf
|
||||
self.mutex = threading.Lock()
|
||||
self.stopping = False
|
||||
|
||||
@@ -58,7 +60,9 @@ class HttpSrv(object):
|
||||
self.tp_ncli = 0 # fading
|
||||
self.tp_time = None # latest worker collect
|
||||
self.tp_q = None if self.args.no_htp else queue.LifoQueue()
|
||||
self.t_periodic = None
|
||||
|
||||
self.u2fh = FHC()
|
||||
self.srvs = []
|
||||
self.ncli = 0 # exact
|
||||
self.clients = {} # laggy
|
||||
@@ -82,11 +86,6 @@ class HttpSrv(object):
|
||||
if self.tp_q:
|
||||
self.start_threads(4)
|
||||
|
||||
name = "httpsrv-scaler" + ("-{}".format(nid) if nid else "")
|
||||
t = threading.Thread(target=self.thr_scaler, name=name)
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
if nid:
|
||||
if self.args.stackmon:
|
||||
start_stackmon(self.args.stackmon, nid)
|
||||
@@ -115,13 +114,19 @@ class HttpSrv(object):
|
||||
for _ in range(n):
|
||||
self.tp_q.put(None)
|
||||
|
||||
def thr_scaler(self):
|
||||
def periodic(self):
|
||||
while True:
|
||||
time.sleep(2 if self.tp_ncli else 30)
|
||||
time.sleep(2 if self.tp_ncli or self.ncli else 10)
|
||||
with self.mutex:
|
||||
self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
|
||||
if self.tp_nthr > self.tp_ncli + 8:
|
||||
self.stop_threads(4)
|
||||
self.u2fh.clean()
|
||||
if self.tp_q:
|
||||
self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
|
||||
if self.tp_nthr > self.tp_ncli + 8:
|
||||
self.stop_threads(4)
|
||||
|
||||
if not self.ncli and not self.u2fh.cache and self.tp_nthr <= 8:
|
||||
self.t_periodic = None
|
||||
return
|
||||
|
||||
def listen(self, sck, nlisteners):
|
||||
ip, port = sck.getsockname()
|
||||
@@ -141,7 +146,12 @@ class HttpSrv(object):
|
||||
fno = srv_sck.fileno()
|
||||
msg = "subscribed @ {}:{} f{}".format(ip, port, fno)
|
||||
self.log(self.name, msg)
|
||||
self.broker.put(False, "cb_httpsrv_up")
|
||||
|
||||
def fun():
|
||||
self.broker.put(False, "cb_httpsrv_up")
|
||||
|
||||
threading.Thread(target=fun).start()
|
||||
|
||||
while not self.stopping:
|
||||
if self.args.log_conn:
|
||||
self.log(self.name, "|%sC-ncli" % ("-" * 1,), c="1;30")
|
||||
@@ -174,25 +184,36 @@ class HttpSrv(object):
|
||||
now = time.time()
|
||||
|
||||
if now - (self.tp_time or now) > 300:
|
||||
m = "httpserver threadpool died: tpt {:.2f}, now {:.2f}, nthr {}, ncli {}"
|
||||
self.log(self.name, m.format(self.tp_time, now, self.tp_nthr, self.ncli), 1)
|
||||
self.tp_time = None
|
||||
self.tp_q = None
|
||||
|
||||
if self.tp_q:
|
||||
self.tp_q.put((sck, addr))
|
||||
with self.mutex:
|
||||
self.ncli += 1
|
||||
with self.mutex:
|
||||
self.ncli += 1
|
||||
if not self.t_periodic:
|
||||
name = "hsrv-pt"
|
||||
if self.nid:
|
||||
name += "-{}".format(self.nid)
|
||||
|
||||
t = threading.Thread(target=self.periodic, name=name)
|
||||
self.t_periodic = t
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
if self.tp_q:
|
||||
self.tp_time = self.tp_time or now
|
||||
self.tp_ncli = max(self.tp_ncli, self.ncli + 1)
|
||||
self.tp_ncli = max(self.tp_ncli, self.ncli)
|
||||
if self.tp_nthr < self.ncli + 4:
|
||||
self.start_threads(8)
|
||||
return
|
||||
|
||||
self.tp_q.put((sck, addr))
|
||||
return
|
||||
|
||||
if not self.args.no_htp:
|
||||
m = "looks like the httpserver threadpool died; please make an issue on github and tell me the story of how you pulled that off, thanks and dog bless\n"
|
||||
self.log(self.name, m, 1)
|
||||
|
||||
with self.mutex:
|
||||
self.ncli += 1
|
||||
|
||||
thr = threading.Thread(
|
||||
target=self.thr_client,
|
||||
args=(sck, addr),
|
||||
|
||||
@@ -413,6 +413,9 @@ class MTag(object):
|
||||
return r1
|
||||
|
||||
def get_mutagen(self, abspath):
|
||||
if not bos.path.isfile(abspath):
|
||||
return {}
|
||||
|
||||
import mutagen
|
||||
|
||||
try:
|
||||
@@ -434,7 +437,15 @@ class MTag(object):
|
||||
try:
|
||||
v = getattr(md.info, attr)
|
||||
except:
|
||||
continue
|
||||
if k != "ac":
|
||||
continue
|
||||
|
||||
try:
|
||||
v = str(md.info).split(".")[1]
|
||||
if v.startswith("ogg"):
|
||||
v = v[3:]
|
||||
except:
|
||||
continue
|
||||
|
||||
if not v:
|
||||
continue
|
||||
@@ -450,10 +461,16 @@ class MTag(object):
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_ffprobe(self, abspath):
|
||||
if not bos.path.isfile(abspath):
|
||||
return {}
|
||||
|
||||
ret, md = ffprobe(abspath)
|
||||
return self.normalize_tags(ret, md)
|
||||
|
||||
def get_bin(self, parsers, abspath):
|
||||
if not bos.path.isfile(abspath):
|
||||
return {}
|
||||
|
||||
pypath = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
|
||||
pypath = [str(pypath)] + [str(x) for x in sys.path if x]
|
||||
pypath = str(os.pathsep.join(pypath))
|
||||
@@ -463,7 +480,10 @@ class MTag(object):
|
||||
ret = {}
|
||||
for tagname, mp in parsers.items():
|
||||
try:
|
||||
cmd = [sys.executable, mp.bin, abspath]
|
||||
cmd = [mp.bin, abspath]
|
||||
if mp.bin.endswith(".py"):
|
||||
cmd = [sys.executable] + cmd
|
||||
|
||||
args = {"env": env, "timeout": mp.timeout}
|
||||
|
||||
if WINDOWS:
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
# coding: utf-8
|
||||
|
||||
"""
|
||||
This is Victor Stinner's pure-Python implementation of PEP 383: the "surrogateescape" error
|
||||
handler of Python 3.
|
||||
@@ -171,7 +173,7 @@ FS_ENCODING = sys.getfilesystemencoding()
|
||||
|
||||
if WINDOWS and not PY3:
|
||||
# py2 thinks win* is mbcs, probably a bug? anyways this works
|
||||
FS_ENCODING = 'utf-8'
|
||||
FS_ENCODING = "utf-8"
|
||||
|
||||
|
||||
# normalize the filesystem encoding name.
|
||||
|
||||
@@ -18,8 +18,7 @@ def errdesc(errors):
|
||||
tf_path = tf.name
|
||||
tf.write("\r\n".join(report).encode("utf-8", "replace"))
|
||||
|
||||
dt = datetime.utcfromtimestamp(time.time())
|
||||
dt = dt.strftime("%Y-%m%d-%H%M%S")
|
||||
dt = datetime.utcnow().strftime("%Y-%m%d-%H%M%S")
|
||||
|
||||
bos.chmod(tf_path, 0o444)
|
||||
return {
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
# coding: utf-8
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import re
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
@@ -14,7 +13,7 @@ from datetime import datetime, timedelta
|
||||
import calendar
|
||||
|
||||
from .__init__ import E, PY2, WINDOWS, ANYWIN, MACOS, VT100, unicode
|
||||
from .util import mp, start_log_thrs, start_stackmon, min_ex
|
||||
from .util import mp, start_log_thrs, start_stackmon, min_ex, ansi_re
|
||||
from .authsrv import AuthSrv
|
||||
from .tcpsrv import TcpSrv
|
||||
from .up2k import Up2k
|
||||
@@ -39,9 +38,9 @@ class SvcHub(object):
|
||||
self.stop_req = False
|
||||
self.stopping = False
|
||||
self.stop_cond = threading.Condition()
|
||||
self.retcode = 0
|
||||
self.httpsrv_up = 0
|
||||
|
||||
self.ansi_re = re.compile("\033\\[[^m]*m")
|
||||
self.log_mutex = threading.Lock()
|
||||
self.next_day = 0
|
||||
|
||||
@@ -55,6 +54,17 @@ class SvcHub(object):
|
||||
if args.log_thrs:
|
||||
start_log_thrs(self.log, args.log_thrs, 0)
|
||||
|
||||
if not ANYWIN and not args.use_fpool:
|
||||
args.no_fpool = True
|
||||
|
||||
if not args.no_fpool and args.j != 1:
|
||||
m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
|
||||
if ANYWIN:
|
||||
m = 'windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender "real-time protection" enabled, so you probably want to use -j 1 instead'
|
||||
args.no_fpool = True
|
||||
|
||||
self.log("root", m, c=3)
|
||||
|
||||
# initiate all services to manage
|
||||
self.asrv = AuthSrv(self.args, self.log)
|
||||
if args.ls:
|
||||
@@ -82,27 +92,36 @@ class SvcHub(object):
|
||||
if self.check_mp_enable():
|
||||
from .broker_mp import BrokerMp as Broker
|
||||
else:
|
||||
self.log("root", "cannot efficiently use multiple CPU cores")
|
||||
from .broker_thr import BrokerThr as Broker
|
||||
|
||||
self.broker = Broker(self)
|
||||
|
||||
def thr_httpsrv_up(self):
|
||||
time.sleep(5)
|
||||
failed = self.broker.num_workers - self.httpsrv_up
|
||||
expected = self.broker.num_workers * self.tcpsrv.nsrv
|
||||
failed = expected - self.httpsrv_up
|
||||
if not failed:
|
||||
return
|
||||
|
||||
if self.args.ign_ebind_all:
|
||||
return
|
||||
|
||||
if self.args.ign_ebind and self.tcpsrv.srv:
|
||||
return
|
||||
|
||||
m = "{}/{} workers failed to start"
|
||||
m = m.format(failed, self.broker.num_workers)
|
||||
m = m.format(failed, expected)
|
||||
self.log("root", m, 1)
|
||||
os._exit(1)
|
||||
|
||||
self.retcode = 1
|
||||
os.kill(os.getpid(), signal.SIGTERM)
|
||||
|
||||
def cb_httpsrv_up(self):
|
||||
self.httpsrv_up += 1
|
||||
if self.httpsrv_up != self.broker.num_workers:
|
||||
return
|
||||
|
||||
time.sleep(0.1) # purely cosmetic dw
|
||||
self.log("root", "workers OK\n")
|
||||
self.up2k.init_vols()
|
||||
|
||||
@@ -111,7 +130,7 @@ class SvcHub(object):
|
||||
thr.start()
|
||||
|
||||
def _logname(self):
|
||||
dt = datetime.utcfromtimestamp(time.time())
|
||||
dt = datetime.utcnow()
|
||||
fn = self.args.lo
|
||||
for fs in "YmdHMS":
|
||||
fs = "%" + fs
|
||||
@@ -206,6 +225,8 @@ class SvcHub(object):
|
||||
if self.stopping:
|
||||
return
|
||||
|
||||
# start_log_thrs(print, 0.1, 1)
|
||||
|
||||
self.stopping = True
|
||||
self.stop_req = True
|
||||
with self.stop_cond:
|
||||
@@ -231,7 +252,7 @@ class SvcHub(object):
|
||||
print("waiting for thumbsrv (10sec)...")
|
||||
|
||||
print("nailed it", end="")
|
||||
ret = 0
|
||||
ret = self.retcode
|
||||
finally:
|
||||
print("\033[0m")
|
||||
if self.logf:
|
||||
@@ -244,8 +265,7 @@ class SvcHub(object):
|
||||
return
|
||||
|
||||
with self.log_mutex:
|
||||
ts = datetime.utcfromtimestamp(time.time())
|
||||
ts = ts.strftime("%Y-%m%d-%H%M%S.%f")[:-3]
|
||||
ts = datetime.utcnow().strftime("%Y-%m%d-%H%M%S.%f")[:-3]
|
||||
self.logf.write("@{} [{}] {}\n".format(ts, src, msg))
|
||||
|
||||
now = time.time()
|
||||
@@ -257,7 +277,7 @@ class SvcHub(object):
|
||||
self.logf.close()
|
||||
self._setup_logfile("")
|
||||
|
||||
dt = datetime.utcfromtimestamp(time.time())
|
||||
dt = datetime.utcnow()
|
||||
|
||||
# unix timestamp of next 00:00:00 (leap-seconds safe)
|
||||
day_now = dt.day
|
||||
@@ -280,9 +300,9 @@ class SvcHub(object):
|
||||
if not VT100:
|
||||
fmt = "{} {:21} {}\n"
|
||||
if "\033" in msg:
|
||||
msg = self.ansi_re.sub("", msg)
|
||||
msg = ansi_re.sub("", msg)
|
||||
if "\033" in src:
|
||||
src = self.ansi_re.sub("", src)
|
||||
src = ansi_re.sub("", src)
|
||||
elif c:
|
||||
if isinstance(c, int):
|
||||
msg = "\033[3{}m{}".format(c, msg)
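The logging change above swaps the per-instance `self.ansi_re` for the shared `ansi_re` added to util.py, which also strips erase-line sequences. A small demonstration with the same pattern:

```python
import re

# same pattern as the shared ansi_re in util.py: strips both color (m)
# and erase-line (K) escape sequences before writing to non-VT100 logs
ansi_re = re.compile("\033\\[[^mK]*[mK]")

line = "\033[1;31merror:\033[0m disk full\033[K"
print(ansi_re.sub("", line))  # -> "error: disk full"
```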
@@ -329,10 +349,11 @@ class SvcHub(object):
|
||||
|
||||
def check_mp_enable(self):
|
||||
if self.args.j == 1:
|
||||
self.log("root", "multiprocessing disabled by argument -j 1;")
|
||||
self.log("svchub", "multiprocessing disabled by argument -j 1")
|
||||
return False
|
||||
|
||||
if mp.cpu_count() <= 1:
|
||||
self.log("svchub", "only one CPU detected; multiprocessing disabled")
|
||||
return False
|
||||
|
||||
try:
|
||||
@@ -347,6 +368,7 @@ class SvcHub(object):
|
||||
return True
|
||||
else:
|
||||
self.log("svchub", err)
|
||||
self.log("svchub", "cannot efficiently use multiple CPU cores")
|
||||
return False
|
||||
|
||||
def sd_notify(self):
|
||||
|
||||
@@ -21,6 +21,29 @@ class TcpSrv(object):
|
||||
|
||||
self.stopping = False
|
||||
|
||||
self.srv = []
|
||||
self.nsrv = 0
|
||||
ok = {}
|
||||
for ip in self.args.i:
|
||||
ok[ip] = []
|
||||
for port in self.args.p:
|
||||
self.nsrv += 1
|
||||
try:
|
||||
self._listen(ip, port)
|
||||
ok[ip].append(port)
|
||||
except Exception as ex:
|
||||
if self.args.ign_ebind or self.args.ign_ebind_all:
|
||||
m = "could not listen on {}:{}: {}"
|
||||
self.log("tcpsrv", m.format(ip, port, ex), c=3)
|
||||
else:
|
||||
raise
|
||||
|
||||
if not self.srv and not self.args.ign_ebind_all:
|
||||
raise Exception("could not listen on any of the given interfaces")
|
||||
|
||||
if self.nsrv != len(self.srv):
|
||||
self.log("tcpsrv", "")
|
||||
|
||||
ip = "127.0.0.1"
|
||||
eps = {ip: "local only"}
|
||||
nonlocals = [x for x in self.args.i if x != ip]
|
||||
@@ -34,6 +57,9 @@ class TcpSrv(object):
|
||||
m = "available @ http://{}:{}/ (\033[33m{}\033[0m)"
|
||||
for ip, desc in sorted(eps.items(), key=lambda x: x[1]):
|
||||
for port in sorted(self.args.p):
|
||||
if port not in ok.get(ip, ok.get("0.0.0.0", [])):
|
||||
continue
|
||||
|
||||
msgs.append(m.format(ip, port, desc))
|
||||
|
||||
if msgs:
|
||||
@@ -41,18 +67,13 @@ class TcpSrv(object):
|
||||
for m in msgs:
|
||||
self.log("tcpsrv", m)
|
||||
|
||||
self.srv = []
|
||||
for ip in self.args.i:
|
||||
for port in self.args.p:
|
||||
self.srv.append(self._listen(ip, port))
|
||||
|
||||
def _listen(self, ip, port):
|
||||
srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||
srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||
srv.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
|
||||
try:
|
||||
srv.bind((ip, port))
|
||||
return srv
|
||||
self.srv.append(srv)
|
||||
except (OSError, socket.error) as ex:
|
||||
if ex.errno in [98, 48]:
|
||||
e = "\033[1;31mport {} is busy on interface {}\033[0m".format(port, ip)
|
||||
@@ -177,7 +198,7 @@ class TcpSrv(object):
|
||||
eps = self.ips_linux()
|
||||
|
||||
if "0.0.0.0" not in listen_ips:
|
||||
eps = {k: v for k, v in eps if k in listen_ips}
|
||||
eps = {k: v for k, v in eps.items() if k in listen_ips}
|
||||
|
||||
default_route = None
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
|
||||
@@ -21,7 +21,7 @@ HAVE_AVIF = False
|
||||
HAVE_WEBP = False
|
||||
|
||||
try:
|
||||
from PIL import Image, ImageOps
|
||||
from PIL import Image, ImageOps, ExifTags
|
||||
|
||||
HAVE_PIL = True
|
||||
try:
|
||||
@@ -105,7 +105,8 @@ class ThumbSrv(object):
|
||||
self.mutex = threading.Lock()
|
||||
self.busy = {}
|
||||
self.stopping = False
|
||||
self.nthr = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
self.nthr = max(1, self.args.th_mt)
|
||||
|
||||
self.q = Queue(self.nthr * 4)
|
||||
for n in range(self.nthr):
|
||||
t = threading.Thread(
|
||||
@@ -127,7 +128,7 @@ class ThumbSrv(object):
|
||||
self.log(msg, c=3)
|
||||
|
||||
if self.args.th_clean:
|
||||
t = threading.Thread(target=self.cleaner, name="thumb-cleaner")
|
||||
t = threading.Thread(target=self.cleaner, name="thumb.cln")
|
||||
t.daemon = True
|
||||
t.start()
|
||||
|
||||
@@ -205,8 +206,8 @@ class ThumbSrv(object):
|
||||
try:
|
||||
fun(abspath, tpath)
|
||||
except:
|
||||
msg = "{} failed on {}\n{}"
|
||||
self.log(msg.format(fun.__name__, abspath, min_ex()), 3)
|
||||
msg = "{} could not create thumbnail of {}\n{}"
|
||||
self.log(msg.format(fun.__name__, abspath, min_ex()), "1;30")
|
||||
with open(tpath, "wb") as _:
|
||||
pass
|
||||
|
||||
@@ -221,21 +222,38 @@ class ThumbSrv(object):
|
||||
with self.mutex:
|
||||
self.nthr -= 1
|
||||
|
||||
    def fancy_pillow(self, im):
        # exif_transpose is expensive (loads full image + unconditional copy)
        r = max(*self.res) * 2
        im.thumbnail((r, r), resample=Image.LANCZOS)
        try:
            k = next(k for k, v in ExifTags.TAGS.items() if v == "Orientation")
            exif = im.getexif()
            rot = int(exif[k])
            del exif[k]
        except:
            rot = 1

        rots = {8: Image.ROTATE_90, 3: Image.ROTATE_180, 6: Image.ROTATE_270}
        if rot in rots:
            im = im.transpose(rots[rot])

        if self.args.th_no_crop:
            im.thumbnail(self.res, resample=Image.LANCZOS)
        else:
            iw, ih = im.size
            dw, dh = self.res
            res = (min(iw, dw), min(ih, dh))
            im = ImageOps.fit(im, res, method=Image.LANCZOS)

        return im

def conv_pil(self, abspath, tpath):
|
||||
with Image.open(fsenc(abspath)) as im:
|
||||
crop = not self.args.th_no_crop
|
||||
res2 = self.res
|
||||
if crop:
|
||||
res2 = (res2[0] * 2, res2[1] * 2)
|
||||
|
||||
try:
|
||||
im.thumbnail(res2, resample=Image.LANCZOS)
|
||||
if crop:
|
||||
iw, ih = im.size
|
||||
dw, dh = self.res
|
||||
res = (min(iw, dw), min(ih, dh))
|
||||
im = ImageOps.fit(im, res, method=Image.LANCZOS)
|
||||
except:
|
||||
im = self.fancy_pillow(im)
|
||||
except Exception as ex:
|
||||
self.log("fancy_pillow {}".format(ex), "1;30")
|
||||
im.thumbnail(self.res)
|
||||
|
||||
fmts = ["RGB", "L"]
|
||||
@@ -250,13 +268,14 @@ class ThumbSrv(object):
|
||||
fmts += ["RGBA", "LA"]
|
||||
args["method"] = 6
|
||||
else:
|
||||
pass # default q = 75
|
||||
# default q = 75
|
||||
args["progressive"] = True
|
||||
|
||||
if im.mode not in fmts:
|
||||
# print("conv {}".format(im.mode))
|
||||
im = im.convert("RGB")
|
||||
|
||||
im.save(tpath, quality=40, method=6)
|
||||
im.save(tpath, **args)
|
||||
|
||||
def conv_ffmpeg(self, abspath, tpath):
|
||||
ret, _ = ffprobe(abspath)
|
||||
@@ -286,8 +305,10 @@ class ThumbSrv(object):
|
||||
cmd += seek
|
||||
cmd += [
|
||||
b"-i", fsenc(abspath),
|
||||
b"-map", b"0:v:0",
|
||||
b"-vf", scale,
|
||||
b"-vframes", b"1",
|
||||
b"-frames:v", b"1",
|
||||
b"-metadata:s:v:0", b"rotate=0",
|
||||
]
|
||||
# fmt: on
|
||||
|
||||
@@ -305,11 +326,13 @@ class ThumbSrv(object):
|
||||
]
|
||||
|
||||
cmd += [fsenc(tpath)]
|
||||
# self.log((b" ".join(cmd)).decode("utf-8"))
|
||||
|
||||
ret, sout, serr = runcmd(cmd)
|
||||
if ret != 0:
|
||||
msg = ["ff: {}".format(x) for x in serr.split("\n")]
|
||||
self.log("FFmpeg failed:\n" + "\n".join(msg), c="1;30")
|
||||
m = "FFmpeg failed (probably a corrupt video file):\n"
|
||||
m += "\n".join(["ff: {}".format(x) for x in serr.split("\n")])
|
||||
self.log(m, c="1;30")
|
||||
raise sp.CalledProcessError(ret, (cmd[0], b"...", cmd[-1]))
|
||||
|
||||
def poke(self, tdir):
|
||||
|
||||
@@ -6,9 +6,10 @@ import os
|
||||
import time
|
||||
import threading
|
||||
from datetime import datetime
|
||||
from operator import itemgetter
|
||||
|
||||
from .__init__ import unicode
|
||||
from .util import s3dec, Pebkac, min_ex
|
||||
from .__init__ import ANYWIN, unicode
|
||||
from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey, quotep
|
||||
from .bos import bos
|
||||
from .up2k import up2k_wark_from_hashlist
|
||||
|
||||
@@ -88,7 +89,7 @@ class U2idx(object):
|
||||
is_date = False
|
||||
kw_key = ["(", ")", "and ", "or ", "not "]
|
||||
kw_val = ["==", "=", "!=", ">", ">=", "<", "<=", "like "]
|
||||
ptn_mt = re.compile(r"^\.?[a-z]+$")
|
||||
ptn_mt = re.compile(r"^\.?[a-z_-]+$")
|
||||
mt_ctr = 0
|
||||
mt_keycmp = "substr(up.w,1,16)"
|
||||
mt_keycmp2 = None
|
||||
@@ -242,6 +243,7 @@ class U2idx(object):
|
||||
self.active_cur = cur
|
||||
|
||||
sret = []
|
||||
fk = flags.get("fk")
|
||||
c = cur.execute(q, v)
|
||||
for hit in c:
|
||||
w, ts, sz, rd, fn, ip, at = hit
|
||||
@@ -252,7 +254,23 @@ class U2idx(object):
|
||||
if rd.startswith("//") or fn.startswith("//"):
|
||||
rd, fn = s3dec(rd, fn)
|
||||
|
||||
rp = "/".join([x for x in [vtop, rd, fn] if x])
|
||||
if not fk:
|
||||
suf = ""
|
||||
else:
|
||||
try:
|
||||
ap = absreal(os.path.join(ptop, rd, fn))
|
||||
inf = bos.stat(ap)
|
||||
except:
|
||||
continue
|
||||
|
||||
suf = (
|
||||
"?k="
|
||||
+ gen_filekey(
|
||||
self.args.fk_salt, ap, sz, 0 if ANYWIN else inf.st_ino
|
||||
)[:fk]
|
||||
)
|
||||
|
||||
rp = quotep("/".join([x for x in [vtop, rd, fn] if x])) + suf
|
||||
sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
|
||||
|
||||
for hit in sret:
|
||||
@@ -275,9 +293,13 @@ class U2idx(object):
|
||||
# undupe hits from multiple metadata keys
|
||||
if len(ret) > 1:
|
||||
ret = [ret[0]] + [
|
||||
y for x, y in zip(ret[:-1], ret[1:]) if x["rp"] != y["rp"]
|
||||
y
|
||||
for x, y in zip(ret[:-1], ret[1:])
|
||||
if x["rp"].split("?")[0] != y["rp"].split("?")[0]
|
||||
]
|
||||
|
||||
ret.sort(key=itemgetter("rp"))
|
||||
|
||||
return ret, list(taglist.keys())
|
||||
|
||||
def terminator(self, identifier, done_flag):
|
||||
|
||||
@@ -27,7 +27,10 @@ from .util import (
|
||||
sanitize_fn,
|
||||
ren_open,
|
||||
atomic_move,
|
||||
quotep,
|
||||
vsplit,
|
||||
w8b64enc,
|
||||
w8b64dec,
|
||||
s3enc,
|
||||
s3dec,
|
||||
rmdirs,
|
||||
@@ -36,7 +39,7 @@ from .util import (
|
||||
min_ex,
|
||||
)
|
||||
from .bos import bos
|
||||
from .authsrv import AuthSrv
|
||||
from .authsrv import AuthSrv, LEELOO_DALLAS
|
||||
from .mtag import MTag, MParser
|
||||
|
||||
try:
|
||||
@@ -60,12 +63,14 @@ class Up2k(object):
|
||||
|
||||
# state
|
||||
self.mutex = threading.Lock()
|
||||
self.rescan_cond = threading.Condition()
|
||||
self.hashq = Queue()
|
||||
self.tagq = Queue()
|
||||
self.n_hashq = 0
|
||||
self.n_tagq = 0
|
||||
self.volstate = {}
|
||||
self.need_rescan = {}
|
||||
self.dupesched = {}
|
||||
self.registry = {}
|
||||
self.entags = {}
|
||||
self.flags = {}
|
||||
@@ -127,9 +132,11 @@ class Up2k(object):
|
||||
thr.start()
|
||||
|
||||
if self.mtag:
|
||||
thr = threading.Thread(target=self._tagger, name="up2k-tagger")
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
for n in range(max(1, self.args.mtag_mt)):
|
||||
name = "tagger-{}".format(n)
|
||||
thr = threading.Thread(target=self._tagger, name=name)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
thr = threading.Thread(target=self._run_all_mtp, name="up2k-mtp-init")
|
||||
thr.daemon = True
|
||||
@@ -176,36 +183,98 @@ class Up2k(object):
|
||||
return None
|
||||
|
||||
def _sched_rescan(self):
|
||||
maxage = self.args.re_maxage
|
||||
volage = {}
|
||||
cooldown = 0
|
||||
timeout = time.time() + 3
|
||||
while True:
|
||||
time.sleep(self.args.re_int)
|
||||
timeout = max(timeout, cooldown)
|
||||
wait = max(0.1, timeout + 0.1 - time.time())
|
||||
with self.rescan_cond:
|
||||
self.rescan_cond.wait(wait)
|
||||
|
||||
now = time.time()
|
||||
vpaths = list(sorted(self.asrv.vfs.all_vols.keys()))
|
||||
if now < cooldown:
|
||||
continue
|
||||
|
||||
timeout = now + 9001
|
||||
with self.mutex:
|
||||
if maxage:
|
||||
for vp in vpaths:
|
||||
if vp not in volage:
|
||||
volage[vp] = now
|
||||
for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
|
||||
maxage = vol.flags.get("scan")
|
||||
if not maxage:
|
||||
continue
|
||||
|
||||
if now - volage[vp] >= maxage:
|
||||
self.need_rescan[vp] = 1
|
||||
if vp not in volage:
|
||||
volage[vp] = now
|
||||
|
||||
if not self.need_rescan:
|
||||
continue
|
||||
deadline = volage[vp] + maxage
|
||||
if deadline <= now:
|
||||
self.need_rescan[vp] = 1
|
||||
|
||||
timeout = min(timeout, deadline)
|
||||
|
||||
vols = list(sorted(self.need_rescan.keys()))
|
||||
self.need_rescan = {}
|
||||
|
||||
err = self.rescan(self.asrv.vfs.all_vols, vols)
|
||||
if err:
|
||||
for v in vols:
|
||||
self.need_rescan[v] = True
|
||||
if vols:
|
||||
cooldown = now + 10
|
||||
err = self.rescan(self.asrv.vfs.all_vols, vols)
|
||||
if err:
|
||||
for v in vols:
|
||||
self.need_rescan[v] = True
|
||||
|
||||
continue
|
||||
|
||||
for v in vols:
|
||||
volage[v] = now
|
||||
|
||||
if self.args.no_lifetime:
|
||||
continue
|
||||
|
||||
for v in vols:
|
||||
volage[v] = now
|
||||
for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
|
||||
lifetime = vol.flags.get("lifetime")
|
||||
if not lifetime:
|
||||
continue
|
||||
|
||||
cur = self.cur.get(vol.realpath)
|
||||
if not cur:
|
||||
continue
|
||||
|
||||
lifetime = int(lifetime)
|
||||
timeout = min(timeout, now + lifetime)
|
||||
|
||||
nrm = 0
|
||||
deadline = time.time() - lifetime
|
||||
q = "select rd, fn from up where at > 0 and at < ? limit 100"
|
||||
while True:
|
||||
with self.mutex:
|
||||
hits = cur.execute(q, (deadline,)).fetchall()
|
||||
|
||||
if not hits:
|
||||
break
|
||||
|
||||
for rd, fn in hits:
|
||||
if rd.startswith("//") or fn.startswith("//"):
|
||||
rd, fn = s3dec(rd, fn)
|
||||
|
||||
fvp = "{}/{}".format(rd, fn).strip("/")
|
||||
if vp:
|
||||
fvp = "{}/{}".format(vp, fvp)
|
||||
|
||||
self._handle_rm(LEELOO_DALLAS, None, fvp)
|
||||
nrm += 1
|
||||
|
||||
if nrm:
|
||||
self.log("{} files graduated in {}".format(nrm, vp))
|
||||
|
||||
if timeout < 10:
|
||||
continue
|
||||
|
||||
q = "select at from up where at > 0 order by at limit 1"
|
||||
with self.mutex:
|
||||
hits = cur.execute(q).fetchone()
|
||||
|
||||
if hits:
|
||||
timeout = min(timeout, now + lifetime - (now - hits[0]))
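The `lifetime` volflag handling above expires uploads in batches of 100 rows and schedules the next wakeup from the oldest remaining upload. A simplified sketch of the same idea against the `up` table; it skips the `//`-encoded filename case and the unpost/rm machinery the real code goes through, so treat it as an assumption-laden outline:

```python
import time

def expire_old_uploads(cur, lifetime, delete_cb):
    """hypothetical helper: drop files uploaded more than <lifetime> seconds ago"""
    now = time.time()
    deadline = now - lifetime
    while True:
        q = "select rd, fn from up where at > 0 and at < ? limit 100"
        hits = cur.execute(q, (deadline,)).fetchall()
        if not hits:
            break

        for rd, fn in hits:
            delete_cb(rd, fn)  # unlink the file on disk
            cur.execute("delete from up where rd = ? and fn = ?", (rd, fn))

        cur.connection.commit()

    # next wakeup: when the oldest remaining upload will cross the deadline
    nxt = cur.execute("select at from up where at > 0 order by at limit 1").fetchone()
    return nxt[0] + lifetime if nxt else None
```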
def _vis_job_progress(self, job):
|
||||
perc = 100 - (len(job["need"]) * 100.0 / len(job["hash"]))
|
||||
@@ -427,7 +496,8 @@ class Up2k(object):
|
||||
def _build_file_index(self, vol, all_vols):
|
||||
do_vac = False
|
||||
top = vol.realpath
|
||||
nohash = "dhash" in vol.flags
|
||||
rei = vol.flags.get("noidx")
|
||||
reh = vol.flags.get("nohash")
|
||||
with self.mutex:
|
||||
cur, _ = self.register_vpath(top, vol.flags)
|
||||
|
||||
@@ -442,38 +512,55 @@ class Up2k(object):
|
||||
if WINDOWS:
|
||||
excl = [x.replace("/", "\\") for x in excl]
|
||||
|
||||
n_add = self._build_dir(dbw, top, set(excl), top, nohash, [])
|
||||
n_rm = self._drop_lost(dbw[0], top)
|
||||
n_add = n_rm = 0
|
||||
try:
|
||||
n_add = self._build_dir(dbw, top, set(excl), top, rei, reh, [])
|
||||
n_rm = self._drop_lost(dbw[0], top)
|
||||
except:
|
||||
m = "failed to index volume [{}]:\n{}"
|
||||
self.log(m.format(top, min_ex()), c=1)
|
||||
|
||||
if dbw[1]:
|
||||
self.log("commit {} new files".format(dbw[1]))
|
||||
dbw[0].connection.commit()
|
||||
|
||||
dbw[0].connection.commit()
|
||||
|
||||
return True, n_add or n_rm or do_vac
|
||||
|
||||
def _build_dir(self, dbw, top, excl, cdir, nohash, seen):
|
||||
def _build_dir(self, dbw, top, excl, cdir, rei, reh, seen):
|
||||
rcdir = absreal(cdir) # a bit expensive but worth
|
||||
if rcdir in seen:
|
||||
m = "bailing from symlink loop,\n prev: {}\n curr: {}\n from: {}"
|
||||
self.log(m.format(seen[-1], rcdir, cdir), 3)
|
||||
return 0
|
||||
|
||||
seen = seen + [cdir]
|
||||
seen = seen + [rcdir]
|
||||
self.pp.msg = "a{} {}".format(self.pp.n, cdir)
|
||||
histpath = self.asrv.vfs.histtab[top]
|
||||
ret = 0
|
||||
seen_files = {}
|
||||
g = statdir(self.log_func, not self.args.no_scandir, False, cdir)
|
||||
for iname, inf in sorted(g):
|
||||
abspath = os.path.join(cdir, iname)
|
||||
if rei and rei.search(abspath):
|
||||
continue
|
||||
|
||||
nohash = reh.search(abspath) if reh else False
|
||||
lmod = int(inf.st_mtime)
|
||||
sz = inf.st_size
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
if abspath in excl or abspath == histpath:
|
||||
continue
|
||||
# self.log(" dir: {}".format(abspath))
|
||||
ret += self._build_dir(dbw, top, excl, abspath, nohash, seen)
|
||||
try:
|
||||
ret += self._build_dir(dbw, top, excl, abspath, rei, reh, seen)
|
||||
except:
|
||||
m = "failed to index subdir [{}]:\n{}"
|
||||
self.log(m.format(abspath, min_ex()), c=1)
|
||||
else:
|
||||
# self.log("file: {}".format(abspath))
|
||||
rp = abspath[len(top) + 1 :]
|
||||
seen_files[iname] = 1
|
||||
rp = abspath[len(top) :].lstrip("/")
|
||||
if WINDOWS:
|
||||
rp = rp.replace("\\", "/").strip("/")
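Earlier in this hunk, `_build_dir` starts resolving each directory with `absreal` and bails if the real path was already visited, so symlink loops no longer recurse forever. A minimal standalone sketch of that guard, using stdlib `os.path.realpath` and `os.listdir` instead of copyparty's helpers:

```python
import os

def walk(cdir, seen=()):
    # resolve to the real path and refuse to descend into a dir already seen,
    # which is what stops "ln -s .. loop" style recursion during indexing
    rcdir = os.path.realpath(cdir)
    if rcdir in seen:
        print("bailing from symlink loop: {} -> {}".format(cdir, rcdir))
        return

    seen = seen + (rcdir,)
    for name in sorted(os.listdir(cdir)):
        p = os.path.join(cdir, name)
        if os.path.isdir(p):
            walk(p, seen)
        else:
            print(p)
```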
@@ -531,34 +618,65 @@ class Up2k(object):
|
||||
dbw[0].connection.commit()
|
||||
dbw[1] = 0
|
||||
dbw[2] = time.time()
|
||||
|
||||
# drop missing files
|
||||
rd = cdir[len(top) + 1 :].strip("/")
|
||||
if WINDOWS:
|
||||
rd = rd.replace("\\", "/").strip("/")
|
||||
|
||||
q = "select fn from up where rd = ?"
|
||||
try:
|
||||
c = dbw[0].execute(q, (rd,))
|
||||
except:
|
||||
c = dbw[0].execute(q, ("//" + w8b64enc(rd),))
|
||||
|
||||
hits = [w8b64dec(x[2:]) if x.startswith("//") else x for (x,) in c]
|
||||
rm_files = [x for x in hits if x not in seen_files]
|
||||
n_rm = len(rm_files)
|
||||
for fn in rm_files:
|
||||
self.db_rm(dbw[0], rd, fn)
|
||||
|
||||
if n_rm:
|
||||
self.log("forgot {} deleted files".format(n_rm))
|
||||
|
||||
return ret
|
||||
|
||||
def _drop_lost(self, cur, top):
|
||||
rm = []
|
||||
n_rm = 0
|
||||
nchecked = 0
|
||||
nfiles = next(cur.execute("select count(w) from up"))[0]
|
||||
c = cur.execute("select rd, fn from up")
|
||||
for drd, dfn in c:
|
||||
# `_build_dir` did all the files, now do dirs
|
||||
ndirs = next(cur.execute("select count(distinct rd) from up"))[0]
|
||||
c = cur.execute("select distinct rd from up order by rd desc")
|
||||
for (drd,) in c:
|
||||
nchecked += 1
|
||||
if drd.startswith("//") or dfn.startswith("//"):
|
||||
drd, dfn = s3dec(drd, dfn)
|
||||
if drd.startswith("//"):
|
||||
rd = w8b64dec(drd[2:])
|
||||
else:
|
||||
rd = drd
|
||||
|
||||
abspath = os.path.join(top, drd, dfn)
|
||||
# almost zero overhead dw
|
||||
self.pp.msg = "b{} {}".format(nfiles - nchecked, abspath)
|
||||
abspath = os.path.join(top, rd)
|
||||
self.pp.msg = "b{} {}".format(ndirs - nchecked, abspath)
|
||||
try:
|
||||
if not bos.path.exists(abspath):
|
||||
rm.append([drd, dfn])
|
||||
except Exception as ex:
|
||||
self.log("stat-rm: {} @ [{}]".format(repr(ex), abspath))
|
||||
if os.path.isdir(abspath):
|
||||
continue
|
||||
except:
|
||||
pass
|
||||
|
||||
if rm:
|
||||
self.log("forgetting {} deleted files".format(len(rm)))
|
||||
for rd, fn in rm:
|
||||
# self.log("{} / {}".format(rd, fn))
|
||||
self.db_rm(cur, rd, fn)
|
||||
rm.append(drd)
|
||||
|
||||
return len(rm)
|
||||
if not rm:
|
||||
return 0
|
||||
|
||||
q = "select count(w) from up where rd = ?"
|
||||
for rd in rm:
|
||||
n_rm += next(cur.execute(q, (rd,)))[0]
|
||||
|
||||
self.log("forgetting {} deleted dirs, {} files".format(len(rm), n_rm))
|
||||
for rd in rm:
|
||||
cur.execute("delete from up where rd = ?", (rd,))
|
||||
|
||||
return n_rm
|
||||
|
||||
def _build_tags_index(self, vol):
|
||||
ptop = vol.realpath
|
||||
@@ -612,7 +730,7 @@ class Up2k(object):
|
||||
return n_add, n_rm, False
|
||||
|
||||
mpool = False
|
||||
if self.mtag.prefer_mt and not self.args.no_mtag_mt:
|
||||
if self.mtag.prefer_mt and self.args.mtag_mt > 1:
|
||||
mpool = self._start_mpool()
|
||||
|
||||
conn = sqlite3.connect(db_path, timeout=15)
|
||||
@@ -803,6 +921,7 @@ class Up2k(object):
|
||||
|
||||
cur.connection.commit()
|
||||
if n_done:
|
||||
self.log("mtp: scanned {} files in {}".format(n_done, ptop), c=6)
|
||||
cur.execute("vacuum")
|
||||
|
||||
wcur.close()
|
||||
@@ -844,9 +963,7 @@ class Up2k(object):
|
||||
def _start_mpool(self):
|
||||
# mp.pool.ThreadPool and concurrent.futures.ThreadPoolExecutor
|
||||
# both do crazy runahead so lets reinvent another wheel
|
||||
nw = os.cpu_count() if hasattr(os, "cpu_count") else 4
|
||||
if self.args.no_mtag_mt:
|
||||
nw = 1
|
||||
nw = max(1, self.args.mtag_mt)
|
||||
|
||||
if self.pending_tags is None:
|
||||
self.log("using {}x {}".format(nw, self.mtag.backend))
|
||||
@@ -904,7 +1021,15 @@ class Up2k(object):
|
||||
|
||||
def _tag_file(self, write_cur, entags, wark, abspath, tags=None):
|
||||
if tags is None:
|
||||
tags = self.mtag.get(abspath)
|
||||
try:
|
||||
tags = self.mtag.get(abspath)
|
||||
except Exception as ex:
|
||||
msg = "failed to read tags from {}:\n{}"
|
||||
self.log(msg.format(abspath, ex), c=3)
|
||||
return 0
|
||||
|
||||
if not bos.path.isfile(abspath):
|
||||
return 0
|
||||
|
||||
if entags:
|
||||
tags = {k: v for k, v in tags.items() if k in entags}
|
||||
@@ -1076,9 +1201,18 @@ class Up2k(object):
|
||||
if dp_dir.startswith("//") or dp_fn.startswith("//"):
|
||||
dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
|
||||
|
||||
if job and (dp_dir != cj["prel"] or dp_fn != cj["name"]):
|
||||
continue
|
||||
|
||||
dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
|
||||
# relying on path.exists to return false on broken symlinks
|
||||
if bos.path.exists(dp_abs):
|
||||
# relying on this to fail on broken symlinks
|
||||
try:
|
||||
sz = bos.path.getsize(dp_abs)
|
||||
except:
|
||||
sz = 0
|
||||
|
||||
if sz:
|
||||
# self.log("--- " + wark + " " + dp_abs + " found file", 4)
|
||||
job = {
|
||||
"name": dp_fn,
|
||||
"prel": dp_dir,
|
||||
@@ -1091,9 +1225,9 @@ class Up2k(object):
|
||||
"hash": [],
|
||||
"need": [],
|
||||
}
|
||||
break
|
||||
|
||||
if job and wark in reg:
|
||||
# self.log("pop " + wark + " " + job["name"] + " handle_json db", 4)
|
||||
del reg[wark]
|
||||
|
||||
if job or wark in reg:
|
||||
@@ -1121,11 +1255,20 @@ class Up2k(object):
|
||||
if job["need"]:
|
||||
self.log("unfinished:\n {0}\n {1}".format(src, dst))
|
||||
err = "partial upload exists at a different location; please resume uploading here instead:\n"
|
||||
err += "/" + vsrc + " "
|
||||
err += "/" + quotep(vsrc) + " "
|
||||
|
||||
dupe = [cj["prel"], cj["name"]]
|
||||
try:
|
||||
self.dupesched[src].append(dupe)
|
||||
except:
|
||||
self.dupesched[src] = [dupe]
|
||||
|
||||
raise Pebkac(400, err)
|
||||
|
||||
elif "nodupe" in self.flags[job["ptop"]]:
|
||||
self.log("dupe-reject:\n {0}\n {1}".format(src, dst))
|
||||
err = "upload rejected, file already exists:\n/" + vsrc + " "
|
||||
err = "upload rejected, file already exists:\n"
|
||||
err += "/" + quotep(vsrc) + " "
|
||||
raise Pebkac(400, err)
|
||||
else:
|
||||
# symlink to the client-provided name,
|
||||
@@ -1148,6 +1291,16 @@ class Up2k(object):
|
||||
cur.connection.commit()
|
||||
|
||||
if not job:
|
||||
vfs = self.asrv.vfs.all_vols[cj["vtop"]]
|
||||
if vfs.lim:
|
||||
ap1 = os.path.join(cj["ptop"], cj["prel"])
|
||||
ap2, cj["prel"] = vfs.lim.all(
|
||||
cj["addr"], cj["prel"], cj["size"], ap1
|
||||
)
|
||||
bos.makedirs(ap2)
|
||||
vfs.lim.nup(cj["addr"])
|
||||
vfs.lim.bup(cj["addr"], cj["size"])
|
||||
|
||||
job = {
|
||||
"wark": wark,
|
||||
"t0": now,
|
||||
@@ -1178,8 +1331,12 @@ class Up2k(object):
|
||||
|
||||
self._new_upload(job)
|
||||
|
||||
purl = "{}/{}".format(job["vtop"], job["prel"]).strip("/")
|
||||
purl = "/{}/".format(purl) if purl else "/"
|
||||
|
||||
return {
|
||||
"name": job["name"],
|
||||
"purl": purl,
|
||||
"size": job["size"],
|
||||
"lmod": job["lmod"],
|
||||
"hash": job["need"],
|
||||
@@ -1192,7 +1349,7 @@ class Up2k(object):
|
||||
|
||||
# TODO broker which avoid this race and
|
||||
# provides a new filename if taken (same as bup)
|
||||
suffix = ".{:.6f}-{}".format(ts, ip)
|
||||
suffix = "-{:.6f}-{}".format(ts, ip.replace(":", "."))
|
||||
with ren_open(fname, "wb", fdir=fdir, suffix=suffix) as f:
|
||||
return f["orz"][1]
|
||||
|
||||
@@ -1204,6 +1361,9 @@ class Up2k(object):
|
||||
return
|
||||
|
||||
try:
|
||||
if self.args.no_symlink:
|
||||
raise Exception("disabled in config")
|
||||
|
||||
lsrc = src
|
||||
ldst = dst
|
||||
fs1 = bos.stat(os.path.dirname(src)).st_dev
|
||||
@@ -1230,7 +1390,7 @@ class Up2k(object):
|
||||
hops = len(ndst[nc:]) - 1
|
||||
lsrc = "../" * hops + "/".join(lsrc)
|
||||
os.symlink(fsenc(lsrc), fsenc(ldst))
|
||||
except (AttributeError, OSError) as ex:
|
||||
except Exception as ex:
|
||||
self.log("cannot symlink; creating copy: " + repr(ex))
|
||||
shutil.copy2(fsenc(src), fsenc(dst))
|
||||
|
||||
@@ -1284,20 +1444,57 @@ class Up2k(object):
|
||||
# del self.registry[ptop][wark]
|
||||
return ret, dst
|
||||
|
||||
atomic_move(src, dst)
|
||||
|
||||
if ANYWIN:
|
||||
a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
|
||||
self.lastmod_q.put(a)
|
||||
|
||||
a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
|
||||
a += [job.get("at") or time.time()]
|
||||
if self.idx_wark(*a):
|
||||
del self.registry[ptop][wark]
|
||||
# in-memory registry is reserved for unfinished uploads
|
||||
# windows cant rename open files
|
||||
if not ANYWIN or src == dst:
|
||||
self._finish_upload(ptop, wark)
|
||||
|
||||
return ret, dst
|
||||
|
||||
def finish_upload(self, ptop, wark):
|
||||
with self.mutex:
|
||||
self._finish_upload(ptop, wark)
|
||||
|
||||
def _finish_upload(self, ptop, wark):
|
||||
try:
|
||||
job = self.registry[ptop][wark]
|
||||
pdir = os.path.join(job["ptop"], job["prel"])
|
||||
src = os.path.join(pdir, job["tnam"])
|
||||
dst = os.path.join(pdir, job["name"])
|
||||
except Exception as ex:
|
||||
return "finish_upload, wark, " + repr(ex)
|
||||
|
||||
# self.log("--- " + wark + " " + dst + " finish_upload atomic " + dst, 4)
|
||||
atomic_move(src, dst)
|
||||
|
||||
if ANYWIN:
|
||||
a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
|
||||
self.lastmod_q.put(a)
|
||||
|
||||
a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
|
||||
a += [job.get("at") or time.time()]
|
||||
if self.idx_wark(*a):
|
||||
# self.log("pop " + wark + " " + dst + " finish_upload idx_wark", 4)
|
||||
del self.registry[ptop][wark]
|
||||
# in-memory registry is reserved for unfinished uploads
|
||||
|
||||
dupes = self.dupesched.pop(dst, [])
|
||||
if not dupes:
|
||||
return
|
||||
|
||||
cur = self.cur.get(ptop)
|
||||
for rd, fn in dupes:
|
||||
d2 = os.path.join(ptop, rd, fn)
|
||||
if os.path.exists(d2):
|
||||
continue
|
||||
|
||||
self._symlink(dst, d2)
|
||||
if cur:
|
||||
self.db_rm(cur, rd, fn)
|
||||
self.db_add(cur, wark, rd, fn, *a[-4:])
|
||||
|
||||
if cur:
|
||||
cur.connection.commit()
|
||||
|
||||
def idx_wark(self, ptop, wark, rd, fn, lmod, sz, ip, at):
|
||||
cur = self.cur.get(ptop)
|
||||
if not cur:
|
||||
@@ -1352,15 +1549,17 @@ class Up2k(object):
|
||||
try:
|
||||
permsets = [[True, False, False, True]]
|
||||
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
|
||||
vn, rem = vn.get_dbv(rem)
|
||||
unpost = False
|
||||
except:
|
||||
# unpost with missing permissions? try read+write and verify with db
|
||||
if not self.args.unpost:
|
||||
raise Pebkac(400, "the unpost feature was disabled by server config")
|
||||
raise Pebkac(400, "the unpost feature is disabled in server config")
|
||||
|
||||
unpost = True
|
||||
permsets = [[True, True]]
|
||||
vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
|
||||
vn, rem = vn.get_dbv(rem)
|
||||
_, _, _, _, dip, dat = self._find_from_vpath(vn.realpath, rem)
|
||||
|
||||
m = "you cannot delete this: "
|
||||
@@ -1379,7 +1578,11 @@ class Up2k(object):
|
||||
ptop = vn.realpath
|
||||
atop = vn.canonical(rem, False)
|
||||
adir, fn = os.path.split(atop)
|
||||
st = bos.lstat(atop)
|
||||
try:
|
||||
st = bos.lstat(atop)
|
||||
except:
|
||||
raise Pebkac(400, "file not found on disk (already deleted?)")
|
||||
|
||||
scandir = not self.args.no_scandir
|
||||
if stat.S_ISLNK(st.st_mode) or stat.S_ISREG(st.st_mode):
|
||||
dbv, vrem = self.asrv.vfs.get(vpath, uname, *permsets[0])
|
||||
@@ -1402,16 +1605,18 @@ class Up2k(object):
|
||||
self.log("rm {}\n {}".format(vpath, abspath))
|
||||
_ = dbv.get(volpath, uname, *permsets[0])
|
||||
with self.mutex:
|
||||
cur = None
|
||||
try:
|
||||
ptop = dbv.realpath
|
||||
cur, wark, _, _, _, _ = self._find_from_vpath(ptop, volpath)
|
||||
self._forget_file(ptop, volpath, cur, wark, True)
|
||||
finally:
|
||||
cur.connection.commit()
|
||||
if cur:
|
||||
cur.connection.commit()
|
||||
|
||||
bos.unlink(abspath)
|
||||
|
||||
rm = rmdirs(self.log_func, scandir, True, atop)
|
||||
rm = rmdirs(self.log_func, scandir, True, atop, 1)
|
||||
return n_files, rm[0], rm[1]
|
||||
|
||||
def handle_mv(self, uname, svp, dvp):
|
||||
@@ -1422,9 +1627,10 @@ class Up2k(object):
|
||||
if not srem:
|
||||
raise Pebkac(400, "mv: cannot move a mountpoint")
|
||||
|
||||
st = bos.stat(sabs)
|
||||
if stat.S_ISREG(st.st_mode):
|
||||
return self._mv_file(uname, svp, dvp)
|
||||
st = bos.lstat(sabs)
|
||||
if stat.S_ISREG(st.st_mode) or stat.S_ISLNK(st.st_mode):
|
||||
with self.mutex:
|
||||
return self._mv_file(uname, svp, dvp)
|
||||
|
||||
jail = svn.get_dbv(srem)[0]
|
||||
permsets = [[True, False, True]]
|
||||
@@ -1449,9 +1655,10 @@ class Up2k(object):
|
||||
raise Pebkac(500, "mv: bug at {}, top {}".format(svpf, svp))
|
||||
|
||||
dvpf = dvp + svpf[len(svp) :]
|
||||
self._mv_file(uname, svpf, dvpf)
|
||||
with self.mutex:
|
||||
self._mv_file(uname, svpf, dvpf)
|
||||
|
||||
rmdirs(self.log_func, scandir, True, sabs)
|
||||
rmdirs(self.log_func, scandir, True, sabs, 1)
|
||||
return "k"
|
||||
|
||||
def _mv_file(self, uname, svp, dvp):
|
||||
@@ -1465,6 +1672,14 @@ class Up2k(object):
|
||||
dabs = dvn.canonical(drem)
|
||||
drd, dfn = vsplit(drem)
|
||||
|
||||
n1 = svp.split("/")[-1]
|
||||
n2 = dvp.split("/")[-1]
|
||||
if n1.startswith(".") or n2.startswith("."):
|
||||
if self.args.no_dot_mv:
|
||||
raise Pebkac(400, "moving dotfiles is disabled in server config")
|
||||
elif self.args.no_dot_ren and n1 != n2:
|
||||
raise Pebkac(400, "renaming dotfiles is disabled in server config")
|
||||
|
||||
if bos.path.exists(dabs):
|
||||
raise Pebkac(400, "mv2: target file exists")
|
||||
|
||||
@@ -1480,6 +1695,9 @@ class Up2k(object):
|
||||
# folders are too scary, schedule rescan of both vols
|
||||
self.need_rescan[svn.vpath] = 1
|
||||
self.need_rescan[dvn.vpath] = 1
|
||||
with self.rescan_cond:
|
||||
self.rescan_cond.notify_all()
|
||||
|
||||
return "k"
|
||||
|
||||
c1, w, ftime, fsize, ip, at = self._find_from_vpath(svn.realpath, srem)
|
||||
@@ -1520,7 +1738,7 @@ class Up2k(object):
|
||||
def _find_from_vpath(self, ptop, vrem):
|
||||
cur = self.cur.get(ptop)
|
||||
if not cur:
|
||||
return None, None
|
||||
return [None] * 6
|
||||
|
||||
rd, fn = vsplit(vrem)
|
||||
q = "select w, mt, sz, ip, at from up where rd=? and fn=? limit 1"
|
||||
@@ -1541,13 +1759,15 @@ class Up2k(object):
|
||||
self.log("forgetting {}".format(vrem))
|
||||
if wark:
|
||||
self.log("found {} in db".format(wark))
|
||||
if self._relink(wark, ptop, vrem, None):
|
||||
drop_tags = False
|
||||
if drop_tags:
|
||||
if self._relink(wark, ptop, vrem, None):
|
||||
drop_tags = False
|
||||
|
||||
if drop_tags:
|
||||
q = "delete from mt where w=?"
|
||||
cur.execute(q, (wark[:16],))
|
||||
self.db_rm(cur, srd, sfn)
|
||||
|
||||
self.db_rm(cur, srd, sfn)
|
||||
|
||||
reg = self.registry.get(ptop)
|
||||
if reg:
|
||||
@@ -1555,7 +1775,7 @@ class Up2k(object):
|
||||
wark = [
|
||||
x
|
||||
for x, y in reg.items()
|
||||
if fn in [y["name"], y.get("tnam")] and y["prel"] == vrem
|
||||
if sfn in [y["name"], y.get("tnam")] and y["prel"] == vrem
|
||||
]
|
||||
|
||||
if wark and wark in reg:
|
||||
@@ -1599,7 +1819,7 @@ class Up2k(object):
|
||||
# deleting final remaining full copy; swap it with a symlink
|
||||
slabs = list(sorted(links.keys()))[0]
|
||||
ptop, rem = links.pop(slabs)
|
||||
self.log("linkswap [{}] and [{}]".format(sabs, dabs))
|
||||
self.log("linkswap [{}] and [{}]".format(sabs, slabs))
|
||||
bos.unlink(slabs)
|
||||
bos.rename(sabs, slabs)
|
||||
self._symlink(slabs, sabs, False)
|
||||
@@ -1638,7 +1858,13 @@ class Up2k(object):
|
||||
except:
|
||||
cj["lmod"] = int(time.time())
|
||||
|
||||
wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"])
|
||||
if cj["hash"]:
|
||||
wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"])
|
||||
else:
|
||||
wark = up2k_wark_from_metadata(
|
||||
self.salt, cj["size"], cj["lmod"], cj["prel"], cj["name"]
|
||||
)
|
||||
|
||||
return wark
|
||||
|
||||
def _hashlist_from_file(self, path):
|
||||
@@ -1681,9 +1907,12 @@ class Up2k(object):
|
||||
|
||||
if self.args.nw:
|
||||
job["tnam"] = tnam
|
||||
if not job["hash"]:
|
||||
del self.registry[job["ptop"]][job["wark"]]
|
||||
return
|
||||
|
||||
suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
|
||||
dip = job["addr"].replace(":", ".")
|
||||
suffix = "-{:.6f}-{}".format(job["t0"], dip)
|
||||
with ren_open(tnam, "wb", fdir=pdir, suffix=suffix) as f:
|
||||
f, job["tnam"] = f["orz"]
|
||||
if (
|
||||
@@ -1697,8 +1926,12 @@ class Up2k(object):
|
||||
except:
|
||||
self.log("could not sparse [{}]".format(fp), 3)
|
||||
|
||||
f.seek(job["size"] - 1)
|
||||
f.write(b"e")
|
||||
if job["hash"]:
|
||||
f.seek(job["size"] - 1)
|
||||
f.write(b"e")
|
||||
|
||||
if not job["hash"]:
|
||||
self._finish_upload(job["ptop"], job["wark"])
|
||||
|
||||
def _lastmodder(self):
|
||||
while True:
|
||||
@@ -1796,11 +2029,16 @@ class Up2k(object):
|
||||
|
||||
# self.log("\n " + repr([ptop, rd, fn]))
|
||||
abspath = os.path.join(ptop, rd, fn)
|
||||
tags = self.mtag.get(abspath)
|
||||
ntags1 = len(tags)
|
||||
parsers = self._get_parsers(ptop, tags, abspath)
|
||||
if parsers:
|
||||
tags.update(self.mtag.get_bin(parsers, abspath))
|
||||
try:
|
||||
tags = self.mtag.get(abspath)
|
||||
ntags1 = len(tags)
|
||||
parsers = self._get_parsers(ptop, tags, abspath)
|
||||
if parsers:
|
||||
tags.update(self.mtag.get_bin(parsers, abspath))
|
||||
except Exception as ex:
|
||||
msg = "failed to read tags from {}:\n{}"
|
||||
self.log(msg.format(abspath, ex), c=3)
|
||||
continue
|
||||
|
||||
with self.mutex:
|
||||
cur = self.cur[ptop]
|
||||
|
||||
@@ -19,7 +19,7 @@ import subprocess as sp # nosec
|
||||
from datetime import datetime
|
||||
from collections import Counter
|
||||
|
||||
from .__init__ import PY2, WINDOWS, ANYWIN
|
||||
from .__init__ import PY2, WINDOWS, ANYWIN, VT100, unicode
|
||||
from .stolen import surrogateescape
|
||||
|
||||
FAKE_MP = False
|
||||
@@ -58,6 +58,9 @@ except:
|
||||
return struct.unpack(f.decode("ascii"), *a, **ka)
|
||||
|
||||
|
||||
ansi_re = re.compile("\033\\[[^mK]*[mK]")
|
||||
|
||||
|
||||
surrogateescape.register_surrogateescape()
|
||||
FS_ENCODING = sys.getfilesystemencoding()
|
||||
if WINDOWS and PY2:
|
||||
@@ -77,6 +80,7 @@ HTTPCODE = {
|
||||
403: "Forbidden",
|
||||
404: "Not Found",
|
||||
405: "Method Not Allowed",
|
||||
411: "Length Required",
|
||||
413: "Payload Too Large",
|
||||
416: "Requested Range Not Satisfiable",
|
||||
422: "Unprocessable Entity",
|
||||
@@ -165,7 +169,7 @@ class Cooldown(object):
|
||||
return ret
|
||||
|
||||
|
||||
class Unrecv(object):
|
||||
class _Unrecv(object):
|
||||
"""
|
||||
undo any number of socket recv ops
|
||||
"""
|
||||
@@ -185,10 +189,117 @@ class Unrecv(object):
|
||||
except:
|
||||
return b""
|
||||
|
||||
    def recv_ex(self, nbytes):
        """read an exact number of bytes"""
        ret = self.recv(nbytes)
        while ret and len(ret) < nbytes:
            buf = self.recv(nbytes - len(ret))
            if not buf:
                break

            ret += buf

        return ret

    def unrecv(self, buf):
        self.buf = buf + self.buf
|
||||
|
||||
|
||||
class _LUnrecv(object):
|
||||
"""
|
||||
with expensive debug logging
|
||||
"""
|
||||
|
||||
def __init__(self, s):
|
||||
self.s = s
|
||||
self.buf = b""
|
||||
|
||||
def recv(self, nbytes):
|
||||
if self.buf:
|
||||
ret = self.buf[:nbytes]
|
||||
self.buf = self.buf[nbytes:]
|
||||
m = "\033[0;7mur:pop:\033[0;1;32m {}\n\033[0;7mur:rem:\033[0;1;35m {}\033[0m\n"
|
||||
print(m.format(ret, self.buf), end="")
|
||||
return ret
|
||||
|
||||
try:
|
||||
ret = self.s.recv(nbytes)
|
||||
m = "\033[0;7mur:recv\033[0;1;33m {}\033[0m\n"
|
||||
print(m.format(ret), end="")
|
||||
return ret
|
||||
except:
|
||||
return b""
|
||||
|
||||
def recv_ex(self, nbytes):
|
||||
"""read an exact number of bytes"""
|
||||
ret = self.recv(nbytes)
|
||||
while ret and len(ret) < nbytes:
|
||||
buf = self.recv(nbytes - len(ret))
|
||||
if not buf:
|
||||
break
|
||||
|
||||
ret += buf
|
||||
|
||||
return ret
|
||||
|
||||
def unrecv(self, buf):
|
||||
self.buf = buf + self.buf
|
||||
m = "\033[0;7mur:push\033[0;1;31m {}\n\033[0;7mur:rem:\033[0;1;35m {}\033[0m\n"
|
||||
print(m.format(buf, self.buf), end="")
|
||||
|
||||
|
||||
Unrecv = _Unrecv
|
||||
|
||||
|
||||
class FHC(object):
|
||||
class CE(object):
|
||||
def __init__(self, fh):
|
||||
self.ts = 0
|
||||
self.fhs = [fh]
|
||||
|
||||
def __init__(self):
|
||||
self.cache = {}
|
||||
|
||||
def close(self, path):
|
||||
try:
|
||||
ce = self.cache[path]
|
||||
except:
|
||||
return
|
||||
|
||||
for fh in ce.fhs:
|
||||
fh.close()
|
||||
|
||||
del self.cache[path]
|
||||
|
||||
def clean(self):
|
||||
if not self.cache:
|
||||
return
|
||||
|
||||
keep = {}
|
||||
now = time.time()
|
||||
for path, ce in self.cache.items():
|
||||
if now < ce.ts + 5:
|
||||
keep[path] = ce
|
||||
else:
|
||||
for fh in ce.fhs:
|
||||
fh.close()
|
||||
|
||||
self.cache = keep
|
||||
|
||||
def pop(self, path):
|
||||
return self.cache[path].fhs.pop()
|
||||
|
||||
def put(self, path, fh):
|
||||
try:
|
||||
ce = self.cache[path]
|
||||
ce.fhs.append(fh)
|
||||
except:
|
||||
ce = self.CE(fh)
|
||||
self.cache[path] = ce
|
||||
|
||||
ce.ts = time.time()
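A hedged usage sketch of the `FHC` file-handle cache above. The call sites shown here (reusing a write handle across chunks of the same upload, closing handles after roughly 5 seconds idle) are assumptions for illustration, not a verbatim copy of where copyparty calls it:

```python
fhc = FHC()

try:
    f = fhc.pop("/tmp/up.partial")     # reuse a cached handle if one exists
except:
    f = open("/tmp/up.partial", "wb")  # hypothetical upload target

f.write(b"chunk")
fhc.put("/tmp/up.partial", f)          # return it to the cache, refresh its timestamp

fhc.clean()                            # closes entries idle for more than 5 seconds
fhc.close("/tmp/up.partial")           # or drop one path explicitly
```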
class ProgressPrinter(threading.Thread):
|
||||
"""
|
||||
periodically print progress info without linefeeds
|
||||
@@ -203,17 +314,22 @@ class ProgressPrinter(threading.Thread):
|
||||
|
||||
def run(self):
|
||||
msg = None
|
||||
fmt = " {}\033[K\r" if VT100 else " {} $\r"
|
||||
while not self.end:
|
||||
time.sleep(0.1)
|
||||
if msg == self.msg or self.end:
|
||||
continue
|
||||
|
||||
msg = self.msg
|
||||
uprint(" {}\033[K\r".format(msg))
|
||||
uprint(fmt.format(msg))
|
||||
if PY2:
|
||||
sys.stdout.flush()
|
||||
|
||||
print("\033[K", end="")
|
||||
if VT100:
|
||||
print("\033[K", end="")
|
||||
elif msg:
|
||||
print("------------------------")
|
||||
|
||||
sys.stdout.flush() # necessary on win10 even w/ stderr btw
|
||||
|
||||
|
||||
@@ -308,7 +424,7 @@ def stackmon(fp, ival, suffix):
|
||||
|
||||
|
||||
def start_log_thrs(logger, ival, nid):
|
||||
ival = int(ival)
|
||||
ival = float(ival)
|
||||
tname = lname = "log-thrs"
|
||||
if nid:
|
||||
tname = "logthr-n{}-i{:x}".format(nid, os.getpid())
|
||||
@@ -329,7 +445,7 @@ def log_thrs(log, ival, name):
|
||||
tv = [x.name for x in threading.enumerate()]
|
||||
tv = [
|
||||
x.split("-")[0]
|
||||
if x.startswith("httpconn-") or x.startswith("thumb-")
|
||||
if x.split("-")[0] in ["httpconn", "thumb", "tagger"]
|
||||
else "listen"
|
||||
if "-listen-" in x
|
||||
else x
|
||||
@@ -340,6 +456,17 @@ def log_thrs(log, ival, name):
|
||||
log(name, "\033[0m \033[33m".join(tv), 3)
|
||||
|
||||
|
||||
def vol_san(vols, txt):
|
||||
for vol in vols:
|
||||
txt = txt.replace(vol.realpath.encode("utf-8"), vol.vpath.encode("utf-8"))
|
||||
txt = txt.replace(
|
||||
vol.realpath.encode("utf-8").replace(b"\\", b"\\\\"),
|
||||
vol.vpath.encode("utf-8"),
|
||||
)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
def min_ex():
|
||||
et, ev, tb = sys.exc_info()
|
||||
tb = traceback.extract_tb(tb)
|
||||
@@ -351,11 +478,12 @@ def min_ex():
|
||||
|
||||
@contextlib.contextmanager
|
||||
def ren_open(fname, *args, **kwargs):
|
||||
fun = kwargs.pop("fun", open)
|
||||
fdir = kwargs.pop("fdir", None)
|
||||
suffix = kwargs.pop("suffix", None)
|
||||
|
||||
if fname == os.devnull:
|
||||
with open(fname, *args, **kwargs) as f:
|
||||
with fun(fname, *args, **kwargs) as f:
|
||||
yield {"orz": [f, fname]}
|
||||
return
|
||||
|
||||
@@ -389,7 +517,7 @@ def ren_open(fname, *args, **kwargs):
|
||||
fname += suffix
|
||||
ext += suffix
|
||||
|
||||
with open(fsenc(fpath), *args, **kwargs) as f:
|
||||
with fun(fsenc(fpath), *args, **kwargs) as f:
|
||||
if b64:
|
||||
fp2 = "fn-trunc.{}.txt".format(b64)
|
||||
fp2 = os.path.join(fdir, fp2)
|
||||
@@ -434,8 +562,8 @@ class MultipartParser(object):
|
||||
self.log = log_func
|
||||
self.headers = http_headers
|
||||
|
||||
self.re_ctype = re.compile(r"^content-type: *([^;]+)", re.IGNORECASE)
|
||||
self.re_cdisp = re.compile(r"^content-disposition: *([^;]+)", re.IGNORECASE)
|
||||
self.re_ctype = re.compile(r"^content-type: *([^; ]+)", re.IGNORECASE)
|
||||
self.re_cdisp = re.compile(r"^content-disposition: *([^; ]+)", re.IGNORECASE)
|
||||
self.re_cdisp_field = re.compile(
|
||||
r'^content-disposition:(?: *|.*; *)name="([^"]+)"', re.IGNORECASE
|
||||
)
|
||||
@@ -571,19 +699,21 @@ class MultipartParser(object):
|
||||
yields [fieldname, unsanitized_filename, fieldvalue]
|
||||
where fieldvalue yields chunks of data
|
||||
"""
|
||||
while True:
|
||||
run = True
|
||||
while run:
|
||||
fieldname, filename = self._read_header()
|
||||
yield [fieldname, filename, self._read_data()]
|
||||
|
||||
tail = self.sr.recv(2)
|
||||
tail = self.sr.recv_ex(2)
|
||||
|
||||
if tail == b"--":
|
||||
# EOF indicated by this immediately after final boundary
|
||||
self.sr.recv(2)
|
||||
return
|
||||
tail = self.sr.recv_ex(2)
|
||||
run = False
|
||||
|
||||
if tail != b"\r\n":
|
||||
raise Pebkac(400, "protocol error after field value")
|
||||
m = "protocol error after field value: want b'\\r\\n', got {!r}"
|
||||
raise Pebkac(400, m.format(tail))
|
||||
|
||||
def _read_value(self, iterator, max_len):
|
||||
ret = b""
|
||||
@@ -632,7 +762,7 @@ class MultipartParser(object):
|
||||
def get_boundary(headers):
|
||||
# boundaries contain a-z A-Z 0-9 ' ( ) + _ , - . / : = ?
|
||||
# (whitespace allowed except as the last char)
|
||||
ptn = r"^multipart/form-data; *(.*; *)?boundary=([^;]+)"
|
||||
ptn = r"^multipart/form-data *; *(.*; *)?boundary=([^;]+)"
|
||||
ct = headers["content-type"]
|
||||
m = re.match(ptn, ct, re.IGNORECASE)
|
||||
if not m:
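The boundary-regex change above allows whitespace before the semicolon in the content-type header, which the old pattern rejected. A quick check of the relaxed pattern:

```python
import re

ptn = r"^multipart/form-data *; *(.*; *)?boundary=([^;]+)"
ct = "multipart/form-data ; boundary=----geckoformboundary1234"
m = re.match(ptn, ct, re.IGNORECASE)
print(m.group(2))  # ----geckoformboundary1234
```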
@@ -669,6 +799,14 @@ def read_header(sr):
|
||||
return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")
|
||||
|
||||
|
||||
def gen_filekey(salt, fspath, fsize, inode):
    return base64.urlsafe_b64encode(
        hashlib.sha512(
            "{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace")
        ).digest()
    ).decode("ascii")
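A hedged usage sketch of the filekey above, following how the u2idx search code applies it: the key is truncated to the per-volume `fk` length and appended as `?k=`. The salt, path, size and inode below are placeholder values, and the snippet assumes the imports util.py already has (base64, hashlib):

```python
fk = 8  # per-volume key length (assumed for the example)
key = gen_filekey("fk-salt", "/srv/music/song.mp3", 4096, 12345)
suffix = "?k=" + key[:fk]
```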
def humansize(sz, terse=False):
|
||||
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
|
||||
if sz < 1024:
|
||||
@@ -684,6 +822,17 @@ def humansize(sz, terse=False):
|
||||
return ret.replace("iB", "").replace(" ", "")
|
||||
|
||||
|
||||
def unhumanize(sz):
    try:
        return float(sz)
    except:
        pass

    mul = sz[-1:].lower()
    mul = {"k": 1024, "m": 1024 * 1024, "g": 1024 * 1024 * 1024}.get(mul, 1)
    return float(sz[:-1]) * mul
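A quick sanity check of the size parser above; the call sites are assumed to pass strings like these from config or volflags:

```python
print(unhumanize("512m"))  # 536870912.0
print(unhumanize("1.5g"))  # 1610612736.0
print(unhumanize("8192"))  # 8192.0
```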
def get_spd(nbyte, t0, t=None):
|
||||
if t is None:
|
||||
t = time.time()
|
||||
@@ -958,8 +1107,12 @@ def read_socket_chunked(sr, log=None):
|
||||
raise Pebkac(400, err)
|
||||
|
||||
if chunklen == 0:
|
||||
sr.recv(2) # \r\n after final chunk
|
||||
return
|
||||
x = sr.recv_ex(2)
|
||||
if x == b"\r\n":
|
||||
return
|
||||
|
||||
m = "protocol error after final chunk: want b'\\r\\n', got {!r}"
|
||||
raise Pebkac(400, m.format(x))
|
||||
|
||||
if log:
|
||||
log("receiving {} byte chunk".format(chunklen))
|
||||
@@ -967,7 +1120,10 @@ def read_socket_chunked(sr, log=None):
|
||||
for chunk in read_socket(sr, chunklen):
|
||||
yield chunk
|
||||
|
||||
sr.recv(2) # \r\n after each chunk too
|
||||
x = sr.recv_ex(2)
|
||||
if x != b"\r\n":
|
||||
m = "protocol error in chunk separator: want b'\\r\\n', got {!r}"
|
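The chunked-transfer fix above replaces bare `recv(2)` calls with `recv_ex(2)` and validates the `\r\n` terminators. A toy illustration of why the exact-read loop matters; the socket class and names here are invented:

```python
import io

class SlowSock(object):
    """toy socket that returns one byte per recv, the worst case"""
    def __init__(self, data):
        self.buf = io.BytesIO(data)

    def recv(self, n):
        return self.buf.read(1)

def recv_ex(s, nbytes):
    ret = s.recv(nbytes)
    while ret and len(ret) < nbytes:
        buf = s.recv(nbytes - len(ret))
        if not buf:
            break
        ret += buf
    return ret

s = SlowSock(b"\r\nrest-of-stream")
assert s.recv(2) != b"\r\n"      # a bare recv(2) can come up short
s = SlowSock(b"\r\nrest-of-stream")
assert recv_ex(s, 2) == b"\r\n"  # the exact-read loop does not
```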
def yieldfile(fn):
|
||||
@@ -1035,6 +1191,9 @@ def sendfile_kern(lower, upper, f, s):
|
||||
|
||||
|
||||
def statdir(logger, scandir, lstat, top):
|
||||
if lstat and ANYWIN:
|
||||
lstat = False
|
||||
|
||||
if lstat and not os.supports_follow_symlinks:
|
||||
scandir = False
|
||||
|
||||
@@ -1062,25 +1221,27 @@ def statdir(logger, scandir, lstat, top):
|
||||
logger(src, "{} @ {}".format(repr(ex), top), 1)
|
||||
|
||||
|
||||
def rmdirs(logger, scandir, lstat, top):
|
||||
def rmdirs(logger, scandir, lstat, top, depth):
|
||||
if not os.path.exists(fsenc(top)) or not os.path.isdir(fsenc(top)):
|
||||
top = os.path.dirname(top)
|
||||
|
||||
depth -= 1
|
||||
|
||||
dirs = statdir(logger, scandir, lstat, top)
|
||||
dirs = [x[0] for x in dirs if stat.S_ISDIR(x[1].st_mode)]
|
||||
dirs = [os.path.join(top, x) for x in dirs]
|
||||
ok = []
|
||||
ng = []
|
||||
for d in dirs[::-1]:
|
||||
a, b = rmdirs(logger, scandir, lstat, d)
|
||||
a, b = rmdirs(logger, scandir, lstat, d, depth + 1)
|
||||
ok += a
|
||||
ng += b
|
||||
|
||||
try:
|
||||
os.rmdir(fsenc(top))
|
||||
ok.append(top)
|
||||
except:
|
||||
ng.append(top)
|
||||
if depth:
|
||||
try:
|
||||
os.rmdir(fsenc(top))
|
||||
ok.append(top)
|
||||
except:
|
||||
ng.append(top)
|
||||
|
||||
return ok, ng
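The new `depth` argument to `rmdirs` above controls whether the top directory itself may be removed; callers now pass 1, and the existing dirname fallback decrements it when `top` was a file, so the containing folder survives while emptied subfolders are still pruned. A simplified standalone sketch of the bottom-up prune, using stdlib calls instead of `statdir`/`fsenc` and with no logging:

```python
import os
import stat

def rmdirs(top, depth=1):
    """prune empty directories bottom-up; depth=0 keeps <top> itself in place"""
    ok, ng = [], []
    for name in sorted(os.listdir(top), reverse=True):
        d = os.path.join(top, name)
        if stat.S_ISDIR(os.lstat(d).st_mode):
            a, b = rmdirs(d, depth + 1)
            ok += a
            ng += b

    if depth:
        try:
            os.rmdir(top)
            ok.append(top)
        except OSError:
            ng.append(top)

    return ok, ng
```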
@@ -22,7 +22,7 @@ window.baguetteBox = (function () {
|
||||
afterHide: null,
|
||||
onChange: null,
|
||||
},
|
||||
overlay, slider, btnPrev, btnNext, btnHelp, btnVmode, btnClose,
|
||||
overlay, slider, btnPrev, btnNext, btnHelp, btnRotL, btnRotR, btnSel, btnVmode, btnClose,
|
||||
currentGallery = [],
|
||||
currentIndex = 0,
|
||||
isOverlayVisible = false,
|
||||
@@ -49,7 +49,7 @@ window.baguetteBox = (function () {
|
||||
};
|
||||
|
||||
var touchstartHandler = function (e) {
|
||||
touch.count++;
|
||||
touch.count = e.touches.length;
|
||||
if (touch.count > 1)
|
||||
touch.multitouch = true;
|
||||
|
||||
@@ -72,8 +72,11 @@ window.baguetteBox = (function () {
|
||||
hideOverlay();
|
||||
}
|
||||
};
|
||||
var touchendHandler = function () {
|
||||
var touchendHandler = function (e) {
|
||||
touch.count--;
|
||||
if (e && e.touches)
|
||||
touch.count = e.touches.length;
|
||||
|
||||
if (touch.count <= 0)
|
||||
touch.multitouch = false;
|
||||
|
||||
@@ -175,6 +178,9 @@ window.baguetteBox = (function () {
|
||||
'<button id="bbox-next" class="bbox-btn" type="button" aria-label="Next">></button>' +
|
||||
'<div id="bbox-btns">' +
|
||||
'<button id="bbox-help" type="button">?</button>' +
|
||||
'<button id="bbox-rotl" type="button">↶</button>' +
|
||||
'<button id="bbox-rotr" type="button">↷</button>' +
|
||||
'<button id="bbox-tsel" type="button">sel</button>' +
|
||||
'<button id="bbox-vmode" type="button" tt="a"></button>' +
|
||||
'<button id="bbox-close" type="button" aria-label="Close">X</button>' +
|
||||
'</div></div>'
|
||||
@@ -187,6 +193,9 @@ window.baguetteBox = (function () {
|
||||
btnPrev = ebi('bbox-prev');
|
||||
btnNext = ebi('bbox-next');
|
||||
btnHelp = ebi('bbox-help');
|
||||
btnRotL = ebi('bbox-rotl');
|
||||
btnRotR = ebi('bbox-rotr');
|
||||
btnSel = ebi('bbox-tsel');
|
||||
btnVmode = ebi('bbox-vmode');
|
||||
btnClose = ebi('bbox-close');
|
||||
bindEvents();
|
||||
@@ -203,11 +212,13 @@ window.baguetteBox = (function () {
|
||||
['right, L', 'next file'],
|
||||
['home', 'first file'],
|
||||
['end', 'last file'],
|
||||
['R', 'rotate (shift=ccw)'],
|
||||
['S', 'toggle file selection'],
|
||||
['space, P, K', 'video: play / pause'],
|
||||
['U', 'video: seek 10sec back'],
|
||||
['P', 'video: seek 10sec ahead'],
|
||||
['M', 'video: toggle mute'],
|
||||
['R', 'video: toggle loop'],
|
||||
['V', 'video: toggle loop'],
|
||||
['C', 'video: toggle auto-next'],
|
||||
['F', 'video: toggle fullscreen'],
|
||||
],
|
||||
@@ -249,7 +260,7 @@ window.baguetteBox = (function () {
|
||||
v.muted = vmute = !vmute;
|
||||
mp_ctl();
|
||||
}
|
||||
else if (k == "KeyR" && v) {
|
||||
else if (k == "KeyV" && v) {
|
||||
vloop = !vloop;
|
||||
vnext = vnext && !vloop;
|
||||
setVmode();
|
||||
@@ -267,6 +278,10 @@ window.baguetteBox = (function () {
|
||||
v.requestFullscreen();
|
||||
}
|
||||
catch (ex) { }
|
||||
else if (k == "KeyS")
|
||||
tglsel();
|
||||
else if (k == "KeyR")
|
||||
rotn(e.shiftKey ? -1 : 1);
|
||||
}
|
||||
|
||||
function setVmode() {
|
||||
@@ -279,7 +294,7 @@ window.baguetteBox = (function () {
|
||||
if (vloop) {
|
||||
lbl = 'Loop';
|
||||
msg += 'repeat it';
|
||||
tts = '$NHotkey: R';
|
||||
tts = '$NHotkey: V';
|
||||
}
|
||||
else if (vnext) {
|
||||
lbl = 'Cont';
|
||||
@@ -314,6 +329,40 @@ window.baguetteBox = (function () {
|
||||
tt.show.bind(this)();
|
||||
}
|
||||
|
||||
function tglsel() {
|
||||
var thumb = currentGallery[currentIndex].imageElement,
|
||||
name = vsplit(thumb.href)[1],
|
||||
files = msel.getall();
|
||||
|
||||
for (var a = 0; a < files.length; a++)
|
||||
if (vsplit(files[a].vp)[1] == name)
|
||||
clmod(ebi(files[a].id).closest('tr'), 'sel', 't');
|
||||
|
||||
msel.selui();
|
||||
selbg();
|
||||
}
|
||||
|
||||
function selbg() {
|
||||
var img = vidimg(),
|
||||
thumb = currentGallery[currentIndex].imageElement,
|
||||
name = vsplit(thumb.href)[1],
|
||||
files = msel.getsel(),
|
||||
sel = false;
|
||||
|
||||
for (var a = 0; a < files.length; a++)
|
||||
if (vsplit(files[a].vp)[1] == name)
|
||||
sel = true;
|
||||
|
||||
ebi('bbox-overlay').style.background = sel ?
|
||||
'rgba(153,34,85,0.7)' : '';
|
||||
|
||||
img.style.borderRadius = sel ? '1em' : '';
|
||||
btnSel.style.color = sel ? '#fff' : '';
|
||||
btnSel.style.background = sel ? '#d48' : '';
|
||||
btnSel.style.textShadow = sel ? '1px 1px 0 #b38' : '';
|
||||
btnSel.style.boxShadow = sel ? '.15em .15em 0 #502' : '';
|
||||
}
|
||||
|
||||
function keyUpHandler(e) {
|
||||
if (e.ctrlKey || e.altKey || e.metaKey || e.isComposing)
|
||||
return;
|
||||
@@ -348,6 +397,9 @@ window.baguetteBox = (function () {
|
||||
bind(btnClose, 'click', hideOverlay);
|
||||
bind(btnVmode, 'click', tglVmode);
|
||||
bind(btnHelp, 'click', halp);
|
||||
bind(btnRotL, 'click', rotl);
|
||||
bind(btnRotR, 'click', rotr);
|
||||
bind(btnSel, 'click', tglsel);
|
||||
bind(slider, 'contextmenu', contextmenuHandler);
|
||||
bind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
|
||||
bind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
|
||||
@@ -362,11 +414,15 @@ window.baguetteBox = (function () {
|
||||
unbind(btnClose, 'click', hideOverlay);
|
||||
unbind(btnVmode, 'click', tglVmode);
|
||||
unbind(btnHelp, 'click', halp);
|
||||
unbind(btnRotL, 'click', rotl);
|
||||
unbind(btnRotR, 'click', rotr);
|
||||
unbind(btnSel, 'click', tglsel);
|
||||
unbind(slider, 'contextmenu', contextmenuHandler);
|
||||
unbind(overlay, 'touchstart', touchstartHandler, nonPassiveEvent);
|
||||
unbind(overlay, 'touchmove', touchmoveHandler, passiveEvent);
|
||||
unbind(overlay, 'touchend', touchendHandler);
|
||||
unbind(document, 'focus', trapFocusInsideOverlay, true);
|
||||
timer.rm(rotn);
|
||||
}
|
||||
|
||||
function prepareOverlay(gallery, userOptions) {
|
||||
@@ -617,10 +673,91 @@ window.baguetteBox = (function () {
return true;
}

var prev_cw = 0, prev_ch = 0, unrot_timer = null;
function rotn(n) {
var el = vidimg(),
orot = parseInt(el.getAttribute('rot') || 0),
frot = orot + (n || 0) * 90;

if (!frot && !orot)
return; // reflow noop

var co = ebi('bbox-overlay'),
cw = co.clientWidth,
ch = co.clientHeight;

if (!n && prev_cw === cw && prev_ch === ch)
return; // reflow noop

prev_cw = cw;
prev_ch = ch;
var rot = frot,
iw = el.naturalWidth || el.videoWidth,
ih = el.naturalHeight || el.videoHeight,
magic = 4, // idk, works in enough browsers
dl = el.closest('div').querySelector('figcaption a'),
vw = cw,
vh = ch - dl.offsetHeight + magic,
pmag = Math.min(1, Math.min(vw / ih, vh / iw)),
wmag = Math.min(1, Math.min(vw / iw, vh / ih));

while (rot < 0) rot += 360;
while (rot >= 360) rot -= 360;
var q = rot == 90 || rot == 270 ? 1 : 0,
mag = q ? pmag : wmag;

el.style.cssText = 'max-width:none; max-height:none; position:absolute; display:block; margin:0';
if (!orot) {
el.style.width = iw * wmag + 'px';
el.style.height = ih * wmag + 'px';
el.style.left = (vw - iw * wmag) / 2 + 'px';
el.style.top = (vh - ih * wmag) / 2 - magic + 'px';
q = el.offsetHeight;
}
el.style.width = iw * mag + 'px';
el.style.height = ih * mag + 'px';
el.style.left = (vw - iw * mag) / 2 + 'px';
el.style.top = (vh - ih * mag) / 2 - magic + 'px';
el.style.transform = 'rotate(' + frot + 'deg)';
el.setAttribute('rot', frot);
timer.add(rotn);
if (!rot) {
clearTimeout(unrot_timer);
unrot_timer = setTimeout(unrot, 300);
}
}
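In rotn() above, wmag fits the media at its natural orientation while pmag fits it as if its width and height were swapped; which factor applies depends on whether the final rotation is a quarter turn (90 or 270 degrees). A minimal sketch of that sizing decision, with illustrative names only (not part of the actual diff):

// pick a scale so the (possibly rotated) media fits inside the viewport;
// vw/vh = viewport size, iw/ih = intrinsic media size, rot = 0/90/180/270
function fitScale(vw, vh, iw, ih, rot) {
    var quarter = (rot == 90 || rot == 270),
        w = quarter ? ih : iw,  // effective width after rotation
        h = quarter ? iw : ih;  // effective height after rotation
    return Math.min(1, Math.min(vw / w, vh / h));  // never upscale past 1:1
}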
function rotl() {
rotn(-1);
}
function rotr() {
rotn(1);
}
function unrot() {
var el = vidimg(),
orot = el.getAttribute('rot'),
rot = parseInt(orot || 0);

while (rot < 0) rot += 360;
while (rot >= 360) rot -= 360;
if (rot || orot === null)
return;

clmod(el, 'nt', 1);
el.removeAttribute('rot');
el.removeAttribute("style");
rot = el.offsetHeight;
clmod(el, 'nt');
timer.rm(rotn);
}

function vid() {
return imagesElements[currentIndex].querySelector('video');
}

function vidimg() {
return imagesElements[currentIndex].querySelector('img, video');
}

function playvid(play) {
if (vid())
vid()[play ? 'play' : 'pause']();
@@ -662,15 +799,21 @@ window.baguetteBox = (function () {
}

function updateOffset() {
var offset = -currentIndex * 100 + '%';
var offset = -currentIndex * 100 + '%',
xform = slider.style.perspective !== undefined;

if (options.animation === 'fadeIn') {
slider.style.opacity = 0;
setTimeout(function () {
slider.style.transform = 'translate3d(' + offset + ',0,0)';
xform ?
slider.style.transform = 'translate3d(' + offset + ',0,0)' :
slider.style.left = offset;
slider.style.opacity = 1;
}, 400);
} else {
slider.style.transform = 'translate3d(' + offset + ',0,0)';
xform ?
slider.style.transform = 'translate3d(' + offset + ',0,0)' :
slider.style.left = offset;
}
playvid(false);
var v = vid();
@@ -679,8 +822,21 @@ window.baguetteBox = (function () {
v.muted = vmute;
v.loop = vloop;
}
selbg();
mp_ctl();
setVmode();

var el = vidimg();
if (el.getAttribute('rot'))
timer.add(rotn);
else
timer.rm(rotn);

var prev = QS('.full-image.vis');
if (prev)
clmod(prev, 'vis');

clmod(el.closest('div'), 'vis', 1);
}

function preloadNext(index) {
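The updateOffset() change above probes slider.style.perspective to decide whether the browser understands 3D transforms: if so, the slider is moved with translate3d, otherwise it falls back to plain absolute left offsets for old browsers. A minimal standalone sketch of the same fallback pattern, with assumed names (slideTo is illustrative, not copyparty API):

// move a horizontally-stacked slider to slide `index`, preferring transforms
function slideTo(slider, index) {
    var offset = -index * 100 + '%';
    if (slider.style.perspective !== undefined)  // transform support probe
        slider.style.transform = 'translate3d(' + offset + ',0,0)';
    else
        slider.style.left = offset;  // legacy fallback for old browsers
}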
File diff suppressed because it is too large
@@ -6,10 +6,10 @@
<title>⇆🎉 {{ title }}</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=0.8">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/browser.css?_={{ ts }}">
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/upload.css?_={{ ts }}">
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
<link rel="stylesheet" media="screen" href="/.cpr/browser.css?_={{ ts }}">
{%- if css %}
<link rel="stylesheet" type="text/css" media="screen" href="{{ css }}?_={{ ts }}">
<link rel="stylesheet" media="screen" href="{{ css }}?_={{ ts }}">
{%- endif %}
</head>
@@ -18,9 +18,9 @@

<div id="op_search" class="opview">
{%- if have_tags_idx %}
<div id="srch_form" class="tags"></div>
<div id="srch_form" class="tags opbox"></div>
{%- else %}
<div id="srch_form"></div>
<div id="srch_form" class="opbox"></div>
{%- endif %}
<div id="srch_q"></div>
</div>
@@ -31,7 +31,7 @@
<div id="u2err"></div>
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="bput" />
<input type="file" name="f" multiple><br />
<input type="file" name="f" multiple /><br />
<input type="submit" value="start upload">
</form>
</div>
@@ -39,7 +39,7 @@
<div id="op_mkdir" class="opview opbox act">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="mkdir" />
📂<input type="text" name="name" size="30">
📂<input type="text" name="name" class="i">
<input type="submit" value="make directory">
</form>
</div>
@@ -47,15 +47,15 @@
<div id="op_new_md" class="opview opbox">
<form method="post" enctype="multipart/form-data" accept-charset="utf-8" action="{{ url_suf }}">
<input type="hidden" name="act" value="new_md" />
📝<input type="text" name="name" size="30">
📝<input type="text" name="name" class="i">
<input type="submit" value="new markdown doc">
</form>
</div>

<div id="op_msg" class="opview opbox act">
<form method="post" enctype="application/x-www-form-urlencoded" accept-charset="utf-8" action="{{ url_suf }}">
📟<input type="text" name="msg" size="30">
<input type="submit" value="send msg to server log">
📟<input type="text" name="msg" class="i">
<input type="submit" value="send msg to srv log">
</form>
</div>
@@ -113,6 +113,8 @@
<div id="epi" class="logue">{{ logues[1] }}</div>

<h2><a href="/?h">control-panel</a></h2>

<a href="#" id="repl">π</a>

</div>

@@ -125,17 +127,23 @@
<script>
var acct = "{{ acct }}",
perms = {{ perms }},
tag_order_cfg = {{ tag_order }},
def_hcols = {{ def_hcols|tojson }},
have_up2k_idx = {{ have_up2k_idx|tojson }},
have_tags_idx = {{ have_tags_idx|tojson }},
have_mv = {{ have_mv|tojson }},
have_del = {{ have_del|tojson }},
have_unpost = {{ have_unpost|tojson }},
have_zip = {{ have_zip|tojson }};
have_zip = {{ have_zip|tojson }},
readme = {{ readme|tojson }};

document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark");
</script>
<script src="/.cpr/util.js?_={{ ts }}"></script>
<script src="/.cpr/browser.js?_={{ ts }}"></script>
<script src="/.cpr/up2k.js?_={{ ts }}"></script>
{%- if js %}
<script src="{{ js }}?_={{ ts }}"></script>
{%- endif %}
</body>

</html>
File diff suppressed because it is too large
@@ -1,147 +1,17 @@
|
||||
@font-face {
|
||||
font-family: 'scp';
|
||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
|
||||
}
|
||||
html, body {
|
||||
color: #333;
|
||||
background: #eee;
|
||||
font-family: sans-serif;
|
||||
line-height: 1.5em;
|
||||
}
|
||||
|
||||
|
||||
|
||||
#tt, #toast {
|
||||
position: fixed;
|
||||
max-width: 34em;
|
||||
background: #222;
|
||||
border: 0 solid #777;
|
||||
box-shadow: 0 .2em .5em #222;
|
||||
border-radius: .4em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#tt {
|
||||
overflow: hidden;
|
||||
margin-top: 1em;
|
||||
padding: 0 1.3em;
|
||||
height: 0;
|
||||
opacity: .1;
|
||||
transition: opacity 0.14s, height 0.14s, padding 0.14s;
|
||||
}
|
||||
#toast {
|
||||
top: 1.4em;
|
||||
right: -1em;
|
||||
line-height: 1.5em;
|
||||
padding: 1em 1.3em;
|
||||
border-width: .4em 0;
|
||||
transform: translateX(100%);
|
||||
transition:
|
||||
transform .4s cubic-bezier(.2, 1.2, .5, 1),
|
||||
right .4s cubic-bezier(.2, 1.2, .5, 1);
|
||||
text-shadow: 1px 1px 0 #000;
|
||||
color: #fff;
|
||||
}
|
||||
#toast pre {
|
||||
margin: 0;
|
||||
}
|
||||
#toastc {
|
||||
display: inline-block;
|
||||
#repl {
|
||||
position: absolute;
|
||||
overflow: hidden;
|
||||
left: 0;
|
||||
width: 0;
|
||||
opacity: 0;
|
||||
padding: .3em 0;
|
||||
margin: -.3em 0 0 0;
|
||||
line-height: 1.5em;
|
||||
color: #000;
|
||||
top: 0;
|
||||
right: .5em;
|
||||
border: none;
|
||||
outline: none;
|
||||
text-shadow: none;
|
||||
border-radius: .5em 0 0 .5em;
|
||||
transition: left .3s, width .3s, padding .3s, opacity .3s;
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
#toast.vis {
|
||||
right: 1.3em;
|
||||
transform: unset;
|
||||
}
|
||||
#toast.vis #toastc {
|
||||
left: -2em;
|
||||
width: .4em;
|
||||
padding: .3em .8em;
|
||||
opacity: 1;
|
||||
}
|
||||
#toast.inf {
|
||||
background: #07a;
|
||||
border-color: #0be;
|
||||
}
|
||||
#toast.inf #toastc {
|
||||
background: #0be;
|
||||
}
|
||||
#toast.ok {
|
||||
background: #4a0;
|
||||
border-color: #8e4;
|
||||
}
|
||||
#toast.ok #toastc {
|
||||
background: #8e4;
|
||||
}
|
||||
#toast.warn {
|
||||
background: #970;
|
||||
border-color: #fc0;
|
||||
}
|
||||
#toast.warn #toastc {
|
||||
background: #fc0;
|
||||
}
|
||||
#toast.err {
|
||||
background: #900;
|
||||
border-color: #d06;
|
||||
}
|
||||
#toast.err #toastc {
|
||||
background: #d06;
|
||||
}
|
||||
#tt.b {
|
||||
padding: 0 2em;
|
||||
border-radius: .5em;
|
||||
box-shadow: 0 .2em 1em #000;
|
||||
}
|
||||
#tt.show {
|
||||
padding: 1em 1.3em;
|
||||
border-width: .4em 0;
|
||||
height: auto;
|
||||
opacity: 1;
|
||||
}
|
||||
#tt.show.b {
|
||||
padding: 1.5em 2em;
|
||||
border-width: .5em 0;
|
||||
}
|
||||
#tt code {
|
||||
background: #3c3c3c;
|
||||
padding: .1em .3em;
|
||||
border-top: 1px solid #777;
|
||||
border-radius: .3em;
|
||||
line-height: 1.7em;
|
||||
}
|
||||
#tt em {
|
||||
color: #f6a;
|
||||
}
|
||||
html.light #tt {
|
||||
background: #fff;
|
||||
border-color: #888 #000 #777 #000;
|
||||
}
|
||||
html.light #tt,
|
||||
html.light #toast {
|
||||
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
|
||||
}
|
||||
html.light #tt code {
|
||||
background: #060;
|
||||
color: #fff;
|
||||
}
|
||||
html.light #tt em {
|
||||
color: #d38;
|
||||
}
|
||||
|
||||
|
||||
|
||||
#mtw {
|
||||
display: none;
|
||||
}
|
||||
@@ -149,122 +19,12 @@ html.light #tt em {
|
||||
margin: 0 auto;
|
||||
padding: 0 1.5em;
|
||||
}
|
||||
pre, code, a {
|
||||
color: #480;
|
||||
background: #f7f7f7;
|
||||
border: .07em solid #ddd;
|
||||
border-radius: .2em;
|
||||
padding: .1em .3em;
|
||||
margin: 0 .1em;
|
||||
#toast {
|
||||
bottom: auto;
|
||||
top: 1.4em;
|
||||
}
|
||||
code {
|
||||
font-size: .96em;
|
||||
}
|
||||
pre, code, tt {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
pre {
|
||||
counter-reset: precode;
|
||||
}
|
||||
pre code {
|
||||
counter-increment: precode;
|
||||
display: inline-block;
|
||||
margin: 0 -.3em;
|
||||
padding: .4em .5em;
|
||||
border: none;
|
||||
border-bottom: 1px solid #cdc;
|
||||
min-width: calc(100% - .6em);
|
||||
line-height: 1.1em;
|
||||
}
|
||||
pre code:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
pre code::before {
|
||||
content: counter(precode);
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
display: inline-block;
|
||||
text-align: right;
|
||||
font-size: .75em;
|
||||
color: #48a;
|
||||
width: 4em;
|
||||
padding-right: 1.5em;
|
||||
margin-left: -5.5em;
|
||||
}
|
||||
pre code:hover {
|
||||
background: #fec;
|
||||
color: #360;
|
||||
}
|
||||
h1, h2 {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
h1 {
|
||||
font-size: 1.7em;
|
||||
text-align: center;
|
||||
border: 1em solid #777;
|
||||
border-width: .05em 0;
|
||||
margin: 3em 0;
|
||||
}
|
||||
h2 {
|
||||
font-size: 1.5em;
|
||||
font-weight: normal;
|
||||
background: #f7f7f7;
|
||||
border-top: .07em solid #fff;
|
||||
border-bottom: .07em solid #bbb;
|
||||
border-radius: .5em .5em 0 0;
|
||||
padding-left: .4em;
|
||||
margin-top: 3em;
|
||||
}
|
||||
h3 {
|
||||
border-bottom: .1em solid #999;
|
||||
}
|
||||
h1 a, h3 a, h5 a,
|
||||
h2 a, h4 a, h6 a {
|
||||
color: inherit;
|
||||
display: block;
|
||||
background: none;
|
||||
border: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
#mp ul,
|
||||
#mp ol {
|
||||
border-left: .3em solid #ddd;
|
||||
}
|
||||
#m>ul,
|
||||
#m>ol {
|
||||
border-color: #bbb;
|
||||
}
|
||||
#mp ul>li {
|
||||
list-style-type: disc;
|
||||
}
|
||||
#mp ul>li,
|
||||
#mp ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
}
|
||||
blockquote {
|
||||
font-family: serif;
|
||||
background: #f7f7f7;
|
||||
border: .07em dashed #ccc;
|
||||
padding: 0 2em;
|
||||
margin: 1em 0;
|
||||
}
|
||||
small {
|
||||
opacity: .8;
|
||||
a {
|
||||
text-decoration: none;
|
||||
}
|
||||
#toc {
|
||||
margin: 0 1em;
|
||||
@@ -312,14 +72,6 @@ small {
|
||||
color: #6b3;
|
||||
text-shadow: .02em 0 0 #6b3;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
margin: 1em 0;
|
||||
}
|
||||
th, td {
|
||||
padding: .2em .5em;
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
blink {
|
||||
animation: blinker .7s cubic-bezier(.9, 0, .1, 1) infinite;
|
||||
}
|
||||
@@ -332,6 +84,36 @@ blink {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
.mdo pre {
|
||||
counter-reset: precode;
|
||||
}
|
||||
.mdo pre code {
|
||||
counter-increment: precode;
|
||||
display: inline-block;
|
||||
border: none;
|
||||
border-bottom: 1px solid #cdc;
|
||||
min-width: calc(100% - .6em);
|
||||
}
|
||||
.mdo pre code:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
.mdo pre code::before {
|
||||
content: counter(precode);
|
||||
-webkit-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
display: inline-block;
|
||||
text-align: right;
|
||||
font-size: .75em;
|
||||
color: #48a;
|
||||
width: 4em;
|
||||
padding-right: 1.5em;
|
||||
margin-left: -5.5em;
|
||||
}
|
||||
|
||||
|
||||
@media screen {
|
||||
html, body {
|
||||
margin: 0;
|
||||
@@ -348,34 +130,6 @@ blink {
|
||||
#mp {
|
||||
max-width: 52em;
|
||||
margin-bottom: 6em;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
word-wrap: break-word; /*ie*/
|
||||
}
|
||||
a {
|
||||
color: #fff;
|
||||
background: #39b;
|
||||
text-decoration: none;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
border-bottom: .07em solid #079;
|
||||
}
|
||||
h2 {
|
||||
color: #fff;
|
||||
background: #555;
|
||||
margin-top: 2em;
|
||||
border-bottom: .22em solid #999;
|
||||
border-top: none;
|
||||
}
|
||||
h1 {
|
||||
color: #fff;
|
||||
background: #444;
|
||||
font-weight: normal;
|
||||
border-top: .4em solid #fb0;
|
||||
border-bottom: .4em solid #777;
|
||||
border-radius: 0 1em 0 1em;
|
||||
margin: 3em 0 1em 0;
|
||||
padding: .5em 0;
|
||||
}
|
||||
#mn {
|
||||
padding: 1.3em 0 .7em 1em;
|
||||
@@ -428,6 +182,8 @@ blink {
|
||||
color: #444;
|
||||
background: none;
|
||||
text-decoration: underline;
|
||||
margin: 0 .1em;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
}
|
||||
#mh a:hover {
|
||||
@@ -456,6 +212,10 @@ blink {
|
||||
#toolsbox a+a {
|
||||
text-decoration: none;
|
||||
}
|
||||
#lno {
|
||||
position: absolute;
|
||||
right: 0;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -476,55 +236,6 @@ blink {
|
||||
html.dark #toc li {
|
||||
border-width: 0;
|
||||
}
|
||||
html.dark #mp a {
|
||||
background: #057;
|
||||
}
|
||||
html.dark #mp h1 a, html.dark #mp h4 a,
|
||||
html.dark #mp h2 a, html.dark #mp h5 a,
|
||||
html.dark #mp h3 a, html.dark #mp h6 a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
color: #8c0;
|
||||
background: #1a1a1a;
|
||||
border: .07em solid #333;
|
||||
}
|
||||
html.dark #mp ul,
|
||||
html.dark #mp ol {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark #m>ul,
|
||||
html.dark #m>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
html.dark h1 {
|
||||
background: #383838;
|
||||
border-top: .4em solid #b80;
|
||||
border-bottom: .4em solid #4c4c4c;
|
||||
}
|
||||
html.dark h2 {
|
||||
background: #444;
|
||||
border-bottom: .22em solid #555;
|
||||
}
|
||||
html.dark td,
|
||||
html.dark th {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark blockquote {
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
html.dark #mn a:not(:last-child)::after {
|
||||
border-color: rgba(255,255,255,0.3);
|
||||
}
|
||||
@@ -630,12 +341,15 @@ blink {
|
||||
mso-footer-margin: .6in;
|
||||
mso-paper-source: 0;
|
||||
}
|
||||
a {
|
||||
.mdo a {
|
||||
color: #079;
|
||||
text-decoration: none;
|
||||
border-bottom: .07em solid #4ac;
|
||||
padding: 0 .3em;
|
||||
}
|
||||
#repl {
|
||||
display: none;
|
||||
}
|
||||
#toc>ul {
|
||||
border-left: .1em solid #84c4dd;
|
||||
}
|
||||
@@ -660,18 +374,20 @@ blink {
|
||||
a[ctr]::before {
|
||||
content: attr(ctr) '. ';
|
||||
}
|
||||
h1 {
|
||||
.mdo h1 {
|
||||
margin: 2em 0;
|
||||
}
|
||||
h2 {
|
||||
.mdo h2 {
|
||||
margin: 2em 0 0 0;
|
||||
}
|
||||
h1, h2, h3 {
|
||||
.mdo h1,
|
||||
.mdo h2,
|
||||
.mdo h3 {
|
||||
page-break-inside: avoid;
|
||||
}
|
||||
h1::after,
|
||||
h2::after,
|
||||
h3::after {
|
||||
.mdo h1::after,
|
||||
.mdo h2::after,
|
||||
.mdo h3::after {
|
||||
content: 'orz';
|
||||
color: transparent;
|
||||
display: block;
|
||||
@@ -679,20 +395,20 @@ blink {
|
||||
padding: 4em 0 0 0;
|
||||
margin: 0 0 -5em 0;
|
||||
}
|
||||
p {
|
||||
.mdo p {
|
||||
page-break-inside: avoid;
|
||||
}
|
||||
table {
|
||||
.mdo table {
|
||||
page-break-inside: auto;
|
||||
}
|
||||
tr {
|
||||
.mdo tr {
|
||||
page-break-inside: avoid;
|
||||
page-break-after: auto;
|
||||
}
|
||||
thead {
|
||||
.mdo thead {
|
||||
display: table-header-group;
|
||||
}
|
||||
tfoot {
|
||||
.mdo tfoot {
|
||||
display: table-footer-group;
|
||||
}
|
||||
#mp a.vis::after {
|
||||
@@ -700,31 +416,32 @@ blink {
|
||||
border-bottom: 1px solid #bbb;
|
||||
color: #444;
|
||||
}
|
||||
blockquote {
|
||||
.mdo blockquote {
|
||||
border-color: #555;
|
||||
}
|
||||
code {
|
||||
.mdo code {
|
||||
border-color: #bbb;
|
||||
}
|
||||
pre, pre code {
|
||||
.mdo pre,
|
||||
.mdo pre code {
|
||||
border-color: #999;
|
||||
}
|
||||
pre code::before {
|
||||
.mdo pre code::before {
|
||||
color: #058;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark a {
|
||||
html.dark .mdo a {
|
||||
color: #000;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
html.dark .mdo pre,
|
||||
html.dark .mdo code {
|
||||
color: #240;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
html.dark .mdo p>em,
|
||||
html.dark .mdo li>em,
|
||||
html.dark .mdo td>em {
|
||||
color: #940;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
<!DOCTYPE html><html><head>
|
||||
<meta charset="utf-8">
|
||||
<title>📝🎉 {{ title }}</title> <!-- 📜 -->
|
||||
<title>📝🎉 {{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||
<link href="/.cpr/md.css?_={{ ts }}" rel="stylesheet">
|
||||
<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/md.css?_={{ ts }}">
|
||||
{%- if edit %}
|
||||
<link href="/.cpr/md2.css?_={{ ts }}" rel="stylesheet">
|
||||
<link rel="stylesheet" href="/.cpr/md2.css?_={{ ts }}">
|
||||
{%- endif %}
|
||||
</head>
|
||||
<body>
|
||||
@@ -14,7 +15,7 @@
|
||||
<a id="lightswitch" href="#">go dark</a>
|
||||
<a id="navtoggle" href="#">hide nav</a>
|
||||
{%- if edit %}
|
||||
<a id="save" href="?edit" tt="Hotkey: ctrl-s">save</a>
|
||||
<a id="save" href="{{ arg_base }}edit" tt="Hotkey: ctrl-s">save</a>
|
||||
<a id="sbs" href="#" tt="editor and preview side by side">sbs</a>
|
||||
<a id="nsbs" href="#" tt="switch between editor and preview$NHotkey: ctrl-e">editor</a>
|
||||
<div id="toolsbox">
|
||||
@@ -25,10 +26,11 @@
|
||||
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
|
||||
<a id="help" href="#">help</a>
|
||||
</div>
|
||||
<span id="lno">L#</span>
|
||||
{%- else %}
|
||||
<a href="?edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
|
||||
<a href="?edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
||||
<a href="?raw">view raw</a>
|
||||
<a href="{{ arg_base }}edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
|
||||
<a href="{{ arg_base }}edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
||||
<a href="{{ arg_base }}raw">view raw</a>
|
||||
{%- endif %}
|
||||
</div>
|
||||
<div id="toc"></div>
|
||||
@@ -42,8 +44,9 @@
|
||||
if you're still reading this, check that javascript is allowed
|
||||
</div>
|
||||
</div>
|
||||
<div id="mp"></div>
|
||||
<div id="mp" class="mdo"></div>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
|
||||
{%- if edit %}
|
||||
<div id="helpbox">
|
||||
@@ -132,13 +135,13 @@ var md_opt = {
|
||||
|
||||
(function () {
|
||||
var l = localStorage,
|
||||
drk = l.getItem('lightmode') != 1,
|
||||
drk = l.lightmode != 1,
|
||||
btn = document.getElementById("lightswitch"),
|
||||
f = function (e) {
|
||||
if (e) { e.preventDefault(); drk = !drk; }
|
||||
document.documentElement.setAttribute("class", drk? "dark":"light");
|
||||
btn.innerHTML = "go " + (drk ? "light":"dark");
|
||||
l.setItem('lightmode', drk? 0:1);
|
||||
l.lightmode = drk? 0:1;
|
||||
};
|
||||
|
||||
btn.onclick = f;
|
||||
|
||||
@@ -24,23 +24,6 @@ var dbg = function () { };
|
||||
var md_plug = {};
|
||||
|
||||
|
||||
function hesc(txt) {
|
||||
return txt.replace(/&/g, "&").replace(/</g, "<").replace(/>/g, ">");
|
||||
}
|
||||
|
||||
|
||||
function cls(dom, name, add) {
|
||||
var re = new RegExp('(^| )' + name + '( |$)');
|
||||
var lst = (dom.getAttribute('class') + '').replace(re, "$1$2").replace(/ /, "");
|
||||
dom.setAttribute('class', lst + (add ? ' ' + name : ''));
|
||||
}
|
||||
|
||||
|
||||
function statify(obj) {
|
||||
return JSON.parse(JSON.stringify(obj));
|
||||
}
|
||||
|
||||
|
||||
// dodge browser issues
|
||||
(function () {
|
||||
var ua = navigator.userAgent;
|
||||
@@ -65,7 +48,7 @@ function statify(obj) {
|
||||
if (a > 0)
|
||||
loc.push(n[a]);
|
||||
|
||||
var dec = hesc(uricom_dec(n[a])[0]);
|
||||
var dec = esc(uricom_dec(n[a])[0]);
|
||||
|
||||
nav.push('<a href="/' + loc.join('/') + '">' + dec + '</a>');
|
||||
}
|
||||
@@ -73,6 +56,26 @@ function statify(obj) {
|
||||
})();
|
||||
|
||||
|
||||
// image load handler
|
||||
var img_load = (function () {
|
||||
var r = {};
|
||||
r.callbacks = [];
|
||||
|
||||
function fire() {
|
||||
for (var a = 0; a < r.callbacks.length; a++)
|
||||
r.callbacks[a]();
|
||||
}
|
||||
|
||||
var timeout = null;
|
||||
r.done = function () {
|
||||
clearTimeout(timeout);
|
||||
timeout = setTimeout(fire, 500);
|
||||
};
|
||||
|
||||
return r;
|
||||
})();
|
||||
|
||||
|
||||
// faster than replacing the entire html (chrome 1.8x, firefox 1.6x)
|
||||
function copydom(src, dst, lv) {
|
||||
var sc = src.childNodes,
|
||||
@@ -185,7 +188,7 @@ function md_plug_err(ex, js) {
|
||||
errbox.style.cssText = 'position:absolute;top:0;left:0;padding:1em .5em;background:#2b2b2b;color:#fc5'
|
||||
errbox.textContent = msg;
|
||||
errbox.onclick = function () {
|
||||
alert('' + ex.stack);
|
||||
modal.alert('<pre>' + esc(ex.stack) + '</pre>');
|
||||
};
|
||||
if (o) {
|
||||
errbox.appendChild(o);
|
||||
@@ -264,7 +267,14 @@ function convert_markdown(md_text, dest_dom) {
|
||||
|
||||
throw ex;
|
||||
}
|
||||
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
||||
var md_dom = dest_dom;
|
||||
try {
|
||||
md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
||||
}
|
||||
catch (ex) {
|
||||
md_dom.innerHTML = md_html;
|
||||
window.copydom = noop;
|
||||
}
|
||||
|
||||
var nodes = md_dom.getElementsByTagName('a');
|
||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||
@@ -356,6 +366,10 @@ function convert_markdown(md_text, dest_dom) {
|
||||
|
||||
copydom(md_dom, dest_dom, 0);
|
||||
|
||||
var imgs = dest_dom.getElementsByTagName('img');
|
||||
for (var a = 0, aa = imgs.length; a < aa; a++)
|
||||
imgs[a].onload = img_load.done;
|
||||
|
||||
if (ext && ext[0].render2)
|
||||
try {
|
||||
ext[0].render2(dest_dom);
|
||||
@@ -490,13 +504,16 @@ function init_toc() {
|
||||
// "main" :p
|
||||
convert_markdown(dom_src.value, dom_pre);
|
||||
var toc = init_toc();
|
||||
img_load.callbacks = [toc.refresh];
|
||||
|
||||
|
||||
// scroll handler
|
||||
var redraw = (function () {
|
||||
var sbs = false;
|
||||
var sbs = true;
|
||||
function onresize() {
|
||||
sbs = window.matchMedia('(min-width: 64em)').matches;
|
||||
if (window.matchMedia)
|
||||
sbs = window.matchMedia('(min-width: 64em)').matches;
|
||||
|
||||
var y = (dom_hbar.offsetTop + dom_hbar.offsetHeight) + 'px';
|
||||
if (sbs) {
|
||||
dom_toc.style.top = y;
|
||||
|
||||
@@ -50,7 +50,7 @@
|
||||
outline: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
font-family: 'consolas', monospace, monospace;
|
||||
font-family: 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
|
||||
@@ -98,7 +98,7 @@ var draw_md = (function () {
|
||||
var src = dom_src.value;
|
||||
convert_markdown(src, dom_pre);
|
||||
|
||||
var lines = hesc(src).replace(/\r/g, "").split('\n');
|
||||
var lines = esc(src).replace(/\r/g, "").split('\n');
|
||||
nlines = lines.length;
|
||||
var html = [];
|
||||
for (var a = 0; a < lines.length; a++)
|
||||
@@ -108,7 +108,7 @@ var draw_md = (function () {
|
||||
map_src = genmap(dom_ref, map_src);
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
|
||||
cls(ebi('save'), 'disabled', src == server_md);
|
||||
clmod(ebi('save'), 'disabled', src == server_md);
|
||||
|
||||
var t1 = Date.now();
|
||||
delay = t1 - t0 > 100 ? 25 : 1;
|
||||
@@ -127,6 +127,12 @@ var draw_md = (function () {
|
||||
})();
|
||||
|
||||
|
||||
// discard TOC callback, just regen editor scroll map
|
||||
img_load.callbacks = [function () {
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
}];
|
||||
|
||||
|
||||
// resize handler
|
||||
redraw = (function () {
|
||||
function onresize() {
|
||||
@@ -136,7 +142,6 @@ redraw = (function () {
|
||||
dom_ref.style.width = getComputedStyle(dom_src).offsetWidth + 'px';
|
||||
map_src = genmap(dom_ref, map_src);
|
||||
map_pre = genmap(dom_pre, map_pre);
|
||||
dbg(document.body.clientWidth + 'x' + document.body.clientHeight);
|
||||
}
|
||||
function setsbs() {
|
||||
dom_wrap.setAttribute('class', '');
|
||||
@@ -225,44 +230,40 @@ redraw = (function () {
|
||||
|
||||
// modification checker
|
||||
function Modpoll() {
|
||||
this.skip_one = true;
|
||||
this.disabled = false;
|
||||
|
||||
this.periodic = function () {
|
||||
var that = this;
|
||||
setTimeout(function () {
|
||||
that.periodic();
|
||||
}, 1000 * md_opt.modpoll_freq);
|
||||
var r = {
|
||||
skip_one: true,
|
||||
disabled: false
|
||||
};
|
||||
|
||||
r.periodic = function () {
|
||||
var skip = null;
|
||||
|
||||
if (toast.visible)
|
||||
skip = 'toast';
|
||||
|
||||
else if (this.skip_one)
|
||||
else if (r.skip_one)
|
||||
skip = 'saved';
|
||||
|
||||
else if (this.disabled)
|
||||
else if (r.disabled)
|
||||
skip = 'disabled';
|
||||
|
||||
if (skip) {
|
||||
console.log('modpoll skip, ' + skip);
|
||||
this.skip_one = false;
|
||||
r.skip_one = false;
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('modpoll...');
|
||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.modpoll = this;
|
||||
xhr.open('GET', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = this.cb;
|
||||
xhr.onreadystatechange = r.cb;
|
||||
xhr.send();
|
||||
}
|
||||
};
|
||||
|
||||
this.cb = function () {
|
||||
if (this.modpoll.disabled || this.modpoll.skip_one) {
|
||||
r.cb = function () {
|
||||
if (r.disabled || r.skip_one) {
|
||||
console.log('modpoll abort');
|
||||
return;
|
||||
}
|
||||
@@ -283,7 +284,7 @@ function Modpoll() {
|
||||
|
||||
if (server_ref != server_now) {
|
||||
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
|
||||
this.modpoll.disabled = true;
|
||||
r.disabled = true;
|
||||
var msg = [
|
||||
"The document has changed on the server.",
|
||||
"The changes will NOT be loaded into your editor automatically.",
|
||||
@@ -297,12 +298,12 @@ function Modpoll() {
|
||||
}
|
||||
|
||||
console.log('modpoll eq');
|
||||
}
|
||||
};
|
||||
|
||||
if (md_opt.modpoll_freq > 0)
|
||||
this.periodic();
|
||||
setInterval(r.periodic, 1000 * md_opt.modpoll_freq);
|
||||
|
||||
return this;
|
||||
return r;
|
||||
}
|
||||
var modpoll = new Modpoll();
|
||||
|
||||
@@ -326,26 +327,32 @@ function save(e) {
|
||||
return toast.inf(2, "no changes");
|
||||
|
||||
var force = (save_cls.indexOf('force-save') >= 0);
|
||||
if (force && !confirm('confirm that you wish to lose the changes made on the server since you opened this document'))
|
||||
return toast.inf(3, 'aborted');
|
||||
function save2() {
|
||||
var txt = dom_src.value,
|
||||
fd = new FormData();
|
||||
|
||||
var txt = dom_src.value;
|
||||
fd.append("act", "tput");
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
|
||||
var fd = new FormData();
|
||||
fd.append("act", "tput");
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_cb;
|
||||
xhr.btn = save_btn;
|
||||
xhr.txt = txt;
|
||||
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_cb;
|
||||
xhr.btn = save_btn;
|
||||
xhr.txt = txt;
|
||||
modpoll.skip_one = true; // skip one iteration while we save
|
||||
xhr.send(fd);
|
||||
}
|
||||
|
||||
modpoll.skip_one = true; // skip one iteration while we save
|
||||
xhr.send(fd);
|
||||
if (!force)
|
||||
save2();
|
||||
else
|
||||
modal.confirm('confirm that you wish to lose the changes made on the server since you opened this document', save2, function () {
|
||||
toast.inf(3, 'aborted');
|
||||
});
|
||||
}
|
||||
|
||||
function save_cb() {
|
||||
@@ -353,19 +360,19 @@ function save_cb() {
|
||||
return;
|
||||
|
||||
if (this.status !== 200)
|
||||
return alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
|
||||
var r;
|
||||
try {
|
||||
r = JSON.parse(this.responseText);
|
||||
}
|
||||
catch (ex) {
|
||||
return alert('Failed to parse reply from server:\n\n' + this.responseText);
|
||||
return toast.err(0, 'Failed to parse reply from server:\n\n' + this.responseText);
|
||||
}
|
||||
|
||||
if (!r.ok) {
|
||||
if (!this.btn.classList.contains('force-save')) {
|
||||
this.btn.classList.add('force-save');
|
||||
if (!clgot(this.btn, 'force-save')) {
|
||||
clmod(this.btn, 'force-save', 1);
|
||||
var msg = [
|
||||
'This file has been modified since you started editing it!\n',
|
||||
'if you really want to overwrite, press save again.\n',
|
||||
@@ -375,15 +382,13 @@ function save_cb() {
|
||||
r.lastmod + ' lastmod on the server now,',
|
||||
r.now + ' server time now,\n',
|
||||
];
|
||||
alert(msg.join('\n'));
|
||||
return toast.err(0, msg.join('\n'));
|
||||
}
|
||||
else {
|
||||
alert('Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
|
||||
}
|
||||
return;
|
||||
else
|
||||
return toast.err(0, 'Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
|
||||
}
|
||||
|
||||
this.btn.classList.remove('force-save');
|
||||
clmod(this.btn, 'force-save');
|
||||
//alert('save OK -- wrote ' + r.size + ' bytes.\n\nsha512: ' + r.sha512);
|
||||
|
||||
run_savechk(r.lastmod, this.txt, this.btn, 0);
|
||||
@@ -407,10 +412,8 @@ function savechk_cb() {
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200) {
|
||||
alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return;
|
||||
}
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
|
||||
var doc1 = this.txt.replace(/\r\n/g, "\n");
|
||||
var doc2 = this.responseText.replace(/\r\n/g, "\n");
|
||||
@@ -423,12 +426,12 @@ function savechk_cb() {
|
||||
}, 100);
|
||||
return;
|
||||
}
|
||||
alert(
|
||||
modal.alert(
|
||||
'Error! The document on the server does not appear to have saved correctly (your editor contents and the server copy is not identical). Place the document on your clipboard for now and check the server logs for hints\n\n' +
|
||||
'Length: yours=' + doc1.length + ', server=' + doc2.length
|
||||
);
|
||||
alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
|
||||
alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
|
||||
modal.alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
|
||||
modal.alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -865,15 +868,47 @@ function iter_uni(e) {
|
||||
function cfg_uni(e) {
|
||||
if (e) e.preventDefault();
|
||||
|
||||
var reply = prompt("unicode whitelist", esc_uni_whitelist);
|
||||
if (reply === null)
|
||||
return;
|
||||
|
||||
esc_uni_whitelist = reply;
|
||||
js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
|
||||
modal.prompt("unicode whitelist", esc_uni_whitelist, function (reply) {
|
||||
esc_uni_whitelist = reply;
|
||||
js_uni_whitelist = eval('\'' + esc_uni_whitelist + '\'');
|
||||
}, null);
|
||||
}
|
||||
|
||||
|
||||
var set_lno = (function () {
|
||||
var t = null,
|
||||
pi = null,
|
||||
pv = null,
|
||||
lno = ebi('lno');
|
||||
|
||||
function poke() {
|
||||
clearTimeout(t);
|
||||
t = setTimeout(fire, 20);
|
||||
}
|
||||
|
||||
function fire() {
|
||||
try {
|
||||
clearTimeout(t);
|
||||
|
||||
var i = dom_src.selectionStart;
|
||||
if (i === pi)
|
||||
return;
|
||||
|
||||
var v = 'L' + dom_src.value.slice(0, i).split('\n').length;
|
||||
if (v != pv)
|
||||
lno.innerHTML = v;
|
||||
|
||||
pi = i;
|
||||
pv = v;
|
||||
}
|
||||
catch (e) { }
|
||||
}
|
||||
|
||||
timer.add(fire);
|
||||
return poke;
|
||||
})();
|
||||
|
||||
|
||||
// hotkeys / toolbar
|
||||
(function () {
|
||||
function keydown(ev) {
|
||||
@@ -892,6 +927,8 @@ function cfg_uni(e) {
|
||||
if (document.activeElement != dom_src)
|
||||
return true;
|
||||
|
||||
set_lno();
|
||||
|
||||
if (ctrl(ev)) {
|
||||
if (ev.code == "KeyH" || kc == 72) {
|
||||
md_header(ev.shiftKey);
|
||||
@@ -1086,9 +1123,9 @@ action_stack = (function () {
|
||||
ref = newtxt;
|
||||
dbg('undos(%d) redos(%d)', hist.un.length, hist.re.length);
|
||||
if (hist.un.length > 0)
|
||||
dbg(statify(hist.un.slice(-1)[0]));
|
||||
dbg(jcp(hist.un.slice(-1)[0]));
|
||||
if (hist.re.length > 0)
|
||||
dbg(statify(hist.re.slice(-1)[0]));
|
||||
dbg(jcp(hist.re.slice(-1)[0]));
|
||||
}
|
||||
|
||||
return {
|
||||
|
||||
@@ -7,6 +7,8 @@ html .editor-toolbar>button.active { border-color: rgba(0,0,0,0.4); background:
|
||||
html .editor-toolbar>i.separator { border-left: 1px solid #ccc; }
|
||||
html .editor-toolbar.disabled-for-preview>button:not(.no-disable) { opacity: .35 }
|
||||
|
||||
|
||||
|
||||
html {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
@@ -18,6 +20,22 @@ html, body {
|
||||
background: #f7f7f7;
|
||||
color: #333;
|
||||
}
|
||||
#toast {
|
||||
bottom: auto;
|
||||
top: 1.4em;
|
||||
}
|
||||
#repl {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
right: .5em;
|
||||
border: none;
|
||||
color: inherit;
|
||||
background: none;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
|
||||
|
||||
#mn {
|
||||
font-weight: normal;
|
||||
margin: 1.3em 0 .7em 1em;
|
||||
@@ -59,148 +77,12 @@ html .editor-toolbar>button.disabled {
|
||||
html .editor-toolbar>button.save.force-save {
|
||||
background: #f97;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* copied from md.css for now */
|
||||
.mdo pre,
|
||||
.mdo code,
|
||||
.mdo a {
|
||||
color: #480;
|
||||
background: #f7f7f7;
|
||||
border: .07em solid #ddd;
|
||||
border-radius: .2em;
|
||||
padding: .1em .3em;
|
||||
margin: 0 .1em;
|
||||
}
|
||||
.mdo code {
|
||||
font-size: .96em;
|
||||
}
|
||||
.mdo pre,
|
||||
.mdo code {
|
||||
font-family: monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
.mdo pre code {
|
||||
display: block;
|
||||
margin: 0 -.3em;
|
||||
padding: .4em .5em;
|
||||
line-height: 1.1em;
|
||||
}
|
||||
.mdo a {
|
||||
color: #fff;
|
||||
background: #39b;
|
||||
text-decoration: none;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
border-bottom: .07em solid #079;
|
||||
}
|
||||
.mdo h2 {
|
||||
color: #fff;
|
||||
background: #555;
|
||||
margin-top: 2em;
|
||||
border-bottom: .22em solid #999;
|
||||
border-top: none;
|
||||
}
|
||||
.mdo h1 {
|
||||
color: #fff;
|
||||
background: #444;
|
||||
font-weight: normal;
|
||||
border-top: .4em solid #fb0;
|
||||
border-bottom: .4em solid #777;
|
||||
border-radius: 0 1em 0 1em;
|
||||
margin: 3em 0 1em 0;
|
||||
padding: .5em 0;
|
||||
}
|
||||
h1, h2 {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
h1 {
|
||||
font-size: 1.7em;
|
||||
text-align: center;
|
||||
border: 1em solid #777;
|
||||
border-width: .05em 0;
|
||||
margin: 3em 0;
|
||||
}
|
||||
h2 {
|
||||
font-size: 1.5em;
|
||||
font-weight: normal;
|
||||
background: #f7f7f7;
|
||||
border-top: .07em solid #fff;
|
||||
border-bottom: .07em solid #bbb;
|
||||
border-radius: .5em .5em 0 0;
|
||||
padding-left: .4em;
|
||||
margin-top: 3em;
|
||||
}
|
||||
.mdo ul,
|
||||
.mdo ol {
|
||||
border-left: .3em solid #ddd;
|
||||
}
|
||||
.mdo>ul,
|
||||
.mdo>ol {
|
||||
border-color: #bbb;
|
||||
}
|
||||
.mdo ul>li {
|
||||
list-style-type: disc;
|
||||
}
|
||||
.mdo ul>li,
|
||||
.mdo ol>li {
|
||||
margin: .7em 0;
|
||||
}
|
||||
strong {
|
||||
color: #000;
|
||||
}
|
||||
p>em,
|
||||
li>em,
|
||||
td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
}
|
||||
blockquote {
|
||||
font-family: serif;
|
||||
background: #f7f7f7;
|
||||
border: .07em dashed #ccc;
|
||||
padding: 0 2em;
|
||||
margin: 1em 0;
|
||||
}
|
||||
small {
|
||||
opacity: .8;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
td {
|
||||
padding: .2em .5em;
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
th {
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* mde support */
|
||||
.mdo {
|
||||
padding: 1em;
|
||||
background: #f7f7f7;
|
||||
}
|
||||
html.dark .mdo {
|
||||
background: #1c1c1c;
|
||||
}
|
||||
.CodeMirror {
|
||||
background: #f7f7f7;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/* darkmode */
|
||||
html.dark .mdo,
|
||||
html.dark .CodeMirror {
|
||||
@@ -224,55 +106,6 @@ html.dark .CodeMirror-selectedtext {
|
||||
background: #246;
|
||||
color: #fff;
|
||||
}
|
||||
html.dark .mdo a {
|
||||
background: #057;
|
||||
}
|
||||
html.dark .mdo h1 a, html.dark .mdo h4 a,
|
||||
html.dark .mdo h2 a, html.dark .mdo h5 a,
|
||||
html.dark .mdo h3 a, html.dark .mdo h6 a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
html.dark pre,
|
||||
html.dark code {
|
||||
color: #8c0;
|
||||
background: #1a1a1a;
|
||||
border: .07em solid #333;
|
||||
}
|
||||
html.dark .mdo ul,
|
||||
html.dark .mdo ol {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark .mdo>ul,
|
||||
html.dark .mdo>ol {
|
||||
border-color: #555;
|
||||
}
|
||||
html.dark strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark p>em,
|
||||
html.dark li>em,
|
||||
html.dark td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
html.dark h1 {
|
||||
background: #383838;
|
||||
border-top: .4em solid #b80;
|
||||
border-bottom: .4em solid #4c4c4c;
|
||||
}
|
||||
html.dark h2 {
|
||||
background: #444;
|
||||
border-bottom: .22em solid #555;
|
||||
}
|
||||
html.dark td,
|
||||
html.dark th {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark blockquote {
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -308,4 +141,15 @@ html.dark .editor-toolbar>button.active {
|
||||
html.dark .editor-toolbar::after,
|
||||
html.dark .editor-toolbar::before {
|
||||
background: none;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
/* ui.css overrides */
|
||||
.mdo {
|
||||
padding: 1em;
|
||||
background: #f7f7f7;
|
||||
}
|
||||
html.dark .mdo {
|
||||
background: #1c1c1c;
|
||||
}
|
||||
|
||||
@@ -3,9 +3,10 @@
|
||||
<title>📝🎉 {{ title }}</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.7">
|
||||
<link href="/.cpr/mde.css?_={{ ts }}" rel="stylesheet">
|
||||
<link href="/.cpr/deps/mini-fa.css?_={{ ts }}" rel="stylesheet">
|
||||
<link href="/.cpr/deps/easymde.css?_={{ ts }}" rel="stylesheet">
|
||||
<link rel="stylesheet" href="/.cpr/ui.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/mde.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/deps/mini-fa.css?_={{ ts }}">
|
||||
<link rel="stylesheet" href="/.cpr/deps/easymde.css?_={{ ts }}">
|
||||
</head>
|
||||
<body>
|
||||
<div id="mw">
|
||||
@@ -20,6 +21,7 @@
|
||||
<textarea id="mt" style="display:none" autocomplete="off">{{ md }}</textarea>
|
||||
</div>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
<script>
|
||||
|
||||
var last_modified = {{ lastmod }};
|
||||
@@ -31,11 +33,11 @@ var md_opt = {
|
||||
|
||||
var lightswitch = (function () {
|
||||
var l = localStorage,
|
||||
drk = l.getItem('lightmode') != 1,
|
||||
drk = l.lightmode != 1,
|
||||
f = function (e) {
|
||||
if (e) drk = !drk;
|
||||
document.documentElement.setAttribute("class", drk? "dark":"light");
|
||||
l.setItem('lightmode', drk? 0:1);
|
||||
l.lightmode = drk? 0:1;
|
||||
};
|
||||
f();
|
||||
return f;
|
||||
@@ -43,6 +45,7 @@ l.setItem('lightmode', drk? 0:1);
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
||||
<script src="/.cpr/mde.js?_={{ ts }}"></script>
|
||||
</body></html>
|
||||
|
||||
@@ -96,39 +96,41 @@ function md_changed(mde, on_srv) {
|
||||
var md_now = mde.value();
|
||||
var save_btn = QS('.editor-toolbar button.save');
|
||||
|
||||
if (md_now == window.md_saved)
|
||||
save_btn.classList.add('disabled');
|
||||
else
|
||||
save_btn.classList.remove('disabled');
|
||||
|
||||
clmod(save_btn, 'disabled', md_now == window.md_saved);
|
||||
set_jumpto();
|
||||
}
|
||||
|
||||
function save(mde) {
|
||||
var save_btn = QS('.editor-toolbar button.save');
|
||||
if (save_btn.classList.contains('disabled'))
|
||||
if (clgot(save_btn, 'disabled'))
|
||||
return toast.inf(2, 'no changes');
|
||||
|
||||
var force = save_btn.classList.contains('force-save');
|
||||
if (force && !confirm('confirm that you wish to lose the changes made on the server since you opened this document'))
|
||||
return toast.inf(3, 'aborted');
|
||||
var force = clgot(save_btn, 'force-save');
|
||||
function save2() {
|
||||
var txt = mde.value();
|
||||
|
||||
var txt = mde.value();
|
||||
var fd = new FormData();
|
||||
fd.append("act", "tput");
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
|
||||
var fd = new FormData();
|
||||
fd.append("act", "tput");
|
||||
fd.append("lastmod", (force ? -1 : last_modified));
|
||||
fd.append("body", txt);
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_cb;
|
||||
xhr.btn = save_btn;
|
||||
xhr.mde = mde;
|
||||
xhr.txt = txt;
|
||||
xhr.send(fd);
|
||||
}
|
||||
|
||||
var url = (document.location + '').split('?')[0];
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.open('POST', url, true);
|
||||
xhr.responseType = 'text';
|
||||
xhr.onreadystatechange = save_cb;
|
||||
xhr.btn = save_btn;
|
||||
xhr.mde = mde;
|
||||
xhr.txt = txt;
|
||||
xhr.send(fd);
|
||||
if (!force)
|
||||
save2();
|
||||
else
|
||||
modal.confirm('confirm that you wish to lose the changes made on the server since you opened this document', save2, function () {
|
||||
toast.inf(3, 'aborted');
|
||||
});
|
||||
}
|
||||
|
||||
function save_cb() {
|
||||
@@ -136,19 +138,19 @@ function save_cb() {
|
||||
return;
|
||||
|
||||
if (this.status !== 200)
|
||||
return alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
|
||||
var r;
|
||||
try {
|
||||
r = JSON.parse(this.responseText);
|
||||
}
|
||||
catch (ex) {
|
||||
return alert('Failed to parse reply from server:\n\n' + this.responseText);
|
||||
return toast.err(0, 'Failed to parse reply from server:\n\n' + this.responseText);
|
||||
}
|
||||
|
||||
if (!r.ok) {
|
||||
if (!this.btn.classList.contains('force-save')) {
|
||||
this.btn.classList.add('force-save');
|
||||
if (!clgot(this.btn, 'force-save')) {
|
||||
clmod(this.btn, 'force-save', 1);
|
||||
var msg = [
|
||||
'This file has been modified since you started editing it!\n',
|
||||
'if you really want to overwrite, press save again.\n',
|
||||
@@ -158,15 +160,13 @@ function save_cb() {
|
||||
r.lastmod + ' lastmod on the server now,',
|
||||
r.now + ' server time now,\n',
|
||||
];
|
||||
alert(msg.join('\n'));
|
||||
return toast.err(0, msg.join('\n'));
|
||||
}
|
||||
else {
|
||||
alert('Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
|
||||
}
|
||||
return;
|
||||
else
|
||||
return toast.err(0, 'Error! Save failed. Maybe this JSON explains why:\n\n' + this.responseText);
|
||||
}
|
||||
|
||||
this.btn.classList.remove('force-save');
|
||||
clmod(this.btn, 'force-save');
|
||||
//alert('save OK -- wrote ' + r.size + ' bytes.\n\nsha512: ' + r.sha512);
|
||||
|
||||
// download the saved doc from the server and compare
|
||||
@@ -186,35 +186,23 @@ function save_chk() {
|
||||
if (this.readyState != XMLHttpRequest.DONE)
|
||||
return;
|
||||
|
||||
if (this.status !== 200) {
|
||||
alert('Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
return;
|
||||
}
|
||||
if (this.status !== 200)
|
||||
return toast.err(0, 'Error! The file was NOT saved.\n\n' + this.status + ": " + (this.responseText + '').replace(/^<pre>/, ""));
|
||||
|
||||
var doc1 = this.txt.replace(/\r\n/g, "\n");
|
||||
var doc2 = this.responseText.replace(/\r\n/g, "\n");
|
||||
if (doc1 != doc2) {
|
||||
alert(
|
||||
modal.alert(
|
||||
'Error! The document on the server does not appear to have saved correctly (your editor contents and the server copy is not identical). Place the document on your clipboard for now and check the server logs for hints\n\n' +
|
||||
'Length: yours=' + doc1.length + ', server=' + doc2.length
|
||||
);
|
||||
alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
|
||||
alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
|
||||
modal.alert('yours, ' + doc1.length + ' byte:\n[' + doc1 + ']');
|
||||
modal.alert('server, ' + doc2.length + ' byte:\n[' + doc2 + ']');
|
||||
return;
|
||||
}
|
||||
|
||||
last_modified = this.lastmod;
|
||||
md_changed(this.mde, true);
|
||||
|
||||
var ok = mknod('div');
|
||||
ok.setAttribute('style', 'font-size:6em;font-family:serif;font-weight:bold;color:#cf6;background:#444;border-radius:.3em;padding:.6em 0;position:fixed;top:30%;left:calc(50% - 2em);width:4em;text-align:center;z-index:9001;transition:opacity 0.2s ease-in-out;opacity:1');
|
||||
ok.innerHTML = 'OK✔️';
|
||||
var parent = ebi('m');
|
||||
document.documentElement.appendChild(ok);
|
||||
setTimeout(function () {
|
||||
ok.style.opacity = 0;
|
||||
}, 500);
|
||||
setTimeout(function () {
|
||||
ok.parentNode.removeChild(ok);
|
||||
}, 750);
|
||||
toast.ok(2, 'save OK' + (this.ntry ? '\nattempt ' + this.ntry : ''));
|
||||
}
|
||||
|
||||
@@ -11,14 +11,12 @@ html {
|
||||
background: #333;
|
||||
font-family: sans-serif;
|
||||
text-shadow: 1px 1px 0px #000;
|
||||
touch-action: manipulation;
|
||||
}
|
||||
html, body {
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
body {
|
||||
padding-bottom: 5em;
|
||||
}
|
||||
#box {
|
||||
padding: .5em 1em;
|
||||
background: #2c2c2c;
|
||||
@@ -28,4 +26,4 @@ pre {
|
||||
}
|
||||
a {
|
||||
color: #fc5;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
<title>copyparty</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/msg.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/msg.css?_={{ ts }}">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
@@ -3,6 +3,9 @@ html, body, #wrap {
|
||||
background: #f7f7f7;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
html {
|
||||
touch-action: manipulation;
|
||||
}
|
||||
#wrap {
|
||||
max-width: 40em;
|
||||
margin: 2em auto;
|
||||
@@ -22,10 +25,26 @@ a {
|
||||
color: #047;
|
||||
background: #fff;
|
||||
text-decoration: none;
|
||||
border-bottom: 1px solid #aaa;
|
||||
border-bottom: 1px solid #8ab;
|
||||
border-radius: .2em;
|
||||
padding: .2em .8em;
|
||||
}
|
||||
.refresh,
|
||||
.logout {
|
||||
float: right;
|
||||
margin-top: -.2em;
|
||||
}
|
||||
.logout {
|
||||
color: #c04;
|
||||
border-color: #c7a;
|
||||
margin-right: .5em;
|
||||
}
|
||||
#repl {
|
||||
border: none;
|
||||
background: none;
|
||||
color: inherit;
|
||||
padding: 0;
|
||||
}
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
}
|
||||
@@ -46,6 +65,16 @@ table {
|
||||
.btns {
|
||||
margin: 1em 0;
|
||||
}
|
||||
#msg {
|
||||
margin: 3em 0;
|
||||
}
|
||||
#msg h1 {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
#msg h1 + p {
|
||||
margin-top: .3em;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
|
||||
html.dark,
|
||||
@@ -62,10 +91,14 @@ html.dark a {
|
||||
background: #057;
|
||||
border-color: #37a;
|
||||
}
|
||||
html.dark .logout {
|
||||
background: #804;
|
||||
border-color: #c28;
|
||||
}
|
||||
html.dark input {
|
||||
color: #fff;
|
||||
background: #624;
|
||||
border: 1px solid #c27;
|
||||
background: #626;
|
||||
border: 1px solid #c2c;
|
||||
border-width: 1px 0 0 0;
|
||||
border-radius: .5em;
|
||||
padding: .5em .7em;
|
||||
@@ -73,4 +106,4 @@ html.dark input {
|
||||
}
|
||||
html.dark .num {
|
||||
border-color: #777;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,12 +6,26 @@
|
||||
<title>copyparty</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=0.8">
|
||||
<link rel="stylesheet" type="text/css" media="screen" href="/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/splash.css?_={{ ts }}">
|
||||
<link rel="stylesheet" media="screen" href="/.cpr/ui.css?_={{ ts }}">
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<div id="wrap">
|
||||
<p>hello {{ this.uname }}</p>
|
||||
<a href="/?h" class="refresh">refresh</a>
|
||||
|
||||
{%- if this.uname == '*' %}
|
||||
<p>howdy stranger <small>(you're not logged in)</small></p>
|
||||
{%- else %}
|
||||
<a href="/?pw=x" class="logout">logout</a>
|
||||
<p>welcome back, <strong>{{ this.uname }}</strong></p>
|
||||
{%- endif %}
|
||||
|
||||
{%- if msg %}
|
||||
<div id="msg">
|
||||
{{ msg }}
|
||||
</div>
|
||||
{%- endif %}
|
||||
|
||||
{%- if avol %}
|
||||
<h1>admin panel:</h1>
|
||||
@@ -59,18 +73,20 @@
|
||||
|
||||
<h1>login for more:</h1>
|
||||
<ul>
|
||||
<form method="post" enctype="multipart/form-data" action="/">
|
||||
<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
|
||||
<input type="hidden" name="act" value="login" />
|
||||
<input type="password" name="cppwd" />
|
||||
<input type="submit" value="Login" />
|
||||
</form>
|
||||
</ul>
|
||||
</div>
|
||||
<a href="#" id="repl">π</a>
|
||||
<script>
|
||||
|
||||
if (localStorage.getItem('lightmode') != 1)
|
||||
if (localStorage.lightmode != 1)
|
||||
document.documentElement.setAttribute("class", "dark");
|
||||
|
||||
</script>
|
||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||
</body>
|
||||
</html>
|
||||
</html>
|
||||
|
||||
copyparty/web/ui.css (new file, 465 lines)
@@ -0,0 +1,465 @@
|
||||
@font-face {
|
||||
font-family: 'scp';
|
||||
font-display: swap;
|
||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
|
||||
}
|
||||
html {
|
||||
touch-action: manipulation;
|
||||
}
|
||||
#tt, #toast {
|
||||
position: fixed;
|
||||
max-width: 34em;
|
||||
max-width: min(34em, 90%);
|
||||
max-width: min(34em, calc(100% - 7em));
|
||||
background: #333;
|
||||
border: 0 solid #777;
|
||||
box-shadow: 0 .2em .5em #111;
|
||||
border-radius: .4em;
|
||||
z-index: 9001;
|
||||
}
|
||||
#tt {
|
||||
max-width: min(34em, calc(100% - 3.3em));
|
||||
overflow: hidden;
|
||||
margin: .7em 0;
|
||||
padding: 0 1.3em;
|
||||
height: 0;
|
||||
opacity: .1;
|
||||
transition: opacity 0.14s, height 0.14s, padding 0.14s;
|
||||
}
|
||||
#toast {
|
||||
bottom: 5em;
|
||||
right: -1em;
|
||||
line-height: 1.5em;
|
||||
padding: 1em 1.3em;
|
||||
margin-left: 3em;
|
||||
border-width: .4em 0;
|
||||
overflow-wrap: break-word;
|
||||
transform: translateX(100%);
|
||||
transition:
|
||||
transform .4s cubic-bezier(.2, 1.2, .5, 1),
|
||||
right .4s cubic-bezier(.2, 1.2, .5, 1);
|
||||
text-shadow: 1px 1px 0 #000;
|
||||
color: #fff;
|
||||
}
|
||||
#toast a {
|
||||
color: inherit;
|
||||
text-shadow: inherit;
|
||||
background: rgba(0, 0, 0, 0.4);
|
||||
border-radius: .3em;
|
||||
padding: .2em .3em;
|
||||
}
|
||||
#toast a#toastc {
|
||||
display: inline-block;
|
||||
position: absolute;
|
||||
overflow: hidden;
|
||||
left: 0;
|
||||
width: 0;
|
||||
opacity: 0;
|
||||
padding: .3em 0;
|
||||
margin: -.3em 0 0 0;
|
||||
line-height: 1.3em;
|
||||
color: #000;
|
||||
border: none;
|
||||
outline: none;
|
||||
text-shadow: none;
|
||||
border-radius: .5em 0 0 .5em;
|
||||
transition: left .3s, width .3s, padding .3s, opacity .3s;
|
||||
}
|
||||
#toastb {
|
||||
max-height: 70vh;
|
||||
overflow-y: auto;
|
||||
}
|
||||
#toast.scroll #toastb {
|
||||
overflow-y: scroll;
|
||||
margin-right: -1.2em;
|
||||
padding-right: .7em;
|
||||
}
|
||||
#toast pre {
|
||||
margin: 0;
|
||||
}
|
||||
#toast.vis {
|
||||
right: 1.3em;
|
||||
transform: inherit;
|
||||
transform: initial;
|
||||
}
|
||||
#toast.vis #toastc {
|
||||
left: -2em;
|
||||
width: .4em;
|
||||
padding: .3em .8em;
|
||||
opacity: 1;
|
||||
}
|
||||
#toast.inf {
|
||||
background: #07a;
|
||||
border-color: #0be;
|
||||
}
|
||||
#toast.inf #toastc {
|
||||
background: #0be;
|
||||
}
|
||||
#toast.ok {
|
||||
background: #380;
|
||||
border-color: #8e4;
|
||||
}
|
||||
#toast.ok #toastc {
|
||||
background: #8e4;
|
||||
}
|
||||
#toast.warn {
|
||||
background: #960;
|
||||
border-color: #fc0;
|
||||
}
|
||||
#toast.warn #toastc {
|
||||
background: #fc0;
|
||||
}
|
||||
#toast.err {
|
||||
background: #900;
|
||||
border-color: #d06;
|
||||
}
|
||||
#toast.err #toastc {
|
||||
background: #d06;
|
||||
}
|
||||
#tt.b {
|
||||
padding: 0 2em;
|
||||
border-radius: .5em;
|
||||
box-shadow: 0 .2em 1em #000;
|
||||
}
|
||||
#tt.show {
|
||||
padding: 1em 1.3em;
|
||||
border-width: .4em 0;
|
||||
height: auto;
|
||||
opacity: 1;
|
||||
}
|
||||
#tt.show.b {
|
||||
padding: 1.5em 2em;
|
||||
border-width: .5em 0;
|
||||
}
|
||||
#modalc code,
|
||||
#tt code {
|
||||
background: #3c3c3c;
|
||||
padding: .1em .3em;
|
||||
border-top: 1px solid #777;
|
||||
border-radius: .3em;
|
||||
line-height: 1.7em;
|
||||
}
|
||||
#tt em {
|
||||
color: #f6a;
|
||||
}
|
||||
html.light #tt {
|
||||
background: #fff;
|
||||
border-color: #888 #000 #777 #000;
|
||||
}
|
||||
html.light #tt,
|
||||
html.light #toast {
|
||||
box-shadow: 0 .3em 1em rgba(0,0,0,0.4);
|
||||
}
|
||||
#modalc code,
|
||||
html.light #tt code {
|
||||
background: #060;
|
||||
color: #fff;
|
||||
}
|
||||
html.light #tt em {
|
||||
color: #d38;
|
||||
}
|
||||
#modal {
|
||||
position: fixed;
|
||||
overflow: auto;
|
||||
top: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
bottom: 0;
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
z-index: 9001;
|
||||
background: rgba(64,64,64,0.6);
|
||||
}
|
||||
#modal>table {
|
||||
width: 100%;
|
||||
height: 100%;
|
||||
}
|
||||
#modal td {
|
||||
text-align: center;
|
||||
}
|
||||
#modalc {
|
||||
position: relative;
|
||||
display: inline-block;
|
||||
background: #f7f7f7;
|
||||
color: #333;
|
||||
text-shadow: none;
|
||||
text-align: left;
|
||||
margin: 3em;
|
||||
padding: 1em 1.1em;
|
||||
border-radius: .6em;
|
||||
box-shadow: 0 .3em 3em rgba(0,0,0,0.5);
|
||||
max-width: 50em;
|
||||
max-height: 30em;
|
||||
overflow: auto;
|
||||
}
|
||||
@media (min-width: 40em) {
|
||||
#modalc {
|
||||
min-width: 30em;
|
||||
}
|
||||
}
|
||||
#modalc li {
|
||||
margin: 1em 0;
|
||||
}
|
||||
#modalc h6 {
|
||||
font-size: 1.3em;
|
||||
border-bottom: 1px solid #999;
|
||||
margin: 0;
|
||||
padding: .3em;
|
||||
text-align: center;
|
||||
}
|
||||
#modalb {
|
||||
position: sticky;
|
||||
text-align: right;
|
||||
padding-top: 1em;
|
||||
bottom: 0;
|
||||
right: 0;
|
||||
}
|
||||
#modalb a {
|
||||
color: #000;
|
||||
background: #ccc;
|
||||
display: inline-block;
|
||||
border-radius: .3em;
|
||||
padding: .5em 1em;
|
||||
outline: none;
|
||||
border: none;
|
||||
}
|
||||
#modalb a:focus,
|
||||
#modalb a:hover {
|
||||
background: #06d;
|
||||
color: #fff;
|
||||
}
|
||||
#modalb a+a {
|
||||
margin-left: .5em;
|
||||
}
|
||||
#modali {
|
||||
display: block;
|
||||
background: #fff;
|
||||
color: #000;
|
||||
width: calc(100% - 1.25em);
|
||||
margin: 1em -.1em 0 -.1em;
|
||||
padding: .5em;
|
||||
outline: none;
|
||||
border: .25em solid #ccc;
|
||||
border-radius: .4em;
|
||||
}
|
||||
#modali:focus {
|
||||
border-color: #06d;
|
||||
}
|
||||
#repl_pre {
|
||||
max-width: 24em;
|
||||
}
|
||||
*:focus,
|
||||
#pctl *:focus,
|
||||
.btn:focus {
|
||||
box-shadow: 0 .1em .2em #fc0 inset;
|
||||
border-radius: .2em;
|
||||
}
|
||||
html.light *:focus,
|
||||
html.light #pctl *:focus,
|
||||
html.light .btn:focus {
|
||||
box-shadow: 0 .1em .2em #037 inset;
|
||||
}
|
||||
input[type="text"]:focus,
|
||||
input:not([type]):focus,
|
||||
textarea:focus {
|
||||
box-shadow: 0 .1em .3em #fc0, 0 -.1em .3em #fc0;
|
||||
}
|
||||
html.light input[type="text"]:focus,
|
||||
html.light input:not([type]):focus,
|
||||
html.light textarea:focus {
|
||||
box-shadow: 0 .1em .3em #037, 0 -.1em .3em #037;
|
||||
}
|
||||
.mdo pre,
|
||||
.mdo code,
|
||||
.mdo a {
|
||||
color: #480;
|
||||
background: #f7f7f7;
|
||||
border: .07em solid #ddd;
|
||||
border-radius: .2em;
|
||||
padding: .1em .3em;
|
||||
margin: 0 .1em;
|
||||
}
|
||||
.mdo pre,
|
||||
.mdo code,
|
||||
.mdo tt {
|
||||
font-family: 'scp', monospace, monospace;
|
||||
white-space: pre-wrap;
|
||||
word-break: break-all;
|
||||
}
|
||||
.mdo code {
|
||||
font-size: .96em;
|
||||
}
|
||||
.mdo h1,
|
||||
.mdo h2 {
|
||||
line-height: 1.5em;
|
||||
}
|
||||
.mdo h1 {
|
||||
font-size: 1.7em;
|
||||
text-align: center;
|
||||
border: 1em solid #777;
|
||||
border-width: .05em 0;
|
||||
margin: 3em 0;
|
||||
}
|
||||
.mdo h2 {
|
||||
font-size: 1.5em;
|
||||
font-weight: normal;
|
||||
background: #f7f7f7;
|
||||
border-top: .07em solid #fff;
|
||||
border-bottom: .07em solid #bbb;
|
||||
border-radius: .5em .5em 0 0;
|
||||
padding-left: .4em;
|
||||
margin-top: 3em;
|
||||
}
|
||||
.mdo h3 {
|
||||
border-bottom: .1em solid #999;
|
||||
}
|
||||
.mdo h1 a, .mdo h3 a, .mdo h5 a,
|
||||
.mdo h2 a, .mdo h4 a, .mdo h6 a {
|
||||
color: inherit;
|
||||
display: block;
|
||||
background: none;
|
||||
border: none;
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
}
|
||||
.mdo ul,
|
||||
.mdo ol {
|
||||
border-left: .3em solid #ddd;
|
||||
}
|
||||
.mdo ul>li,
|
||||
.mdo ol>li {
|
||||
margin: .7em 0;
|
||||
list-style-type: disc;
|
||||
}
|
||||
.mdo strong {
|
||||
color: #000;
|
||||
}
|
||||
.mdo p>em,
|
||||
.mdo li>em,
|
||||
.mdo td>em {
|
||||
color: #c50;
|
||||
padding: .1em;
|
||||
border-bottom: .1em solid #bbb;
|
||||
}
|
||||
.mdo blockquote {
|
||||
font-family: serif;
|
||||
background: #f7f7f7;
|
||||
border: .07em dashed #ccc;
|
||||
padding: 0 2em;
|
||||
margin: 1em 0;
|
||||
}
|
||||
.mdo small {
|
||||
opacity: .8;
|
||||
}
|
||||
.mdo pre code {
|
||||
display: block;
|
||||
margin: 0 -.3em;
|
||||
padding: .4em .5em;
|
||||
line-height: 1.1em;
|
||||
}
|
||||
.mdo pre code:hover {
|
||||
background: #fec;
|
||||
color: #360;
|
||||
}
|
||||
.mdo table {
|
||||
border-collapse: collapse;
|
||||
margin: 1em 0;
|
||||
}
|
||||
.mdo th,
|
||||
.mdo td {
|
||||
padding: .2em .5em;
|
||||
border: .12em solid #aaa;
|
||||
}
|
||||
|
||||
@media screen {
|
||||
.mdo {
|
||||
word-break: break-word;
|
||||
overflow-wrap: break-word;
|
||||
word-wrap: break-word; /*ie*/
|
||||
}
|
||||
html.light .mdo a,
|
||||
.mdo a {
|
||||
color: #fff;
|
||||
background: #39b;
|
||||
text-decoration: none;
|
||||
padding: 0 .3em;
|
||||
border: none;
|
||||
border-bottom: .07em solid #079;
|
||||
}
|
||||
.mdo h1 {
|
||||
color: #fff;
|
||||
background: #444;
|
||||
font-weight: normal;
|
||||
border-top: .4em solid #fb0;
|
||||
border-bottom: .4em solid #777;
|
||||
border-radius: 0 1em 0 1em;
|
||||
margin: 3em 0 1em 0;
|
||||
padding: .5em 0;
|
||||
}
|
||||
.mdo h2 {
|
||||
color: #fff;
|
||||
background: #555;
|
||||
margin-top: 2em;
|
||||
border-bottom: .22em solid #999;
|
||||
border-top: none;
|
||||
}
|
||||
|
||||
|
||||
|
||||
html.dark .mdo a {
|
||||
background: #057;
|
||||
}
|
||||
html.dark .mdo h1 a, html.dark .mdo h4 a,
|
||||
html.dark .mdo h2 a, html.dark .mdo h5 a,
|
||||
html.dark .mdo h3 a, html.dark .mdo h6 a {
|
||||
color: inherit;
|
||||
background: none;
|
||||
}
|
||||
html.dark .mdo pre,
|
||||
html.dark .mdo code {
|
||||
color: #8c0;
|
||||
background: #1a1a1a;
|
||||
border: .07em solid #333;
|
||||
}
|
||||
html.dark .mdo ul,
|
||||
html.dark .mdo ol {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark .mdo strong {
|
||||
color: #fff;
|
||||
}
|
||||
html.dark .mdo p>em,
|
||||
html.dark .mdo li>em,
|
||||
html.dark .mdo td>em {
|
||||
color: #f94;
|
||||
border-color: #666;
|
||||
}
|
||||
html.dark .mdo h1 {
|
||||
background: #383838;
|
||||
border-top: .4em solid #b80;
|
||||
border-bottom: .4em solid #4c4c4c;
|
||||
}
|
||||
html.dark .mdo h2 {
|
||||
background: #444;
|
||||
border-bottom: .22em solid #555;
|
||||
}
|
||||
html.dark .mdo td,
|
||||
html.dark .mdo th {
|
||||
border-color: #444;
|
||||
}
|
||||
html.dark .mdo blockquote {
|
||||
background: #282828;
|
||||
border: .07em dashed #444;
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -1,302 +0,0 @@
|
||||
|
||||
#op_up2k {
|
||||
padding: 0 1em 1em 1em;
|
||||
}
|
||||
#u2form {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
left: 0;
|
||||
width: 2px;
|
||||
height: 2px;
|
||||
overflow: hidden;
|
||||
}
|
||||
#u2form input {
|
||||
background: #444;
|
||||
border: 0px solid #444;
|
||||
outline: none;
|
||||
}
|
||||
#u2err.err {
|
||||
color: #f87;
|
||||
padding: .5em;
|
||||
}
|
||||
#u2err.msg {
|
||||
color: #999;
|
||||
padding: .5em;
|
||||
font-size: .9em;
|
||||
}
|
||||
#u2btn {
|
||||
color: #eee;
|
||||
background: #555;
|
||||
background: -moz-linear-gradient(top, #367 0%, #489 50%, #38788a 51%, #367 100%);
|
||||
background: -webkit-linear-gradient(top, #367 0%, #489 50%, #38788a 51%, #367 100%);
|
||||
background: linear-gradient(to bottom, #367 0%, #489 50%, #38788a 51%, #367 100%);
|
||||
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#489', endColorstr='#38788a', GradientType=0);
|
||||
text-decoration: none;
|
||||
line-height: 1.3em;
|
||||
border: 1px solid #222;
|
||||
border-radius: .4em;
|
||||
text-align: center;
|
||||
font-size: 1.5em;
|
||||
margin: .5em auto;
|
||||
padding: .8em 0;
|
||||
width: 16em;
|
||||
cursor: pointer;
|
||||
box-shadow: .4em .4em 0 #111;
|
||||
}
|
||||
#op_up2k.srch #u2btn {
|
||||
background: linear-gradient(to bottom, #ca3 0%, #fd8 50%, #fc6 51%, #b92 100%);
|
||||
text-shadow: 1px 1px 1px #fc6;
|
||||
color: #333;
|
||||
}
|
||||
#u2conf #u2btn {
|
||||
margin: -1.5em 0;
|
||||
padding: .8em 0;
|
||||
width: 100%;
|
||||
max-width: 12em;
|
||||
display: inline-block;
|
||||
}
|
||||
#u2conf #u2btn_cw {
|
||||
text-align: right;
|
||||
}
|
||||
#u2notbtn {
|
||||
display: none;
|
||||
text-align: center;
|
||||
background: #333;
|
||||
padding-top: 1em;
|
||||
}
|
||||
#u2notbtn * {
|
||||
line-height: 1.3em;
|
||||
}
|
||||
#u2tab {
|
||||
margin: 3em auto;
|
||||
width: calc(100% - 2em);
|
||||
max-width: 100em;
|
||||
}
|
||||
#op_up2k.srch #u2tab {
|
||||
max-width: none;
|
||||
}
|
||||
#u2tab td {
|
||||
border: 1px solid #ccc;
|
||||
border-width: 0 0px 1px 0;
|
||||
padding: .1em .3em;
|
||||
}
|
||||
#u2tab td:nth-child(2) {
|
||||
width: 5em;
|
||||
white-space: nowrap;
|
||||
}
|
||||
#u2tab td:nth-child(3) {
|
||||
width: 40%;
|
||||
}
|
||||
#op_up2k.srch td.prog {
|
||||
font-family: sans-serif;
|
||||
font-size: 1em;
|
||||
width: auto;
|
||||
}
|
||||
#u2tab tbody tr:hover td {
|
||||
background: #222;
|
||||
}
|
||||
#u2cards {
|
||||
padding: 1em 0 .3em 1em;
|
||||
margin: 1.5em auto -2.5em auto;
|
||||
white-space: nowrap;
|
||||
text-align: center;
|
||||
overflow: hidden;
|
||||
}
|
||||
#u2cards.w {
|
||||
width: 45em;
|
||||
text-align: left;
|
||||
}
|
||||
#u2cards a {
|
||||
padding: .2em 1em;
|
||||
border: 1px solid #777;
|
||||
border-width: 0 0 1px 0;
|
||||
background: linear-gradient(to bottom, #333, #222);
|
||||
}
|
||||
#u2cards a:first-child {
|
||||
border-radius: .4em 0 0 0;
|
||||
}
|
||||
#u2cards a:last-child {
|
||||
border-radius: 0 .4em 0 0;
|
||||
}
|
||||
#u2cards a.act {
|
||||
padding-bottom: .5em;
|
||||
border-width: 1px 1px .1em 1px;
|
||||
border-radius: .3em .3em 0 0;
|
||||
margin-left: -1px;
|
||||
background: linear-gradient(to bottom, #464, #333 80%);
|
||||
box-shadow: 0 -.17em .67em #280;
|
||||
border-color: #7c5 #583 #333 #583;
|
||||
position: relative;
|
||||
color: #fd7;
|
||||
}
|
||||
#u2cards span {
|
||||
color: #fff;
|
||||
}
|
||||
#u2conf {
|
||||
margin: 1em auto;
|
||||
width: 30em;
|
||||
}
|
||||
#u2conf.has_btn {
|
||||
width: 48em;
|
||||
}
|
||||
#u2conf * {
|
||||
text-align: center;
|
||||
line-height: 1em;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
border: none;
|
||||
outline: none;
|
||||
}
|
||||
#u2conf .txtbox {
|
||||
width: 3em;
|
||||
color: #fff;
|
||||
background: #444;
|
||||
border: 1px solid #777;
|
||||
font-size: 1.2em;
|
||||
padding: .15em 0;
|
||||
height: 1.05em;
|
||||
}
|
||||
#u2conf .txtbox.err {
|
||||
background: #922;
|
||||
}
|
||||
#u2conf a {
|
||||
color: #fff;
|
||||
background: #c38;
|
||||
text-decoration: none;
|
||||
border-radius: .1em;
|
||||
font-size: 1.5em;
|
||||
padding: .1em 0;
|
||||
margin: 0 -1px;
|
||||
width: 1.5em;
|
||||
height: 1em;
|
||||
display: inline-block;
|
||||
position: relative;
|
||||
bottom: -0.08em;
|
||||
}
|
||||
#u2conf input+a {
|
||||
background: #d80;
|
||||
}
|
||||
#u2conf label {
|
||||
font-size: 1.6em;
|
||||
width: 2em;
|
||||
height: 1em;
|
||||
padding: .4em 0;
|
||||
display: block;
|
||||
border-radius: .25em;
|
||||
}
|
||||
#u2conf input[type="checkbox"] {
|
||||
position: relative;
|
||||
opacity: .02;
|
||||
top: 2em;
|
||||
}
|
||||
#u2conf input[type="checkbox"]+label {
|
||||
position: relative;
|
||||
background: #603;
|
||||
border-bottom: .2em solid #a16;
|
||||
box-shadow: 0 .1em .3em #a00 inset;
|
||||
}
|
||||
#u2conf input[type="checkbox"]:checked+label {
|
||||
background: #6a1;
|
||||
border-bottom: .2em solid #efa;
|
||||
box-shadow: 0 .1em .5em #0c0;
|
||||
}
|
||||
#u2conf input[type="checkbox"]+label:hover {
|
||||
box-shadow: 0 .1em .3em #fb0;
|
||||
border-color: #fb0;
|
||||
}
|
||||
#op_up2k.srch #u2conf td:nth-child(1)>*,
|
||||
#op_up2k.srch #u2conf td:nth-child(2)>*,
|
||||
#op_up2k.srch #u2conf td:nth-child(3)>* {
|
||||
background: #777;
|
||||
border-color: #ccc;
|
||||
box-shadow: none;
|
||||
opacity: .2;
|
||||
}
|
||||
#u2foot {
|
||||
color: #fff;
|
||||
font-style: italic;
|
||||
}
|
||||
#u2foot .warn {
|
||||
font-size: 1.3em;
|
||||
padding: .5em .8em;
|
||||
margin: 1em -.6em;
|
||||
color: #f74;
|
||||
background: #322;
|
||||
border: 1px solid #633;
|
||||
border-width: .1em 0;
|
||||
text-align: center;
|
||||
}
|
||||
#u2foot .warn span {
|
||||
color: #f86;
|
||||
}
|
||||
html.light #u2foot .warn {
|
||||
color: #b00;
|
||||
background: #fca;
|
||||
border-color: #f70;
|
||||
}
|
||||
html.light #u2foot .warn span {
|
||||
color: #930;
|
||||
}
|
||||
#u2foot span {
|
||||
color: #999;
|
||||
font-size: .9em;
|
||||
font-weight: normal;
|
||||
}
|
||||
#u2footfoot {
|
||||
margin-bottom: -1em;
|
||||
}
|
||||
.prog {
|
||||
font-family: monospace, monospace;
|
||||
}
|
||||
#u2tab a>span {
|
||||
font-weight: bold;
|
||||
font-style: italic;
|
||||
color: #fff;
|
||||
padding-left: .2em;
|
||||
}
|
||||
#u2cleanup {
|
||||
float: right;
|
||||
margin-bottom: -.3em;
|
||||
}
|
||||
.fsearch_explain {
|
||||
padding-left: .7em;
|
||||
font-size: 1.1em;
|
||||
line-height: 0;
|
||||
}
|
||||
html.light #u2btn {
|
||||
box-shadow: .4em .4em 0 #ccc;
|
||||
}
|
||||
html.light #u2cards span {
|
||||
color: #000;
|
||||
}
|
||||
html.light #u2cards a {
|
||||
background: linear-gradient(to bottom, #eee, #fff);
|
||||
}
|
||||
html.light #u2cards a.act {
|
||||
color: #037;
|
||||
background: inherit;
|
||||
box-shadow: 0 -.17em .67em #0ad;
|
||||
border-color: #09c #05a #eee #05a;
|
||||
}
|
||||
html.light #u2conf .txtbox {
|
||||
background: #fff;
|
||||
color: #444;
|
||||
}
|
||||
html.light #u2conf .txtbox.err {
|
||||
background: #f96;
|
||||
color: #300;
|
||||
}
|
||||
html.light #op_up2k.srch #u2btn {
|
||||
border-color: #a80;
|
||||
}
|
||||
html.light #u2foot {
|
||||
color: #000;
|
||||
}
|
||||
html.light #u2tab tbody tr:hover td {
|
||||
background: #fff;
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -1,11 +1,21 @@
|
||||
# example `.epilogue.html`
|
||||
**NOTE:** there's more stuff (sharex config, service scripts, nginx configs, ...) in [`/contrib/`](/contrib/)
|
||||
|
||||
|
||||
|
||||
# example resource files
|
||||
|
||||
can be provided to copyparty to tweak things
|
||||
|
||||
|
||||
|
||||
## example `.epilogue.html`
|
||||
save one of these as `.epilogue.html` inside a folder to customize it:
|
||||
|
||||
* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)
|
||||
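a minimal sketch of wiring that up (the source path and destination folder are placeholders):

# copy the simplified-ui epilogue into the folder it should affect
cp /path/to/minimal-up2k.html /srv/pub/.epilogue.html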
|
||||
|
||||
|
||||
# example browser-css
|
||||
## example browser-css
|
||||
point `--css-browser` to one of these by URL:
|
||||
|
||||
* [`browser.css`](browser.css) changes the background
|
||||
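for example (a hedged sketch; the paths and volume layout are placeholders, and the `-v` syntax follows the example.conf notes further down):

# share the folder holding browser.css as a volume named res, then point the flag at it
python copyparty-sfx.py -v .::r -v /path/to/this/folder:res:r --css-browser=/res/browser.css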
@@ -19,4 +29,23 @@ point `--css-browser` to one of these by URL:
|
||||
* notes on using rclone as a fuse client/server
|
||||
|
||||
## [`example.conf`](example.conf)
|
||||
* example config file for `-c` which never really happened
|
||||
* example config file for `-c` (supports accounts, volumes, and volume-flags)
|
||||
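a hypothetical launch using it (the path is a placeholder):

# load accounts, volumes and volume-flags from the config file instead of cli args
python copyparty-sfx.py -c /path/to/example.conf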
|
||||
|
||||
|
||||
# junk
|
||||
|
||||
alphabetical list of the remaining files
|
||||
|
||||
| what | why |
|
||||
| -- | -- |
|
||||
| [biquad.html](biquad.html) | bruteforce calibrator for the audio equalizer since im not that good at maths |
|
||||
| [design.txt](design.txt) | initial brainstorming of the copyparty design, unmaintained, incorrect, sentimental value only |
|
||||
| [hls.html](hls.html) | experimenting with hls playback using `hls.js`, works p well, almost became a thing |
|
||||
| [music-analysis.sh](music-analysis.sh) | testing various bpm/key detection libraries before settling on the ones used in [`/bin/mtag/`](/bin/mtag/) |
|
||||
| [notes.sh](notes.sh) | notepad, just scraps really |
|
||||
| [nuitka.txt](nuitka.txt) | how to build a copyparty exe using nuitka (not maintained) |
|
||||
| [pretend-youre-qnap.patch](pretend-youre-qnap.patch) | simulate a NAS which keeps returning old cached data even though you just modified the file yourself |
|
||||
| [tcp-debug.sh](tcp-debug.sh) | looks like this was to debug stuck tcp connections? |
|
||||
| [unirange.py](unirange.py) | uhh |
|
||||
| [up2k.txt](up2k.txt) | initial ideas for how up2k should work, another unmaintained sentimental-value-only thing |
|
||||
|
||||
@@ -3,6 +3,24 @@
|
||||
setTimeout(location.reload.bind(location), 700);
|
||||
document.documentElement.scrollLeft = 0;
|
||||
|
||||
var cali = (function() {
|
||||
var ac = new AudioContext(),
|
||||
fi = ac.createBiquadFilter(),
|
||||
freqs = new Float32Array(1),
|
||||
mag = new Float32Array(1),
|
||||
phase = new Float32Array(1);
|
||||
|
||||
freqs[0] = 14000;
|
||||
fi.type = 'peaking';
|
||||
fi.frequency.value = 18000;
|
||||
fi.Q.value = 0.8;
|
||||
fi.gain.value = 1;
|
||||
fi.getFrequencyResponse(freqs, mag, phase);
|
||||
|
||||
return mag[0]; // 1.0407 good, 1.0563 bad
|
||||
})(),
|
||||
mp = cali < 1.05;
|
||||
|
||||
var can = document.createElement('canvas'),
|
||||
cc = can.getContext('2d'),
|
||||
w = 2048,
|
||||
@@ -28,12 +46,12 @@ var cfg = [ // hz, q, g
|
||||
[1000, 0.9, 1.1],
|
||||
[2000, 0.9, 1.105],
|
||||
[4000, 0.88, 1.05],
|
||||
[8000 * 1.006, 0.73, 1.24],
|
||||
[8000 * 1.006, 0.73, mp ? 1.24 : 1.2],
|
||||
//[16000 * 1.00, 0.5, 1.75], // peak.v1
|
||||
//[16000 * 1.19, 0, 1.8] // shelf.v1
|
||||
[16000 * 0.89, 0.7, 1.26], // peak
|
||||
[16000 * 1.13, 0.82, 1.09], // peak
|
||||
[16000 * 1.205, 0, 1.9] // shelf
|
||||
[16000 * 0.89, 0.7, mp ? 1.26 : 1.2], // peak
|
||||
[16000 * 1.13, 0.82, mp ? 1.09 : 0.75], // peak
|
||||
[16000 * 1.205, 0, mp ? 1.9 : 1.85] // shelf
|
||||
];
|
||||
|
||||
var freqs = new Float32Array(22000),
|
||||
|
||||
@@ -1,37 +1,7 @@
|
||||
/* put filetype icons inline with text
|
||||
#ggrid>a>span:before,
|
||||
#ggrid>a>span.dir:before {
|
||||
display: inline;
|
||||
line-height: 0;
|
||||
font-size: 1.7em;
|
||||
margin: -.7em .1em -.5em -.6em;
|
||||
}
|
||||
*/
|
||||
/* video, alternative 1:
|
||||
top-left icon, just like the other formats
|
||||
=======================================================================
|
||||
|
||||
|
||||
/* move folder icons top-left */
|
||||
#ggrid>a>span.dir:before {
|
||||
content: initial;
|
||||
}
|
||||
#ggrid>a[href$="/"]:before {
|
||||
content: '📂';
|
||||
}
|
||||
|
||||
|
||||
/* put filetype icons top-left */
|
||||
#ggrid>a:before {
|
||||
display: block;
|
||||
position: absolute;
|
||||
padding: .3em 0;
|
||||
margin: -.4em;
|
||||
text-shadow: 0 0 .1em #000;
|
||||
background: linear-gradient(135deg,rgba(255,255,255,0) 50%,rgba(255,255,255,0.2));
|
||||
border-radius: .3em;
|
||||
font-size: 2em;
|
||||
}
|
||||
|
||||
|
||||
/* video */
|
||||
#ggrid>a:is(
|
||||
[href$=".mkv"i],
|
||||
[href$=".mp4"i],
|
||||
@@ -39,6 +9,40 @@
|
||||
):before {
|
||||
content: '📺';
|
||||
}
|
||||
*/
|
||||
|
||||
|
||||
|
||||
/* video, alternative 2:
|
||||
play-icon in the middle of the thumbnail
|
||||
=======================================================================
|
||||
*/
|
||||
#ggrid>a:is(
|
||||
[href$=".mkv"i],
|
||||
[href$=".mp4"i],
|
||||
[href$=".webm"i],
|
||||
) {
|
||||
position: relative;
|
||||
overflow: hidden;
|
||||
}
|
||||
#ggrid>a:is(
|
||||
[href$=".mkv"i],
|
||||
[href$=".mp4"i],
|
||||
[href$=".webm"i],
|
||||
):before {
|
||||
content: '▶';
|
||||
opacity: .8;
|
||||
margin: 0;
|
||||
padding: 1em .5em 1em .7em;
|
||||
border-radius: 9em;
|
||||
line-height: 0;
|
||||
color: #fff;
|
||||
text-shadow: none;
|
||||
background: rgba(0, 0, 0, 0.7);
|
||||
left: calc(50% - 1em);
|
||||
top: calc(50% - 1.4em);
|
||||
}
|
||||
|
||||
|
||||
|
||||
/* audio */
|
||||
@@ -54,6 +58,7 @@
|
||||
}
|
||||
|
||||
|
||||
|
||||
/* image */
|
||||
#ggrid>a:is(
|
||||
[href$=".jpg"i],
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
html {
|
||||
background: #333 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed;
|
||||
background: #222 url('/wp/wallhaven-mdjrqy.jpg') center / cover no-repeat fixed;
|
||||
}
|
||||
#files th {
|
||||
background: rgba(32, 32, 32, 0.9) !important;
|
||||
}
|
||||
#ops,
|
||||
#treeul,
|
||||
#tree,
|
||||
#files td {
|
||||
background: rgba(32, 32, 32, 0.3) !important;
|
||||
}
|
||||
@@ -17,8 +17,9 @@ html.light {
|
||||
html.light #files th {
|
||||
background: rgba(255, 255, 255, 0.9) !important;
|
||||
}
|
||||
html.light .logue,
|
||||
html.light #ops,
|
||||
html.light #treeul,
|
||||
html.light #tree,
|
||||
html.light #files td {
|
||||
background: rgba(248, 248, 248, 0.8) !important;
|
||||
}
|
||||
|
||||
@@ -47,5 +47,5 @@ c e2d
|
||||
c nodupe
|
||||
|
||||
# this entire config file can be replaced with these arguments:
|
||||
# -u ed:123 -u k:k -v .::r:a,ed -v priv:priv:r,k:rw,ed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w:c,e2d:c,nodupe
|
||||
# -u ed:123 -u k:k -v .::r:a,ed -v priv:priv:r,k:rw,ed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w:c,e2d,nodupe
|
||||
# but note that the config file always wins in case of conflicts
|
||||
|
||||
@@ -11,7 +11,9 @@
|
||||
|
||||
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
|
||||
|
||||
#u2cards /* and the upload progress tabs */
|
||||
#srch_dz, #srch_zd, /* the filesearch dropzone */
|
||||
|
||||
#u2cards, #u2etaw /* and the upload progress tabs */
|
||||
|
||||
{display: none !important} /* do it! */
|
||||
|
||||
@@ -19,13 +21,16 @@
|
||||
|
||||
/* add some margins because now it's weird */
|
||||
.opview {margin-top: 2.5em}
|
||||
#op_up2k {margin-top: 3em}
|
||||
#op_up2k {margin-top: 6em}
|
||||
|
||||
/* and embiggen the upload button */
|
||||
#u2conf #u2btn, #u2btn {padding:1.5em 0}
|
||||
|
||||
/* adjust the button area a bit */
|
||||
#u2conf.has_btn {width: 35em !important; margin: 5em auto}
|
||||
#u2conf.w, #u2conf.ww {width: 35em !important; margin: 5em auto}
|
||||
|
||||
/* a */
|
||||
#op_up2k {min-height: 0}
|
||||
|
||||
</style>
|
||||
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
|
||||
method = self.s.recv(4)
|
||||
self.s.unrecv(method)
|
||||
print("xxx unrecv'd [{}]".format(method))
|
||||
|
||||
# jython used to do this, they stopped since it's broken
|
||||
# but reimplementing sendall is out of scope for now
|
||||
if not getattr(self.s.s, "sendall", None):
|
||||
self.s.s.sendall = self.s.s.send
|
||||
|
||||
# TODO this is also pretty bad
|
||||
have = dir(self.s)
|
||||
for k in self.s.s.__dict__:
|
||||
if k not in have and not k.startswith("__"):
|
||||
if k == "recv":
|
||||
raise Exception("wait what")
|
||||
|
||||
self.s.__dict__[k] = self.s.s.__dict__[k]
|
||||
|
||||
have = dir(self.s)
|
||||
for k in dir(self.s.s):
|
||||
if k not in have and not k.startswith("__"):
|
||||
if k == "recv":
|
||||
raise Exception("wait what")
|
||||
|
||||
setattr(self.s, k, getattr(self.s.s, k))
|
||||
@@ -41,9 +41,9 @@ avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} c
|
||||
##
|
||||
## bad filenames
|
||||
|
||||
dirs=("$HOME/vfs/ほげ" "$HOME/vfs/ほげ/ぴよ" "$HOME/vfs/$(printf \\xed\\x91)" "$HOME/vfs/$(printf \\xed\\x91/\\xed\\x92)")
|
||||
dirs=("./ほげ" "./ほげ/ぴよ" "./$(printf \\xed\\x91)" "./$(printf \\xed\\x91/\\xed\\x92)" './qw,er;ty%20as df?gh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd fgh')
|
||||
mkdir -p "${dirs[@]}"
|
||||
for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qwe,rty;asd fgh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd fgh'; do echo "$dir" > "$dir/$fn.html"; done; done
|
||||
for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qw,er;ty%20as df?gh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd fgh'; do echo "$dir" > "$dir/$fn.html"; done; done
|
||||
# qw er+ty%20ui%%20op<as>df&gh&jk#zx'cv"bn`m=qw*er^ty?ui@op,as.df-gh_jk
|
||||
|
||||
##
|
||||
@@ -79,6 +79,8 @@ command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (ti
|
||||
# get all up2k search result URLs
|
||||
var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n"));
|
||||
|
||||
# debug md-editor line tracking
|
||||
var s=mknod('style');s.innerHTML='*[data-ln]:before {content:attr(data-ln)!important;color:#f0c;background:#000;position:absolute;left:-1.5em;font-size:1rem}';document.head.appendChild(s);
|
||||
|
||||
##
|
||||
## bash oneliners
|
||||
@@ -122,6 +124,13 @@ e=6; s=10; d=~/dev/copyparty/srv/aus; n=1; p=0; e=$((e*60)); rm -rf $d; mkdir $d
|
||||
-v srv/aus:aus:r:ce2dsa:ce2ts:cmtp=fgsfds=bin/mtag/sleep.py
|
||||
sqlite3 .hist/up2k.db 'select * from mt where k="fgsfds" or k="t:mtp"' | tee /dev/stderr | wc -l
|
||||
|
||||
# generate the sine meme
|
||||
for ((f=420;f<1200;f++)); do sz=$(ffmpeg -y -f lavfi -i sine=frequency=$f:duration=2 -vf volume=0.1 -ac 1 -ar 44100 -f s16le /dev/shm/a.wav 2>/dev/null; base64 -w0 </dev/shm/a.wav | gzip -c | wc -c); printf '%d %d\n' $f $sz; done | tee /dev/stderr | sort -nrk2,2
|
||||
ffmpeg -y -f lavfi -i sine=frequency=1050:duration=2 -vf volume=0.1 -ac 1 -ar 44100 /dev/shm/a.wav
|
||||
|
||||
# play icon calibration pics
|
||||
for w in 150 170 190 210 230 250; do for h in 130 150 170 190 210; do /c/Program\ Files/ImageMagick-7.0.11-Q16-HDRI/magick.exe convert -size ${w}x${h} xc:brown -fill orange -draw "circle $((w/2)),$((h/2)) $((w/2)),$((h/3))" $w-$h.png; done; done
|
||||
|
||||
|
||||
##
|
||||
## vscode
|
||||
@@ -153,7 +162,7 @@ brew install python@2
|
||||
pip install virtualenv
|
||||
|
||||
# readme toc
|
||||
cat README.md | awk '!/^#/{next} {lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab)} {printf "%" ((lv-1)*4+1) "s [%s](#%s)\n", "*",$0,bab}'
|
||||
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
|
||||
|
||||
# fix firefox phantom breakpoints,
|
||||
# suggestions from bugtracker, doesnt work (debugger is not attachable)
|
||||
@@ -169,7 +178,7 @@ about:config >> devtools.debugger.prefs-schema-version = -1
|
||||
git pull; git reset --hard origin/HEAD && git log --format=format:"%H %ai %d" --decorate=full > ../revs && cat ../{util,browser,up2k}.js >../vr && cat ../revs | while read -r rev extra; do (git reset --hard $rev >/dev/null 2>/dev/null && dsz=$(cat copyparty/web/{util,browser,up2k}.js >../vg 2>/dev/null && diff -wNarU0 ../{vg,vr} | wc -c) && printf '%s %6s %s\n' "$rev" $dsz "$extra") </dev/null; done
|
||||
|
||||
# download all sfx versions
|
||||
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | while read v t; do fn="copyparty $v $t.py"; [ -e $fn ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
|
||||
curl https://api.github.com/repos/9001/copyparty/releases?per_page=100 | jq -r '.[] | .tag_name + " " + .name' | tr -d '\r' | while read v t; do fn="copyparty $v $t.py"; [ -e "$fn" ] || curl https://github.com/9001/copyparty/releases/download/$v/copyparty-sfx.py -Lo "$fn"; done
|
||||
|
||||
|
||||
##
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
FROM alpine:3.13
|
||||
FROM alpine:3.14
|
||||
WORKDIR /z
|
||||
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
|
||||
ver_hashwasm=4.7.0 \
|
||||
ver_marked=1.1.0 \
|
||||
ver_ogvjs=1.8.0 \
|
||||
ver_mde=2.14.0 \
|
||||
ver_codemirror=5.59.3 \
|
||||
ver_hashwasm=4.9.0 \
|
||||
ver_marked=3.0.4 \
|
||||
ver_ogvjs=1.8.4 \
|
||||
ver_mde=2.15.0 \
|
||||
ver_codemirror=5.62.3 \
|
||||
ver_fontawesome=5.13.0 \
|
||||
ver_zopfli=1.0.3
|
||||
|
||||
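# example build command (hypothetical tag name; run from the folder containing this Dockerfile):
#   docker build -t copyparty-web-deps .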
@@ -74,23 +74,16 @@ RUN cd hash-wasm \
|
||||
# build ogvjs
|
||||
RUN cd ogvjs-$ver_ogvjs \
|
||||
&& cp -pv \
|
||||
ogv.js \
|
||||
ogv-worker-audio.js \
|
||||
ogv-demuxer-ogg-wasm.js \
|
||||
ogv-demuxer-ogg-wasm.wasm \
|
||||
ogv-demuxer-webm-wasm.js \
|
||||
ogv-demuxer-webm-wasm.wasm \
|
||||
ogv-decoder-audio-opus-wasm.js \
|
||||
ogv-decoder-audio-opus-wasm.wasm \
|
||||
ogv-decoder-audio-vorbis-wasm.js \
|
||||
ogv-decoder-audio-vorbis-wasm.wasm \
|
||||
/z/dist
|
||||
|
||||
# ogv-demuxer-ogg.js \
|
||||
# ogv-demuxer-webm.js \
|
||||
# ogv-decoder-audio-opus.js \
|
||||
# ogv-decoder-audio-vorbis.js \
|
||||
# dynamicaudio.swf \
|
||||
/z/dist \
|
||||
&& cp -pv \
|
||||
ogv-es2017.js /z/dist/ogv.js
|
||||
|
||||
|
||||
# build marked
|
||||
@@ -120,9 +113,10 @@ RUN cd CodeMirror-$ver_codemirror \
|
||||
COPY easymde.patch /z/
|
||||
RUN cd easy-markdown-editor-$ver_mde \
|
||||
&& patch -p1 < /z/easymde.patch \
|
||||
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-0.8.2.tgz`file:/z/nodepkgs/marked`' package-lock.json \
|
||||
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-[0-9\.]+.tgz`file:/z/nodepkgs/marked`' package-lock.json \
|
||||
&& sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \
|
||||
&& sed -ri 's`("codemirror": ")[^"]+`\1file:/z/nodepkgs/codemirror`' ./package.json \
|
||||
&& sed -ri 's`^var marked = require\(.marked/lib/marked.\);$`var marked = window.marked;`' src/js/easymde.js \
|
||||
&& npm install
|
||||
|
||||
COPY easymde-ln.patch /z/
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
diff --git a/src/Lexer.js b/src/Lexer.js
|
||||
adds linetracking to marked.js v1.0.0 +git;
|
||||
adds linetracking to marked.js v3.0.4;
|
||||
add data-ln="%d" to most tags, %d is the source markdown line
|
||||
--- a/src/Lexer.js
|
||||
+++ b/src/Lexer.js
|
||||
@@ -49,4 +49,5 @@ function mangle(text) {
|
||||
@@ -50,4 +50,5 @@ function mangle(text) {
|
||||
module.exports = class Lexer {
|
||||
constructor(options) {
|
||||
+ this.ln = 1; // like most editors, start counting from 1
|
||||
this.tokens = [];
|
||||
this.tokens.links = Object.create(null);
|
||||
@@ -108,4 +109,15 @@ module.exports = class Lexer {
|
||||
@@ -127,4 +128,15 @@ module.exports = class Lexer {
|
||||
}
|
||||
|
||||
+ set_ln(token, ln = this.ln) {
|
||||
@@ -25,122 +25,123 @@ add data-ln="%d" to most tags, %d is the source markdown line
|
||||
+
|
||||
/**
|
||||
* Lexing
|
||||
@@ -113,10 +125,15 @@ module.exports = class Lexer {
|
||||
blockTokens(src, tokens = [], top = true) {
|
||||
src = src.replace(/^ +$/gm, '');
|
||||
- let token, i, l, lastToken;
|
||||
+ let token, i, l, lastToken, ln;
|
||||
@@ -134,7 +146,11 @@ module.exports = class Lexer {
|
||||
src = src.replace(/^ +$/gm, '');
|
||||
}
|
||||
- let token, lastToken, cutSrc, lastParagraphClipped;
|
||||
+ let token, lastToken, cutSrc, lastParagraphClipped, ln;
|
||||
|
||||
while (src) {
|
||||
+ // this.ln will be bumped by recursive calls into this func;
|
||||
+ // reset the count and rely on the outermost token's raw only
|
||||
+ ln = this.ln;
|
||||
+
|
||||
// newline
|
||||
if (this.options.extensions
|
||||
&& this.options.extensions.block
|
||||
@@ -142,4 +158,5 @@ module.exports = class Lexer {
|
||||
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
return true;
|
||||
@@ -153,4 +170,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.space(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token); // is \n if not type
|
||||
+ this.set_ln(token, ln); // is \n if not type
|
||||
if (token.type) {
|
||||
tokens.push(token);
|
||||
@@ -128,4 +145,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.code(src, tokens)) {
|
||||
@@ -162,4 +180,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.code(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
if (token.type) {
|
||||
tokens.push(token);
|
||||
@@ -141,4 +159,5 @@ module.exports = class Lexer {
|
||||
+ this.set_ln(token, ln);
|
||||
lastToken = tokens[tokens.length - 1];
|
||||
// An indented code block cannot interrupt a paragraph.
|
||||
@@ -177,4 +196,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.fences(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -148,4 +167,5 @@ module.exports = class Lexer {
|
||||
@@ -184,4 +204,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.heading(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -155,4 +175,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.nptable(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -162,4 +183,5 @@ module.exports = class Lexer {
|
||||
@@ -191,4 +212,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.hr(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -170,4 +192,7 @@ module.exports = class Lexer {
|
||||
@@ -198,4 +220,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.blockquote(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
token.tokens = this.blockTokens(token.text, [], top);
|
||||
+ // recursive call to blockTokens probably bumped this.ln,
|
||||
+ // token.raw is more reliable so reset this.ln and use that
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -180,5 +205,9 @@ module.exports = class Lexer {
|
||||
for (i = 0; i < l; i++) {
|
||||
token.items[i].tokens = this.blockTokens(token.items[i].text, [], false);
|
||||
+ // list entries don't bump the linecounter, so let's
|
||||
+ this.ln++;
|
||||
}
|
||||
+ // then reset like blockquote
|
||||
@@ -205,4 +228,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.list(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -188,4 +217,5 @@ module.exports = class Lexer {
|
||||
@@ -212,4 +236,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.html(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -195,4 +225,5 @@ module.exports = class Lexer {
|
||||
if (top && (token = this.tokenizer.def(src))) {
|
||||
@@ -219,4 +244,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.def(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
if (!this.tokens.links[token.tag]) {
|
||||
this.tokens.links[token.tag] = {
|
||||
@@ -207,4 +238,5 @@ module.exports = class Lexer {
|
||||
+ this.set_ln(token, ln);
|
||||
lastToken = tokens[tokens.length - 1];
|
||||
if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
|
||||
@@ -236,4 +262,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.table(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -214,4 +246,5 @@ module.exports = class Lexer {
|
||||
@@ -243,4 +270,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.lheading(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
+ this.set_ln(token, ln);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -221,4 +254,5 @@ module.exports = class Lexer {
|
||||
if (top && (token = this.tokenizer.paragraph(src))) {
|
||||
@@ -263,4 +291,5 @@ module.exports = class Lexer {
|
||||
}
|
||||
if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
|
||||
+ this.set_ln(token, ln);
|
||||
lastToken = tokens[tokens.length - 1];
|
||||
if (lastParagraphClipped && lastToken.type === 'paragraph') {
|
||||
@@ -280,4 +309,6 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.text(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -228,4 +262,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.text(src, tokens)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.set_ln(token);
|
||||
if (token.type) {
|
||||
tokens.push(token);
|
||||
@@ -263,4 +298,7 @@ module.exports = class Lexer {
|
||||
for (i = 0; i < l; i++) {
|
||||
token = tokens[i];
|
||||
+ // this.ln is at EOF when inline() is invoked;
|
||||
+ // all this affects <br> tags only so no biggie if it breaks
|
||||
+ this.ln = token.ln || this.ln;
|
||||
switch (token.type) {
|
||||
case 'paragraph':
|
||||
@@ -386,4 +424,6 @@ module.exports = class Lexer {
|
||||
+ this.set_ln(token, ln);
|
||||
+ this.ln++;
|
||||
lastToken = tokens[tokens.length - 1];
|
||||
if (lastToken && lastToken.type === 'text') {
|
||||
@@ -355,4 +386,5 @@ module.exports = class Lexer {
|
||||
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.ln = token.ln || this.ln;
|
||||
tokens.push(token);
|
||||
return true;
|
||||
@@ -420,4 +452,6 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.br(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ // no need to reset (no more blockTokens anyways)
|
||||
+ token.ln = this.ln++;
|
||||
tokens.push(token);
|
||||
continue;
|
||||
@@ -462,4 +496,5 @@ module.exports = class Lexer {
|
||||
if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
|
||||
src = src.substring(token.raw.length);
|
||||
+ this.ln = token.ln || this.ln;
|
||||
if (token.raw.slice(-1) !== '_') { // Track prevChar before string of ____ started
|
||||
prevChar = token.raw.slice(-1);
|
||||
diff --git a/src/Parser.js b/src/Parser.js
|
||||
--- a/src/Parser.js
|
||||
+++ b/src/Parser.js
|
||||
@@ -150,17 +151,16 @@ diff --git a/src/Parser.js b/src/Parser.js
|
||||
+ this.ln = 0; // error indicator; should always be set >=1 from tokens
|
||||
}
|
||||
|
||||
@@ -55,4 +56,9 @@ module.exports = class Parser {
|
||||
@@ -64,4 +65,8 @@ module.exports = class Parser {
|
||||
for (i = 0; i < l; i++) {
|
||||
token = tokens[i];
|
||||
+ // take line-numbers from tokens whenever possible
|
||||
+ // and update the renderer's html attribute with the new value
|
||||
+ this.ln = token.ln || this.ln;
|
||||
+ this.renderer.tag_ln(this.ln);
|
||||
+
|
||||
switch (token.type) {
|
||||
case 'space': {
|
||||
@@ -105,7 +111,10 @@ module.exports = class Parser {
|
||||
|
||||
// Run any renderer extensions
|
||||
@@ -124,7 +129,10 @@ module.exports = class Parser {
|
||||
}
|
||||
|
||||
- body += this.renderer.tablerow(cell);
|
||||
@@ -173,7 +173,7 @@ diff --git a/src/Parser.js b/src/Parser.js
|
||||
+ out += this.renderer.tag_ln(token.ln).table(header, body);
|
||||
continue;
|
||||
}
|
||||
@@ -148,8 +157,12 @@ module.exports = class Parser {
|
||||
@@ -167,8 +175,12 @@ module.exports = class Parser {
|
||||
|
||||
itemBody += this.parse(item.tokens, loose);
|
||||
- body += this.renderer.listitem(itemBody, task, checked);
|
||||
@@ -188,7 +188,7 @@ diff --git a/src/Parser.js b/src/Parser.js
|
||||
+ out += this.renderer.tag_ln(token.ln).list(body, ordered, start);
|
||||
continue;
|
||||
}
|
||||
@@ -160,5 +173,6 @@ module.exports = class Parser {
|
||||
@@ -179,5 +191,6 @@ module.exports = class Parser {
|
||||
}
|
||||
case 'paragraph': {
|
||||
- out += this.renderer.paragraph(this.parseInline(token.tokens));
|
||||
@@ -196,22 +196,14 @@ diff --git a/src/Parser.js b/src/Parser.js
|
||||
+ out += this.renderer.tag_ln(token.ln).paragraph(t);
|
||||
continue;
|
||||
}
|
||||
@@ -199,4 +213,6 @@ module.exports = class Parser {
|
||||
for (i = 0; i < l; i++) {
|
||||
@@ -221,4 +234,7 @@ module.exports = class Parser {
|
||||
token = tokens[i];
|
||||
|
||||
+ // another thing that only affects <br/> and other inlines
|
||||
+ this.ln = token.ln || this.ln;
|
||||
switch (token.type) {
|
||||
case 'escape': {
|
||||
@@ -229,5 +245,7 @@ module.exports = class Parser {
|
||||
}
|
||||
case 'br': {
|
||||
- out += renderer.br();
|
||||
+ // update the html attribute before writing each <br/>,
|
||||
+ // don't care about the others
|
||||
+ out += renderer.tag_ln(this.ln).br();
|
||||
break;
|
||||
}
|
||||
+
|
||||
// Run any renderer extensions
|
||||
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
|
||||
diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
--- a/src/Renderer.js
|
||||
+++ b/src/Renderer.js
|
||||
@@ -228,7 +220,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
+
|
||||
code(code, infostring, escaped) {
|
||||
const lang = (infostring || '').match(/\S*/)[0];
|
||||
@@ -24,10 +30,10 @@ module.exports = class Renderer {
|
||||
@@ -26,10 +32,10 @@ module.exports = class Renderer {
|
||||
|
||||
if (!lang) {
|
||||
- return '<pre><code>'
|
||||
@@ -241,58 +233,69 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
+ return '<pre' + this.ln + '><code class="'
|
||||
+ this.options.langPrefix
|
||||
+ escape(lang, true)
|
||||
@@ -38,5 +44,5 @@ module.exports = class Renderer {
|
||||
@@ -40,5 +46,5 @@ module.exports = class Renderer {
|
||||
|
||||
blockquote(quote) {
|
||||
- return '<blockquote>\n' + quote + '</blockquote>\n';
|
||||
+ return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n';
|
||||
}
|
||||
|
||||
@@ -49,4 +55,5 @@ module.exports = class Renderer {
|
||||
@@ -51,4 +57,5 @@ module.exports = class Renderer {
|
||||
return '<h'
|
||||
+ level
|
||||
+ + this.ln
|
||||
+ ' id="'
|
||||
+ this.options.headerPrefix
|
||||
@@ -59,5 +66,5 @@ module.exports = class Renderer {
|
||||
@@ -61,5 +68,5 @@ module.exports = class Renderer {
|
||||
}
|
||||
// ignore IDs
|
||||
- return '<h' + level + '>' + text + '</h' + level + '>\n';
|
||||
+ return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n';
|
||||
}
|
||||
|
||||
@@ -73,5 +80,5 @@ module.exports = class Renderer {
|
||||
@@ -75,5 +82,5 @@ module.exports = class Renderer {
|
||||
|
||||
listitem(text) {
|
||||
- return '<li>' + text + '</li>\n';
|
||||
+ return '<li' + this.ln + '>' + text + '</li>\n';
|
||||
}
|
||||
|
||||
@@ -85,5 +92,5 @@ module.exports = class Renderer {
|
||||
@@ -87,5 +94,5 @@ module.exports = class Renderer {
|
||||
|
||||
paragraph(text) {
|
||||
- return '<p>' + text + '</p>\n';
|
||||
+ return '<p' + this.ln + '>' + text + '</p>\n';
|
||||
}
|
||||
|
||||
@@ -100,5 +107,5 @@ module.exports = class Renderer {
|
||||
@@ -102,5 +109,5 @@ module.exports = class Renderer {
|
||||
|
||||
tablerow(content) {
|
||||
- return '<tr>\n' + content + '</tr>\n';
|
||||
+ return '<tr' + this.ln + '>\n' + content + '</tr>\n';
|
||||
}
|
||||
|
||||
@@ -125,5 +132,5 @@ module.exports = class Renderer {
|
||||
@@ -127,5 +134,5 @@ module.exports = class Renderer {
|
||||
|
||||
br() {
|
||||
- return this.options.xhtml ? '<br/>' : '<br>';
|
||||
+ return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>';
|
||||
}
|
||||
|
||||
@@ -151,5 +158,5 @@ module.exports = class Renderer {
|
||||
@@ -153,5 +160,5 @@ module.exports = class Renderer {
|
||||
}
|
||||
|
||||
- let out = '<img src="' + href + '" alt="' + text + '"';
|
||||
+ let out = '<img' + this.ln + ' src="' + href + '" alt="' + text + '"';
|
||||
if (title) {
|
||||
out += ' title="' + title + '"';
|
||||
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
--- a/src/Tokenizer.js
|
||||
+++ b/src/Tokenizer.js
|
||||
@@ -301,4 +301,7 @@ module.exports = class Tokenizer {
|
||||
const l = list.items.length;
|
||||
|
||||
+ // each nested list gets +1 ahead; this hack makes every listgroup -1 but at least it doesn't get infinitely bad
|
||||
+ this.lexer.ln--;
|
||||
+
|
||||
// Item child tokens handled here at end because we needed to have the final item to trim it first
|
||||
for (i = 0; i < l; i++) {
|
||||
|
||||
@@ -1,52 +1,52 @@
|
||||
diff --git a/src/Lexer.js b/src/Lexer.js
|
||||
--- a/src/Lexer.js
|
||||
+++ b/src/Lexer.js
|
||||
@@ -5,5 +5,5 @@ const { block, inline } = require('./rules.js');
|
||||
@@ -6,5 +6,5 @@ const { repeatString } = require('./helpers.js');
|
||||
/**
|
||||
* smartypants text replacement
|
||||
- */
|
||||
+ *
|
||||
function smartypants(text) {
|
||||
return text
|
||||
@@ -26,5 +26,5 @@ function smartypants(text) {
|
||||
@@ -27,5 +27,5 @@ function smartypants(text) {
|
||||
/**
|
||||
* mangle email addresses
|
||||
- */
|
||||
+ *
|
||||
function mangle(text) {
|
||||
let out = '',
|
||||
@@ -439,5 +439,5 @@ module.exports = class Lexer {
|
||||
@@ -465,5 +465,5 @@ module.exports = class Lexer {
|
||||
|
||||
// autolink
|
||||
- if (token = this.tokenizer.autolink(src, mangle)) {
|
||||
+ if (token = this.tokenizer.autolink(src)) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
@@ -446,5 +446,5 @@ module.exports = class Lexer {
|
||||
@@ -472,5 +472,5 @@ module.exports = class Lexer {
|
||||
|
||||
// url (gfm)
|
||||
- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
|
||||
+ if (!inLink && (token = this.tokenizer.url(src))) {
|
||||
- if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) {
|
||||
+ if (!this.state.inLink && (token = this.tokenizer.url(src))) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
@@ -453,5 +453,5 @@ module.exports = class Lexer {
|
||||
|
||||
// text
|
||||
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
|
||||
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
|
||||
@@ -493,5 +493,5 @@ module.exports = class Lexer {
|
||||
}
|
||||
}
|
||||
- if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
|
||||
+ if (token = this.tokenizer.inlineText(cutSrc)) {
|
||||
src = src.substring(token.raw.length);
|
||||
tokens.push(token);
|
||||
this.ln = token.ln || this.ln;
|
||||
diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
--- a/src/Renderer.js
|
||||
+++ b/src/Renderer.js
|
||||
@@ -140,5 +140,5 @@ module.exports = class Renderer {
|
||||
@@ -142,5 +142,5 @@ module.exports = class Renderer {
|
||||
|
||||
link(href, title, text) {
|
||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
+ href = cleanUrl(this.options.baseUrl, href);
|
||||
if (href === null) {
|
||||
return text;
|
||||
@@ -153,5 +153,5 @@ module.exports = class Renderer {
|
||||
@@ -155,5 +155,5 @@ module.exports = class Renderer {
|
||||
|
||||
image(href, title, text) {
|
||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||
@@ -56,22 +56,23 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
||||
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
--- a/src/Tokenizer.js
|
||||
+++ b/src/Tokenizer.js
|
||||
@@ -287,11 +287,8 @@ module.exports = class Tokenizer {
|
||||
if (cap) {
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
- ? 'paragraph'
|
||||
- : 'html',
|
||||
+ type: 'html',
|
||||
@@ -321,14 +321,7 @@ module.exports = class Tokenizer {
|
||||
type: 'html',
|
||||
raw: cap[0],
|
||||
- pre: !this.options.sanitizer
|
||||
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
|
||||
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
|
||||
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
|
||||
+ text: cap[0]
|
||||
+ pre: (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
|
||||
text: cap[0]
|
||||
};
|
||||
- if (this.options.sanitize) {
|
||||
- token.type = 'paragraph';
|
||||
- token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
|
||||
- token.tokens = [];
|
||||
- this.lexer.inline(token.text, token.tokens);
|
||||
- }
|
||||
return token;
|
||||
}
|
||||
@@ -421,15 +418,9 @@ module.exports = class Tokenizer {
|
||||
@@ -477,15 +470,9 @@ module.exports = class Tokenizer {
|
||||
|
||||
return {
|
||||
- type: this.options.sanitize
|
||||
@@ -79,8 +80,8 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
- : 'html',
|
||||
+ type: 'html',
|
||||
raw: cap[0],
|
||||
inLink,
|
||||
inRawBlock,
|
||||
inLink: this.lexer.state.inLink,
|
||||
inRawBlock: this.lexer.state.inRawBlock,
|
||||
- text: this.options.sanitize
|
||||
- ? (this.options.sanitizer
|
||||
- ? this.options.sanitizer(cap[0])
|
||||
@@ -89,7 +90,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
+ text: cap[0]
|
||||
};
|
||||
}
|
||||
@@ -550,10 +541,10 @@ module.exports = class Tokenizer {
|
||||
@@ -672,10 +659,10 @@ module.exports = class Tokenizer {
|
||||
}
|
||||
|
||||
- autolink(src, mangle) {
|
||||
@@ -102,7 +103,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
+ text = escape(cap[1]);
|
||||
href = 'mailto:' + text;
|
||||
} else {
|
||||
@@ -578,10 +569,10 @@ module.exports = class Tokenizer {
|
||||
@@ -700,10 +687,10 @@ module.exports = class Tokenizer {
|
||||
}
|
||||
|
||||
- url(src, mangle) {
|
||||
@@ -115,15 +116,15 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
+ text = escape(cap[0]);
|
||||
href = 'mailto:' + text;
|
||||
} else {
|
||||
@@ -615,12 +606,12 @@ module.exports = class Tokenizer {
|
||||
@@ -737,12 +724,12 @@ module.exports = class Tokenizer {
|
||||
}
|
||||
|
||||
- inlineText(src, inRawBlock, smartypants) {
|
||||
+ inlineText(src, inRawBlock) {
|
||||
- inlineText(src, smartypants) {
|
||||
+ inlineText(src) {
|
||||
const cap = this.rules.inline.text.exec(src);
|
||||
if (cap) {
|
||||
let text;
|
||||
if (inRawBlock) {
|
||||
if (this.lexer.state.inRawBlock) {
|
||||
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
|
||||
+ text = cap[0];
|
||||
} else {
|
||||
@@ -134,7 +135,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||
diff --git a/src/defaults.js b/src/defaults.js
|
||||
--- a/src/defaults.js
|
||||
+++ b/src/defaults.js
|
||||
@@ -8,12 +8,8 @@ function getDefaults() {
|
||||
@@ -9,12 +9,8 @@ function getDefaults() {
|
||||
highlight: null,
|
||||
langPrefix: 'language-',
|
||||
- mangle: true,
|
||||
@@ -170,7 +171,7 @@ diff --git a/src/helpers.js b/src/helpers.js
|
||||
+function cleanUrl(base, href) {
|
||||
if (base && !originIndependentUrl.test(href)) {
|
||||
href = resolveUrl(base, href);
|
||||
@@ -223,10 +210,4 @@ function findClosingBracket(str, b) {
|
||||
@@ -227,10 +214,4 @@ function findClosingBracket(str, b) {
|
||||
}
|
||||
|
||||
-function checkSanitizeDeprecation(opt) {
|
||||
@@ -179,14 +180,13 @@ diff --git a/src/helpers.js b/src/helpers.js
|
||||
- }
|
||||
-}
|
||||
-
|
||||
module.exports = {
|
||||
escape,
|
||||
@@ -239,5 +220,4 @@ module.exports = {
|
||||
splitCells,
|
||||
// copied from https://stackoverflow.com/a/5450113/806777
|
||||
function repeatString(pattern, count) {
|
||||
@@ -260,5 +241,4 @@ module.exports = {
|
||||
rtrim,
|
||||
- findClosingBracket,
|
||||
- checkSanitizeDeprecation
|
||||
+ findClosingBracket
|
||||
findClosingBracket,
|
||||
- checkSanitizeDeprecation,
|
||||
repeatString
|
||||
};
|
||||
diff --git a/src/marked.js b/src/marked.js
|
||||
--- a/src/marked.js
|
||||
@@ -203,8 +203,14 @@ diff --git a/src/marked.js b/src/marked.js
|
||||
- checkSanitizeDeprecation(opt);
|
||||
|
||||
if (callback) {
|
||||
@@ -108,5 +106,5 @@ function marked(src, opt, callback) {
|
||||
return Parser.parse(tokens, opt);
|
||||
@@ -302,5 +300,4 @@ marked.parseInline = function(src, opt) {
|
||||
|
||||
opt = merge({}, marked.defaults, opt || {});
|
||||
- checkSanitizeDeprecation(opt);
|
||||
|
||||
try {
|
||||
@@ -311,5 +308,5 @@ marked.parseInline = function(src, opt) {
|
||||
return Parser.parseInline(tokens, opt);
|
||||
} catch (e) {
|
||||
- e.message += '\nPlease report this to https://github.com/markedjs/marked.';
|
||||
+ e.message += '\nmake issue @ https://github.com/9001/copyparty';
|
||||
@@ -252,86 +258,87 @@ diff --git a/test/bench.js b/test/bench.js
|
||||
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
|
||||
--- a/test/specs/run-spec.js
|
||||
+++ b/test/specs/run-spec.js
|
||||
@@ -22,8 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
|
||||
@@ -22,9 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
|
||||
}
|
||||
|
||||
- if (spec.options.sanitizer) {
|
||||
- // eslint-disable-next-line no-eval
|
||||
- spec.options.sanitizer = eval(spec.options.sanitizer);
|
||||
- }
|
||||
|
||||
-
|
||||
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
|
||||
@@ -53,3 +49,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
|
||||
const before = process.hrtime();
|
||||
@@ -53,3 +48,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
|
||||
runSpecs('New', './new');
|
||||
runSpecs('ReDOS', './redos');
|
||||
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
|
||||
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
--- a/test/unit/Lexer-spec.js
+++ b/test/unit/Lexer-spec.js
@@ -465,5 +465,5 @@ a | b
@@ -589,5 +589,5 @@ paragraph
});

- it('sanitize', () => {
+ /*it('sanitize', () => {
expectTokens({
md: '<div>html</div>',
@@ -483,5 +483,5 @@ a | b
@@ -607,5 +607,5 @@ paragraph
]
});
- });
+ });*/
});

@@ -587,5 +587,5 @@ a | b
@@ -652,5 +652,5 @@ paragraph
});

- it('html sanitize', () => {
+ /*it('html sanitize', () => {
expectInlineTokens({
md: '<div>html</div>',
@@ -597,5 +597,5 @@ a | b
@@ -660,5 +660,5 @@ paragraph
]
});
- });
+ });*/

it('link', () => {
@@ -909,5 +909,5 @@ a | b
@@ -971,5 +971,5 @@ paragraph
});

- it('autolink mangle email', () => {
+ /*it('autolink mangle email', () => {
expectInlineTokens({
md: '<test@example.com>',
@@ -929,5 +929,5 @@ a | b
@@ -991,5 +991,5 @@ paragraph
]
});
- });
+ });*/

it('url', () => {
@@ -966,5 +966,5 @@ a | b
@@ -1028,5 +1028,5 @@ paragraph
});

- it('url mangle email', () => {
+ /*it('url mangle email', () => {
expectInlineTokens({
md: 'test@example.com',
@@ -986,5 +986,5 @@ a | b
@@ -1048,5 +1048,5 @@ paragraph
]
});
- });
+ });*/
});

@@ -1002,5 +1002,5 @@ a | b
@@ -1064,5 +1064,5 @@ paragraph
});

- describe('smartypants', () => {
+ /*describe('smartypants', () => {
it('single quotes', () => {
expectInlineTokens({
@@ -1072,5 +1072,5 @@ a | b
@@ -1134,5 +1134,5 @@ paragraph
});
});
- });

@@ -2,7 +2,7 @@ all: $(addsuffix .gz, $(wildcard *.*))

%.gz: %
#brotli -q 11 $<
pigz -11 -J 34 -I 573 $<
pigz -11 -I 573 $<

# pigz -11 -J 34 -I 100 -F < $< > $@.first

@@ -2,6 +2,7 @@
set -e
echo

help() { exec cat <<'EOF'

# optional args:
#
@@ -15,17 +16,19 @@ echo
#
# `no-sh` makes just the python sfx, skips the sh/unix sfx
#
# `no-ogv` saves ~500k by removing the opus/vorbis audio codecs
# `no-ogv` saves ~192k by removing the opus/vorbis audio codecs
# (only affects apple devices; everything else has native support)
#
# `no-cm` saves ~90k by removing easymde/codemirror
# `no-cm` saves ~92k by removing easymde/codemirror
# (the fancy markdown editor)
#
# `no-fnt` saves ~9k by removing the source-code-pro font
# (mainly used my the markdown viewer/editor)
# (browsers will try to use 'Consolas' instead)
#
# `no-dd` saves ~2k by removing the mouse cursor

EOF
}

# port install gnutar findutils gsed coreutils
gtar=$(command -v gtar || command -v gnutar) || true
@@ -34,6 +37,8 @@ gtar=$(command -v gtar || command -v gnutar) || true
sed() { gsed "$@"; }
find() { gfind "$@"; }
sort() { gsort "$@"; }
shuf() { gshuf "$@"; }
nproc() { gnproc; }
sha1sum() { shasum "$@"; }
unexpand() { gunexpand "$@"; }
command -v grealpath >/dev/null &&
@@ -62,6 +67,7 @@ pybin=$(command -v python3 || command -v python) || {
use_gz=
do_sh=1
do_py=1
zopf=2560
while [ ! -z "$1" ]; do
case $1 in
clean) clean=1 ; ;;
@@ -73,6 +79,8 @@ while [ ! -z "$1" ]; do
no-cm) no_cm=1 ; ;;
no-sh) do_sh= ; ;;
no-py) do_py= ; ;;
fast) zopf=100 ; ;;
*) help ; ;;
esac
shift
done
@@ -130,7 +138,7 @@ tmpdir="$(
# msys2 tar is bad, make the best of it
echo collecting source
[ $clean ] && {
(cd .. && git archive master >tar) && tar -xf ../tar copyparty
(cd .. && git archive hovudstraum >tar) && tar -xf ../tar copyparty
(cd .. && tar -cf tar copyparty/web/deps) && tar -xf ../tar
}
[ $clean ] || {
@@ -140,6 +148,7 @@ tmpdir="$(
}

ver=
[ -z "$repack" ] &&
git describe --tags >/dev/null 2>/dev/null && {
git_ver="$(git describe --tags)"; # v0.5.5-2-gb164aa0
ver="$(printf '%s\n' "$git_ver" | sed -r 's/^v//')";
@@ -171,7 +180,7 @@ git describe --tags >/dev/null 2>/dev/null && {

[ -z "$ver" ] &&
ver="$(awk '/^VERSION *= \(/ {
gsub(/[^0-9,]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"
gsub(/[^0-9,a-g-]/,""); gsub(/,/,"."); print; exit}' < copyparty/__version__.py)"

ts=$(date -u +%s)
hts=$(date -u +%Y-%m%d-%H%M%S) # --date=@$ts (thx osx)
@@ -197,6 +206,15 @@ while IFS= read -r x; do
tmv "$x"
done

find copyparty | LC_ALL=C sort | sed 's/\.gz$//;s/$/,/' > have
cat have | while IFS= read -r x; do
grep -qF -- "$x" ../scripts/sfx.ls || {
echo "unexpected file: $x"
exit 1
}
done
rm have

[ $no_ogv ] &&
rm -rf copyparty/web/deps/{dynamicaudio,ogv}*

@@ -204,19 +222,24 @@ done
rm -rf copyparty/web/mde.* copyparty/web/deps/easymde*
echo h > copyparty/web/mde.html
f=copyparty/web/md.html
sed -r '/edit2">edit \(fancy/d' <$f >t && tmv "$f"
sed -r '/edit2">edit \(fancy/d' <$f >t
tmv "$f"
}

[ $no_fnt ] && {
rm -f copyparty/web/deps/scp.woff2
f=copyparty/web/md.css
sed -r '/scp\.woff2/d' <$f >t && tmv "$f"
f=copyparty/web/ui.css
gzip -d "$f.gz" || true
sed -r "s/src:.*scp.*\)/src:local('Consolas')/" <$f >t
tmv "$f"
}

[ $no_dd ] && {
rm -rf copyparty/web/dd
f=copyparty/web/browser.css
sed -r 's/(cursor: )url\([^)]+\), (pointer)/\1\2/; /[0-9]+% \{cursor:/d; /animation: cursor/d' <$f >t && tmv "$f"
gzip -d "$f.gz" || true
sed -r 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; s/[0-9]+% \{cursor:[^}]+\}//; s/animation: ?cursor[^};]+//' <$f >t
tmv "$f"
}

[ $repack ] ||
@@ -229,8 +252,15 @@ f=dep-j2/jinja2/constants.py
awk '/^LOREM_IPSUM_WORDS/{o=1;print "LOREM_IPSUM_WORDS = u\"a\"";next} !o; /"""/{o=0}' <$f >t
tmv "$f"

grep -rLE '^#[^a-z]*coding: utf-8' dep-j2 |
while IFS= read -r f; do
(echo "# coding: utf-8"; cat "$f") >t
tmv "$f"
done

# up2k goes from 28k to 22k laff
echo entabbening
awk 'BEGIN{gensub(//,"",1)}' </dev/null &&
echo entabbening &&
find | grep -E '\.css$' | while IFS= read -r f; do
awk '{
sub(/^[ \t]+/,"");
@@ -241,9 +271,10 @@ find | grep -E '\.css$' | while IFS= read -r f; do
}
!/\}$/ {printf "%s",$0;next}
1
' <$f | sed 's/;\}$/}/' >t
' <$f | sed -r 's/;\}$/}/; /\{\}$/d' >t
tmv "$f"
done
unexpand -h 2>/dev/null &&
find | grep -E '\.(js|html)$' | while IFS= read -r f; do
unexpand -t 4 --first-only <"$f" >t
tmv "$f"
@@ -251,14 +282,27 @@ done

gzres() {
command -v pigz &&
pk='pigz -11 -J 34 -I 256' ||
pk="pigz -11 -I $zopf" ||
pk='gzip'

echo "$pk"
find | grep -E '\.(js|css)$' | grep -vF /deps/ | while IFS= read -r f; do
np=$(nproc)
echo "$pk #$np"

while IFS=' ' read -r _ f; do
while true; do
na=$(ps auxwww | grep -F "$pk" | wc -l)
[ $na -le $np ] && break
sleep 0.2
done
echo -n .
$pk "$f"
done
$pk "$f" &
done < <(
find -printf '%s %p\n' |
grep -E '\.(js|css)$' |
grep -vF /deps/ |
sort -nr
)
wait
echo
}

@@ -268,7 +312,7 @@ zdir="$tmpdir/cpp-mksfx"
mkdir -p "$zdir"
echo a > "$zdir/$stamp"
nf=$(ls -1 "$zdir"/arc.* | wc -l)
[ $nf -ge 10 ] && [ ! $repack ] && use_zdir=1 || use_zdir=
[ $nf -ge 2 ] && [ ! $repack ] && use_zdir=1 || use_zdir=

[ $use_zdir ] || {
echo "$nf alts += 1"
@@ -292,7 +336,7 @@ for d in copyparty dep-j2; do find $d -type f; done |
sed -r 's/(.*)\.(.*)/\2 \1/' | LC_ALL=C sort |
sed -r 's/([^ ]*) (.*)/\2.\1/' | grep -vE '/list1?$' > list1

(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1) >list || true
(grep -vE '\.(gz|br)$' list1; grep -E '\.(gz|br)$' list1 | shuf) >list || true

echo creating tar
args=(--owner=1000 --group=1000)

@@ -61,7 +61,7 @@ rls_dir="$tmp/copyparty-$ver"
mkdir "$rls_dir"

echo ">>> export from git"
git archive master | tar -xC "$rls_dir"
git archive hovudstraum | tar -xC "$rls_dir"

echo ">>> export untracked deps"
tar -c copyparty/web/deps | tar -xC "$rls_dir"
@@ -122,5 +122,5 @@ echo " $zip_path"
echo " $tgz_path"
echo

# function alr() { ls -alR copyparty-$1 | sed -r "s/copyparty-$1/copyparty/" | sed -r 's/[A-Z][a-z]{2} [0-9 ]{2} [0-9]{2}:[0-9]{2}//' > $1; }; for x in master rls src ; do alr $x; done
# function alr() { ls -alR copyparty-$1 | sed -r "s/copyparty-$1/copyparty/" | sed -r 's/[A-Z][a-z]{2} [0-9 ]{2} [0-9]{2}:[0-9]{2}//' > $1; }; for x in hovudstraum rls src ; do alr $x; done

scripts/rls.sh (new executable file, 36 lines)
@@ -0,0 +1,36 @@
#!/bin/bash
set -e

cd ~/dev/copyparty/scripts

v=$1
printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1
grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1

git tag v$v
git push origin --tags

rm -rf ../dist

./make-pypi-release.sh u
(cd .. && python3 ./setup.py clean2)

./make-tgz-release.sh $v

rm -f ../dist/copyparty-sfx.*
./make-sfx.sh no-sh
../dist/copyparty-sfx.py -h

ar=
while true; do
for ((a=0; a<100; a++)); do
for f in ../dist/copyparty-sfx.{py,sh}; do
[ -e $f ] || continue;
mv $f $f.$(wc -c <$f | awk '{print$1}')
done
./make-sfx.sh re $ar
done
ar=no-sh
done

# git tag -d v$v; git push --delete origin v$v

scripts/sfx.ls (new file, 77 lines)
@@ -0,0 +1,77 @@
copyparty,
copyparty/__init__.py,
copyparty/__main__.py,
copyparty/__version__.py,
copyparty/authsrv.py,
copyparty/bos,
copyparty/bos/__init__.py,
copyparty/bos/bos.py,
copyparty/bos/path.py,
copyparty/broker_mp.py,
copyparty/broker_mpw.py,
copyparty/broker_thr.py,
copyparty/broker_util.py,
copyparty/httpcli.py,
copyparty/httpconn.py,
copyparty/httpsrv.py,
copyparty/ico.py,
copyparty/mtag.py,
copyparty/res,
copyparty/res/insecure.pem,
copyparty/star.py,
copyparty/stolen,
copyparty/stolen/__init__.py,
copyparty/stolen/surrogateescape.py,
copyparty/sutil.py,
copyparty/svchub.py,
copyparty/szip.py,
copyparty/tcpsrv.py,
copyparty/th_cli.py,
copyparty/th_srv.py,
copyparty/u2idx.py,
copyparty/up2k.py,
copyparty/util.py,
copyparty/web,
copyparty/web/baguettebox.js,
copyparty/web/browser.css,
copyparty/web/browser.html,
copyparty/web/browser.js,
copyparty/web/browser2.html,
copyparty/web/copyparty.gif,
copyparty/web/dd,
copyparty/web/dd/2.png,
copyparty/web/dd/3.png,
copyparty/web/dd/4.png,
copyparty/web/dd/5.png,
copyparty/web/deps,
copyparty/web/deps/easymde.css,
copyparty/web/deps/easymde.js,
copyparty/web/deps/marked.js,
copyparty/web/deps/mini-fa.css,
copyparty/web/deps/mini-fa.woff,
copyparty/web/deps/ogv-decoder-audio-opus-wasm.js,
copyparty/web/deps/ogv-decoder-audio-opus-wasm.wasm,
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.js,
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.wasm,
copyparty/web/deps/ogv-demuxer-ogg-wasm.js,
copyparty/web/deps/ogv-demuxer-ogg-wasm.wasm,
copyparty/web/deps/ogv-worker-audio.js,
copyparty/web/deps/ogv.js,
copyparty/web/deps/scp.woff2,
copyparty/web/deps/sha512.ac.js,
copyparty/web/deps/sha512.hw.js,
copyparty/web/md.css,
copyparty/web/md.html,
copyparty/web/md.js,
copyparty/web/md2.css,
copyparty/web/md2.js,
copyparty/web/mde.css,
copyparty/web/mde.html,
copyparty/web/mde.js,
copyparty/web/msg.css,
copyparty/web/msg.html,
copyparty/web/splash.css,
copyparty/web/splash.html,
copyparty/web/ui.css,
copyparty/web/up2k.js,
copyparty/web/util.js,
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: latin-1
from __future__ import print_function, unicode_literals

@@ -9,7 +9,7 @@ import subprocess as sp
to edit this file, use HxD or "vim -b"
(there is compressed stuff at the end)

run me with any version of python, i will unpack and run copyparty
run me with python 2.7 or 3.3+ to unpack and run copyparty

there's zero binaries! just plaintext python scripts all the way down
so you can easily unpack the archive and inspect it for shady stuff
@@ -364,7 +364,7 @@ def confirm(rv):
except:
pass

sys.exit(rv)
sys.exit(rv or 1)


def run(tmp, j2):

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3

import os
import sys

@@ -60,7 +60,7 @@ class Cpp(object):
pass


def tc1():
def tc1(vflags):
ub = "http://127.0.0.1:4321/"
td = os.path.join("srv", "smoketest")
try:
@@ -100,16 +100,17 @@ def tc1():
for d1 in ["r", "w", "a"]:
pdirs.append("{}/{}".format(td, d1))
pdirs.append("{}/{}/j".format(td, d1))
for d2 in ["r", "w", "a"]:
for d2 in ["r", "w", "a", "c"]:
d = os.path.join(td, d1, "j", d2)
pdirs.append(d)
os.makedirs(d)

pdirs = [x.replace("\\", "/") for x in pdirs]
udirs = [x.split("/", 2)[2] for x in pdirs]
perms = [x.rstrip("j/")[-1] for x in pdirs]
perms = [x.rstrip("cj/")[-1] for x in pdirs]
perms = ["rw" if x == "a" else x for x in perms]
for pd, ud, p in zip(pdirs, udirs, perms):
if ud[-1] == "j":
if ud[-1] == "j" or ud[-1] == "c":
continue

hp = None
@@ -122,47 +123,55 @@ def tc1():
hp = "-"
hpaths[ud] = os.path.join(pd, ".hist")

arg = "{}:{}:{}".format(pd, ud, p, hp)
arg = "{}:{}:{}".format(pd, ud, p)
if hp:
arg += ":chist=" + hp
arg += ":c,hist=" + hp

args += ["-v", arg]
args += ["-v", arg + vflags]

# return
cpp = Cpp(args)
CPP.append(cpp)
cpp.await_idle(ub, 3)

for d in udirs:
for d, p in zip(udirs, perms):
vid = ovid + "\n{}".format(d).encode("utf-8")
try:
requests.post(ub + d, data={"act": "bput"}, files={"f": ("a.h264", vid)})
except:
pass
r = requests.post(
ub + d,
data={"act": "bput"},
files={"f": (d.replace("/", "") + ".h264", vid)},
)
c = r.status_code
if c == 200 and p not in ["w", "rw"]:
raise Exception("post {} with perm {} at {}".format(c, p, d))
elif c == 403 and p not in ["r"]:
raise Exception("post {} with perm {} at {}".format(c, p, d))
elif c not in [200, 403]:
raise Exception("post {} with perm {} at {}".format(c, p, d))

cpp.clean()

# GET permission
for d, p in zip(udirs, perms):
u = "{}{}/a.h264".format(ub, d)
u = "{}{}/{}.h264".format(ub, d, d.replace("/", ""))
r = requests.get(u)
ok = bool(r)
if ok != (p in ["a"]):
if ok != (p in ["rw"]):
raise Exception("get {} with perm {} at {}".format(ok, p, u))

# stat filesystem
for d, p in zip(pdirs, perms):
u = "{}/a.h264".format(d)
u = "{}/{}.h264".format(d, d.split("test/")[-1].replace("/", ""))
ok = os.path.exists(u)
if ok != (p in ["a", "w"]):
if ok != (p in ["rw", "w"]):
raise Exception("stat {} with perm {} at {}".format(ok, p, u))

# GET thumbnail, vreify contents
for d, p in zip(udirs, perms):
u = "{}{}/a.h264?th=j".format(ub, d)
u = "{}{}/{}.h264?th=j".format(ub, d, d.replace("/", ""))
r = requests.get(u)
ok = bool(r and r.content[:3] == b"\xff\xd8\xff")
if ok != (p in ["a"]):
if ok != (p in ["rw"]):
raise Exception("thumb {} with perm {} at {}".format(ok, p, u))

# check tags
@@ -179,10 +188,10 @@ def tc1():
r_ok = bool(j)
w_ok = bool(r_ok and j.get("files"))

if not r_ok or w_ok != (p in ["a"]):
if not r_ok or w_ok != (p in ["rw"]):
raise Exception("ls {} with perm {} at {}".format(ok, p, u))

if (tag and p != "a") or (not tag and p == "a"):
if (tag and p != "rw") or (not tag and p == "rw"):
raise Exception("tag {} with perm {} at {}".format(tag, p, u))

if tag is not None and tag != "48x32":
@@ -191,9 +200,9 @@ def tc1():
cpp.stop(True)


def run(tc):
def run(tc, *a):
try:
tc()
tc(*a)
finally:
try:
CPP[0].stop(False)
@@ -202,7 +211,8 @@ def run(tc):


def main():
run(tc1)
run(tc1, "")
run(tc1, ":c,fk")


if __name__ == "__main__":

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals

@@ -8,7 +8,7 @@ import tokenize


def uncomment(fpath):
""" modified https://stackoverflow.com/a/62074206 """
"""modified https://stackoverflow.com/a/62074206"""

with open(fpath, "rb") as f:
orig = f.read().decode("utf-8")

setup.py (10 lines changed)
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function

@@ -61,7 +61,7 @@ class clean2(Command):
pass

nuke = []
for (dirpath, dirnames, filenames) in os.walk("."):
for (dirpath, _, filenames) in os.walk("."):
for fn in filenames:
if (
fn.startswith("MANIFEST")
@@ -86,7 +86,7 @@ args = {
"url": "https://github.com/9001/copyparty",
"license": "MIT",
"classifiers": [
"Development Status :: 4 - Beta",
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
@@ -99,7 +99,9 @@ args = {
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: Jython",
"Programming Language :: Python :: Implementation :: PyPy",
"Environment :: Console",
"Environment :: No Input/Output (Daemon)",
@@ -112,7 +114,7 @@ args = {
"install_requires": ["jinja2"],
"extras_require": {"thumbnails": ["Pillow"], "audiotags": ["mutagen"]},
"entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]},
"scripts": ["bin/copyparty-fuse.py"],
"scripts": ["bin/copyparty-fuse.py", "bin/up2k.py"],
"cmdclass": {"clean2": clean2},
}

srv/test.md (20 lines changed)
@@ -1,11 +1,17 @@
### hello world

* qwe
* asd
* zxc
* 573
* one
* two
* rty
* uio
* asd
* fgh
* jkl
* zxc
* vbn
* 573
* one
* two
* three

* |||
|--|--|
@@ -134,12 +140,12 @@ a newline toplevel
| a table | on the right |
| second row | foo bar |

||
a||a
--|:-:|-:
a table | big text in this | aaakbfddd
second row | centred | bbb

||
||||
--|--|--
foo

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals

@@ -39,12 +39,18 @@ class Cfg(Namespace):
no_scandir=False,
no_sendfile=True,
no_rescan=True,
no_logues=False,
no_readme=False,
re_maxage=0,
ihead=False,
nih=True,
mtp=[],
mte="a",
mth="",
hist=None,
no_hash=False,
no_idx=None,
no_hash=None,
js_browser=None,
css_browser=None,
**{k: False for k in "e2d e2ds e2dsa e2t e2ts e2tsr".split()}
)
@@ -94,7 +100,7 @@ class TestHttpCli(unittest.TestCase):
if not vol.startswith(top):
continue

mode = vol[-2].replace("a", "rwmd")
mode = vol[-2].replace("a", "rw")
usr = vol[-1]
if usr == "a":
usr = ""
@@ -149,6 +155,7 @@ class TestHttpCli(unittest.TestCase):
tar = tarfile.open(fileobj=io.BytesIO(b)).getnames()
except:
tar = []
tar = [x[4:] if x.startswith("top/") else x for x in tar]
tar = ["/".join([y for y in [top, durl, x] if y]) for x in tar]
tar = [[x] + self.can_rw(x) for x in tar]
tar_ok = [x[0] for x in tar if x[1]]

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals

@@ -21,10 +21,16 @@ class Cfg(Namespace):
ex2 = {
"mtp": [],
"mte": "a",
"mth": "",
"hist": None,
"no_hash": False,
"no_idx": None,
"no_hash": None,
"js_browser": None,
"css_browser": None,
"no_voldump": True,
"no_logues": False,
"no_readme": False,
"re_maxage": 0,
"rproxy": 0,
}
ex.update(ex2)
@@ -193,10 +199,10 @@ class TestVFS(unittest.TestCase):
self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertAxs(n.axs.uread, ["*"])
self.assertAxs(n.axs.uwrite, [])
self.assertEqual(vfs.can_access("/", "*"), [False, False, False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True, False, False])
self.assertEqual(vfs.can_access("/a", "*"), [True, False, False, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False, False, False])
self.assertEqual(vfs.can_access("/", "*"), [False, False, False, False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True, False, False, False])
self.assertEqual(vfs.can_access("/a", "*"), [True, False, False, False, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False, False, False, False])

# breadth-first construction
vfs = AuthSrv(

@@ -3,6 +3,7 @@ import sys
import time
import shutil
import jinja2
import threading
import tempfile
import platform
import subprocess as sp
@@ -28,7 +29,7 @@ if MACOS:
# 25% faster; until any tests do symlink stuff


from copyparty.util import Unrecv
from copyparty.util import Unrecv, FHC


def runcmd(argv):
@@ -132,8 +133,10 @@ class VHttpConn(object):
self.log_src = "a"
self.lf_url = None
self.hsrv = VHttpSrv()
self.u2fh = FHC()
self.mutex = threading.Lock()
self.nreq = 0
self.nbyte = 0
self.ico = None
self.thumbcli = None
self.t0 = time.time()