Mirror of https://github.com/9001/copyparty.git (synced 2025-11-06 14:53:17 +00:00)
Compare commits: 114 commits, from f273253a2b (oldest) through f888bed1a6 (newest); the author and date columns of the commit table were empty.
.gitignore (vendored), 4 lines changed:

@@ -20,3 +20,7 @@ sfx/
 # derived
 copyparty/web/deps/
 srv/
+
+# state/logs
+up.*.txt
+.hist/
.vscode/launch.json (vendored), 2 lines changed:

@@ -17,7 +17,7 @@
 "-mtp",
 ".bpm=f,bin/mtag/audio-bpm.py",
 "-aed:wark",
-"-vsrv::r:aed:cnodupe",
+"-vsrv::r:rw,ed:c,dupe",
 "-vdist:dist:r"
 ]
 },
.vscode/settings.json (vendored), 1 line changed:

@@ -55,4 +55,5 @@
 "py27"
 ],
 "python.linting.enabled": true,
+"python.pythonPath": "/usr/bin/python3"
 }
README.md, 126 lines changed:

@@ -19,15 +19,18 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 ## readme toc
 
 * top
-* **[quickstart](#quickstart)** - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
+* [quickstart](#quickstart) - download **[copyparty-sfx.py](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py)** and you're all set!
 * [on servers](#on-servers) - you may also want these, especially on servers
 * [on debian](#on-debian) - recommended additional steps on debian
 * [notes](#notes) - general notes
 * [status](#status) - feature summary
 * [testimonials](#testimonials) - small collection of user feedback
+* [motivations](#motivations) - project goals / philosophy
+* [future plans](#future-plans) - some improvement ideas
 * [bugs](#bugs)
 * [general bugs](#general-bugs)
 * [not my bugs](#not-my-bugs)
+* [FAQ](#FAQ) - "frequently" asked questions
 * [accounts and volumes](#accounts-and-volumes) - per-folder, per-user permissions
 * [the browser](#the-browser) - accessing a copyparty server using a web-browser
 * [tabs](#tabs) - the main tabs in the ui
@@ -50,6 +53,7 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * [database location](#database-location) - in-volume (`.hist/up2k.db`, default) or somewhere else
 * [metadata from audio files](#metadata-from-audio-files) - set `-e2t` to index tags on upload
 * [file parser plugins](#file-parser-plugins) - provide custom parsers to index additional tags
+* [upload events](#upload-events) - trigger a script/program on each upload
 * [complete examples](#complete-examples)
 * [browser support](#browser-support) - TLDR: yes
 * [client examples](#client-examples) - interact with copyparty using non-browser clients
@@ -58,6 +62,9 @@ turn your phone or raspi into a portable file server with resumable uploads/down
 * [performance](#performance) - defaults are usually fine - expect `8 GiB/s` download, `1 GiB/s` upload
 * [security](#security) - some notes on hardening
 * [gotchas](#gotchas) - behavior that might be unexpected
+* [recovering from crashes](#recovering-from-crashes)
+* [client crashes](#client-crashes)
+* [frefox wsod](#frefox-wsod) - firefox 87 can crash during uploads
 * [dependencies](#dependencies) - mandatory deps
 * [optional dependencies](#optional-dependencies) - install these to enable bonus features
 * [install recommended deps](#install-recommended-deps)
@@ -84,7 +91,7 @@ some recommended options:
 * `-e2ts` enables audio metadata indexing (needs either FFprobe or Mutagen), see [optional dependencies](#optional-dependencies)
 * `-v /mnt/music:/music:r:rw,foo -a foo:bar` shares `/mnt/music` as `/music`, `r`eadable by anyone, and read-write for user `foo`, password `bar`
 * replace `:r:rw,foo` with `:r,foo` to only make the folder readable by `foo` and nobody else
-* see [accounts and volumes](#accounts-and-volumes) for the syntax and other access levels (`r`ead, `w`rite, `m`ove, `d`elete)
+* see [accounts and volumes](#accounts-and-volumes) for the syntax and other permissions (`r`ead, `w`rite, `m`ove, `d`elete, `g`et)
 * `--ls '**,*,ln,p,r'` to crash on startup if any of the volumes contain a symlink which point outside the volume, as that could give users unintended access
 
 
@@ -170,6 +177,41 @@ small collection of user feedback
 `good enough`, `surprisingly correct`, `certified good software`, `just works`, `why`
 
 
+# motivations
+
+project goals / philosophy
+
+* inverse linux philosophy -- do all the things, and do an *okay* job
+* quick drop-in service to get a lot of features in a pinch
+* there are probably [better alternatives](https://github.com/awesome-selfhosted/awesome-selfhosted) if you have specific/long-term needs
+* run anywhere, support everything
+* as many web-browsers and python versions as possible
+* every browser should at least be able to browse, download, upload files
+* be a good emergency solution for transferring stuff between ancient boxes
+* minimal dependencies
+* but optional dependencies adding bonus-features are ok
+* everything being plaintext makes it possible to proofread for malicious code
+* no preparations / setup necessary, just run the sfx (which is also plaintext)
+* adaptable, malleable, hackable
+* no build steps; modify the js/python without needing node.js or anything like that
+
+
+## future plans
+
+some improvement ideas
+
+* the JS is a mess -- a preact rewrite would be nice
+* preferably without build dependencies like webpack/babel/node.js, maybe a python thing to assemble js files into main.js
+* good excuse to look at using virtual lists (browsers start to struggle when folders contain over 5000 files)
+* the UX is a mess -- a proper design would be nice
+* very organic (much like the python/js), everything was an afterthought
+* true for both the layout and the visual flair
+* something like the tron board-room ui (or most other hollywood ones, like ironman) would be :100:
+* some of the python files are way too big
+* `up2k.py` ended up doing all the file indexing / db management
+* `httpcli.py` should be separated into modules in general
+
+
 # bugs
 
 * Windows: python 3.7 and older cannot read tags with FFprobe, so use Mutagen or upgrade
@@ -194,6 +236,17 @@ small collection of user feedback
 * use `--hist` or the `hist` volflag (`-v [...]:c,hist=/tmp/foo`) to place the db inside the vm instead
 
 
+# FAQ
+
+"frequently" asked questions
+
+* is it possible to block read-access to folders unless you know the exact URL for a particular file inside?
+* yes, using the [`g` permission](#accounts-and-volumes), see the examples there
+
+* can I make copyparty download a file to my server if I give it a URL?
+* not officially, but there is a [terrible hack](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/wget.py) which makes it possible
+
+
 # accounts and volumes
 
 per-folder, per-user permissions
@@ -208,6 +261,7 @@ permissions:
 * `w` (write): upload files, move files *into* this folder
 * `m` (move): move files/folders *from* this folder
 * `d` (delete): delete files/folders
+* `g` (get): only download files, cannot see folder contents or zip/tar
 
 examples:
 * add accounts named u1, u2, u3 with passwords p1, p2, p3: `-a u1:p1 -a u2:p2 -a u3:p3`
@@ -218,6 +272,10 @@ examples:
 * unauthorized users accessing the webroot can see that the `inc` folder exists, but cannot open it
 * `u1` can open the `inc` folder, but cannot see the contents, only upload new files to it
 * `u2` can browse it and move files *from* `/inc` into any folder where `u2` has write-access
+* make folder `/mnt/ss` available at `/i`, read-write for u1, get-only for everyone else, and enable accesskeys: `-v /mnt/ss:i:rw,u1:g:c,fk=4`
+* `c,fk=4` sets the `fk` volume-flag to 4, meaning each file gets a 4-character accesskey
+* `u1` can upload files, browse the folder, and see the generated accesskeys
+* other users cannot browse the folder, but can access the files if they have the full file URL with the accesskey
 
 
 # the browser
@@ -379,7 +437,7 @@ and then theres the tabs below it,
 * plus up to 3 entries each from `[done]` and `[que]` for context
 * `[que]` is all the files that are still queued
 
-note that since up2k has to read each file twice, `[🎈 bup]` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD)
+note that since up2k has to read each file twice, `[🎈 bup]` can *theoretically* be up to 2x faster in some extreme cases (files bigger than your ram, combined with an internet connection faster than the read-speed of your HDD, or if you're uploading from a cuo2duo)
 
 if you are resuming a massive upload and want to skip hashing the files which already finished, you can enable `turbo` in the `[⚙️] config` tab, but please read the tooltip on that button
 
@@ -529,12 +587,12 @@ through arguments:
 * `-e2tsr` also deletes all existing tags, doing a full reindex
 
 the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
-* `-v ~/music::r:c,e2dsa:c,e2tsr` does a full reindex of everything on startup
+* `-v ~/music::r:c,e2dsa,e2tsr` does a full reindex of everything on startup
 * `-v ~/music::r:c,d2d` disables **all** indexing, even if any `-e2*` are on
 * `-v ~/music::r:c,d2t` disables all `-e2t*` (tags), does not affect `-e2d*`
 
 note:
-* the parser currently can't handle `c,e2dsa,e2tsr` so you have to `c,e2dsa:c,e2tsr`
+* the parser can finally handle `c,e2dsa,e2tsr` so you no longer have to `c,e2dsa:c,e2tsr`
 * `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and `e2ts` would then reindex those, unless there is a new copyparty version with new parsers and the release note says otherwise
 * the rescan button in the admin panel has no effect unless the volume has `-e2ds` or higher
 
@@ -642,6 +700,25 @@ copyparty can invoke external programs to collect additional metadata for files
 * `-mtp arch,built,ver,orig=an,eexe,edll,~/bin/exe.py` runs `~/bin/exe.py` to get properties about windows-binaries only if file is not audio (`an`) and file extension is exe or dll
 
 
+## upload events
+
+trigger a script/program on each upload like so:
+
+```
+-v /mnt/inc:inc:w:c,mte=+a1:c,mtp=a1=ad,/usr/bin/notify-send
+```
+
+so filesystem location `/mnt/inc` shared at `/inc`, write-only for everyone, appending `a1` to the list of tags to index, and using `/usr/bin/notify-send` to "provide" that tag
+
+that'll run the command `notify-send` with the path to the uploaded file as the first and only argument (so on linux it'll show a notification on-screen)
+
+note that it will only trigger on new unique files, not dupes
+
+and it will occupy the parsing threads, so fork anything expensive, or if you want to intentionally queue/singlethread you can combine it with `--no-mtag-mt`
+
+if this becomes popular maybe there should be a less janky way to do it actually
+
+
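For reference, a hook wired up this way just receives the uploaded file's absolute path as its only argument, and whatever it prints to stdout is picked up as the value of the `a1` tag (the same contract the single-tag parsers in `bin/mtag/` use). A minimal sketch of a custom hook you could point the `mtp=a1=...` flag at instead of `notify-send`; the filename and the printed value are made up for the example:

```python
#!/usr/bin/env python3
# sketch of an upload-event hook; copyparty would invoke it as:
#   this-script.py /path/to/uploaded/file
import os
import sys


def main():
    path = sys.argv[1]  # absolute path of the file that was just uploaded
    # keep this cheap -- it runs on one of the tag-parsing threads (see the note above)
    print("{} bytes".format(os.path.getsize(path)))


if __name__ == "__main__":
    main()
```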
 ## complete examples
 
 * read-only music server with bpm and key scanning
@@ -672,7 +749,7 @@ TLDR: yes
 | image viewer | - | yep | yep | yep | yep | yep | yep | yep |
 | video player | - | yep | yep | yep | yep | yep | yep | yep |
 | markdown editor | - | - | yep | yep | yep | yep | yep | yep |
-| markdown viewer | - | - | yep | yep | yep | yep | yep | yep |
+| markdown viewer | - | yep | yep | yep | yep | yep | yep | yep |
 | play mp3/m4a | - | yep | yep | yep | yep | yep | yep | yep |
 | play ogg/opus | - | - | - | - | yep | yep | `*3` | yep |
 | **= feature =** | ie6 | ie9 | ie10 | ie11 | ff 52 | c 49 | iOS | Andr |
@@ -687,14 +764,12 @@ quick summary of more eccentric web-browsers trying to view a directory index:
 
 | browser | will it blend |
 | ------- | ------------- |
-| **safari** (14.0.3/macos) | is chrome with janky wasm, so playing opus can deadlock the javascript engine |
-| **safari** (14.0.1/iOS) | same as macos, except it recovers from the deadlocks if you poke it a bit |
 | **links** (2.21/macports) | can browse, login, upload/mkdir/msg |
 | **lynx** (2.8.9/macports) | can browse, login, upload/mkdir/msg |
 | **w3m** (0.5.3/macports) | can browse, login, upload at 100kB/s, mkdir/msg |
 | **netsurf** (3.10/arch) | is basically ie6 with much better css (javascript has almost no effect) |
 | **opera** (11.60/winxp) | OK: thumbnails, image-viewer, zip-selection, rename/cut/paste. NG: up2k, navpane, markdown, audio |
-| **ie4** and **netscape** 4.0 | can browse (text is yellow on white), upload with `?b=u` |
+| **ie4** and **netscape** 4.0 | can browse, upload with `?b=u` |
 | **SerenityOS** (7e98457) | hits a page fault, works with `?b=u`, file upload not-impl |
 
 
@@ -714,6 +789,14 @@ interact with copyparty using non-browser clients
 * `chunk(){ curl -b cppwd=wark -T- http://127.0.0.1:3923/;}`
 `chunk <movie.mkv`
 
+* bash: when curl and wget is not available or too boring
+* `(printf 'PUT /junk?pw=wark HTTP/1.1\r\n\r\n'; cat movie.mkv) | nc 127.0.0.1 3923`
+* `(printf 'PUT / HTTP/1.1\r\n\r\n'; cat movie.mkv) >/dev/tcp/127.0.0.1/3923`
+
+* python: [up2k.py](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) is a command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
+* file uploads, file-search, autoresume of aborted/broken uploads
+* see [./bin/README.md#up2kpy](bin/README.md#up2kpy)
+
 * FUSE: mount a copyparty server as a local filesystem
 * cross-platform python client available in [./bin/](bin/)
 * [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md)
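The same plain-HTTP PUT can also be done from python; a small sketch using `requests`, with the password passed as the `cppwd` cookie just like the `curl -b cppwd=wark` example above (host, path and filename are placeholders):

```python
# sketch: plain HTTP PUT upload, equivalent to the curl/nc one-liners above
import requests

with open("movie.mkv", "rb") as f:
    r = requests.put("http://127.0.0.1:3923/", data=f, cookies={"cppwd": "wark"})

print(r.status_code, r.reason)
```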
@@ -786,6 +869,11 @@ on public copyparty instances with anonymous upload enabled:
 * unless `--no-readme` is set: by uploading/modifying a file named `readme.md`
 * if `move` access is granted AND none of `--no-logues`, `--no-dot-mv`, `--no-dot-ren` is set: by uploading some .html file and renaming it to `.epilogue.html` (uploading it directly is blocked)
 
+other misc:
+
+* you can disable directory listings by giving permission `g` instead of `r`, only accepting direct URLs to files
+* combine this with volume-flag `c,fk` to generate per-file accesskeys; users which have full read-access will then see URLs with `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
+
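In practice that means a `g`-only visitor needs the complete link; a quick sketch of the difference as seen from a script, with made-up host, path and key (the real key is the one shown in the URLs that read-access users see):

```python
# sketch: fetching an fk-protected file with and without its accesskey
import requests

base = "http://127.0.0.1:3923/i/photo.jpg"
print(requests.get(base).status_code)              # 404: no accesskey
print(requests.get(base + "?k=abcd").status_code)  # 200 if "abcd" is the correct key
```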
 
 ## gotchas
 
@@ -794,6 +882,26 @@ behavior that might be unexpected
 * users without read-access to a folder can still see the `.prologue.html` / `.epilogue.html` / `README.md` contents, for the purpose of showing a description on how to use the uploader for example
 
 
+# recovering from crashes
+
+## client crashes
+
+### frefox wsod
+
+firefox 87 can crash during uploads -- the entire browser goes, including all other browser tabs, everything turns white
+
+however you can hit `F12` in the up2k tab and use the devtools to see how far you got in the uploads:
+
+* get a complete list of all uploads, organized by statuts (ok / no-good / busy / queued):
+`var tabs = { ok:[], ng:[], bz:[], q:[] }; for (var a of up2k.ui.tab) tabs[a.in].push(a); tabs`
+
+* list of filenames which failed:
+`var ng = []; for (var a of up2k.ui.tab) if (a.in != 'ok') ng.push(a.hn.split('<a href=\"').slice(-1)[0].split('\">')[0]); ng`
+
+* send the list of filenames to copyparty for safekeeping:
+`await fetch('/inc', {method:'PUT', body:JSON.stringify(ng,null,1)})`
+
+
 # dependencies
 
 mandatory deps:
@@ -1,3 +1,11 @@
+# [`up2k.py`](up2k.py)
+* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
+* file uploads, file-search, autoresume of aborted/broken uploads
+* faster than browsers
+* early beta, if something breaks just restart it
+
+
+
 # [`copyparty-fuse.py`](copyparty-fuse.py)
 * mount a copyparty server as a local filesystem (read-only)
 * **supports Windows!** -- expect `194 MiB/s` sequential read
@@ -47,6 +55,7 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
 * copyparty can Popen programs like these during file indexing to collect additional metadata
 
 
+
 # [`dbtool.py`](dbtool.py)
 upgrade utility which can show db info and help transfer data between databases, for example when a new version of copyparty is incompatible with the old DB and automatically rebuilds the DB from scratch, but you have some really expensive `-mtp` parsers and want to copy over the tags from the old db
 
@@ -63,6 +72,7 @@ cd /mnt/nas/music/.hist
 ```
 
+
 
 # [`prisonparty.sh`](prisonparty.sh)
 * run copyparty in a chroot, preventing any accidental file access
 * creates bindmounts for /bin, /lib, and so on, see `sysdirs=`
@@ -71,7 +71,7 @@ except:
 elif MACOS:
 libfuse = "install https://osxfuse.github.io/"
 else:
-libfuse = "apt install libfuse\n modprobe fuse"
+libfuse = "apt install libfuse3-3\n modprobe fuse"
 
 print(
 "\n could not import fuse; these may help:"
@@ -393,15 +393,16 @@ class Gateway(object):
 
 rsp = json.loads(rsp.decode("utf-8"))
 ret = []
-for is_dir, nodes in [[True, rsp["dirs"]], [False, rsp["files"]]]:
+for statfun, nodes in [
+[self.stat_dir, rsp["dirs"]],
+[self.stat_file, rsp["files"]],
+]:
 for n in nodes:
-fname = unquote(n["href"]).rstrip(b"/")
-fname = fname.decode("wtf-8")
+fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8")
 if bad_good:
 fname = enwin(fname)
 
-fun = self.stat_dir if is_dir else self.stat_file
-ret.append([fname, fun(n["ts"], n["sz"]), 0])
+ret.append([fname, statfun(n["ts"], n["sz"]), 0])
 
 return ret
 
@@ -1,11 +1,19 @@
 standalone programs which take an audio file as argument
 
+**NOTE:** these all require `-e2ts` to be functional, meaning you need to do at least one of these: `apt install ffmpeg` or `pip3 install mutagen`
+
 some of these rely on libraries which are not MIT-compatible
 
 * [audio-bpm.py](./audio-bpm.py) detects the BPM of music using the BeatRoot Vamp Plugin; imports GPL2
 * [audio-key.py](./audio-key.py) detects the melodic key of music using the Mixxx fork of keyfinder; imports GPL3
 * [media-hash.py](./media-hash.py) generates checksums for audio and video streams; uses FFmpeg (LGPL or GPL)
 
+these do not have any problematic dependencies:
+
+* [cksum.py](./cksum.py) computes various checksums
+* [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
+* [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty
+
 
 # dependencies
 
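All of these follow the same plugin contract: copyparty runs the program with the media file's path as `sys.argv[1]`, and either a single printed value (one tag) or a JSON object keyed by tag name (several tags, as in `cksum.py` / `exe.py`) gets indexed. A minimal multi-tag skeleton, with made-up tag names and filename, might look like this:

```python
#!/usr/bin/env python3
# sketch of a multi-tag parser; the tag names are made up and it would be
# registered with something like  -mtp filesize,fileext=bin/mtag/myparser.py
import os
import sys
import json


def main():
    fp = sys.argv[1]  # path of the file being indexed
    print(json.dumps({
        "filesize": os.path.getsize(fp),
        "fileext": os.path.basename(fp).rsplit(".", 1)[-1].lower(),
    }))


if __name__ == "__main__":
    main()
```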
@@ -25,6 +25,7 @@ def det(tf):
 "-v", "fatal",
 "-ss", "13",
 "-y", "-i", fsenc(sys.argv[1]),
+"-map", "0:a:0",
 "-ac", "1",
 "-ar", "22050",
 "-t", "300",
@@ -28,6 +28,7 @@ def det(tf):
 "-hide_banner",
 "-v", "fatal",
 "-y", "-i", fsenc(sys.argv[1]),
+"-map", "0:a:0",
 "-t", "300",
 "-sample_fmt", "s16",
 tf
bin/mtag/cksum.py (new executable file, 89 lines):

```python
#!/usr/bin/env python3

import sys
import json
import zlib
import struct
import base64
import hashlib

try:
    from copyparty.util import fsenc
except:

    def fsenc(p):
        return p


"""
calculates various checksums for uploads,
usage: -mtp crc32,md5,sha1,sha256b=bin/mtag/cksum.py
"""


def main():
    config = "crc32 md5 md5b sha1 sha1b sha256 sha256b sha512/240 sha512b/240"
    # b suffix = base64 encoded
    # slash = truncate to n bits

    known = {
        "md5": hashlib.md5,
        "sha1": hashlib.sha1,
        "sha256": hashlib.sha256,
        "sha512": hashlib.sha512,
    }
    config = config.split()
    hashers = {
        k: v()
        for k, v in known.items()
        if k in [x.split("/")[0].rstrip("b") for x in known]
    }
    crc32 = 0 if "crc32" in config else None

    with open(fsenc(sys.argv[1]), "rb", 512 * 1024) as f:
        while True:
            buf = f.read(64 * 1024)
            if not buf:
                break

            for x in hashers.values():
                x.update(buf)

            if crc32 is not None:
                crc32 = zlib.crc32(buf, crc32)

    ret = {}
    for s in config:
        alg = s.split("/")[0]
        b64 = alg.endswith("b")
        alg = alg.rstrip("b")
        if alg in hashers:
            v = hashers[alg].digest()
        elif alg == "crc32":
            v = crc32
            if v < 0:
                v &= 2 ** 32 - 1
            v = struct.pack(">L", v)
        else:
            raise Exception("what is {}".format(s))

        if "/" in s:
            v = v[: int(int(s.split("/")[1]) / 8)]

        if b64:
            v = base64.b64encode(v).decode("ascii").rstrip("=")
        else:
            try:
                v = v.hex()
            except:
                import binascii

                v = binascii.hexlify(v)

        ret[s] = v

    print(json.dumps(ret, indent=4))


if __name__ == "__main__":
    main()
```
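The `b` / `/bits` suffixes in the config string above are plain post-processing of the digest; as a quick standalone sanity check (not part of the plugin), this is what `sha512b/240` amounts to:

```python
# sketch: what "sha512b/240" in the config string above means --
# sha512, truncated to 240 bits (30 bytes), base64-encoded, padding stripped
import base64
import hashlib

digest = hashlib.sha512(b"hello").digest()
digest = digest[: 240 // 8]                                   # "/240" -> keep 240 bits
print(base64.b64encode(digest).decode("ascii").rstrip("="))   # "b" -> base64
```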
@@ -4,7 +4,8 @@ set -e
 
 # install dependencies for audio-*.py
 #
-# linux: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf
+# linux/alpine: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf cmake
+# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
 # win64: requires msys2-mingw64 environment
 # macos: requires macports
 #
bin/mtag/wget.py (new file, 85 lines):

```python
#!/usr/bin/env python3

"""
use copyparty as a file downloader by POSTing URLs as
application/x-www-form-urlencoded (for example using the
message/pager function on the website)

example copyparty config to use this:
--urlform save,get -vsrv/wget:wget:rwmd,ed:c,e2ts,mtp=title=ebin,t300,ad,bin/mtag/wget.py

explained:
for realpath srv/wget (served at /wget) with read-write-modify-delete for ed,
enable file analysis on upload (e2ts),
use mtp plugin "bin/mtag/wget.py" to provide metadata tag "title",
do this on all uploads with the file extension "bin",
t300 = 300 seconds timeout for each dwonload,
ad = parse file regardless if FFmpeg thinks it is audio or not

PS: this requires e2ts to be functional,
meaning you need to do at least one of these:
* apt install ffmpeg
* pip3 install mutagen
"""


import os
import sys
import subprocess as sp
from urllib.parse import unquote_to_bytes as unquote


def main():
    fp = os.path.abspath(sys.argv[1])
    fdir = os.path.dirname(fp)
    fname = os.path.basename(fp)
    if not fname.startswith("put-") or not fname.endswith(".bin"):
        raise Exception("not a post file")

    buf = b""
    with open(fp, "rb") as f:
        while True:
            b = f.read(4096)
            buf += b
            if len(buf) > 4096:
                raise Exception("too big")

            if not b:
                break

    if not buf:
        raise Exception("file is empty")

    buf = unquote(buf.replace(b"+", b" "))
    url = buf.decode("utf-8")

    if not url.startswith("msg="):
        raise Exception("does not start with msg=")

    url = url[4:]
    if "://" not in url:
        url = "https://" + url

    os.chdir(fdir)

    name = url.split("?")[0].split("/")[-1]
    tfn = "-- DOWNLOADING " + name
    open(tfn, "wb").close()

    cmd = ["wget", "--trust-server-names", "--", url]

    try:
        sp.check_call(cmd)

        # OPTIONAL:
        # on success, delete the .bin file which contains the URL
        os.unlink(fp)
    except:
        open("-- FAILED TO DONWLOAD " + name, "wb").close()

    os.unlink(tfn)
    print(url)


if __name__ == "__main__":
    main()
```
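For reference, the "message" POST that feeds this plugin is just an urlencoded form field named `msg`; a sketch of submitting one without the web UI, assuming the `--urlform save,get` setup from the docstring above (volume path, password and target URL are placeholders):

```python
# sketch: submit a URL to the /wget volume the same way the web UI's
# message/pager function would (x-www-form-urlencoded field "msg")
import requests

r = requests.post(
    "http://127.0.0.1:3923/wget/",
    data={"msg": "https://example.com/some-file.bin"},
    cookies={"cppwd": "wark"},
)
print(r.status_code)
```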
@@ -17,7 +17,7 @@ it's probably best to use this through a config file; see res/yt-ipr.conf
 
 but if you want to use plain arguments instead then:
 -v srv/ytm:ytm:w:rw,ed
-:c,e2ts:c,e2dsa
+:c,e2ts,e2dsa
 :c,sz=16k-1m:c,maxn=10,300:c,rotf=%Y-%m/%d-%H
 :c,mtp=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires=bin/mtag/yt-ipr.py
 :c,mte=yt-id,yt-title,yt-author,yt-channel,yt-views,yt-private,yt-manifest,yt-expires
bin/up2k.py (new executable file, 723 lines), which begins:

```python
#!/usr/bin/env python3
from __future__ import print_function, unicode_literals

"""
up2k.py: upload to copyparty
2021-10-04, v0.7, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py

- dependencies: requests
- supports python 2.6, 2.7, and 3.3 through 3.10

- almost zero error-handling
- but if something breaks just try again and it'll autoresume
"""
```
|
import os
|
||||||
|
import sys
|
||||||
|
import stat
|
||||||
|
import math
|
||||||
|
import time
|
||||||
|
import atexit
|
||||||
|
import signal
|
||||||
|
import base64
|
||||||
|
import hashlib
|
||||||
|
import argparse
|
||||||
|
import platform
|
||||||
|
import threading
|
||||||
|
import requests
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
|
||||||
|
# from copyparty/__init__.py
|
||||||
|
PY2 = sys.version_info[0] == 2
|
||||||
|
if PY2:
|
||||||
|
from Queue import Queue
|
||||||
|
|
||||||
|
sys.dont_write_bytecode = True
|
||||||
|
bytes = str
|
||||||
|
else:
|
||||||
|
from queue import Queue
|
||||||
|
|
||||||
|
unicode = str
|
||||||
|
|
||||||
|
VT100 = platform.system() != "Windows"
|
||||||
|
|
||||||
|
|
||||||
|
req_ses = requests.Session()
|
||||||
|
|
||||||
|
|
||||||
|
class File(object):
|
||||||
|
"""an up2k upload task; represents a single file"""
|
||||||
|
|
||||||
|
def __init__(self, top, rel, size, lmod):
|
||||||
|
self.top = top # type: bytes
|
||||||
|
self.rel = rel.replace(b"\\", b"/") # type: bytes
|
||||||
|
self.size = size # type: int
|
||||||
|
self.lmod = lmod # type: float
|
||||||
|
|
||||||
|
self.abs = os.path.join(top, rel) # type: bytes
|
||||||
|
self.name = self.rel.split(b"/")[-1].decode("utf-8", "replace") # type: str
|
||||||
|
|
||||||
|
# set by get_hashlist
|
||||||
|
self.cids = [] # type: list[tuple[str, int, int]] # [ hash, ofs, sz ]
|
||||||
|
self.kchunks = {} # type: dict[str, tuple[int, int]] # hash: [ ofs, sz ]
|
||||||
|
|
||||||
|
# set by handshake
|
||||||
|
self.ucids = [] # type: list[str] # chunks which need to be uploaded
|
||||||
|
self.wark = None # type: str
|
||||||
|
self.url = None # type: str
|
||||||
|
|
||||||
|
# set by upload
|
||||||
|
self.up_b = 0 # type: int
|
||||||
|
self.up_c = 0 # type: int
|
||||||
|
|
||||||
|
# m = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
||||||
|
# eprint(m.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
||||||
|
|
||||||
|
|
||||||
|
class FileSlice(object):
|
||||||
|
"""file-like object providing a fixed window into a file"""
|
||||||
|
|
||||||
|
def __init__(self, file, cid):
|
||||||
|
# type: (File, str) -> FileSlice
|
||||||
|
|
||||||
|
self.car, self.len = file.kchunks[cid]
|
||||||
|
self.cdr = self.car + self.len
|
||||||
|
self.ofs = 0 # type: int
|
||||||
|
self.f = open(file.abs, "rb", 512 * 1024)
|
||||||
|
self.f.seek(self.car)
|
||||||
|
|
||||||
|
# https://stackoverflow.com/questions/4359495/what-is-exactly-a-file-like-object-in-python
|
||||||
|
# IOBase, RawIOBase, BufferedIOBase
|
||||||
|
funs = "close closed __enter__ __exit__ __iter__ isatty __next__ readable seekable writable"
|
||||||
|
try:
|
||||||
|
for fun in funs.split():
|
||||||
|
setattr(self, fun, getattr(self.f, fun))
|
||||||
|
except:
|
||||||
|
pass # py27 probably
|
||||||
|
|
||||||
|
def tell(self):
|
||||||
|
return self.ofs
|
||||||
|
|
||||||
|
def seek(self, ofs, wh=0):
|
||||||
|
if wh == 1:
|
||||||
|
ofs = self.ofs + ofs
|
||||||
|
elif wh == 2:
|
||||||
|
ofs = self.len + ofs # provided ofs is negative
|
||||||
|
|
||||||
|
if ofs < 0:
|
||||||
|
ofs = 0
|
||||||
|
elif ofs >= self.len:
|
||||||
|
ofs = self.len - 1
|
||||||
|
|
||||||
|
self.ofs = ofs
|
||||||
|
self.f.seek(self.car + ofs)
|
||||||
|
|
||||||
|
def read(self, sz):
|
||||||
|
sz = min(sz, self.len - self.ofs)
|
||||||
|
ret = self.f.read(sz)
|
||||||
|
self.ofs += len(ret)
|
||||||
|
return ret
|
||||||
|
|
||||||
|
|
||||||
|
def eprint(*a, **ka):
|
||||||
|
ka["file"] = sys.stderr
|
||||||
|
ka["end"] = ""
|
||||||
|
if not PY2:
|
||||||
|
ka["flush"] = True
|
||||||
|
|
||||||
|
print(*a, **ka)
|
||||||
|
if PY2:
|
||||||
|
sys.stderr.flush()
|
||||||
|
|
||||||
|
|
||||||
|
def termsize():
|
||||||
|
import os
|
||||||
|
|
||||||
|
env = os.environ
|
||||||
|
|
||||||
|
def ioctl_GWINSZ(fd):
|
||||||
|
try:
|
||||||
|
import fcntl, termios, struct, os
|
||||||
|
|
||||||
|
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
|
||||||
|
except:
|
||||||
|
return
|
||||||
|
return cr
|
||||||
|
|
||||||
|
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
|
||||||
|
if not cr:
|
||||||
|
try:
|
||||||
|
fd = os.open(os.ctermid(), os.O_RDONLY)
|
||||||
|
cr = ioctl_GWINSZ(fd)
|
||||||
|
os.close(fd)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
if not cr:
|
||||||
|
try:
|
||||||
|
cr = (env["LINES"], env["COLUMNS"])
|
||||||
|
except:
|
||||||
|
cr = (25, 80)
|
||||||
|
return int(cr[1]), int(cr[0])
|
||||||
|
|
||||||
|
|
||||||
|
class CTermsize(object):
|
||||||
|
def __init__(self):
|
||||||
|
self.ev = False
|
||||||
|
self.margin = None
|
||||||
|
self.g = None
|
||||||
|
self.w, self.h = termsize()
|
||||||
|
|
||||||
|
try:
|
||||||
|
signal.signal(signal.SIGWINCH, self.ev_sig)
|
||||||
|
except:
|
||||||
|
return
|
||||||
|
|
||||||
|
thr = threading.Thread(target=self.worker)
|
||||||
|
thr.daemon = True
|
||||||
|
thr.start()
|
||||||
|
|
||||||
|
def worker(self):
|
||||||
|
while True:
|
||||||
|
time.sleep(0.5)
|
||||||
|
if not self.ev:
|
||||||
|
continue
|
||||||
|
|
||||||
|
self.ev = False
|
||||||
|
self.w, self.h = termsize()
|
||||||
|
|
||||||
|
if self.margin is not None:
|
||||||
|
self.scroll_region(self.margin)
|
||||||
|
|
||||||
|
def ev_sig(self, *a, **ka):
|
||||||
|
self.ev = True
|
||||||
|
|
||||||
|
def scroll_region(self, margin):
|
||||||
|
self.margin = margin
|
||||||
|
if margin is None:
|
||||||
|
self.g = None
|
||||||
|
eprint("\033[s\033[r\033[u")
|
||||||
|
else:
|
||||||
|
self.g = 1 + self.h - margin
|
||||||
|
m = "{0}\033[{1}A".format("\n" * margin, margin)
|
||||||
|
eprint("{0}\033[s\033[1;{1}r\033[u".format(m, self.g - 1))
|
||||||
|
|
||||||
|
|
||||||
|
ss = CTermsize()
|
||||||
|
|
||||||
|
|
||||||
|
def statdir(top):
|
||||||
|
"""non-recursive listing of directory contents, along with stat() info"""
|
||||||
|
if hasattr(os, "scandir"):
|
||||||
|
with os.scandir(top) as dh:
|
||||||
|
for fh in dh:
|
||||||
|
yield [os.path.join(top, fh.name), fh.stat()]
|
||||||
|
else:
|
||||||
|
for name in os.listdir(top):
|
||||||
|
abspath = os.path.join(top, name)
|
||||||
|
yield [abspath, os.stat(abspath)]
|
||||||
|
|
||||||
|
|
||||||
|
def walkdir(top):
|
||||||
|
"""recursive statdir"""
|
||||||
|
for ap, inf in sorted(statdir(top)):
|
||||||
|
if stat.S_ISDIR(inf.st_mode):
|
||||||
|
for x in walkdir(ap):
|
||||||
|
yield x
|
||||||
|
else:
|
||||||
|
yield ap, inf
|
||||||
|
|
||||||
|
|
||||||
|
def walkdirs(tops):
|
||||||
|
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
|
||||||
|
for top in tops:
|
||||||
|
if os.path.isdir(top):
|
||||||
|
for ap, inf in walkdir(top):
|
||||||
|
yield top, ap[len(top) + 1 :], inf
|
||||||
|
else:
|
||||||
|
sep = "{0}".format(os.sep).encode("ascii")
|
||||||
|
d, n = top.rsplit(sep, 1)
|
||||||
|
yield d, n, os.stat(top)
|
||||||
|
|
||||||
|
|
||||||
|
# from copyparty/util.py
|
||||||
|
def humansize(sz, terse=False):
|
||||||
|
"""picks a sensible unit for the given extent"""
|
||||||
|
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
|
||||||
|
if sz < 1024:
|
||||||
|
break
|
||||||
|
|
||||||
|
sz /= 1024.0
|
||||||
|
|
||||||
|
ret = " ".join([str(sz)[:4].rstrip("."), unit])
|
||||||
|
|
||||||
|
if not terse:
|
||||||
|
return ret
|
||||||
|
|
||||||
|
return ret.replace("iB", "").replace(" ", "")
|
||||||
|
|
||||||
|
|
||||||
|
```python
# from copyparty/up2k.py
def up2k_chunksize(filesize):
    """gives The correct chunksize for up2k hashing"""
    chunksize = 1024 * 1024
    stepsize = 512 * 1024
    while True:
        for mul in [1, 2]:
            nchunks = math.ceil(filesize * 1.0 / chunksize)
            if nchunks <= 256 or chunksize >= 32 * 1024 * 1024:
                return chunksize

            chunksize += stepsize
            stepsize *= mul
```
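In other words, chunks start at 1 MiB and the size grows with an accelerating step until the file fits in 256 chunks or the 32 MiB ceiling is reached. A quick check of what that gives for a few sizes (a sketch that assumes the `up2k_chunksize` function above is in scope):

```python
# sketch: chunk sizes chosen by up2k_chunksize for a few file sizes;
# small files keep 1 MiB chunks, huge files top out at 32 MiB chunks
for size in [100 * 1024 ** 2, 1024 ** 3, 100 * 1024 ** 3, 1024 ** 4]:
    cs = up2k_chunksize(size)
    print("{:>14} bytes -> {:>9} byte chunks ({} chunks)".format(size, cs, -(-size // cs)))
```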
|
|
||||||
|
|
||||||
|
# mostly from copyparty/up2k.py
|
||||||
|
def get_hashlist(file, pcb):
|
||||||
|
# type: (File, any) -> None
|
||||||
|
"""generates the up2k hashlist from file contents, inserts it into `file`"""
|
||||||
|
|
||||||
|
chunk_sz = up2k_chunksize(file.size)
|
||||||
|
file_rem = file.size
|
||||||
|
file_ofs = 0
|
||||||
|
ret = []
|
||||||
|
with open(file.abs, "rb", 512 * 1024) as f:
|
||||||
|
while file_rem > 0:
|
||||||
|
hashobj = hashlib.sha512()
|
||||||
|
chunk_sz = chunk_rem = min(chunk_sz, file_rem)
|
||||||
|
while chunk_rem > 0:
|
||||||
|
buf = f.read(min(chunk_rem, 64 * 1024))
|
||||||
|
if not buf:
|
||||||
|
raise Exception("EOF at " + str(f.tell()))
|
||||||
|
|
||||||
|
hashobj.update(buf)
|
||||||
|
chunk_rem -= len(buf)
|
||||||
|
|
||||||
|
digest = hashobj.digest()[:33]
|
||||||
|
digest = base64.urlsafe_b64encode(digest).decode("utf-8")
|
||||||
|
|
||||||
|
ret.append([digest, file_ofs, chunk_sz])
|
||||||
|
file_ofs += chunk_sz
|
||||||
|
file_rem -= chunk_sz
|
||||||
|
|
||||||
|
if pcb:
|
||||||
|
pcb(file, file_ofs)
|
||||||
|
|
||||||
|
file.cids = ret
|
||||||
|
file.kchunks = {}
|
||||||
|
for k, v1, v2 in ret:
|
||||||
|
file.kchunks[k] = [v1, v2]
|
||||||
|
|
||||||
|
|
||||||
|
def handshake(req_ses, url, file, pw, search):
|
||||||
|
# type: (requests.Session, str, File, any, bool) -> List[str]
|
||||||
|
"""
|
||||||
|
performs a handshake with the server; reply is:
|
||||||
|
if search, a list of search results
|
||||||
|
otherwise, a list of chunks to upload
|
||||||
|
"""
|
||||||
|
|
||||||
|
req = {
|
||||||
|
"hash": [x[0] for x in file.cids],
|
||||||
|
"name": file.name,
|
||||||
|
"lmod": file.lmod,
|
||||||
|
"size": file.size,
|
||||||
|
}
|
||||||
|
if search:
|
||||||
|
req["srch"] = 1
|
||||||
|
|
||||||
|
headers = {"Content-Type": "text/plain"} # wtf ed
|
||||||
|
if pw:
|
||||||
|
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||||
|
|
||||||
|
if file.url:
|
||||||
|
url = file.url
|
||||||
|
elif b"/" in file.rel:
|
||||||
|
url += file.rel.rsplit(b"/", 1)[0].decode("utf-8", "replace")
|
||||||
|
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
r = req_ses.post(url, headers=headers, json=req)
|
||||||
|
break
|
||||||
|
except:
|
||||||
|
eprint("handshake failed, retry...\n")
|
||||||
|
time.sleep(1)
|
||||||
|
|
||||||
|
try:
|
||||||
|
r = r.json()
|
||||||
|
except:
|
||||||
|
raise Exception(r.text)
|
||||||
|
|
||||||
|
if search:
|
||||||
|
return r["hits"]
|
||||||
|
|
||||||
|
try:
|
||||||
|
pre, url = url.split("://")
|
||||||
|
pre += "://"
|
||||||
|
except:
|
||||||
|
pre = ""
|
||||||
|
|
||||||
|
file.url = pre + url.split("/")[0] + r["purl"]
|
||||||
|
file.name = r["name"]
|
||||||
|
file.wark = r["wark"]
|
||||||
|
|
||||||
|
return r["hash"]
|
||||||
|
|
||||||
|
|
||||||
|
def upload(req_ses, file, cid, pw):
|
||||||
|
# type: (requests.Session, File, str, any) -> None
|
||||||
|
"""upload one specific chunk, `cid` (a chunk-hash)"""
|
||||||
|
|
||||||
|
headers = {
|
||||||
|
"X-Up2k-Hash": cid,
|
||||||
|
"X-Up2k-Wark": file.wark,
|
||||||
|
"Content-Type": "application/octet-stream",
|
||||||
|
}
|
||||||
|
if pw:
|
||||||
|
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||||
|
|
||||||
|
f = FileSlice(file, cid)
|
||||||
|
try:
|
||||||
|
r = req_ses.post(file.url, headers=headers, data=f)
|
||||||
|
if not r:
|
||||||
|
raise Exception(repr(r))
|
||||||
|
|
||||||
|
_ = r.content
|
||||||
|
finally:
|
||||||
|
f.f.close()
|
||||||
|
|
||||||
|
|
||||||
|
class Daemon(threading.Thread):
|
||||||
|
def __init__(self, *a, **ka):
|
||||||
|
threading.Thread.__init__(self, *a, **ka)
|
||||||
|
self.daemon = True
|
||||||
|
|
||||||
|
|
||||||
|
class Ctl(object):
|
||||||
|
"""
|
||||||
|
this will be the coordinator which runs everything in parallel
|
||||||
|
(hashing, handshakes, uploads) but right now it's p dumb
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, ar):
|
||||||
|
self.ar = ar
|
||||||
|
ar.files = [
|
||||||
|
os.path.abspath(os.path.realpath(x.encode("utf-8"))) for x in ar.files
|
||||||
|
]
|
||||||
|
ar.url = ar.url.rstrip("/") + "/"
|
||||||
|
if "://" not in ar.url:
|
||||||
|
ar.url = "http://" + ar.url
|
||||||
|
|
||||||
|
eprint("\nscanning {0} locations\n".format(len(ar.files)))
|
||||||
|
|
||||||
|
nfiles = 0
|
||||||
|
nbytes = 0
|
||||||
|
for _, _, inf in walkdirs(ar.files):
|
||||||
|
nfiles += 1
|
||||||
|
nbytes += inf.st_size
|
||||||
|
|
||||||
|
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
|
||||||
|
self.nfiles = nfiles
|
||||||
|
self.nbytes = nbytes
|
||||||
|
|
||||||
|
if ar.td:
|
||||||
|
req_ses.verify = False
|
||||||
|
if ar.te:
|
||||||
|
req_ses.verify = ar.te
|
||||||
|
|
||||||
|
self.filegen = walkdirs(ar.files)
|
||||||
|
if ar.safe:
|
||||||
|
self.safe()
|
||||||
|
else:
|
||||||
|
self.fancy()
|
||||||
|
|
||||||
|
def safe(self):
|
||||||
|
"""minimal basic slow boring fallback codepath"""
|
||||||
|
search = self.ar.s
|
||||||
|
for nf, (top, rel, inf) in enumerate(self.filegen):
|
||||||
|
file = File(top, rel, inf.st_size, inf.st_mtime)
|
||||||
|
upath = file.abs.decode("utf-8", "replace")
|
||||||
|
|
||||||
|
print("{0} {1}\n hash...".format(self.nfiles - nf, upath))
|
||||||
|
get_hashlist(file, None)
|
||||||
|
|
||||||
|
while True:
|
||||||
|
print(" hs...")
|
||||||
|
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
|
||||||
|
if search:
|
||||||
|
if hs:
|
||||||
|
for hit in hs:
|
||||||
|
print(" found: {0}{1}".format(self.ar.url, hit["rp"]))
|
||||||
|
else:
|
||||||
|
print(" NOT found")
|
||||||
|
break
|
||||||
|
|
||||||
|
file.ucids = hs
|
||||||
|
if not hs:
|
||||||
|
break
|
||||||
|
|
            print("{0} {1}".format(self.nfiles - nf, upath))
            ncs = len(hs)
            for nc, cid in enumerate(hs):
                print(" {0} up {1}".format(ncs - nc, cid))
                upload(req_ses, file, cid, self.ar.a)

            print(" ok!")

    def fancy(self):
        self.hash_f = 0
        self.hash_c = 0
        self.hash_b = 0
        self.up_f = 0
        self.up_c = 0
        self.up_b = 0
        self.up_br = 0
        self.hasher_busy = 1
        self.handshaker_busy = 0
        self.uploader_busy = 0

        self.t0 = time.time()
        self.t0_up = None
        self.spd = None

        self.mutex = threading.Lock()
        self.q_handshake = Queue()  # type: Queue[File]
        self.q_recheck = Queue()  # type: Queue[File]  # partial upload exists [...]
        self.q_upload = Queue()  # type: Queue[tuple[File, str]]

        self.st_hash = [None, "(idle, starting...)"]  # type: tuple[File, int]
        self.st_up = [None, "(idle, starting...)"]  # type: tuple[File, int]
        if VT100:
            atexit.register(self.cleanup_vt100)
            ss.scroll_region(3)

        Daemon(target=self.hasher).start()
        for _ in range(self.ar.j):
            Daemon(target=self.handshaker).start()
            Daemon(target=self.uploader).start()

        idles = 0
        while idles < 3:
            time.sleep(0.07)
            with self.mutex:
                if (
                    self.q_handshake.empty()
                    and self.q_upload.empty()
                    and not self.hasher_busy
                    and not self.handshaker_busy
                    and not self.uploader_busy
                ):
                    idles += 1
                else:
                    idles = 0

            if VT100:
                maxlen = ss.w - len(str(self.nfiles)) - 14
                txt = "\033[s\033[{0}H".format(ss.g)
                for y, k, st, f in [
                    [0, "hash", self.st_hash, self.hash_f],
                    [1, "send", self.st_up, self.up_f],
                ]:
                    txt += "\033[{0}H{1}:".format(ss.g + y, k)
                    file, arg = st
                    if not file:
                        txt += " {0}\033[K".format(arg)
                    else:
                        if y:
                            p = 100 * file.up_b / file.size
                        else:
                            p = 100 * arg / file.size

                        name = file.abs.decode("utf-8", "replace")[-maxlen:]
                        if "/" in name:
                            name = "\033[36m{0}\033[0m/{1}".format(*name.rsplit("/", 1))

                        m = "{0:6.1f}% {1} {2}\033[K"
                        txt += m.format(p, self.nfiles - f, name)

                txt += "\033[{0}H ".format(ss.g + 2)
            else:
                txt = " "

            if not self.up_br:
                spd = self.hash_b / (time.time() - self.t0)
                eta = (self.nbytes - self.hash_b) / (spd + 1)
            else:
                spd = self.up_br / (time.time() - self.t0_up)
                spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
                eta = (self.nbytes - self.up_b) / (spd + 1)

            spd = humansize(spd)
            eta = str(datetime.timedelta(seconds=int(eta)))
            left = humansize(self.nbytes - self.up_b)
            tail = "\033[K\033[u" if VT100 else "\r"

            m = "eta: {0} @ {1}/s, {2} left".format(eta, spd, left)
            eprint(txt + "\033]0;{0}\033\\\r{1}{2}".format(m, m, tail))

    def cleanup_vt100(self):
        ss.scroll_region(None)
        eprint("\033[J\033]0;\033\\")

    def cb_hasher(self, file, ofs):
        self.st_hash = [file, ofs]

    def hasher(self):
        for top, rel, inf in self.filegen:
            file = File(top, rel, inf.st_size, inf.st_mtime)
            while True:
                with self.mutex:
                    if (
                        self.hash_b - self.up_b < 1024 * 1024 * 128
                        and self.hash_c - self.up_c < 64
                        and (
                            not self.ar.nh
                            or (
                                self.q_upload.empty()
                                and self.q_handshake.empty()
                                and not self.uploader_busy
                            )
                        )
                    ):
                        break

                time.sleep(0.05)

            get_hashlist(file, self.cb_hasher)
            with self.mutex:
                self.hash_f += 1
                self.hash_c += len(file.cids)
                self.hash_b += file.size

            self.q_handshake.put(file)

        self.hasher_busy = 0
        self.st_hash = [None, "(finished)"]

    def handshaker(self):
        search = self.ar.s
        q = self.q_handshake
        while True:
            file = q.get()
            if not file:
                if q == self.q_handshake:
                    q = self.q_recheck
                    q.put(None)
                    continue

                self.q_upload.put(None)
                break

            with self.mutex:
                self.handshaker_busy += 1

            upath = file.abs.decode("utf-8", "replace")

            try:
                hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
            except Exception as ex:
                if q == self.q_handshake and "<pre>partial upload exists" in str(ex):
                    self.q_recheck.put(file)
                    hs = []
                else:
                    raise

            if search:
                if hs:
                    for hit in hs:
                        m = "found: {0}\n  {1}{2}\n"
                        print(m.format(upath, self.ar.url, hit["rp"]), end="")
                else:
                    print("NOT found: {0}\n".format(upath), end="")

                with self.mutex:
                    self.up_f += 1
                    self.up_c += len(file.cids)
                    self.up_b += file.size
                    self.handshaker_busy -= 1

                continue

            with self.mutex:
                if not hs:
                    # all chunks done
                    self.up_f += 1
                    self.up_c += len(file.cids) - file.up_c
                    self.up_b += file.size - file.up_b

                if hs and file.up_c:
                    # some chunks failed
                    self.up_c -= len(hs)
                    file.up_c -= len(hs)
                    for cid in hs:
                        sz = file.kchunks[cid][1]
                        self.up_b -= sz
                        file.up_b -= sz

                file.ucids = hs
                self.handshaker_busy -= 1

            if not hs:
                print("uploaded {0}".format(upath))
            for cid in hs:
                self.q_upload.put([file, cid])

    def uploader(self):
        while True:
            task = self.q_upload.get()
            if not task:
                self.st_up = [None, "(finished)"]
                break

            with self.mutex:
                self.uploader_busy += 1
                self.t0_up = self.t0_up or time.time()

            file, cid = task
            try:
                upload(req_ses, file, cid, self.ar.a)
            except:
                eprint("upload failed, retry...\n")
                pass  # handshake will fix it

            with self.mutex:
                sz = file.kchunks[cid][1]
                file.ucids = [x for x in file.ucids if x != cid]
                if not file.ucids:
                    self.q_handshake.put(file)

                self.st_up = [file, cid]
                file.up_b += sz
                self.up_b += sz
                self.up_br += sz
                file.up_c += 1
                self.up_c += 1
                self.uploader_busy -= 1


def main():
    time.strptime("19970815", "%Y%m%d")  # python#7980
    if not VT100:
        os.system("rem")  # enables colors

    # fmt: off
    ap = app = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    ap.add_argument("url", type=unicode, help="server url, including destination folder")
    ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
    ap.add_argument("-a", metavar="PASSWORD", help="password")
    ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
    ap = app.add_argument_group("performance tweaks")
    ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
    ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
    ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
    ap = app.add_argument_group("tls")
    ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
    ap.add_argument("-td", action="store_true", help="disable certificate check")
    # fmt: on

    Ctl(app.parse_args())


if __name__ == "__main__":
    main()
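A quick sanity check of the argparse definitions above: the uploader takes the destination URL plus one or more local paths, with -a for the password, -j for parallel connections, and -s for file-search instead of uploading. The URL, password and paths below are made-up placeholders, not part of this changeset.

```
python3 up2k.py -a hunter2 -j 8 -nh https://example.com/inc/ ./photos ./notes.txt
python3 up2k.py -a hunter2 -s https://example.com/music/ ./song.opus
```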
24 bin/up2k.sh (Executable file → Normal file)
@@ -8,7 +8,7 @@ set -e
 ##
 ## config

-datalen=$((2*1024*1024*1024))
+datalen=$((128*1024*1024))
 target=127.0.0.1
 posturl=/inc
 passwd=wark
@@ -37,10 +37,10 @@ gendata() {
 # pipe a chunk, get the base64 checksum
 gethash() {
     printf $(
-        sha512sum | cut -c-64 |
+        sha512sum | cut -c-66 |
         sed -r 's/ .*//;s/(..)/\\x\1/g'
     ) |
-    base64 -w0 | cut -c-43 |
+    base64 -w0 | cut -c-44 |
     tr '+/' '-_'
 }

@@ -123,7 +123,7 @@ printf '\033[36m'
 {
     {
         cat <<EOF
-POST $posturl/handshake.php HTTP/1.1
+POST $posturl/ HTTP/1.1
 Connection: Close
 Cookie: cppwd=$passwd
 Content-Type: text/plain;charset=UTF-8
@@ -145,14 +145,16 @@ printf '\033[0m\nwark: %s\n' $wark
 ##
 ## wait for signal to continue

-w8=/dev/shm/$salt.w8
-touch $w8
-
-echo "ready; rm -f $w8"
-
-while [ -e $w8 ]; do
-    sleep 0.2
-done
+true || {
+    w8=/dev/shm/$salt.w8
+    touch $w8
+
+    echo "ready; rm -f $w8"
+
+    while [ -e $w8 ]; do
+        sleep 0.2
+    done
+}


 ##
@@ -175,7 +177,7 @@ while [ $remains -gt 0 ]; do

 {
     cat <<EOF
-POST $posturl/chunkpit.php HTTP/1.1
+POST $posturl/ HTTP/1.1
 Connection: Keep-Alive
 Cookie: cppwd=$passwd
 Content-Type: application/octet-stream
@@ -1,13 +1,14 @@
 #!/bin/bash
 set -e

-# ca-name and server-name
+# ca-name and server-fqdn
 ca_name="$1"
-srv_name="$2"
+srv_fqdn="$2"

-[ -z "$srv_name" ] && {
+[ -z "$srv_fqdn" ] && {
     echo "need arg 1: ca name"
-    echo "need arg 2: server name"
+    echo "need arg 2: server fqdn"
+    echo "optional arg 3: if set, write cert into copyparty cfg"
     exit 1
 }

@@ -31,15 +32,15 @@ EOF
 gen_srv() {
     (tee /dev/stderr <<EOF
 {"key": {"algo":"rsa", "size":4096},
-"names": [{"O":"$ca_name - $srv_name"}]}
+"names": [{"O":"$ca_name - $srv_fqdn"}]}
 EOF
     )|
     cfssl gencert -ca ca.pem -ca-key ca.key \
-        -profile=www -hostname="$srv_name.$ca_name" - |
-    cfssljson -bare "$srv_name"
+        -profile=www -hostname="$srv_fqdn" - |
+    cfssljson -bare "$srv_fqdn"

-    mv "$srv_name-key.pem" "$srv_name.key"
-    rm "$srv_name.csr"
+    mv "$srv_fqdn-key.pem" "$srv_fqdn.key"
+    rm "$srv_fqdn.csr"
 }


@@ -57,13 +58,13 @@ show() {
     awk '!o; {o=0} /[0-9a-f:]{16}/{o=1}'
 }
 show ca.pem
-show "$srv_name.pem"
+show "$srv_fqdn.pem"


 # write cert into copyparty config
 [ -z "$3" ] || {
     mkdir -p ~/.config/copyparty
-    cat "$srv_name".{key,pem} ca.pem >~/.config/copyparty/cert.pem
+    cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem
 }

copyparty/__main__.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # coding: utf-8
 from __future__ import print_function, unicode_literals

@@ -104,7 +104,7 @@ def ensure_cert():
     cert_insec = os.path.join(E.mod, "res/insecure.pem")
     cert_cfg = os.path.join(E.cfg, "cert.pem")
     if not os.path.exists(cert_cfg):
-        shutil.copy2(cert_insec, cert_cfg)
+        shutil.copy(cert_insec, cert_cfg)

     try:
         if filecmp.cmp(cert_cfg, cert_insec):
@@ -203,6 +203,11 @@ def run_argparse(argv, formatter):
         description="http file sharing hub v{} ({})".format(S_VERSION, S_BUILD_DT),
     )

+    try:
+        fk_salt = unicode(os.path.getmtime(os.path.join(E.cfg, "cert.pem")))
+    except:
+        fk_salt = "hunter2"
+
     sects = [
         [
             "accounts",
@@ -211,14 +216,15 @@ def run_argparse(argv, formatter):
             """
            -a takes username:password,
            -v takes src:dst:perm1:perm2:permN:volflag1:volflag2:volflagN:...
-           where "perm" is "accesslevels,username1,username2,..."
+           where "perm" is "permissions,username1,username2,..."
            and "volflag" is config flags to set on this volume

-           list of accesslevels:
+           list of permissions:
              "r" (read): list folder contents, download files
              "w" (write): upload files; need "r" to see the uploads
              "m" (move): move files and folders; need "w" at destination
              "d" (delete): permanently delete files and folders
+             "g" (get): download files, but cannot see folder contents

            too many volflags to list here, see the other sections

@@ -279,6 +285,10 @@ def run_argparse(argv, formatter):
            \033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
              generate ".bpm" tags from uploads (f = overwrite tags)
            \033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
+
+           \033[0mothers:
+           \033[36mfk=8\033[35m generates per-file accesskeys,
+             which will then be required at the "g" permission
            \033[0m"""
         ),
     ],
@@ -334,6 +344,9 @@ def run_argparse(argv, formatter):
     ap2.add_argument("--dotpart", action="store_true", help="dotfile incomplete uploads")
     ap2.add_argument("--sparse", metavar="MiB", type=int, default=4, help="up2k min.size threshold (mswin-only)")
     ap2.add_argument("--unpost", metavar="SEC", type=int, default=3600*12, help="grace period where uploads can be deleted by the uploader, even without delete permissions; 0=disabled")
+    ap2.add_argument("--no-fpool", action="store_true", help="disable file-handle pooling -- instead, repeatedly close and reopen files during upload")
+    ap2.add_argument("--use-fpool", action="store_true", help="force file-handle pooling, even if copyparty thinks you're better off without")
+    ap2.add_argument("--no-symlink", action="store_true", help="duplicate file contents instead")

     ap2 = ap.add_argument_group('network options')
     ap2.add_argument("-i", metavar="IP", type=u, default="0.0.0.0", help="ip to bind (comma-sep.)")
@@ -360,10 +373,12 @@ def run_argparse(argv, formatter):
     ap2 = ap.add_argument_group('safety options')
     ap2.add_argument("--ls", metavar="U[,V[,F]]", type=u, help="scan all volumes; arguments USER,VOL,FLAGS; example [**,*,ln,p,r]")
     ap2.add_argument("--salt", type=u, default="hunter2", help="up2k file-hash salt")
+    ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt")
     ap2.add_argument("--no-dot-mv", action="store_true", help="disallow moving dotfiles; makes it impossible to move folders containing dotfiles")
     ap2.add_argument("--no-dot-ren", action="store_true", help="disallow renaming dotfiles; makes it impossible to make something a dotfile")
     ap2.add_argument("--no-logues", action="store_true", help="disable rendering .prologue/.epilogue.html into directory listings")
     ap2.add_argument("--no-readme", action="store_true", help="disable rendering readme.md into directory listings")
+    ap2.add_argument("--vague-403", action="store_true", help="send 404 instead of 403 (security through ambiguity, very enterprise)")

     ap2 = ap.add_argument_group('logging options')
     ap2.add_argument("-q", action="store_true", help="quiet")
@@ -493,7 +508,7 @@ def main(argv=None):
             if re.match("c[^,]", opt):
                 mod = True
                 na.append("c," + opt[1:])
-            elif re.sub("^[rwmd]*", "", opt) and "," not in opt:
+            elif re.sub("^[rwmdg]*", "", opt) and "," not in opt:
                 mod = True
                 perm = opt[0]
                 if perm == "a":
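The new "g" permission and the "fk" volflag from the hunks above combine into per-file accesskey links. A hypothetical invocation is sketched here; the account name, paths and key length are illustrative assumptions, not taken from this changeset:

```
# user "ed" gets read-write, everyone else only "g" (get),
# and the volume generates 8-character per-file accesskeys
python3 -m copyparty -a ed:wark -v srv/pub:pub:rw,ed:g:c,fk=8
```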
copyparty/__version__.py
@@ -1,8 +1,8 @@
 # coding: utf-8

-VERSION = (1, 0, 0)
+VERSION = (1, 0, 9)
 CODENAME = "sufficient"
-BUILD_DT = (2021, 9, 7)
+BUILD_DT = (2021, 10, 9)

 S_VERSION = ".".join(map(str, VERSION))
 S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)
copyparty/authsrv.py
@@ -29,17 +29,18 @@ LEELOO_DALLAS = "leeloo_dallas"


 class AXS(object):
-    def __init__(self, uread=None, uwrite=None, umove=None, udel=None):
+    def __init__(self, uread=None, uwrite=None, umove=None, udel=None, uget=None):
         self.uread = {} if uread is None else {k: 1 for k in uread}
         self.uwrite = {} if uwrite is None else {k: 1 for k in uwrite}
         self.umove = {} if umove is None else {k: 1 for k in umove}
         self.udel = {} if udel is None else {k: 1 for k in udel}
+        self.uget = {} if uget is None else {k: 1 for k in uget}

     def __repr__(self):
         return "AXS({})".format(
             ", ".join(
                 "{}={!r}".format(k, self.__dict__[k])
-                for k in "uread uwrite umove udel".split()
+                for k in "uread uwrite umove udel uget".split()
             )
         )

@@ -215,6 +216,7 @@ class VFS(object):
             self.awrite = {}
             self.amove = {}
             self.adel = {}
+            self.aget = {}
         else:
             self.histpath = None
             self.all_vols = None
@@ -222,6 +224,7 @@ class VFS(object):
             self.awrite = None
             self.amove = None
             self.adel = None
+            self.aget = None

     def __repr__(self):
         return "VFS({})".format(
@@ -308,7 +311,7 @@ class VFS(object):

     def can_access(self, vpath, uname):
         # type: (str, str) -> tuple[bool, bool, bool, bool]
-        """can Read,Write,Move,Delete"""
+        """can Read,Write,Move,Delete,Get"""
         vn, _ = self._find(vpath)
         c = vn.axs
         return [
@@ -316,10 +319,20 @@ class VFS(object):
             uname in c.uwrite or "*" in c.uwrite,
             uname in c.umove or "*" in c.umove,
             uname in c.udel or "*" in c.udel,
+            uname in c.uget or "*" in c.uget,
         ]

-    def get(self, vpath, uname, will_read, will_write, will_move=False, will_del=False):
-        # type: (str, str, bool, bool, bool, bool) -> tuple[VFS, str]
+    def get(
+        self,
+        vpath,
+        uname,
+        will_read,
+        will_write,
+        will_move=False,
+        will_del=False,
+        will_get=False,
+    ):
+        # type: (str, str, bool, bool, bool, bool, bool) -> tuple[VFS, str]
         """returns [vfsnode,fs_remainder] if user has the requested permissions"""
         vn, rem = self._find(vpath)
         c = vn.axs
@@ -329,6 +342,7 @@ class VFS(object):
             [will_write, c.uwrite, "write"],
             [will_move, c.umove, "move"],
             [will_del, c.udel, "delete"],
+            [will_get, c.uget, "get"],
         ]:
             if req and (uname not in d and "*" not in d) and uname != LEELOO_DALLAS:
                 m = "you don't have {}-access for this location"
@@ -342,7 +356,7 @@ class VFS(object):
         if not dbv:
             return self, vrem

-        vrem = [self.vpath[len(dbv.vpath) + 1 :], vrem]
+        vrem = [self.vpath[len(dbv.vpath) :].lstrip("/"), vrem]
         vrem = "/".join([x for x in vrem if x])
         return dbv, vrem

@@ -368,7 +382,7 @@ class VFS(object):
         for name, vn2 in sorted(self.nodes.items()):
             ok = False
             axs = vn2.axs
-            axs = [axs.uread, axs.uwrite, axs.umove, axs.udel]
+            axs = [axs.uread, axs.uwrite, axs.umove, axs.udel, axs.uget]
             for pset in permsets:
                 ok = True
                 for req, lst in zip(pset, axs):
@@ -561,13 +575,21 @@ class AuthSrv(object):

     def _read_vol_str(self, lvl, uname, axs, flags):
         # type: (str, str, AXS, any) -> None
-        if lvl.strip("crwmd"):
+        if lvl.strip("crwmdg"):
             raise Exception("invalid volume flag: {},{}".format(lvl, uname))

         if lvl == "c":
-            cval = True
-            if "=" in uname:
+            try:
+                # volume flag with arguments, possibly with a preceding list of bools
                 uname, cval = uname.split("=", 1)
+            except:
+                # just one or more bools
+                cval = True
+
+            while "," in uname:
+                # one or more bools before the final flag; eat them
+                n1, uname = uname.split(",", 1)
+                self._read_volflag(flags, n1, True, False)

             self._read_volflag(flags, uname, cval, False)
             return
@@ -588,6 +610,9 @@ class AuthSrv(object):
         if "d" in lvl:
             axs.udel[un] = 1

+        if "g" in lvl:
+            axs.uget[un] = 1
+
     def _read_volflag(self, flags, name, value, is_list):
         if name not in ["mtp"]:
             flags[name] = value
@@ -625,7 +650,7 @@ class AuthSrv(object):

         if self.args.v:
             # list of src:dst:permset:permset:...
-            # permset is <rwmd>[,username][,username] or <c>,<flag>[=args]
+            # permset is <rwmdg>[,username][,username] or <c>,<flag>[=args]
             for v_str in self.args.v:
                 m = re_vol.match(v_str)
                 if not m:
@@ -692,20 +717,21 @@ class AuthSrv(object):
         vfs.all_vols = {}
         vfs.get_all_vols(vfs.all_vols)

-        for perm in "read write move del".split():
+        for perm in "read write move del get".split():
             axs_key = "u" + perm
             unames = ["*"] + list(acct.keys())
             umap = {x: [] for x in unames}
             for usr in unames:
                 for mp, vol in vfs.all_vols.items():
-                    if usr in getattr(vol.axs, axs_key):
+                    axs = getattr(vol.axs, axs_key)
+                    if usr in axs or "*" in axs:
                         umap[usr].append(mp)
             setattr(vfs, "a" + perm, umap)

         all_users = {}
         missing_users = {}
         for axs in daxs.values():
-            for d in [axs.uread, axs.uwrite, axs.umove, axs.udel]:
+            for d in [axs.uread, axs.uwrite, axs.umove, axs.udel, axs.uget]:
                 for usr in d.keys():
                     all_users[usr] = 1
                     if usr != "*" and usr not in acct:
@@ -816,6 +842,11 @@ class AuthSrv(object):
             if use:
                 vol.lim = lim

+        for vol in vfs.all_vols.values():
+            fk = vol.flags.get("fk")
+            if fk:
+                vol.flags["fk"] = int(fk) if fk is not True else 8
+
         for vol in vfs.all_vols.values():
             if "pk" in vol.flags and "gz" not in vol.flags and "xz" not in vol.flags:
                 vol.flags["gz"] = False  # def.pk
@@ -849,6 +880,10 @@ class AuthSrv(object):
             # default tag cfgs if unset
             if "mte" not in vol.flags:
                 vol.flags["mte"] = self.args.mte
+            elif vol.flags["mte"].startswith("+"):
+                vol.flags["mte"] = ",".join(
+                    x for x in [self.args.mte, vol.flags["mte"][1:]] if x
+                )
             if "mth" not in vol.flags:
                 vol.flags["mth"] = self.args.mth

@@ -930,6 +965,7 @@ class AuthSrv(object):
                 [" write", "uwrite"],
                 [" move", "umove"],
                 ["delete", "udel"],
+                [" get", "uget"],
             ]:
                 u = list(sorted(getattr(v.axs, attr).keys()))
                 u = ", ".join("\033[35meverybody\033[0m" if x == "*" else x for x in u)
@@ -997,10 +1033,10 @@ class AuthSrv(object):
                 raise Exception("volume not found: " + v)

         self.log({"users": users, "vols": vols, "flags": flags})
-        m = "/{}: read({}) write({}) move({}) del({})"
+        m = "/{}: read({}) write({}) move({}) del({}) get({})"
         for k, v in self.vfs.all_vols.items():
             vc = v.axs
-            self.log(m.format(k, vc.uread, vc.uwrite, vc.umove, vc.udel))
+            self.log(m.format(k, vc.uread, vc.uwrite, vc.umove, vc.udel, vc.uget))

         flag_v = "v" in flags
         flag_ln = "ln" in flags
@@ -1014,7 +1050,7 @@ class AuthSrv(object):
             for u in users:
                 self.log("checking /{} as {}".format(v, u))
                 try:
-                    vn, _ = self.vfs.get(v, u, True, False, False, False)
+                    vn, _ = self.vfs.get(v, u, True, False, False, False, False)
                 except:
                     continue

copyparty/httpcli.py
@@ -12,6 +12,7 @@ import string
 import socket
 import ctypes
 from datetime import datetime
+from operator import itemgetter
 import calendar

 try:
@@ -22,13 +23,12 @@ except:
 from .__init__ import E, PY2, WINDOWS, ANYWIN, unicode
 from .util import *  # noqa  # pylint: disable=unused-wildcard-import
 from .bos import bos
-from .authsrv import AuthSrv, Lim
+from .authsrv import AuthSrv
 from .szip import StreamZip
 from .star import StreamTar


 NO_CACHE = {"Cache-Control": "no-cache"}
-NO_STORE = {"Cache-Control": "no-store; max-age=0"}


 class HttpCli(object):
@@ -39,6 +39,7 @@ class HttpCli(object):
     def __init__(self, conn):
         self.t0 = time.time()
         self.conn = conn
+        self.mutex = conn.mutex
         self.s = conn.s  # type: socket
         self.sr = conn.sr  # type: Unrecv
         self.ip = conn.addr[0]
@@ -47,6 +48,7 @@ class HttpCli(object):
         self.asrv = conn.asrv  # type: AuthSrv
         self.ico = conn.ico
         self.thumbcli = conn.thumbcli
+        self.u2fh = conn.u2fh
         self.log_func = conn.log_func
         self.log_src = conn.log_src
         self.tls = hasattr(self.s, "cipher")
@@ -54,7 +56,10 @@ class HttpCli(object):
         self.bufsz = 1024 * 32
         self.hint = None
         self.absolute_urls = False
-        self.out_headers = {"Access-Control-Allow-Origin": "*"}
+        self.out_headers = {
+            "Access-Control-Allow-Origin": "*",
+            "Cache-Control": "no-store; max-age=0",
+        }

     def log(self, msg, c=0):
         ptn = self.asrv.re_pwd
@@ -89,6 +94,7 @@ class HttpCli(object):
     def run(self):
         """returns true if connection can be reused"""
         self.keepalive = False
+        self.is_https = False
         self.headers = {}
         self.hint = None
         try:
@@ -126,6 +132,7 @@ class HttpCli(object):

         v = self.headers.get("connection", "").lower()
         self.keepalive = not v.startswith("close") and self.http_ver != "HTTP/1.0"
+        self.is_https = (self.headers.get("x-forwarded-proto", "").lower() == "https" or self.tls)

         n = self.args.rproxy
         if n:
@@ -213,6 +220,7 @@ class HttpCli(object):
         self.wvol = self.asrv.vfs.awrite[self.uname]
         self.mvol = self.asrv.vfs.amove[self.uname]
         self.dvol = self.asrv.vfs.adel[self.uname]
+        self.gvol = self.asrv.vfs.aget[self.uname]

         if pwd and "pw" in self.ouparam and pwd != cookies.get("cppwd"):
             self.out_headers["Set-Cookie"] = self.get_pwd_cookie(pwd)[0]
@@ -227,6 +235,9 @@ class HttpCli(object):

         self.do_log = not self.conn.lf_url or not self.conn.lf_url.search(self.req)

+        x = self.asrv.vfs.can_access(self.vpath, self.uname)
+        self.can_read, self.can_write, self.can_move, self.can_delete, self.can_get = x
+
         try:
             if self.mode in ["GET", "HEAD"]:
                 return self.handle_get() and self.keepalive
@@ -351,8 +362,7 @@ class HttpCli(object):
         ).encode("utf-8", "replace")

         if use302:
-            h = {"Location": "/" + vpath, "Cache-Control": "no-cache"}
-            self.reply(html, status=302, headers=h)
+            self.reply(html, status=302, headers={"Location": "/" + vpath})
         else:
             self.reply(html, status=status)

@@ -378,12 +388,10 @@ class HttpCli(object):
             static_path = os.path.join(E.mod, "web/", self.vpath[5:])
             return self.tx_file(static_path)

-        x = self.asrv.vfs.can_access(self.vpath, self.uname)
-        self.can_read, self.can_write, self.can_move, self.can_delete = x
-        if not self.can_read and not self.can_write:
+        if not self.can_read and not self.can_write and not self.can_get:
             if self.vpath:
                 self.log("inaccessible: [{}]".format(self.vpath))
-                raise Pebkac(404)
+                return self.tx_404(True)

             self.uparam["h"] = False

@@ -831,7 +839,18 @@ class HttpCli(object):

         reader = read_socket(self.sr, remains)

-        with open(fsenc(path), "rb+", 512 * 1024) as f:
+        f = None
+        fpool = not self.args.no_fpool
+        if fpool:
+            with self.mutex:
+                try:
+                    f = self.u2fh.pop(path)
+                except:
+                    pass
+
+        f = f or open(fsenc(path), "rb+", 512 * 1024)
+
+        try:
             f.seek(cstart[0])
             post_sz, _, sha_b64 = hashcopy(reader, f)

@@ -861,22 +880,36 @@ class HttpCli(object):
                     ofs += len(buf)

                 self.log("clone {} done".format(cstart[0]))
+        finally:
+            if not fpool:
+                f.close()
+            else:
+                with self.mutex:
+                    self.u2fh.put(path, f)

         x = self.conn.hsrv.broker.put(True, "up2k.confirm_chunk", ptop, wark, chash)
         x = x.get()
         try:
-            num_left, path = x
+            num_left, fin_path = x
         except:
             self.loud_reply(x, status=500)
             return False

-        if not ANYWIN and num_left == 0:
+        if not num_left and fpool:
+            with self.mutex:
+                self.u2fh.close(path)
+
+        # windows cant rename open files
+        if ANYWIN and path != fin_path and not self.args.nw:
+            self.conn.hsrv.broker.put(True, "up2k.finish_upload", ptop, wark).get()
+
+        if not ANYWIN and not num_left:
             times = (int(time.time()), int(lastmod))
             self.log("no more chunks, setting times {}".format(times))
             try:
-                bos.utime(path, times)
+                bos.utime(fin_path, times)
             except:
-                self.log("failed to utime ({}, {})".format(path, times))
+                self.log("failed to utime ({}, {})".format(fin_path, times))

         spd = self._spd(post_sz)
         self.log("{} thank".format(spd))
@@ -887,8 +920,12 @@ class HttpCli(object):
         pwd = self.parser.require("cppwd", 64)
         self.parser.drop()

+        dst = "/?h"
+        if self.vpath:
+            dst = "/" + quotep(self.vpath)
+
         ck, msg = self.get_pwd_cookie(pwd)
-        html = self.j2("msg", h1=msg, h2='<a href="/?h">ack</a>', redir="/?h")
+        html = self.j2("msg", h1=msg, h2='<a href="' + dst + '">ack</a>', redir=dst)
         self.reply(html.encode("utf-8"), headers={"Set-Cookie": ck})
         return True

@@ -1024,7 +1061,7 @@ class HttpCli(object):
                     bos.unlink(abspath)
                 raise

-            files.append([sz, sha512_hex, p_file, fname])
+            files.append([sz, sha512_hex, p_file, fname, abspath])
             dbv, vrem = vfs.get_dbv(rem)
             self.conn.hsrv.broker.put(
                 False,
@@ -1076,24 +1113,33 @@ class HttpCli(object):
             jmsg["error"] = errmsg
             errmsg = "ERROR: " + errmsg

-        for sz, sha512, ofn, lfn in files:
+        for sz, sha512, ofn, lfn, ap in files:
+            vsuf = ""
+            if self.can_read and "fk" in vfs.flags:
+                vsuf = "?k=" + gen_filekey(
+                    self.args.fk_salt,
+                    abspath,
+                    sz,
+                    0 if ANYWIN or not ap else bos.stat(ap).st_ino,
+                )[: vfs.flags["fk"]]
+
             vpath = "{}/{}".format(upload_vpath, lfn).strip("/")
-            msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a>\n'.format(
-                sha512[:56], sz, quotep(vpath), html_escape(ofn, crlf=True)
+            msg += 'sha512: {} // {} bytes // <a href="/{}">{}</a> {}\n'.format(
+                sha512[:56], sz, quotep(vpath) + vsuf, html_escape(ofn, crlf=True), vsuf
             )
             # truncated SHA-512 prevents length extension attacks;
             # using SHA-512/224, optionally SHA-512/256 = :64
             jpart = {
                 "url": "{}://{}/{}".format(
-                    "https" if self.tls else "http",
+                    "https" if self.is_https else "http",
                     self.headers.get("host", "copyparty"),
-                    vpath,
+                    vpath + vsuf,
                 ),
                 "sha512": sha512[:56],
                 "sz": sz,
                 "fn": lfn,
                 "fn_orig": ofn,
-                "path": vpath,
+                "path": vpath + vsuf,
             }
             jmsg["files"].append(jpart)

@@ -1277,7 +1323,7 @@ class HttpCli(object):
                 break

         if not editions:
-            raise Pebkac(404)
+            return self.tx_404()

         #
         # if-modified
@@ -1388,7 +1434,11 @@ class HttpCli(object):
         #
         # send reply

-        if not is_compressed and "cache" not in self.uparam:
+        if is_compressed:
+            self.out_headers["Cache-Control"] = "max-age=573"
+        elif "cache" in self.uparam:
+            self.out_headers["Cache-Control"] = "max-age=69"
+        else:
             self.out_headers.update(NO_CACHE)

         self.out_headers["Accept-Ranges"] = "bytes"
@@ -1515,6 +1565,10 @@ class HttpCli(object):
     def tx_md(self, fs_path):
         logmsg = "{:4} {} ".format("", self.req)

+        if not self.can_write:
+            if "edit" in self.uparam or "edit2" in self.uparam:
+                return self.tx_404(True)
+
         tpl = "mde" if "edit2" in self.uparam else "md"
         html_path = os.path.join(E.mod, "web", "{}.html".format(tpl))
         template = self.j2(tpl)
@@ -1537,6 +1591,10 @@ class HttpCli(object):
             self.out_headers.update(NO_CACHE)
         status = 200 if do_send else 304

+        arg_base = "?"
+        if "k" in self.uparam:
+            arg_base = "?k={}&".format(self.uparam["k"])
+
         boundary = "\roll\tide"
         targs = {
             "edit": "edit" in self.uparam,
@@ -1546,6 +1604,7 @@ class HttpCli(object):
             "md_chk_rate": self.args.mcr,
             "md": boundary,
             "ts": self.conn.hsrv.cachebuster(),
+            "arg_base": arg_base,
         }
         html = template.render(**targs).encode("utf-8", "replace")
         html = html.split(boundary.encode("utf-8"))
@@ -1596,6 +1655,7 @@ class HttpCli(object):
         html = self.j2(
             "splash",
             this=self,
+            qvpath=quotep(self.vpath),
             rvol=rvol,
             wvol=wvol,
             avol=avol,
@@ -1606,7 +1666,19 @@ class HttpCli(object):
             mtpq=vs["mtpq"],
             url_suf=suf,
         )
-        self.reply(html.encode("utf-8"), headers=NO_STORE)
+        self.reply(html.encode("utf-8"))
+        return True
+
+    def tx_404(self, is_403=False):
+        if self.args.vague_403:
+            m = '<h1>404 not found ┐( ´ -`)┌</h1><p>or maybe you don\'t have access -- try logging in or <a href="/?h">go home</a></p>'
+        elif is_403:
+            m = '<h1>403 forbiddena ~┻━┻</h1><p>you\'ll have to log in or <a href="/?h">go home</a></p>'
+        else:
+            m = '<h1>404 not found ┐( ´ -`)┌</h1><p><a href="/?h">go home</a></p>'
+
+        html = self.j2("splash", this=self, qvpath=quotep(self.vpath), msg=m)
+        self.reply(html.encode("utf-8"), status=404)
         return True

     def scanvol(self):
@@ -1718,7 +1790,7 @@ class HttpCli(object):
             if filt and filt not in vp:
                 continue

-            ret.append({"vp": vp, "sz": sz, "at": at})
+            ret.append({"vp": quotep(vp), "sz": sz, "at": at})
             if len(ret) > 3000:
                 ret.sort(key=lambda x: x["at"], reverse=True)
                 ret = ret[:2000]
@@ -1782,15 +1854,15 @@ class HttpCli(object):
         try:
             st = bos.stat(abspath)
         except:
-            raise Pebkac(404)
+            return self.tx_404()

+        if rem.startswith(".hist/up2k.") or (
+            rem.endswith("/dir.txt") and rem.startswith(".hist/th/")
+        ):
+            raise Pebkac(403)
+
+        is_dir = stat.S_ISDIR(st.st_mode)
         if self.can_read:
-            if rem.startswith(".hist/up2k.") or (
-                rem.endswith("/dir.txt") and rem.startswith(".hist/th/")
-            ):
-                raise Pebkac(403)
-
-            is_dir = stat.S_ISDIR(st.st_mode)
             th_fmt = self.uparam.get("th")
             if th_fmt is not None:
                 if is_dir:
@@ -1815,11 +1887,23 @@ class HttpCli(object):

                 return self.tx_ico(rem)

-        if not is_dir:
-            if abspath.endswith(".md") and "raw" not in self.uparam:
-                return self.tx_md(abspath)
+        if not is_dir and (self.can_read or self.can_get):
+            if not self.can_read and "fk" in vn.flags:
+                correct = gen_filekey(
+                    self.args.fk_salt, abspath, st.st_size, 0 if ANYWIN else st.st_ino
+                )[: vn.flags["fk"]]
+                got = self.uparam.get("k")
+                if got != correct:
+                    self.log("wrong filekey, want {}, got {}".format(correct, got))
+                    return self.tx_404()
+
+            if abspath.endswith(".md") and "raw" not in self.uparam:
+                return self.tx_md(abspath)
+
+            return self.tx_file(abspath)

-            return self.tx_file(abspath)
+        elif is_dir and not self.can_read and not self.can_write:
+            return self.tx_404(True)

         srv_info = []

@@ -1859,6 +1943,8 @@ class HttpCli(object):
             perms.append("move")
         if self.can_delete:
             perms.append("delete")
+        if self.can_get:
+            perms.append("get")

         url_suf = self.urlq({}, [])
         is_ls = "ls" in self.uparam
@@ -1918,18 +2004,17 @@ class HttpCli(object):
         if not self.can_read:
             if is_ls:
                 ret = json.dumps(ls_ret)
-                self.reply(
-                    ret.encode("utf-8", "replace"),
-                    mime="application/json",
-                    headers=NO_STORE,
-                )
+                self.reply(ret.encode("utf-8", "replace"), mime="application/json")
                 return True

             if not stat.S_ISDIR(st.st_mode):
-                raise Pebkac(404)
+                return self.tx_404(True)
+
+            if "zip" in self.uparam or "tar" in self.uparam:
+                raise Pebkac(403)

             html = self.j2(tpl, **j2a)
-            self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
+            self.reply(html.encode("utf-8", "replace"))
             return True

         for k in ["zip", "tar"]:
@@ -1973,6 +2058,8 @@ class HttpCli(object):
             idx = self.conn.get_u2idx()
             icur = idx.get_cur(dbv.realpath)

+        add_fk = vn.flags.get("fk")
+
         dirs = []
         files = []
         for fn in vfs_ls:
@@ -2018,9 +2105,19 @@ class HttpCli(object):
             except:
                 ext = "%"

+            if add_fk:
+                href = "{}?k={}".format(
+                    quotep(href),
+                    gen_filekey(
+                        self.args.fk_salt, fspath, sz, 0 if ANYWIN else inf.st_ino
+                    )[:add_fk],
+                )
+            else:
+                href = quotep(href)
+
             item = {
                 "lead": margin,
-                "href": quotep(href),
+                "href": href,
                 "name": fn,
                 "sz": sz,
                 "ext": ext,
@@ -2088,13 +2185,14 @@ class HttpCli(object):
             ls_ret["files"] = files
             ls_ret["taglist"] = taglist
             ret = json.dumps(ls_ret)
-            self.reply(
-                ret.encode("utf-8", "replace"),
-                mime="application/json",
-                headers=NO_STORE,
-            )
+            self.reply(ret.encode("utf-8", "replace"), mime="application/json")
             return True

+        for d in dirs:
+            d["name"] += "/"
+
+        dirs.sort(key=itemgetter("name"))
+
         j2a["files"] = dirs + files
         j2a["logues"] = logues
         j2a["taglist"] = taglist
@@ -2106,5 +2204,5 @@ class HttpCli(object):
             j2a["css"] = self.args.css_browser

         html = self.j2(tpl, **j2a)
-        self.reply(html.encode("utf-8", "replace"), headers=NO_STORE)
+        self.reply(html.encode("utf-8", "replace"))
         return True
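Tying the httpcli.py hunks together: on a volume with the "fk" flag, every file link gets a "?k=..." suffix derived from gen_filekey, and a visitor who only holds the "g" (get) permission must present that exact key or receives tx_404. A made-up example of such a link (hostname, path and key are placeholders):

```
https://example.com/pub/song.opus?k=3fkp0sQZ
```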
@@ -32,9 +32,11 @@ class HttpConn(object):
|
|||||||
self.addr = addr
|
self.addr = addr
|
||||||
self.hsrv = hsrv
|
self.hsrv = hsrv
|
||||||
|
|
||||||
|
self.mutex = hsrv.mutex
|
||||||
self.args = hsrv.args
|
self.args = hsrv.args
|
||||||
self.asrv = hsrv.asrv
|
self.asrv = hsrv.asrv
|
||||||
self.cert_path = hsrv.cert_path
|
self.cert_path = hsrv.cert_path
|
||||||
|
self.u2fh = hsrv.u2fh
|
||||||
|
|
||||||
enth = HAVE_PIL and not self.args.no_thumb
|
enth = HAVE_PIL and not self.args.no_thumb
|
||||||
self.thumbcli = ThumbCli(hsrv.broker) if enth else None
|
self.thumbcli = ThumbCli(hsrv.broker) if enth else None
|
||||||
|
|||||||
@@ -27,7 +27,7 @@ except ImportError:
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
from .__init__ import E, PY2, MACOS
|
from .__init__ import E, PY2, MACOS
|
||||||
from .util import spack, min_ex, start_stackmon, start_log_thrs
|
from .util import FHC, spack, min_ex, start_stackmon, start_log_thrs
|
||||||
from .bos import bos
|
from .bos import bos
|
||||||
from .httpconn import HttpConn
|
from .httpconn import HttpConn
|
||||||
|
|
||||||
@@ -50,7 +50,10 @@ class HttpSrv(object):
|
|||||||
self.log = broker.log
|
self.log = broker.log
|
||||||
self.asrv = broker.asrv
|
self.asrv = broker.asrv
|
||||||
|
|
||||||
self.name = "httpsrv" + ("-n{}-i{:x}".format(nid, os.getpid()) if nid else "")
|
nsuf = "-{}".format(nid) if nid else ""
|
||||||
|
nsuf2 = "-n{}-i{:x}".format(nid, os.getpid()) if nid else ""
|
||||||
|
|
||||||
|
self.name = "hsrv" + nsuf2
|
||||||
self.mutex = threading.Lock()
|
self.mutex = threading.Lock()
|
||||||
self.stopping = False
|
self.stopping = False
|
||||||
|
|
||||||
@@ -59,6 +62,7 @@ class HttpSrv(object):
|
|||||||
self.tp_time = None # latest worker collect
|
self.tp_time = None # latest worker collect
|
||||||
self.tp_q = None if self.args.no_htp else queue.LifoQueue()
|
self.tp_q = None if self.args.no_htp else queue.LifoQueue()
|
||||||
|
|
||||||
|
self.u2fh = FHC()
|
||||||
self.srvs = []
|
self.srvs = []
|
||||||
self.ncli = 0 # exact
|
self.ncli = 0 # exact
|
||||||
self.clients = {} # laggy
|
self.clients = {} # laggy
|
||||||
@@ -82,11 +86,6 @@ class HttpSrv(object):
|
|||||||
if self.tp_q:
|
if self.tp_q:
|
||||||
self.start_threads(4)
|
self.start_threads(4)
|
||||||
|
|
||||||
name = "httpsrv-scaler" + ("-{}".format(nid) if nid else "")
|
|
||||||
t = threading.Thread(target=self.thr_scaler, name=name)
|
|
||||||
t.daemon = True
|
|
||||||
t.start()
|
|
||||||
|
|
||||||
if nid:
|
if nid:
|
||||||
if self.args.stackmon:
|
if self.args.stackmon:
|
||||||
start_stackmon(self.args.stackmon, nid)
|
start_stackmon(self.args.stackmon, nid)
|
||||||
@@ -94,6 +93,10 @@ class HttpSrv(object):
|
|||||||
if self.args.log_thrs:
|
if self.args.log_thrs:
|
||||||
start_log_thrs(self.log, self.args.log_thrs, nid)
|
start_log_thrs(self.log, self.args.log_thrs, nid)
|
||||||
|
|
||||||
|
t = threading.Thread(target=self.periodic, name="hsrv-pt" + nsuf)
|
||||||
|
t.daemon = True
|
||||||
|
t.start()
|
||||||
|
|
||||||
def start_threads(self, n):
|
def start_threads(self, n):
|
||||||
self.tp_nthr += n
|
self.tp_nthr += n
|
||||||
if self.args.log_htp:
|
if self.args.log_htp:
|
||||||
@@ -115,13 +118,15 @@ class HttpSrv(object):
|
|||||||
for _ in range(n):
|
for _ in range(n):
|
||||||
self.tp_q.put(None)
|
self.tp_q.put(None)
|
||||||
|
|
||||||
def thr_scaler(self):
|
def periodic(self):
|
||||||
while True:
|
while True:
|
||||||
time.sleep(2 if self.tp_ncli else 30)
|
time.sleep(2 if self.tp_ncli else 10)
|
||||||
with self.mutex:
|
with self.mutex:
|
||||||
self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
|
self.u2fh.clean()
|
||||||
if self.tp_nthr > self.tp_ncli + 8:
|
if self.tp_q:
|
||||||
self.stop_threads(4)
|
self.tp_ncli = max(self.ncli, self.tp_ncli - 2)
|
||||||
|
if self.tp_nthr > self.tp_ncli + 8:
|
||||||
|
self.stop_threads(4)
|
||||||
|
|
||||||
def listen(self, sck, nlisteners):
|
def listen(self, sck, nlisteners):
|
||||||
ip, port = sck.getsockname()
|
ip, port = sck.getsockname()
|
||||||
|
|||||||
@@ -471,7 +471,10 @@ class MTag(object):
|
|||||||
ret = {}
|
ret = {}
|
||||||
for tagname, mp in parsers.items():
|
for tagname, mp in parsers.items():
|
||||||
try:
|
try:
|
||||||
cmd = [sys.executable, mp.bin, abspath]
|
cmd = [mp.bin, abspath]
|
||||||
|
if mp.bin.endswith(".py"):
|
||||||
|
cmd = [sys.executable] + cmd
|
||||||
|
|
||||||
args = {"env": env, "timeout": mp.timeout}
|
args = {"env": env, "timeout": mp.timeout}
|
||||||
|
|
||||||
if WINDOWS:
|
if WINDOWS:
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
# coding: utf-8
|
# coding: utf-8
|
||||||
from __future__ import print_function, unicode_literals
|
from __future__ import print_function, unicode_literals
|
||||||
|
|
||||||
import re
|
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import time
|
import time
|
||||||
@@ -54,6 +53,17 @@ class SvcHub(object):
|
|||||||
if args.log_thrs:
|
if args.log_thrs:
|
||||||
start_log_thrs(self.log, args.log_thrs, 0)
|
start_log_thrs(self.log, args.log_thrs, 0)
|
||||||
|
|
||||||
|
if not ANYWIN and not args.use_fpool:
|
||||||
|
args.no_fpool = True
|
||||||
|
|
||||||
|
if not args.no_fpool and args.j != 1:
|
||||||
|
m = "WARNING: --use-fpool combined with multithreading is untested and can probably cause undefined behavior"
|
||||||
|
if ANYWIN:
|
||||||
|
m = "windows cannot do multithreading without --no-fpool, so enabling that -- note that upload performance will suffer if you have microsoft defender \"real-time protection\" enabled, so you probably want to use -j 1 instead"
|
||||||
|
args.no_fpool = True
|
||||||
|
|
||||||
|
self.log("root", m, c=3)
|
||||||
|
|
||||||
# initiate all services to manage
|
# initiate all services to manage
|
||||||
self.asrv = AuthSrv(self.args, self.log)
|
self.asrv = AuthSrv(self.args, self.log)
|
||||||
if args.ls:
|
if args.ls:
|
||||||
@@ -205,6 +215,8 @@ class SvcHub(object):
|
|||||||
if self.stopping:
|
if self.stopping:
|
||||||
return
|
return
|
||||||
|
|
||||||
|
# start_log_thrs(print, 0.1, 1)
|
||||||
|
|
||||||
self.stopping = True
|
self.stopping = True
|
||||||
self.stop_req = True
|
self.stop_req = True
|
||||||
with self.stop_cond:
|
with self.stop_cond:
|
||||||
|
|||||||
copyparty/u2idx.py

@@ -6,9 +6,10 @@ import os
 import time
 import threading
 from datetime import datetime
+from operator import itemgetter
 
-from .__init__ import unicode
-from .util import s3dec, Pebkac, min_ex
+from .__init__ import ANYWIN, unicode
+from .util import absreal, s3dec, Pebkac, min_ex, gen_filekey, quotep
 from .bos import bos
 from .up2k import up2k_wark_from_hashlist
 
@@ -242,6 +243,7 @@ class U2idx(object):
         self.active_cur = cur
 
         sret = []
+        fk = flags.get("fk")
         c = cur.execute(q, v)
         for hit in c:
             w, ts, sz, rd, fn, ip, at = hit
@@ -252,7 +254,23 @@ class U2idx(object):
             if rd.startswith("//") or fn.startswith("//"):
                 rd, fn = s3dec(rd, fn)
 
-            rp = "/".join([x for x in [vtop, rd, fn] if x])
+            if not fk:
+                suf = ""
+            else:
+                try:
+                    ap = absreal(os.path.join(ptop, rd, fn))
+                    inf = bos.stat(ap)
+                except:
+                    continue
+
+                suf = (
+                    "?k="
+                    + gen_filekey(
+                        self.args.fk_salt, ap, sz, 0 if ANYWIN else inf.st_ino
+                    )[:fk]
+                )
+
+            rp = quotep("/".join([x for x in [vtop, rd, fn] if x])) + suf
             sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})
 
         for hit in sret:
@@ -275,9 +293,13 @@ class U2idx(object):
         # undupe hits from multiple metadata keys
         if len(ret) > 1:
             ret = [ret[0]] + [
-                y for x, y in zip(ret[:-1], ret[1:]) if x["rp"] != y["rp"]
+                y
+                for x, y in zip(ret[:-1], ret[1:])
+                if x["rp"].split("?")[0] != y["rp"].split("?")[0]
             ]
 
+        ret.sort(key=itemgetter("rp"))
+
         return ret, list(taglist.keys())
 
     def terminator(self, identifier, done_flag):
copyparty/up2k.py

@@ -27,7 +27,10 @@ from .util import (
     sanitize_fn,
     ren_open,
     atomic_move,
+    quotep,
     vsplit,
+    w8b64enc,
+    w8b64dec,
     s3enc,
     s3dec,
     rmdirs,
@@ -66,6 +69,7 @@ class Up2k(object):
         self.n_tagq = 0
         self.volstate = {}
         self.need_rescan = {}
+        self.dupesched = {}
         self.registry = {}
         self.entags = {}
         self.flags = {}
@@ -236,7 +240,7 @@ class Up2k(object):
             if vp:
                 fvp = "{}/{}".format(vp, fvp)
 
-            self._handle_rm(LEELOO_DALLAS, None, fvp)
+            self._handle_rm(LEELOO_DALLAS, None, fvp, True)
             nrm += 1
 
         if nrm:
@@ -477,11 +481,18 @@ class Up2k(object):
         if WINDOWS:
             excl = [x.replace("/", "\\") for x in excl]
 
-        n_add = self._build_dir(dbw, top, set(excl), top, nohash, [])
-        n_rm = self._drop_lost(dbw[0], top)
+        n_add = n_rm = 0
+        try:
+            n_add = self._build_dir(dbw, top, set(excl), top, nohash, [])
+            n_rm = self._drop_lost(dbw[0], top)
+        except:
+            m = "failed to index volume [{}]:\n{}"
+            self.log(m.format(top, min_ex()), c=1)
 
         if dbw[1]:
             self.log("commit {} new files".format(dbw[1]))
-            dbw[0].connection.commit()
+
+        dbw[0].connection.commit()
 
         return True, n_add or n_rm or do_vac
@@ -492,10 +503,11 @@ class Up2k(object):
             self.log(m.format(seen[-1], rcdir, cdir), 3)
             return 0
 
-        seen = seen + [cdir]
+        seen = seen + [rcdir]
         self.pp.msg = "a{} {}".format(self.pp.n, cdir)
         histpath = self.asrv.vfs.histtab[top]
         ret = 0
+        seen_files = {}
         g = statdir(self.log_func, not self.args.no_scandir, False, cdir)
         for iname, inf in sorted(g):
             abspath = os.path.join(cdir, iname)
@@ -505,9 +517,14 @@ class Up2k(object):
                 if abspath in excl or abspath == histpath:
                     continue
                 # self.log(" dir: {}".format(abspath))
-                ret += self._build_dir(dbw, top, excl, abspath, nohash, seen)
+                try:
+                    ret += self._build_dir(dbw, top, excl, abspath, nohash, seen)
+                except:
+                    m = "failed to index subdir [{}]:\n{}"
+                    self.log(m.format(abspath, min_ex()), c=1)
             else:
                 # self.log("file: {}".format(abspath))
+                seen_files[iname] = 1
                 rp = abspath[len(top) + 1 :]
                 if WINDOWS:
                     rp = rp.replace("\\", "/").strip("/")
@@ -566,34 +583,65 @@ class Up2k(object):
                     dbw[0].connection.commit()
                     dbw[1] = 0
                     dbw[2] = time.time()
 
+        # drop missing files
+        rd = cdir[len(top) + 1 :].strip("/")
+        if WINDOWS:
+            rd = rd.replace("\\", "/").strip("/")
+
+        q = "select fn from up where rd = ?"
+        try:
+            c = dbw[0].execute(q, (rd,))
+        except:
+            c = dbw[0].execute(q, ("//" + w8b64enc(rd),))
+
+        hits = [w8b64dec(x[2:]) if x.startswith("//") else x for (x,) in c]
+        rm_files = [x for x in hits if x not in seen_files]
+        n_rm = len(rm_files)
+        for fn in rm_files:
+            self.db_rm(dbw[0], rd, fn)
+
+        if n_rm:
+            self.log("forgot {} deleted files".format(n_rm))
+
         return ret
 
     def _drop_lost(self, cur, top):
         rm = []
+        n_rm = 0
         nchecked = 0
-        nfiles = next(cur.execute("select count(w) from up"))[0]
-        c = cur.execute("select rd, fn from up")
-        for drd, dfn in c:
+        # `_build_dir` did all the files, now do dirs
+        ndirs = next(cur.execute("select count(distinct rd) from up"))[0]
+        c = cur.execute("select distinct rd from up order by rd desc")
+        for (drd,) in c:
             nchecked += 1
-            if drd.startswith("//") or dfn.startswith("//"):
-                drd, dfn = s3dec(drd, dfn)
+            if drd.startswith("//"):
+                rd = w8b64dec(drd[2:])
+            else:
+                rd = drd
 
-            abspath = os.path.join(top, drd, dfn)
-            # almost zero overhead dw
-            self.pp.msg = "b{} {}".format(nfiles - nchecked, abspath)
+            abspath = os.path.join(top, rd)
+            self.pp.msg = "b{} {}".format(ndirs - nchecked, abspath)
+
             try:
-                if not bos.path.exists(abspath):
-                    rm.append([drd, dfn])
-            except Exception as ex:
-                self.log("stat-rm: {} @ [{}]".format(repr(ex), abspath))
+                if os.path.isdir(abspath):
+                    continue
+            except:
+                pass
 
-        if rm:
-            self.log("forgetting {} deleted files".format(len(rm)))
-            for rd, fn in rm:
-                # self.log("{} / {}".format(rd, fn))
-                self.db_rm(cur, rd, fn)
+            rm.append(drd)
 
-        return len(rm)
+        if not rm:
+            return 0
+
+        q = "select count(w) from up where rd = ?"
+        for rd in rm:
+            n_rm += next(cur.execute(q, (rd,)))[0]
+
+        self.log("forgetting {} deleted dirs, {} files".format(len(rm), n_rm))
+        for rd in rm:
+            cur.execute("delete from up where rd = ?", (rd,))
+
+        return n_rm
 
     def _build_tags_index(self, vol):
         ptop = vol.realpath
@@ -838,6 +886,7 @@ class Up2k(object):
 
         cur.connection.commit()
         if n_done:
+            self.log("mtp: scanned {} files in {}".format(n_done, ptop), c=6)
             cur.execute("vacuum")
 
         wcur.close()
@@ -939,7 +988,12 @@ class Up2k(object):
 
     def _tag_file(self, write_cur, entags, wark, abspath, tags=None):
         if tags is None:
-            tags = self.mtag.get(abspath)
+            try:
+                tags = self.mtag.get(abspath)
+            except Exception as ex:
+                msg = "failed to read tags from {}:\n{}"
+                self.log(msg.format(abspath, ex), c=3)
+                return
 
         if entags:
             tags = {k: v for k, v in tags.items() if k in entags}
@@ -1111,9 +1165,18 @@ class Up2k(object):
                 if dp_dir.startswith("//") or dp_fn.startswith("//"):
                     dp_dir, dp_fn = s3dec(dp_dir, dp_fn)
 
+                if job and (dp_dir != cj["prel"] or dp_fn != cj["name"]):
+                    continue
+
                 dp_abs = "/".join([cj["ptop"], dp_dir, dp_fn])
-                # relying on path.exists to return false on broken symlinks
-                if bos.path.exists(dp_abs):
+                # relying on this to fail on broken symlinks
+                try:
+                    sz = bos.path.getsize(dp_abs)
+                except:
+                    sz = 0
+
+                if sz:
+                    # self.log("--- " + wark + " " + dp_abs + " found file", 4)
                     job = {
                         "name": dp_fn,
                         "prel": dp_dir,
@@ -1126,9 +1189,9 @@ class Up2k(object):
                         "hash": [],
                         "need": [],
                     }
-                    break
 
                 if job and wark in reg:
+                    # self.log("pop " + wark + " " + job["name"] + " handle_json db", 4)
                     del reg[wark]
 
             if job or wark in reg:
@@ -1156,11 +1219,20 @@ class Up2k(object):
                 if job["need"]:
                     self.log("unfinished:\n {0}\n {1}".format(src, dst))
                     err = "partial upload exists at a different location; please resume uploading here instead:\n"
-                    err += "/" + vsrc + " "
+                    err += "/" + quotep(vsrc) + " "
+
+                    dupe = [cj["prel"], cj["name"]]
+                    try:
+                        self.dupesched[src].append(dupe)
+                    except:
+                        self.dupesched[src] = [dupe]
+
                     raise Pebkac(400, err)
 
                 elif "nodupe" in self.flags[job["ptop"]]:
                     self.log("dupe-reject:\n {0}\n {1}".format(src, dst))
-                    err = "upload rejected, file already exists:\n/" + vsrc + " "
+                    err = "upload rejected, file already exists:\n"
+                    err += "/" + quotep(vsrc) + " "
                     raise Pebkac(400, err)
                 else:
                     # symlink to the client-provided name,
@@ -1253,6 +1325,9 @@ class Up2k(object):
             return
 
         try:
+            if self.args.no_symlink:
+                raise Exception("disabled in config")
+
             lsrc = src
             ldst = dst
             fs1 = bos.stat(os.path.dirname(src)).st_dev
@@ -1333,20 +1408,57 @@ class Up2k(object):
             # del self.registry[ptop][wark]
             return ret, dst
 
-        atomic_move(src, dst)
-
-        if ANYWIN:
-            a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
-            self.lastmod_q.put(a)
-
-        a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
-        a += [job.get("at") or time.time()]
-        if self.idx_wark(*a):
-            del self.registry[ptop][wark]
-            # in-memory registry is reserved for unfinished uploads
+        # windows cant rename open files
+        if not ANYWIN or src == dst:
+            self._finish_upload(ptop, wark)
 
         return ret, dst
 
+    def finish_upload(self, ptop, wark):
+        with self.mutex:
+            self._finish_upload(ptop, wark)
+
+    def _finish_upload(self, ptop, wark):
+        try:
+            job = self.registry[ptop][wark]
+            pdir = os.path.join(job["ptop"], job["prel"])
+            src = os.path.join(pdir, job["tnam"])
+            dst = os.path.join(pdir, job["name"])
+        except Exception as ex:
+            return "finish_upload, wark, " + repr(ex)
+
+        # self.log("--- " + wark + " " + dst + " finish_upload atomic " + dst, 4)
+        atomic_move(src, dst)
+
+        if ANYWIN:
+            a = [dst, job["size"], (int(time.time()), int(job["lmod"]))]
+            self.lastmod_q.put(a)
+
+        a = [job[x] for x in "ptop wark prel name lmod size addr".split()]
+        a += [job.get("at") or time.time()]
+        if self.idx_wark(*a):
+            # self.log("pop " + wark + " " + dst + " finish_upload idx_wark", 4)
+            del self.registry[ptop][wark]
+            # in-memory registry is reserved for unfinished uploads
+
+        dupes = self.dupesched.pop(dst, [])
+        if not dupes:
+            return
+
+        cur = self.cur.get(ptop)
+        for rd, fn in dupes:
+            d2 = os.path.join(ptop, rd, fn)
+            if os.path.exists(d2):
+                continue
+
+            self._symlink(dst, d2)
+            if cur:
+                self.db_rm(cur, rd, fn)
+                self.db_add(cur, wark, rd, fn, *a[-4:])
+
+        if cur:
+            cur.connection.commit()
+
     def idx_wark(self, ptop, wark, rd, fn, lmod, sz, ip, at):
         cur = self.cur.get(ptop)
         if not cur:
@@ -1384,7 +1496,7 @@ class Up2k(object):
         ok = {}
         ng = {}
         for vp in vpaths:
-            a, b, c = self._handle_rm(uname, ip, vp)
+            a, b, c = self._handle_rm(uname, ip, vp, False)
             n_files += a
             for k in b:
                 ok[k] = 1
@@ -1397,7 +1509,7 @@ class Up2k(object):
 
         return "deleted {} files (and {}/{} folders)".format(n_files, ok, ok + ng)
 
-    def _handle_rm(self, uname, ip, vpath):
+    def _handle_rm(self, uname, ip, vpath, rm_topdir):
        try:
             permsets = [[True, False, False, True]]
             vn, rem = self.asrv.vfs.get(vpath, uname, *permsets[0])
@@ -1466,7 +1578,7 @@ class Up2k(object):
 
             bos.unlink(abspath)
 
-        rm = rmdirs(self.log_func, scandir, True, atop)
+        rm = rmdirs(self.log_func, scandir, True, atop, 1 if rm_topdir else 0)
         return n_files, rm[0], rm[1]
 
     def handle_mv(self, uname, svp, dvp):
@@ -1508,7 +1620,7 @@ class Up2k(object):
             with self.mutex:
                 self._mv_file(uname, svpf, dvpf)
 
-        rmdirs(self.log_func, scandir, True, sabs)
+        rmdirs(self.log_func, scandir, True, sabs, 1)
         return "k"
 
     def _mv_file(self, uname, svp, dvp):
@@ -1622,7 +1734,7 @@ class Up2k(object):
             wark = [
                 x
                 for x, y in reg.items()
-                if fn in [y["name"], y.get("tnam")] and y["prel"] == vrem
+                if sfn in [y["name"], y.get("tnam")] and y["prel"] == vrem
             ]
 
            if wark and wark in reg:
@@ -1705,7 +1817,13 @@ class Up2k(object):
         except:
             cj["lmod"] = int(time.time())
 
-        wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"])
+        if cj["hash"]:
+            wark = up2k_wark_from_hashlist(self.salt, cj["size"], cj["hash"])
+        else:
+            wark = up2k_wark_from_metadata(
+                self.salt, cj["size"], cj["lmod"], cj["prel"], cj["name"]
+            )
+
         return wark
 
     def _hashlist_from_file(self, path):
@@ -1748,6 +1866,8 @@ class Up2k(object):
 
         if self.args.nw:
             job["tnam"] = tnam
+            if not job["hash"]:
+                del self.registry[job["ptop"]][job["wark"]]
             return
 
         suffix = ".{:.6f}-{}".format(job["t0"], job["addr"])
@@ -1764,8 +1884,12 @@ class Up2k(object):
             except:
                 self.log("could not sparse [{}]".format(fp), 3)
 
-            f.seek(job["size"] - 1)
-            f.write(b"e")
+            if job["hash"]:
+                f.seek(job["size"] - 1)
+                f.write(b"e")
 
+        if not job["hash"]:
+            self._finish_upload(job["ptop"], job["wark"])
+
     def _lastmodder(self):
         while True:
@@ -1863,11 +1987,16 @@ class Up2k(object):
 
             # self.log("\n " + repr([ptop, rd, fn]))
             abspath = os.path.join(ptop, rd, fn)
-            tags = self.mtag.get(abspath)
-            ntags1 = len(tags)
-            parsers = self._get_parsers(ptop, tags, abspath)
-            if parsers:
-                tags.update(self.mtag.get_bin(parsers, abspath))
+            try:
+                tags = self.mtag.get(abspath)
+                ntags1 = len(tags)
+                parsers = self._get_parsers(ptop, tags, abspath)
+                if parsers:
+                    tags.update(self.mtag.get_bin(parsers, abspath))
+            except Exception as ex:
+                msg = "failed to read tags from {}:\n{}"
+                self.log(msg.format(abspath, ex), c=3)
+                continue
 
             with self.mutex:
                 cur = self.cur[ptop]
copyparty/util.py

@@ -19,7 +19,7 @@ import subprocess as sp # nosec
 from datetime import datetime
 from collections import Counter
 
-from .__init__ import PY2, WINDOWS, ANYWIN, VT100
+from .__init__ import PY2, WINDOWS, ANYWIN, VT100, unicode
 from .stolen import surrogateescape
 
 FAKE_MP = False
@@ -169,7 +169,7 @@ class Cooldown(object):
         return ret
 
 
-class Unrecv(object):
+class _Unrecv(object):
     """
     undo any number of socket recv ops
     """
@@ -189,10 +189,117 @@ class Unrecv(object):
         except:
             return b""
 
+    def recv_ex(self, nbytes):
+        """read an exact number of bytes"""
+        ret = self.recv(nbytes)
+        while ret and len(ret) < nbytes:
+            buf = self.recv(nbytes - len(ret))
+            if not buf:
+                break
+
+            ret += buf
+
+        return ret
+
     def unrecv(self, buf):
         self.buf = buf + self.buf
 
 
+class _LUnrecv(object):
+    """
+    with expensive debug logging
+    """
+
+    def __init__(self, s):
+        self.s = s
+        self.buf = b""
+
+    def recv(self, nbytes):
+        if self.buf:
+            ret = self.buf[:nbytes]
+            self.buf = self.buf[nbytes:]
+            m = "\033[0;7mur:pop:\033[0;1;32m {}\n\033[0;7mur:rem:\033[0;1;35m {}\033[0m\n"
+            print(m.format(ret, self.buf), end="")
+            return ret
+
+        try:
+            ret = self.s.recv(nbytes)
+            m = "\033[0;7mur:recv\033[0;1;33m {}\033[0m\n"
+            print(m.format(ret), end="")
+            return ret
+        except:
+            return b""
+
+    def recv_ex(self, nbytes):
+        """read an exact number of bytes"""
+        ret = self.recv(nbytes)
+        while ret and len(ret) < nbytes:
+            buf = self.recv(nbytes - len(ret))
+            if not buf:
+                break
+
+            ret += buf
+
+        return ret
+
+    def unrecv(self, buf):
+        self.buf = buf + self.buf
+        m = "\033[0;7mur:push\033[0;1;31m {}\n\033[0;7mur:rem:\033[0;1;35m {}\033[0m\n"
+        print(m.format(buf, self.buf), end="")
+
+
+Unrecv = _Unrecv
+
+
+class FHC(object):
+    class CE(object):
+        def __init__(self, fh):
+            self.ts = 0
+            self.fhs = [fh]
+
+    def __init__(self):
+        self.cache = {}
+
+    def close(self, path):
+        try:
+            ce = self.cache[path]
+        except:
+            return
+
+        for fh in ce.fhs:
+            fh.close()
+
+        del self.cache[path]
+
+    def clean(self):
+        if not self.cache:
+            return
+
+        keep = {}
+        now = time.time()
+        for path, ce in self.cache.items():
+            if now < ce.ts + 5:
+                keep[path] = ce
+            else:
+                for fh in ce.fhs:
+                    fh.close()
+
+        self.cache = keep
+
+    def pop(self, path):
+        return self.cache[path].fhs.pop()
+
+    def put(self, path, fh):
+        try:
+            ce = self.cache[path]
+            ce.fhs.append(fh)
+        except:
+            ce = self.CE(fh)
+            self.cache[path] = ce
+
+        ce.ts = time.time()
+
+
 class ProgressPrinter(threading.Thread):
     """
     periodically print progress info without linefeeds
@@ -317,7 +424,7 @@ def stackmon(fp, ival, suffix):
 
 
 def start_log_thrs(logger, ival, nid):
-    ival = int(ival)
+    ival = float(ival)
     tname = lname = "log-thrs"
     if nid:
         tname = "logthr-n{}-i{:x}".format(nid, os.getpid())
@@ -352,6 +459,10 @@ def log_thrs(log, ival, name):
 def vol_san(vols, txt):
     for vol in vols:
         txt = txt.replace(vol.realpath.encode("utf-8"), vol.vpath.encode("utf-8"))
+        txt = txt.replace(
+            vol.realpath.encode("utf-8").replace(b"\\", b"\\\\"),
+            vol.vpath.encode("utf-8"),
+        )
 
     return txt
 
@@ -450,8 +561,8 @@ class MultipartParser(object):
         self.log = log_func
         self.headers = http_headers
 
-        self.re_ctype = re.compile(r"^content-type: *([^;]+)", re.IGNORECASE)
-        self.re_cdisp = re.compile(r"^content-disposition: *([^;]+)", re.IGNORECASE)
+        self.re_ctype = re.compile(r"^content-type: *([^; ]+)", re.IGNORECASE)
+        self.re_cdisp = re.compile(r"^content-disposition: *([^; ]+)", re.IGNORECASE)
         self.re_cdisp_field = re.compile(
             r'^content-disposition:(?: *|.*; *)name="([^"]+)"', re.IGNORECASE
         )
@@ -587,19 +698,21 @@ class MultipartParser(object):
         yields [fieldname, unsanitized_filename, fieldvalue]
         where fieldvalue yields chunks of data
         """
-        while True:
+        run = True
+        while run:
             fieldname, filename = self._read_header()
             yield [fieldname, filename, self._read_data()]
 
-            tail = self.sr.recv(2)
+            tail = self.sr.recv_ex(2)
 
             if tail == b"--":
                 # EOF indicated by this immediately after final boundary
-                self.sr.recv(2)
-                return
+                tail = self.sr.recv_ex(2)
+                run = False
 
             if tail != b"\r\n":
-                raise Pebkac(400, "protocol error after field value")
+                m = "protocol error after field value: want b'\\r\\n', got {!r}"
+                raise Pebkac(400, m.format(tail))
 
     def _read_value(self, iterator, max_len):
         ret = b""
@@ -648,7 +761,7 @@ class MultipartParser(object):
 def get_boundary(headers):
     # boundaries contain a-z A-Z 0-9 ' ( ) + _ , - . / : = ?
     # (whitespace allowed except as the last char)
-    ptn = r"^multipart/form-data; *(.*; *)?boundary=([^;]+)"
+    ptn = r"^multipart/form-data *; *(.*; *)?boundary=([^;]+)"
     ct = headers["content-type"]
     m = re.match(ptn, ct, re.IGNORECASE)
     if not m:
@@ -685,6 +798,14 @@ def read_header(sr):
     return ret[:ofs].decode("utf-8", "surrogateescape").lstrip("\r\n").split("\r\n")
 
 
+def gen_filekey(salt, fspath, fsize, inode):
+    return base64.urlsafe_b64encode(
+        hashlib.sha512(
+            "{} {} {} {}".format(salt, fspath, fsize, inode).encode("utf-8", "replace")
+        ).digest()
+    ).decode("ascii")
+
+
 def humansize(sz, terse=False):
     for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
         if sz < 1024:
@@ -985,8 +1106,12 @@ def read_socket_chunked(sr, log=None):
         raise Pebkac(400, err)
 
     if chunklen == 0:
-        sr.recv(2) # \r\n after final chunk
-        return
+        x = sr.recv_ex(2)
+        if x == b"\r\n":
+            return
+
+        m = "protocol error after final chunk: want b'\\r\\n', got {!r}"
+        raise Pebkac(400, m.format(x))
 
     if log:
         log("receiving {} byte chunk".format(chunklen))
@@ -994,7 +1119,10 @@ def read_socket_chunked(sr, log=None):
     for chunk in read_socket(sr, chunklen):
         yield chunk
 
-    sr.recv(2) # \r\n after each chunk too
+    x = sr.recv_ex(2)
+    if x != b"\r\n":
+        m = "protocol error in chunk separator: want b'\\r\\n', got {!r}"
+        raise Pebkac(400, m.format(x))
 
 
 def yieldfile(fn):
@@ -1089,7 +1217,7 @@ def statdir(logger, scandir, lstat, top):
         logger(src, "{} @ {}".format(repr(ex), top), 1)
 
 
-def rmdirs(logger, scandir, lstat, top):
+def rmdirs(logger, scandir, lstat, top, depth):
     if not os.path.exists(fsenc(top)) or not os.path.isdir(fsenc(top)):
         top = os.path.dirname(top)
 
@@ -1099,15 +1227,16 @@ def rmdirs(logger, scandir, lstat, top):
     ok = []
     ng = []
     for d in dirs[::-1]:
-        a, b = rmdirs(logger, scandir, lstat, d)
+        a, b = rmdirs(logger, scandir, lstat, d, depth + 1)
        ok += a
         ng += b
 
-    try:
-        os.rmdir(fsenc(top))
-        ok.append(top)
-    except:
-        ng.append(top)
+    if depth:
+        try:
+            os.rmdir(fsenc(top))
+            ok.append(top)
+        except:
+            ng.append(top)
 
     return ok, ng
 
copyparty/web/browser.css

@@ -1,6 +1,7 @@
 :root {
 	--grid-sz: 10em;
 	--grid-ln: 3;
+	--nav-sz: 16em;
 }
 * {
 	line-height: 1.2em;
@@ -164,6 +165,7 @@ a, #files tbody div a:last-child {
 .logue {
 	padding: .2em 1.5em;
 }
+.logue.hidden,
 .logue:empty {
 	display: none;
 }
@@ -173,6 +175,9 @@ a, #files tbody div a:last-child {
 #epi.logue {
 	margin: .8em 0;
 }
+.mdo {
+	max-width: 52em;
+}
 .mdo,
 .mdo * {
 	line-height: 1.4em;
@@ -568,7 +573,7 @@ html.light #wfm a:not(.en) {
 	padding: .5em;
 	border-radius: 0 1em 1em 0;
 	border-width: .15em .3em .3em 0;
-	max-width: 40em;
+	max-width: 41em;
 }
 .opbox input {
 	margin: .5em;
@@ -753,6 +758,9 @@ input.eq_gain {
 	color: #400;
 	text-shadow: none;
 }
+.tgl.btn.on:hover {
+	background: #fe8;
+}
 #detree {
 	padding: .3em .5em;
 	font-size: 1.5em;
@@ -788,6 +796,19 @@ input.eq_gain {
 	width: calc(100% - 2em);
 	line-height: 1em;
 }
+#tree.nowrap #treeul li {
+	min-height: 1.4em;
+	white-space: nowrap;
+}
+#tree.nowrap #treeul a+a:hover {
+	background: rgba(34, 34, 34, 0.67);
+	min-width: calc(var(--nav-sz) - 2em);
+	width: auto;
+}
+html.light #tree.nowrap #treeul a+a:hover {
+	background: rgba(255, 255, 255, 0.67);
+	color: #000;
+}
 #treeul a+a:hover {
 	background: #222;
 	color: #fff;
@@ -879,6 +900,9 @@ input.eq_gain {
 #u2turbo.on+#u2tdate {
 	opacity: 1;
 }
+#wraptree.on+#hovertree {
+	display: none;
+}
 #ghead {
 	background: #3c3c3c;
 	border: 1px solid #444;
@@ -1113,6 +1137,9 @@ html.light .tgl.btn.on {
 	background: #4a0;
 	color: #fff;
 }
+html.light .tgl.btn.on:hover {
+	background: #5c0;
+}
 html.light #srv_info {
 	color: #c83;
 	background: #eee;
@@ -1138,7 +1165,7 @@ html.light #treeul a.hl:hover {
 html.light #tree li {
 	border-color: #f7f7f7 #fff #ddd #fff;
 }
-html.light #tree a:hover {
+html.light #treeul a:hover {
 	background: #fff;
 }
 html.light #tree ul {
@@ -1576,7 +1603,7 @@ html.light #bbox-overlay figcaption a {
 	border-radius: .5em;
 	border-width: 1vw;
 	color: #fff;
-	transition: all 0.2s;
+	transition: all 0.12s;
 }
 #drops .dropdesc.hl.ok {
 	border-color: #fff;
@@ -1597,6 +1624,16 @@ html.light #bbox-overlay figcaption a {
 	vertical-align: middle;
 	text-align: center;
 }
+#drops .dropdesc>div>div {
+	position: absolute;
+	top: 40%;
+	top: calc(50% - .5em);
+	left: -.8em;
+}
+#drops .dropdesc>div>div+div {
+	left: auto;
+	right: -.8em;
+}
 #drops .dropzone {
 	z-index: 80386;
 	height: 50%;
@@ -1648,6 +1685,9 @@ html.light #bbox-overlay figcaption a {
 	padding: .5em;
 	font-size: .9em;
 }
+html.light #u2err.err {
+	color: #f07;
+}
 #u2btn {
 	color: #eee;
 	background: #555;
@@ -1935,7 +1975,8 @@ html.light #u2foot .warn span {
 	background: #900;
 	border-color: #d06;
 }
-#u2tab a>span {
+#u2tab a>span,
+#unpost a>span {
 	font-weight: bold;
 	font-style: italic;
 	color: #fff;
copyparty/web/browser.html

@@ -135,6 +135,8 @@
 	have_unpost = {{ have_unpost|tojson }},
 	have_zip = {{ have_zip|tojson }},
 	readme = {{ readme|tojson }};
+
+document.documentElement.setAttribute("class", localStorage.lightmode == 1 ? "light" : "dark");
 </script>
 <script src="/.cpr/util.js?_={{ ts }}"></script>
 <script src="/.cpr/browser.js?_={{ ts }}"></script>
@@ -9,7 +9,7 @@ function dbg(msg) {
|
|||||||
|
|
||||||
// toolbar
|
// toolbar
|
||||||
ebi('ops').innerHTML = (
|
ebi('ops').innerHTML = (
|
||||||
'<a href="#" data-dest="" tt="close submenu">---</a>\n' +
|
'<a href="#" data-dest="" tt="close submenu">--</a>\n' +
|
||||||
(have_up2k_idx ? (
|
(have_up2k_idx ? (
|
||||||
'<a href="#" data-perm="read" data-dest="search" tt="search for files by attributes, path/name, music tags, or any combination of those.$N$N<code>foo bar</code> = must contain both foo and bar,$N<code>foo -bar</code> = must contain foo but not bar,$N<code>^yana .opus$</code> = must start with yana and have the opus extension">🔎</a>\n' +
|
'<a href="#" data-perm="read" data-dest="search" tt="search for files by attributes, path/name, music tags, or any combination of those.$N$N<code>foo bar</code> = must contain both foo and bar,$N<code>foo -bar</code> = must contain foo but not bar,$N<code>^yana .opus$</code> = must start with yana and have the opus extension">🔎</a>\n' +
|
||||||
(have_del && have_unpost ? '<a href="#" data-dest="unpost" tt="unpost: delete your recent uploads">🧯</a>\n' : '') +
|
(have_del && have_unpost ? '<a href="#" data-dest="unpost" tt="unpost: delete your recent uploads">🧯</a>\n' : '') +
|
||||||
@@ -70,10 +70,6 @@ ebi('op_up2k').innerHTML = (
|
|||||||
' <input type="checkbox" id="ask_up" />\n' +
|
' <input type="checkbox" id="ask_up" />\n' +
|
||||||
' <label for="ask_up" tt="ask for confirmation before upload starts">💭</label>\n' +
|
' <label for="ask_up" tt="ask for confirmation before upload starts">💭</label>\n' +
|
||||||
' </td>\n' +
|
' </td>\n' +
|
||||||
' <td rowspan="2">\n' +
|
|
||||||
' <input type="checkbox" id="flag_en" />\n' +
|
|
||||||
' <label for="flag_en" tt="ensure only one tab is uploading at a time $N (other tabs must have this enabled too)">💤</label>\n' +
|
|
||||||
' </td>\n' +
|
|
||||||
(have_up2k_idx ? (
|
(have_up2k_idx ? (
|
||||||
' <td data-perm="read" rowspan="2">\n' +
|
' <td data-perm="read" rowspan="2">\n' +
|
||||||
' <input type="checkbox" id="fsearch" />\n' +
|
' <input type="checkbox" id="fsearch" />\n' +
|
||||||
@@ -137,8 +133,8 @@ ebi('op_up2k').innerHTML = (
|
|||||||
var o = mknod('div');
|
var o = mknod('div');
|
||||||
o.innerHTML = (
|
o.innerHTML = (
|
||||||
'<div id="drops">\n' +
|
'<div id="drops">\n' +
|
||||||
' <div class="dropdesc" id="up_zd"><div>🚀 Upload<br /><span></span></div></div>\n' +
|
' <div class="dropdesc" id="up_zd"><div>🚀 Upload<br /><span></span><div>🚀</div><div>🚀</div></div></div>\n' +
|
||||||
' <div class="dropdesc" id="srch_zd"><div>🔎 Search<br /><span></span></div></div>\n' +
|
' <div class="dropdesc" id="srch_zd"><div>🔎 Search<br /><span></span><div>🔎</div><div>🔎</div></div></div>\n' +
|
||||||
' <div class="dropzone" id="up_dz" v="up_zd"></div>\n' +
|
' <div class="dropzone" id="up_dz" v="up_zd"></div>\n' +
|
||||||
' <div class="dropzone" id="srch_dz" v="srch_zd"></div>\n' +
|
' <div class="dropzone" id="srch_dz" v="srch_zd"></div>\n' +
|
||||||
'</div>'
|
'</div>'
|
||||||
@@ -168,6 +164,8 @@ ebi('op_cfg').innerHTML = (
|
|||||||
' <div>\n' +
|
' <div>\n' +
|
||||||
' <a id="u2turbo" class="tgl btn ttb" href="#" tt="the yolo button, you probably DO NOT want to enable this:$N$Nuse this if you were uploading a huge amount of files and had to restart for some reason, and want to continue the upload ASAP$N$Nthis replaces the hash-check with a simple <em>"does this have the same filesize on the server?"</em> so if the file contents are different it will NOT be uploaded$N$Nyou should turn this off when the upload is done, and then "upload" the same files again to let the client verify them">turbo</a>\n' +
|
' <a id="u2turbo" class="tgl btn ttb" href="#" tt="the yolo button, you probably DO NOT want to enable this:$N$Nuse this if you were uploading a huge amount of files and had to restart for some reason, and want to continue the upload ASAP$N$Nthis replaces the hash-check with a simple <em>"does this have the same filesize on the server?"</em> so if the file contents are different it will NOT be uploaded$N$Nyou should turn this off when the upload is done, and then "upload" the same files again to let the client verify them">turbo</a>\n' +
|
||||||
' <a id="u2tdate" class="tgl btn ttb" href="#" tt="has no effect unless the turbo button is enabled$N$Nreduces the yolo factor by a tiny amount; checks whether the file timestamps on the server matches yours$N$Nshould <em>theoretically</em> catch most unfinished/corrupted uploads, but is not a substitute for doing a verification pass with turbo disabled afterwards">date-chk</a>\n' +
|
' <a id="u2tdate" class="tgl btn ttb" href="#" tt="has no effect unless the turbo button is enabled$N$Nreduces the yolo factor by a tiny amount; checks whether the file timestamps on the server matches yours$N$Nshould <em>theoretically</em> catch most unfinished/corrupted uploads, but is not a substitute for doing a verification pass with turbo disabled afterwards">date-chk</a>\n' +
|
||||||
|
' <a id="flag_en" class="tgl btn" href="#" tt="ensure only one tab is uploading at a time $N (other tabs must have this enabled too)">💤</a>\n' +
|
||||||
|
' </td>\n' +
|
||||||
' </div>\n' +
|
' </div>\n' +
|
||||||
'</div>\n' +
|
'</div>\n' +
|
||||||
'<div><h3>key notation</h3><div id="key_notation"></div></div>\n' +
|
'<div><h3>key notation</h3><div id="key_notation"></div></div>\n' +
|
||||||
@@ -179,10 +177,12 @@ ebi('op_cfg').innerHTML = (
|
|||||||
ebi('tree').innerHTML = (
|
ebi('tree').innerHTML = (
|
||||||
'<div id="treeh">\n' +
|
'<div id="treeh">\n' +
|
||||||
' <a href="#" id="detree" tt="show breadcrumbs$NHotkey: B">🍞...</a>\n' +
|
' <a href="#" id="detree" tt="show breadcrumbs$NHotkey: B">🍞...</a>\n' +
|
||||||
' <a href="#" class="btn" step="2" id="twobytwo" tt="Hotkey: A">+</a>\n' +
|
' <a href="#" class="btn" step="2" id="twobytwo" tt="Hotkey: D">+</a>\n' +
|
||||||
' <a href="#" class="btn" step="-2" id="twig" tt="Hotkey: D">–</a>\n' +
|
' <a href="#" class="btn" step="-2" id="twig" tt="Hotkey: A">–</a>\n' +
|
||||||
|
' <a href="#" class="btn" id="visdir" tt="scroll to selected folder">🎯</a>\n' +
|
||||||
' <a href="#" class="tgl btn" id="dyntree" tt="autogrow as tree expands">a</a>\n' +
|
' <a href="#" class="tgl btn" id="dyntree" tt="autogrow as tree expands">a</a>\n' +
|
||||||
' <a href="#" class="btn" id="visdir" tt="scroll to selected folder">v</a>\n' +
|
' <a href="#" class="tgl btn" id="wraptree" tt="word wrap">↵</a>\n' +
|
||||||
|
' <a href="#" class="tgl btn" id="hovertree" tt="reveal overflowing lines on hover$N( breaks scrolling unless mouse $N cursor is in the left gutter )">👀</a>\n' +
|
||||||
'</div>\n' +
|
'</div>\n' +
|
||||||
'<ul id="treeul"></ul>\n' +
|
'<ul id="treeul"></ul>\n' +
|
||||||
'<div id="thx_ff"> </div>'
|
'<div id="thx_ff"> </div>'
|
||||||
@@ -199,6 +199,9 @@ ebi('tree').innerHTML = (
|
|||||||
|
|
||||||
function opclick(e) {
|
function opclick(e) {
|
||||||
var dest = this.getAttribute('data-dest');
|
var dest = this.getAttribute('data-dest');
|
||||||
|
if (QS('#op_' + dest + '.act'))
|
||||||
|
dest = '';
|
||||||
|
|
||||||
swrite('opmode', dest || null);
|
swrite('opmode', dest || null);
|
||||||
if (ctrl(e))
|
if (ctrl(e))
|
||||||
return;
|
return;
|
||||||
@@ -224,9 +227,22 @@ function goto(dest) {
|
|||||||
clmod(obj[a], 'act');
|
clmod(obj[a], 'act');
|
||||||
|
|
||||||
if (dest) {
|
if (dest) {
|
||||||
var ui = ebi('op_' + dest);
|
var ui = ebi('op_' + dest),
|
||||||
|
lnk = QS('#ops>a[data-dest=' + dest + ']'),
|
||||||
|
nps = lnk.getAttribute('data-perm');
|
||||||
|
|
||||||
|
nps = nps && nps.length ? nps.split(' ') : [];
|
||||||
|
|
||||||
|
if (perms.length)
|
||||||
|
for (var a = 0; a < nps.length; a++)
|
||||||
|
if (!has(perms, nps[a]))
|
||||||
|
return;
|
||||||
|
|
||||||
|
if (!has(perms, 'read') && !has(perms, 'write') && (dest == 'up2k'))
|
||||||
|
return;
|
||||||
|
|
||||||
clmod(ui, 'act', true);
|
clmod(ui, 'act', true);
|
||||||
QS('#ops>a[data-dest=' + dest + ']').className += " act";
|
lnk.className += " act";
|
||||||
|
|
||||||
var fn = window['goto_' + dest];
|
var fn = window['goto_' + dest];
|
||||||
if (fn)
|
if (fn)
|
||||||
@@ -275,24 +291,13 @@ var mpl = (function () {
|
|||||||
|
|
||||||
var r = {
|
var r = {
|
||||||
"pb_mode": sread('pb_mode') || 'loop-folder',
|
"pb_mode": sread('pb_mode') || 'loop-folder',
|
||||||
"preload": bcfg_get('au_preload', true),
|
|
||||||
"clip": bcfg_get('au_npclip', false),
|
|
||||||
"os_ctl": bcfg_get('au_os_ctl', have_mctl) && have_mctl,
|
"os_ctl": bcfg_get('au_os_ctl', have_mctl) && have_mctl,
|
||||||
"osd_cv": bcfg_get('au_osd_cv', true),
|
|
||||||
};
|
|
||||||
|
|
||||||
ebi('au_preload').onclick = function (e) {
|
|
||||||
ev(e);
|
|
||||||
r.preload = !r.preload;
|
|
||||||
bcfg_set('au_preload', r.preload);
|
|
||||||
};
|
|
||||||
|
|
||||||
ebi('au_npclip').onclick = function (e) {
|
|
||||||
ev(e);
|
|
||||||
r.clip = !r.clip;
|
|
||||||
bcfg_set('au_npclip', r.clip);
|
|
||||||
clmod(ebi('wtoggle'), 'np', r.clip && mp.au);
|
|
||||||
};
|
};
|
||||||
|
bcfg_bind(r, 'preload', 'au_preload', true);
|
||||||
|
bcfg_bind(r, 'osd_cv', 'au_osd_cv', true);
|
||||||
|
bcfg_bind(r, 'clip', 'au_npclip', false, function (v) {
|
||||||
|
clmod(ebi('wtoggle'), 'np', v && mp.au);
|
||||||
|
});
|
||||||
|
|
||||||
ebi('au_os_ctl').onclick = function (e) {
|
ebi('au_os_ctl').onclick = function (e) {
|
||||||
ev(e);
|
ev(e);
|
||||||
@@ -302,12 +307,6 @@ var mpl = (function () {
|
|||||||
toast.err(5, 'need firefox 82+ or chrome 73+\n(or iOS 15+ supposedly)');
|
toast.err(5, 'need firefox 82+ or chrome 73+\n(or iOS 15+ supposedly)');
|
||||||
};
|
};
|
||||||
|
|
||||||
ebi('au_osd_cv').onclick = function (e) {
|
|
||||||
ev(e);
|
|
||||||
r.osd_cv = !r.osd_cv;
|
|
||||||
bcfg_set('au_osd_cv', r.osd_cv);
|
|
||||||
};
|
|
||||||
|
|
||||||
function draw_pb_mode() {
|
function draw_pb_mode() {
|
||||||
var btns = QSA('#pb_mode>a');
|
var btns = QSA('#pb_mode>a');
|
||||||
for (var a = 0, aa = btns.length; a < aa; a++) {
|
for (var a = 0, aa = btns.length; a < aa; a++) {
|
||||||
@@ -367,7 +366,7 @@ var mpl = (function () {
|
|||||||
|
|
||||||
for (var a = 0, aa = files.length; a < aa; a++) {
|
for (var a = 0, aa = files.length; a < aa; a++) {
|
||||||
if (/^(cover|folder)\.(jpe?g|png|gif)$/.test(files[a].textContent)) {
|
if (/^(cover|folder)\.(jpe?g|png|gif)$/.test(files[a].textContent)) {
|
||||||
cover = files[a].getAttribute('href');
|
cover = noq_href(files[a]);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -426,7 +425,7 @@ function MPlayer() {
|
|||||||
link = tds[1].getElementsByTagName('a');
|
link = tds[1].getElementsByTagName('a');
|
||||||
|
|
||||||
link = link[link.length - 1];
|
link = link[link.length - 1];
|
||||||
var url = link.getAttribute('href'),
|
var url = noq_href(link),
|
||||||
m = re_audio.exec(url);
|
m = re_audio.exec(url);
|
||||||
|
|
||||||
if (m) {
|
if (m) {
|
||||||
@@ -732,6 +731,12 @@ var pbar = (function () {
|
|||||||
for (var p = 1, mins = adur / 60; p <= mins; p++)
|
for (var p = 1, mins = adur / 60; p <= mins; p++)
|
||||||
pctx.fillRect(Math.floor(sm * p * 60), 0, 2, pc.h);
|
pctx.fillRect(Math.floor(sm * p * 60), 0, 2, pc.h);
|
||||||
|
|
||||||
|
pctx.font = '.5em sans-serif';
|
||||||
|
pctx.fillStyle = light ? 'rgba(0,64,0,0.9)' : 'rgba(192,255,96,1)';
|
||||||
|
for (var p = 1, mins = adur / 60; p <= mins; p++) {
|
||||||
|
pctx.fillText(p, Math.floor(sm * p * 60 + 3), pc.h / 3);
|
||||||
|
}
|
||||||
|
|
||||||
pctx.fillStyle = light ? 'rgba(0,0,0,1)' : 'rgba(255,255,255,1)';
|
pctx.fillStyle = light ? 'rgba(0,0,0,1)' : 'rgba(255,255,255,1)';
|
||||||
for (var p = 1, mins = adur / 600; p <= mins; p++)
|
for (var p = 1, mins = adur / 600; p <= mins; p++)
|
||||||
pctx.fillRect(Math.floor(sm * p * 600), 0, 2, pc.h);
|
pctx.fillRect(Math.floor(sm * p * 600), 0, 2, pc.h);
|
||||||
@@ -1063,6 +1068,33 @@ var audio_eq = (function () {
|
|||||||
"last_au": null
|
"last_au": null
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// some browsers have insane high-frequency boost
|
||||||
|
// (or rather the actual problem is Q but close enough)
|
||||||
|
r.cali = (function () {
|
||||||
|
try {
|
||||||
|
var ac = new AudioContext(),
|
||||||
|
fi = ac.createBiquadFilter(),
|
||||||
|
freqs = new Float32Array(1),
|
||||||
|
mag = new Float32Array(1),
|
||||||
|
phase = new Float32Array(1);
|
||||||
|
|
||||||
|
freqs[0] = 14000;
|
||||||
|
fi.type = 'peaking';
|
||||||
|
fi.frequency.value = 18000;
|
||||||
|
fi.Q.value = 0.8;
|
||||||
|
fi.gain.value = 1;
|
||||||
|
fi.getFrequencyResponse(freqs, mag, phase);
|
||||||
|
|
||||||
|
return mag[0]; // 1.0407 good, 1.0563 bad
|
||||||
|
}
|
||||||
|
catch (ex) {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
})();
|
||||||
|
console.log('eq cali: ' + r.cali);
|
||||||
|
|
||||||
|
var e1 = r.cali < 1.05;
|
||||||
|
|
||||||
var cfg = [ // hz, q, g
|
var cfg = [ // hz, q, g
|
||||||
[31.25 * 0.88, 0, 1.4], // shelf
|
[31.25 * 0.88, 0, 1.4], // shelf
|
||||||
[31.25 * 1.04, 0.7, 0.96], // peak
|
[31.25 * 1.04, 0.7, 0.96], // peak
|
||||||
@@ -1073,10 +1105,10 @@ var audio_eq = (function () {
|
|||||||
[1000, 0.9, 1.1],
|
[1000, 0.9, 1.1],
|
||||||
[2000, 0.9, 1.105],
|
[2000, 0.9, 1.105],
|
||||||
[4000, 0.88, 1.05],
|
[4000, 0.88, 1.05],
|
||||||
[8000 * 1.006, 0.73, 1.24],
|
[8000 * 1.006, 0.73, e1 ? 1.24 : 1.2],
|
||||||
[16000 * 0.89, 0.7, 1.26], // peak
|
[16000 * 0.89, 0.7, e1 ? 1.26 : 1.2], // peak
|
||||||
[16000 * 1.13, 0.82, 1.09], // peak
|
[16000 * 1.13, 0.82, e1 ? 1.09 : 0.75], // peak
|
||||||
[16000 * 1.205, 0, 1.9] // shelf
|
[16000 * 1.205, 0, e1 ? 1.9 : 1.85] // shelf
|
||||||
];
|
];
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -1253,13 +1285,7 @@ var audio_eq = (function () {
|
|||||||
txt[a].onkeydown = eq_keydown;
|
txt[a].onkeydown = eq_keydown;
|
||||||
}
|
}
|
||||||
|
|
||||||
r.en = bcfg_get('au_eq', false);
|
bcfg_bind(r, 'en', 'au_eq', false, r.apply);
|
||||||
ebi('au_eq').onclick = function (e) {
|
|
||||||
ev(e);
|
|
||||||
r.en = !r.en;
|
|
||||||
bcfg_set('au_eq', r.en);
|
|
||||||
r.apply();
|
|
||||||
};
|
|
||||||
|
|
||||||
r.draw();
|
r.draw();
|
||||||
return r;
|
return r;
|
||||||
@@ -1327,7 +1353,7 @@ function play(tid, is_ev, seek, call_depth) {
|
|||||||
mp.au = mp.au_ogvjs = new OGVPlayer();
|
mp.au = mp.au_ogvjs = new OGVPlayer();
|
||||||
}
|
}
|
||||||
catch (ex) {
|
catch (ex) {
|
||||||
return toast.err(30, 'your browser cannot play ogg/vorbis/opus\n\n' + ex +
|
return toast.err(30, 'your browser cannot play ogg/vorbis/opus\n\n' + basenames(ex) +
|
||||||
'\n\n<a href="#" onclick="new OGVPlayer();">click here</a> for a full crash report');
|
'\n\n<a href="#" onclick="new OGVPlayer();">click here</a> for a full crash report');
|
||||||
}
|
}
|
||||||
attempt_play = is_ev;
|
attempt_play = is_ev;
|
||||||
@@ -1419,7 +1445,7 @@ function play(tid, is_ev, seek, call_depth) {
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
catch (ex) {
|
catch (ex) {
|
||||||
toast.err(0, esc('playback failed: ' + ex));
|
toast.err(0, esc('playback failed: ' + basenames(ex)));
|
||||||
}
|
}
|
||||||
setclass(oid, 'play');
|
setclass(oid, 'play');
|
||||||
setTimeout(next_song, 500);
|
setTimeout(next_song, 500);
|
||||||
@@ -1453,7 +1479,7 @@ function evau_error(e) {
|
|||||||
|
|
||||||
err += '\n\nFile: «' + uricom_dec(eplaya.src.split('/').slice(-1)[0])[0] + '»';
|
err += '\n\nFile: «' + uricom_dec(eplaya.src.split('/').slice(-1)[0])[0] + '»';
|
||||||
|
|
||||||
toast.warn(15, esc(err + ''));
|
toast.warn(15, esc(basenames(err)));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
@@ -1494,6 +1520,7 @@ function autoplay_blocked(seek) {
|
|||||||
go.textContent = 'Play "' + fn + '"';
|
go.textContent = 'Play "' + fn + '"';
|
||||||
go.onclick = function (e) {
|
go.onclick = function (e) {
|
||||||
unblocked(e);
|
unblocked(e);
|
||||||
|
toast.hide();
|
||||||
if (mp.au !== mp.au_ogvjs)
|
if (mp.au !== mp.au_ogvjs)
|
||||||
// chrome 91 may permanently taint on a failed play()
|
// chrome 91 may permanently taint on a failed play()
|
||||||
// depending on win10 settings or something? idk
|
// depending on win10 settings or something? idk
|
||||||
@@ -1538,6 +1565,9 @@ function play_linked() {
|
|||||||
|
|
||||||
|
|
||||||
function sortfiles(nodes) {
|
function sortfiles(nodes) {
|
||||||
|
if (!nodes.length)
|
||||||
|
return nodes;
|
||||||
|
|
||||||
var sopts = jread('fsort', [["href", 1, ""]]);
|
var sopts = jread('fsort', [["href", 1, ""]]);
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -1570,7 +1600,7 @@ function sortfiles(nodes) {
|
|||||||
if ((v + '').indexOf('<a ') === 0)
|
if ((v + '').indexOf('<a ') === 0)
|
||||||
v = v.split('>')[1];
|
v = v.split('>')[1];
|
||||||
else if (name == "href" && v) {
|
else if (name == "href" && v) {
|
||||||
if (v.slice(-1) == '/')
|
if (v.split('?')[0].slice(-1) == '/')
|
||||||
v = '\t' + v;
|
v = '\t' + v;
|
||||||
|
|
||||||
v = uricom_dec(v)[0]
|
v = uricom_dec(v)[0]
|
||||||
@@ -1881,12 +1911,11 @@ var fileman = (function () {
|
|||||||
rn_reset(0);
|
rn_reset(0);
|
||||||
tt.att(rui);
|
tt.att(rui);
|
||||||
|
|
||||||
var adv = bcfg_get('rn_adv', false),
|
|
||||||
cs = bcfg_get('rn_case', false);
|
|
||||||
|
|
||||||
function sadv() {
|
function sadv() {
|
||||||
ebi('rn_vadv').style.display = ebi('rn_case').style.display = adv ? '' : 'none';
|
ebi('rn_vadv').style.display = ebi('rn_case').style.display = r.adv ? '' : 'none';
|
||||||
}
|
}
|
||||||
|
bcfg_bind(r, 'adv', 'rn_adv', false, sadv);
|
||||||
|
bcfg_bind(r, 'cs', 'rn_case', false);
|
||||||
sadv();
|
sadv();
|
||||||
|
|
||||||
function rn_ok(n, ok) {
|
function rn_ok(n, ok) {
|
||||||
@@ -1906,17 +1935,6 @@ var fileman = (function () {
|
|||||||
|
|
||||||
ebi('rn_cancel').onclick = rn_cancel;
|
ebi('rn_cancel').onclick = rn_cancel;
|
||||||
ebi('rn_apply').onclick = rn_apply;
|
ebi('rn_apply').onclick = rn_apply;
|
||||||
ebi('rn_adv').onclick = function (e) {
|
|
||||||
ev(e);
|
|
||||||
adv = !adv;
|
|
||||||
bcfg_set('rn_adv', adv);
|
|
||||||
sadv();
|
|
||||||
};
|
|
||||||
ebi('rn_case').onclick = function (e) {
|
|
||||||
ev(e);
|
|
||||||
cs = !cs;
|
|
||||||
bcfg_set('rn_case', cs);
|
|
||||||
};
|
|
||||||
|
|
||||||
var ire = ebi('rn_re'),
|
var ire = ebi('rn_re'),
|
||||||
ifmt = ebi('rn_fmt'),
|
ifmt = ebi('rn_fmt'),
|
||||||
@@ -1986,7 +2004,7 @@ var fileman = (function () {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
if (ptn)
|
if (ptn)
|
||||||
re = new RegExp(ptn, cs ? 'i' : '');
|
re = new RegExp(ptn, r.cs ? 'i' : '');
|
||||||
}
|
}
|
||||||
catch (ex) {
|
catch (ex) {
|
||||||
return toast.err(5, esc('invalid regex:\n' + ex));
|
return toast.err(5, esc('invalid regex:\n' + ex));
|
||||||
@@ -2146,7 +2164,7 @@ var fileman = (function () {
|
|||||||
links = QSA('#files tbody td:nth-child(2) a');
|
links = QSA('#files tbody td:nth-child(2) a');
|
||||||
|
|
||||||
for (var a = 0, aa = links.length; a < aa; a++)
|
for (var a = 0, aa = links.length; a < aa; a++)
|
||||||
indir.push(vsplit(links[a].getAttribute('href'))[1]);
|
indir.push(vsplit(noq_href(links[a]))[1]);
|
||||||
|
|
||||||
for (var a = 0; a < r.clip.length; a++) {
|
for (var a = 0; a < r.clip.length; a++) {
|
||||||
var found = false;
|
var found = false;
|
||||||
@@ -2254,36 +2272,12 @@ var thegrid = (function () {
 lfiles.parentNode.insertBefore(gfiles, lfiles);

 var r = {
-'thumbs': bcfg_get('thumbs', true),
-'en': bcfg_get('griden', false),
-'sel': bcfg_get('gridsel', false),
 'sz': clamp(fcfg_get('gridsz', 10), 4, 40),
 'ln': clamp(icfg_get('gridln', 3), 1, 7),
 'isdirty': true,
 'bbox': null
 };

-ebi('thumbs').onclick = function (e) {
-ev(e);
-r.thumbs = !r.thumbs;
-bcfg_set('thumbs', r.thumbs);
-r.setdirty();
-};
-
-ebi('griden').onclick = ebi('wtgrid').onclick = function (e) {
-ev(e);
-r.en = !r.en;
-bcfg_set('griden', r.en);
-if (r.en) {
-loadgrid();
-}
-else {
-ungrid();
-}
-pbar.onresize();
-vbar.onresize();
-};
-
 var btnclick = function (e) {
 ev(e);
 var s = this.getAttribute('s'),
@@ -2310,12 +2304,14 @@ var thegrid = (function () {
 for (var a = 0; a < links.length; a++)
 links[a].onclick = btnclick;

-ebi('gridsel').onclick = function (e) {
-ev(e);
-r.sel = !r.sel;
-bcfg_set('gridsel', r.sel);
-r.loadsel();
-};
+bcfg_bind(r, 'thumbs', 'thumbs', true, r.setdirty);
+bcfg_bind(r, 'sel', 'gridsel', false, r.loadsel);
+bcfg_bind(r, 'en', 'griden', false, function (v) {
+v ? loadgrid() : ungrid();
+pbar.onresize();
+vbar.onresize();
+});
+ebi('wtgrid').onclick = ebi('griden').onclick;

 r.setvis = function (vis) {
 (r.en ? gfiles : lfiles).style.display = vis ? '' : 'none';
@@ -2361,7 +2357,7 @@ var thegrid = (function () {
 return true;

 var oth = ebi(this.getAttribute('ref')),
-href = this.getAttribute('href'),
+href = noq_href(this),
 aplay = ebi('a' + oth.getAttribute('id')),
 is_img = /\.(gif|jpe?g|png|webp|webm|mp4)(\?|$)/i.test(href),
 in_tree = null,
@@ -2369,7 +2365,7 @@ var thegrid = (function () {
 td = oth.closest('td').nextSibling,
 tr = td.parentNode;

-if (/\/(\?|$)/.test(href)) {
+if (href.endsWith('/')) {
 var ta = QSA('#treeul a.hl+ul>li>a+a'),
 txt = oth.textContent.slice(0, -1);

@@ -2408,7 +2404,7 @@ var thegrid = (function () {
 var tr = ebi(ths[a].getAttribute('ref')).closest('tr'),
 cl = tr.getAttribute('class') || '';

-if (ths[a].getAttribute('href').endsWith('/'))
+if (noq_href(ths[a]).endsWith('/'))
 cl += ' dir';

 ths[a].setAttribute('class', cl);
@@ -2472,15 +2468,16 @@ var thegrid = (function () {
 var files = QSA('#files>tbody>tr>td:nth-child(2) a[id]');
 for (var a = 0, aa = files.length; a < aa; a++) {
 var ao = files[a],
-href = esc(ao.getAttribute('href')),
+ohref = esc(ao.getAttribute('href')),
+href = ohref.split('?')[0],
 name = uricom_dec(vsplit(href)[1])[0],
 ref = ao.getAttribute('id'),
-isdir = href.split('?')[0].slice(-1)[0] == '/',
+isdir = href.endsWith('/'),
 ac = isdir ? ' class="dir"' : '',
 ihref = href;

 if (r.thumbs) {
-ihref += (ihref.indexOf('?') === -1 ? '?' : '&') + 'th=' + (have_webp ? 'w' : 'j');
+ihref += '?th=' + (have_webp ? 'w' : 'j');
 if (href == "#")
 ihref = '/.cpr/ico/⏏️';
 }
@@ -2488,7 +2485,7 @@ var thegrid = (function () {
 ihref = '/.cpr/ico/folder';
 }
 else {
-var ar = href.split('?')[0].split('.');
+var ar = href.split('.');
 if (ar.length > 1)
 ar = ar.slice(1);

@@ -2506,7 +2503,7 @@ var thegrid = (function () {
 ihref = '/.cpr/ico/' + ihref.slice(0, -1);
 }

-html.push('<a href="' + href + '" ref="' + ref +
+html.push('<a href="' + ohref + '" ref="' + ref +
 '"' + ac + ' ttt="' + esc(name) + '"><img src="' +
 ihref + '" /><span' + ac + '>' + ao.innerHTML + '</span></a>');
 }
@@ -2834,14 +2831,15 @@ document.onkeydown = function (e) {

 clearTimeout(defer_timeout);
 defer_timeout = setTimeout(try_search, 2000);
-try_search();
+try_search(v);
 }

-function try_search() {
+function try_search(v) {
 if (Date.now() - search_in_progress > 30 * 1000) {
 clearTimeout(defer_timeout);
 clearTimeout(search_timeout);
-search_timeout = setTimeout(do_search, 200);
+search_timeout = setTimeout(do_search,
+v && v.length < (is_touch ? 4 : 3) ? 600 : 200);
 }
 }

@@ -2960,7 +2958,7 @@ document.onkeydown = function (e) {
 var r = res.hits[a],
 ts = parseInt(r.ts),
 sz = esc(r.sz + ''),
-rp = esc(r.rp + ''),
+rp = esc(uricom_dec(r.rp + '')[0]),
 ext = rp.lastIndexOf('.') > 0 ? rp.split('.').slice(-1)[0] : '%',
 links = linksplit(r.rp + '');

@@ -3019,15 +3017,26 @@ var treectl = (function () {
 "hidden": true,
 "ls_cb": null,
 "dir_cb": tree_scrollto,
-"ireadme": bcfg_get('ireadme', true)
 },
 entreed = false,
 fixedpos = false,
 prev_atop = null,
 prev_winh = null,
-dyn = bcfg_get('dyntree', true),
-dots = bcfg_get('dotfiles', false),
-treesz = clamp(icfg_get('treesz', 16), 4, 50);
+mentered = null,
+treesz = clamp(icfg_get('treesz', 16), 10, 50);
+
+bcfg_bind(treectl, 'ireadme', 'ireadme', true);
+bcfg_bind(treectl, 'dyn', 'dyntree', true, onresize);
+bcfg_bind(treectl, 'dots', 'dotfiles', false, function (v) {
+treectl.goto(get_evpath());
+});
+setwrap(bcfg_bind(treectl, 'wtree', 'wraptree', true, setwrap));
+bcfg_bind(treectl, 'htree', 'hovertree', true, reload_tree);
+
+function setwrap(v) {
+clmod(ebi('tree'), 'nowrap', !v);
+reload_tree();
+}

 treectl.entree = function (e) {
 ev(e);
@@ -3069,7 +3078,19 @@ var treectl = (function () {
 window.removeEventListener('scroll', onscroll);
 }

+function unmenter() {
+if (mentered) {
+mentered.style.position = '';
+mentered = null;
+}
+}
+
 function onscroll() {
+unmenter();
+onscroll2();
+}
+
+function onscroll2() {
 if (!entreed || treectl.hidden || document.visibilityState == 'hidden')
 return;

@@ -3106,24 +3127,28 @@ var treectl = (function () {
 tree.style.height = treeh < 10 ? '' : treeh + 'px';
 }
 }
-timer.add(onscroll, true);
+timer.add(onscroll2, true);

 function onresize(e) {
 if (!entreed || treectl.hidden)
 return;

 var q = '#tree',
-nq = 0;
+nq = -3;

-while (dyn) {
+while (treectl.dyn) {
 nq++;
 q += '>ul>li';
 if (!QS(q))
 break;
 }
-var w = treesz + nq;
-ebi('tree').style.width = w + 'em';
-ebi('wrap').style.marginLeft = w + 'em';
+var w = (treesz + Math.max(0, nq)) + 'em';
+try {
+document.documentElement.style.setProperty('--nav-sz', w);
+}
+catch (ex) { }
+ebi('tree').style.width = w;
+ebi('wrap').style.marginLeft = w;
 onscroll();
 }

@@ -3138,7 +3163,7 @@ var treectl = (function () {
 xhr.dst = dst;
 xhr.rst = rst;
 xhr.ts = Date.now();
-xhr.open('GET', dst + '?tree=' + top + (dots ? '&dots' : ''), true);
+xhr.open('GET', dst + '?tree=' + top + (treectl.dots ? '&dots' : ''), true);
 xhr.onreadystatechange = recvtree;
 xhr.send();
 enspin('#tree');
@@ -3209,19 +3234,39 @@ var treectl = (function () {
 }

 function reload_tree() {
-var cdir = get_evpath(),
-links = QSA('#treeul a+a');
+var cdir = get_vpath(),
+links = QSA('#treeul a+a'),
+nowrap = QS('#tree.nowrap') && QS('#hovertree.on');

 for (var a = 0, aa = links.length; a < aa; a++) {
-var href = links[a].getAttribute('href');
+var href = uricom_dec(links[a].getAttribute('href'))[0];
 links[a].setAttribute('class', href == cdir ? 'hl' : '');
 links[a].onclick = treego;
+links[a].onmouseenter = nowrap ? menter : null;
+links[a].onmouseleave = nowrap ? mleave : null;
 }
 links = QSA('#treeul li>a:first-child');
 for (var a = 0, aa = links.length; a < aa; a++) {
 links[a].setAttribute('dst', links[a].nextSibling.getAttribute('href'));
 links[a].onclick = treegrow;
 }
+ebi('tree').onscroll = nowrap ? unmenter : null;
+}
+
+function menter(e) {
+var p = this.offsetParent,
+pp = p.offsetParent,
+ppy = pp.offsetTop,
+y = this.offsetTop + p.offsetTop + ppy - p.scrollTop - pp.scrollTop - (ppy ? document.documentElement.scrollTop : 0);
+
+this.style.top = y + 'px';
+this.style.position = 'fixed';
+mentered = this;
+}
+
+function mleave(e) {
+this.style.position = '';
+mentered = null;
 }

 function treego(e) {
@@ -3242,7 +3287,7 @@ var treectl = (function () {
 xhr.top = url;
 xhr.hpush = hpush;
 xhr.ts = Date.now();
-xhr.open('GET', xhr.top + '?ls' + (dots ? '&dots' : ''), true);
+xhr.open('GET', xhr.top + '?ls' + (treectl.dots ? '&dots' : ''), true);
 xhr.onreadystatechange = recvls;
 xhr.send();
 if (hpush)
@@ -3300,8 +3345,9 @@ var treectl = (function () {
 nodes = sortfiles(nodes);
 for (var a = 0; a < nodes.length; a++) {
 var r = nodes[a],
-hname = esc(uricom_dec(r.href)[0]),
-sortv = (r.href.slice(-1) == '/' ? '\t' : '') + hname,
+bhref = r.href.split('?')[0],
+hname = esc(uricom_dec(bhref)[0]),
+sortv = (bhref.slice(-1) == '/' ? '\t' : '') + hname,
 ln = ['<tr><td>' + r.lead + '</td><td sortv="' + sortv +
 '"><a href="' + top + r.href + '">' + hname + '</a>', r.sz];

@@ -3342,9 +3388,7 @@ var treectl = (function () {

 clmod(ebi('epi'), 'mdo');
 if (res.readme)
-setTimeout(function () {
-show_readme(res.readme);
-}, 10);
+show_readme(res.readme);

 document.title = '⇆🎉 ' + uricom_dec(document.location.pathname.slice(1, -1))[0];

@@ -3391,32 +3435,13 @@ var treectl = (function () {
 return ret;
 }

-function tdots(e) {
-ev(e);
-dots = !dots;
-bcfg_set('dotfiles', dots);
-treectl.goto(get_evpath());
-}
-
-function treadme(e) {
-ev(e);
-treectl.ireadme = !treectl.ireadme;
-bcfg_set('ireadme', treectl.ireadme);
-}
-
-function dyntree(e) {
-ev(e);
-dyn = !dyn;
-bcfg_set('dyntree', dyn);
-onresize();
-}
-
 function scaletree(e) {
 ev(e);
 treesz += parseInt(this.getAttribute("step"));
 if (isNaN(treesz))
 treesz = 16;

+treesz = clamp(treesz, 2, 120);
 swrite('treesz', treesz);
 onresize();
 }
@@ -3424,9 +3449,6 @@ var treectl = (function () {
 ebi('entree').onclick = treectl.entree;
 ebi('detree').onclick = treectl.detree;
 ebi('visdir').onclick = tree_scrollto;
-ebi('dotfiles').onclick = tdots;
-ebi('ireadme').onclick = treadme;
-ebi('dyntree').onclick = dyntree;
 ebi('twig').onclick = scaletree;
 ebi('twobytwo').onclick = scaletree;
 if (sread('entreed') == 'tree')
@@ -3472,7 +3494,7 @@ function apply_perms(newperms) {

 var axs = [],
 aclass = '>',
-chk = ['read', 'write', 'move', 'delete'];
+chk = ['read', 'write', 'move', 'delete', 'get'];

 for (var a = 0; a < chk.length; a++)
 if (has(perms, chk[a]))
@@ -3526,7 +3548,7 @@ function apply_perms(newperms) {

 ebi('widget').style.display = have_read ? '' : 'none';
 thegrid.setvis(have_read);
-if (!have_read)
+if (!have_read && have_write)
 goto('up2k');
 }

@@ -3597,7 +3619,7 @@ var filecols = (function () {
 "pixfmt": "subsampling / pixel structure",
 "resw": "horizontal resolution",
 "resh": "veritcal resolution",
-"acs": "audio channels",
+"chs": "audio channels",
 "hz": "sample rate"
 };

@@ -3852,8 +3874,6 @@ function addcrc() {

 var light;
 (function () {
-light = bcfg_get('lightmode', false);
-
 function freshen() {
 clmod(document.documentElement, "light", light);
 clmod(document.documentElement, "dark", !light);
@@ -3862,12 +3882,7 @@ var light;
 vbar.draw();
 }

-ebi('lightmode').onclick = function (e) {
-ev(e);
-light = !light;
-bcfg_set('lightmode', light);
-freshen();
-};
+bcfg_bind(window, 'light', 'lightmode', false, freshen);

 freshen();
 })();
@@ -3975,7 +3990,7 @@ var msel = (function () {
 vbase = get_evpath();

 for (var a = 0, aa = links.length; a < aa; a++) {
-var href = links[a].getAttribute('href').replace(/\/$/, ""),
+var href = noq_href(links[a]).replace(/\/$/, ""),
 item = {};

 item.id = links[a].getAttribute('id');
@@ -4070,6 +4085,106 @@ var msel = (function () {
 })();


+(function () {
+if (!window.FormData)
+return;
+
+var form = QS('#op_mkdir>form'),
+tb = QS('#op_mkdir input[name="name"]'),
+sf = mknod('div');
+
+clmod(sf, 'msg', 1);
+form.parentNode.appendChild(sf);
+
+form.onsubmit = function (e) {
+ev(e);
+clmod(sf, 'vis', 1);
+sf.textContent = 'creating "' + tb.value + '"...';
+
+var fd = new FormData();
+fd.append("act", "mkdir");
+fd.append("name", tb.value);
+
+var xhr = new XMLHttpRequest();
+xhr.vp = get_evpath();
+xhr.dn = tb.value;
+xhr.open('POST', xhr.vp, true);
+xhr.onreadystatechange = cb;
+xhr.responseType = 'text';
+xhr.send(fd);
+
+return false;
+};
+
+function cb() {
+if (this.readyState != XMLHttpRequest.DONE)
+return;
+
+if (this.vp !== get_evpath()) {
+sf.textContent = 'aborted due to location change';
+return;
+}
+
+if (this.status !== 200) {
+sf.textContent = 'error: ' + this.responseText;
+return;
+}
+
+tb.value = '';
+clmod(sf, 'vis');
+sf.textContent = '';
+treectl.goto(this.vp + uricom_enc(this.dn) + '/', true);
+}
+})();
+
+
+(function () {
+var form = QS('#op_msg>form'),
+tb = QS('#op_msg input[name="msg"]'),
+sf = mknod('div');
+
+clmod(sf, 'msg', 1);
+form.parentNode.appendChild(sf);
+
+form.onsubmit = function (e) {
+ev(e);
+clmod(sf, 'vis', 1);
+sf.textContent = 'sending...';
+
+var xhr = new XMLHttpRequest(),
+ct = 'application/x-www-form-urlencoded;charset=UTF-8';
+
+xhr.msg = tb.value;
+xhr.open('POST', get_evpath(), true);
+xhr.responseType = 'text';
+xhr.onreadystatechange = cb;
+xhr.setRequestHeader('Content-Type', ct);
+if (xhr.overrideMimeType)
+xhr.overrideMimeType('Content-Type', ct);
+
+xhr.send('msg=' + uricom_enc(xhr.msg));
+return false;
+};
+
+function cb() {
+if (this.readyState != XMLHttpRequest.DONE)
+return;
+
+if (this.status !== 200) {
+sf.textContent = 'error: ' + this.responseText;
+return;
+}
+
+tb.value = '';
+clmod(sf, 'vis');
+sf.textContent = 'sent: "' + this.msg + '"';
+setTimeout(function () {
+treectl.goto(get_evpath());
+}, 100);
+}
+})();
+
+
 function show_readme(md, url, depth) {
 if (!treectl.ireadme)
 return;
@@ -4121,8 +4236,8 @@ if (readme)
 for (var a = 0; a < tr.length; a++) {
 var td = tr[a].cells[1],
 ao = td.firstChild,
-href = ao.getAttribute('href'),
-isdir = href.split('?')[0].slice(-1)[0] == '/',
+href = noq_href(ao),
+isdir = href.endsWith('/'),
 txt = ao.textContent;

 td.setAttribute('sortv', (isdir ? '\t' : '') + txt);
@@ -4140,7 +4255,7 @@ function ev_row_tgl(e) {

 var unpost = (function () {
 ebi('op_unpost').innerHTML = (
-"you can delete your recent uploads below – click the fire-extinguisher icon to refresh" +
+'you can delete your recent uploads below – <a id="unpost_refresh" href="#">refresh list</a>' +
 '<p>optional filter: URL must contain <input type="text" id="unpost_filt" size="20" /><a id="unpost_nofilt" href="#">clear filter</a></p>' +
 '<div id="unpost"></div>'
 );
@@ -4237,7 +4352,6 @@ var unpost = (function () {
 }

 ct.onclick = function (e) {
-ev(e);
 var tgt = e.target.closest('a[me]');
 if (!tgt)
 return;

@@ -4245,6 +4359,7 @@ var unpost = (function () {
 if (!tgt.getAttribute('href'))
 return;

+ev(e);
 var ame = tgt.getAttribute('me');
 if (ame != r.me)
 return toast.err(0, 'something broke, please try a refresh');
@@ -4255,7 +4370,7 @@ var unpost = (function () {

 for (var a = n; a < n2; a++)
 if (QS('#op_unpost a.n' + a))
-req.push(r.files[a].vp);
+req.push(uricom_dec(r.files[a].vp)[0]);

 var links = QSA('#op_unpost a.n' + n);
 for (var a = 0, aa = links.length; a < aa; a++) {
@@ -4285,6 +4400,11 @@ var unpost = (function () {
 r.load();
 };

+ebi('unpost_refresh').onclick = function (e) {
+ev(e);
+goto('unpost');
+};
+
 return r;
 })();

@@ -4338,6 +4458,9 @@ function reload_browser(not_mp) {
 makeSortable(ebi('files'), mp.read_order.bind(mp));
 }

+for (var a = 0; a < 2; a++)
+clmod(ebi(a ? 'pro' : 'epi'), 'hidden', ebi('unsearch'));
+
 if (window['up2k'])
 up2k.set_fsearch();

|
|||||||
@@ -212,6 +212,10 @@ blink {
|
|||||||
#toolsbox a+a {
|
#toolsbox a+a {
|
||||||
text-decoration: none;
|
text-decoration: none;
|
||||||
}
|
}
|
||||||
|
#lno {
|
||||||
|
position: absolute;
|
||||||
|
right: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -15,7 +15,7 @@
|
|||||||
<a id="lightswitch" href="#">go dark</a>
|
<a id="lightswitch" href="#">go dark</a>
|
||||||
<a id="navtoggle" href="#">hide nav</a>
|
<a id="navtoggle" href="#">hide nav</a>
|
||||||
{%- if edit %}
|
{%- if edit %}
|
||||||
<a id="save" href="?edit" tt="Hotkey: ctrl-s">save</a>
|
<a id="save" href="{{ arg_base }}edit" tt="Hotkey: ctrl-s">save</a>
|
||||||
<a id="sbs" href="#" tt="editor and preview side by side">sbs</a>
|
<a id="sbs" href="#" tt="editor and preview side by side">sbs</a>
|
||||||
<a id="nsbs" href="#" tt="switch between editor and preview$NHotkey: ctrl-e">editor</a>
|
<a id="nsbs" href="#" tt="switch between editor and preview$NHotkey: ctrl-e">editor</a>
|
||||||
<div id="toolsbox">
|
<div id="toolsbox">
|
||||||
@@ -26,10 +26,11 @@
|
|||||||
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
|
<a id="cfg_uni" href="#">non-ascii: whitelist</a>
|
||||||
<a id="help" href="#">help</a>
|
<a id="help" href="#">help</a>
|
||||||
</div>
|
</div>
|
||||||
|
<span id="lno">L#</span>
|
||||||
{%- else %}
|
{%- else %}
|
||||||
<a href="?edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
|
<a href="{{ arg_base }}edit" tt="good: higher performance$Ngood: same document width as viewer$Nbad: assumes you know markdown">edit (basic)</a>
|
||||||
<a href="?edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
<a href="{{ arg_base }}edit2" tt="not in-house so probably less buggy">edit (fancy)</a>
|
||||||
<a href="?raw">view raw</a>
|
<a href="{{ arg_base }}raw">view raw</a>
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
</div>
|
</div>
|
||||||
<div id="toc"></div>
|
<div id="toc"></div>
|
||||||
@@ -134,13 +135,13 @@ var md_opt = {
|
|||||||
|
|
||||||
(function () {
|
(function () {
|
||||||
var l = localStorage,
|
var l = localStorage,
|
||||||
drk = l.getItem('lightmode') != 1,
|
drk = l.lightmode != 1,
|
||||||
btn = document.getElementById("lightswitch"),
|
btn = document.getElementById("lightswitch"),
|
||||||
f = function (e) {
|
f = function (e) {
|
||||||
if (e) { e.preventDefault(); drk = !drk; }
|
if (e) { e.preventDefault(); drk = !drk; }
|
||||||
document.documentElement.setAttribute("class", drk? "dark":"light");
|
document.documentElement.setAttribute("class", drk? "dark":"light");
|
||||||
btn.innerHTML = "go " + (drk ? "light":"dark");
|
btn.innerHTML = "go " + (drk ? "light":"dark");
|
||||||
l.setItem('lightmode', drk? 0:1);
|
l.lightmode = drk? 0:1;
|
||||||
};
|
};
|
||||||
|
|
||||||
btn.onclick = f;
|
btn.onclick = f;
|
||||||
|
|||||||
@@ -267,7 +267,14 @@ function convert_markdown(md_text, dest_dom) {
|
|||||||
|
|
||||||
throw ex;
|
throw ex;
|
||||||
}
|
}
|
||||||
var md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
var md_dom = dest_dom;
|
||||||
|
try {
|
||||||
|
md_dom = new DOMParser().parseFromString(md_html, "text/html").body;
|
||||||
|
}
|
||||||
|
catch (ex) {
|
||||||
|
md_dom.innerHTML = md_html;
|
||||||
|
window.copydom = noop;
|
||||||
|
}
|
||||||
|
|
||||||
var nodes = md_dom.getElementsByTagName('a');
|
var nodes = md_dom.getElementsByTagName('a');
|
||||||
for (var a = nodes.length - 1; a >= 0; a--) {
|
for (var a = nodes.length - 1; a >= 0; a--) {
|
||||||
@@ -502,9 +509,11 @@ img_load.callbacks = [toc.refresh];
|
|||||||
|
|
||||||
// scroll handler
|
// scroll handler
|
||||||
var redraw = (function () {
|
var redraw = (function () {
|
||||||
var sbs = false;
|
var sbs = true;
|
||||||
function onresize() {
|
function onresize() {
|
||||||
sbs = window.matchMedia('(min-width: 64em)').matches;
|
if (window.matchMedia)
|
||||||
|
sbs = window.matchMedia('(min-width: 64em)').matches;
|
||||||
|
|
||||||
var y = (dom_hbar.offsetTop + dom_hbar.offsetHeight) + 'px';
|
var y = (dom_hbar.offsetTop + dom_hbar.offsetHeight) + 'px';
|
||||||
if (sbs) {
|
if (sbs) {
|
||||||
dom_toc.style.top = y;
|
dom_toc.style.top = y;
|
||||||
|
|||||||
@@ -230,44 +230,40 @@ redraw = (function () {
|
|||||||
|
|
||||||
// modification checker
|
// modification checker
|
||||||
function Modpoll() {
|
function Modpoll() {
|
||||||
this.skip_one = true;
|
var r = {
|
||||||
this.disabled = false;
|
skip_one: true,
|
||||||
|
disabled: false
|
||||||
this.periodic = function () {
|
};
|
||||||
var that = this;
|
|
||||||
setTimeout(function () {
|
|
||||||
that.periodic();
|
|
||||||
}, 1000 * md_opt.modpoll_freq);
|
|
||||||
|
|
||||||
|
r.periodic = function () {
|
||||||
var skip = null;
|
var skip = null;
|
||||||
|
|
||||||
if (toast.visible)
|
if (toast.visible)
|
||||||
skip = 'toast';
|
skip = 'toast';
|
||||||
|
|
||||||
else if (this.skip_one)
|
else if (r.skip_one)
|
||||||
skip = 'saved';
|
skip = 'saved';
|
||||||
|
|
||||||
else if (this.disabled)
|
else if (r.disabled)
|
||||||
skip = 'disabled';
|
skip = 'disabled';
|
||||||
|
|
||||||
if (skip) {
|
if (skip) {
|
||||||
console.log('modpoll skip, ' + skip);
|
console.log('modpoll skip, ' + skip);
|
||||||
this.skip_one = false;
|
r.skip_one = false;
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log('modpoll...');
|
console.log('modpoll...');
|
||||||
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
|
var url = (document.location + '').split('?')[0] + '?raw&_=' + Date.now();
|
||||||
var xhr = new XMLHttpRequest();
|
var xhr = new XMLHttpRequest();
|
||||||
xhr.modpoll = this;
|
|
||||||
xhr.open('GET', url, true);
|
xhr.open('GET', url, true);
|
||||||
xhr.responseType = 'text';
|
xhr.responseType = 'text';
|
||||||
xhr.onreadystatechange = this.cb;
|
xhr.onreadystatechange = r.cb;
|
||||||
xhr.send();
|
xhr.send();
|
||||||
}
|
};
|
||||||
|
|
||||||
this.cb = function () {
|
r.cb = function () {
|
||||||
if (this.modpoll.disabled || this.modpoll.skip_one) {
|
if (r.disabled || r.skip_one) {
|
||||||
console.log('modpoll abort');
|
console.log('modpoll abort');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -288,7 +284,7 @@ function Modpoll() {
|
|||||||
|
|
||||||
if (server_ref != server_now) {
|
if (server_ref != server_now) {
|
||||||
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
|
console.log("modpoll diff |" + server_ref.length + "|, |" + server_now.length + "|");
|
||||||
this.modpoll.disabled = true;
|
r.disabled = true;
|
||||||
var msg = [
|
var msg = [
|
||||||
"The document has changed on the server.",
|
"The document has changed on the server.",
|
||||||
"The changes will NOT be loaded into your editor automatically.",
|
"The changes will NOT be loaded into your editor automatically.",
|
||||||
@@ -302,12 +298,12 @@ function Modpoll() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
console.log('modpoll eq');
|
console.log('modpoll eq');
|
||||||
}
|
};
|
||||||
|
|
||||||
if (md_opt.modpoll_freq > 0)
|
if (md_opt.modpoll_freq > 0)
|
||||||
this.periodic();
|
setInterval(r.periodic, 1000 * md_opt.modpoll_freq);
|
||||||
|
|
||||||
return this;
|
return r;
|
||||||
}
|
}
|
||||||
var modpoll = new Modpoll();
|
var modpoll = new Modpoll();
|
||||||
|
|
||||||
@@ -879,6 +875,40 @@ function cfg_uni(e) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
var set_lno = (function () {
|
||||||
|
var t = null,
|
||||||
|
pi = null,
|
||||||
|
pv = null,
|
||||||
|
lno = ebi('lno');
|
||||||
|
|
||||||
|
function poke() {
|
||||||
|
clearTimeout(t);
|
||||||
|
t = setTimeout(fire, 20);
|
||||||
|
}
|
||||||
|
|
||||||
|
function fire() {
|
||||||
|
try {
|
||||||
|
clearTimeout(t);
|
||||||
|
|
||||||
|
var i = dom_src.selectionStart;
|
||||||
|
if (i === pi)
|
||||||
|
return;
|
||||||
|
|
||||||
|
var v = 'L' + dom_src.value.slice(0, i).split('\n').length;
|
||||||
|
if (v != pv)
|
||||||
|
lno.innerHTML = v;
|
||||||
|
|
||||||
|
pi = i;
|
||||||
|
pv = v;
|
||||||
|
}
|
||||||
|
catch (e) { }
|
||||||
|
}
|
||||||
|
|
||||||
|
timer.add(fire);
|
||||||
|
return poke;
|
||||||
|
})();
|
||||||
|
|
||||||
|
|
||||||
// hotkeys / toolbar
|
// hotkeys / toolbar
|
||||||
(function () {
|
(function () {
|
||||||
function keydown(ev) {
|
function keydown(ev) {
|
||||||
@@ -897,6 +927,8 @@ function cfg_uni(e) {
|
|||||||
if (document.activeElement != dom_src)
|
if (document.activeElement != dom_src)
|
||||||
return true;
|
return true;
|
||||||
|
|
||||||
|
set_lno();
|
||||||
|
|
||||||
if (ctrl(ev)) {
|
if (ctrl(ev)) {
|
||||||
if (ev.code == "KeyH" || kc == 72) {
|
if (ev.code == "KeyH" || kc == 72) {
|
||||||
md_header(ev.shiftKey);
|
md_header(ev.shiftKey);
|
||||||
|
|||||||
@@ -33,11 +33,11 @@ var md_opt = {
|
|||||||
|
|
||||||
var lightswitch = (function () {
|
var lightswitch = (function () {
|
||||||
var l = localStorage,
|
var l = localStorage,
|
||||||
drk = l.getItem('lightmode') != 1,
|
drk = l.lightmode != 1,
|
||||||
f = function (e) {
|
f = function (e) {
|
||||||
if (e) drk = !drk;
|
if (e) drk = !drk;
|
||||||
document.documentElement.setAttribute("class", drk? "dark":"light");
|
document.documentElement.setAttribute("class", drk? "dark":"light");
|
||||||
l.setItem('lightmode', drk? 0:1);
|
l.lightmode = drk? 0:1;
|
||||||
};
|
};
|
||||||
f();
|
f();
|
||||||
return f;
|
return f;
|
||||||
@@ -45,6 +45,7 @@ l.setItem('lightmode', drk? 0:1);
|
|||||||
|
|
||||||
</script>
|
</script>
|
||||||
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
<script src="/.cpr/util.js?_={{ ts }}"></script>
|
||||||
|
<script src="/.cpr/deps/marked.js?_={{ ts }}"></script>
|
||||||
<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
<script src="/.cpr/deps/easymde.js?_={{ ts }}"></script>
|
||||||
<script src="/.cpr/mde.js?_={{ ts }}"></script>
|
<script src="/.cpr/mde.js?_={{ ts }}"></script>
|
||||||
</body></html>
|
</body></html>
|
||||||
|
|||||||
@@ -55,6 +55,16 @@ table {
|
|||||||
.btns {
|
.btns {
|
||||||
margin: 1em 0;
|
margin: 1em 0;
|
||||||
}
|
}
|
||||||
|
#msg {
|
||||||
|
margin: 3em 0;
|
||||||
|
}
|
||||||
|
#msg h1 {
|
||||||
|
margin-bottom: 0;
|
||||||
|
}
|
||||||
|
#msg h1 + p {
|
||||||
|
margin-top: .3em;
|
||||||
|
text-align: right;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
html.dark,
|
html.dark,
|
||||||
@@ -73,8 +83,8 @@ html.dark a {
|
|||||||
}
|
}
|
||||||
html.dark input {
|
html.dark input {
|
||||||
color: #fff;
|
color: #fff;
|
||||||
background: #624;
|
background: #626;
|
||||||
border: 1px solid #c27;
|
border: 1px solid #c2c;
|
||||||
border-width: 1px 0 0 0;
|
border-width: 1px 0 0 0;
|
||||||
border-radius: .5em;
|
border-radius: .5em;
|
||||||
padding: .5em .7em;
|
padding: .5em .7em;
|
||||||
|
|||||||
@@ -12,7 +12,17 @@
|
|||||||
|
|
||||||
<body>
|
<body>
|
||||||
<div id="wrap">
|
<div id="wrap">
|
||||||
<p>hello {{ this.uname }}</p>
|
{%- if this.uname == '*' %}
|
||||||
|
<p>howdy stranger <small>(you're not logged in)</small></p>
|
||||||
|
{%- else %}
|
||||||
|
<p>welcome back, <strong>{{ this.uname }}</strong></p>
|
||||||
|
{%- endif %}
|
||||||
|
|
||||||
|
{%- if msg %}
|
||||||
|
<div id="msg">
|
||||||
|
{{ msg }}
|
||||||
|
</div>
|
||||||
|
{%- endif %}
|
||||||
|
|
||||||
{%- if avol %}
|
{%- if avol %}
|
||||||
<h1>admin panel:</h1>
|
<h1>admin panel:</h1>
|
||||||
@@ -60,7 +70,7 @@
|
|||||||
|
|
||||||
<h1>login for more:</h1>
|
<h1>login for more:</h1>
|
||||||
<ul>
|
<ul>
|
||||||
<form method="post" enctype="multipart/form-data" action="/">
|
<form method="post" enctype="multipart/form-data" action="/{{ qvpath }}">
|
||||||
<input type="hidden" name="act" value="login" />
|
<input type="hidden" name="act" value="login" />
|
||||||
<input type="password" name="cppwd" />
|
<input type="password" name="cppwd" />
|
||||||
<input type="submit" value="Login" />
|
<input type="submit" value="Login" />
|
||||||
@@ -70,7 +80,7 @@
|
|||||||
<a href="#" id="repl">π</a>
|
<a href="#" id="repl">π</a>
|
||||||
<script>
|
<script>
|
||||||
|
|
||||||
if (localStorage.getItem('lightmode') != 1)
|
if (localStorage.lightmode != 1)
|
||||||
document.documentElement.setAttribute("class", "dark");
|
document.documentElement.setAttribute("class", "dark");
|
||||||
|
|
||||||
</script>
|
</script>
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
@font-face {
|
@font-face {
|
||||||
font-family: 'scp';
|
font-family: 'scp';
|
||||||
|
font-display: swap;
|
||||||
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
|
src: local('Source Code Pro Regular'), local('SourceCodePro-Regular'), url(/.cpr/deps/scp.woff2) format('woff2');
|
||||||
}
|
}
|
||||||
html {
|
html {
|
||||||
@@ -246,6 +247,27 @@ html.light #tt em {
|
|||||||
#repl_pre {
|
#repl_pre {
|
||||||
max-width: 24em;
|
max-width: 24em;
|
||||||
}
|
}
|
||||||
|
*:focus,
|
||||||
|
#pctl *:focus,
|
||||||
|
.btn:focus {
|
||||||
|
box-shadow: 0 .1em .2em #fc0 inset;
|
||||||
|
border-radius: .2em;
|
||||||
|
}
|
||||||
|
html.light *:focus,
|
||||||
|
html.light #pctl *:focus,
|
||||||
|
html.light .btn:focus {
|
||||||
|
box-shadow: 0 .1em .2em #037 inset;
|
||||||
|
}
|
||||||
|
input[type="text"]:focus,
|
||||||
|
input:not([type]):focus,
|
||||||
|
textarea:focus {
|
||||||
|
box-shadow: 0 .1em .3em #fc0, 0 -.1em .3em #fc0;
|
||||||
|
}
|
||||||
|
html.light input[type="text"]:focus,
|
||||||
|
html.light input:not([type]):focus,
|
||||||
|
html.light textarea:focus {
|
||||||
|
box-shadow: 0 .1em .3em #037, 0 -.1em .3em #037;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -512,9 +512,13 @@ function up2k_init(subtle) {
|
|||||||
// chrome<37 firefox<34 edge<12 opera<24 safari<7
|
// chrome<37 firefox<34 edge<12 opera<24 safari<7
|
||||||
shame = 'your browser is impressively ancient';
|
shame = 'your browser is impressively ancient';
|
||||||
|
|
||||||
var got_deps = false;
|
function got_deps() {
|
||||||
|
return subtle || window.asmCrypto || window.hashwasm;
|
||||||
|
}
|
||||||
|
|
||||||
|
var loading_deps = false;
|
||||||
function init_deps() {
|
function init_deps() {
|
||||||
if (!got_deps && !subtle && !window.asmCrypto) {
|
if (!loading_deps && !got_deps()) {
|
||||||
var fn = 'sha512.' + sha_js + '.js';
|
var fn = 'sha512.' + sha_js + '.js';
|
||||||
showmodal('<h1>loading ' + fn + '</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
|
showmodal('<h1>loading ' + fn + '</h1><h2>since ' + shame + '</h2><h4>thanks chrome</h4>');
|
||||||
import_js('/.cpr/deps/' + fn, unmodal);
|
import_js('/.cpr/deps/' + fn, unmodal);
|
||||||
@@ -525,10 +529,10 @@ function up2k_init(subtle) {
|
|||||||
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance <span style="color:#' +
|
ebi('u2foot').innerHTML = 'seems like ' + shame + ' so do that if you want more performance <span style="color:#' +
|
||||||
(sha_js == 'ac' ? 'c84">(expecting 20' : '8a5">(but dont worry too much, expect 100') + ' MiB/s)</span>';
|
(sha_js == 'ac' ? 'c84">(expecting 20' : '8a5">(but dont worry too much, expect 100') + ' MiB/s)</span>';
|
||||||
}
|
}
|
||||||
got_deps = true;
|
loading_deps = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (perms.length && !has(perms, 'read'))
|
if (perms.length && !has(perms, 'read') && has(perms, 'write'))
|
||||||
goto('up2k');
|
goto('up2k');
|
||||||
|
|
||||||
function setmsg(msg, type) {
|
function setmsg(msg, type) {
|
||||||
@@ -572,15 +576,17 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
var parallel_uploads = icfg_get('nthread'),
|
var parallel_uploads = icfg_get('nthread'),
|
||||||
multitask = bcfg_get('multitask', true),
|
uc = {},
|
||||||
ask_up = bcfg_get('ask_up', true),
|
|
||||||
flag_en = bcfg_get('flag_en', false),
|
|
||||||
fsearch = bcfg_get('fsearch', false),
|
|
||||||
turbo = bcfg_get('u2turbo', false),
|
|
||||||
datechk = bcfg_get('u2tdate', true),
|
|
||||||
fdom_ctr = 0,
|
fdom_ctr = 0,
|
||||||
min_filebuf = 0;
|
min_filebuf = 0;
|
||||||
|
|
||||||
|
bcfg_bind(uc, 'multitask', 'multitask', true, null, false);
|
||||||
|
bcfg_bind(uc, 'ask_up', 'ask_up', true, null, false);
|
||||||
|
bcfg_bind(uc, 'flag_en', 'flag_en', false, apply_flag_cfg);
|
||||||
|
bcfg_bind(uc, 'fsearch', 'fsearch', false, set_fsearch, false);
|
||||||
|
bcfg_bind(uc, 'turbo', 'u2turbo', false, draw_turbo, false);
|
||||||
|
bcfg_bind(uc, 'datechk', 'u2tdate', true, null, false);
|
||||||
|
|
||||||
var st = {
|
var st = {
|
||||||
"files": [],
|
"files": [],
|
||||||
"todo": {
|
"todo": {
|
||||||
@@ -715,13 +721,12 @@ function up2k_init(subtle) {
|
|||||||
offdrag.bind(this)();
|
offdrag.bind(this)();
|
||||||
var dz = (this && this.getAttribute('id'));
|
var dz = (this && this.getAttribute('id'));
|
||||||
|
|
||||||
if ((dz == 'up_dz' && fsearch) || (dz == 'srch_dz' && !fsearch)) {
|
var err = this.getAttribute('err');
|
||||||
var err = this.getAttribute('err');
|
if (err)
|
||||||
if (err)
|
return modal.alert('sorry, ' + err);
|
||||||
return modal.alert('sorry, ' + err);
|
|
||||||
|
|
||||||
|
if ((dz == 'up_dz' && uc.fsearch) || (dz == 'srch_dz' && !uc.fsearch))
|
||||||
tgl_fsearch();
|
tgl_fsearch();
|
||||||
}
|
|
||||||
|
|
||||||
if (!QS('#op_up2k.act'))
|
if (!QS('#op_up2k.act'))
|
||||||
goto('up2k');
|
goto('up2k');
|
||||||
@@ -743,11 +748,14 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
more_one_file();
|
more_one_file();
|
||||||
var bad_files = [],
|
var bad_files = [],
|
||||||
|
nil_files = [],
|
||||||
good_files = [],
|
good_files = [],
|
||||||
dirs = [];
|
dirs = [];
|
||||||
|
|
||||||
for (var a = 0; a < files.length; a++) {
|
for (var a = 0; a < files.length; a++) {
|
||||||
var fobj = files[a];
|
var fobj = files[a],
|
||||||
|
dst = good_files;
|
||||||
|
|
||||||
if (is_itemlist) {
|
if (is_itemlist) {
|
||||||
if (fobj.kind !== 'file')
|
if (fobj.kind !== 'file')
|
||||||
continue;
|
continue;
|
||||||
@@ -764,16 +772,15 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
try {
|
try {
|
||||||
if (fobj.size < 1)
|
if (fobj.size < 1)
|
||||||
throw 1;
|
dst = nil_files;
|
||||||
}
|
}
|
||||||
catch (ex) {
|
catch (ex) {
|
||||||
bad_files.push(fobj.name);
|
dst = bad_files;
|
||||||
continue;
|
|
||||||
}
|
}
|
||||||
good_files.push([fobj, fobj.name]);
|
dst.push([fobj, fobj.name]);
|
||||||
}
|
}
|
||||||
if (dirs) {
|
if (dirs) {
|
||||||
return read_dirs(null, [], dirs, good_files, bad_files);
|
return read_dirs(null, [], dirs, good_files, nil_files, bad_files);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -787,7 +794,7 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
var rd_missing_ref = [];
|
var rd_missing_ref = [];
|
||||||
function read_dirs(rd, pf, dirs, good, bad, spins) {
|
function read_dirs(rd, pf, dirs, good, nil, bad, spins) {
|
||||||
spins = spins || 0;
|
spins = spins || 0;
|
||||||
if (++spins == 5)
|
if (++spins == 5)
|
||||||
rd_missing_ref = rd_flatten(pf, dirs);
|
rd_missing_ref = rd_flatten(pf, dirs);
|
||||||
@@ -808,7 +815,7 @@ function up2k_init(subtle) {
|
|||||||
msg.push('<li>' + esc(missing[a]) + '</li>');
|
msg.push('<li>' + esc(missing[a]) + '</li>');
|
||||||
|
|
||||||
return modal.alert(msg.join('') + '</ul>', function () {
|
return modal.alert(msg.join('') + '</ul>', function () {
|
||||||
read_dirs(rd, [], [], good, bad, spins);
|
read_dirs(rd, [], [], good, nil, bad, spins);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
spins = 0;
|
spins = 0;
|
||||||
@@ -816,11 +823,11 @@ function up2k_init(subtle) {
|
|||||||
|
|
||||||
if (!dirs.length) {
|
if (!dirs.length) {
|
||||||
if (!pf.length)
|
if (!pf.length)
|
||||||
return gotallfiles(good, bad);
|
return gotallfiles(good, nil, bad);
|
||||||
|
|
||||||
console.log("retry pf, " + pf.length);
|
console.log("retry pf, " + pf.length);
|
||||||
setTimeout(function () {
|
setTimeout(function () {
|
||||||
read_dirs(rd, pf, dirs, good, bad, spins);
|
read_dirs(rd, pf, dirs, good, nil, bad, spins);
|
||||||
}, 50);
|
}, 50);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -842,14 +849,15 @@ function up2k_init(subtle) {
|
|||||||
pf.push(name);
|
pf.push(name);
|
||||||
dn.file(function (fobj) {
|
dn.file(function (fobj) {
|
||||||
apop(pf, name);
|
apop(pf, name);
|
||||||
|
var dst = good;
|
||||||
try {
|
try {
|
||||||
if (fobj.size > 0) {
|
if (fobj.size < 1)
|
||||||
good.push([fobj, name]);
|
dst = nil;
|
||||||
return;
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
catch (ex) { }
|
catch (ex) {
|
||||||
bad.push(name);
|
dst = bad;
|
||||||
|
}
|
||||||
|
dst.push([fobj, name]);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
ngot += 1;
|
ngot += 1;
|
||||||
@@ -858,23 +866,33 @@ function up2k_init(subtle) {
|
|||||||
dirs.shift();
|
dirs.shift();
|
||||||
rd = null;
|
rd = null;
|
||||||
}
|
}
|
||||||
return read_dirs(rd, pf, dirs, good, bad, spins);
|
return read_dirs(rd, pf, dirs, good, nil, bad, spins);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
function gotallfiles(good_files, bad_files) {
|
function gotallfiles(good_files, nil_files, bad_files) {
|
||||||
|
var ntot = good_files.concat(nil_files, bad_files).length;
|
||||||
if (bad_files.length) {
|
if (bad_files.length) {
|
||||||
var ntot = bad_files.length + good_files.length,
|
var msg = 'These {0} files (of {1} total) were skipped, possibly due to filesystem permissions:\n'.format(bad_files.length, ntot);
|
||||||
msg = 'These {0} files (of {1} total) were skipped because they are empty:\n'.format(bad_files.length, ntot);
|
|
||||||
|
|
||||||
for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++)
|
for (var a = 0, aa = Math.min(20, bad_files.length); a < aa; a++)
|
||||||
msg += '-- ' + bad_files[a] + '\n';
|
msg += '-- ' + bad_files[a][1] + '\n';
|
||||||
|
|
||||||
if (good_files.length - bad_files.length <= 1 && ANDROID)
|
|
||||||
msg += '\nFirefox-Android has a bug which prevents selecting multiple files. Try selecting one file at a time. For more info, see firefox bug 1456557';
|
|
||||||
|
|
||||||
|
msg += '\nMaybe it works better if you select just one file';
|
||||||
return modal.alert(msg, function () {
|
return modal.alert(msg, function () {
|
||||||
gotallfiles(good_files, []);
|
gotallfiles(good_files, nil_files, []);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (nil_files.length) {
|
||||||
|
var msg = 'These {0} files (of {1} total) are blank/empty; upload them anyways?\n'.format(nil_files.length, ntot);
|
||||||
|
for (var a = 0, aa = Math.min(20, nil_files.length); a < aa; a++)
|
||||||
|
msg += '-- ' + nil_files[a][1] + '\n';
|
||||||
|
|
||||||
|
msg += '\nMaybe it works better if you select just one file';
|
||||||
|
return modal.confirm(msg, function () {
|
||||||
|
gotallfiles(good_files.concat(nil_files), [], []);
|
||||||
|
}, function () {
|
||||||
|
gotallfiles(good_files, [], []);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -884,11 +902,11 @@ function up2k_init(subtle) {
|
|||||||
return a < b ? -1 : a > b ? 1 : 0;
|
return a < b ? -1 : a > b ? 1 : 0;
|
||||||
});
|
});
|
||||||
|
|
||||||
var msg = ['{0} these {1} files?<ul>'.format(fsearch ? 'search' : 'upload', good_files.length)];
|
var msg = ['{0} these {1} files?<ul>'.format(uc.fsearch ? 'search' : 'upload', good_files.length)];
|
||||||
for (var a = 0, aa = Math.min(20, good_files.length); a < aa; a++)
|
for (var a = 0, aa = Math.min(20, good_files.length); a < aa; a++)
|
||||||
msg.push('<li>' + esc(good_files[a][1]) + '</li>');
|
msg.push('<li>' + esc(good_files[a][1]) + '</li>');
|
||||||
|
|
||||||
if (ask_up && !fsearch)
|
if (uc.ask_up && !uc.fsearch)
|
||||||
return modal.confirm(msg.join('') + '</ul>', function () { up_them(good_files); }, null);
|
return modal.confirm(msg.join('') + '</ul>', function () { up_them(good_files); }, null);
|
||||||
|
|
||||||
up_them(good_files);
|
up_them(good_files);
|
||||||
@@ -920,7 +938,7 @@ function up2k_init(subtle) {
|
|||||||
"t0": now,
|
"t0": now,
|
||||||
"fobj": fobj,
|
"fobj": fobj,
|
||||||
"name": name,
|
"name": name,
|
||||||
"size": fobj.size,
|
"size": fobj.size || 0,
|
||||||
"lmod": lmod / 1000,
|
"lmod": lmod / 1000,
|
||||||
"purl": fdir,
|
"purl": fdir,
|
||||||
"done": false,
|
"done": false,
|
||||||
@@ -928,7 +946,7 @@ function up2k_init(subtle) {
|
|||||||
},
|
},
|
||||||
key = entry.name + '\n' + entry.size;
|
key = entry.name + '\n' + entry.size;
|
||||||
|
|
||||||
if (fsearch)
|
if (uc.fsearch)
|
||||||
entry.srch = 1;
|
entry.srch = 1;
|
||||||
|
|
||||||
if (seen[key])
|
if (seen[key])
|
||||||
@@ -937,15 +955,17 @@ function up2k_init(subtle) {
|
|||||||
seen[key] = 1;
|
seen[key] = 1;
|
||||||
|
|
||||||
pvis.addfile([
|
pvis.addfile([
|
||||||
fsearch ? esc(entry.name) : linksplit(
|
uc.fsearch ? esc(entry.name) : linksplit(
|
||||||
uricom_dec(entry.purl)[0] + entry.name).join(' '),
|
entry.purl + uricom_enc(entry.name)).join(' '),
|
||||||
'📐 hash',
|
'📐 hash',
|
||||||
''
|
''
|
||||||
], fobj.size, draw_each);
|
], fobj.size, draw_each);
|
||||||
|
|
||||||
st.bytes.total += fobj.size;
|
st.bytes.total += fobj.size;
|
||||||
st.files.push(entry);
|
st.files.push(entry);
|
||||||
if (turbo)
|
if (!entry.size)
|
||||||
|
push_t(st.todo.handshake, entry);
|
||||||
|
else if (uc.turbo)
|
||||||
push_t(st.todo.head, entry);
|
push_t(st.todo.head, entry);
|
||||||
else
|
else
|
||||||
push_t(st.todo.hash, entry);
|
push_t(st.todo.hash, entry);
|
||||||
@@ -1020,14 +1040,14 @@ function up2k_init(subtle) {
|
|||||||
if (nhash) {
|
if (nhash) {
|
||||||
st.time.hashing += td;
|
st.time.hashing += td;
|
||||||
t.push(['u2etah', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
|
t.push(['u2etah', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
|
||||||
if (fsearch)
|
if (uc.fsearch)
|
||||||
t.push(['u2etat', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
|
t.push(['u2etat', st.bytes.hashed, st.bytes.hashed, st.time.hashing]);
|
||||||
}
|
}
|
||||||
if (nsend) {
|
if (nsend) {
|
||||||
st.time.uploading += td;
|
st.time.uploading += td;
|
||||||
t.push(['u2etau', st.bytes.uploaded, st.bytes.finished, st.time.uploading]);
|
t.push(['u2etau', st.bytes.uploaded, st.bytes.finished, st.time.uploading]);
|
||||||
}
|
}
|
||||||
if ((nhash || nsend) && !fsearch) {
|
if ((nhash || nsend) && !uc.fsearch) {
|
||||||
if (!st.bytes.finished) {
|
if (!st.bytes.finished) {
|
||||||
ebi('u2etat').innerHTML = '(preparing to upload)';
|
ebi('u2etat').innerHTML = '(preparing to upload)';
|
||||||
}
|
}
|
||||||
@@ -1080,12 +1100,7 @@ function up2k_init(subtle) {
|
|||||||
st.busy.handshake.length)
|
st.busy.handshake.length)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
if (st.busy.handshake.length)
|
if ((uc.multitask ? 1 : 0) <
|
||||||
for (var n = t.n - 1; n >= t.n - parallel_uploads && n >= 0; n--)
|
|
||||||
if (st.files[n].t_uploading)
|
|
||||||
return false;
|
|
||||||
|
|
||||||
if ((multitask ? 1 : 0) <
|
|
||||||
st.todo.upload.length +
|
st.todo.upload.length +
|
||||||
st.busy.upload.length)
|
st.busy.upload.length)
|
||||||
return false;
|
return false;
|
||||||
@@ -1097,7 +1112,7 @@ function up2k_init(subtle) {
|
|||||||
if (!parallel_uploads)
|
if (!parallel_uploads)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
if (multitask) {
|
if (uc.multitask) {
|
||||||
var ahead = st.bytes.hashed - st.bytes.finished;
|
var ahead = st.bytes.hashed - st.bytes.finished;
|
||||||
return ahead < 1024 * 1024 * 1024 * 4 &&
|
return ahead < 1024 * 1024 * 1024 * 4 &&
|
||||||
st.todo.handshake.length + st.busy.handshake.length < 16;
|
st.todo.handshake.length + st.busy.handshake.length < 16;
|
||||||
@@ -1121,7 +1136,7 @@ function up2k_init(subtle) {
|
|||||||
if (running)
|
if (running)
|
||||||
return;
|
return;
|
||||||
|
|
||||||
if (crashed)
|
if (crashed || !got_deps())
|
||||||
return defer();
|
return defer();
|
||||||
|
|
||||||
running = true;
|
running = true;
|
||||||
@@ -1137,6 +1152,18 @@ function up2k_init(subtle) {
|
|||||||
st.busy.handshake.length +
|
st.busy.handshake.length +
|
||||||
st.busy.upload.length;
|
st.busy.upload.length;
|
||||||
|
|
||||||
|
if (was_busy && !is_busy) {
|
||||||
|
for (var a = 0; a < st.files.length; a++) {
|
||||||
|
var t = st.files[a];
|
||||||
|
if (t.want_recheck) {
|
||||||
|
t.rechecks++;
|
||||||
|
t.want_recheck = false;
|
||||||
|
push_t(st.todo.handshake, t);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
is_busy = st.todo.handshake.length;
|
||||||
|
}
|
||||||
|
|
||||||
if (was_busy != is_busy) {
|
if (was_busy != is_busy) {
|
||||||
was_busy = is_busy;
|
was_busy = is_busy;
|
||||||
|
|
||||||
@@ -1144,13 +1171,13 @@ function up2k_init(subtle) {
|
|||||||
"EventListener"]("beforeunload", warn_uploader_busy);
|
"EventListener"]("beforeunload", warn_uploader_busy);
|
||||||
|
|
||||||
if (!is_busy) {
|
if (!is_busy) {
|
||||||
var k = fsearch ? 'searches' : 'uploads',
|
var k = uc.fsearch ? 'searches' : 'uploads',
|
||||||
ks = fsearch ? 'Search' : 'Upload',
|
ks = uc.fsearch ? 'Search' : 'Upload',
|
||||||
tok = fsearch ? 'successful (found on server)' : 'completed successfully',
|
tok = uc.fsearch ? 'successful (found on server)' : 'completed successfully',
|
||||||
tng = fsearch ? 'failed (NOT found on server)' : 'failed, sorry',
|
tng = uc.fsearch ? 'failed (NOT found on server)' : 'failed, sorry',
|
||||||
ok = pvis.ctr["ok"],
|
ok = pvis.ctr["ok"],
|
||||||
ng = pvis.ctr["ng"],
|
ng = pvis.ctr["ng"],
|
||||||
t = ask_up ? 0 : 10;
|
t = uc.ask_up ? 0 : 10;
|
||||||
|
|
||||||
if (ok && ng)
|
if (ok && ng)
|
||||||
toast.warn(t, 'Finished, but some {0} failed:\n{1} {2},\n{3} {4}'.format(k, ok, tok, ng, tng));
|
toast.warn(t, 'Finished, but some {0} failed:\n{1} {2},\n{3} {4}'.format(k, ok, tok, ng, tng));
|
||||||
@@ -1171,6 +1198,8 @@ function up2k_init(subtle) {
|
|||||||
ebi('u2etas').style.textAlign = 'left';
|
ebi('u2etas').style.textAlign = 'left';
|
||||||
}
|
}
|
||||||
etafun();
|
etafun();
|
||||||
|
if (pvis.act == 'bz')
|
||||||
|
pvis.changecard('bz');
|
||||||
}
|
}
|
||||||
|
|
||||||
if (flag) {
|
if (flag) {
|
||||||
@@ -1369,7 +1398,7 @@ function up2k_init(subtle) {
|
|||||||
pvis.move(t.n, 'ng');
|
pvis.move(t.n, 'ng');
|
||||||
apop(st.busy.hash, t);
|
apop(st.busy.hash, t);
|
||||||
st.bytes.finished += t.size;
|
st.bytes.finished += t.size;
|
||||||
return tasker();
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
toast.err(0, 'y o u b r o k e i t\nfile: ' + esc(t.name + '') + '\nerror: ' + err);
|
toast.err(0, 'y o u b r o k e i t\nfile: ' + esc(t.name + '') + '\nerror: ' + err);
|
||||||
@@ -1445,7 +1474,6 @@ function up2k_init(subtle) {
|
|||||||
console.log('head onerror, retrying', t);
|
console.log('head onerror, retrying', t);
|
||||||
apop(st.busy.head, t);
|
apop(st.busy.head, t);
|
||||||
st.todo.head.unshift(t);
|
st.todo.head.unshift(t);
|
||||||
tasker();
|
|
||||||
};
|
};
|
||||||
function orz(e) {
|
function orz(e) {
|
||||||
var ok = false;
|
var ok = false;
|
||||||
@@ -1454,7 +1482,7 @@ function up2k_init(subtle) {
|
|||||||
srv_ts = xhr.getResponseHeader('Last-Modified');
|
srv_ts = xhr.getResponseHeader('Last-Modified');
|
||||||
|
|
||||||
ok = t.size == srv_sz;
|
ok = t.size == srv_sz;
|
||||||
if (ok && datechk) {
|
if (ok && uc.datechk) {
|
||||||
srv_ts = new Date(srv_ts) / 1000;
|
srv_ts = new Date(srv_ts) / 1000;
|
||||||
ok = Math.abs(srv_ts - t.lmod) < 2;
|
ok = Math.abs(srv_ts - t.lmod) < 2;
|
||||||
}
|
}
|
||||||
@@ -1467,6 +1495,7 @@ function up2k_init(subtle) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
t.done = true;
|
t.done = true;
|
||||||
|
t.fobj = null;
|
||||||
st.bytes.hashed += t.size;
|
st.bytes.hashed += t.size;
|
||||||
st.bytes.finished += t.size;
|
st.bytes.finished += t.size;
|
||||||
pvis.move(t.n, 'bz');
|
pvis.move(t.n, 'bz');
|
||||||
@@ -1510,7 +1539,6 @@ function up2k_init(subtle) {
|
|||||||
apop(st.busy.handshake, t);
|
apop(st.busy.handshake, t);
|
||||||
st.todo.handshake.unshift(t);
|
st.todo.handshake.unshift(t);
|
||||||
t.keepalive = keepalive;
|
t.keepalive = keepalive;
|
||||||
tasker();
|
|
||||||
};
|
};
|
||||||
function orz(e) {
|
function orz(e) {
|
||||||
if (t.t_busied != me) {
|
if (t.t_busied != me) {
|
||||||
@@ -1536,15 +1564,18 @@ function up2k_init(subtle) {
                }
                else {
                    smsg = 'found';
-                    var hit = response.hits[0],
-                        msg = linksplit(hit.rp).join(''),
-                        tr = unix2iso(hit.ts),
-                        tu = unix2iso(t.lmod),
-                        diff = parseInt(t.lmod) - parseInt(hit.ts),
-                        cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
-                        sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
-
-                    msg += '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</span></span>';
+                    var msg = [];
+                    for (var a = 0, aa = Math.min(20, response.hits.length); a < aa; a++) {
+                        var hit = response.hits[a],
+                            tr = unix2iso(hit.ts),
+                            tu = unix2iso(t.lmod),
+                            diff = parseInt(t.lmod) - parseInt(hit.ts),
+                            cdiff = (Math.abs(diff) <= 2) ? '3c0' : 'f0b',
+                            sdiff = '<span style="color:#' + cdiff + '">diff ' + diff;
+
+                        msg.push(linksplit(hit.rp).join('') + '<br /><small>' + tr + ' (srv), ' + tu + ' (You), ' + sdiff + '</small></span>');
+                    }
+                    msg = msg.join('<br />\n');
                }
                pvis.seth(t.n, 2, msg);
                pvis.seth(t.n, 1, smsg);
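For context, each hit row leans on `linksplit()` to turn the server path into one link per segment; a rough illustration of how a single row is assembled (the `rp` / `ts` fields are as shown in the hunk, the path itself is made up):

```js
// assumed output shape: linksplit() returns one <a> per path segment,
// so each folder in a search hit stays clickable
var hit = { rp: '/music/leaf/song.opus', ts: 1630000000 };
var row = linksplit(hit.rp).join('') +
    '<br /><small>' + unix2iso(hit.ts) + ' (srv)</small>';
```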
@@ -1552,6 +1583,7 @@ function up2k_init(subtle) {
                apop(st.busy.handshake, t);
                st.bytes.finished += t.size;
                t.done = true;
+                t.fobj = null;
                tasker();
                return;
            }
@@ -1562,7 +1594,7 @@ function up2k_init(subtle) {
                console.log("server-rename [" + t.purl + "] [" + t.name + "] to [" + rsp_purl + "] [" + response.name + "]");
                t.purl = rsp_purl;
                t.name = response.name;
-                pvis.seth(t.n, 0, linksplit(uricom_dec(t.purl)[0] + t.name).join(' '));
+                pvis.seth(t.n, 0, linksplit(t.purl + uricom_enc(t.name)).join(' '));
            }

            var chunksize = get_chunksize(t.size),
@@ -1618,6 +1650,7 @@ function up2k_init(subtle) {

            if (done) {
                t.done = true;
+                t.fobj = null;
                st.bytes.finished += t.size - t.bytes_uploaded;
                var spd1 = (t.size / ((t.t_hashed - t.t_hashing) / 1000.)) / (1024 * 1024.),
                    spd2 = (t.size / ((t.t_uploaded - t.t_uploading) / 1000.)) / (1024 * 1024.);
@@ -1652,12 +1685,18 @@ function up2k_init(subtle) {
            }

            st.bytes.finished += t.size;
-            if (rsp.indexOf('partial upload exists') !== -1 ||
-                rsp.indexOf('file already exists') !== -1) {
+            var err_pend = rsp.indexOf('partial upload exists') + 1,
+                err_dupe = rsp.indexOf('file already exists') + 1;
+
+            if (err_pend || err_dupe) {
                err = rsp;
                ofs = err.indexOf('\n/');
                if (ofs !== -1) {
-                    err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2)).join(' ');
+                    err = err.slice(0, ofs + 1) + linksplit(err.slice(ofs + 2).trimEnd()).join(' ');
+                }
+                if (!t.rechecks && err_pend) {
+                    t.rechecks = 0;
+                    t.want_recheck = true;
                }
            }
            if (err != "") {
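The `+ 1` in the hunk above is the usual indexOf idiom: it maps "not found" (-1) to 0, so the variable is truthy exactly when the substring exists. Written without the idiom it would be:

```js
// equivalent, slightly more verbose form of the same test
var err_pend = rsp.indexOf('partial upload exists') >= 0,
    err_dupe = rsp.indexOf('file already exists') >= 0;
```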
@@ -1704,7 +1743,8 @@ function up2k_init(subtle) {
        st.busy.upload.push(upt);

        var npart = upt.npart,
-            t = st.files[upt.nfile];
+            t = st.files[upt.nfile],
+            tries = 0;

        if (!t.t_uploading)
            t.t_uploading = Date.now();
@@ -1755,8 +1795,9 @@ function up2k_init(subtle) {
            if (crashed)
                return;

-            console.log('chunkpit onerror, retrying', t);
-            do_send();
+            toast.err(9.98, "failed to upload a chunk,\n" + tries + " retries so far -- retrying in 10sec\n\n" + t.name);
+            console.log('chunkpit onerror,', ++tries, t);
+            setTimeout(do_send, 10 * 1000);
        };
        xhr.open('POST', t.purl, true);
        xhr.setRequestHeader("X-Up2k-Hash", t.hash[npart]);
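The change above swaps the immediate resend for a 10-second delayed retry with a visible counter. A minimal standalone sketch of that pattern, with a placeholder URL (not copyparty's actual endpoint):

```js
// generic version of the retry-with-delay pattern introduced above
var tries = 0;
function send() {
    var xhr = new XMLHttpRequest();
    xhr.onerror = function () {
        console.log('upload failed,', ++tries, 'retries so far');
        setTimeout(send, 10 * 1000);  // back off instead of hammering the server
    };
    xhr.open('POST', '/some-upload-url', true);  // placeholder URL
    xhr.send();
}
```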
@@ -1851,42 +1892,21 @@ function up2k_init(subtle) {
        bumpthread({ "target": 1 })
    }

-    function tgl_multitask() {
-        multitask = !multitask;
-        bcfg_set('multitask', multitask);
-    }
-
-    function tgl_ask_up() {
-        ask_up = !ask_up;
-        bcfg_set('ask_up', ask_up);
-    }
-
    function tgl_fsearch() {
-        set_fsearch(!fsearch);
-    }
-
-    function tgl_turbo() {
-        turbo = !turbo;
-        bcfg_set('u2turbo', turbo);
-        draw_turbo();
-    }
-
-    function tgl_datechk() {
-        datechk = !datechk;
-        bcfg_set('u2tdate', datechk);
+        set_fsearch(!uc.fsearch);
    }

    function draw_turbo() {
        var msgu = '<p class="warn">WARNING: turbo enabled, <span> client may not detect and resume incomplete uploads; see turbo-button tooltip</span></p>',
            msgs = '<p class="warn">WARNING: turbo enabled, <span> search results can be incorrect; see turbo-button tooltip</span></p>',
-            msg = fsearch ? msgs : msgu,
-            omsg = fsearch ? msgu : msgs,
+            msg = uc.fsearch ? msgs : msgu,
+            omsg = uc.fsearch ? msgu : msgs,
            html = ebi('u2foot').innerHTML,
            ohtml = html;

-        if (turbo && html.indexOf(msg) === -1)
+        if (uc.turbo && html.indexOf(msg) === -1)
            html = html.replace(omsg, '') + msg;
-        else if (!turbo)
+        else if (!uc.turbo)
            html = html.replace(msgu, '').replace(msgs, '');

        if (html !== ohtml)
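The per-toggle functions deleted above are superseded by the generic `bcfg_bind` helper added to util.js further down in this diff. The actual call sites are outside this excerpt, but the rebinding presumably looks roughly like this; element IDs are taken from the removed lines, the default values here are guesses:

```js
// sketch only -- the real wiring is not shown in this excerpt
uc.multitask = bcfg_bind(uc, 'multitask', 'multitask', true);
uc.ask_up    = bcfg_bind(uc, 'ask_up',    'ask_up',    true);
uc.turbo     = bcfg_bind(uc, 'turbo',     'u2turbo',   false, draw_turbo);
uc.datechk   = bcfg_bind(uc, 'datechk',   'u2tdate',   true);
uc.flag_en   = bcfg_bind(uc, 'flag_en',   'flag_en',   false, apply_flag_cfg);
```

This is also why the hunks above keep reading settings from `uc.*` instead of standalone globals: the bound checkbox, the stored preference, and the runtime value all live on one object.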
@@ -1912,8 +1932,8 @@ function up2k_init(subtle) {
        }

        if (new_state !== undefined) {
-            fsearch = new_state;
-            bcfg_set('fsearch', fsearch);
+            uc.fsearch = new_state;
+            bcfg_set('fsearch', uc.fsearch);
        }

        try {
@@ -1922,10 +1942,10 @@ function up2k_init(subtle) {
        catch (ex) { }

        try {
-            var ico = fsearch ? '🔎' : '🚀',
-                desc = fsearch ? 'Search' : 'Upload';
+            var ico = uc.fsearch ? '🔎' : '🚀',
+                desc = uc.fsearch ? 'Search' : 'Upload';

-            clmod(ebi('op_up2k'), 'srch', fsearch);
+            clmod(ebi('op_up2k'), 'srch', uc.fsearch);
            ebi('u2bm').innerHTML = ico + ' <sup>' + desc + '</sup>';
        }
        catch (ex) { }
@@ -1934,23 +1954,17 @@ function up2k_init(subtle) {
        onresize();
    }

-    function tgl_flag_en() {
-        flag_en = !flag_en;
-        bcfg_set('flag_en', flag_en);
-        apply_flag_cfg();
-    }
-
    function apply_flag_cfg() {
-        if (flag_en && !flag) {
+        if (uc.flag_en && !flag) {
            try {
                flag = up2k_flagbus();
            }
            catch (ex) {
-                toast.err(5, "not supported on your browser:\n" + ex);
-                tgl_flag_en();
+                toast.err(5, "not supported on your browser:\n" + esc(basenames(ex)));
+                bcfg_set('flag_en', false);
            }
        }
-        else if (!flag_en && flag) {
+        else if (!uc.flag_en && flag) {
            if (flag.ours)
                flag.give();

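`up2k_flagbus()` itself is not part of this excerpt; it hands out a cross-tab "flag" so only one browser tab hashes/uploads at a time. As a rough illustration of what such a flag object can look like (not copyparty's actual implementation), a localStorage-based version might be:

```js
// generic illustration only; copyparty's up2k_flagbus is not shown here
function make_flag(name) {
    var flag = { ours: false };
    flag.take = function () {
        localStorage.setItem(name, Date.now());  // claim the flag for this tab
        flag.ours = true;
    };
    flag.give = function () {
        localStorage.removeItem(name);  // release so other tabs may proceed
        flag.ours = false;
    };
    return flag;
}
```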
@@ -1975,14 +1989,6 @@ function up2k_init(subtle) {

        ebi('nthread').onkeydown = bumpthread2;
        ebi('nthread').oninput = bumpthread;
-        ebi('multitask').onclick = tgl_multitask;
-        ebi('ask_up').onclick = tgl_ask_up;
-        ebi('flag_en').onclick = tgl_flag_en;
-        ebi('u2turbo').onclick = tgl_turbo;
-        ebi('u2tdate').onclick = tgl_datechk;
-        var o = ebi('fsearch');
-        if (o)
-            o.onclick = tgl_fsearch;

        ebi('u2etas').onclick = function (e) {
            ev(e);
|
||||||
|
|||||||
@@ -29,9 +29,24 @@ function esc(txt) {
|
|||||||
}[c];
|
}[c];
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
window.onunhandledrejection = function (e) {
|
function basenames(txt) {
|
||||||
console.log("REJ: " + e.reason);
|
return (txt + '').replace(/https?:\/\/[^ \/]+\//g, '/').replace(/js\?_=[a-zA-Z]{4}/g, 'js');
|
||||||
};
|
}
|
||||||
|
if ((document.location + '').indexOf(',rej,') + 1)
|
||||||
|
window.onunhandledrejection = function (e) {
|
||||||
|
var err = e.reason;
|
||||||
|
try {
|
||||||
|
err += '\n' + e.reason.stack;
|
||||||
|
}
|
||||||
|
catch (e) { }
|
||||||
|
err = basenames(err);
|
||||||
|
console.log("REJ: " + err);
|
||||||
|
try {
|
||||||
|
toast.warn(30, err);
|
||||||
|
}
|
||||||
|
catch (e) { }
|
||||||
|
};
|
||||||
|
|
||||||
try {
|
try {
|
||||||
console.hist = [];
|
console.hist = [];
|
||||||
var hook = function (t) {
|
var hook = function (t) {
|
||||||
@@ -142,7 +157,7 @@ function vis_exh(msg, url, lineNo, columnNo, error) {
|
|||||||
);
|
);
|
||||||
document.head.appendChild(s);
|
document.head.appendChild(s);
|
||||||
}
|
}
|
||||||
exbox.innerHTML = html.join('\n').replace(/https?:\/\/[^ \/]+\//g, '/').replace(/js\?_=[a-zA-Z]{4}/g, 'js').replace(/<ghi>/, 'https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md');
|
exbox.innerHTML = basenames(html.join('\n')).replace(/<ghi>/, 'https://github.com/9001/copyparty/issues/new?labels=bug&template=bug_report.md');
|
||||||
exbox.style.display = 'block';
|
exbox.style.display = 'block';
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
@@ -160,6 +175,9 @@ function ignex(all) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function noop() { }
|
||||||
|
|
||||||
|
|
||||||
function ctrl(e) {
|
function ctrl(e) {
|
||||||
return e && (e.ctrlKey || e.metaKey);
|
return e && (e.ctrlKey || e.metaKey);
|
||||||
}
|
}
|
||||||
@@ -185,36 +203,40 @@ function ev(e) {
|
|||||||
|
|
||||||
|
|
||||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith
|
||||||
if (!String.prototype.endsWith) {
|
if (!String.prototype.endsWith)
|
||||||
String.prototype.endsWith = function (search, this_len) {
|
String.prototype.endsWith = function (search, this_len) {
|
||||||
if (this_len === undefined || this_len > this.length) {
|
if (this_len === undefined || this_len > this.length) {
|
||||||
this_len = this.length;
|
this_len = this.length;
|
||||||
}
|
}
|
||||||
return this.substring(this_len - search.length, this_len) === search;
|
return this.substring(this_len - search.length, this_len) === search;
|
||||||
};
|
};
|
||||||
}
|
|
||||||
if (!String.startsWith) {
|
if (!String.startsWith)
|
||||||
String.prototype.startsWith = function (s, i) {
|
String.prototype.startsWith = function (s, i) {
|
||||||
i = i > 0 ? i | 0 : 0;
|
i = i > 0 ? i | 0 : 0;
|
||||||
return this.substring(i, i + s.length) === s;
|
return this.substring(i, i + s.length) === s;
|
||||||
};
|
};
|
||||||
}
|
|
||||||
if (!Element.prototype.matches) {
|
if (!String.trimEnd)
|
||||||
|
String.prototype.trimEnd = String.prototype.trimRight = function () {
|
||||||
|
return this.replace(/[ \t\r\n]+$/m, '');
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!Element.prototype.matches)
|
||||||
Element.prototype.matches =
|
Element.prototype.matches =
|
||||||
Element.prototype.oMatchesSelector ||
|
Element.prototype.oMatchesSelector ||
|
||||||
Element.prototype.msMatchesSelector ||
|
Element.prototype.msMatchesSelector ||
|
||||||
Element.prototype.mozMatchesSelector ||
|
Element.prototype.mozMatchesSelector ||
|
||||||
Element.prototype.webkitMatchesSelector;
|
Element.prototype.webkitMatchesSelector;
|
||||||
}
|
|
||||||
if (!Element.prototype.closest) {
|
if (!Element.prototype.closest)
|
||||||
Element.prototype.closest = function (s) {
|
Element.prototype.closest = function (s) {
|
||||||
var el = this;
|
var el = this;
|
||||||
do {
|
do {
|
||||||
if (el.matches(s)) return el;
|
if (el.matches(s)) return el;
|
||||||
el = el.parentElement || el.parentNode;
|
el = el.parentElement || el.parentNode;
|
||||||
} while (el !== null && el.nodeType === 1);
|
} while (el !== null && el.nodeType === 1);
|
||||||
}
|
};
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
// https://stackoverflow.com/a/950146
|
// https://stackoverflow.com/a/950146
|
||||||
@@ -225,7 +247,9 @@ function import_js(url, cb) {
|
|||||||
script.src = url;
|
script.src = url;
|
||||||
script.onload = cb;
|
script.onload = cb;
|
||||||
script.onerror = function () {
|
script.onerror = function () {
|
||||||
toast.err(0, 'Failed to load module:\n' + url);
|
var m = 'Failed to load module:\n' + url;
|
||||||
|
console.log(m);
|
||||||
|
toast.err(0, m);
|
||||||
};
|
};
|
||||||
head.appendChild(script);
|
head.appendChild(script);
|
||||||
}
|
}
|
||||||
@@ -361,8 +385,16 @@ function makeSortable(table, cb) {
|
|||||||
|
|
||||||
|
|
||||||
function linksplit(rp) {
|
function linksplit(rp) {
|
||||||
var ret = [];
|
var ret = [],
|
||||||
var apath = '/';
|
apath = '/',
|
||||||
|
q = null;
|
||||||
|
|
||||||
|
if (rp && rp.indexOf('?') + 1) {
|
||||||
|
q = rp.split('?', 2);
|
||||||
|
rp = q[0];
|
||||||
|
q = '?' + q[1];
|
||||||
|
}
|
||||||
|
|
||||||
if (rp && rp.charAt(0) == '/')
|
if (rp && rp.charAt(0) == '/')
|
||||||
rp = rp.slice(1);
|
rp = rp.slice(1);
|
||||||
|
|
||||||
@@ -376,16 +408,17 @@ function linksplit(rp) {
|
|||||||
link = rp.slice(0, ofs + 1);
|
link = rp.slice(0, ofs + 1);
|
||||||
rp = rp.slice(ofs + 1);
|
rp = rp.slice(ofs + 1);
|
||||||
}
|
}
|
||||||
var vlink = esc(link),
|
var vlink = esc(uricom_dec(link)[0]);
|
||||||
elink = uricom_enc(link);
|
|
||||||
|
|
||||||
if (link.indexOf('/') !== -1) {
|
if (link.indexOf('/') !== -1) {
|
||||||
vlink = vlink.slice(0, -1) + '<span>/</span>';
|
vlink = vlink.slice(0, -1) + '<span>/</span>';
|
||||||
elink = elink.slice(0, -3) + '/';
|
|
||||||
}
|
}
|
||||||
|
|
||||||
ret.push('<a href="' + apath + elink + '">' + vlink + '</a>');
|
if (!rp && q)
|
||||||
apath += elink;
|
link += q;
|
||||||
|
|
||||||
|
ret.push('<a href="' + apath + link + '">' + vlink + '</a>');
|
||||||
|
apath += link;
|
||||||
}
|
}
|
||||||
return ret;
|
return ret;
|
||||||
}
|
}
|
||||||
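The linksplit changes above decode each segment for display, keep the href percent-encoded, and re-attach a `?query` (if any) to the final segment only. Roughly what that produces, assuming a made-up path:

```js
// approximate output of the updated linksplit (illustration, trimmed markup)
linksplit('docs/h%C3%A5ndbok.pdf?cache').join('');
// -> '<a href="/docs/">docs/</a><a href="/docs/h%C3%A5ndbok.pdf?cache">håndbok.pdf</a>'
```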
@@ -467,6 +500,11 @@ function get_vpath() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
function noq_href(el) {
|
||||||
|
return el.getAttribute('href').split('?')[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
function get_pwd() {
|
function get_pwd() {
|
||||||
var pwd = ('; ' + document.cookie).split('; cppwd=');
|
var pwd = ('; ' + document.cookie).split('; cppwd=');
|
||||||
if (pwd.length < 2)
|
if (pwd.length < 2)
|
||||||
@@ -545,14 +583,22 @@ function jcp(obj) {
|
|||||||
|
|
||||||
|
|
||||||
function sread(key) {
|
function sread(key) {
|
||||||
return localStorage.getItem(key);
|
try {
|
||||||
|
return localStorage.getItem(key);
|
||||||
|
}
|
||||||
|
catch (e) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function swrite(key, val) {
|
function swrite(key, val) {
|
||||||
if (val === undefined || val === null)
|
try {
|
||||||
localStorage.removeItem(key);
|
if (val === undefined || val === null)
|
||||||
else
|
localStorage.removeItem(key);
|
||||||
localStorage.setItem(key, val);
|
else
|
||||||
|
localStorage.setItem(key, val);
|
||||||
|
}
|
||||||
|
catch (e) { }
|
||||||
}
|
}
|
||||||
|
|
||||||
function jread(key, fb) {
|
function jread(key, fb) {
|
||||||
@@ -620,6 +666,24 @@ function bcfg_upd_ui(name, val) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function bcfg_bind(obj, oname, cname, defval, cb, un_ev) {
|
||||||
|
var v = bcfg_get(cname, defval),
|
||||||
|
el = ebi(cname);
|
||||||
|
|
||||||
|
obj[oname] = v;
|
||||||
|
if (el)
|
||||||
|
el.onclick = function (e) {
|
||||||
|
if (un_ev !== false)
|
||||||
|
ev(e);
|
||||||
|
|
||||||
|
obj[oname] = bcfg_set(cname, !obj[oname]);
|
||||||
|
if (cb)
|
||||||
|
cb(obj[oname]);
|
||||||
|
};
|
||||||
|
|
||||||
|
return v;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
function hist_push(url) {
|
function hist_push(url) {
|
||||||
console.log("h-push " + url);
|
console.log("h-push " + url);
|
||||||
@@ -679,6 +743,14 @@ var tt = (function () {
|
|||||||
r.tt.setAttribute('id', 'tt');
|
r.tt.setAttribute('id', 'tt');
|
||||||
document.body.appendChild(r.tt);
|
document.body.appendChild(r.tt);
|
||||||
|
|
||||||
|
var prev = null;
|
||||||
|
r.cshow = function () {
|
||||||
|
if (this !== prev)
|
||||||
|
r.show.bind(this)();
|
||||||
|
|
||||||
|
prev = this;
|
||||||
|
};
|
||||||
|
|
||||||
r.show = function () {
|
r.show = function () {
|
||||||
if (r.skip) {
|
if (r.skip) {
|
||||||
r.skip = false;
|
r.skip = false;
|
||||||
@@ -732,6 +804,7 @@ var tt = (function () {
|
|||||||
ev(e);
|
ev(e);
|
||||||
window.removeEventListener('scroll', r.hide);
|
window.removeEventListener('scroll', r.hide);
|
||||||
clmod(r.tt, 'show');
|
clmod(r.tt, 'show');
|
||||||
|
clmod(r.tt, 'b');
|
||||||
if (r.el)
|
if (r.el)
|
||||||
r.el.removeEventListener('mouseleave', r.hide);
|
r.el.removeEventListener('mouseleave', r.hide);
|
||||||
};
|
};
|
||||||
@@ -761,12 +834,13 @@ var tt = (function () {
|
|||||||
r.tt.onclick = r.hide;
|
r.tt.onclick = r.hide;
|
||||||
|
|
||||||
r.att = function (ctr) {
|
r.att = function (ctr) {
|
||||||
var _show = r.en ? r.show : null,
|
var _cshow = r.en ? r.cshow : null,
|
||||||
|
_show = r.en ? r.show : null,
|
||||||
_hide = r.en ? r.hide : null,
|
_hide = r.en ? r.hide : null,
|
||||||
o = ctr.querySelectorAll('*[tt]');
|
o = ctr.querySelectorAll('*[tt]');
|
||||||
|
|
||||||
for (var a = o.length - 1; a >= 0; a--) {
|
for (var a = o.length - 1; a >= 0; a--) {
|
||||||
o[a].onfocus = _show;
|
o[a].onfocus = _cshow;
|
||||||
o[a].onblur = _hide;
|
o[a].onblur = _hide;
|
||||||
o[a].onmouseenter = _show;
|
o[a].onmouseenter = _show;
|
||||||
o[a].onmouseleave = _hide;
|
o[a].onmouseleave = _hide;
|
||||||
@@ -847,6 +921,9 @@ var toast = (function () {
|
|||||||
if (sec)
|
if (sec)
|
||||||
te = setTimeout(r.hide, sec * 1000);
|
te = setTimeout(r.hide, sec * 1000);
|
||||||
|
|
||||||
|
if (txt.indexOf('<body>') + 1)
|
||||||
|
txt = txt.slice(0, txt.indexOf('<')) + ' [...]';
|
||||||
|
|
||||||
obj.innerHTML = '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>';
|
obj.innerHTML = '<a href="#" id="toastc">x</a><div id="toastb">' + lf2br(txt) + '</div>';
|
||||||
obj.className = cl;
|
obj.className = cl;
|
||||||
sec += obj.offsetWidth;
|
sec += obj.offsetWidth;
|
||||||
@@ -988,7 +1065,7 @@ var modal = (function () {
|
|||||||
}
|
}
|
||||||
function _confirm(html, cok, cng, fun) {
|
function _confirm(html, cok, cng, fun) {
|
||||||
cb_ok = cok;
|
cb_ok = cok;
|
||||||
cb_ng = cng === undefined ? cok : null;
|
cb_ng = cng === undefined ? cok : cng;
|
||||||
cb_up = fun;
|
cb_up = fun;
|
||||||
html += '<div id="modalb">' + ok_cancel + '</div>';
|
html += '<div id="modalb">' + ok_cancel + '</div>';
|
||||||
r.show(html);
|
r.show(html);
|
||||||
|
|||||||
@@ -3,6 +3,24 @@
|
|||||||
setTimeout(location.reload.bind(location), 700);
|
setTimeout(location.reload.bind(location), 700);
|
||||||
document.documentElement.scrollLeft = 0;
|
document.documentElement.scrollLeft = 0;
|
||||||
|
|
||||||
|
var cali = (function() {
|
||||||
|
var ac = new AudioContext(),
|
||||||
|
fi = ac.createBiquadFilter(),
|
||||||
|
freqs = new Float32Array(1),
|
||||||
|
mag = new Float32Array(1),
|
||||||
|
phase = new Float32Array(1);
|
||||||
|
|
||||||
|
freqs[0] = 14000;
|
||||||
|
fi.type = 'peaking';
|
||||||
|
fi.frequency.value = 18000;
|
||||||
|
fi.Q.value = 0.8;
|
||||||
|
fi.gain.value = 1;
|
||||||
|
fi.getFrequencyResponse(freqs, mag, phase);
|
||||||
|
|
||||||
|
return mag[0]; // 1.0407 good, 1.0563 bad
|
||||||
|
})(),
|
||||||
|
mp = cali < 1.05;
|
||||||
|
|
||||||
var can = document.createElement('canvas'),
|
var can = document.createElement('canvas'),
|
||||||
cc = can.getContext('2d'),
|
cc = can.getContext('2d'),
|
||||||
w = 2048,
|
w = 2048,
|
||||||
@@ -28,12 +46,12 @@ var cfg = [ // hz, q, g
|
|||||||
[1000, 0.9, 1.1],
|
[1000, 0.9, 1.1],
|
||||||
[2000, 0.9, 1.105],
|
[2000, 0.9, 1.105],
|
||||||
[4000, 0.88, 1.05],
|
[4000, 0.88, 1.05],
|
||||||
[8000 * 1.006, 0.73, 1.24],
|
[8000 * 1.006, 0.73, mp ? 1.24 : 1.2],
|
||||||
//[16000 * 1.00, 0.5, 1.75], // peak.v1
|
//[16000 * 1.00, 0.5, 1.75], // peak.v1
|
||||||
//[16000 * 1.19, 0, 1.8] // shelf.v1
|
//[16000 * 1.19, 0, 1.8] // shelf.v1
|
||||||
[16000 * 0.89, 0.7, 1.26], // peak
|
[16000 * 0.89, 0.7, mp ? 1.26 : 1.2], // peak
|
||||||
[16000 * 1.13, 0.82, 1.09], // peak
|
[16000 * 1.13, 0.82, mp ? 1.09 : 0.75], // peak
|
||||||
[16000 * 1.205, 0, 1.9] // shelf
|
[16000 * 1.205, 0, mp ? 1.9 : 1.85] // shelf
|
||||||
];
|
];
|
||||||
|
|
||||||
var freqs = new Float32Array(22000),
|
var freqs = new Float32Array(22000),
|
||||||
|
|||||||
@@ -17,6 +17,7 @@ html.light {
|
|||||||
html.light #files th {
|
html.light #files th {
|
||||||
background: rgba(255, 255, 255, 0.9) !important;
|
background: rgba(255, 255, 255, 0.9) !important;
|
||||||
}
|
}
|
||||||
|
html.light .logue,
|
||||||
html.light #ops,
|
html.light #ops,
|
||||||
html.light #treeul,
|
html.light #treeul,
|
||||||
html.light #files td {
|
html.light #files td {
|
||||||
|
|||||||
@@ -47,5 +47,5 @@ c e2d
|
|||||||
c nodupe
|
c nodupe
|
||||||
|
|
||||||
# this entire config file can be replaced with these arguments:
|
# this entire config file can be replaced with these arguments:
|
||||||
# -u ed:123 -u k:k -v .::r:a,ed -v priv:priv:r,k:rw,ed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w:c,e2d:c,nodupe
|
# -u ed:123 -u k:k -v .::r:a,ed -v priv:priv:r,k:rw,ed -v /home/ed/Music:music:r -v /home/ed/inc:dump:w:c,e2d,nodupe
|
||||||
# but note that the config file always wins in case of conflicts
|
# but note that the config file always wins in case of conflicts
|
||||||
|
|||||||
@@ -11,6 +11,8 @@
|
|||||||
|
|
||||||
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
|
#u2cleanup, #u2conf tr:first-child>td[rowspan]:not(#u2btn_cw), /* most of the config options */
|
||||||
|
|
||||||
|
#srch_dz, #srch_zd, /* the filesearch dropzone */
|
||||||
|
|
||||||
#u2cards, #u2etaw /* and the upload progress tabs */
|
#u2cards, #u2etaw /* and the upload progress tabs */
|
||||||
|
|
||||||
{display: none !important} /* do it! */
|
{display: none !important} /* do it! */
|
||||||
|
|||||||
@@ -41,9 +41,9 @@ avg() { awk 'function pr(ncsz) {if (nsmp>0) {printf "%3s %s\n", csz, sum/nsmp} c
|
|||||||
##
|
##
|
||||||
## bad filenames
|
## bad filenames
|
||||||
|
|
||||||
dirs=("$HOME/vfs/ほげ" "$HOME/vfs/ほげ/ぴよ" "$HOME/vfs/$(printf \\xed\\x91)" "$HOME/vfs/$(printf \\xed\\x91/\\xed\\x92)")
|
dirs=("./ほげ" "./ほげ/ぴよ" "./$(printf \\xed\\x91)" "./$(printf \\xed\\x91/\\xed\\x92)" './qw,er;ty%20as df?gh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd fgh')
|
||||||
mkdir -p "${dirs[@]}"
|
mkdir -p "${dirs[@]}"
|
||||||
for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qwe,rty;asd fgh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd fgh'; do echo "$dir" > "$dir/$fn.html"; done; done
|
for dir in "${dirs[@]}"; do for fn in ふが "$(printf \\xed\\x93)" 'qw,er;ty%20as df?gh+jkl%zxc&vbn <qwe>"rty'"'"'uio&asd fgh'; do echo "$dir" > "$dir/$fn.html"; done; done
|
||||||
# qw er+ty%20ui%%20op<as>df&gh&jk#zx'cv"bn`m=qw*er^ty?ui@op,as.df-gh_jk
|
# qw er+ty%20ui%%20op<as>df&gh&jk#zx'cv"bn`m=qw*er^ty?ui@op,as.df-gh_jk
|
||||||
|
|
||||||
##
|
##
|
||||||
@@ -79,10 +79,8 @@ command -v gdate && date() { gdate "$@"; }; while true; do t=$(date +%s.%N); (ti
|
|||||||
# get all up2k search result URLs
|
# get all up2k search result URLs
|
||||||
var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n"));
|
var t=[]; var b=document.location.href.split('#')[0].slice(0, -1); document.querySelectorAll('#u2tab .prog a').forEach((x) => {t.push(b+encodeURI(x.getAttribute("href")))}); console.log(t.join("\n"));
|
||||||
|
|
||||||
# rename all selected songs to <leading-track-number> + <Title> + <extension>
|
# debug md-editor line tracking
|
||||||
var sel=msel.getsel(), ci=find_file_col('Title')[0], re=[]; for (var a=0; a<sel.length; a++) { var url=sel[a].vp, tag=ebi(sel[a].id).closest('tr').querySelectorAll('td')[ci].textContent, name=uricom_dec(vsplit(url)[1])[0], m=/^([0-9]+[\. -]+)?.*(\.[^\.]+$)/.exec(name), name2=(m[1]||'')+tag+m[2], url2=vsplit(url)[0]+uricom_enc(name2,false); if (url!=url2) re.push([url, url2]); }
|
var s=mknod('style');s.innerHTML='*[data-ln]:before {content:attr(data-ln)!important;color:#f0c;background:#000;position:absolute;left:-1.5em;font-size:1rem}';document.head.appendChild(s);
|
||||||
console.log(JSON.stringify(re, null, ' '));
|
|
||||||
function f() { if (!re.length) return treectl.goto(get_evpath()); var [u1,u2] = re.shift(); fetch(u1+'?move='+u2).then((rsp) => {if (rsp.ok) f(); }); }; f();
|
|
||||||
|
|
||||||
##
|
##
|
||||||
## bash oneliners
|
## bash oneliners
|
||||||
@@ -164,7 +162,7 @@ brew install python@2
|
|||||||
pip install virtualenv
|
pip install virtualenv
|
||||||
|
|
||||||
# readme toc
|
# readme toc
|
||||||
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md
|
cat README.md | awk 'function pr() { if (!h) {return}; if (/^ *[*!#]/||!s) {printf "%s\n",h;h=0;return}; if (/.../) {printf "%s - %s\n",h,$0;h=0}; }; /^#/{s=1;pr()} /^#* *(file indexing|install on android|dev env setup|just the sfx|complete release|optional gpl stuff)|`$/{s=0} /^#/{lv=length($1);sub(/[^ ]+ /,"");bab=$0;gsub(/ /,"-",bab); h=sprintf("%" ((lv-1)*4+1) "s [%s](#%s)", "*",$0,bab);next} !h{next} {sub(/ .*/,"");sub(/[:,]$/,"")} {pr()}' > toc; grep -E '^## readme toc' -B1000 -A2 <README.md >p1; grep -E '^## quickstart' -B2 -A999999 <README.md >p2; (cat p1; grep quickstart -A1000 <toc; cat p2) >README.md; rm p1 p2 toc
|
||||||
|
|
||||||
# fix firefox phantom breakpoints,
|
# fix firefox phantom breakpoints,
|
||||||
# suggestions from bugtracker, doesnt work (debugger is not attachable)
|
# suggestions from bugtracker, doesnt work (debugger is not attachable)
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
FROM alpine:3.13
|
FROM alpine:3.14
|
||||||
WORKDIR /z
|
WORKDIR /z
|
||||||
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
|
ENV ver_asmcrypto=5b994303a9d3e27e0915f72a10b6c2c51535a4dc \
|
||||||
ver_hashwasm=4.7.0 \
|
ver_hashwasm=4.9.0 \
|
||||||
ver_marked=1.1.0 \
|
ver_marked=3.0.4 \
|
||||||
ver_ogvjs=1.8.4 \
|
ver_ogvjs=1.8.4 \
|
||||||
ver_mde=2.14.0 \
|
ver_mde=2.15.0 \
|
||||||
ver_codemirror=5.59.3 \
|
ver_codemirror=5.62.3 \
|
||||||
ver_fontawesome=5.13.0 \
|
ver_fontawesome=5.13.0 \
|
||||||
ver_zopfli=1.0.3
|
ver_zopfli=1.0.3
|
||||||
|
|
||||||
@@ -113,9 +113,10 @@ RUN cd CodeMirror-$ver_codemirror \
|
|||||||
COPY easymde.patch /z/
|
COPY easymde.patch /z/
|
||||||
RUN cd easy-markdown-editor-$ver_mde \
|
RUN cd easy-markdown-editor-$ver_mde \
|
||||||
&& patch -p1 < /z/easymde.patch \
|
&& patch -p1 < /z/easymde.patch \
|
||||||
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-0.8.2.tgz`file:/z/nodepkgs/marked`' package-lock.json \
|
&& sed -ri 's`https://registry.npmjs.org/marked/-/marked-[0-9\.]+.tgz`file:/z/nodepkgs/marked`' package-lock.json \
|
||||||
&& sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \
|
&& sed -ri 's`("marked": ")[^"]+`\1file:/z/nodepkgs/marked`' ./package.json \
|
||||||
&& sed -ri 's`("codemirror": ")[^"]+`\1file:/z/nodepkgs/codemirror`' ./package.json \
|
&& sed -ri 's`("codemirror": ")[^"]+`\1file:/z/nodepkgs/codemirror`' ./package.json \
|
||||||
|
&& sed -ri 's`^var marked = require\(.marked/lib/marked.\);$`var marked = window.marked;`' src/js/easymde.js \
|
||||||
&& npm install
|
&& npm install
|
||||||
|
|
||||||
COPY easymde-ln.patch /z/
|
COPY easymde-ln.patch /z/
|
||||||
|
|||||||
@@ -1,15 +1,15 @@
|
|||||||
diff --git a/src/Lexer.js b/src/Lexer.js
|
diff --git a/src/Lexer.js b/src/Lexer.js
|
||||||
adds linetracking to marked.js v1.0.0 +git;
|
adds linetracking to marked.js v3.0.4;
|
||||||
add data-ln="%d" to most tags, %d is the source markdown line
|
add data-ln="%d" to most tags, %d is the source markdown line
|
||||||
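For reference, the `data-ln` attributes this patch injects are what let the markdown editor map a rendered element back to its source line. A minimal consumer could look like the following (illustrative only, not copyparty's actual scroll-sync code):

```js
// find the last rendered element whose source line is <= the given line
function elem_for_line(ln) {
    var best = null, nodes = document.querySelectorAll('[data-ln]');
    for (var a = 0; a < nodes.length; a++)
        if (parseInt(nodes[a].getAttribute('data-ln')) <= ln)
            best = nodes[a];
    return best;  // e.g. scrollIntoView() when the editor cursor moves
}
```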
--- a/src/Lexer.js
|
--- a/src/Lexer.js
|
||||||
+++ b/src/Lexer.js
|
+++ b/src/Lexer.js
|
||||||
@@ -49,4 +49,5 @@ function mangle(text) {
|
@@ -50,4 +50,5 @@ function mangle(text) {
|
||||||
module.exports = class Lexer {
|
module.exports = class Lexer {
|
||||||
constructor(options) {
|
constructor(options) {
|
||||||
+ this.ln = 1; // like most editors, start couting from 1
|
+ this.ln = 1; // like most editors, start couting from 1
|
||||||
this.tokens = [];
|
this.tokens = [];
|
||||||
this.tokens.links = Object.create(null);
|
this.tokens.links = Object.create(null);
|
||||||
@@ -108,4 +109,15 @@ module.exports = class Lexer {
|
@@ -127,4 +128,15 @@ module.exports = class Lexer {
|
||||||
}
|
}
|
||||||
|
|
||||||
+ set_ln(token, ln = this.ln) {
|
+ set_ln(token, ln = this.ln) {
|
||||||
@@ -25,122 +25,123 @@ add data-ln="%d" to most tags, %d is the source markdown line
|
|||||||
+
|
+
|
||||||
/**
|
/**
|
||||||
* Lexing
|
* Lexing
|
||||||
@@ -113,10 +125,15 @@ module.exports = class Lexer {
|
@@ -134,7 +146,11 @@ module.exports = class Lexer {
|
||||||
blockTokens(src, tokens = [], top = true) {
|
src = src.replace(/^ +$/gm, '');
|
||||||
src = src.replace(/^ +$/gm, '');
|
}
|
||||||
- let token, i, l, lastToken;
|
- let token, lastToken, cutSrc, lastParagraphClipped;
|
||||||
+ let token, i, l, lastToken, ln;
|
+ let token, lastToken, cutSrc, lastParagraphClipped, ln;
|
||||||
|
|
||||||
while (src) {
|
while (src) {
|
||||||
+ // this.ln will be bumped by recursive calls into this func;
|
+ // this.ln will be bumped by recursive calls into this func;
|
||||||
+ // reset the count and rely on the outermost token's raw only
|
+ // reset the count and rely on the outermost token's raw only
|
||||||
+ ln = this.ln;
|
+ ln = this.ln;
|
||||||
+
|
+
|
||||||
// newline
|
if (this.options.extensions
|
||||||
|
&& this.options.extensions.block
|
||||||
|
@@ -142,4 +158,5 @@ module.exports = class Lexer {
|
||||||
|
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
|
||||||
|
src = src.substring(token.raw.length);
|
||||||
|
+ this.set_ln(token, ln);
|
||||||
|
tokens.push(token);
|
||||||
|
return true;
|
||||||
|
@@ -153,4 +170,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.space(src)) {
|
if (token = this.tokenizer.space(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token); // is \n if not type
|
+ this.set_ln(token, ln); // is \n if not type
|
||||||
if (token.type) {
|
if (token.type) {
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
@@ -128,4 +145,5 @@ module.exports = class Lexer {
|
@@ -162,4 +180,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.code(src, tokens)) {
|
if (token = this.tokenizer.code(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
if (token.type) {
|
lastToken = tokens[tokens.length - 1];
|
||||||
tokens.push(token);
|
// An indented code block cannot interrupt a paragraph.
|
||||||
@@ -141,4 +159,5 @@ module.exports = class Lexer {
|
@@ -177,4 +196,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.fences(src)) {
|
if (token = this.tokenizer.fences(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -148,4 +167,5 @@ module.exports = class Lexer {
|
@@ -184,4 +204,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.heading(src)) {
|
if (token = this.tokenizer.heading(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -155,4 +175,5 @@ module.exports = class Lexer {
|
@@ -191,4 +212,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.nptable(src)) {
|
|
||||||
src = src.substring(token.raw.length);
|
|
||||||
+ this.set_ln(token);
|
|
||||||
tokens.push(token);
|
|
||||||
continue;
|
|
||||||
@@ -162,4 +183,5 @@ module.exports = class Lexer {
|
|
||||||
if (token = this.tokenizer.hr(src)) {
|
if (token = this.tokenizer.hr(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -170,4 +192,7 @@ module.exports = class Lexer {
|
@@ -198,4 +220,5 @@ module.exports = class Lexer {
|
||||||
|
if (token = this.tokenizer.blockquote(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
token.tokens = this.blockTokens(token.text, [], top);
|
|
||||||
+ // recursive call to blockTokens probably bumped this.ln,
|
|
||||||
+ // token.raw is more reliable so reset this.ln and use that
|
|
||||||
+ this.set_ln(token, ln);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -180,5 +205,9 @@ module.exports = class Lexer {
|
@@ -205,4 +228,5 @@ module.exports = class Lexer {
|
||||||
for (i = 0; i < l; i++) {
|
if (token = this.tokenizer.list(src)) {
|
||||||
token.items[i].tokens = this.blockTokens(token.items[i].text, [], false);
|
src = src.substring(token.raw.length);
|
||||||
+ // list entries don't bump the linecounter, so let's
|
|
||||||
+ this.ln++;
|
|
||||||
}
|
|
||||||
+ // then reset like blockquote
|
|
||||||
+ this.set_ln(token, ln);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -188,4 +217,5 @@ module.exports = class Lexer {
|
@@ -212,4 +236,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.html(src)) {
|
if (token = this.tokenizer.html(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -195,4 +225,5 @@ module.exports = class Lexer {
|
@@ -219,4 +244,5 @@ module.exports = class Lexer {
|
||||||
if (top && (token = this.tokenizer.def(src))) {
|
if (token = this.tokenizer.def(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
if (!this.tokens.links[token.tag]) {
|
lastToken = tokens[tokens.length - 1];
|
||||||
this.tokens.links[token.tag] = {
|
if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
|
||||||
@@ -207,4 +238,5 @@ module.exports = class Lexer {
|
@@ -236,4 +262,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.table(src)) {
|
if (token = this.tokenizer.table(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -214,4 +246,5 @@ module.exports = class Lexer {
|
@@ -243,4 +270,5 @@ module.exports = class Lexer {
|
||||||
if (token = this.tokenizer.lheading(src)) {
|
if (token = this.tokenizer.lheading(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
@@ -221,4 +254,5 @@ module.exports = class Lexer {
|
@@ -263,4 +291,5 @@ module.exports = class Lexer {
|
||||||
if (top && (token = this.tokenizer.paragraph(src))) {
|
}
|
||||||
|
if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
|
||||||
|
+ this.set_ln(token, ln);
|
||||||
|
lastToken = tokens[tokens.length - 1];
|
||||||
|
if (lastParagraphClipped && lastToken.type === 'paragraph') {
|
||||||
|
@@ -280,4 +309,6 @@ module.exports = class Lexer {
|
||||||
|
if (token = this.tokenizer.text(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ this.set_ln(token);
|
+ this.set_ln(token, ln);
|
||||||
tokens.push(token);
|
+ this.ln++;
|
||||||
continue;
|
lastToken = tokens[tokens.length - 1];
|
||||||
@@ -228,4 +262,5 @@ module.exports = class Lexer {
|
if (lastToken && lastToken.type === 'text') {
|
||||||
if (token = this.tokenizer.text(src, tokens)) {
|
@@ -355,4 +386,5 @@ module.exports = class Lexer {
|
||||||
src = src.substring(token.raw.length);
|
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
|
||||||
+ this.set_ln(token);
|
src = src.substring(token.raw.length);
|
||||||
if (token.type) {
|
+ this.ln = token.ln || this.ln;
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
@@ -263,4 +298,7 @@ module.exports = class Lexer {
|
return true;
|
||||||
for (i = 0; i < l; i++) {
|
@@ -420,4 +452,6 @@ module.exports = class Lexer {
|
||||||
token = tokens[i];
|
|
||||||
+ // this.ln is at EOF when inline() is invoked;
|
|
||||||
+ // all this affects <br> tags only so no biggie if it breaks
|
|
||||||
+ this.ln = token.ln || this.ln;
|
|
||||||
switch (token.type) {
|
|
||||||
case 'paragraph':
|
|
||||||
@@ -386,4 +424,6 @@ module.exports = class Lexer {
|
|
||||||
if (token = this.tokenizer.br(src)) {
|
if (token = this.tokenizer.br(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
+ // no need to reset (no more blockTokens anyways)
|
+ // no need to reset (no more blockTokens anyways)
|
||||||
+ token.ln = this.ln++;
|
+ token.ln = this.ln++;
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
continue;
|
continue;
|
||||||
|
@@ -462,4 +496,5 @@ module.exports = class Lexer {
|
||||||
|
if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
|
||||||
|
src = src.substring(token.raw.length);
|
||||||
|
+ this.ln = token.ln || this.ln;
|
||||||
|
if (token.raw.slice(-1) !== '_') { // Track prevChar before string of ____ started
|
||||||
|
prevChar = token.raw.slice(-1);
|
||||||
diff --git a/src/Parser.js b/src/Parser.js
|
diff --git a/src/Parser.js b/src/Parser.js
|
||||||
--- a/src/Parser.js
|
--- a/src/Parser.js
|
||||||
+++ b/src/Parser.js
|
+++ b/src/Parser.js
|
||||||
@@ -150,17 +151,16 @@ diff --git a/src/Parser.js b/src/Parser.js
|
|||||||
+ this.ln = 0; // error indicator; should always be set >=1 from tokens
|
+ this.ln = 0; // error indicator; should always be set >=1 from tokens
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -55,4 +56,9 @@ module.exports = class Parser {
|
@@ -64,4 +65,8 @@ module.exports = class Parser {
|
||||||
for (i = 0; i < l; i++) {
|
for (i = 0; i < l; i++) {
|
||||||
token = tokens[i];
|
token = tokens[i];
|
||||||
+ // take line-numbers from tokens whenever possible
|
+ // take line-numbers from tokens whenever possible
|
||||||
+ // and update the renderer's html attribute with the new value
|
+ // and update the renderer's html attribute with the new value
|
||||||
+ this.ln = token.ln || this.ln;
|
+ this.ln = token.ln || this.ln;
|
||||||
+ this.renderer.tag_ln(this.ln);
|
+ this.renderer.tag_ln(this.ln);
|
||||||
+
|
|
||||||
switch (token.type) {
|
// Run any renderer extensions
|
||||||
case 'space': {
|
@@ -124,7 +129,10 @@ module.exports = class Parser {
|
||||||
@@ -105,7 +111,10 @@ module.exports = class Parser {
|
|
||||||
}
|
}
|
||||||
|
|
||||||
- body += this.renderer.tablerow(cell);
|
- body += this.renderer.tablerow(cell);
|
||||||
@@ -173,7 +173,7 @@ diff --git a/src/Parser.js b/src/Parser.js
|
|||||||
+ out += this.renderer.tag_ln(token.ln).table(header, body);
|
+ out += this.renderer.tag_ln(token.ln).table(header, body);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
@@ -148,8 +157,12 @@ module.exports = class Parser {
|
@@ -167,8 +175,12 @@ module.exports = class Parser {
|
||||||
|
|
||||||
itemBody += this.parse(item.tokens, loose);
|
itemBody += this.parse(item.tokens, loose);
|
||||||
- body += this.renderer.listitem(itemBody, task, checked);
|
- body += this.renderer.listitem(itemBody, task, checked);
|
||||||
@@ -188,7 +188,7 @@ diff --git a/src/Parser.js b/src/Parser.js
|
|||||||
+ out += this.renderer.tag_ln(token.ln).list(body, ordered, start);
|
+ out += this.renderer.tag_ln(token.ln).list(body, ordered, start);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
@@ -160,5 +173,6 @@ module.exports = class Parser {
|
@@ -179,5 +191,6 @@ module.exports = class Parser {
|
||||||
}
|
}
|
||||||
case 'paragraph': {
|
case 'paragraph': {
|
||||||
- out += this.renderer.paragraph(this.parseInline(token.tokens));
|
- out += this.renderer.paragraph(this.parseInline(token.tokens));
|
||||||
@@ -196,22 +196,14 @@ diff --git a/src/Parser.js b/src/Parser.js
|
|||||||
+ out += this.renderer.tag_ln(token.ln).paragraph(t);
|
+ out += this.renderer.tag_ln(token.ln).paragraph(t);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
@@ -199,4 +213,6 @@ module.exports = class Parser {
|
@@ -221,4 +234,7 @@ module.exports = class Parser {
|
||||||
for (i = 0; i < l; i++) {
|
|
||||||
token = tokens[i];
|
token = tokens[i];
|
||||||
|
|
||||||
+ // another thing that only affects <br/> and other inlines
|
+ // another thing that only affects <br/> and other inlines
|
||||||
+ this.ln = token.ln || this.ln;
|
+ this.ln = token.ln || this.ln;
|
||||||
switch (token.type) {
|
+
|
||||||
case 'escape': {
|
// Run any renderer extensions
|
||||||
@@ -229,5 +245,7 @@ module.exports = class Parser {
|
if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[token.type]) {
|
||||||
}
|
|
||||||
case 'br': {
|
|
||||||
- out += renderer.br();
|
|
||||||
+ // update the html attribute before writing each <br/>,
|
|
||||||
+ // don't care about the others
|
|
||||||
+ out += renderer.tag_ln(this.ln).br();
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
diff --git a/src/Renderer.js b/src/Renderer.js
|
diff --git a/src/Renderer.js b/src/Renderer.js
|
||||||
--- a/src/Renderer.js
|
--- a/src/Renderer.js
|
||||||
+++ b/src/Renderer.js
|
+++ b/src/Renderer.js
|
||||||
@@ -228,7 +220,7 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
|||||||
+
|
+
|
||||||
code(code, infostring, escaped) {
|
code(code, infostring, escaped) {
|
||||||
const lang = (infostring || '').match(/\S*/)[0];
|
const lang = (infostring || '').match(/\S*/)[0];
|
||||||
@@ -24,10 +30,10 @@ module.exports = class Renderer {
|
@@ -26,10 +32,10 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
if (!lang) {
|
if (!lang) {
|
||||||
- return '<pre><code>'
|
- return '<pre><code>'
|
||||||
@@ -241,58 +233,69 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
|||||||
+ return '<pre' + this.ln + '><code class="'
|
+ return '<pre' + this.ln + '><code class="'
|
||||||
+ this.options.langPrefix
|
+ this.options.langPrefix
|
||||||
+ escape(lang, true)
|
+ escape(lang, true)
|
||||||
@@ -38,5 +44,5 @@ module.exports = class Renderer {
|
@@ -40,5 +46,5 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
blockquote(quote) {
|
blockquote(quote) {
|
||||||
- return '<blockquote>\n' + quote + '</blockquote>\n';
|
- return '<blockquote>\n' + quote + '</blockquote>\n';
|
||||||
+ return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n';
|
+ return '<blockquote' + this.ln + '>\n' + quote + '</blockquote>\n';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -49,4 +55,5 @@ module.exports = class Renderer {
|
@@ -51,4 +57,5 @@ module.exports = class Renderer {
|
||||||
return '<h'
|
return '<h'
|
||||||
+ level
|
+ level
|
||||||
+ + this.ln
|
+ + this.ln
|
||||||
+ ' id="'
|
+ ' id="'
|
||||||
+ this.options.headerPrefix
|
+ this.options.headerPrefix
|
||||||
@@ -59,5 +66,5 @@ module.exports = class Renderer {
|
@@ -61,5 +68,5 @@ module.exports = class Renderer {
|
||||||
}
|
}
|
||||||
// ignore IDs
|
// ignore IDs
|
||||||
- return '<h' + level + '>' + text + '</h' + level + '>\n';
|
- return '<h' + level + '>' + text + '</h' + level + '>\n';
|
||||||
+ return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n';
|
+ return '<h' + level + this.ln + '>' + text + '</h' + level + '>\n';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -73,5 +80,5 @@ module.exports = class Renderer {
|
@@ -75,5 +82,5 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
listitem(text) {
|
listitem(text) {
|
||||||
- return '<li>' + text + '</li>\n';
|
- return '<li>' + text + '</li>\n';
|
||||||
+ return '<li' + this.ln + '>' + text + '</li>\n';
|
+ return '<li' + this.ln + '>' + text + '</li>\n';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -85,5 +92,5 @@ module.exports = class Renderer {
|
@@ -87,5 +94,5 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
paragraph(text) {
|
paragraph(text) {
|
||||||
- return '<p>' + text + '</p>\n';
|
- return '<p>' + text + '</p>\n';
|
||||||
+ return '<p' + this.ln + '>' + text + '</p>\n';
|
+ return '<p' + this.ln + '>' + text + '</p>\n';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -100,5 +107,5 @@ module.exports = class Renderer {
|
@@ -102,5 +109,5 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
tablerow(content) {
|
tablerow(content) {
|
||||||
- return '<tr>\n' + content + '</tr>\n';
|
- return '<tr>\n' + content + '</tr>\n';
|
||||||
+ return '<tr' + this.ln + '>\n' + content + '</tr>\n';
|
+ return '<tr' + this.ln + '>\n' + content + '</tr>\n';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -125,5 +132,5 @@ module.exports = class Renderer {
|
@@ -127,5 +134,5 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
br() {
|
br() {
|
||||||
- return this.options.xhtml ? '<br/>' : '<br>';
|
- return this.options.xhtml ? '<br/>' : '<br>';
|
||||||
+ return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>';
|
+ return this.options.xhtml ? '<br' + this.ln + '/>' : '<br' + this.ln + '>';
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -151,5 +158,5 @@ module.exports = class Renderer {
|
@@ -153,5 +160,5 @@ module.exports = class Renderer {
|
||||||
}
|
}
|
||||||
|
|
||||||
- let out = '<img src="' + href + '" alt="' + text + '"';
|
- let out = '<img src="' + href + '" alt="' + text + '"';
|
||||||
+ let out = '<img' + this.ln + ' src="' + href + '" alt="' + text + '"';
|
+ let out = '<img' + this.ln + ' src="' + href + '" alt="' + text + '"';
|
||||||
if (title) {
|
if (title) {
|
||||||
out += ' title="' + title + '"';
|
out += ' title="' + title + '"';
|
||||||
|
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
||||||
|
--- a/src/Tokenizer.js
|
||||||
|
+++ b/src/Tokenizer.js
|
||||||
|
@@ -301,4 +301,7 @@ module.exports = class Tokenizer {
|
||||||
|
const l = list.items.length;
|
||||||
|
|
||||||
|
+ // each nested list gets +1 ahead; this hack makes every listgroup -1 but atleast it doesn't get infinitely bad
|
||||||
|
+ this.lexer.ln--;
|
||||||
|
+
|
||||||
|
// Item child tokens handled here at end because we needed to have the final item to trim it first
|
||||||
|
for (i = 0; i < l; i++) {
|
||||||
|
|||||||
@@ -1,52 +1,52 @@
|
|||||||
diff --git a/src/Lexer.js b/src/Lexer.js
|
diff --git a/src/Lexer.js b/src/Lexer.js
|
||||||
--- a/src/Lexer.js
|
--- a/src/Lexer.js
|
||||||
+++ b/src/Lexer.js
|
+++ b/src/Lexer.js
|
||||||
@@ -5,5 +5,5 @@ const { block, inline } = require('./rules.js');
|
@@ -6,5 +6,5 @@ const { repeatString } = require('./helpers.js');
|
||||||
/**
|
/**
|
||||||
* smartypants text replacement
|
* smartypants text replacement
|
||||||
- */
|
- */
|
||||||
+ *
|
+ *
|
||||||
function smartypants(text) {
|
function smartypants(text) {
|
||||||
return text
|
return text
|
||||||
@@ -26,5 +26,5 @@ function smartypants(text) {
|
@@ -27,5 +27,5 @@ function smartypants(text) {
|
||||||
/**
|
/**
|
||||||
* mangle email addresses
|
* mangle email addresses
|
||||||
- */
|
- */
|
||||||
+ *
|
+ *
|
||||||
function mangle(text) {
|
function mangle(text) {
|
||||||
let out = '',
|
let out = '',
|
||||||
@@ -439,5 +439,5 @@ module.exports = class Lexer {
|
@@ -465,5 +465,5 @@ module.exports = class Lexer {
|
||||||
|
|
||||||
// autolink
|
// autolink
|
||||||
- if (token = this.tokenizer.autolink(src, mangle)) {
|
- if (token = this.tokenizer.autolink(src, mangle)) {
|
||||||
+ if (token = this.tokenizer.autolink(src)) {
|
+ if (token = this.tokenizer.autolink(src)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
@@ -446,5 +446,5 @@ module.exports = class Lexer {
|
@@ -472,5 +472,5 @@ module.exports = class Lexer {
|
||||||
|
|
||||||
// url (gfm)
|
// url (gfm)
|
||||||
- if (!inLink && (token = this.tokenizer.url(src, mangle))) {
|
- if (!this.state.inLink && (token = this.tokenizer.url(src, mangle))) {
|
||||||
+ if (!inLink && (token = this.tokenizer.url(src))) {
|
+ if (!this.state.inLink && (token = this.tokenizer.url(src))) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
tokens.push(token);
|
tokens.push(token);
|
||||||
@@ -453,5 +453,5 @@ module.exports = class Lexer {
|
@@ -493,5 +493,5 @@ module.exports = class Lexer {
|
||||||
|
}
|
||||||
// text
|
}
|
||||||
- if (token = this.tokenizer.inlineText(src, inRawBlock, smartypants)) {
|
- if (token = this.tokenizer.inlineText(cutSrc, smartypants)) {
|
||||||
+ if (token = this.tokenizer.inlineText(src, inRawBlock)) {
|
+ if (token = this.tokenizer.inlineText(cutSrc)) {
|
||||||
src = src.substring(token.raw.length);
|
src = src.substring(token.raw.length);
|
||||||
tokens.push(token);
|
this.ln = token.ln || this.ln;
|
||||||
diff --git a/src/Renderer.js b/src/Renderer.js
|
diff --git a/src/Renderer.js b/src/Renderer.js
|
||||||
--- a/src/Renderer.js
|
--- a/src/Renderer.js
|
||||||
+++ b/src/Renderer.js
|
+++ b/src/Renderer.js
|
||||||
@@ -140,5 +140,5 @@ module.exports = class Renderer {
|
@@ -142,5 +142,5 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
link(href, title, text) {
|
link(href, title, text) {
|
||||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||||
+ href = cleanUrl(this.options.baseUrl, href);
|
+ href = cleanUrl(this.options.baseUrl, href);
|
||||||
if (href === null) {
|
if (href === null) {
|
||||||
return text;
|
return text;
|
||||||
@@ -153,5 +153,5 @@ module.exports = class Renderer {
|
@@ -155,5 +155,5 @@ module.exports = class Renderer {
|
||||||
|
|
||||||
image(href, title, text) {
|
image(href, title, text) {
|
||||||
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
- href = cleanUrl(this.options.sanitize, this.options.baseUrl, href);
|
||||||
@@ -56,22 +56,23 @@ diff --git a/src/Renderer.js b/src/Renderer.js
|
|||||||
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
|
diff --git a/src/Tokenizer.js b/src/Tokenizer.js
--- a/src/Tokenizer.js
+++ b/src/Tokenizer.js
@@ -287,11 +287,8 @@ module.exports = class Tokenizer {
@@ -321,14 +321,7 @@ module.exports = class Tokenizer {
if (cap) {
type: 'html',
return {
- type: this.options.sanitize
- ? 'paragraph'
- : 'html',
+ type: 'html',
raw: cap[0],
- pre: !this.options.sanitizer
- && (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
- text: this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0]
+ pre: (cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style'),
+ pre: cap[1] === 'pre' || cap[1] === 'script' || cap[1] === 'style',
text: cap[0]
+ text: cap[0]
};
- if (this.options.sanitize) {
- token.type = 'paragraph';
- token.text = this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0]);
- token.tokens = [];
- this.lexer.inline(token.text, token.tokens);
- }
return token;
}
@@ -421,15 +418,9 @@ module.exports = class Tokenizer {
@@ -477,15 +470,9 @@ module.exports = class Tokenizer {

return {
- type: this.options.sanitize
@@ -79,8 +80,8 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
- : 'html',
+ type: 'html',
raw: cap[0],
inLink,
inLink: this.lexer.state.inLink,
inRawBlock,
inRawBlock: this.lexer.state.inRawBlock,
- text: this.options.sanitize
- ? (this.options.sanitizer
- ? this.options.sanitizer(cap[0])
@@ -89,7 +90,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text: cap[0]
};
}
@@ -550,10 +541,10 @@ module.exports = class Tokenizer {
@@ -672,10 +659,10 @@ module.exports = class Tokenizer {
}

- autolink(src, mangle) {
@@ -102,7 +103,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text = escape(cap[1]);
href = 'mailto:' + text;
} else {
@@ -578,10 +569,10 @@ module.exports = class Tokenizer {
@@ -700,10 +687,10 @@ module.exports = class Tokenizer {
}

- url(src, mangle) {
@@ -115,15 +116,15 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
+ text = escape(cap[0]);
href = 'mailto:' + text;
} else {
@@ -615,12 +606,12 @@ module.exports = class Tokenizer {
@@ -737,12 +724,12 @@ module.exports = class Tokenizer {
}

- inlineText(src, inRawBlock, smartypants) {
- inlineText(src, smartypants) {
+ inlineText(src, inRawBlock) {
+ inlineText(src) {
const cap = this.rules.inline.text.exec(src);
if (cap) {
let text;
if (inRawBlock) {
if (this.lexer.state.inRawBlock) {
- text = this.options.sanitize ? (this.options.sanitizer ? this.options.sanitizer(cap[0]) : escape(cap[0])) : cap[0];
+ text = cap[0];
} else {
@@ -134,7 +135,7 @@ diff --git a/src/Tokenizer.js b/src/Tokenizer.js
diff --git a/src/defaults.js b/src/defaults.js
--- a/src/defaults.js
+++ b/src/defaults.js
@@ -8,12 +8,8 @@ function getDefaults() {
@@ -9,12 +9,8 @@ function getDefaults() {
highlight: null,
langPrefix: 'language-',
- mangle: true,
@@ -170,7 +171,7 @@ diff --git a/src/helpers.js b/src/helpers.js
+function cleanUrl(base, href) {
if (base && !originIndependentUrl.test(href)) {
href = resolveUrl(base, href);
@@ -223,10 +210,4 @@ function findClosingBracket(str, b) {
@@ -227,10 +214,4 @@ function findClosingBracket(str, b) {
}

-function checkSanitizeDeprecation(opt) {
@@ -179,14 +180,13 @@ diff --git a/src/helpers.js b/src/helpers.js
- }
-}
-
module.exports = {
// copied from https://stackoverflow.com/a/5450113/806777
escape,
function repeatString(pattern, count) {
@@ -239,5 +220,4 @@ module.exports = {
@@ -260,5 +241,4 @@ module.exports = {
splitCells,
rtrim,
- findClosingBracket,
findClosingBracket,
- checkSanitizeDeprecation
- checkSanitizeDeprecation,
+ findClosingBracket
repeatString
};
diff --git a/src/marked.js b/src/marked.js
--- a/src/marked.js
@@ -203,8 +203,14 @@ diff --git a/src/marked.js b/src/marked.js
- checkSanitizeDeprecation(opt);

if (callback) {
@@ -108,5 +106,5 @@ function marked(src, opt, callback) {
@@ -302,5 +300,4 @@ marked.parseInline = function(src, opt) {
return Parser.parse(tokens, opt);
opt = merge({}, marked.defaults, opt || {});
- checkSanitizeDeprecation(opt);

try {
@@ -311,5 +308,5 @@ marked.parseInline = function(src, opt) {
return Parser.parseInline(tokens, opt);
} catch (e) {
- e.message += '\nPlease report this to https://github.com/markedjs/marked.';
+ e.message += '\nmake issue @ https://github.com/9001/copyparty';
@@ -252,86 +258,87 @@ diff --git a/test/bench.js b/test/bench.js
diff --git a/test/specs/run-spec.js b/test/specs/run-spec.js
--- a/test/specs/run-spec.js
+++ b/test/specs/run-spec.js
@@ -22,8 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
@@ -22,9 +22,4 @@ function runSpecs(title, dir, showCompletionTable, options) {
}

- if (spec.options.sanitizer) {
- // eslint-disable-next-line no-eval
- spec.options.sanitizer = eval(spec.options.sanitizer);
- }
-
(spec.only ? fit : (spec.skip ? xit : it))('should ' + passFail + example, async() => {
@@ -53,3 +49,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
const before = process.hrtime();
@@ -53,3 +48,2 @@ runSpecs('Original', './original', false, { gfm: false, pedantic: true });
runSpecs('New', './new');
runSpecs('ReDOS', './redos');
-runSpecs('Security', './security', false, { silent: true }); // silent - do not show deprecation warning
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
--- a/test/unit/Lexer-spec.js
+++ b/test/unit/Lexer-spec.js
@@ -465,5 +465,5 @@ a | b
@@ -589,5 +589,5 @@ paragraph
});

- it('sanitize', () => {
+ /*it('sanitize', () => {
expectTokens({
md: '<div>html</div>',
@@ -483,5 +483,5 @@ a | b
@@ -607,5 +607,5 @@ paragraph
]
});
- });
+ });*/
});

@@ -587,5 +587,5 @@ a | b
@@ -652,5 +652,5 @@ paragraph
});

- it('html sanitize', () => {
+ /*it('html sanitize', () => {
expectInlineTokens({
md: '<div>html</div>',
@@ -597,5 +597,5 @@ a | b
@@ -660,5 +660,5 @@ paragraph
]
});
- });
+ });*/

it('link', () => {
@@ -909,5 +909,5 @@ a | b
@@ -971,5 +971,5 @@ paragraph
});

- it('autolink mangle email', () => {
+ /*it('autolink mangle email', () => {
expectInlineTokens({
md: '<test@example.com>',
@@ -929,5 +929,5 @@ a | b
@@ -991,5 +991,5 @@ paragraph
]
});
- });
+ });*/

it('url', () => {
@@ -966,5 +966,5 @@ a | b
@@ -1028,5 +1028,5 @@ paragraph
});

- it('url mangle email', () => {
+ /*it('url mangle email', () => {
expectInlineTokens({
md: 'test@example.com',
@@ -986,5 +986,5 @@ a | b
@@ -1048,5 +1048,5 @@ paragraph
]
});
- });
+ });*/
});

@@ -1002,5 +1002,5 @@ a | b
@@ -1064,5 +1064,5 @@ paragraph
});

- describe('smartypants', () => {
+ /*describe('smartypants', () => {
it('single quotes', () => {
expectInlineTokens({
@@ -1072,5 +1072,5 @@ a | b
@@ -1134,5 +1134,5 @@ paragraph
});
});
- });
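the hunks above refresh copyparty's patch for a newer marked.js release; it keeps ripping out the sanitize/sanitizer, mangle and smartypants code paths so the bundled copyparty/web/deps/marked.js stays small. roughly how such a patch gets applied when re-vendoring marked (the patch filename and the build step are assumptions, not taken from this diff):

# hypothetical workflow for refreshing copyparty/web/deps/marked.js
git clone https://github.com/markedjs/marked
cd marked
git apply ../marked.patch       # patch name assumed; not confirmed by this diff
npm install && npm run build    # marked's build step may differ between versions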
@@ -206,6 +206,15 @@ while IFS= read -r x; do
tmv "$x"
done

find copyparty | LC_ALL=C sort | sed 's/\.gz$//;s/$/,/' > have
cat have | while IFS= read -r x; do
grep -qF -- "$x" ../scripts/sfx.ls || {
echo "unexpected file: $x"
exit 1
}
done
rm have

[ $no_ogv ] &&
rm -rf copyparty/web/deps/{dynamicaudio,ogv}*

@@ -229,7 +238,7 @@ done
rm -rf copyparty/web/dd
f=copyparty/web/browser.css
gzip -d "$f.gz" || true
sed -r 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; /[0-9]+% \{cursor:/d; /animation: ?cursor/d' <$f >t
sed -r 's/(cursor: ?)url\([^)]+\), ?(pointer)/\1\2/; s/[0-9]+% \{cursor:[^}]+\}//; s/animation: ?cursor[^};]+//' <$f >t
tmv "$f"
}

@@ -262,7 +271,7 @@ find | grep -E '\.css$' | while IFS= read -r f; do
}
!/\}$/ {printf "%s",$0;next}
1
' <$f | sed 's/;\}$/}/' >t
' <$f | sed -r 's/;\}$/}/; /\{\}$/d' >t
tmv "$f"
done
unexpand -h 2>/dev/null &&
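make-sfx.sh now compares the unpacked sfx tree against the new scripts/sfx.ls manifest (listed further down) and aborts on unexpected files; when files are added to copyparty the manifest presumably has to be regenerated with the same pipeline, roughly like this (an assumption, not a command found in this repo):

# hypothetical; run from the directory holding the unpacked copyparty tree
find copyparty | LC_ALL=C sort | sed 's/\.gz$//;s/$/,/' > ../scripts/sfx.ls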
36
scripts/rls.sh
Executable file
@@ -0,0 +1,36 @@
#!/bin/bash
set -e

cd ~/dev/copyparty/scripts

v=$1
printf '%s\n' "$v" | grep -qE '^[0-9\.]+$' || exit 1
grep -E "(${v//./, })" ../copyparty/__version__.py || exit 1

git tag v$v
git push origin --tags

rm -rf ../dist

./make-pypi-release.sh u
(cd .. && python3 ./setup.py clean2)

./make-tgz-release.sh $v

rm -f ../dist/copyparty-sfx.*
./make-sfx.sh no-sh
../dist/copyparty-sfx.py -h

ar=
while true; do
for ((a=0; a<100; a++)); do
for f in ../dist/copyparty-sfx.{py,sh}; do
[ -e $f ] || continue;
mv $f $f.$(wc -c <$f | awk '{print$1}')
done
./make-sfx.sh re $ar
done
ar=no-sh
done

# git tag -d v$v; git push --delete origin v$v
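rls.sh takes the bare version number as its only argument, refuses to run unless the same version already appears in copyparty/__version__.py, then tags the release and rebuilds the pypi, tgz and sfx artifacts (and keeps rebuilding the sfx in a loop). a hypothetical invocation (the version number here is made up):

cd ~/dev/copyparty/scripts
./rls.sh 1.0.11    # hypothetical version; must match copyparty/__version__.py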
77
scripts/sfx.ls
Normal file
@@ -0,0 +1,77 @@
copyparty,
copyparty/__init__.py,
copyparty/__main__.py,
copyparty/__version__.py,
copyparty/authsrv.py,
copyparty/bos,
copyparty/bos/__init__.py,
copyparty/bos/bos.py,
copyparty/bos/path.py,
copyparty/broker_mp.py,
copyparty/broker_mpw.py,
copyparty/broker_thr.py,
copyparty/broker_util.py,
copyparty/httpcli.py,
copyparty/httpconn.py,
copyparty/httpsrv.py,
copyparty/ico.py,
copyparty/mtag.py,
copyparty/res,
copyparty/res/insecure.pem,
copyparty/star.py,
copyparty/stolen,
copyparty/stolen/__init__.py,
copyparty/stolen/surrogateescape.py,
copyparty/sutil.py,
copyparty/svchub.py,
copyparty/szip.py,
copyparty/tcpsrv.py,
copyparty/th_cli.py,
copyparty/th_srv.py,
copyparty/u2idx.py,
copyparty/up2k.py,
copyparty/util.py,
copyparty/web,
copyparty/web/baguettebox.js,
copyparty/web/browser.css,
copyparty/web/browser.html,
copyparty/web/browser.js,
copyparty/web/browser2.html,
copyparty/web/copyparty.gif,
copyparty/web/dd,
copyparty/web/dd/2.png,
copyparty/web/dd/3.png,
copyparty/web/dd/4.png,
copyparty/web/dd/5.png,
copyparty/web/deps,
copyparty/web/deps/easymde.css,
copyparty/web/deps/easymde.js,
copyparty/web/deps/marked.js,
copyparty/web/deps/mini-fa.css,
copyparty/web/deps/mini-fa.woff,
copyparty/web/deps/ogv-decoder-audio-opus-wasm.js,
copyparty/web/deps/ogv-decoder-audio-opus-wasm.wasm,
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.js,
copyparty/web/deps/ogv-decoder-audio-vorbis-wasm.wasm,
copyparty/web/deps/ogv-demuxer-ogg-wasm.js,
copyparty/web/deps/ogv-demuxer-ogg-wasm.wasm,
copyparty/web/deps/ogv-worker-audio.js,
copyparty/web/deps/ogv.js,
copyparty/web/deps/scp.woff2,
copyparty/web/deps/sha512.ac.js,
copyparty/web/deps/sha512.hw.js,
copyparty/web/md.css,
copyparty/web/md.html,
copyparty/web/md.js,
copyparty/web/md2.css,
copyparty/web/md2.js,
copyparty/web/mde.css,
copyparty/web/mde.html,
copyparty/web/mde.js,
copyparty/web/msg.css,
copyparty/web/msg.html,
copyparty/web/splash.css,
copyparty/web/splash.html,
copyparty/web/ui.css,
copyparty/web/up2k.js,
copyparty/web/util.js,
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: latin-1
from __future__ import print_function, unicode_literals

@@ -9,7 +9,7 @@ import subprocess as sp
to edit this file, use HxD or "vim -b"
(there is compressed stuff at the end)

run me with any version of python, i will unpack and run copyparty
run me with python 2.7 or 3.3+ to unpack and run copyparty

there's zero binaries! just plaintext python scripts all the way down
so you can easily unpack the archive and inspect it for shady stuff
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3

import os
import sys
@@ -60,7 +60,7 @@ class Cpp(object):
pass


def tc1():
def tc1(vflags):
ub = "http://127.0.0.1:4321/"
td = os.path.join("srv", "smoketest")
try:
@@ -100,17 +100,17 @@ def tc1():
for d1 in ["r", "w", "a"]:
pdirs.append("{}/{}".format(td, d1))
pdirs.append("{}/{}/j".format(td, d1))
for d2 in ["r", "w", "a"]:
for d2 in ["r", "w", "a", "c"]:
d = os.path.join(td, d1, "j", d2)
pdirs.append(d)
os.makedirs(d)

pdirs = [x.replace("\\", "/") for x in pdirs]
udirs = [x.split("/", 2)[2] for x in pdirs]
perms = [x.rstrip("j/")[-1] for x in pdirs]
perms = [x.rstrip("cj/")[-1] for x in pdirs]
perms = ["rw" if x == "a" else x for x in perms]
for pd, ud, p in zip(pdirs, udirs, perms):
if ud[-1] == "j":
if ud[-1] == "j" or ud[-1] == "c":
continue

hp = None
@@ -123,29 +123,37 @@ def tc1():
hp = "-"
hpaths[ud] = os.path.join(pd, ".hist")

arg = "{}:{}:{}".format(pd, ud, p, hp)
arg = "{}:{}:{}".format(pd, ud, p)
if hp:
arg += ":c,hist=" + hp

args += ["-v", arg]
args += ["-v", arg + vflags]

# return
cpp = Cpp(args)
CPP.append(cpp)
cpp.await_idle(ub, 3)

for d in udirs:
for d, p in zip(udirs, perms):
vid = ovid + "\n{}".format(d).encode("utf-8")
try:
r = requests.post(
requests.post(ub + d, data={"act": "bput"}, files={"f": ("a.h264", vid)})
ub + d,
except:
data={"act": "bput"},
pass
files={"f": (d.replace("/", "") + ".h264", vid)},
)
c = r.status_code
if c == 200 and p not in ["w", "rw"]:
raise Exception("post {} with perm {} at {}".format(c, p, d))
elif c == 403 and p not in ["r"]:
raise Exception("post {} with perm {} at {}".format(c, p, d))
elif c not in [200, 403]:
raise Exception("post {} with perm {} at {}".format(c, p, d))

cpp.clean()

# GET permission
for d, p in zip(udirs, perms):
u = "{}{}/a.h264".format(ub, d)
u = "{}{}/{}.h264".format(ub, d, d.replace("/", ""))
r = requests.get(u)
ok = bool(r)
if ok != (p in ["rw"]):
@@ -153,14 +161,14 @@ def tc1():

# stat filesystem
for d, p in zip(pdirs, perms):
u = "{}/a.h264".format(d)
u = "{}/{}.h264".format(d, d.split("test/")[-1].replace("/", ""))
ok = os.path.exists(u)
if ok != (p in ["rw", "w"]):
raise Exception("stat {} with perm {} at {}".format(ok, p, u))

# GET thumbnail, vreify contents
for d, p in zip(udirs, perms):
u = "{}{}/a.h264?th=j".format(ub, d)
u = "{}{}/{}.h264?th=j".format(ub, d, d.replace("/", ""))
r = requests.get(u)
ok = bool(r and r.content[:3] == b"\xff\xd8\xff")
if ok != (p in ["rw"]):
@@ -192,9 +200,9 @@ def tc1():
cpp.stop(True)


def run(tc):
def run(tc, *a):
try:
tc()
tc(*a)
finally:
try:
CPP[0].stop(False)
@@ -203,7 +211,8 @@ def run(tc):


def main():
run(tc1)
run(tc1, "")
run(tc1, ":c,fk")


if __name__ == "__main__":
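the smoketest now takes a volflag suffix, and main() runs it twice: once with no extra flags and once with ":c,fk" appended to every volume, so the permission matrix is presumably also exercised with filekeys enabled. a rough way to run it locally (the script path and the requests dependency are assumptions based on this diff, not documented here):

python3 -m pip install requests      # the test drives copyparty over HTTP
python3 scripts/test/smoketest.py    # path assumed; run from the repo root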
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals

@@ -8,7 +8,7 @@ import tokenize


def uncomment(fpath):
""" modified https://stackoverflow.com/a/62074206 """
"""modified https://stackoverflow.com/a/62074206"""

with open(fpath, "rb") as f:
orig = f.read().decode("utf-8")
4
setup.py
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function

@@ -114,7 +114,7 @@ args = {
"install_requires": ["jinja2"],
"extras_require": {"thumbnails": ["Pillow"], "audiotags": ["mutagen"]},
"entry_points": {"console_scripts": ["copyparty = copyparty.__main__:main"]},
"scripts": ["bin/copyparty-fuse.py"],
"scripts": ["bin/copyparty-fuse.py", "bin/up2k.py"],
"cmdclass": {"clean2": clean2},
}
20
srv/test.md
@@ -1,11 +1,17 @@
### hello world

* qwe
* asd
* rty
* zxc
* uio
* 573
* asd
* one
* fgh
* two
* jkl
* zxc
* vbn
* 573
* one
* two
* three

* |||
|--|--|
@@ -134,12 +140,12 @@ a newline toplevel
| a table | on the right |
| second row | foo bar |

||
a||a
--|:-:|-:
a table | big text in this | aaakbfddd
second row | centred | bbb

||
||||
--|--|--
foo
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals

@@ -39,6 +39,8 @@ class Cfg(Namespace):
no_scandir=False,
no_sendfile=True,
no_rescan=True,
no_logues=False,
no_readme=False,
re_maxage=0,
ihead=False,
nih=True,
@@ -96,7 +98,7 @@ class TestHttpCli(unittest.TestCase):
if not vol.startswith(top):
continue

mode = vol[-2].replace("a", "rwmd")
mode = vol[-2].replace("a", "rw")
usr = vol[-1]
if usr == "a":
usr = ""
@@ -151,6 +153,7 @@ class TestHttpCli(unittest.TestCase):
tar = tarfile.open(fileobj=io.BytesIO(b)).getnames()
except:
tar = []
tar = [x[4:] if x.startswith("top/") else x for x in tar]
tar = ["/".join([y for y in [top, durl, x] if y]) for x in tar]
tar = [[x] + self.can_rw(x) for x in tar]
tar_ok = [x[0] for x in tar if x[1]]
@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals

@@ -26,6 +26,8 @@ class Cfg(Namespace):
"no_hash": False,
"css_browser": None,
"no_voldump": True,
"no_logues": False,
"no_readme": False,
"re_maxage": 0,
"rproxy": 0,
}
@@ -195,10 +197,10 @@ class TestVFS(unittest.TestCase):
self.assertEqual(n.realpath, os.path.join(td, "a"))
self.assertAxs(n.axs.uread, ["*"])
self.assertAxs(n.axs.uwrite, [])
self.assertEqual(vfs.can_access("/", "*"), [False, False, False, False])
self.assertEqual(vfs.can_access("/", "*"), [False, False, False, False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True, False, False])
self.assertEqual(vfs.can_access("/", "k"), [True, True, False, False, False])
self.assertEqual(vfs.can_access("/a", "*"), [True, False, False, False])
self.assertEqual(vfs.can_access("/a", "*"), [True, False, False, False, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False, False, False])
self.assertEqual(vfs.can_access("/a", "k"), [True, False, False, False, False])

# breadth-first construction
vfs = AuthSrv(
@@ -3,6 +3,7 @@ import sys
import time
import shutil
import jinja2
import threading
import tempfile
import platform
import subprocess as sp
@@ -28,7 +29,7 @@ if MACOS:
# 25% faster; until any tests do symlink stuff


from copyparty.util import Unrecv
from copyparty.util import Unrecv, FHC


def runcmd(argv):
@@ -132,8 +133,10 @@ class VHttpConn(object):
self.log_src = "a"
self.lf_url = None
self.hsrv = VHttpSrv()
self.u2fh = FHC()
self.mutex = threading.Lock()
self.nreq = 0
self.nbyte = 0
self.ico = None
self.thumbcli = None
self.t0 = time.time()