Mirror of https://github.com/9001/copyparty.git (synced 2025-10-31 03:53:31 +00:00)

Compare commits (18 commits)

| SHA1 |
|---|
| 43a23bf733 |
| 92bb00c6d2 |
| b0b97a2648 |
| 2c452fe323 |
| ad73d0c77d |
| 7f9bf1c78c |
| 61a6bc3a65 |
| 46e10b0e9f |
| 8441206e26 |
| 9fdc5ee748 |
| 00ff133387 |
| 96164cb934 |
| 82fb21ae69 |
| 89d4a2b4c4 |
| fc0c7ff374 |
| 5148c4f2e9 |
| c3b59f7bcf |
| 61e148202b |

.vscode/launch.json (vendored), 5 lines changed

							| @@ -13,10 +13,13 @@ | ||||
|                 "-ed", | ||||
|                 "-emp", | ||||
|                 "-e2dsa", | ||||
|                 "-e2ts", | ||||
|                 "-a", | ||||
|                 "ed:wark", | ||||
|                 "-v", | ||||
|                 "srv::r:aed:cnodupe" | ||||
|                 "srv::r:aed:cnodupe", | ||||
|                 "-v", | ||||
|                 "dist:dist:r" | ||||
|             ] | ||||
|         }, | ||||
|         { | ||||
|   | ||||
							
								
								
									
.vscode/tasks.json (vendored), 2 lines changed

							| @@ -8,7 +8,7 @@ | ||||
|         }, | ||||
|         { | ||||
|             "label": "no_dbg", | ||||
|             "command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -a ed:wark -v srv::r:aed:cnodupe ;exit 1", | ||||
|             "command": "${config:python.pythonPath} -m copyparty -ed -emp -e2dsa -e2ts -a ed:wark -v srv::r:aed:cnodupe -v dist:dist:r ;exit 1", | ||||
|             "type": "shell" | ||||
|         } | ||||
|     ] | ||||
|   | ||||
							
								
								
									
README.md, 60 lines changed

							| @@ -59,7 +59,7 @@ you may also want these, especially on servers: | ||||
| * server indexing | ||||
|   * ☑ locate files by contents | ||||
|   * ☑ search by name/path/date/size | ||||
|   * ✖ search by ID3-tags etc. | ||||
|   * ☑ search by ID3-tags etc. | ||||
| * markdown | ||||
|   * ☑ viewer | ||||
|   * ☑ editor (sure why not) | ||||
| @@ -82,7 +82,42 @@ path/name queries are space-separated, AND'ed together, and words are negated wi | ||||
| * path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path | ||||
| * name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9) | ||||
|  | ||||
| other metadata (like song tags etc) are not yet indexed for searching | ||||
| add `-e2ts` to also scan/index tags from music files: | ||||
|  | ||||
|  | ||||
| ## search configuration | ||||
|  | ||||
| searching relies on two databases, the up2k filetree (`-e2d`) and the metadata tags (`-e2t`). Configuration can be done through arguments, volume flags, or a mix of both. | ||||
|  | ||||
| through arguments: | ||||
| * `-e2d` enables file indexing on upload | ||||
| * `-e2ds` scans writable folders on startup | ||||
| * `-e2dsa` scans all mounted volumes (including readonly ones) | ||||
| * `-e2t` enables metadata indexing on upload | ||||
| * `-e2ts` scans for tags in all files that don't have tags yet | ||||
| * `-e2tsr` deletes all existing tags, so a full reindex | ||||
|  | ||||
| the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling: | ||||
| * `-v ~/music::ce2dsa:ce2tsr` does a full reindex of everything on startup | ||||
| * `-v ~/music::cd2d` disables **all** indexing, even if any `-e2*` are on | ||||
| * `-v ~/music::cd2t` disables all `-e2t*` (tags), does not affect `-e2d*` | ||||
|  | ||||
| `e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those | ||||
|  | ||||
| `-mte` decides which tags to index and display in the browser (and also the display order), this can be changed per-volume: | ||||
| * `-v ~/music::cmte=title,artist` indexes and displays *title* followed by *artist* | ||||
|  | ||||
| if you add/remove a tag from `mte` you will need to run with `-e2tsr` once to rebuild the database, otherwise only new files will be affected | ||||
|  | ||||
| `-mtm` can be used to add or redefine a metadata mapping, say you have media files with `foo` and `bar` tags and you want them to display as `qux` in the browser (preferring `foo` if both are present), then do `-mtm qux=foo,bar` and now you can `-mte artist,title,qux` | ||||
|  | ||||
| see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3, opus, flac, m4a, wav, aif) | ||||
|  | ||||
| `--no-mutagen` disables mutagen and uses ffprobe instead, which... | ||||
| * is about 20x slower than mutagen | ||||
| * catches a few tags that mutagen doesn't | ||||
| * avoids pulling any GPL code into copyparty | ||||
| * more importantly runs ffprobe on incoming files which is bad if your ffmpeg has a cve | ||||
|  | ||||
|  | ||||
| # client examples | ||||
| @@ -91,16 +126,33 @@ other metadata (like song tags etc) are not yet indexed for searching | ||||
|   * `await fetch('https://127.0.0.1:3923/', {method:"PUT", body: JSON.stringify(foo)});` | ||||
|   * `var xhr = new XMLHttpRequest(); xhr.open('POST', 'https://127.0.0.1:3923/msgs?raw'); xhr.send('foo');` | ||||
|  | ||||
| * curl/wget: upload some files (post=file, chunk=stdin) | ||||
|   * `post(){ curl -b cppwd=wark http://127.0.0.1:3923/ -F act=bput -F f=@"$1";}`   | ||||
|     `post movie.mkv` | ||||
|   * `post(){ wget --header='Cookie: cppwd=wark' http://127.0.0.1:3923/?raw --post-file="$1" -O-;}`   | ||||
|     `post movie.mkv` | ||||
|   * `chunk(){ curl -b cppwd=wark http://127.0.0.1:3923/ -T-;}`   | ||||
|     `chunk <movie.mkv` | ||||
|  | ||||
| * FUSE: mount a copyparty server as a local filesystem | ||||
|   * cross-platform python client available in [./bin/](bin/) | ||||
|   * [rclone](https://rclone.org/) as client can give ~5x performance, see [./docs/rclone.md](docs/rclone.md) | ||||
|  | ||||
| copyparty returns a truncated sha512sum of your PUT/POST as base64; you can generate the same checksum locally to verify uploads: | ||||
|  | ||||
|     b512(){ printf "$((sha512sum||shasum -a512)|sed -E 's/ .*//;s/(..)/\\x\1/g')"|base64|head -c43;} | ||||
|     b512 <movie.mkv | ||||
|  | ||||
|  | ||||
| # dependencies | ||||
|  | ||||
| * `jinja2` | ||||
| * `jinja2` (is built into the SFX) | ||||
|  | ||||
| optional, will eventually enable thumbnails: | ||||
| **optional,** enables music tags: | ||||
| * either `mutagen` (fast, pure-python, skips a few tags, makes copyparty GPL? idk) | ||||
| * or `FFprobe` (20x slower, more accurate, possibly dangerous depending on your distro and users) | ||||
|  | ||||
| **optional,** will eventually enable thumbnails: | ||||
| * `Pillow` (requires py2.7 or py3.5+) | ||||
|  | ||||
|  | ||||
|   | ||||
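
As a cross-check of the `b512` shell function in the readme hunk above, here is a minimal Python sketch that computes the same truncated checksum (standard base64 of the raw sha512 digest, first 43 characters); the filename is just a placeholder:

```python
import base64
import hashlib

def b512(path):
    # sha512 the file, base64 the raw digest, keep the first 43 characters
    # (same steps as the shell one-liner: hex -> raw bytes -> base64 -> head -c43)
    with open(path, "rb") as f:
        digest = hashlib.sha512(f.read()).digest()
    return base64.b64encode(digest).decode("ascii")[:43]

print(b512("movie.mkv"))  # compare against the value the server returned
```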
| @@ -198,7 +198,7 @@ def main(): | ||||
|                and "cflag" is config flags to set on this volume | ||||
|              | ||||
|             list of cflags: | ||||
|               cnodupe rejects existing files (instead of symlinking them) | ||||
|               "cnodupe" rejects existing files (instead of symlinking them) | ||||
|  | ||||
|             example:\033[35m | ||||
|               -a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe  \033[36m | ||||
| @@ -239,9 +239,6 @@ def main(): | ||||
|     ap.add_argument("-q", action="store_true", help="quiet") | ||||
|     ap.add_argument("-ed", action="store_true", help="enable ?dots") | ||||
|     ap.add_argument("-emp", action="store_true", help="enable markdown plugins") | ||||
|     ap.add_argument("-e2d", action="store_true", help="enable up2k database") | ||||
|     ap.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d") | ||||
|     ap.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds") | ||||
|     ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate") | ||||
|     ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)") | ||||
|     ap.add_argument("-nih", action="store_true", help="no info hostname") | ||||
| @@ -250,6 +247,18 @@ def main(): | ||||
|     ap.add_argument("--urlform", type=str, default="print,get", help="how to handle url-forms") | ||||
|     ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt") | ||||
|  | ||||
|     ap2 = ap.add_argument_group('database options') | ||||
|     ap2.add_argument("-e2d", action="store_true", help="enable up2k database") | ||||
|     ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d") | ||||
|     ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds") | ||||
|     ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing") | ||||
|     ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t") | ||||
|     ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts") | ||||
|     ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead") | ||||
|     ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping") | ||||
|     ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)", | ||||
|         default="circle,album,.tn,artist,title,.bpm,key,.dur,.q") | ||||
|  | ||||
|     ap2 = ap.add_argument_group('SSL/TLS options') | ||||
|     ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls") | ||||
|     ap2.add_argument("--https-only", action="store_true", help="disable plaintext") | ||||
| @@ -257,14 +266,20 @@ def main(): | ||||
|     ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers") | ||||
|     ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info") | ||||
|     ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets") | ||||
|      | ||||
|     al = ap.parse_args() | ||||
|     # fmt: on | ||||
|  | ||||
|     if al.e2dsa: | ||||
|         al.e2ds = True | ||||
|  | ||||
|     if al.e2ds: | ||||
|         al.e2d = True | ||||
|     # propagate implications | ||||
|     for k1, k2 in [ | ||||
|         ["e2dsa", "e2ds"], | ||||
|         ["e2ds", "e2d"], | ||||
|         ["e2tsr", "e2ts"], | ||||
|         ["e2ts", "e2t"], | ||||
|         ["e2t", "e2d"], | ||||
|     ]: | ||||
|         if getattr(al, k1): | ||||
|             setattr(al, k2, True) | ||||
|  | ||||
|     al.i = al.i.split(",") | ||||
|     try: | ||||
|   | ||||
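
To illustrate the implication loop added in the `main()` hunk above, a standalone sketch (the argument object is a stand-in for the parsed `al`); the pair list is ordered so a single pass cascades, e.g. `-e2tsr` alone ends up enabling `e2ts`, `e2t` and `e2d`:

```python
from types import SimpleNamespace

# stand-in for the parsed args; pretend only -e2tsr was given
al = SimpleNamespace(e2d=False, e2ds=False, e2dsa=False,
                     e2t=False, e2ts=False, e2tsr=True)

# same pair list as the diff: the left flag implies the right flag
for k1, k2 in [
    ["e2dsa", "e2ds"],
    ["e2ds", "e2d"],
    ["e2tsr", "e2ts"],
    ["e2ts", "e2t"],
    ["e2t", "e2d"],
]:
    if getattr(al, k1):
        setattr(al, k2, True)

print(al.e2ts, al.e2t, al.e2d)  # True True True
```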
| @@ -1,8 +1,8 @@ | ||||
| # coding: utf-8 | ||||
|  | ||||
| VERSION = (0, 8, 1) | ||||
| CODENAME = "keeping track" | ||||
| BUILD_DT = (2021, 2, 22) | ||||
| VERSION = (0, 9, 1) | ||||
| CODENAME = "the strongest music server" | ||||
| BUILD_DT = (2021, 3, 3) | ||||
|  | ||||
| S_VERSION = ".".join(map(str, VERSION)) | ||||
| S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT) | ||||
|   | ||||
| @@ -206,8 +206,11 @@ class AuthSrv(object): | ||||
|             if lvl in "wa": | ||||
|                 mwrite[vol_dst].append(uname) | ||||
|             if lvl == "c": | ||||
|                 # config option, currently switches only | ||||
|                 mflags[vol_dst][uname] = True | ||||
|                 cval = True | ||||
|                 if "=" in uname: | ||||
|                     uname, cval = uname.split("=", 1) | ||||
|  | ||||
|                 mflags[vol_dst][uname] = cval | ||||
|  | ||||
|     def reload(self): | ||||
|         """ | ||||
| @@ -248,12 +251,19 @@ class AuthSrv(object): | ||||
|                 perms = perms.split(":") | ||||
|                 for (lvl, uname) in [[x[0], x[1:]] for x in perms]: | ||||
|                     if lvl == "c": | ||||
|                         # config option, currently switches only | ||||
|                         mflags[dst][uname] = True | ||||
|                         cval = True | ||||
|                         if "=" in uname: | ||||
|                             uname, cval = uname.split("=", 1) | ||||
|  | ||||
|                         mflags[dst][uname] = cval | ||||
|                         continue | ||||
|  | ||||
|                     if uname == "": | ||||
|                         uname = "*" | ||||
|  | ||||
|                     if lvl in "ra": | ||||
|                         mread[dst].append(uname) | ||||
|  | ||||
|                     if lvl in "wa": | ||||
|                         mwrite[dst].append(uname) | ||||
|  | ||||
| @@ -268,6 +278,7 @@ class AuthSrv(object): | ||||
|         elif "" not in mount: | ||||
|             # there's volumes but no root; make root inaccessible | ||||
|             vfs = VFS(os.path.abspath("."), "") | ||||
|             vfs.flags["d2d"] = True | ||||
|  | ||||
|         maxdepth = 0 | ||||
|         for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))): | ||||
| @@ -300,15 +311,27 @@ class AuthSrv(object): | ||||
|             ) | ||||
|             raise Exception("invalid config") | ||||
|  | ||||
|         for vol in vfs.all_vols.values(): | ||||
|             if (self.args.e2ds and vol.uwrite) or self.args.e2dsa: | ||||
|                 vol.flags["e2ds"] = True | ||||
|  | ||||
|             if self.args.e2d: | ||||
|                 vol.flags["e2d"] = True | ||||
|  | ||||
|             for k in ["e2t", "e2ts", "e2tsr"]: | ||||
|                 if getattr(self.args, k): | ||||
|                     vol.flags[k] = True | ||||
|  | ||||
|             # default tag-list if unset | ||||
|             if "mte" not in vol.flags: | ||||
|                 vol.flags["mte"] = self.args.mte | ||||
|  | ||||
|         try: | ||||
|             v, _ = vfs.get("/", "*", False, True) | ||||
|             if self.warn_anonwrite and os.getcwd() == v.realpath: | ||||
|                 self.warn_anonwrite = False | ||||
|                 self.log( | ||||
|                     "\033[31manyone can read/write the current directory: {}\033[0m".format( | ||||
|                         v.realpath | ||||
|                     ) | ||||
|                 ) | ||||
|                 msg = "\033[31manyone can read/write the current directory: {}\033[0m" | ||||
|                 self.log(msg.format(v.realpath)) | ||||
|         except Pebkac: | ||||
|             self.warn_anonwrite = True | ||||
|  | ||||
|   | ||||
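
A minimal standalone sketch of the reworked `c`-flag parsing in the AuthSrv hunks above: each permission token is a level character plus a payload, and a `c` payload may now carry a value after `=` while plain switches stay `True`. The example permission string is made up, modeled on the readme examples:

```python
def parse_perms(perms):
    # perms is the part after "-v src:dst:", e.g. "r:aed:cnodupe:cmte=title,artist"
    mread, mwrite, mflags = [], [], {}
    for lvl, payload in [(x[0], x[1:]) for x in perms.split(":")]:
        if lvl == "c":
            cval = True
            if "=" in payload:
                payload, cval = payload.split("=", 1)
            mflags[payload] = cval
            continue
        uname = payload or "*"
        if lvl in "ra":
            mread.append(uname)
        if lvl in "wa":
            mwrite.append(uname)
    return mread, mwrite, mflags

print(parse_perms("r:aed:cnodupe:cmte=title,artist"))
# (['*', 'ed'], ['ed'], {'nodupe': True, 'mte': 'title,artist'})
```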
| @@ -222,6 +222,9 @@ class HttpCli(object): | ||||
|             static_path = os.path.join(E.mod, "web/", self.vpath[5:]) | ||||
|             return self.tx_file(static_path) | ||||
|  | ||||
|         if "tree" in self.uparam: | ||||
|             return self.tx_tree() | ||||
|  | ||||
|         # conditional redirect to single volumes | ||||
|         if self.vpath == "" and not self.uparam: | ||||
|             nread = len(self.rvol) | ||||
| @@ -246,9 +249,6 @@ class HttpCli(object): | ||||
|             self.vpath = None | ||||
|             return self.tx_mounts() | ||||
|  | ||||
|         if "tree" in self.uparam: | ||||
|             return self.tx_tree() | ||||
|  | ||||
|         return self.tx_browser() | ||||
|  | ||||
|     def handle_options(self): | ||||
| @@ -323,8 +323,11 @@ class HttpCli(object): | ||||
|         raise Pebkac(405, "don't know how to handle POST({})".format(ctype)) | ||||
|  | ||||
|     def get_body_reader(self): | ||||
|         remains = int(self.headers.get("content-length", None)) | ||||
|         if remains is None: | ||||
|         chunked = "chunked" in self.headers.get("transfer-encoding", "").lower() | ||||
|         remains = int(self.headers.get("content-length", -1)) | ||||
|         if chunked: | ||||
|             return read_socket_chunked(self.sr), remains | ||||
|         elif remains == -1: | ||||
|             self.keepalive = False | ||||
|             return read_socket_unbounded(self.sr), remains | ||||
|         else: | ||||
| @@ -425,7 +428,7 @@ class HttpCli(object): | ||||
|         body["ptop"] = vfs.realpath | ||||
|         body["prel"] = rem | ||||
|         body["addr"] = self.ip | ||||
|         body["flag"] = vfs.flags | ||||
|         body["vcfg"] = vfs.flags | ||||
|  | ||||
|         x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body) | ||||
|         response = x.get() | ||||
| @@ -442,20 +445,31 @@ class HttpCli(object): | ||||
|             vols.append([vfs.vpath, vfs.realpath, vfs.flags]) | ||||
|  | ||||
|         idx = self.conn.get_u2idx() | ||||
|         t0 = time.time() | ||||
|         if "srch" in body: | ||||
|             # search by up2k hashlist | ||||
|             vbody = copy.deepcopy(body) | ||||
|             vbody["hash"] = len(vbody["hash"]) | ||||
|             self.log("qj: " + repr(vbody)) | ||||
|             hits = idx.fsearch(vols, body) | ||||
|             self.log("q#: " + repr(hits)) | ||||
|             self.log("q#: {} ({:.2f}s)".format(repr(hits), time.time() - t0)) | ||||
|             taglist = [] | ||||
|         else: | ||||
|             # search by query params | ||||
|             self.log("qj: " + repr(body)) | ||||
|             hits = idx.search(vols, body) | ||||
|             self.log("q#: " + str(len(hits))) | ||||
|             hits, taglist = idx.search(vols, body) | ||||
|             self.log("q#: {} ({:.2f}s)".format(len(hits), time.time() - t0)) | ||||
|  | ||||
|         r = json.dumps(hits).encode("utf-8") | ||||
|         order = [] | ||||
|         cfg = self.args.mte.split(",") | ||||
|         for t in cfg: | ||||
|             if t in taglist: | ||||
|                 order.append(t) | ||||
|         for t in taglist: | ||||
|             if t not in order: | ||||
|                 order.append(t) | ||||
|  | ||||
|         r = json.dumps({"hits": hits, "tag_order": order}).encode("utf-8") | ||||
|         self.reply(r, mime="application/json") | ||||
|         return True | ||||
|  | ||||
| @@ -1183,6 +1197,11 @@ class HttpCli(object): | ||||
|  | ||||
|         is_ls = "ls" in self.uparam | ||||
|  | ||||
|         icur = None | ||||
|         if "e2t" in vn.flags: | ||||
|             idx = self.conn.get_u2idx() | ||||
|             icur = idx.get_cur(vn.realpath) | ||||
|  | ||||
|         dirs = [] | ||||
|         files = [] | ||||
|         for fn in vfs_ls: | ||||
| @@ -1238,6 +1257,32 @@ class HttpCli(object): | ||||
|                 dirs.append(item) | ||||
|             else: | ||||
|                 files.append(item) | ||||
|                 item["rd"] = rem | ||||
|  | ||||
|         taglist = {} | ||||
|         for f in files: | ||||
|             fn = f["name"] | ||||
|             rd = f["rd"] | ||||
|             del f["rd"] | ||||
|             if icur: | ||||
|                 q = "select w from up where rd = ? and fn = ?" | ||||
|                 r = icur.execute(q, (rd, fn)).fetchone() | ||||
|                 if not r: | ||||
|                     continue | ||||
|  | ||||
|                 w = r[0][:16] | ||||
|                 tags = {} | ||||
|                 q = "select k, v from mt where w = ? and k != 'x'" | ||||
|                 for k, v in icur.execute(q, (w,)): | ||||
|                     taglist[k] = True | ||||
|                     tags[k] = v | ||||
|  | ||||
|                 f["tags"] = tags | ||||
|  | ||||
|         if icur: | ||||
|             taglist = [k for k in self.args.mte.split(",") if k in taglist] | ||||
|             for f in dirs: | ||||
|                 f["tags"] = {} | ||||
|  | ||||
|         srv_info = [] | ||||
|  | ||||
| @@ -1275,20 +1320,27 @@ class HttpCli(object): | ||||
|         if self.writable: | ||||
|             perms.append("write") | ||||
|  | ||||
|         if is_ls: | ||||
|             [x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y] | ||||
|             ret = {"dirs": dirs, "files": files, "srvinf": srv_info, "perms": perms} | ||||
|             ret = json.dumps(ret) | ||||
|             self.reply(ret.encode("utf-8", "replace"), mime="application/json") | ||||
|             return True | ||||
|  | ||||
|         logues = [None, None] | ||||
|         logues = ["", ""] | ||||
|         for n, fn in enumerate([".prologue.html", ".epilogue.html"]): | ||||
|             fn = os.path.join(abspath, fn) | ||||
|             if os.path.exists(fsenc(fn)): | ||||
|                 with open(fsenc(fn), "rb") as f: | ||||
|                     logues[n] = f.read().decode("utf-8") | ||||
|  | ||||
|         if is_ls: | ||||
|             [x.pop(k) for k in ["name", "dt"] for y in [dirs, files] for x in y] | ||||
|             ret = { | ||||
|                 "dirs": dirs, | ||||
|                 "files": files, | ||||
|                 "srvinf": srv_info, | ||||
|                 "perms": perms, | ||||
|                 "logues": logues, | ||||
|                 "taglist": taglist, | ||||
|             } | ||||
|             ret = json.dumps(ret) | ||||
|             self.reply(ret.encode("utf-8", "replace"), mime="application/json") | ||||
|             return True | ||||
|  | ||||
|         ts = "" | ||||
|         # ts = "?{}".format(time.time()) | ||||
|  | ||||
| @@ -1300,9 +1352,11 @@ class HttpCli(object): | ||||
|             files=dirs, | ||||
|             ts=ts, | ||||
|             perms=json.dumps(perms), | ||||
|             have_up2k_idx=self.args.e2d, | ||||
|             prologue=logues[0], | ||||
|             epilogue=logues[1], | ||||
|             taglist=taglist, | ||||
|             tag_order=json.dumps(self.args.mte.split(",")), | ||||
|             have_up2k_idx=("e2d" in vn.flags), | ||||
|             have_tags_idx=("e2t" in vn.flags), | ||||
|             logues=logues, | ||||
|             title=html_escape(self.vpath), | ||||
|             srv_info=srv_info, | ||||
|         ) | ||||
|   | ||||
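
A rough client-side sketch of the extended `?ls` response assembled in the HttpCli hunks above; the host, port and volume path are placeholders, and only keys that appear in the diff (`dirs`, `files`, `srvinf`, `perms`, `logues`, `taglist`, per-file `tags`) are read:

```python
import json
from urllib.request import urlopen

# placeholder URL; ?ls asks for the JSON directory listing
with urlopen("http://127.0.0.1:3923/music/?ls") as resp:
    listing = json.loads(resp.read().decode("utf-8"))

print(listing["perms"])       # volume permissions for the requesting user
print(listing["taglist"])     # tag columns for this volume, filtered by -mte
for f in listing["files"]:
    print(f.get("tags", {}))  # per-file tags when the volume has e2t
```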
| @@ -20,10 +20,12 @@ except ImportError: | ||||
|   you do not have jinja2 installed,\033[33m | ||||
|   choose one of these:\033[0m | ||||
|    * apt install python-jinja2 | ||||
|    * python3 -m pip install --user jinja2 | ||||
|    * {} -m pip install --user jinja2 | ||||
|    * (try another python version, if you have one) | ||||
|    * (try copyparty.sfx instead) | ||||
| """ | ||||
| """.format( | ||||
|             os.path.basename(sys.executable) | ||||
|         ) | ||||
|     ) | ||||
|     sys.exit(1) | ||||
|  | ||||
|   | ||||

							
								
								
									
copyparty/mtag.py, 305 lines (new file)

							| @@ -0,0 +1,305 @@ | ||||
| # coding: utf-8 | ||||
| from __future__ import print_function, unicode_literals | ||||
| from math import fabs | ||||
|  | ||||
| import re | ||||
| import os | ||||
| import sys | ||||
| import shutil | ||||
| import subprocess as sp | ||||
|  | ||||
| from .__init__ import PY2, WINDOWS | ||||
| from .util import fsenc, fsdec | ||||
|  | ||||
|  | ||||
| class MTag(object): | ||||
|     def __init__(self, log_func, args): | ||||
|         self.log_func = log_func | ||||
|         self.usable = True | ||||
|         mappings = args.mtm | ||||
|         backend = "ffprobe" if args.no_mutagen else "mutagen" | ||||
|  | ||||
|         if backend == "mutagen": | ||||
|             self.get = self.get_mutagen | ||||
|             try: | ||||
|                 import mutagen | ||||
|             except: | ||||
|                 self.log("\033[33mcould not load mutagen, trying ffprobe instead") | ||||
|                 backend = "ffprobe" | ||||
|  | ||||
|         if backend == "ffprobe": | ||||
|             self.get = self.get_ffprobe | ||||
|             # about 20x slower | ||||
|             if PY2: | ||||
|                 cmd = ["ffprobe", "-version"] | ||||
|                 try: | ||||
|                     sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) | ||||
|                 except: | ||||
|                     self.usable = False | ||||
|             else: | ||||
|                 if not shutil.which("ffprobe"): | ||||
|                     self.usable = False | ||||
|  | ||||
|         if not self.usable: | ||||
|             msg = "\033[31mneed mutagen or ffprobe to read media tags so please run this:\n  {} -m pip install --user mutagen \033[0m" | ||||
|             self.log(msg.format(os.path.basename(sys.executable))) | ||||
|             return | ||||
|  | ||||
|         # https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html | ||||
|         tagmap = { | ||||
|             "album": ["album", "talb", "\u00a9alb", "original-album", "toal"], | ||||
|             "artist": [ | ||||
|                 "artist", | ||||
|                 "tpe1", | ||||
|                 "\u00a9art", | ||||
|                 "composer", | ||||
|                 "performer", | ||||
|                 "arranger", | ||||
|                 "\u00a9wrt", | ||||
|                 "tcom", | ||||
|                 "tpe3", | ||||
|                 "original-artist", | ||||
|                 "tope", | ||||
|             ], | ||||
|             "title": ["title", "tit2", "\u00a9nam"], | ||||
|             "circle": [ | ||||
|                 "album-artist", | ||||
|                 "tpe2", | ||||
|                 "aart", | ||||
|                 "conductor", | ||||
|                 "organization", | ||||
|                 "band", | ||||
|             ], | ||||
|             ".tn": ["tracknumber", "trck", "trkn", "track"], | ||||
|             "genre": ["genre", "tcon", "\u00a9gen"], | ||||
|             "date": [ | ||||
|                 "original-release-date", | ||||
|                 "release-date", | ||||
|                 "date", | ||||
|                 "tdrc", | ||||
|                 "\u00a9day", | ||||
|                 "original-date", | ||||
|                 "original-year", | ||||
|                 "tyer", | ||||
|                 "tdor", | ||||
|                 "tory", | ||||
|                 "year", | ||||
|                 "creation-time", | ||||
|             ], | ||||
|             ".bpm": ["bpm", "tbpm", "tmpo", "tbp"], | ||||
|             "key": ["initial-key", "tkey", "key"], | ||||
|             "comment": ["comment", "comm", "\u00a9cmt", "comments", "description"], | ||||
|         } | ||||
|  | ||||
|         if mappings: | ||||
|             for k, v in [x.split("=") for x in mappings]: | ||||
|                 tagmap[k] = v.split(",") | ||||
|  | ||||
|         self.tagmap = {} | ||||
|         for k, vs in tagmap.items(): | ||||
|             vs2 = [] | ||||
|             for v in vs: | ||||
|                 if "-" not in v: | ||||
|                     vs2.append(v) | ||||
|                     continue | ||||
|  | ||||
|                 vs2.append(v.replace("-", " ")) | ||||
|                 vs2.append(v.replace("-", "_")) | ||||
|                 vs2.append(v.replace("-", "")) | ||||
|  | ||||
|             self.tagmap[k] = vs2 | ||||
|  | ||||
|         self.rmap = { | ||||
|             v: [n, k] for k, vs in self.tagmap.items() for n, v in enumerate(vs) | ||||
|         } | ||||
|         # self.get = self.compare | ||||
|  | ||||
|     def log(self, msg): | ||||
|         self.log_func("mtag", msg) | ||||
|  | ||||
|     def normalize_tags(self, ret, md): | ||||
|         for k, v in dict(md).items(): | ||||
|             if not v: | ||||
|                 continue | ||||
|  | ||||
|             k = k.lower().split("::")[0].strip() | ||||
|             mk = self.rmap.get(k) | ||||
|             if not mk: | ||||
|                 continue | ||||
|  | ||||
|             pref, mk = mk | ||||
|             if mk not in ret or ret[mk][0] > pref: | ||||
|                 ret[mk] = [pref, v[0]] | ||||
|  | ||||
|         # take first value | ||||
|         ret = {k: str(v[1]).strip() for k, v in ret.items()} | ||||
|  | ||||
|         # track 3/7 => track 3 | ||||
|         for k, v in ret.items(): | ||||
|             if k[0] == ".": | ||||
|                 v = v.split("/")[0].strip().lstrip("0") | ||||
|                 ret[k] = v or 0 | ||||
|  | ||||
|         return ret | ||||
|  | ||||
|     def compare(self, abspath): | ||||
|         if abspath.endswith(".au"): | ||||
|             return {} | ||||
|  | ||||
|         print("\n" + abspath) | ||||
|         r1 = self.get_mutagen(abspath) | ||||
|         r2 = self.get_ffprobe(abspath) | ||||
|  | ||||
|         keys = {} | ||||
|         for d in [r1, r2]: | ||||
|             for k in d.keys(): | ||||
|                 keys[k] = True | ||||
|  | ||||
|         diffs = [] | ||||
|         l1 = [] | ||||
|         l2 = [] | ||||
|         for k in sorted(keys.keys()): | ||||
|             if k in [".q", ".dur"]: | ||||
|                 continue  # lenient | ||||
|  | ||||
|             v1 = r1.get(k) | ||||
|             v2 = r2.get(k) | ||||
|             if v1 == v2: | ||||
|                 print("  ", k, v1) | ||||
|             elif v1 != "0000":  # ffprobe date=0 | ||||
|                 diffs.append(k) | ||||
|                 print(" 1", k, v1) | ||||
|                 print(" 2", k, v2) | ||||
|                 if v1: | ||||
|                     l1.append(k) | ||||
|                 if v2: | ||||
|                     l2.append(k) | ||||
|  | ||||
|         if diffs: | ||||
|             raise Exception() | ||||
|  | ||||
|         return r1 | ||||
|  | ||||
|     def get_mutagen(self, abspath): | ||||
|         import mutagen | ||||
|  | ||||
|         try: | ||||
|             md = mutagen.File(abspath, easy=True) | ||||
|             x = md.info.length | ||||
|         except Exception as ex: | ||||
|             return {} | ||||
|  | ||||
|         ret = {} | ||||
|         try: | ||||
|             dur = int(md.info.length) | ||||
|             try: | ||||
|                 q = int(md.info.bitrate / 1024) | ||||
|             except: | ||||
|                 q = int((os.path.getsize(abspath) / dur) / 128) | ||||
|  | ||||
|             ret[".dur"] = [0, dur] | ||||
|             ret[".q"] = [0, q] | ||||
|         except: | ||||
|             pass | ||||
|  | ||||
|         return self.normalize_tags(ret, md) | ||||
|  | ||||
|     def get_ffprobe(self, abspath): | ||||
|         cmd = ["ffprobe", "-hide_banner", "--", fsenc(abspath)] | ||||
|         p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE) | ||||
|         r = p.communicate() | ||||
|         txt = r[1].decode("utf-8", "replace") | ||||
|         txt = [x.rstrip("\r") for x in txt.split("\n")] | ||||
|  | ||||
|         """ | ||||
|         note: | ||||
|           tags which contain newline will be truncated on first \n, | ||||
|           ffmpeg emits \n and spacepads the : to align visually | ||||
|         note: | ||||
|           the Stream ln always mentions Audio: if audio | ||||
|           the Stream ln usually has kb/s, is more accurate | ||||
|           the Duration ln always has kb/s | ||||
|           the Metadata: after Chapter may contain BPM info, | ||||
|             title : Tempo: 126.0 | ||||
|  | ||||
|         Input #0, wav, | ||||
|           Metadata: | ||||
|             date : <OK> | ||||
|           Duration: | ||||
|             Chapter # | ||||
|             Metadata: | ||||
|               title : <NG> | ||||
|  | ||||
|         Input #0, mp3, | ||||
|           Metadata: | ||||
|             album : <OK> | ||||
|           Duration: | ||||
|             Stream #0:0: Audio: | ||||
|             Stream #0:1: Video: | ||||
|             Metadata: | ||||
|               comment : <NG> | ||||
|         """ | ||||
|  | ||||
|         ptn_md_beg = re.compile("^( +)Metadata:$") | ||||
|         ptn_md_kv = re.compile("^( +)([^:]+) *: (.*)") | ||||
|         ptn_dur = re.compile("^ *Duration: ([^ ]+)(, |$)") | ||||
|         ptn_br1 = re.compile("^ *Duration: .*, bitrate: ([0-9]+) kb/s(, |$)") | ||||
|         ptn_br2 = re.compile("^ *Stream.*: Audio:.* ([0-9]+) kb/s(, |$)") | ||||
|         ptn_audio = re.compile("^ *Stream .*: Audio: ") | ||||
|         ptn_au_parent = re.compile("^ *(Input #|Stream .*: Audio: )") | ||||
|  | ||||
|         ret = {} | ||||
|         md = {} | ||||
|         in_md = False | ||||
|         is_audio = False | ||||
|         au_parent = False | ||||
|         for ln in txt: | ||||
|             m = ptn_md_kv.match(ln) | ||||
|             if m and in_md and len(m.group(1)) == in_md: | ||||
|                 _, k, v = [x.strip() for x in m.groups()] | ||||
|                 if k != "" and v != "": | ||||
|                     md[k] = [v] | ||||
|                 continue | ||||
|             else: | ||||
|                 in_md = False | ||||
|  | ||||
|             m = ptn_md_beg.match(ln) | ||||
|             if m and au_parent: | ||||
|                 in_md = len(m.group(1)) + 2 | ||||
|                 continue | ||||
|  | ||||
|             au_parent = bool(ptn_au_parent.search(ln)) | ||||
|  | ||||
|             if ptn_audio.search(ln): | ||||
|                 is_audio = True | ||||
|  | ||||
|             m = ptn_dur.search(ln) | ||||
|             if m: | ||||
|                 sec = 0 | ||||
|                 tstr = m.group(1) | ||||
|                 if tstr.lower() != "n/a": | ||||
|                     try: | ||||
|                         tf = tstr.split(",")[0].split(".")[0].split(":") | ||||
|                         for f in tf: | ||||
|                             sec *= 60 | ||||
|                             sec += int(f) | ||||
|                     except: | ||||
|                         self.log( | ||||
|                             "\033[33minvalid timestr from ffmpeg: [{}]".format(tstr) | ||||
|                         ) | ||||
|  | ||||
|                 ret[".dur"] = sec | ||||
|                 m = ptn_br1.search(ln) | ||||
|                 if m: | ||||
|                     ret[".q"] = m.group(1) | ||||
|  | ||||
|             m = ptn_br2.search(ln) | ||||
|             if m: | ||||
|                 ret[".q"] = m.group(1) | ||||
|  | ||||
|         if not is_audio: | ||||
|             return {} | ||||
|  | ||||
|         ret = {k: [0, v] for k, v in ret.items()} | ||||
|  | ||||
|         return self.normalize_tags(ret, md) | ||||
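
A condensed sketch of the tag normalization that `MTag` above performs: `rmap` maps every source alias to `(preference, canonical_name)`, and for each canonical tag the alias with the lowest preference index wins. The tagmap subset and tag values here are made up for illustration:

```python
# tiny subset of the default tagmap, aliases listed in order of preference
tagmap = {"artist": ["artist", "tpe1", "composer"], "title": ["title", "tit2"]}
rmap = {v: [n, k] for k, vs in tagmap.items() for n, v in enumerate(vs)}

# raw tags as a backend might report them
md = {"COMPOSER": ["J. S. Bach"], "TPE1": ["Some Performer"], "TIT2": ["Partita"]}

ret = {}
for k, v in md.items():
    k = k.lower().split("::")[0].strip()
    hit = rmap.get(k)
    if not hit or not v:
        continue
    pref, canon = hit
    # keep the most-preferred alias seen so far for this canonical tag
    if canon not in ret or ret[canon][0] > pref:
        ret[canon] = [pref, v[0]]

print({k: v[1] for k, v in ret.items()})
# {'artist': 'Some Performer', 'title': 'Partita'}  (tpe1 outranks composer)
```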
| @@ -39,14 +39,6 @@ class SvcHub(object): | ||||
|         self.tcpsrv = TcpSrv(self) | ||||
|         self.up2k = Up2k(self) | ||||
|  | ||||
|         if self.args.e2ds: | ||||
|             auth = AuthSrv(self.args, self.log, False) | ||||
|             vols = auth.vfs.all_vols.values() | ||||
|             if not self.args.e2dsa: | ||||
|                 vols = [x for x in vols if x.uwrite] | ||||
|  | ||||
|             self.up2k.build_indexes(vols) | ||||
|  | ||||
|         # decide which worker impl to use | ||||
|         if self.check_mp_enable(): | ||||
|             from .broker_mp import BrokerMp as Broker | ||||
| @@ -95,7 +87,7 @@ class SvcHub(object): | ||||
|  | ||||
|             fmt = "\033[36m{} \033[33m{:21} \033[0m{}\n" | ||||
|             if not VT100: | ||||
|                 fmt = "{} {:21} {}" | ||||
|                 fmt = "{} {:21} {}\n" | ||||
|                 if "\033" in msg: | ||||
|                     msg = self.ansi_re.sub("", msg) | ||||
|                 if "\033" in src: | ||||
|   | ||||
| @@ -24,7 +24,7 @@ class U2idx(object): | ||||
|             self.log("could not load sqlite3; searching will be disabled") | ||||
|             return | ||||
|  | ||||
|         self.dbs = {} | ||||
|         self.cur = {} | ||||
|  | ||||
|     def log(self, msg): | ||||
|         self.log_func("u2idx", msg) | ||||
| @@ -37,7 +37,19 @@ class U2idx(object): | ||||
|         fsize = body["size"] | ||||
|         fhash = body["hash"] | ||||
|         wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash) | ||||
|         return self.run_query(vols, "select * from up where w = ?", [wark]) | ||||
|         return self.run_query(vols, "w = ?", [wark], "", [])[0] | ||||
|  | ||||
|     def get_cur(self, ptop): | ||||
|         cur = self.cur.get(ptop) | ||||
|         if cur: | ||||
|             return cur | ||||
|  | ||||
|         cur = _open(ptop) | ||||
|         if not cur: | ||||
|             return None | ||||
|  | ||||
|         self.cur[ptop] = cur | ||||
|         return cur | ||||
|  | ||||
|     def search(self, vols, body): | ||||
|         """search by query params""" | ||||
| @@ -45,59 +57,80 @@ class U2idx(object): | ||||
|             return [] | ||||
|  | ||||
|         qobj = {} | ||||
|         _conv_sz(qobj, body, "sz_min", "sz >= ?") | ||||
|         _conv_sz(qobj, body, "sz_max", "sz <= ?") | ||||
|         _conv_dt(qobj, body, "dt_min", "mt >= ?") | ||||
|         _conv_dt(qobj, body, "dt_max", "mt <= ?") | ||||
|         for seg, dk in [["path", "rd"], ["name", "fn"]]: | ||||
|         _conv_sz(qobj, body, "sz_min", "up.sz >= ?") | ||||
|         _conv_sz(qobj, body, "sz_max", "up.sz <= ?") | ||||
|         _conv_dt(qobj, body, "dt_min", "up.mt >= ?") | ||||
|         _conv_dt(qobj, body, "dt_max", "up.mt <= ?") | ||||
|         for seg, dk in [["path", "up.rd"], ["name", "up.fn"]]: | ||||
|             if seg in body: | ||||
|                 _conv_txt(qobj, body, seg, dk) | ||||
|  | ||||
|         qstr = "select * from up" | ||||
|         qv = [] | ||||
|         if qobj: | ||||
|             qk = [] | ||||
|             for k, v in sorted(qobj.items()): | ||||
|                 qk.append(k.split("\n")[0]) | ||||
|                 qv.append(v) | ||||
|         uq, uv = _sqlize(qobj) | ||||
|  | ||||
|             qstr = " and ".join(qk) | ||||
|             qstr = "select * from up where " + qstr | ||||
|         tq = "" | ||||
|         tv = [] | ||||
|         qobj = {} | ||||
|         if "tags" in body: | ||||
|             _conv_txt(qobj, body, "tags", "mt.v") | ||||
|             tq, tv = _sqlize(qobj) | ||||
|  | ||||
|         return self.run_query(vols, qstr, qv) | ||||
|         return self.run_query(vols, uq, uv, tq, tv) | ||||
|  | ||||
|     def run_query(self, vols, qstr, qv): | ||||
|         qv = tuple(qv) | ||||
|         self.log("qs: {} {}".format(qstr, repr(qv))) | ||||
|     def run_query(self, vols, uq, uv, tq, tv): | ||||
|         self.log("qs: {} {} ,  {} {}".format(uq, repr(uv), tq, repr(tv))) | ||||
|  | ||||
|         ret = [] | ||||
|         lim = 100 | ||||
|         lim = 1000 | ||||
|         taglist = {} | ||||
|         for (vtop, ptop, flags) in vols: | ||||
|             db = self.dbs.get(ptop) | ||||
|             if not db: | ||||
|                 db = _open(ptop) | ||||
|                 if not db: | ||||
|                     continue | ||||
|             cur = self.get_cur(ptop) | ||||
|             if not cur: | ||||
|                 continue | ||||
|  | ||||
|                 self.dbs[ptop] = db | ||||
|                 # self.log("idx /{} @ {} {}".format(vtop, ptop, flags)) | ||||
|             if not tq: | ||||
|                 if not uq: | ||||
|                     q = "select * from up" | ||||
|                     v = () | ||||
|                 else: | ||||
|                     q = "select * from up where " + uq | ||||
|                     v = tuple(uv) | ||||
|             else: | ||||
|                 # naive assumption: tags first | ||||
|                 q = "select up.* from up inner join mt on substr(up.w,1,16) = mt.w where {}" | ||||
|                 q = q.format(" and ".join([tq, uq]) if uq else tq) | ||||
|                 v = tuple(tv + uv) | ||||
|  | ||||
|             c = db.execute(qstr, qv) | ||||
|             for _, ts, sz, rd, fn in c: | ||||
|             sret = [] | ||||
|             c = cur.execute(q, v) | ||||
|             for hit in c: | ||||
|                 w, ts, sz, rd, fn = hit | ||||
|                 lim -= 1 | ||||
|                 if lim <= 0: | ||||
|                     break | ||||
|  | ||||
|                 rp = os.path.join(vtop, rd, fn).replace("\\", "/") | ||||
|                 ret.append({"ts": int(ts), "sz": sz, "rp": rp}) | ||||
|                 sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]}) | ||||
|  | ||||
|         return ret | ||||
|             for hit in sret: | ||||
|                 w = hit["w"] | ||||
|                 del hit["w"] | ||||
|                 tags = {} | ||||
|                 q = "select k, v from mt where w = ? and k != 'x'" | ||||
|                 for k, v in cur.execute(q, (w,)): | ||||
|                     taglist[k] = True | ||||
|                     tags[k] = v | ||||
|  | ||||
|                 hit["tags"] = tags | ||||
|  | ||||
|             ret.extend(sret) | ||||
|  | ||||
|         return ret, list(taglist.keys()) | ||||
|  | ||||
|  | ||||
| def _open(ptop): | ||||
|     db_path = os.path.join(ptop, ".hist", "up2k.db") | ||||
|     if os.path.exists(db_path): | ||||
|         return sqlite3.connect(db_path) | ||||
|         return sqlite3.connect(db_path).cursor() | ||||
|  | ||||
|  | ||||
| def _conv_sz(q, body, k, sql): | ||||
| @@ -146,3 +179,13 @@ def _conv_txt(q, body, k, sql): | ||||
|  | ||||
|         qk = "{} {} like {}?{}".format(sql, inv, head, tail) | ||||
|         q[qk + "\n" + v] = u8safe(v) | ||||
|  | ||||
|  | ||||
| def _sqlize(qobj): | ||||
|     keys = [] | ||||
|     values = [] | ||||
|     for k, v in sorted(qobj.items()): | ||||
|         keys.append(k.split("\n")[0]) | ||||
|         values.append(v) | ||||
|  | ||||
|     return " and ".join(keys), values | ||||
|   | ||||
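
To make the query shape in the U2idx hunks above concrete, a self-contained sqlite3 sketch; the `create table` statements are assumptions inferred from the columns the diff reads (`up` = wark, mtime, size, dir, name; `mt` = 16-char wark prefix, key, value), and the join mirrors the tags-first query built in `run_query`:

```python
import sqlite3

cur = sqlite3.connect(":memory:").cursor()
cur.execute("create table up (w text, mt int, sz int, rd text, fn text)")
cur.execute("create table mt (w text, k text, v text)")

wark = "A" * 43  # placeholder file id
cur.execute("insert into up values (?,?,?,?,?)",
            (wark, 1614700000, 123456, "album", "track.flac"))
cur.execute("insert into mt values (?,?,?)", (wark[:16], "artist", "demetori"))

# tags-first search: join mt on the truncated wark, then AND any up.* conditions
q = ("select up.* from up inner join mt on substr(up.w,1,16) = mt.w "
     "where mt.v like ? and up.fn like ?")
for w, ts, sz, rd, fn in cur.execute(q, ("%demetori%", "%.flac")):
    print(rd + "/" + fn, sz, ts)
```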
| @@ -3,6 +3,7 @@ from __future__ import print_function, unicode_literals | ||||
|  | ||||
| import re | ||||
| import os | ||||
| import sys | ||||
| import time | ||||
| import math | ||||
| import json | ||||
| @@ -12,6 +13,7 @@ import shutil | ||||
| import base64 | ||||
| import hashlib | ||||
| import threading | ||||
| import traceback | ||||
| from copy import deepcopy | ||||
|  | ||||
| from .__init__ import WINDOWS | ||||
| @@ -27,6 +29,8 @@ from .util import ( | ||||
|     w8b64enc, | ||||
|     w8b64dec, | ||||
| ) | ||||
| from .mtag import MTag | ||||
| from .authsrv import AuthSrv | ||||
|  | ||||
| try: | ||||
|     HAVE_SQLITE3 = True | ||||
| @@ -55,14 +59,15 @@ class Up2k(object): | ||||
|         # state | ||||
|         self.mutex = threading.Lock() | ||||
|         self.registry = {} | ||||
|         self.db = {} | ||||
|         self.entags = {} | ||||
|         self.flags = {} | ||||
|         self.cur = {} | ||||
|  | ||||
|         self.mem_db = None | ||||
|         self.mem_cur = None | ||||
|         if HAVE_SQLITE3: | ||||
|             # mojibake detector | ||||
|             self.mem_db = sqlite3.connect(":memory:", check_same_thread=False) | ||||
|             self.mem_db.execute(r"create table a (b text)") | ||||
|             self.mem_db.commit() | ||||
|             self.mem_cur = self._orz(":memory:") | ||||
|             self.mem_cur.execute(r"create table a (b text)") | ||||
|  | ||||
|         if WINDOWS: | ||||
|             # usually fails to set lastmod too quickly | ||||
| @@ -71,10 +76,9 @@ class Up2k(object): | ||||
|             thr.daemon = True | ||||
|             thr.start() | ||||
|  | ||||
|         if self.persist: | ||||
|             thr = threading.Thread(target=self._snapshot) | ||||
|             thr.daemon = True | ||||
|             thr.start() | ||||
|         self.mtag = MTag(self.log_func, self.args) | ||||
|         if not self.mtag.usable: | ||||
|             self.mtag = None | ||||
|  | ||||
|         # static | ||||
|         self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$") | ||||
| @@ -82,14 +86,23 @@ class Up2k(object): | ||||
|         if self.persist and not HAVE_SQLITE3: | ||||
|             self.log("could not initialize sqlite3, will use in-memory registry only") | ||||
|  | ||||
|         # this is kinda jank | ||||
|         auth = AuthSrv(self.args, self.log, False) | ||||
|         self.init_indexes(auth) | ||||
|  | ||||
|         if self.persist: | ||||
|             thr = threading.Thread(target=self._snapshot) | ||||
|             thr.daemon = True | ||||
|             thr.start() | ||||
|  | ||||
|     def log(self, msg): | ||||
|         self.log_func("up2k", msg + "\033[K") | ||||
|  | ||||
|     def w8enc(self, rd, fn): | ||||
|         ret = [] | ||||
|         for k, v in [["d", rd], ["f", fn]]: | ||||
|         for v in [rd, fn]: | ||||
|             try: | ||||
|                 self.mem_db.execute("select * from a where b = ?", (v,)) | ||||
|                 self.mem_cur.execute("select * from a where b = ?", (v,)) | ||||
|                 ret.append(v) | ||||
|             except: | ||||
|                 ret.append("//" + w8b64enc(v)) | ||||
| @@ -120,7 +133,59 @@ class Up2k(object): | ||||
|  | ||||
|         return ret | ||||
|  | ||||
|     def register_vpath(self, ptop): | ||||
|     def init_indexes(self, auth): | ||||
|         self.pp = ProgressPrinter() | ||||
|         vols = auth.vfs.all_vols.values() | ||||
|         t0 = time.time() | ||||
|  | ||||
|         live_vols = [] | ||||
|         for vol in vols: | ||||
|             try: | ||||
|                 os.listdir(vol.realpath) | ||||
|                 live_vols.append(vol) | ||||
|             except: | ||||
|                 self.log("\033[31mcannot access " + vol.realpath) | ||||
|  | ||||
|         vols = live_vols | ||||
|  | ||||
|         # e2ds(a) volumes first, | ||||
|         # also covers tags where e2ts is set | ||||
|         for vol in vols: | ||||
|             en = {} | ||||
|             if "mte" in vol.flags: | ||||
|                 en = {k: True for k in vol.flags["mte"].split(",")} | ||||
|  | ||||
|             self.entags[vol.realpath] = en | ||||
|  | ||||
|             if "e2ds" in vol.flags: | ||||
|                 r = self._build_file_index(vol, vols) | ||||
|                 if not r: | ||||
|                     needed_mutagen = True | ||||
|  | ||||
|         # open the rest + do any e2ts(a) | ||||
|         needed_mutagen = False | ||||
|         for vol in vols: | ||||
|             r = self.register_vpath(vol.realpath, vol.flags) | ||||
|             if not r or "e2ts" not in vol.flags: | ||||
|                 continue | ||||
|  | ||||
|             cur, db_path, sz0 = r | ||||
|             n_add, n_rm, success = self._build_tags_index(vol.realpath) | ||||
|             if not success: | ||||
|                 needed_mutagen = True | ||||
|  | ||||
|             if n_add or n_rm: | ||||
|                 self.vac(cur, db_path, n_add, n_rm, sz0) | ||||
|  | ||||
|         self.pp.end = True | ||||
|         msg = "{} volumes in {:.2f} sec" | ||||
|         self.log(msg.format(len(vols), time.time() - t0)) | ||||
|  | ||||
|         if needed_mutagen: | ||||
|             msg = "\033[31mcould not read tags because no backends are available (mutagen or ffprobe)\033[0m" | ||||
|             self.log(msg) | ||||
|  | ||||
|     def register_vpath(self, ptop, flags): | ||||
|         with self.mutex: | ||||
|             if ptop in self.registry: | ||||
|                 return None | ||||
| @@ -139,8 +204,9 @@ class Up2k(object): | ||||
|                 m = [m] + self._vis_reg_progress(reg) | ||||
|                 self.log("\n".join(m)) | ||||
|  | ||||
|             self.flags[ptop] = flags | ||||
|             self.registry[ptop] = reg | ||||
|             if not self.persist or not HAVE_SQLITE3: | ||||
|             if not self.persist or not HAVE_SQLITE3 or "d2d" in flags: | ||||
|                 return None | ||||
|  | ||||
|             try: | ||||
| @@ -149,54 +215,58 @@ class Up2k(object): | ||||
|                 pass | ||||
|  | ||||
|             db_path = os.path.join(ptop, ".hist", "up2k.db") | ||||
|             if ptop in self.db: | ||||
|                 # self.db[ptop].close() | ||||
|             if ptop in self.cur: | ||||
|                 return None | ||||
|  | ||||
|             try: | ||||
|                 db = self._open_db(db_path) | ||||
|                 self.db[ptop] = db | ||||
|                 return db | ||||
|             except Exception as ex: | ||||
|                 self.log("cannot use database at [{}]: {}".format(ptop, repr(ex))) | ||||
|                 sz0 = 0 | ||||
|                 if os.path.exists(db_path): | ||||
|                     sz0 = os.path.getsize(db_path) // 1024 | ||||
|  | ||||
|                 cur = self._open_db(db_path) | ||||
|                 self.cur[ptop] = cur | ||||
|                 return [cur, db_path, sz0] | ||||
|             except: | ||||
|                 msg = "cannot use database at [{}]:\n{}" | ||||
|                 self.log(msg.format(ptop, traceback.format_exc())) | ||||
|  | ||||
|             return None | ||||
|  | ||||
|     def build_indexes(self, writeables): | ||||
|         tops = [d.realpath for d in writeables] | ||||
|         self.pp = ProgressPrinter() | ||||
|         t0 = time.time() | ||||
|         for top in tops: | ||||
|             db = self.register_vpath(top) | ||||
|             if not db: | ||||
|                 continue | ||||
|     def _build_file_index(self, vol, all_vols): | ||||
|         do_vac = False | ||||
|         top = vol.realpath | ||||
|         reg = self.register_vpath(top, vol.flags) | ||||
|         if not reg: | ||||
|             return | ||||
|  | ||||
|             self.pp.n = next(db.execute("select count(w) from up"))[0] | ||||
|             db_path = os.path.join(top, ".hist", "up2k.db") | ||||
|             sz0 = os.path.getsize(db_path) // 1024 | ||||
|         _, db_path, sz0 = reg | ||||
|         dbw = [reg[0], 0, time.time()] | ||||
|         self.pp.n = next(dbw[0].execute("select count(w) from up"))[0] | ||||
|  | ||||
|             # can be symlink so don't `and d.startswith(top)`` | ||||
|             excl = set([d for d in tops if d != top]) | ||||
|             dbw = [db, 0, time.time()] | ||||
|         # can be symlink so don't `and d.startswith(top)`` | ||||
|         excl = set([d.realpath for d in all_vols if d != vol]) | ||||
|         n_add = self._build_dir(dbw, top, excl, top) | ||||
|         n_rm = self._drop_lost(dbw[0], top) | ||||
|         if dbw[1]: | ||||
|             self.log("commit {} new files".format(dbw[1])) | ||||
|             dbw[0].connection.commit() | ||||
|  | ||||
|             n_add = self._build_dir(dbw, top, excl, top) | ||||
|             n_rm = self._drop_lost(db, top) | ||||
|             if dbw[1]: | ||||
|                 self.log("commit {} new files".format(dbw[1])) | ||||
|         n_add, n_rm, success = self._build_tags_index(vol.realpath) | ||||
|  | ||||
|             db.commit() | ||||
|             if n_add or n_rm: | ||||
|                 db_path = os.path.join(top, ".hist", "up2k.db") | ||||
|                 sz1 = os.path.getsize(db_path) // 1024 | ||||
|                 db.execute("vacuum") | ||||
|                 sz2 = os.path.getsize(db_path) // 1024 | ||||
|                 msg = "{} new, {} del, {} kB vacced, {} kB gain, {} kB now".format( | ||||
|                     n_add, n_rm, sz1 - sz2, sz2 - sz0, sz2 | ||||
|                 ) | ||||
|                 self.log(msg) | ||||
|         dbw[0].connection.commit() | ||||
|         if n_add or n_rm or do_vac: | ||||
|             self.vac(dbw[0], db_path, n_add, n_rm, sz0) | ||||
|  | ||||
|         self.pp.end = True | ||||
|         self.log("{} volumes in {:.2f} sec".format(len(tops), time.time() - t0)) | ||||
|         return success | ||||
|  | ||||
|     def vac(self, cur, db_path, n_add, n_rm, sz0): | ||||
|         sz1 = os.path.getsize(db_path) // 1024 | ||||
|         cur.execute("vacuum") | ||||
|         sz2 = os.path.getsize(db_path) // 1024 | ||||
|         msg = "{} new, {} del, {} kB vacced, {} kB gain, {} kB now".format( | ||||
|             n_add, n_rm, sz1 - sz2, sz2 - sz0, sz2 | ||||
|         ) | ||||
|         self.log(msg) | ||||
|  | ||||
|     def _build_dir(self, dbw, top, excl, cdir): | ||||
|         try: | ||||
| @@ -270,16 +340,16 @@ class Up2k(object): | ||||
|                 td = time.time() - dbw[2] | ||||
|                 if dbw[1] >= 4096 or td >= 60: | ||||
|                     self.log("commit {} new files".format(dbw[1])) | ||||
|                     dbw[0].commit() | ||||
|                     dbw[0].connection.commit() | ||||
|                     dbw[1] = 0 | ||||
|                     dbw[2] = time.time() | ||||
|         return ret | ||||
|  | ||||
|     def _drop_lost(self, db, top): | ||||
|     def _drop_lost(self, cur, top): | ||||
|         rm = [] | ||||
|         nchecked = 0 | ||||
|         nfiles = next(db.execute("select count(w) from up"))[0] | ||||
|         c = db.execute("select * from up") | ||||
|         nfiles = next(cur.execute("select count(w) from up"))[0] | ||||
|         c = cur.execute("select * from up") | ||||
|         for dwark, dts, dsz, drd, dfn in c: | ||||
|             nchecked += 1 | ||||
|             if drd.startswith("//") or dfn.startswith("//"): | ||||
| @@ -298,49 +368,153 @@ class Up2k(object): | ||||
|             self.log("forgetting {} deleted files".format(len(rm))) | ||||
|             for rd, fn in rm: | ||||
|                 # self.log("{} / {}".format(rd, fn)) | ||||
|                 self.db_rm(db, rd, fn) | ||||
|                 self.db_rm(cur, rd, fn) | ||||
|  | ||||
|         return len(rm) | ||||
|  | ||||
|     def _build_tags_index(self, ptop): | ||||
|         entags = self.entags[ptop] | ||||
|         flags = self.flags[ptop] | ||||
|         cur = self.cur[ptop] | ||||
|         n_add = 0 | ||||
|         n_rm = 0 | ||||
|         n_buf = 0 | ||||
|         last_write = time.time() | ||||
|  | ||||
|         if "e2tsr" in flags: | ||||
|             n_rm = cur.execute("select count(w) from mt").fetchone()[0] | ||||
|             if n_rm: | ||||
|                 self.log("discarding {} media tags for a full rescan".format(n_rm)) | ||||
|                 cur.execute("delete from mt") | ||||
|             else: | ||||
|                 self.log("volume has e2tsr but there are no media tags to discard") | ||||
|  | ||||
|         # integrity: drop tags for tracks that were deleted | ||||
|         if "e2t" in flags: | ||||
|             drops = [] | ||||
|             c2 = cur.connection.cursor() | ||||
|             up_q = "select w from up where substr(w,1,16) = ?" | ||||
|             for (w,) in cur.execute("select w from mt"): | ||||
|                 if not c2.execute(up_q, (w,)).fetchone(): | ||||
|                     drops.append(w[:16]) | ||||
|             c2.close() | ||||
|  | ||||
|             if drops: | ||||
|                 msg = "discarding media tags for {} deleted files" | ||||
|                 self.log(msg.format(len(drops))) | ||||
|                 n_rm += len(drops) | ||||
|                 for w in drops: | ||||
|                     cur.execute("delete from mt where w = ?", (w,)) | ||||
|  | ||||
|         # bail if a volume flag disables indexing | ||||
|         if "d2t" in flags or "d2d" in flags: | ||||
|             return n_add, n_rm, True | ||||
|  | ||||
|         # add tags for new files | ||||
|         if "e2ts" in flags: | ||||
|             if not self.mtag: | ||||
|                 return n_add, n_rm, False | ||||
|  | ||||
|             c2 = cur.connection.cursor() | ||||
|             n_left = cur.execute("select count(w) from up").fetchone()[0] | ||||
|             for w, rd, fn in cur.execute("select w, rd, fn from up"): | ||||
|                 n_left -= 1 | ||||
|                 q = "select w from mt where w = ?" | ||||
|                 if c2.execute(q, (w[:16],)).fetchone(): | ||||
|                     continue | ||||
|  | ||||
|                 abspath = os.path.join(ptop, rd, fn) | ||||
|                 self.pp.msg = "c{} {}".format(n_left, abspath) | ||||
|                 tags = self.mtag.get(abspath) | ||||
|                 tags = {k: v for k, v in tags.items() if k in entags} | ||||
|                 if not tags: | ||||
|                     # indicate scanned without tags | ||||
|                     tags = {"x": 0} | ||||
|  | ||||
|                 for k, v in tags.items(): | ||||
|                     q = "insert into mt values (?,?,?)" | ||||
|                     c2.execute(q, (w[:16], k, v)) | ||||
|                     n_add += 1 | ||||
|                     n_buf += 1 | ||||
|  | ||||
|                 td = time.time() - last_write | ||||
|                 if n_buf >= 4096 or td >= 60: | ||||
|                     self.log("commit {} new tags".format(n_buf)) | ||||
|                     cur.connection.commit() | ||||
|                     last_write = time.time() | ||||
|                     n_buf = 0 | ||||
|  | ||||
|             c2.close() | ||||
|  | ||||
|         return n_add, n_rm, True | ||||
|  | ||||
|     def _orz(self, db_path): | ||||
|         return sqlite3.connect(db_path, check_same_thread=False).cursor() | ||||
|  | ||||
|     def _open_db(self, db_path): | ||||
|         existed = os.path.exists(db_path) | ||||
|         conn = sqlite3.connect(db_path, check_same_thread=False) | ||||
|         try: | ||||
|             ver = self._read_ver(conn) | ||||
|         cur = self._orz(db_path) | ||||
|         ver = self._read_ver(cur) | ||||
|         if not existed and ver is None: | ||||
|             return self._create_db(db_path, cur) | ||||
|  | ||||
|             if ver == 1: | ||||
|                 conn = self._upgrade_v1(conn, db_path) | ||||
|                 ver = self._read_ver(conn) | ||||
|         orig_ver = ver | ||||
|         if not ver or ver < 3: | ||||
|             bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver) | ||||
|             db = cur.connection | ||||
|             cur.close() | ||||
|             db.close() | ||||
|             msg = "creating new DB (old is bad); backup: {}" | ||||
|             if ver: | ||||
|                 msg = "creating backup before upgrade: {}" | ||||
|  | ||||
|             if ver == 2: | ||||
|                 try: | ||||
|                     nfiles = next(conn.execute("select count(w) from up"))[0] | ||||
|                     self.log("found DB at {} |{}|".format(db_path, nfiles)) | ||||
|                     return conn | ||||
|                 except Exception as ex: | ||||
|                     self.log("WARN: could not list files, DB corrupt?\n  " + repr(ex)) | ||||
|             self.log(msg.format(bak)) | ||||
|             shutil.copy2(db_path, bak) | ||||
|             cur = self._orz(db_path) | ||||
|  | ||||
|             if ver is not None: | ||||
|                 self.log("REPLACING unsupported DB (v.{}) at {}".format(ver, db_path)) | ||||
|             elif not existed: | ||||
|                 raise Exception("whatever") | ||||
|         if ver == 1: | ||||
|             cur = self._upgrade_v1(cur, db_path) | ||||
|             if cur: | ||||
|                 ver = 2 | ||||
|  | ||||
|             conn.close() | ||||
|             os.unlink(db_path) | ||||
|             conn = sqlite3.connect(db_path, check_same_thread=False) | ||||
|         except: | ||||
|             pass | ||||
|         if ver == 2: | ||||
|             cur = self._create_v3(cur) | ||||
|             ver = self._read_ver(cur) if cur else None | ||||
|  | ||||
|         # sqlite is variable-width only, no point in using char/nchar/varchar | ||||
|         self._create_v2(conn) | ||||
|         conn.commit() | ||||
|         if ver == 3: | ||||
|             if orig_ver != ver: | ||||
|                 cur.connection.commit() | ||||
|                 cur.execute("vacuum") | ||||
|                 cur.connection.commit() | ||||
|  | ||||
|             try: | ||||
|                 nfiles = next(cur.execute("select count(w) from up"))[0] | ||||
|                 self.log("OK: {} |{}|".format(db_path, nfiles)) | ||||
|                 return cur | ||||
|             except Exception as ex: | ||||
|                 self.log("WARN: could not list files, DB corrupt?\n  " + repr(ex)) | ||||
|  | ||||
|         if cur: | ||||
|             db = cur.connection | ||||
|             cur.close() | ||||
|             db.close() | ||||
|  | ||||
|         return self._create_db(db_path, None) | ||||
|  | ||||
|     def _create_db(self, db_path, cur): | ||||
|         if not cur: | ||||
|             cur = self._orz(db_path) | ||||
|  | ||||
|         self._create_v2(cur) | ||||
|         self._create_v3(cur) | ||||
|         cur.connection.commit() | ||||
|         self.log("created DB at {}".format(db_path)) | ||||
|         return conn | ||||
|         return cur | ||||
|  | ||||
|     def _read_ver(self, conn): | ||||
|     def _read_ver(self, cur): | ||||
|         for tab in ["ki", "kv"]: | ||||
|             try: | ||||
|                 c = conn.execute(r"select v from {} where k = 'sver'".format(tab)) | ||||
|                 c = cur.execute(r"select v from {} where k = 'sver'".format(tab)) | ||||
|             except: | ||||
|                 continue | ||||
|  | ||||
| @@ -348,26 +522,47 @@ class Up2k(object): | ||||
|             if rows: | ||||
|                 return int(rows[0][0]) | ||||
|  | ||||
|     def _create_v2(self, conn): | ||||
|     def _create_v2(self, cur): | ||||
|         for cmd in [ | ||||
|             r"create table ks (k text, v text)", | ||||
|             r"create table ki (k text, v int)", | ||||
|             r"create table up (w text, mt int, sz int, rd text, fn text)", | ||||
|             r"insert into ki values ('sver', 2)", | ||||
|             r"create index up_w on up(w)", | ||||
|             r"create index up_rd on up(rd)", | ||||
|             r"create index up_fn on up(fn)", | ||||
|         ]: | ||||
|             conn.execute(cmd) | ||||
|             cur.execute(cmd) | ||||
|         return cur | ||||
|  | ||||
|     def _create_v3(self, cur): | ||||
|         """ | ||||
|         collision in 2^(n/2) files where n = bits (6 bits/ch) | ||||
|           10*6/2 = 2^30 =       1'073'741'824, 24.1mb idx | ||||
|           12*6/2 = 2^36 =      68'719'476'736, 24.8mb idx | ||||
|           16*6/2 = 2^48 = 281'474'976'710'656, 26.1mb idx | ||||
|         """ | ||||
|         for c, ks in [["drop table k", "isv"], ["drop index up_", "w"]]: | ||||
|             for k in ks: | ||||
|                 try: | ||||
|                     cur.execute(c + k) | ||||
|                 except: | ||||
|                     pass | ||||
|  | ||||
|         for cmd in [ | ||||
|             r"create index up_w on up(substr(w,1,16))", | ||||
|             r"create table mt (w text, k text, v int)", | ||||
|             r"create index mt_w on mt(w)", | ||||
|             r"create index mt_k on mt(k)", | ||||
|             r"create index mt_v on mt(v)", | ||||
|             r"create table kv (k text, v int)", | ||||
|             r"insert into kv values ('sver', 3)", | ||||
|         ]: | ||||
|             cur.execute(cmd) | ||||
|         return cur | ||||
|  | ||||
|     def _upgrade_v1(self, odb, db_path): | ||||
|         self.log("\033[33mupgrading v1 to v2:\033[0m {}".format(db_path)) | ||||
|  | ||||
|         npath = db_path + ".next" | ||||
|         if os.path.exists(npath): | ||||
|             os.unlink(npath) | ||||
|  | ||||
|         ndb = sqlite3.connect(npath, check_same_thread=False) | ||||
|         ndb = self._orz(npath) | ||||
|         self._create_v2(ndb) | ||||
|  | ||||
|         c = odb.execute("select * from up") | ||||
| @@ -376,27 +571,29 @@ class Up2k(object): | ||||
|             v = (wark, ts, sz, rd, fn) | ||||
|             ndb.execute("insert into up values (?,?,?,?,?)", v) | ||||
|  | ||||
|         ndb.commit() | ||||
|         ndb.close() | ||||
|         odb.close() | ||||
|         bpath = db_path + ".bak.v1" | ||||
|         self.log("success; backup at: " + bpath) | ||||
|         atomic_move(db_path, bpath) | ||||
|         ndb.connection.commit() | ||||
|         ndb.connection.close() | ||||
|         odb.connection.close() | ||||
|         atomic_move(npath, db_path) | ||||
|         return sqlite3.connect(db_path, check_same_thread=False) | ||||
|         return self._orz(db_path) | ||||
|  | ||||
|     def handle_json(self, cj): | ||||
|         self.register_vpath(cj["ptop"]) | ||||
|         if not self.register_vpath(cj["ptop"], cj["vcfg"]): | ||||
|             if cj["ptop"] not in self.registry: | ||||
|                 raise Pebkac(410, "location unavailable") | ||||
|  | ||||
|         cj["name"] = sanitize_fn(cj["name"]) | ||||
|         cj["poke"] = time.time() | ||||
|         wark = self._get_wark(cj) | ||||
|         now = time.time() | ||||
|         job = None | ||||
|         with self.mutex: | ||||
|             db = self.db.get(cj["ptop"], None) | ||||
|             cur = self.cur.get(cj["ptop"], None) | ||||
|             reg = self.registry[cj["ptop"]] | ||||
|             if db: | ||||
|                 cur = db.execute(r"select * from up where w = ?", (wark,)) | ||||
|             if cur: | ||||
|                 q = r"select * from up where substr(w,1,16) = ? and w = ?" | ||||
|                 argv = (wark[:16], wark) | ||||
|                 cur = cur.execute(q, argv) | ||||
|                 for _, dtime, dsize, dp_dir, dp_fn in cur: | ||||
|                     if dp_dir.startswith("//") or dp_fn.startswith("//"): | ||||
|                         dp_dir, dp_fn = self.w8dec(dp_dir, dp_fn) | ||||
| @@ -409,7 +606,6 @@ class Up2k(object): | ||||
|                             "prel": dp_dir, | ||||
|                             "vtop": cj["vtop"], | ||||
|                             "ptop": cj["ptop"], | ||||
|                             "flag": cj["flag"], | ||||
|                             "size": dsize, | ||||
|                             "lmod": dtime, | ||||
|                             "hash": [], | ||||
| @@ -446,7 +642,7 @@ class Up2k(object): | ||||
|                         err = "partial upload exists at a different location; please resume uploading here instead:\n" | ||||
|                         err += "/" + vsrc + " " | ||||
|                         raise Pebkac(400, err) | ||||
|                     elif "nodupe" in job["flag"]: | ||||
|                     elif "nodupe" in self.flags[job["ptop"]]: | ||||
|                         self.log("dupe-reject:\n  {0}\n  {1}".format(src, dst)) | ||||
|                         err = "upload rejected, file already exists:\n/" + vsrc + " " | ||||
|                         raise Pebkac(400, err) | ||||
| @@ -476,7 +672,6 @@ class Up2k(object): | ||||
|                     "vtop", | ||||
|                     "ptop", | ||||
|                     "prel", | ||||
|                     "flag", | ||||
|                     "name", | ||||
|                     "size", | ||||
|                     "lmod", | ||||
| @@ -584,12 +779,13 @@ class Up2k(object): | ||||
|             if WINDOWS: | ||||
|                 self.lastmod_q.put([dst, (int(time.time()), int(job["lmod"]))]) | ||||
|  | ||||
|             db = self.db.get(job["ptop"], None) | ||||
|             if db: | ||||
|             cur = self.cur.get(job["ptop"], None) | ||||
|             if cur: | ||||
|                 j = job | ||||
|                 self.db_rm(db, j["prel"], j["name"]) | ||||
|                 self.db_add(db, j["wark"], j["prel"], j["name"], j["lmod"], j["size"]) | ||||
|                 db.commit() | ||||
|                 self.db_rm(cur, j["prel"], j["name"]) | ||||
|                 self.db_add(cur, j["wark"], j["prel"], j["name"], j["lmod"], j["size"]) | ||||
|                 cur.connection.commit() | ||||
|  | ||||
|                 del self.registry[ptop][wark] | ||||
|                 # in-memory registry is reserved for unfinished uploads | ||||
|  | ||||
| @@ -604,7 +800,7 @@ class Up2k(object): | ||||
|  | ||||
|     def db_add(self, db, wark, rd, fn, ts, sz): | ||||
|         sql = "insert into up values (?,?,?,?,?)" | ||||
|         v = (wark, ts, sz, rd, fn) | ||||
|         v = (wark, int(ts), sz, rd, fn) | ||||
|         try: | ||||
|             db.execute(sql, v) | ||||
|         except: | ||||
| @@ -635,10 +831,9 @@ class Up2k(object): | ||||
|         fsz = os.path.getsize(path) | ||||
|         csz = up2k_chunksize(fsz) | ||||
|         ret = [] | ||||
|         last_print = time.time() | ||||
|         with open(path, "rb", 512 * 1024) as f: | ||||
|             while fsz > 0: | ||||
|                 self.pp.msg = msg = "{} MB".format(int(fsz / 1024 / 1024)) | ||||
|                 self.pp.msg = "{} MB".format(int(fsz / 1024 / 1024)) | ||||
|                 hashobj = hashlib.sha512() | ||||
|                 rem = min(csz, fsz) | ||||
|                 fsz -= rem | ||||
|   | ||||
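The `_create_v3` docstring above sizes the wark prefix by the birthday bound: with base64 warks (6 bits per character), a k-character prefix carries 6k bits, and a collision becomes likely around 2^(6k/2) indexed files. A minimal sketch to reproduce those figures (the index-size estimates in the docstring are not recomputed here):

```python
# birthday-bound estimate for a base64 wark prefix, 6 bits per character;
# reproduces the file counts quoted in the _create_v3 docstring
for nchars in (10, 12, 16):
    bits = nchars * 6
    n_files = 2 ** (bits // 2)  # collisions become likely around this point
    print("{:2d} chars = {:2d} bits -> ~{:,} files".format(nchars, bits, n_files))

# 10 chars = 60 bits -> ~1,073,741,824 files
# 12 chars = 72 bits -> ~68,719,476,736 files
# 16 chars = 96 bits -> ~281,474,976,710,656 files
```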
| @@ -633,6 +633,40 @@ def read_socket_unbounded(sr): | ||||
|         yield buf | ||||
|  | ||||
|  | ||||
| def read_socket_chunked(sr, log=None): | ||||
|     err = "expected chunk length, got [{}] |{}| instead" | ||||
|     while True: | ||||
|         buf = b"" | ||||
|         while b"\r" not in buf: | ||||
|             rbuf = sr.recv(2) | ||||
|             if not rbuf or len(buf) > 16: | ||||
|                 err = err.format(buf.decode("utf-8", "replace"), len(buf)) | ||||
|                 raise Pebkac(400, err) | ||||
|  | ||||
|             buf += rbuf | ||||
|  | ||||
|         if not buf.endswith(b"\n"): | ||||
|             sr.recv(1) | ||||
|  | ||||
|         try: | ||||
|             chunklen = int(buf.rstrip(b"\r\n"), 16) | ||||
|         except: | ||||
|             err = err.format(buf.decode("utf-8", "replace"), len(buf)) | ||||
|             raise Pebkac(400, err) | ||||
|  | ||||
|         if chunklen == 0: | ||||
|             sr.recv(2)  # \r\n after final chunk | ||||
|             return | ||||
|  | ||||
|         if log: | ||||
|             log("receiving {} byte chunk".format(chunklen)) | ||||
|  | ||||
|         for chunk in read_socket(sr, chunklen): | ||||
|             yield chunk | ||||
|  | ||||
|         sr.recv(2)  # \r\n after each chunk too | ||||
|  | ||||
|  | ||||
| def hashcopy(actor, fin, fout): | ||||
|     u32_lim = int((2 ** 31) * 0.9) | ||||
|     hashobj = hashlib.sha512() | ||||
|   | ||||
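The new `read_socket_chunked` generator above parses `Transfer-Encoding: chunked` framing: it reads the hex length line, yields the chunk payload via `read_socket`, consumes the trailing CRLF after each chunk, and returns on the zero-length terminator. A minimal consumer sketch, assuming `sr` is the same socket-reader object used by the other readers in this file and that `read_socket(sr, n)` yields buffers totalling `n` bytes; the function and variable names below are illustrative, not part of the diff:

```python
# hypothetical caller: spool a chunked request body to disk
def save_chunked_body(sr, path, log=None):
    total = 0
    with open(path, "wb") as f:
        for buf in read_socket_chunked(sr, log=log):
            f.write(buf)       # each buf is raw chunk payload, framing stripped
            total += len(buf)
    return total  # body size in bytes, excluding chunk framing
```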
| @@ -46,7 +46,7 @@ body { | ||||
| 	display: none; | ||||
| } | ||||
| #files { | ||||
| 	border-collapse: collapse; | ||||
| 	border-spacing: 0; | ||||
| 	margin-top: 2em; | ||||
| 	z-index: 1; | ||||
| 	position: relative; | ||||
| @@ -94,6 +94,16 @@ a, | ||||
| 	margin: 0; | ||||
| 	padding: 0 .5em; | ||||
| } | ||||
| #files td { | ||||
| 	border-bottom: 1px solid #111; | ||||
| } | ||||
| #files td+td+td { | ||||
| 	max-width: 30em; | ||||
| 	overflow: hidden; | ||||
| } | ||||
| #files tr+tr td { | ||||
| 	border-top: 1px solid #383838; | ||||
| } | ||||
| #files tbody td:nth-child(3) { | ||||
| 	font-family: monospace; | ||||
| 	font-size: 1.3em; | ||||
| @@ -112,6 +122,9 @@ a, | ||||
| 	padding-bottom: 1.3em; | ||||
| 	border-bottom: .5em solid #444; | ||||
| } | ||||
| #files tbody tr td:last-child { | ||||
| 	white-space: nowrap; | ||||
| } | ||||
| #files thead th[style] { | ||||
| 	width: auto !important; | ||||
| } | ||||
| @@ -160,7 +173,8 @@ a, | ||||
| 	margin: -.2em; | ||||
| } | ||||
| #files tbody a.play.act { | ||||
| 	color: #af0; | ||||
| 	color: #840; | ||||
| 	text-shadow: 0 0 .3em #b80; | ||||
| } | ||||
| #blocked { | ||||
| 	position: fixed; | ||||
| @@ -291,6 +305,20 @@ a, | ||||
| 	width: calc(100% - 10.5em); | ||||
| 	background: rgba(0,0,0,0.2); | ||||
| } | ||||
| @media (min-width: 100em) { | ||||
| 	#barpos, | ||||
| 	#barbuf { | ||||
| 		width: calc(100% - 24em); | ||||
| 		left: 10em; | ||||
| 		top: .7em; | ||||
| 		height: 1.6em; | ||||
| 		bottom: auto; | ||||
| 	} | ||||
| 	#widget { | ||||
| 		bottom: -3.2em; | ||||
| 		height: 3.2em; | ||||
| 	} | ||||
| } | ||||
|  | ||||
|  | ||||
|  | ||||
| @@ -400,14 +428,13 @@ input[type="checkbox"]:checked+label { | ||||
| 	color: #fff; | ||||
| } | ||||
| #files td div a { | ||||
| 	display: table-cell; | ||||
| 	display: inline-block; | ||||
| 	white-space: nowrap; | ||||
| } | ||||
| #files td div a:last-child { | ||||
| 	width: 100%; | ||||
| } | ||||
| #files td div { | ||||
| 	display: table; | ||||
| 	border-collapse: collapse; | ||||
| 	width: 100%; | ||||
| } | ||||
| @@ -480,13 +507,6 @@ input[type="checkbox"]:checked+label { | ||||
| #treeul a:first-child { | ||||
| 	font-family: monospace, monospace; | ||||
| } | ||||
| #treefiles { | ||||
| 	opacity: 1; | ||||
| 	transition: opacity 0.2s ease-in-out; | ||||
| } | ||||
| #tree:hover+#treefiles { | ||||
| 	opacity: .8; | ||||
| } | ||||
| .dumb_loader_thing { | ||||
| 	display: inline-block; | ||||
| 	margin: 1em .3em 1em 1em; | ||||
| @@ -496,3 +516,42 @@ input[type="checkbox"]:checked+label { | ||||
| 	position: absolute; | ||||
| 	z-index: 9; | ||||
| } | ||||
| #files .cfg { | ||||
| 	display: none; | ||||
| 	font-size: 2em; | ||||
| 	white-space: nowrap; | ||||
| } | ||||
| #files th:hover .cfg, | ||||
| #files th.min .cfg { | ||||
| 	display: block; | ||||
| 	width: 1em; | ||||
| 	border-radius: .2em; | ||||
| 	margin: -1.3em auto 0 auto; | ||||
| 	background: #444; | ||||
| } | ||||
| #files th.min .cfg { | ||||
| 	margin: -.6em; | ||||
| } | ||||
| #files>thead>tr>th.min span { | ||||
| 	position: absolute; | ||||
| 	transform: rotate(270deg); | ||||
| 	background: linear-gradient(90deg, #222, #444); | ||||
| 	margin-left: -4.6em; | ||||
| 	padding: .4em; | ||||
| 	top: 5.4em; | ||||
| 	width: 8em; | ||||
| 	text-align: right; | ||||
| 	letter-spacing: .04em; | ||||
| } | ||||
| #files td:nth-child(2n) { | ||||
| 	color: #f5a; | ||||
| } | ||||
| #files td.min a { | ||||
| 	display: none; | ||||
| } | ||||
| #files tr.play td { | ||||
| 	background: #fc4; | ||||
| 	border-color: transparent; | ||||
| 	color: #400; | ||||
| 	text-shadow: none; | ||||
| } | ||||
| @@ -26,7 +26,11 @@ | ||||
|     </div> | ||||
|  | ||||
|     <div id="op_search" class="opview"> | ||||
|         {%- if have_tags_idx %} | ||||
|         <table id="srch_form" class="tags"></table> | ||||
|         {%- else %} | ||||
|         <table id="srch_form"></table> | ||||
|         {%- endif %} | ||||
|         <div id="srch_q"></div> | ||||
|     </div> | ||||
|     {%- include 'upload.html' %} | ||||
| @@ -38,9 +42,7 @@ | ||||
|         {%- endfor %} | ||||
|     </h1> | ||||
|      | ||||
|     {%- if prologue %} | ||||
|     <div id="pro" class="logue">{{ prologue }}</div> | ||||
|     {%- endif %} | ||||
|     <div id="pro" class="logue">{{ logues[0] }}</div> | ||||
|  | ||||
|     <table id="treetab"> | ||||
|         <tr> | ||||
| @@ -56,24 +58,35 @@ | ||||
|         <thead> | ||||
|             <tr> | ||||
|                 <th></th> | ||||
|                 <th>File Name</th> | ||||
|                 <th sort="int">File Size</th> | ||||
|                 <th>T</th> | ||||
|                 <th>Date</th> | ||||
|                 <th><span>File Name</span></th> | ||||
|                 <th sort="int"><span>Size</span></th> | ||||
|                 {%- for k in taglist %} | ||||
|                     {%- if k.startswith('.') %} | ||||
|                         <th sort="int"><span>{{ k[1:] }}</span></th> | ||||
|                     {%- else %} | ||||
|                         <th><span>{{ k[0]|upper }}{{ k[1:] }}</span></th> | ||||
|                     {%- endif %} | ||||
|                 {%- endfor %} | ||||
|                 <th><span>T</span></th> | ||||
|                 <th><span>Date</span></th> | ||||
|             </tr> | ||||
|         </thead> | ||||
|         <tbody> | ||||
|  | ||||
| {%- for f in files %} | ||||
| <tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td><td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr> | ||||
|     <tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td> | ||||
|     {%- if f.tags is defined %} | ||||
|         {%- for k in taglist %} | ||||
|             <td>{{ f.tags[k] }}</td> | ||||
|         {%- endfor %} | ||||
|     {%- endif %} | ||||
|     <td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr> | ||||
| {%- endfor %} | ||||
|  | ||||
|         </tbody> | ||||
|     </table> | ||||
|      | ||||
|     {%- if epilogue %} | ||||
|     <div id="epi" class="logue">{{ epilogue }}</div> | ||||
|     {%- endif %} | ||||
|     <div id="epi" class="logue">{{ logues[1] }}</div> | ||||
|  | ||||
|     <h2><a href="?h">control-panel</a></h2> | ||||
|  | ||||
| @@ -90,7 +103,10 @@ | ||||
|             <canvas id="barbuf"></canvas> | ||||
|         </div> | ||||
|     </div> | ||||
|      | ||||
|  | ||||
|     <script> | ||||
|         var tag_order_cfg = {{ tag_order }}; | ||||
|     </script> | ||||
|     <script src="/.cpr/util.js{{ ts }}"></script> | ||||
|     <script src="/.cpr/browser.js{{ ts }}"></script> | ||||
|     <script src="/.cpr/up2k.js{{ ts }}"></script> | ||||
|   | ||||
| @@ -6,21 +6,6 @@ function dbg(msg) { | ||||
| 	ebi('path').innerHTML = msg; | ||||
| } | ||||
|  | ||||
| function ev(e) { | ||||
| 	e = e || window.event; | ||||
| 	if (!e) | ||||
| 		return; | ||||
|  | ||||
| 	if (e.preventDefault) | ||||
| 		e.preventDefault() | ||||
|  | ||||
| 	if (e.stopPropagation) | ||||
| 		e.stopPropagation(); | ||||
|  | ||||
| 	e.returnValue = false; | ||||
| 	return e; | ||||
| } | ||||
|  | ||||
| makeSortable(ebi('files')); | ||||
|  | ||||
|  | ||||
| @@ -55,7 +40,7 @@ function init_mp() { | ||||
| 	for (var a = 0, aa = tracks.length; a < aa; a++) | ||||
| 		ebi('trk' + a).onclick = ev_play; | ||||
|  | ||||
| 	ret.vol = localStorage.getItem('vol'); | ||||
| 	ret.vol = sread('vol'); | ||||
| 	if (ret.vol !== null) | ||||
| 		ret.vol = parseFloat(ret.vol); | ||||
| 	else | ||||
| @@ -67,7 +52,7 @@ function init_mp() { | ||||
|  | ||||
| 	ret.setvol = function (vol) { | ||||
| 		ret.vol = Math.max(Math.min(vol, 1), 0); | ||||
| 		localStorage.setItem('vol', vol); | ||||
| 		swrite('vol', vol); | ||||
|  | ||||
| 		if (ret.au) | ||||
| 			ret.au.volume = ret.expvol(); | ||||
| @@ -460,6 +445,11 @@ function play(tid, call_depth) { | ||||
| 	mp.au.volume = mp.expvol(); | ||||
| 	var oid = 'trk' + tid; | ||||
| 	setclass(oid, 'play act'); | ||||
| 	var trs = ebi('files').getElementsByTagName('tbody')[0].getElementsByTagName('tr'); | ||||
| 	for (var a = 0, aa = trs.length; a < aa; a++) { | ||||
| 		trs[a].className = trs[a].className.replace(/ *play */, ""); | ||||
| 	} | ||||
| 	ebi(oid).parentElement.parentElement.className += ' play'; | ||||
|  | ||||
| 	try { | ||||
| 		if (hack_attempt_play) | ||||
| @@ -472,7 +462,7 @@ function play(tid, call_depth) { | ||||
| 		o.setAttribute('id', 'thx_js'); | ||||
| 		if (window.history && history.replaceState) { | ||||
| 			var nurl = (document.location + '').split('#')[0] + '#' + oid; | ||||
| 			history.replaceState(ebi('files').tBodies[0].innerHTML, nurl, nurl); | ||||
| 			history.replaceState(ebi('files').innerHTML, nurl, nurl); | ||||
| 		} | ||||
| 		else { | ||||
| 			document.location.hash = oid; | ||||
| @@ -591,6 +581,12 @@ function autoplay_blocked() { | ||||
| 			["name", "name", "name contains   (negate with -nope)", "46"] | ||||
| 		] | ||||
| 	]; | ||||
|  | ||||
| 	if (document.querySelector('#srch_form.tags')) | ||||
| 		sconf.push(["tags", | ||||
| 			["tags", "tags", "tags contains", "46"] | ||||
| 		]); | ||||
|  | ||||
| 	var html = []; | ||||
| 	var orig_html = null; | ||||
| 	for (var a = 0; a < sconf.length; a++) { | ||||
| @@ -610,7 +606,7 @@ function autoplay_blocked() { | ||||
| 	} | ||||
| 	ebi('srch_form').innerHTML = html.join('\n'); | ||||
|  | ||||
| 	var o = document.querySelectorAll('#op_search input[type="text"]'); | ||||
| 	var o = document.querySelectorAll('#op_search input'); | ||||
| 	for (var a = 0; a < o.length; a++) { | ||||
| 		o[a].oninput = ev_search_input; | ||||
| 	} | ||||
| @@ -619,8 +615,11 @@ function autoplay_blocked() { | ||||
|  | ||||
| 	function ev_search_input() { | ||||
| 		var v = this.value; | ||||
| 		var chk = ebi(this.getAttribute('id').slice(0, -1) + 'c'); | ||||
| 		chk.checked = ((v + '').length > 0); | ||||
| 		var id = this.getAttribute('id'); | ||||
| 		if (id.slice(-1) == 'v') { | ||||
| 			var chk = ebi(id.slice(0, -1) + 'c'); | ||||
| 			chk.checked = ((v + '').length > 0); | ||||
| 		} | ||||
| 		clearTimeout(search_timeout); | ||||
| 		search_timeout = setTimeout(do_search, 100); | ||||
| 	} | ||||
| @@ -653,6 +652,9 @@ function autoplay_blocked() { | ||||
| 			return; | ||||
| 		} | ||||
|  | ||||
| 		var res = JSON.parse(this.responseText), | ||||
| 			tagord = res.tag_order; | ||||
|  | ||||
| 		var ofiles = ebi('files'); | ||||
| 		if (ofiles.getAttribute('ts') > this.ts) | ||||
| 			return; | ||||
| @@ -660,10 +662,11 @@ function autoplay_blocked() { | ||||
| 		ebi('path').style.display = 'none'; | ||||
| 		ebi('tree').style.display = 'none'; | ||||
|  | ||||
| 		var html = ['<tr><td>-</td><td colspan="4"><a href="#" id="unsearch">close search results</a></td></tr>']; | ||||
| 		var res = JSON.parse(this.responseText); | ||||
| 		for (var a = 0; a < res.length; a++) { | ||||
| 			var r = res[a], | ||||
| 		var html = mk_files_header(tagord); | ||||
| 		html.push('<tbody>'); | ||||
| 		html.push('<tr><td>-</td><td colspan="42"><a href="#" id="unsearch">close search results</a></td></tr>'); | ||||
| 		for (var a = 0; a < res.hits.length; a++) { | ||||
| 			var r = res.hits[a], | ||||
| 				ts = parseInt(r.ts), | ||||
| 				sz = esc(r.sz + ''), | ||||
| 				rp = esc(r.rp + ''), | ||||
| @@ -674,15 +677,31 @@ function autoplay_blocked() { | ||||
| 				ext = '%'; | ||||
|  | ||||
| 			links = links.join(''); | ||||
| 			html.push('<tr><td>-</td><td><div>' + links + '</div></td><td>' + sz + | ||||
| 				'</td><td>' + ext + '</td><td>' + unix2iso(ts) + '</td></tr>'); | ||||
| 			var nodes = ['<tr><td>-</td><td><div>' + links + '</div>', sz]; | ||||
| 			for (var b = 0; b < tagord.length; b++) { | ||||
| 				var k = tagord[b], | ||||
| 					v = r.tags[k] || ""; | ||||
|  | ||||
| 				if (k == "dur") { | ||||
| 					var sv = s2ms(v); | ||||
| 					nodes[nodes.length - 1] += '</td><td sortv="' + v + '">' + sv; | ||||
| 					continue; | ||||
| 				} | ||||
|  | ||||
| 				nodes.push(v); | ||||
| 			} | ||||
|  | ||||
| 			nodes = nodes.concat([ext, unix2iso(ts)]); | ||||
| 			html.push(nodes.join('</td><td>')); | ||||
| 			html.push('</td></tr>'); | ||||
| 		} | ||||
|  | ||||
| 		if (!orig_html) | ||||
| 			orig_html = ebi('files').tBodies[0].innerHTML; | ||||
| 			orig_html = ebi('files').innerHTML; | ||||
|  | ||||
| 		ofiles.tBodies[0].innerHTML = html.join('\n'); | ||||
| 		ofiles.innerHTML = html.join('\n'); | ||||
| 		ofiles.setAttribute("ts", this.ts); | ||||
| 		filecols.set_style(); | ||||
| 		reload_browser(); | ||||
|  | ||||
| 		ebi('unsearch').onclick = unsearch; | ||||
| @@ -692,7 +711,7 @@ function autoplay_blocked() { | ||||
| 		ev(e); | ||||
| 		ebi('path').style.display = 'inline-block'; | ||||
| 		ebi('tree').style.display = 'block'; | ||||
| 		ebi('files').tBodies[0].innerHTML = orig_html; | ||||
| 		ebi('files').innerHTML = orig_html; | ||||
| 		orig_html = null; | ||||
| 		reload_browser(); | ||||
| 	} | ||||
| @@ -712,17 +731,11 @@ function autoplay_blocked() { | ||||
|  | ||||
| 		treetab.style.display = 'table'; | ||||
|  | ||||
| 		var pro = ebi('pro'); | ||||
| 		if (pro) | ||||
| 			treefiles.appendChild(pro); | ||||
|  | ||||
| 		treefiles.appendChild(ebi('pro')); | ||||
| 		treefiles.appendChild(ebi('files')); | ||||
| 		treefiles.appendChild(ebi('epi')); | ||||
|  | ||||
| 		var epi = ebi('epi'); | ||||
| 		if (epi) | ||||
| 			treefiles.appendChild(epi); | ||||
|  | ||||
| 		localStorage.setItem('entreed', 'tree'); | ||||
| 		swrite('entreed', 'tree'); | ||||
| 		get_tree("", get_vpath()); | ||||
| 	} | ||||
|  | ||||
| @@ -857,27 +870,42 @@ function autoplay_blocked() { | ||||
| 		ebi('srv_info').innerHTML = '<span>' + res.srvinf + '</span>'; | ||||
| 		var nodes = res.dirs.concat(res.files); | ||||
| 		var top = this.top; | ||||
| 		var html = []; | ||||
| 		var html = mk_files_header(res.taglist); | ||||
| 		html.push('<tbody>'); | ||||
| 		for (var a = 0; a < nodes.length; a++) { | ||||
| 			var r = nodes[a], | ||||
| 				ln = '<tr><td>' + r.lead + '</td><td><a href="' + | ||||
| 					top + r.href + '">' + esc(decodeURIComponent(r.href)) + '</a>'; | ||||
| 				ln = ['<tr><td>' + r.lead + '</td><td><a href="' + | ||||
| 					top + r.href + '">' + esc(decodeURIComponent(r.href)) + '</a>', r.sz]; | ||||
|  | ||||
| 			ln = [ln, r.sz, r.ext, unix2iso(r.ts)].join('</td><td>'); | ||||
| 			for (var b = 0; b < res.taglist.length; b++) { | ||||
| 				var k = res.taglist[b], | ||||
| 					v = (r.tags || {})[k] || ""; | ||||
|  | ||||
| 				if (k[0] == '.') | ||||
| 					k = k.slice(1); | ||||
|  | ||||
| 				if (k == "dur") { | ||||
| 					var sv = s2ms(v); | ||||
| 					ln[ln.length - 1] += '</td><td sortv="' + v + '">' + sv; | ||||
| 					continue; | ||||
| 				} | ||||
| 				ln.push(v); | ||||
| 			} | ||||
| 			ln = ln.concat([r.ext, unix2iso(r.ts)]).join('</td><td>'); | ||||
| 			html.push(ln + '</td></tr>'); | ||||
| 		} | ||||
| 		html.push('</tbody>'); | ||||
| 		html = html.join('\n'); | ||||
| 		ebi('files').tBodies[0].innerHTML = html; | ||||
| 		ebi('files').innerHTML = html; | ||||
|  | ||||
| 		history.pushState(html, this.top, this.top); | ||||
| 		apply_perms(res.perms); | ||||
| 		despin('#files'); | ||||
|  | ||||
| 		var o = ebi('pro'); | ||||
| 		if (o) o.parentNode.removeChild(o); | ||||
|  | ||||
| 		o = ebi('epi'); | ||||
| 		if (o) o.parentNode.removeChild(o); | ||||
| 		ebi('pro').innerHTML = res.logues ? res.logues[0] || "" : ""; | ||||
| 		ebi('epi').innerHTML = res.logues ? res.logues[1] || "" : ""; | ||||
|  | ||||
| 		filecols.set_style(); | ||||
| 		reload_tree(); | ||||
| 		reload_browser(); | ||||
| 	} | ||||
| @@ -915,39 +943,33 @@ function autoplay_blocked() { | ||||
| 		ev(e); | ||||
| 		var treetab = ebi('treetab'); | ||||
|  | ||||
| 		var pro = ebi('pro'); | ||||
| 		if (pro) | ||||
| 			treetab.parentNode.insertBefore(pro, treetab); | ||||
|  | ||||
| 		treetab.parentNode.insertBefore(ebi('pro'), treetab); | ||||
| 		treetab.parentNode.insertBefore(ebi('files'), treetab.nextSibling); | ||||
|  | ||||
| 		var epi = ebi('epi'); | ||||
| 		if (epi) | ||||
| 			treetab.parentNode.insertBefore(epi, ebi('files').nextSibling); | ||||
| 		treetab.parentNode.insertBefore(ebi('epi'), ebi('files').nextSibling); | ||||
|  | ||||
| 		ebi('path').style.display = 'inline-block'; | ||||
| 		treetab.style.display = 'none'; | ||||
|  | ||||
| 		localStorage.setItem('entreed', 'na'); | ||||
| 		swrite('entreed', 'na'); | ||||
| 	} | ||||
|  | ||||
| 	ebi('entree').onclick = entree; | ||||
| 	ebi('detree').onclick = detree; | ||||
| 	if (window.localStorage && localStorage.getItem('entreed') == 'tree') | ||||
| 	if (sread('entreed') == 'tree') | ||||
| 		entree(); | ||||
|  | ||||
| 	window.onpopstate = function (e) { | ||||
| 		console.log(e.url + ' ,, ' + ((e.state + '').slice(0, 64))); | ||||
| 		if (e.state) { | ||||
| 			ebi('files').tBodies[0].innerHTML = e.state; | ||||
| 			ebi('files').innerHTML = e.state; | ||||
| 			reload_tree(); | ||||
| 			reload_browser(); | ||||
| 		} | ||||
| 	}; | ||||
|  | ||||
| 	if (window.history && history.pushState) { | ||||
| 		var u = get_vpath(); | ||||
| 		history.replaceState(ebi('files').tBodies[0].innerHTML, u, u); | ||||
| 		var u = get_vpath() + window.location.hash; | ||||
| 		history.replaceState(ebi('files').innerHTML, u, u); | ||||
| 	} | ||||
| })(); | ||||
|  | ||||
| @@ -998,12 +1020,125 @@ function apply_perms(perms) { | ||||
| 			(have_write || tds[a].getAttribute('data-perm') == 'read') ? | ||||
| 				'table-cell' : 'none'; | ||||
| 	} | ||||
| 	if (!have_write && up2k) | ||||
|  | ||||
| 	if (window['up2k']) | ||||
| 		up2k.set_fsearch(); | ||||
| } | ||||
|  | ||||
|  | ||||
| function mk_files_header(taglist) { | ||||
| 	var html = [ | ||||
| 		'<thead>', | ||||
| 		'<th></th>', | ||||
| 		'<th><span>File Name</span></th>', | ||||
| 		'<th sort="int"><span>Size</span></th>' | ||||
| 	]; | ||||
| 	for (var a = 0; a < taglist.length; a++) { | ||||
| 		var tag = taglist[a]; | ||||
| 		var c1 = tag.slice(0, 1).toUpperCase(); | ||||
| 		tag = c1 + tag.slice(1); | ||||
| 		if (c1 == '.') | ||||
| 			tag = '<th sort="int"><span>' + tag.slice(1); | ||||
| 		else | ||||
| 			tag = '<th><span>' + tag; | ||||
|  | ||||
| 		html.push(tag + '</span></th>'); | ||||
| 	} | ||||
| 	html = html.concat([ | ||||
| 		'<th><span>T</span></th>', | ||||
| 		'<th><span>Date</span></th>', | ||||
| 		'</thead>', | ||||
| 	]); | ||||
| 	return html; | ||||
| } | ||||
|  | ||||
|  | ||||
| var filecols = (function () { | ||||
| 	var hidden = jread('filecols', []); | ||||
|  | ||||
| 	var add_btns = function () { | ||||
| 		var ths = document.querySelectorAll('#files th>span'); | ||||
| 		for (var a = 0, aa = ths.length; a < aa; a++) { | ||||
| 			var th = ths[a].parentElement; | ||||
| 			var is_hidden = has(hidden, ths[a].textContent); | ||||
| 			th.innerHTML = '<div class="cfg"><a href="#">' + | ||||
| 				(is_hidden ? '+' : '-') + '</a></div>' + ths[a].outerHTML; | ||||
|  | ||||
| 			th.getElementsByTagName('a')[0].onclick = ev_row_tgl; | ||||
| 		} | ||||
| 	}; | ||||
|  | ||||
| 	var set_style = function () { | ||||
| 		add_btns(); | ||||
|  | ||||
| 		var ohidden = [], | ||||
| 			ths = document.querySelectorAll('#files th'), | ||||
| 			ncols = ths.length; | ||||
|  | ||||
| 		for (var a = 0; a < ncols; a++) { | ||||
| 			var span = ths[a].getElementsByTagName('span'); | ||||
| 			if (span.length <= 0) | ||||
| 				continue; | ||||
|  | ||||
| 			var name = span[0].textContent, | ||||
| 				cls = ''; | ||||
|  | ||||
| 			if (has(hidden, name)) { | ||||
| 				ohidden.push(a); | ||||
| 				cls = ' min'; | ||||
| 			} | ||||
| 			ths[a].className = ths[a].className.replace(/ *min */, " ") + cls; | ||||
| 		} | ||||
| 		for (var a = 0; a < ncols; a++) { | ||||
| 			var cls = has(ohidden, a) ? 'min' : ''; | ||||
| 			var tds = document.querySelectorAll('#files>tbody>tr>td:nth-child(' + (a + 1) + ')'); | ||||
| 			for (var b = 0, bb = tds.length; b < bb; b++) { | ||||
| 				tds[b].setAttribute('class', cls); | ||||
| 				if (a < 2) | ||||
| 					continue; | ||||
|  | ||||
| 				if (cls) { | ||||
| 					if (!tds[b].hasAttribute('html')) { | ||||
| 						tds[b].setAttribute('html', tds[b].innerHTML); | ||||
| 						tds[b].innerHTML = '...'; | ||||
| 					} | ||||
| 				} | ||||
| 				else if (tds[b].hasAttribute('html')) { | ||||
| 					tds[b].innerHTML = tds[b].getAttribute('html'); | ||||
| 					tds[b].removeAttribute('html'); | ||||
| 				} | ||||
| 			} | ||||
| 		} | ||||
| 	}; | ||||
| 	set_style(); | ||||
|  | ||||
| 	var toggle = function (name) { | ||||
| 		var ofs = hidden.indexOf(name); | ||||
| 		if (ofs !== -1) | ||||
| 			hidden.splice(ofs, 1); | ||||
| 		else | ||||
| 			hidden.push(name); | ||||
|  | ||||
| 		jwrite("filecols", hidden); | ||||
| 		set_style(); | ||||
| 	}; | ||||
|  | ||||
| 	return { | ||||
| 		"add_btns": add_btns, | ||||
| 		"set_style": set_style, | ||||
| 		"toggle": toggle, | ||||
| 	}; | ||||
| })(); | ||||
|  | ||||
|  | ||||
| function ev_row_tgl(e) { | ||||
| 	ev(e); | ||||
| 	filecols.toggle(this.parentElement.parentElement.getElementsByTagName('span')[0].textContent); | ||||
| } | ||||
|  | ||||
|  | ||||
| function reload_browser(not_mp) { | ||||
| 	filecols.set_style(); | ||||
| 	makeSortable(ebi('files')); | ||||
|  | ||||
| 	var parts = get_vpath().split('/'); | ||||
| @@ -1036,5 +1171,8 @@ function reload_browser(not_mp) { | ||||
| 		widget.close(); | ||||
| 		mp = init_mp(); | ||||
| 	} | ||||
|  | ||||
| 	if (window['up2k']) | ||||
| 		up2k.set_fsearch(); | ||||
| } | ||||
| reload_browser(true); | ||||
|   | ||||
| @@ -524,11 +524,9 @@ dom_navtgl.onclick = function () { | ||||
|     dom_navtgl.innerHTML = hidden ? 'show nav' : 'hide nav'; | ||||
|     dom_nav.style.display = hidden ? 'none' : 'block'; | ||||
|  | ||||
|     if (window.localStorage) | ||||
|         localStorage.setItem('hidenav', hidden ? 1 : 0); | ||||
|  | ||||
|     swrite('hidenav', hidden ? 1 : 0); | ||||
|     redraw(); | ||||
| }; | ||||
|  | ||||
| if (window.localStorage && localStorage.getItem('hidenav') == 1) | ||||
| if (sread('hidenav') == 1) | ||||
|     dom_navtgl.onclick(); | ||||
|   | ||||
| @@ -210,7 +210,7 @@ function up2k_init(have_crypto) { | ||||
|     } | ||||
|  | ||||
|     function cfg_get(name) { | ||||
|         var val = localStorage.getItem(name); | ||||
|         var val = sread(name); | ||||
|         if (val === null) | ||||
|             return parseInt(ebi(name).value); | ||||
|  | ||||
| @@ -223,7 +223,7 @@ function up2k_init(have_crypto) { | ||||
|         if (!o) | ||||
|             return defval; | ||||
|  | ||||
|         var val = localStorage.getItem(name); | ||||
|         var val = sread(name); | ||||
|         if (val === null) | ||||
|             val = defval; | ||||
|         else | ||||
| @@ -234,8 +234,7 @@ function up2k_init(have_crypto) { | ||||
|     } | ||||
|  | ||||
|     function bcfg_set(name, val) { | ||||
|         localStorage.setItem( | ||||
|             name, val ? '1' : '0'); | ||||
|         swrite(name, val ? '1' : '0'); | ||||
|  | ||||
|         var o = ebi(name); | ||||
|         if (o) | ||||
| @@ -282,7 +281,7 @@ function up2k_init(have_crypto) { | ||||
|  | ||||
|     var flag = false; | ||||
|     apply_flag_cfg(); | ||||
|     apply_fsearch_cfg(); | ||||
|     set_fsearch(); | ||||
|  | ||||
|     function nav() { | ||||
|         ebi('file' + fdom_ctr).click(); | ||||
| @@ -772,13 +771,13 @@ function up2k_init(have_crypto) { | ||||
|                 if (!response.name) { | ||||
|                     var msg = ''; | ||||
|                     var smsg = ''; | ||||
|                     if (!response || !response.length) { | ||||
|                     if (!response || !response.hits || !response.hits.length) { | ||||
|                         msg = 'not found on server'; | ||||
|                         smsg = '404'; | ||||
|                     } | ||||
|                     else { | ||||
|                         smsg = 'found'; | ||||
|                         var hit = response[0], | ||||
|                         var hit = response.hits[0], | ||||
|                             msg = linksplit(hit.rp).join(''), | ||||
|                             tr = unix2iso(hit.ts), | ||||
|                             tu = unix2iso(t.lmod), | ||||
| @@ -1033,7 +1032,7 @@ function up2k_init(have_crypto) { | ||||
|                 return; | ||||
|  | ||||
|             parallel_uploads = v; | ||||
|             localStorage.setItem('nthread', v); | ||||
|             swrite('nthread', v); | ||||
|             obj.style.background = '#444'; | ||||
|             return; | ||||
|         } | ||||
| @@ -1061,12 +1060,31 @@ function up2k_init(have_crypto) { | ||||
|     } | ||||
|  | ||||
|     function tgl_fsearch() { | ||||
|         fsearch = !fsearch; | ||||
|         bcfg_set('fsearch', fsearch); | ||||
|         apply_fsearch_cfg(); | ||||
|         set_fsearch(!fsearch); | ||||
|     } | ||||
|  | ||||
|     function apply_fsearch_cfg() { | ||||
|     function set_fsearch(new_state) { | ||||
|         var perms = document.body.getAttribute('perms'); | ||||
|         var read_only = false; | ||||
|  | ||||
|         if (!ebi('fsearch')) { | ||||
|             new_state = false; | ||||
|         } | ||||
|         else if (perms && perms.indexOf('write') === -1) { | ||||
|             new_state = true; | ||||
|             read_only = true; | ||||
|         } | ||||
|  | ||||
|         if (new_state !== undefined) { | ||||
|             fsearch = new_state; | ||||
|             bcfg_set('fsearch', fsearch); | ||||
|         } | ||||
|  | ||||
|         try { | ||||
|             document.querySelector('label[for="fsearch"]').style.opacity = read_only ? '0' : '1'; | ||||
|         } | ||||
|         catch (ex) { } | ||||
|  | ||||
|         try { | ||||
|             var fun = fsearch ? 'add' : 'remove'; | ||||
|             ebi('op_up2k').classList[fun]('srch'); | ||||
| @@ -1078,11 +1096,6 @@ function up2k_init(have_crypto) { | ||||
|         catch (ex) { } | ||||
|     } | ||||
|  | ||||
|     function set_fsearch() { | ||||
|         if (!fsearch) | ||||
|             tgl_fsearch(); | ||||
|     } | ||||
|  | ||||
|     function tgl_flag_en() { | ||||
|         flag_en = !flag_en; | ||||
|         bcfg_set('flag_en', flag_en); | ||||
| @@ -1131,12 +1144,8 @@ function up2k_init(have_crypto) { | ||||
|     for (var a = nodes.length - 1; a >= 0; a--) | ||||
|         nodes[a].addEventListener('touchend', nop, false); | ||||
|  | ||||
|     var perms = document.body.getAttribute('perms'); | ||||
|     if (perms && perms.indexOf('write') === -1) | ||||
|         set_fsearch(); | ||||
|  | ||||
|     set_fsearch(); | ||||
|     bumpthread({ "target": 1 }) | ||||
|  | ||||
|     return { "init_deps": init_deps, "set_fsearch": set_fsearch } | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -43,6 +43,21 @@ function ebi(id) { | ||||
|     return document.getElementById(id); | ||||
| } | ||||
|  | ||||
| function ev(e) { | ||||
|     e = e || window.event; | ||||
|     if (!e) | ||||
|         return; | ||||
|  | ||||
|     if (e.preventDefault) | ||||
|         e.preventDefault() | ||||
|  | ||||
|     if (e.stopPropagation) | ||||
|         e.stopPropagation(); | ||||
|  | ||||
|     e.returnValue = false; | ||||
|     return e; | ||||
| } | ||||
|  | ||||
|  | ||||
| // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith | ||||
| if (!String.prototype.endsWith) { | ||||
| @@ -76,30 +91,41 @@ function import_js(url, cb) { | ||||
|  | ||||
|  | ||||
| function sortTable(table, col) { | ||||
|     var tb = table.tBodies[0], // use `<tbody>` to ignore `<thead>` and `<tfoot>` rows | ||||
|     var tb = table.tBodies[0], | ||||
|         th = table.tHead.rows[0].cells, | ||||
|         tr = Array.prototype.slice.call(tb.rows, 0), | ||||
|         i, reverse = th[col].className == 'sort1' ? -1 : 1; | ||||
|         i, reverse = th[col].className.indexOf('sort1') !== -1 ? -1 : 1; | ||||
|     for (var a = 0, thl = th.length; a < thl; a++) | ||||
|         th[a].className = ''; | ||||
|     th[col].className = 'sort' + reverse; | ||||
|         th[a].className = th[a].className.replace(/ *sort-?1 */, " "); | ||||
|     th[col].className += ' sort' + reverse; | ||||
|     var stype = th[col].getAttribute('sort'); | ||||
|     tr = tr.sort(function (a, b) { | ||||
|         if (!a.cells[col]) | ||||
|     var vl = []; | ||||
|     for (var a = 0; a < tr.length; a++) { | ||||
|         var cell = tr[a].cells[col]; | ||||
|         if (!cell) { | ||||
|             vl.push([null, a]); | ||||
|             continue; | ||||
|         } | ||||
|         var v = cell.getAttribute('sortv') || cell.textContent.trim(); | ||||
|         if (stype == 'int') { | ||||
|             v = parseInt(v.replace(/[, ]/g, '')) || 0; | ||||
|         } | ||||
|         vl.push([v, a]); | ||||
|     } | ||||
|     vl.sort(function (a, b) { | ||||
|         a = a[0]; | ||||
|         b = b[0]; | ||||
|         if (a === null) | ||||
|             return -1; | ||||
|         if (!b.cells[col]) | ||||
|         if (b === null) | ||||
|             return 1; | ||||
|  | ||||
|         var v1 = a.cells[col].textContent.trim(); | ||||
|         var v2 = b.cells[col].textContent.trim(); | ||||
|         if (stype == 'int') { | ||||
|             v1 = parseInt(v1.replace(/,/g, '')); | ||||
|             v2 = parseInt(v2.replace(/,/g, '')); | ||||
|             return reverse * (v1 - v2); | ||||
|             return reverse * (a - b); | ||||
|         } | ||||
|         return reverse * (v1.localeCompare(v2)); | ||||
|         return reverse * (a.localeCompare(b)); | ||||
|     }); | ||||
|     for (i = 0; i < tr.length; ++i) tb.appendChild(tr[i]); | ||||
|     for (i = 0; i < tr.length; ++i) tb.appendChild(tr[vl[i][1]]); | ||||
| } | ||||
| function makeSortable(table) { | ||||
|     var th = table.tHead, i; | ||||
| @@ -107,7 +133,8 @@ function makeSortable(table) { | ||||
|     if (th) i = th.length; | ||||
|     else return; // if no `<thead>` then do nothing | ||||
|     while (--i >= 0) (function (i) { | ||||
|         th[i].onclick = function () { | ||||
|         th[i].onclick = function (e) { | ||||
|             ev(e); | ||||
|             sortTable(table, i); | ||||
|         }; | ||||
|     }(i)); | ||||
| @@ -123,16 +150,13 @@ function makeSortable(table) { | ||||
| })(); | ||||
|  | ||||
|  | ||||
| function opclick(ev) { | ||||
|     if (ev) //ie | ||||
|         ev.preventDefault(); | ||||
| function opclick(e) { | ||||
|     ev(e); | ||||
|  | ||||
|     var dest = this.getAttribute('data-dest'); | ||||
|     goto(dest); | ||||
|  | ||||
|     // writing a blank value makes ie8 segfault w | ||||
|     if (window.localStorage) | ||||
|         localStorage.setItem('opmode', dest || '.'); | ||||
|     swrite('opmode', dest || undefined); | ||||
|  | ||||
|     var input = document.querySelector('.opview.act input:not([type="hidden"])') | ||||
|     if (input) | ||||
| @@ -167,11 +191,9 @@ function goto(dest) { | ||||
|  | ||||
| (function () { | ||||
|     goto(); | ||||
|     if (window.localStorage) { | ||||
|         var op = localStorage.getItem('opmode'); | ||||
|         if (op !== null && op !== '.') | ||||
|             goto(op); | ||||
|     } | ||||
|     var op = sread('opmode'); | ||||
|     if (op !== null && op !== '.') | ||||
|         goto(op); | ||||
| })(); | ||||
|  | ||||
|  | ||||
| @@ -225,6 +247,12 @@ function unix2iso(ts) { | ||||
| } | ||||
|  | ||||
|  | ||||
| function s2ms(s) { | ||||
|     var m = Math.floor(s / 60); | ||||
|     return m + ":" + ("0" + (s - m * 60)).slice(-2); | ||||
| } | ||||
|  | ||||
|  | ||||
| function has(haystack, needle) { | ||||
|     for (var a = 0; a < haystack.length; a++) | ||||
|         if (haystack[a] == needle) | ||||
| @@ -232,3 +260,35 @@ function has(haystack, needle) { | ||||
|  | ||||
|     return false; | ||||
| } | ||||
|  | ||||
|  | ||||
| function sread(key) { | ||||
|     if (window.localStorage) | ||||
|         return localStorage.getItem(key); | ||||
|  | ||||
|     return ''; | ||||
| } | ||||
|  | ||||
| function swrite(key, val) { | ||||
|     if (window.localStorage) { | ||||
|         if (val === undefined) | ||||
|             localStorage.removeItem(key); | ||||
|         else | ||||
|             localStorage.setItem(key, val); | ||||
|     } | ||||
| } | ||||
|  | ||||
| function jread(key, fb) { | ||||
|     var str = sread(key); | ||||
|     if (!str) | ||||
|         return fb; | ||||
|  | ||||
|     return JSON.parse(str); | ||||
| } | ||||
|  | ||||
| function jwrite(key, val) { | ||||
|     if (!val) | ||||
|         swrite(key); | ||||
|     else | ||||
|         swrite(key, JSON.stringify(val)); | ||||
| } | ||||
|   | ||||