Compare commits

...

9 Commits

Author SHA1 Message Date
ed
d0bb1ad141 v1.4.3 2022-09-26 22:37:54 +02:00
ed
b299aaed93 fix some cases of deleted files not being forgotten 2022-09-26 22:19:46 +02:00
ed
abb3224cc5 option to save a copy of corrupted uploads 2022-09-26 22:01:49 +02:00
ed
1c66d06702 cleanup versionchecks 2022-09-25 21:31:47 +02:00
ed
e00e80ae39 v1.4.2 2022-09-25 14:36:10 +02:00
ed
4f4f106c48 add ability to forget uploads by deleting the files 2022-09-25 14:24:01 +02:00
ed
a286cc9d55 fix printing big unicode messages 2022-09-25 14:04:35 +02:00
ed
53bb1c719b fix huge-filename trunc on ubuntu-20.04 zfs 2022-09-25 14:00:11 +02:00
ed
98d5aa17e2 notes on dealing with bitflips 2022-09-24 22:41:00 +02:00
14 changed files with 248 additions and 53 deletions

View File

@@ -1036,7 +1036,9 @@ quick outline of the up2k protocol, see [uploading](#uploading) for the web-clie
* server writes chunks into place based on the hash
* client does another handshake with the hashlist; server replies with OK or a list of chunks to reupload
up2k has saved a few uploads from becoming corrupted in-transfer already; caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well (thanks to tls also functioning as an integrity check)
up2k has saved a few uploads from becoming corrupted in-transfer already;
* caught an android phone on wifi redhanded in wireshark with a bitflip, however bup with https would *probably* have noticed as well (thanks to tls also functioning as an integrity check)
* also stopped someone from uploading because their ram was bad
regarding the frequent server log message during uploads;
`6.0M 106M/s 2.77G 102.9M/s n948 thank 4/0/3/1 10042/7198 00:01:09`
@@ -1072,6 +1074,7 @@ below are some tweaks roughly ordered by usefulness:
* `--http-only` or `--https-only` (unless you want to support both protocols) will reduce the delay before a new connection is established
* `--hist` pointing to a fast location (ssd) will make directory listings and searches faster when `-e2d` or `-e2t` is set
* `--no-hash .` when indexing a network-disk if you don't care about the actual filehashes and only want the names/tags searchable
* `--no-htp --hash-mt=0 --th-mt=1` minimizes the number of threads; can help in some eccentric environments (like the vscode debugger)
* `-j` enables multiprocessing (actual multithreading) and can make copyparty perform better in cpu-intensive workloads, for example:
* huge amount of short-lived connections
* really heavy traffic (downloads/uploads)

View File

@@ -48,7 +48,7 @@ except ImportError:
# from copyparty/__init__.py
PY2 = sys.version_info[0] == 2
PY2 = sys.version_info < (3,)
if PY2:
from Queue import Queue
from urllib import unquote

View File

@@ -11,7 +11,7 @@ try:
except:
TYPE_CHECKING = False
PY2 = sys.version_info[0] == 2
PY2 = sys.version_info < (3,)
if PY2:
sys.dont_write_bytecode = True
unicode = unicode # noqa: F821 # pylint: disable=undefined-variable,self-assigning-variable

View File

@@ -750,6 +750,9 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second, for example --stackmon=./st/%%Y-%%m/%%d/%%H%%M.xz,60")
ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")
ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches REGEX; '.' (a single dot) = all files")
ap2.add_argument("--bak-flips", action="store_true", help="[up2k] if a client uploads a bitflipped/corrupted chunk, store a copy according to --bf-nc and --bf-dir")
ap2.add_argument("--bf-nc", metavar="NUM", type=int, default=200, help="bak-flips: stop if there's more than NUM files at --bf-dir already; default: 6.3 GiB max (200*32M)")
ap2.add_argument("--bf-dir", metavar="PATH", type=u, default="bf", help="bak-flips: store corrupted chunks at PATH; default: folder named 'bf' wherever copyparty was started")
# fmt: on
ap2 = ap.add_argument_group("help sections")

View File

@@ -1,8 +1,8 @@
# coding: utf-8
VERSION = (1, 4, 1)
VERSION = (1, 4, 3)
CODENAME = "mostly reliable"
BUILD_DT = (2022, 9, 24)
BUILD_DT = (2022, 9, 26)
S_VERSION = ".".join(map(str, VERSION))
S_BUILD_DT = "{0:04d}-{1:02d}-{2:02d}".format(*BUILD_DT)

View File

@@ -82,6 +82,7 @@ from .util import (
try:
from typing import Any, Generator, Match, Optional, Pattern, Type, Union
import typing
except:
pass
@@ -710,7 +711,13 @@ class HttpCli(object):
self.log("urlform: {} bytes, {}".format(post_sz, path))
elif "print" in opt:
reader, _ = self.get_body_reader()
for buf in reader:
buf = b""
for rbuf in reader:
buf += rbuf
if not rbuf or len(buf) >= 32768:
break
if buf:
orig = buf.decode("utf-8", "replace")
t = "urlform_raw {} @ {}\n {}\n"
self.log(t.format(len(orig), self.vpath, orig))
@@ -914,6 +921,38 @@ class HttpCli(object):
self.reply(t.encode("utf-8"))
return True
def bakflip(self, f: typing.BinaryIO, ofs: int, sz: int, sha: str) -> None:
if not self.args.bak_flips or self.args.nw:
return
sdir = self.args.bf_dir
fp = os.path.join(sdir, sha)
if bos.path.exists(fp):
return self.log("no bakflip; have it", 6)
if not bos.path.isdir(sdir):
bos.makedirs(sdir)
if len(bos.listdir(sdir)) >= self.args.bf_nc:
return self.log("no bakflip; too many", 3)
nrem = sz
f.seek(ofs)
with open(fp, "wb") as fo:
while nrem:
buf = f.read(min(nrem, 512 * 1024))
if not buf:
break
nrem -= len(buf)
fo.write(buf)
if nrem:
self.log("bakflip truncated; {} remains".format(nrem), 1)
atomic_move(fp, fp + ".trunc")
else:
self.log("bakflip ok", 2)
def rand_name(self, fdir: str, fn: str, rnd: int) -> str:
ok = False
try:
@@ -1171,6 +1210,11 @@ class HttpCli(object):
post_sz, _, sha_b64 = hashcopy(reader, f, self.args.s_wr_slp)
if sha_b64 != chash:
try:
self.bakflip(f, cstart[0], post_sz, sha_b64)
except:
self.log("bakflip failed: " + min_ex())
t = "your chunk got corrupted somehow (received {} bytes); expected vs received hash:\n{}\n{}"
raise Pebkac(400, t.format(post_sz, chash, sha_b64))

View File

@@ -16,7 +16,7 @@ import codecs
import platform
import sys
PY3 = sys.version_info[0] > 2
PY3 = sys.version_info > (3,)
WINDOWS = platform.system() == "Windows"
FS_ERRORS = "surrogateescape"
@@ -26,20 +26,6 @@ except:
pass
def u(text: Any) -> str:
if PY3:
return text
else:
return text.decode("unicode_escape")
def b(data: Any) -> bytes:
if PY3:
return data.encode("latin1")
else:
return data
if PY3:
_unichr = chr
bytes_chr = lambda code: bytes((code,))
@@ -171,9 +157,6 @@ def decodefilename(fn: bytes) -> str:
FS_ENCODING = sys.getfilesystemencoding()
# FS_ENCODING = "ascii"; fn = b("[abc\xff]"); encoded = u("[abc\udcff]")
# FS_ENCODING = 'cp932'; fn = b('[abc\x81\x00]'); encoded = u('[abc\udc81\x00]')
# FS_ENCODING = 'UTF-8'; fn = b('[abc\xff]'); encoded = u('[abc\udcff]')
if WINDOWS and not PY3:

View File

@@ -24,7 +24,7 @@ try:
except:
pass
from .__init__ import ANYWIN, MACOS, PY2, VT100, WINDOWS, EnvParams, unicode
from .__init__ import ANYWIN, MACOS, VT100, EnvParams, unicode
from .authsrv import AuthSrv
from .mtag import HAVE_FFMPEG, HAVE_FFPROBE
from .tcpsrv import TcpSrv
@@ -480,17 +480,10 @@ class SvcHub(object):
print(*a, **ka)
def check_mp_support(self) -> str:
vmin = sys.version_info[1]
if WINDOWS:
msg = "need python 3.3 or newer for multiprocessing;"
if PY2 or vmin < 3:
return msg
elif MACOS:
if MACOS:
return "multiprocessing is wonky on mac osx;"
else:
msg = "need python 3.3+ for multiprocessing;"
if PY2 or vmin < 3:
return msg
elif sys.version_info < (3, 3):
return "need python 3.3 or newer for multiprocessing;"
try:
x: mp.Queue[tuple[str, str]] = mp.Queue(1)

View File

@@ -45,6 +45,7 @@ from .util import (
s3dec,
s3enc,
sanitize_fn,
spack,
statdir,
vjoin,
vsplit,
@@ -689,10 +690,8 @@ class Up2k(object):
rei = vol.flags.get("noidx")
reh = vol.flags.get("nohash")
n4g = bool(vol.flags.get("noforget"))
dev = 0
if vol.flags.get("xdev"):
dev = bos.stat(top).st_dev
cst = bos.stat(top)
dev = cst.st_dev if vol.flags.get("xdev") else 0
with self.mutex:
reg = self.register_vpath(top, vol.flags)
@@ -728,6 +727,7 @@ class Up2k(object):
reh,
n4g,
[],
cst,
dev,
bool(vol.flags.get("xvol")),
)
@@ -764,6 +764,7 @@ class Up2k(object):
reh: Optional[Pattern[str]],
n4g: bool,
seen: list[str],
cst: os.stat_result,
dev: int,
xvol: bool,
) -> int:
@@ -818,7 +819,7 @@ class Up2k(object):
# self.log(" dir: {}".format(abspath))
try:
ret += self._build_dir(
db, top, excl, abspath, rap, rei, reh, n4g, seen, dev, xvol
db, top, excl, abspath, rap, rei, reh, n4g, seen, inf, dev, xvol
)
except:
t = "failed to index subdir [{}]:\n{}"
@@ -851,6 +852,7 @@ class Up2k(object):
zh = hashlib.sha1()
_ = [zh.update(str(x).encode("utf-8", "replace")) for x in files]
zh.update(spack(b"<d", cst.st_mtime))
dhash = base64.urlsafe_b64encode(zh.digest()[:12]).decode("ascii")
sql = "select d from dh where d = ? and h = ?"
try:
@@ -941,25 +943,25 @@ class Up2k(object):
return -1
# drop shadowed folders
for rd in unreg:
for sh_rd in unreg:
n = 0
q = "select count(w) from up where (rd = ? or rd like ?||'%') and at == 0"
for erd in [rd, "//" + w8b64enc(rd)]:
for sh_erd in [sh_rd, "//" + w8b64enc(sh_rd)]:
try:
n = db.c.execute(q, (erd, erd + "/")).fetchone()[0]
n = db.c.execute(q, (sh_erd, sh_erd + "/")).fetchone()[0]
break
except:
pass
if n:
t = "forgetting {} shadowed autoindexed files in [{}] > [{}]"
self.log(t.format(n, top, rd))
self.log(t.format(n, top, sh_rd))
q = "delete from dh where (d = ? or d like ?||'%')"
db.c.execute(q, (erd, erd + "/"))
db.c.execute(q, (sh_erd, sh_erd + "/"))
q = "delete from up where (rd = ? or rd like ?||'%') and at == 0"
db.c.execute(q, (erd, erd + "/"))
db.c.execute(q, (sh_erd, sh_erd + "/"))
ret += n
if n4g:
@@ -1924,6 +1926,7 @@ class Up2k(object):
reg = self.registry[cj["ptop"]]
vfs = self.asrv.vfs.all_vols[cj["vtop"]]
n4g = vfs.flags.get("noforget")
lost: list[tuple[str, str]] = []
if cur:
if self.no_expr_idx:
q = r"select * from up where w = ?"
@@ -1948,6 +1951,7 @@ class Up2k(object):
if n4g:
st = os.stat_result((0, -1, -1, 0, 0, 0, 0, 0, 0, 0))
else:
lost.append((dp_dir, dp_fn))
continue
j = {
@@ -1980,6 +1984,12 @@ class Up2k(object):
# self.log("pop " + wark + " " + job["name"] + " handle_json db", 4)
del reg[wark]
if lost:
for dp_dir, dp_fn in lost:
self.db_rm(cur, dp_dir, dp_fn)
cur.connection.commit()
if job or wark in reg:
job = job or reg[wark]
if job["prel"] == cj["prel"] and job["name"] == cj["name"]:
@@ -2975,8 +2985,18 @@ class Up2k(object):
for x in reg.values()
if x["need"] and now - x["poke"] > self.snap_discard_interval
]
if rm:
t = "dropping {} abandoned uploads in {}".format(len(rm), ptop)
lost = [
x
for x in reg.values()
if x["need"]
and not bos.path.exists(os.path.join(x["ptop"], x["prel"], x["name"]))
]
if rm or lost:
t = "dropping {} abandoned, {} deleted uploads in {}"
t = t.format(len(rm), len(lost), ptop)
rm.extend(lost)
vis = [self._vis_job_progress(x) for x in rm]
self.log("\n".join([t] + vis))
for job in rm:
@@ -2986,7 +3006,10 @@ class Up2k(object):
path = os.path.join(job["ptop"], job["prel"], job["name"])
if bos.path.getsize(path) == 0:
bos.unlink(path)
except:
pass
try:
if len(job["hash"]) == len(job["need"]):
# PARTIAL is empty, delete that too
path = os.path.join(job["ptop"], job["prel"], job["tnam"])

View File

@@ -120,7 +120,7 @@ else:
FS_ENCODING = sys.getfilesystemencoding()
SYMTIME = sys.version_info >= (3, 6) and os.utime in os.supports_follow_symlinks
SYMTIME = sys.version_info > (3, 6) and os.utime in os.supports_follow_symlinks
HTTP_TS_FMT = "%a, %d %b %Y %H:%M:%S GMT"
@@ -1022,7 +1022,7 @@ def ren_open(
]
continue
if ex.errno not in [36, 63] and (not WINDOWS or ex.errno != 22):
if ex.errno not in [36, 63, 95] and (not WINDOWS or ex.errno != 22):
raise
if not b64:
@@ -1786,7 +1786,7 @@ def yieldfile(fn: str) -> Generator[bytes, None, None]:
def hashcopy(
fin: Union[typing.BinaryIO, Generator[bytes, None, None]],
fin: Generator[bytes, None, None],
fout: Union[typing.BinaryIO, typing.IO[Any]],
slp: int = 0,
max_sz: int = 0,

View File

@@ -1,3 +1,123 @@
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0925-1236 `v1.4.2` fuhgeddaboudit
* read-only demo server at https://a.ocv.me/pub/demo/
* latest gzip edition of the sfx: [v1.0.14](https://github.com/9001/copyparty/releases/tag/v1.0.14#:~:text=release-specific%20notes)
## new features
* forget incoming uploads by deleting the name-reservation
* (the zerobyte file with the actual filename, not the .PARTIAL)
* can take 5min to kick in
## bugfixes
* zfs on ubuntu 20.04 would reject files with big unicode names such as `148. Профессор Лебединский, Виктор Бондарюк, Дмитрий Нагиев - Я её хой (Я танцую пьяный на столе) (feat. Виктор Бондарюк & Дмитрий Нагиев).mp3`
* usually not a problem since copyparty truncates names to fit filesystem limits, except zfs uses a nonstandard errorcode
* in the "print-message-to-serverlog" feature, a unicode message larger than one tcp-frame could decode incorrectly
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0924-1245 `v1.4.1` fix api compat
* read-only demo server at https://a.ocv.me/pub/demo/
* latest gzip edition of the sfx: [v1.0.14](https://github.com/9001/copyparty/releases/tag/v1.0.14#:~:text=release-specific%20notes)
## bugfixes
* [v1.4.0](https://github.com/9001/copyparty/releases/tag/v1.4.0) accidentally required all clients to use the new up2k.js to continue uploading; support the old js too
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0923-2053 `v1.4.0` mostly reliable
* read-only demo server at https://a.ocv.me/pub/demo/
* latest gzip edition of the sfx: [v1.0.14](https://github.com/9001/copyparty/releases/tag/v1.0.14#:~:text=release-specific%20notes)
## new features
* huge folders are lazily rendered for a massive speedup, #11
* also reduces the number of `?tree` requests; helps a tiny bit on server load
* [selfdestruct timer](https://github.com/9001/copyparty#self-destruct) on uploaded files -- see link for howto and side-effects
* ban clients trying to bruteforce passwords
* arg `--ban-pw`, default `9,60,1440`, bans for 1440min after 9 wrong passwords in 60min
* clients repeatedly trying the same password (due to a bug or whatever) are not counted
* does a `/64` range-ban for IPv6 offenders
* arg `--ban-404`, disabled by default, bans for excessive 404s / directory-scanning
* but that breaks up2k turbo-mode and probably some other eccentric usecases
* waveform seekbar [(screenshot)](https://user-images.githubusercontent.com/241032/192042695-522b3ec7-6845-494a-abdb-d1c0d0e23801.png)
* the up2k upload button can do folders recursively now
* but only a single folder can be selected at a time, making drag-drop the obvious choice still
* gridview is now less jank, #12
* togglebuttons for desktop-notifications and audio-jingle when upload completes
* stop exposing uploader IPs when avoiding filename collisions
* IPs are now HMAC'ed with urandom stored at `~/.config/copyparty/iphash`
* stop crashing chrome; generate PNGs rather than SVGs for filetype icons
* terminate connections with SHUT_WR and flush with siocoutq
* makes buggy enterprise proxies behave less buggy
* do a read-spin on windows for almost the same effect
* improved upload scheduling
* unfortunately removes the `0.0%, NaN:aN, N.aN MB/s` easteregg
* arg `--magic` enables filetype detection on nameless uploads based on libmagic
* mtp modifiers to let tagparsers keep their stdout/stderr instead of capturing
* `c0` disables all capturing, `c1` captures stdout only, `c2` only stderr, and `c3` (default) captures both
* arg `--write-uplog` enables the old default of writing upload reports on POSTs
* kinda pointless and was causing issues in prisonparty
* [upload modifiers](https://github.com/9001/copyparty#write) for terse replies and to randomize filenames
* other optimizations
* 30% faster tag collection on directory listings
* 8x faster rendering of huge tagsets
* new mtps [guestbook](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/guestbook.py) and [guestbook-read](https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/guestbook-read.py), for example for comment-fields on uploads
* arg `--stackmon` now takes dateformat filenames to produce multiple files
* arg `--mtag-vv` to debug tagparser configs
* arg `--version` shows copyparty version and exits
* arg `--license` shows a list of embedded dependencies + their licenses
* arg `--no-forget` and volflag `:c,noforget` keeps deleted files in the up2k db/index
* useful if you're shuffling uploads to s3/gdrive/etc and still want deduplication
## bugfixes
* upload deduplication using symlinks on windows
* increase timeouts to run better on servers with extremely overloaded HDDs
* arg `--mtag-to` (default 60 sec, was 10) can be reduced for faster tag scanning
* incorrect filekeys for files symlinked into another volume
* playback could start mid-song if skipping back and forth between songs
* use affinity mask to determine how many CPU cores are available
* restore .bin-suffix for nameless PUT/POSTs (disappeared in v1.0.11)
* fix glitch in uploader-UI when upload queue is bigger than 1 TiB
* avoid a firefox race-condition accessing the navigation history
* sfx tmpdir keepalive when flipflopping between unix users
* reject anon ftp if anon has no read/write
* improved autocorrect for poor ffmpeg builds
* patch popen on older pythons so collecting tags on windows is always possible
* misc ui/ux fixes
* filesearch layout in read-only folders
* more comfy fadein/fadeout on play/pause
* total-ETA going crazy when an overloaded server drops requests
* stop trying to play into the next folder while in search results
* improve warnings/errors in the uploader ui
* some errors which should have been warnings are now warnings
* autohide warnings/errors when they are remedied
* delay starting the audiocontext until necessary
* reduces cpu-load by 0.2% and fixes chrome claiming the tab is playing audio
## copyparty.exe
now introducing [copyparty.exe](https://github.com/9001/copyparty/releases/download/v1.4.0/copyparty.exe)!   only suitable for the rainiest of days ™
[first thing you'll see](https://user-images.githubusercontent.com/241032/192070274-bfe0bfef-2293-40fc-8852-fcf4f7a90043.png) when you run it is a warning to **«please use the [python-sfx](https://github.com/9001/copyparty/releases/latest/download/copyparty-sfx.py) instead»**,
* `copyparty.exe` was compiled using 32bit python3.7 to support windows7, meaning it won't receive any security patches
* `copyparty-sfx.py` uses your system libraries instead so it'll stay safe for much longer while also having better performance
so the exe might be super useful in a pinch on a secluded LAN but otherwise *Absolutely Not Recommended*
you can download [ffmpeg](https://ocv.me/stuff/bin/ffmpeg.exe) and [ffprobe](https://ocv.me/stuff/bin/ffprobe.exe) into the same folder if you want multimedia-info, audio-transcoding or thumbnails/spectrograms/waveforms -- those binaries were [built](https://github.com/9001/copyparty/tree/hovudstraum/scripts/pyinstaller#ffmpeg) with just enough features to cover what copyparty wants, but much like copyparty.exe itself (so due to security reasons) it is strongly recommended to instead grab a [recent official build](https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-win64-gpl.zip) every once in a while
## and finally some good news
* the chrome memory leak will be [fixed in v107](https://bugs.chromium.org/p/chromium/issues/detail?id=1354816)
* and firefox may fix the crash in [v106 or so](https://bugzilla.mozilla.org/show_bug.cgi?id=1790500)
* and the release title / this season's codename stems from a cpp instance recently being slammed with terabytes of uploads running on a struggling server mostly without breaking a sweat 👍
▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀▀
# 2022-0818-1724 `v1.3.16` gc kiting

View File

@@ -143,6 +143,31 @@ sqlite3 -readonly up2k.db.key-full 'select w, v from mt where k = "key" order by
sqlite3 -readonly up2k.db.key-full 'select w, v from mt where k = "key" order by w' > k1; sqlite3 -readonly up2k.db 'select mt.w, mt.v, up.rd, up.fn from mt inner join up on mt.w = substr(up.w,1,16) where mt.k = "key" order by up.rd, up.fn' > k2; ok=0; ng=0; while IFS='|' read w k2 path; do k1="$(grep -E "^$w" k1 | sed -r 's/.*\|//')"; [ "$k1" = "$k2" ] && ok=$((ok+1)) || { ng=$((ng+1)); printf '%3s %3s %s\n' "$k1" "$k2" "$path"; }; done < <(cat k2); echo "match $ok diff $ng"
##
## tracking bitflips
l=log.tmux-1662316902 # your logfile (tmux-capture or decompressed -lo)
# grab handshakes to a smaller logfile
tr -d '\r' <$l | awk '/^.\[36m....-..-...\[0m.?$/{d=substr($0,6,10)} !d{next} /"purl": "/{t=substr($1,6);sub(/[^ ]+ /,"");sub(/ .\[34m[0-9]+ /," ");printf("%s %s %s %s\n",d,t,ip,$0)}' | while read d t ip f; do u=$(date +%s --date="${d}T${t}Z"); printf '%s\n' "$u $ip $f"; done > handshakes
# quick list of affected files
grep 'your chunk got corrupted somehow' -A1 $l | tr -d '\r' | grep -E '^[a-zA-Z0-9_-]{44}$' | sort | uniq | while IFS= read -r x; do grep -F "$x" handshakes | head -c 200; echo; done | sed -r 's/.*"name": "//' | sort | uniq -cw20
# find all cases of corrupt chunks and print their respective handshakes (if any),
# timestamps are when the corrupted chunk was received (and also the order they are displayed),
# first checksum is the expected value from the handshake, second is what got uploaded
awk <$l '/^.\[36m....-..-...\[0m.?$/{d=substr($0,6,10)} /your chunk got corrupted somehow/{n=2;t=substr($1,6);next} !n{next} {n--;sub(/\r$/,"")} n{a=$0;next} {sub(/.\[0m,.*/,"");printf "%s %s %s %s\n",d,t,a,$0}' |
while read d t h1 h2; do printf '%s %s\n' $d $t; (
printf ' %s [%s]\n' $h1 "$(grep -F $h1 <handshakes | head -n 1)"
printf ' %s [%s]\n' $h2 "$(grep -F $h2 <handshakes | head -n 1)"
) | sed 's/, "sprs":.*//'; done | less -R
# notes; TODO clean up and put in the readme maybe --
# quickest way to drop the bad files (if a client generated bad hashes for the initial handshake) is shutting down copyparty and moving aside the unfinished file (both the .PARTIAL and the empty placeholder)
# BUT the clients will immediately re-handshake the upload with the same bitflipped hashes, so the uploaders have to refresh their browsers before you do that,
# so maybe just ask them to refresh and do nothing for 6 hours so the timeout kicks in, which deletes the placeholders/name-reservations and you can then manually delete the .PARTIALs at some point later
##
## media

View File

@@ -224,7 +224,6 @@ tmpdir="$(
# remove type hints before build instead
(cd copyparty; "$pybin" ../../scripts/strip_hints/a.py; rm uh)
}
f=../build/mit.txt
[ -e $f ] ||
@@ -266,6 +265,8 @@ while IFS= read -r x; do
printf "%${p}s\033[07m%s\033[0m\n" "" "$x"
done > copyparty/res/COPYING.txt
}
ver=
[ -z "$repack" ] &&
git describe --tags >/dev/null 2>/dev/null && {

View File

@@ -27,7 +27,7 @@ SIZE = None
CKSUM = None
STAMP = None
PY2 = sys.version_info[0] == 2
PY2 = sys.version_info < (3,)
WINDOWS = sys.platform in ["win32", "msys"]
sys.dont_write_bytecode = True
me = os.path.abspath(os.path.realpath(__file__))