misc

This commit is contained in:
parent f8a31cc24f
commit 4203fc161b
```diff
@@ -30,7 +30,7 @@ however if your copyparty is behind a reverse-proxy, you may want to use [`share
 * makes it way faster (especially for slow/networked locations (such as copyparty-fuse))
 
 ### [`webdav-basicauth.reg`](webdav-basicauth.reg)
-* enables webdav basic-auth over plaintext http
+* enables webdav basic-auth over plaintext http; takes effect after a reboot OR after running `webdav-unlimit.bat`
 
 ### [`webdav-unlimit.bat`](webdav-unlimit.bat)
 * removes the 47.6 MiB filesize limit when downloading from webdav
```
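for context (not part of this commit): both contrib files presumably just adjust the standard Windows WebClient registry values; `FileSizeLimitInBytes` defaults to 50000000 bytes, which is the 47.6 MiB limit mentioned above, and `BasicAuthLevel=2` permits basic-auth over plaintext http. a rough python `winreg` sketch of that assumption, since the actual `.reg`/`.bat` contents are not shown in this diff:

```python
# illustrative sketch only (not part of this commit); windows-only, needs admin.
# these are the documented WebClient registry values the contrib files presumably set
import winreg

KEY = r"SYSTEM\CurrentControlSet\Services\WebClient\Parameters"

with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, KEY, 0, winreg.KEY_SET_VALUE) as k:
    # BasicAuthLevel=2 allows basic-auth over plaintext http (what the .reg toggles)
    winreg.SetValueEx(k, "BasicAuthLevel", 0, winreg.REG_DWORD, 2)
    # default is 50000000 bytes (~47.6 MiB); raise it to the DWORD max
    winreg.SetValueEx(k, "FileSizeLimitInBytes", 0, winreg.REG_DWORD, 0xFFFFFFFF)

# the WebClient service must be restarted (or the machine rebooted)
# before either value takes effect
```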
```diff
@@ -2968,7 +2968,9 @@ class HttpCli(object):
             self.log("wrong filekey, want {}, got {}".format(correct, got))
             return self.tx_404()
 
-        if abspath.endswith(".md") and "v" in self.uparam:
+        if abspath.endswith(".md") and (
+            "v" in self.uparam or "edit" in self.uparam or "edit2" in self.uparam
+        ):
             return self.tx_md(abspath)
 
         return self.tx_file(abspath)
```
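in effect, a `.md` file is now handed to the markdown renderer/editor when the url carries `?edit` or `?edit2`, not only `?v`. a tiny standalone sketch of that dispatch (hypothetical helper, not copyparty code), assuming `uparam` is the dict of parsed query parameters:

```python
# hypothetical helper illustrating the condition above, not the real code path
def pick_handler(abspath: str, uparam: dict) -> str:
    if abspath.endswith(".md") and (
        "v" in uparam or "edit" in uparam or "edit2" in uparam
    ):
        return "tx_md"  # render / edit the markdown document
    return "tx_file"    # serve the file as-is

assert pick_handler("/srv/readme.md", {"edit": ""}) == "tx_md"
assert pick_handler("/srv/readme.md", {}) == "tx_file"
```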
```diff
@@ -2991,12 +2991,15 @@ class Up2k(object):
             if x["need"] and now - x["poke"] > self.snap_discard_interval
         ]
 
-        lost = [
-            x
-            for x in reg.values()
-            if x["need"]
-            and not bos.path.exists(os.path.join(x["ptop"], x["prel"], x["name"]))
-        ]
+        if self.args.nw:
+            lost = []
+        else:
+            lost = [
+                x
+                for x in reg.values()
+                if x["need"]
+                and not bos.path.exists(os.path.join(x["ptop"], x["prel"], x["name"]))
+            ]
 
         if rm or lost:
             t = "dropping {} abandoned, {} deleted uploads in {}"
```
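the new guard presumably exists because `-nw` (no-write mode) never puts uploads on disk, so the existence check would count every pending upload as deleted. a standalone sketch of the idea (hypothetical helper, not copyparty's actual code):

```python
# hypothetical helper: entries still needing chunks ("need") whose target file
# is gone from disk are considered lost; with the no-write flag set, nothing is
# ever written, so the scan would flag everything and is skipped instead
import os

def find_lost(reg: dict, no_write: bool) -> list:
    if no_write:
        return []
    return [
        x
        for x in reg.values()
        if x["need"]
        and not os.path.exists(os.path.join(x["ptop"], x["prel"], x["name"]))
    ]
```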
```diff
@@ -41,7 +41,7 @@ as a result, the hashes are much less useful than they could have been (search t
 
 however it allows for hashing multiple chunks in parallel, greatly increasing upload speed from fast storage (NVMe, raid-0 and such)
 
-* both the [browser uploader](#uploading) and the [commandline one](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) does this now, allowing for fast uploading even from plaintext http
+* both the [browser uploader](https://github.com/9001/copyparty#uploading) and the [commandline one](https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py) does this now, allowing for fast uploading even from plaintext http
 
 hashwasm would solve the streaming issue but reduces hashing speed for sha512 (xxh128 does 6 GiB/s), and it would make old browsers and [iphones](https://bugs.webkit.org/show_bug.cgi?id=228552) unsupported
 
```
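a minimal sketch of the parallel chunk-hashing idea described above; the chunk sizing, hash truncation and encoding that up2k actually uses live in up2k.py and differ from this illustration:

```python
# illustrative sketch only: hash fixed-size chunks of a file in parallel.
# plain threads are enough because hashlib releases the GIL for large buffers
import hashlib
import os
from concurrent.futures import ThreadPoolExecutor

CHUNK = 16 * 1024 * 1024  # illustrative chunk size, not up2k's actual one

def hash_chunk(path: str, ofs: int, sz: int) -> str:
    with open(path, "rb") as f:
        f.seek(ofs)
        return hashlib.sha512(f.read(sz)).hexdigest()

def hash_file(path: str) -> list:
    total = os.path.getsize(path)
    offsets = range(0, total, CHUNK)
    with ThreadPoolExecutor(max_workers=4) as ex:
        futs = [
            ex.submit(hash_chunk, path, o, min(CHUNK, total - o)) for o in offsets
        ]
        return [f.result() for f in futs]
```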