allow uploading logues; closes #100
parent 2715ee6c61
commit 19a5985f29
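As the hunks below show, sanitize_fn() and sanitize_vpath() lose their third parameter, the list of reserved filenames that previously got an underscore prefix, and every caller (FtpFs, HttpCli, gen_hdr, Up2k) is updated to match; uploads named .prologue.html or .epilogue.html, i.e. the logues, therefore keep their names. The typical call-site change, lifted straight from the diff:

fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"])  # before
fn = sanitize_fn(fn or "", "")                                        # after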
@@ -2,7 +2,7 @@
 from __future__ import print_function, unicode_literals
 
 # awk -F\" '/add_argument\("-[^-]/{print(substr($2,2))}' copyparty/__main__.py | sort | tr '\n' ' '
-zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nw p q s ss sss v z zv"
+zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nth nw p q s ss sss v z zv"
 onedash = set(zs.split())
 
 
@@ -163,7 +163,7 @@ class FtpFs(AbstractedFS):
 t = "Unsupported characters in [{}]"
 raise FSE(t.format(vpath), 1)
 
-fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"])
+fn = sanitize_fn(fn or "", "")
 vpath = vjoin(rd, fn)
 vfs, rem = self.hub.asrv.vfs.get(vpath, self.uname, r, w, m, d)
 if not vfs.realpath:
@@ -1804,7 +1804,7 @@ class HttpCli(object):
 if rnd:
     fn = rand_name(fdir, fn, rnd)
 
-fn = sanitize_fn(fn or "", "", [".prologue.html", ".epilogue.html"])
+fn = sanitize_fn(fn or "", "")
 
 path = os.path.join(fdir, fn)
 
@@ -2547,7 +2547,7 @@ class HttpCli(object):
 self.gctx = vpath
 vpath = undot(vpath)
 vfs, rem = self.asrv.vfs.get(vpath, self.uname, False, True)
-rem = sanitize_vpath(rem, "/", [])
+rem = sanitize_vpath(rem, "/")
 fn = vfs.canonical(rem)
 if not fn.startswith(vfs.realpath):
     self.log("invalid mkdir [%s] [%s]" % (self.gctx, vpath), 1)
@@ -2594,7 +2594,7 @@ class HttpCli(object):
 if not ext or len(ext) > 5 or not self.can_delete:
     new_file += ".md"
 
-sanitized = sanitize_fn(new_file, "", [])
+sanitized = sanitize_fn(new_file, "")
 
 if not nullwrite:
     fdir = vfs.canonical(rem)
@@ -2673,9 +2673,7 @@ class HttpCli(object):
 # fallthrough
 
 fdir = fdir_base
-fname = sanitize_fn(
-    p_file or "", "", [".prologue.html", ".epilogue.html"]
-)
+fname = sanitize_fn(p_file or "", "")
 abspath = os.path.join(fdir, fname)
 suffix = "-%.6f-%s" % (time.time(), dip)
 if p_file and not nullwrite:
@@ -105,7 +105,7 @@ def gen_hdr(
 ret += spack(b"<LL", vsz, vsz)
 
 # windows support (the "?" replace below too)
-fn = sanitize_fn(fn, "/", [])
+fn = sanitize_fn(fn, "/")
 bfn = fn.encode("utf-8" if utf8 else "cp437", "replace").replace(b"?", b"_")
 
 # add ntfs (0x24) and/or unix (0x10) extrafields for utc, add z64 if requested
@@ -2777,7 +2777,7 @@ class Up2k(object):
 if ptop not in self.registry:
     raise Pebkac(410, "location unavailable")
 
-cj["name"] = sanitize_fn(cj["name"], "", [".prologue.html", ".epilogue.html"])
+cj["name"] = sanitize_fn(cj["name"], "")
 cj["poke"] = now = self.db_act = self.vol_act[ptop] = time.time()
 wark = self._get_wark(cj)
 job = None
@@ -1982,13 +1982,10 @@ def undot(path: str) -> str:
     return "/".join(ret)
 
 
-def sanitize_fn(fn: str, ok: str, bad: list[str]) -> str:
+def sanitize_fn(fn: str, ok: str) -> str:
     if "/" not in ok:
         fn = fn.replace("\\", "/").split("/")[-1]
 
-    if fn.lower() in bad:
-        fn = "_" + fn
-
     if ANYWIN:
         remap = [
             ["<", "＜"],
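The block removed just above is what enforced the old blocklist: any filename whose lowercased form appeared in bad was prefixed with an underscore before being stored. A minimal sketch of that now-removed behavior, using the blocklist the call sites used to pass in:

# behavior before this commit: logue names got an underscore prefix
bad = [".prologue.html", ".epilogue.html"]
fn = ".prologue.html"
if fn.lower() in bad:
    fn = "_" + fn
print(fn)  # -> "_.prologue.html"; after this commit the name is kept as-is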
@@ -2014,9 +2011,9 @@ def sanitize_fn(fn: str, ok: str, bad: list[str]) -> str:
     return fn.strip()
 
 
-def sanitize_vpath(vp: str, ok: str, bad: list[str]) -> str:
+def sanitize_vpath(vp: str, ok: str) -> str:
     parts = vp.replace(os.sep, "/").split("/")
-    ret = [sanitize_fn(x, ok, bad) for x in parts]
+    ret = [sanitize_fn(x, ok) for x in parts]
     return "/".join(ret)
 
 
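Taken together, the simplified helpers behave roughly like the standalone sketch below, built only from the lines visible in these hunks; the ANYWIN character remapping and anything else outside the hunks is omitted:

import os


def sanitize_fn(fn: str, ok: str) -> str:
    # keep only the last path component unless "/" is explicitly allowed
    if "/" not in ok:
        fn = fn.replace("\\", "/").split("/")[-1]

    # (the full implementation also remaps reserved characters on Windows)
    return fn.strip()


def sanitize_vpath(vp: str, ok: str) -> str:
    # sanitize each component of a virtual path, then rejoin with "/"
    parts = vp.replace(os.sep, "/").split("/")
    ret = [sanitize_fn(x, ok) for x in parts]
    return "/".join(ret)


# with the blocklist gone, logue filenames pass through unchanged:
print(sanitize_fn(".prologue.html", ""))           # -> .prologue.html
print(sanitize_vpath("docs/.epilogue.html", "/"))  # -> docs/.epilogue.html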