mirror of https://github.com/9001/copyparty.git (synced 2025-08-17 09:02:15 -06:00)

commit 1c3894743a (parent 75cdf17df4)

    fix filekeys inside symlinked volumes

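Filekeys are derived from a file's absolute filesystem path (together with the salt, size and inode). The hunks below suggest the bug: some call sites expanded paths with bos.path.abspath or a plain os.path.join while others resolved symlinks, so a file reached through a symlinked volume root could be issued a key that never validates. The sketch below is an illustration only (the hash is a simplified stand-in for gen_filekey, and the paths are made up); it shows how an unresolved and a resolved path mint different keys for the same file on a POSIX system:

import hashlib
import os
import tempfile

td = tempfile.mkdtemp()
real = os.path.join(td, "storage")
link = os.path.join(td, "vol")
os.mkdir(real)
os.symlink(real, link)  # the volume root is a symlink (POSIX only)
open(os.path.join(real, "song.mp3"), "w").close()

def toy_filekey(salt: str, fspath: str) -> str:
    # stand-in for gen_filekey: any hash over the path has the same problem
    return hashlib.sha512((salt + "\n" + fspath).encode()).hexdigest()[:8]

p1 = os.path.abspath(os.path.join(link, "song.mp3"))   # .../vol/song.mp3
p2 = os.path.realpath(os.path.join(link, "song.mp3"))  # .../storage/song.mp3
print(toy_filekey("salt", p1) == toy_filekey("salt", p2))  # False -> key minted one way fails validation the other way
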
@@ -634,6 +634,7 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
     ap2.add_argument("--no-htp", action="store_true", help="disable httpserver threadpool, create threads as-needed instead")
     ap2.add_argument("--stackmon", metavar="P,S", type=u, help="write stacktrace to Path every S second")
     ap2.add_argument("--log-thrs", metavar="SEC", type=float, help="list active threads every SEC")
+    ap2.add_argument("--log-fk", metavar="REGEX", type=u, default="", help="log filekey params for files where path matches REGEX; '.' (a single dot) = all files")
     # fmt: on
 
     ap2 = ap.add_argument_group("help sections")

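As the new help text says, a single dot matches all files; per the svchub and util hunks further down, the REGEX value is compiled with re.compile and applied to file paths with .search(). A small illustration (the pattern and paths are examples only, not defaults):

import re

ptn = re.compile(r"\.opus$")                   # log filekey params only for opus files
print(bool(ptn.search("/srv/music/a.opus")))   # True  -> would be logged
print(bool(ptn.search("/srv/music/a.flac")))   # False -> stays quiet
print(bool(re.compile(".").search("/srv/x")))  # True  -> "." logs every file
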
@@ -707,7 +707,7 @@ class AuthSrv(object):
                     raise Exception('invalid mountpoint "{}"'.format(vol_dst))
 
                 # cfg files override arguments and previous files
-                vol_src = bos.path.abspath(vol_src)
+                vol_src = absreal(vol_src)
                 vol_dst = vol_dst.strip("/")
                 self._map_volume(vol_src, vol_dst, mount, daxs, mflags)
                 continue

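bos.path.abspath only normalizes the path, whereas absreal (presumably a helper living alongside gen_filekey in the util module) also resolves symlinks, so the volume root stored in the VFS is the real on-disk location. A minimal sketch of what such a helper amounts to, assuming it behaves like a realpath wrapper (the actual absreal likely also handles filesystem encoding):

import os

def absreal_sketch(fpath: str) -> str:
    # resolve symlinks first, then normalize to an absolute path;
    # hypothetical stand-in for copyparty's absreal
    return os.path.abspath(os.path.realpath(fpath))
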
@@ -818,7 +818,7 @@ class AuthSrv(object):
                     src = uncyg(src)
 
                 # print("\n".join([src, dst, perms]))
-                src = bos.path.abspath(src)
+                src = absreal(src)
                 dst = dst.strip("/")
                 self._map_volume(src, dst, mount, daxs, mflags)
 

@@ -847,7 +847,7 @@ class AuthSrv(object):
        if not mount:
            # -h says our defaults are CWD at root and read/write for everyone
            axs = AXS(["*"], ["*"], None, None)
-           vfs = VFS(self.log_func, bos.path.abspath("."), "", axs, {})
+           vfs = VFS(self.log_func, absreal("."), "", axs, {})
        elif "" not in mount:
            # there's volumes but no root; make root inaccessible
            vfs = VFS(self.log_func, "", "", AXS(), {})

@@ -1029,10 +1029,15 @@ class AuthSrv(object):
                vol.flags["dathumb"] = True
                vol.flags["dithumb"] = True
 
+       have_fk = False
        for vol in vfs.all_vols.values():
            fk = vol.flags.get("fk")
            if fk:
                vol.flags["fk"] = int(fk) if fk is not True else 8
+               have_fk = True
+
+       if have_fk and re.match("^[0-9\.]+$", self.args.fk_salt):
+           self.log("filekey salt: {}".format(self.args.fk_salt))
 
        for vol in vfs.all_vols.values():
            if "pk" in vol.flags and "gz" not in vol.flags and "xz" not in vol.flags:

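The added check only fires when at least one volume actually uses filekeys; the pattern matches a salt made solely of digits and dots, i.e. one that looks auto-generated (presumably the default) rather than user-chosen, so it gets surfaced in the log. A quick illustration (the salt values below are made up):

import re

ptn = r"^[0-9\.]+$"
print(bool(re.match(ptn, "1656969600.123456")))      # True  -> looks auto-generated, gets logged
print(bool(re.match(ptn, "correct-horse-battery")))  # False -> user-chosen salt, stays quiet
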
@@ -42,6 +42,7 @@ from .util import (
     exclude_dotfiles,
     fsenc,
     gen_filekey,
+    gen_filekey_dbg,
     gencookie,
     get_df,
     get_spd,

@@ -108,6 +109,7 @@ class HttpCli(object):
         self.u2fh = conn.u2fh  # mypy404
         self.log_func = conn.log_func  # mypy404
         self.log_src = conn.log_src  # mypy404
+        self.gen_fk = self._gen_fk if self.args.log_fk else gen_filekey
         self.tls: bool = hasattr(self.s, "cipher")
 
         # placeholders; assigned by run()

@@ -177,6 +179,9 @@ class HttpCli(object):
         if rem.startswith("/") or rem.startswith("../") or "/../" in rem:
             raise Exception("that was close")
 
+    def _gen_fk(self, salt: str, fspath: str, fsize: int, inode: int) -> str:
+        return gen_filekey_dbg(salt, fspath, fsize, inode, self.log, self.args.log_fk)
+
     def j2s(self, name: str, **ka: Any) -> str:
         tpl = self.conn.hsrv.j2[name]
         ka["ts"] = self.conn.hsrv.cachebuster()

@@ -711,7 +716,7 @@ class HttpCli(object):
         reader, remains = self.get_body_reader()
         vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)
         lim = vfs.get_dbv(rem)[0].lim
-        fdir = os.path.join(vfs.realpath, rem)
+        fdir = vfs.canonical(rem)
         if lim:
             fdir, rem = lim.all(self.ip, rem, remains, fdir)
 

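os.path.join(vfs.realpath, rem) builds the target path but leaves symlinked components unresolved; vfs.canonical(rem) is used in this and the following hunks so the handlers work with the resolved location, matching what the filekey code sees. Assuming it is roughly a join followed by symlink resolution, a hypothetical minimal equivalent would be:

import os

class VfsSketch:
    def __init__(self, realpath: str) -> None:
        self.realpath = realpath  # resolved volume root

    def canonical(self, rem: str) -> str:
        # hypothetical stand-in for VFS.canonical: join the remainder onto
        # the volume root, then resolve symlinks to get the real path
        return os.path.realpath(os.path.join(self.realpath, rem))
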
@@ -813,7 +818,7 @@ class HttpCli(object):
 
         vsuf = ""
         if self.can_read and "fk" in vfs.flags:
-            vsuf = "?k=" + gen_filekey(
+            vsuf = "?k=" + self.gen_fk(
                 self.args.fk_salt,
                 path,
                 post_sz,

@@ -950,7 +955,7 @@ class HttpCli(object):
 
         if rem:
             try:
-                dst = os.path.join(vfs.realpath, rem)
+                dst = vfs.canonical(rem)
                 if not bos.path.isdir(dst):
                     bos.makedirs(dst)
             except OSError as ex:

@@ -1185,7 +1190,7 @@ class HttpCli(object):
         sanitized = sanitize_fn(new_dir, "", [])
 
         if not nullwrite:
-            fdir = os.path.join(vfs.realpath, rem)
+            fdir = vfs.canonical(rem)
             fn = os.path.join(fdir, sanitized)
 
             if not bos.path.isdir(fdir):

@@ -1224,7 +1229,7 @@ class HttpCli(object):
         sanitized = sanitize_fn(new_file, "", [])
 
         if not nullwrite:
-            fdir = os.path.join(vfs.realpath, rem)
+            fdir = vfs.canonical(rem)
             fn = os.path.join(fdir, sanitized)
 
             if bos.path.exists(fn):

@@ -1245,7 +1250,7 @@ class HttpCli(object):
 
         upload_vpath = self.vpath
         lim = vfs.get_dbv(rem)[0].lim
-        fdir_base = os.path.join(vfs.realpath, rem)
+        fdir_base = vfs.canonical(rem)
         if lim:
             fdir_base, rem = lim.all(self.ip, rem, -1, fdir_base)
             upload_vpath = "{}/{}".format(vfs.vpath, rem).strip("/")

@@ -1286,7 +1291,7 @@ class HttpCli(object):
             else:
                 open_args = {}
                 tnam = fname = os.devnull
-                fdir = ""
+                fdir = abspath = ""
 
             if lim:
                 lim.chk_bup(self.ip)

@@ -1318,12 +1323,15 @@ class HttpCli(object):
                     lim.chk_bup(self.ip)
                     lim.chk_nup(self.ip)
                 except:
-                    bos.unlink(tabspath)
-                    bos.unlink(abspath)
+                    if not nullwrite:
+                        bos.unlink(tabspath)
+                        bos.unlink(abspath)
                     fname = os.devnull
                     raise
 
-            atomic_move(tabspath, abspath)
+            if not nullwrite:
+                atomic_move(tabspath, abspath)
 
             files.append(
                 (sz, sha_hex, sha_b64, p_file or "(discarded)", fname, abspath)
             )

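In nullwrite mode the upload body is discarded (fname is os.devnull and, per the hunk above, fdir and abspath stay empty), so there is no temp file to clean up and nothing to move into place; both the unlink calls and atomic_move are now skipped. A toy illustration of what the guard avoids (nothing here is copyparty code, the empty path is the point):

import os

abspath = ""  # what the nullwrite branch leaves in place of a real target path
try:
    os.unlink(abspath)  # unlinking an empty path raises FileNotFoundError
except OSError as ex:
    print("cleanup on an empty path would itself fail:", ex)
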
@@ -1373,9 +1381,9 @@ class HttpCli(object):
         for sz, sha_hex, sha_b64, ofn, lfn, ap in files:
             vsuf = ""
             if self.can_read and "fk" in vfs.flags:
-                vsuf = "?k=" + gen_filekey(
+                vsuf = "?k=" + self.gen_fk(
                     self.args.fk_salt,
-                    abspath,
+                    ap,
                     sz,
                     0 if ANYWIN or not ap else bos.stat(ap).st_ino,
                 )[: vfs.flags["fk"]]

@@ -1453,7 +1461,7 @@ class HttpCli(object):
             raise Pebkac(411)
 
         rp, fn = vsplit(rem)
-        fp = os.path.join(vfs.realpath, rp)
+        fp = vfs.canonical(rp)
         lim = vfs.get_dbv(rem)[0].lim
         if lim:
             fp, rp = lim.all(self.ip, rp, clen, fp)

@@ -2310,7 +2318,7 @@ class HttpCli(object):
 
         if not is_dir and (self.can_read or self.can_get):
             if not self.can_read and "fk" in vn.flags:
-                correct = gen_filekey(
+                correct = self.gen_fk(
                     self.args.fk_salt, abspath, st.st_size, 0 if ANYWIN else st.st_ino
                 )[: vn.flags["fk"]]
                 got = self.uparam.get("k")

@@ -2534,7 +2542,7 @@ class HttpCli(object):
             if add_fk:
                 href = "{}?k={}".format(
                     quotep(href),
-                    gen_filekey(
+                    self.gen_fk(
                         self.args.fk_salt, fspath, sz, 0 if ANYWIN else inf.st_ino
                     )[:add_fk],
                 )

@@ -6,6 +6,7 @@ import base64
 import calendar
 import gzip
 import os
+import re
 import shlex
 import signal
 import socket

@@ -113,6 +114,9 @@ class SvcHub(object):
         if not args.hardlink and args.never_symlink:
             args.no_dedup = True
 
+        if args.log_fk:
+            args.log_fk = re.compile(args.log_fk)
+
         # initiate all services to manage
         self.asrv = AuthSrv(self.args, self.log)
         if args.ls:

@@ -46,7 +46,7 @@ try:
     from collections.abc import Callable, Iterable
 
     import typing
-    from typing import Any, Generator, Optional, Protocol, Union
+    from typing import Any, Generator, Optional, Pattern, Protocol, Union
 
     class RootLogger(Protocol):
         def __call__(self, src: str, msg: str, c: Union[int, str] = 0) -> None:

@@ -932,6 +932,24 @@ def gen_filekey(salt: str, fspath: str, fsize: int, inode: int) -> str:
 ).decode("ascii")
 
 
+def gen_filekey_dbg(
+    salt: str,
+    fspath: str,
+    fsize: int,
+    inode: int,
+    log: "NamedLogger",
+    log_ptn: Optional[Pattern[str]],
+) -> str:
+    ret = gen_filekey(salt, fspath, fsize, inode)
+
+    assert log_ptn
+    if log_ptn.search(fspath):
+        t = "fk({}) salt({}) size({}) inode({}) fspath({})"
+        log(t.format(ret[:8], salt, fsize, inode, fspath))
+
+    return ret
+
+
 def gencookie(k: str, v: str, dur: Optional[int]) -> str:
     v = v.replace(";", "")
     if dur:

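A hedged usage sketch of the new helper: given a compiled pattern and any logger callable, matching paths get their filekey parameters logged and the key itself is returned unchanged. The import path assumes the helpers are reachable as copyparty.util (as the "from .util import" hunk above suggests); the salt, path, size and inode below are arbitrary examples:

import re
from copyparty.util import gen_filekey, gen_filekey_dbg

def log(msg: str, c: int = 0) -> None:  # stands in for a NamedLogger
    print(msg)

ptn = re.compile(".")  # what --log-fk "." compiles to: log every file

fk = gen_filekey_dbg("some-salt", "/srv/music/a.opus", 4096, 12345, log, ptn)
assert fk == gen_filekey("some-salt", "/srv/music/a.opus", 4096, 12345)
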
@@ -106,7 +106,7 @@ class Cfg(Namespace):
         ex = "re_maxage rproxy rsp_slp s_wr_slp theme themes turbo df"
         ka.update(**{k: 0 for k in ex.split()})
 
-        ex = "doctitle favico html_head mth textfiles"
+        ex = "doctitle favico html_head mth textfiles log_fk"
         ka.update(**{k: "" for k in ex.split()})
 
         super(Cfg, self).__init__(