Mirror of https://github.com/9001/copyparty.git, synced 2025-09-30 13:42:27 -06:00

prevent upload into ramdisk;
tries to detect misconfigured docker environments, e.g. /w/foo is mapped to a disk but /w/ itself isn't
commit 59a0122179
parent 5996a58b20

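The idea behind the check: resolve each writable volume down to the mountpoint
that actually backs it; if that mountpoint is a tmpfs, or the path falls through
to the container's root mount and that root is tmpfs/overlay (the classic docker
mistake where /w/foo is bind-mounted from the host but /w/ itself is not), then
every upload would only ever land in RAM. Volumes caught by the check lose their
write-access on startup/reload unless the new "wram" global-option / volflag is
set. Below is a minimal standalone sketch of the detection idea, assuming linux
and /proc/self/mounts; the helper names (mount_of, looks_ram_backed) are
illustrative and not part of copyparty -- the actual implementation is the
Fstab/ramdisk_chk code in the diff below:

import os

RAMFS = ("tmpfs", "overlay")


def mount_of(path: str) -> tuple[str, str]:
    # return (fstype, mountpoint) for the longest mountpoint containing path
    best = ("unknown", "/")
    with open("/proc/self/mounts", "r", encoding="utf-8", errors="replace") as f:
        for ln in f:
            cols = ln.split(" ")
            if len(cols) < 3:
                continue
            # the kernel octal-escapes spaces/tabs in mountpoint names
            mp = cols[1].replace("\\040", " ").replace("\\011", "\t")
            if (path + "/").startswith(mp.rstrip("/") + "/") and len(mp) >= len(best[1]):
                best = (cols[2], mp)
    return best


def looks_ram_backed(path: str) -> bool:
    fs, mp = mount_of(os.path.realpath(path))
    # tmpfs anywhere is RAM; overlay is only a problem when the path falls
    # through to the container's root mount, i.e. nothing was mapped in
    return fs == "tmpfs" or (mp == "/" and fs in RAMFS)


# in a container where /w/foo is a bind-mount but /w/ is not:
#   looks_ram_backed("/w/foo")  ->  False  (backed by the host disk)
#   looks_ram_backed("/w")      ->  True   (lives on the overlay root)
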
@@ -1220,6 +1220,7 @@ def add_upload(ap):
     ap2.add_argument("--chmod-d", metavar="UGO", type=u, default="755", help="unix file permissions to use when creating directories; see --help-chmod. Examples: [\033[32m755\033[0m] = owner-RW + all-R, [\033[32m777\033[0m] = full-yolo (volflag=chmod_d)")
     ap2.add_argument("--uid", metavar="N", type=int, default=-1, help="unix user-id to chown new files/folders to; default = -1 = do-not-change (volflag=uid)")
     ap2.add_argument("--gid", metavar="N", type=int, default=-1, help="unix group-id to chown new files/folders to; default = -1 = do-not-change (volflag=gid)")
+    ap2.add_argument("--wram", action="store_true", help="allow uploading even if a volume is inside a ramdisk, meaning that all data will be lost on the next server reboot (volflag=wram)")
     ap2.add_argument("--dedup", action="store_true", help="enable symlink-based upload deduplication (volflag=dedup)")
     ap2.add_argument("--safe-dedup", metavar="N", type=int, default=50, help="how careful to be when deduplicating files; [\033[32m1\033[0m] = just verify the filesize, [\033[32m50\033[0m] = verify file contents have not been altered (volflag=safededup)")
     ap2.add_argument("--hardlink", action="store_true", help="enable hardlink-based dedup; will fallback on symlinks when that is impossible (across filesystems) (volflag=hardlink)")

@@ -12,6 +12,7 @@ import queue
 from .__init__ import ANYWIN
 from .authsrv import AuthSrv
 from .broker_util import BrokerCli, ExceptionalQueue, NotExQueue
+from .fsutil import ramdisk_chk
 from .httpsrv import HttpSrv
 from .util import FAKE_MP, Daemon, HMaccas
 
@@ -56,6 +57,7 @@ class MpWorker(BrokerCli):
 
         # starting to look like a good idea
         self.asrv = AuthSrv(args, None, False)
+        ramdisk_chk(self.asrv)
 
         # instantiate all services here (TODO: inheritance?)
         self.iphash = HMaccas(os.path.join(self.args.E.cfg, "iphash"), 8)

@@ -99,6 +101,7 @@ class MpWorker(BrokerCli):
             if dest == "reload":
                 self.logw("mpw.asrv reloading")
                 self.asrv.reload()
+                ramdisk_chk(self.asrv)
                 self.logw("mpw.asrv reloaded")
                 continue
 
@@ -57,6 +57,7 @@ def vf_bmap() -> dict[str, str]:
         "rmagic",
         "rss",
         "wo_up_readme",
+        "wram",
         "xdev",
         "xlink",
         "xvol",

@@ -187,6 +188,7 @@ flagcats = {
         "chmod_f=644": "unix-permission for new files",
         "uid=573": "change owner of new files/folders to unix-user 573",
         "gid=999": "change owner of new files/folders to unix-group 999",
+        "wram": "allow uploading into ramdisks",
         "sparse": "force use of sparse files, mainly for s3-backed storage",
         "nosparse": "deny use of sparse files, mainly for slow storage",
         "daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",

@@ -7,7 +7,7 @@ import re
 import time
 
 from .__init__ import ANYWIN, MACOS
-from .authsrv import AXS, VFS
+from .authsrv import AXS, VFS, AuthSrv
 from .bos import bos
 from .util import chkcmd, min_ex, undot
 
@@ -18,22 +18,25 @@ if True: # pylint: disable=using-constant-test
 
 
 class Fstab(object):
-    def __init__(self, log: "RootLogger", args: argparse.Namespace):
+    def __init__(self, log: "RootLogger", args: argparse.Namespace, verbose: bool):
         self.log_func = log
+        self.verbose = verbose
 
         self.warned = False
         self.trusted = False
         self.tab: Optional[VFS] = None
         self.oldtab: Optional[VFS] = None
         self.srctab = "a"
-        self.cache: dict[str, str] = {}
+        self.cache: dict[str, tuple[str, str]] = {}
         self.age = 0.0
         self.maxage = args.mtab_age
 
     def log(self, msg: str, c: Union[int, str] = 0) -> None:
+        if not c and not self.verbose:
+            return
         self.log_func("fstab", msg, c)
 
-    def get(self, path: str) -> str:
+    def get(self, path: str) -> tuple[str, str]:
         now = time.time()
         if now - self.age > self.maxage or len(self.cache) > 9000:
             self.age = now

@@ -41,6 +44,7 @@ class Fstab(object):
             self.tab = None
             self.cache = {}
 
+        mp = ""
         fs = "ext4"
         msg = "failed to determine filesystem at %r; assuming %s\n%s"
 
@@ -50,7 +54,7 @@ class Fstab(object):
                 path = self._winpath(path)
             except:
                 self.log(msg % (path, fs, min_ex()), 3)
-                return fs
+                return fs, ""
 
         path = undot(path)
         try:

@@ -59,14 +63,14 @@ class Fstab(object):
             pass
 
         try:
-            fs = self.get_w32(path) if ANYWIN else self.get_unix(path)
+            fs, mp = self.get_w32(path) if ANYWIN else self.get_unix(path)
         except:
             self.log(msg % (path, fs, min_ex()), 3)
 
         fs = fs.lower()
-        self.cache[path] = fs
-        self.log("found %s at %r" % (fs, path))
-        return fs
+        self.cache[path] = (fs, mp)
+        self.log("found %s at %r, %r" % (fs, mp, path))
+        return fs, mp
 
     def _winpath(self, path: str) -> str:
         # try to combine volume-label + st_dev (vsn)

@@ -81,34 +85,49 @@ class Fstab(object):
         self.tab = VFS(self.log_func, "idk", "/", "/", AXS(), {})
         self.trusted = False
 
-    def build_tab(self) -> None:
-        self.log("inspecting mtab for changes")
-
+    def _from_sp_mount(self) -> dict[str, str]:
         sptn = r"^.*? on (.*) type ([^ ]+) \(.*"
         if MACOS:
             sptn = r"^.*? on (.*) \(([^ ]+), .*"
 
         ptn = re.compile(sptn)
         so, _ = chkcmd(["mount"])
-        tab1: list[tuple[str, str]] = []
-        atab = []
+        dtab: dict[str, str] = {}
         for ln in so.split("\n"):
             m = ptn.match(ln)
             if not m:
                 continue
 
             zs1, zs2 = m.groups()
-            tab1.append((str(zs1), str(zs2)))
-            atab.append(ln)
+            dtab[str(zs1)] = str(zs2)
 
+        return dtab
+
+    def _from_proc(self) -> dict[str, str]:
+        ret: dict[str, str] = {}
+        with open("/proc/self/mounts", "rb", 262144) as f:
+            src = f.read(262144).decode("utf-8", "replace").split("\n")
+        for zsl in [x.split(" ") for x in src]:
+            if len(zsl) < 3:
+                continue
+            zs = zsl[1]
+            zs = zs.replace("\\011", "\t").replace("\\040", " ").replace("\\134", "\\")
+            ret[zs] = zsl[2]
+        return ret
+
+    def build_tab(self) -> None:
+        self.log("inspecting mtab for changes")
+        dtab = self._from_sp_mount() if MACOS else self._from_proc()
+
         # keep empirically-correct values if mounttab unchanged
-        srctab = "\n".join(sorted(atab))
+        srctab = str(sorted(dtab.items()))
         if srctab == self.srctab:
             self.tab = self.oldtab
             return
 
         self.log("mtab has changed; reevaluating support for sparse files")
 
+        tab1 = list(dtab.items())
         tab1.sort(key=lambda x: (len(x[0]), x[0]))
         path1, fs1 = tab1[0]
         tab = VFS(self.log_func, fs1, path1, path1, AXS(), {})

@@ -146,7 +165,7 @@ class Fstab(object):
             vn.realpath = ptn.sub(nval, vn.realpath)
             visit.extend(list(vn.nodes.values()))
 
-    def get_unix(self, path: str) -> str:
+    def get_unix(self, path: str) -> tuple[str, str]:
         if not self.tab:
             try:
                 self.build_tab()

@@ -161,14 +180,37 @@ class Fstab(object):
         assert self.tab # !rm
         ret = self.tab._find(path)[0]
         if self.trusted or path == ret.vpath:
-            return ret.realpath.split("/")[0]
+            return ret.realpath.split("/")[0], ret.vpath
         else:
-            return "idk"
+            return "idk", ""
 
-    def get_w32(self, path: str) -> str:
+    def get_w32(self, path: str) -> tuple[str, str]:
         if not self.tab:
             self.build_fallback()
 
         assert self.tab # !rm
         ret = self.tab._find(path)[0]
-        return ret.realpath
+        return ret.realpath, ""
+
+
+def ramdisk_chk(asrv: AuthSrv) -> None:
+    # should have been in authsrv but that's a circular import
+    mods = []
+    ramfs = ("tmpfs", "overlay")
+    log = asrv.log_func or print
+    fstab = Fstab(log, asrv.args, False)
+    for vn in asrv.vfs.all_nodes.values():
+        if not vn.axs.uwrite or "wram" in vn.flags:
+            continue
+        ap = vn.realpath
+        if not ap or os.path.isfile(ap):
+            continue
+        fs, mp = fstab.get(ap)
+        mp = "/" + mp.strip("/")
+        if fs == "tmpfs" or (mp == "/" and fs in ramfs):
+            mods.append((vn.vpath, ap, fs, mp))
+            vn.axs.uwrite.clear()
+    if mods:
+        t = "WARNING: write-access was removed from the following volumes because they are not mapped to an actual HDD for storage! All uploaded data would live in RAM only, and all uploaded files would be LOST on next reboot. To allow uploading and ignore this hazard, enable the 'wram' option (global/volflag). List of affected volumes:"
+        t2 = ["\n volume=[/%s], abspath=%r, type=%s, root=%r" % x for x in mods]
+        log("vfs", t + "".join(t2) + "\n", 1)

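To support this, Fstab.get() now returns a (filesystem, mountpoint) tuple instead
of a bare filesystem name, which is what lets ramdisk_chk tell "tmpfs mounted
directly at the volume" apart from "volume that merely lives on the container's
overlay root". A quick sketch of how that condition plays out for a few
hypothetical volumes; the paths and tuples below are made up for illustration,
not actual copyparty output:

# hypothetical (fs, mountpoint) values as the new Fstab.get() might return them
# inside a misconfigured docker container
RAMFS = ("tmpfs", "overlay")

examples = {
    "/w/foo": ("ext4", "/w/foo"),  # bind-mounted from the host; fine
    "/w": ("overlay", "/"),        # nothing mapped in; lives in the image layer
    "/tmp/up": ("tmpfs", "/tmp"),  # plain ramdisk; always flagged
}

for ap, (fs, mp) in examples.items():
    mp = "/" + mp.strip("/")
    flagged = fs == "tmpfs" or (mp == "/" and fs in RAMFS)
    print("%-8s %-8s %-7s %s" % (ap, fs, mp, "read-only unless wram" if flagged else "ok"))

Only the flagged volumes have their write-ACL cleared; setting "wram" on the
volume (or globally) skips the check for that volume entirely.
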
@@ -30,6 +30,7 @@ from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, E, EnvParams, unic
 from .authsrv import BAD_CFG, AuthSrv, n_du_who, n_ver_who
 from .bos import bos
 from .cert import ensure_cert
+from .fsutil import ramdisk_chk
 from .mtag import HAVE_FFMPEG, HAVE_FFPROBE, HAVE_MUTAGEN
 from .pwhash import HAVE_ARGON2
 from .tcpsrv import TcpSrv

@@ -310,6 +311,7 @@ class SvcHub(object):
 
         # initiate all services to manage
         self.asrv = AuthSrv(self.args, self.log, dargs=self.dargs)
+        ramdisk_chk(self.asrv)
 
         if args.cgen:
             self.asrv.cgen()

@@ -1359,6 +1361,7 @@ class SvcHub(object):
         with self.reload_mutex:
             self.log("root", "reloading config")
             self.asrv.reload(9 if up2k else 4)
+            ramdisk_chk(self.asrv)
             if up2k:
                 self.up2k.reload(rescan_all_vols)
                 t += "; volumes are now reinitializing"

@@ -213,7 +213,7 @@ class Up2k(object):
             t = "could not initialize sqlite3, will use in-memory registry only"
             self.log(t, 3)
 
-        self.fstab = Fstab(self.log_func, self.args)
+        self.fstab = Fstab(self.log_func, self.args, True)
         self.gen_fk = self._gen_fk if self.args.log_fk else gen_filekey
 
         if self.args.hash_mt < 2:

@@ -102,6 +102,7 @@ def tc1(vflags):
         "-p4321",
         "-e2dsa",
         "-e2tsr",
+        "--wram",
         "--ban-403=no",
         "--dbd=yolo",
         "--no-mutagen",

@@ -146,7 +146,7 @@ class Cfg(Namespace):
         ex = "allow_flac allow_wav chpw cookie_lax daw dav_auth dav_mac dav_rt e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp early_ban ed emp exp force_js getmod grid gsel hardlink hardlink_only ih ihead localtime log_badxml magic md_no_br nid nih no_acode no_athumb no_bauth no_clone no_cp no_dav no_db_ip no_del no_dirsz no_dupe no_fnugg no_lifetime no_logues no_mv no_pipe no_poll no_readme no_robots no_sb_md no_sb_lg no_scandir no_tail no_tarcmp no_thumb no_vthumb no_u2abrt no_zip nrand nsort nw og og_no_head og_s_title ohead q rand re_dirsz reflink rmagic rss smb srch_dbg srch_excl srch_icase stats uqe usernames vague_403 vc ver wo_up_readme write_uplog xdev xlink xvol zipmaxu zs"
         ka.update(**{k: False for k in ex.split()})
 
-        ex = "dav_inf dedup dotpart dotsrch hook_v no_dhash no_fastboot no_fpool no_htp no_rescan no_sendfile no_ses no_snap no_up_list no_voldump re_dhash see_dots plain_ip"
+        ex = "dav_inf dedup dotpart dotsrch hook_v no_dhash no_fastboot no_fpool no_htp no_rescan no_sendfile no_ses no_snap no_up_list no_voldump wram re_dhash see_dots plain_ip"
         ka.update(**{k: True for k in ex.split()})
 
         ex = "ah_cli ah_gen css_browser dbpath hist ipu js_browser js_other mime mimes no_forget no_hash no_idx nonsus_urls og_tpl og_ua ua_nodoc ua_nozip"