From 10bc2d9205c5f82416f4ff5e02a10091a6cb9a89 Mon Sep 17 00:00:00 2001 From: ed Date: Sun, 17 Dec 2023 22:30:22 +0000 Subject: [PATCH] unsuccessful attempt at dirkeys (#64) --- README.md | 16 +++- copyparty/__main__.py | 19 ++++- copyparty/authsrv.py | 12 +++ copyparty/httpcli.py | 160 ++++++++++++++++++++++++----------- copyparty/web/baguettebox.js | 5 +- copyparty/web/browser.js | 124 +++++++++++++++++---------- copyparty/web/util.js | 7 +- tests/test_dots.py | 57 +++++++++++-- tests/util.py | 3 +- 9 files changed, 295 insertions(+), 108 deletions(-) diff --git a/README.md b/README.md index 2cc4fbec..0328f69c 100644 --- a/README.md +++ b/README.md @@ -91,6 +91,7 @@ turn almost any device into a file server with resumable uploads/downloads using * [gotchas](#gotchas) - behavior that might be unexpected * [cors](#cors) - cross-site request config * [filekeys](#filekeys) - prevent filename bruteforcing + * [dirkeys](#dirkeys) - share specific folders in a volume * [password hashing](#password-hashing) - you can hash passwords * [https](#https) - both HTTP and HTTPS are accepted * [recovering from crashes](#recovering-from-crashes) @@ -1767,12 +1768,25 @@ cors can be configured with `--acao` and `--acam`, or the protections entirely d prevent filename bruteforcing -volflag `c,fk` generates filekeys (per-file accesskeys) for all files; users which have full read-access (permission `r`) will then see URLs with the correct filekey `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404 +volflag `fk` generates filekeys (per-file accesskeys) for all files; users which have full read-access (permission `r`) will then see URLs with the correct filekey `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404 by default, filekeys are generated based on salt (`--fk-salt`) + filesystem-path + file-size + inode (if not windows); add volflag `fka` to generate slightly weaker filekeys which will not be invalidated if the file is edited (only salt + path) permissions `wG` (write + upget) lets users upload files and receive their own filekeys, still without being able to see other uploads +### dirkeys + +share specific folders in a volume without giving away full read-access to the rest -- the visitor only needs the `g` (get) permission to view the link + +volflag `dk` generates dirkeys (per-directory accesskeys) for all folders, granting read-access to that folder; by default only that folder itself, no subfolders + +volflag `dks` lets people enter subfolders as well, and also enables download-as-zip/tar + +dirkeys are generated based on another salt (`--dk-salt`) + filesystem-path and have a few limitations: +* the key does not change if the contents of the folder is modified + * if you need a new dirkey, either change the salt or rename the folder +* linking to a textfile (so it opens in the textfile viewer) is not possible if recipient doesn't have read-access + ## password hashing diff --git a/copyparty/__main__.py b/copyparty/__main__.py index 32f841e8..952b43b1 100755 --- a/copyparty/__main__.py +++ b/copyparty/__main__.py @@ -261,6 +261,19 @@ def get_fk_salt() -> str: return ret.decode("utf-8") +def get_dk_salt() -> str: + fp = os.path.join(E.cfg, "dk-salt.txt") + try: + with open(fp, "rb") as f: + ret = f.read().strip() + except: + ret = base64.b64encode(os.urandom(30)) + with open(fp, "wb") as f: + f.write(ret + b"\n") + + return ret.decode("utf-8") + + def get_ah_salt() -> str: fp = 
os.path.join(E.cfg, "ah-salt.txt") try: @@ -1086,13 +1099,14 @@ def add_safety(ap): ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like \033[33m--acao\033[0m's description)") -def add_salt(ap, fk_salt, ah_salt): +def add_salt(ap, fk_salt, dk_salt, ah_salt): ap2 = ap.add_argument_group('salting options') ap2.add_argument("--ah-alg", metavar="ALG", type=u, default="none", help="account-pw hashing algorithm; one of these, best to worst: \033[32margon2 scrypt sha2 none\033[0m (each optionally followed by alg-specific comma-sep. config)") ap2.add_argument("--ah-salt", metavar="SALT", type=u, default=ah_salt, help="account-pw salt; ignored if \033[33m--ah-alg\033[0m is none (default)") ap2.add_argument("--ah-gen", metavar="PW", type=u, default="", help="generate hashed password for \033[33mPW\033[0m, or read passwords from STDIN if \033[33mPW\033[0m is [\033[32m-\033[0m]") ap2.add_argument("--ah-cli", action="store_true", help="launch an interactive shell which hashes passwords without ever storing or displaying the original passwords") ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files") + ap2.add_argument("--dk-salt", metavar="SALT", type=u, default=dk_salt, help="per-directory accesskey salt; used to generate unpredictable URLs to share folders with users who only have the 'get' permission") ap2.add_argument("--warksalt", metavar="SALT", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)") @@ -1270,6 +1284,7 @@ def run_argparse( cert_path = os.path.join(E.cfg, "cert.pem") fk_salt = get_fk_salt() + dk_salt = get_dk_salt() ah_salt = get_ah_salt() # alpine peaks at 5 threads for some reason, @@ -1299,7 +1314,7 @@ def run_argparse( add_webdav(ap) add_smb(ap) add_safety(ap) - add_salt(ap, fk_salt, ah_salt) + add_salt(ap, fk_salt, dk_salt, ah_salt) add_optouts(ap) add_shutdown(ap) add_yolo(ap) diff --git a/copyparty/authsrv.py b/copyparty/authsrv.py index fb90e842..3e974aef 100644 --- a/copyparty/authsrv.py +++ b/copyparty/authsrv.py @@ -551,7 +551,12 @@ class VFS(object): # no vfs nodes in the list of real inodes real = [x for x in real if x[0] not in self.nodes] + dbv = self.dbv or self for name, vn2 in sorted(self.nodes.items()): + if vn2.dbv == dbv and self.flags.get("dk"): + virt_vis[name] = vn2 + continue + ok = False zx = vn2.axs axs = [zx.uread, zx.uwrite, zx.umove, zx.udel, zx.uget] @@ -1406,6 +1411,13 @@ class AuthSrv(object): vol.flags["fk"] = int(fk) if fk is not True else 8 have_fk = True + dk = vol.flags.get("dk") + dks = vol.flags.get("dks") + if dks and not dk: + dk = dks + if dk: + vol.flags["dk"] = int(dk) if dk is not True else 8 + if have_fk and re.match(r"^[0-9\.]+$", self.args.fk_salt): self.log("filekey salt: {}".format(self.args.fk_salt)) diff --git a/copyparty/httpcli.py b/copyparty/httpcli.py index f59dfb32..65e780d1 100644 --- a/copyparty/httpcli.py +++ b/copyparty/httpcli.py @@ -498,7 +498,7 @@ class HttpCli(object): self.can_admin, self.can_dot, ) = ( - avn.can_access("", self.uname) if avn else [False] * 7 + avn.can_access("", self.uname) if avn else [False] * 8 ) self.avn = avn self.vn = vn @@ -1894,7 +1894,12 @@ class HttpCli(object): v = self.uparam[k] - vn, rem = self.asrv.vfs.get(self.vpath, self.uname, True, False) + if self._use_dirkey(): + vn = self.vn + rem = 
self.rem + else: + vn, rem = self.asrv.vfs.get(self.vpath, self.uname, True, False) + zs = self.parser.require("files", 1024 * 1024) if not zs: raise Pebkac(422, "need files list") @@ -2770,6 +2775,27 @@ class HttpCli(object): return file_lastmod, True + def _use_dirkey(self, ap: str = "") -> bool: + if self.can_read or not self.can_get: + return False + + req = self.uparam.get("k") or "" + if not req: + return False + + dk_len = self.vn.flags.get("dk") + if not dk_len: + return False + + ap = ap or self.vn.canonical(self.rem) + zs = self.gen_fk(2, self.args.dk_salt, ap, 0, 0)[:dk_len] + if req == zs: + return True + + t = "wrong dirkey, want %s, got %s\n vp: %s\n ap: %s" + self.log(t % (zs, req, self.req, ap), 6) + return False + def _expand(self, txt: str, phs: list[str]) -> str: for ph in phs: if ph.startswith("hdr."): @@ -3446,7 +3472,7 @@ class HttpCli(object): dst = dst[len(top) + 1 :] - ret = self.gen_tree(top, dst) + ret = self.gen_tree(top, dst, self.uparam.get("k", "")) if self.is_vproxied: parents = self.args.R.split("/") for parent in reversed(parents): @@ -3456,18 +3482,25 @@ class HttpCli(object): self.reply(zs.encode("utf-8"), mime="application/json") return True - def gen_tree(self, top: str, target: str) -> dict[str, Any]: + def gen_tree(self, top: str, target: str, dk: str) -> dict[str, Any]: ret: dict[str, Any] = {} excl = None if target: excl, target = (target.split("/", 1) + [""])[:2] - sub = self.gen_tree("/".join([top, excl]).strip("/"), target) + sub = self.gen_tree("/".join([top, excl]).strip("/"), target, dk) ret["k" + quotep(excl)] = sub vfs = self.asrv.vfs + dk_sz = False + if dk: + vn, rem = vfs.get(top, self.uname, False, False) + if vn.flags.get("dks") and self._use_dirkey(vn.canonical(rem)): + dk_sz = vn.flags.get("dk") + dots = False + fsroot = "" try: - vn, rem = vfs.get(top, self.uname, True, False) + vn, rem = vfs.get(top, self.uname, not dk_sz, False) fsroot, vfs_ls, vfs_virt = vn.ls( rem, self.uname, @@ -3483,15 +3516,20 @@ class HttpCli(object): if d1 == top: vfs_virt[d2] = vfs # typechk, value never read - dirs = [] - - dirnames = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)] + dirs = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)] if not dots or "dots" not in self.uparam: - dirnames = exclude_dotfiles(dirnames) + dirs = exclude_dotfiles(dirs) - for fn in [x for x in dirnames if x != excl]: - dirs.append(quotep(fn)) + dirs = [quotep(x) for x in dirs if x != excl] + + if dk_sz and fsroot: + kdirs = [] + for dn in dirs: + ap = os.path.join(fsroot, dn) + zs = self.gen_fk(2, self.args.dk_salt, ap, 0, 0)[:dk_sz] + kdirs.append(dn + "?k=" + zs) + dirs = kdirs for x in vfs_virt: if x != excl: @@ -3744,6 +3782,7 @@ class HttpCli(object): self.out_headers.pop("X-Robots-Tag", None) is_dir = stat.S_ISDIR(st.st_mode) + is_dk = False fk_pass = False icur = None if is_dir and (e2t or e2d): @@ -3751,47 +3790,48 @@ class HttpCli(object): if idx and hasattr(idx, "p_end"): icur = idx.get_cur(dbv.realpath) - if self.can_read: - th_fmt = self.uparam.get("th") - if th_fmt is not None: - if is_dir: - vrem = vrem.rstrip("/") - if icur and vrem: - q = "select fn from cv where rd=? and dn=?" 
- crd, cdn = vrem.rsplit("/", 1) if "/" in vrem else ("", vrem) - # no mojibake support: - try: - cfn = icur.execute(q, (crd, cdn)).fetchone() - if cfn: - fn = cfn[0] - fp = os.path.join(abspath, fn) - if bos.path.exists(fp): - vrem = "{}/{}".format(vrem, fn).strip("/") - is_dir = False - except: - pass - else: - for fn in self.args.th_covers: + th_fmt = self.uparam.get("th") + if th_fmt is not None and ( + self.can_read or (self.can_get and vn.flags.get("dk")) + ): + if is_dir: + vrem = vrem.rstrip("/") + if icur and vrem: + q = "select fn from cv where rd=? and dn=?" + crd, cdn = vrem.rsplit("/", 1) if "/" in vrem else ("", vrem) + # no mojibake support: + try: + cfn = icur.execute(q, (crd, cdn)).fetchone() + if cfn: + fn = cfn[0] fp = os.path.join(abspath, fn) if bos.path.exists(fp): vrem = "{}/{}".format(vrem, fn).strip("/") is_dir = False - break + except: + pass + else: + for fn in self.args.th_covers: + fp = os.path.join(abspath, fn) + if bos.path.exists(fp): + vrem = "{}/{}".format(vrem, fn).strip("/") + is_dir = False + break - if is_dir: - return self.tx_ico("a.folder") + if is_dir: + return self.tx_ico("a.folder") - thp = None - if self.thumbcli: - thp = self.thumbcli.get(dbv, vrem, int(st.st_mtime), th_fmt) + thp = None + if self.thumbcli: + thp = self.thumbcli.get(dbv, vrem, int(st.st_mtime), th_fmt) - if thp: - return self.tx_file(thp) + if thp: + return self.tx_file(thp) - if th_fmt == "p": - raise Pebkac(404) + if th_fmt == "p": + raise Pebkac(404) - return self.tx_ico(rem) + return self.tx_ico(rem) elif self.can_get and self.avn: axs = self.avn.axs @@ -3835,7 +3875,8 @@ class HttpCli(object): )[: vn.flags["fk"]] got = self.uparam.get("k") if got != correct: - self.log("wrong filekey, want {}, got {}".format(correct, got)) + t = "wrong filekey, want %s, got %s\n vp: %s\n ap: %s" + self.log(t % (correct, got, self.req, abspath), 6) return self.tx_404() if ( @@ -3851,8 +3892,11 @@ class HttpCli(object): return self.tx_file(abspath) - elif is_dir and not self.can_read and not self.can_write: - return self.tx_404(True) + elif is_dir and not self.can_read: + if self._use_dirkey(abspath): + is_dk = True + elif not self.can_write: + return self.tx_404(True) srv_info = [] @@ -3874,7 +3918,7 @@ class HttpCli(object): srv_infot = " // ".join(srv_info) perms = [] - if self.can_read: + if self.can_read or is_dk: perms.append("read") if self.can_write: perms.append("write") @@ -3999,7 +4043,7 @@ class HttpCli(object): if not self.conn.hsrv.prism: j2a["no_prism"] = True - if not self.can_read: + if not self.can_read and not is_dk: if is_ls: return self.tx_ls(ls_ret) @@ -4052,8 +4096,12 @@ class HttpCli(object): ): ls_names = exclude_dotfiles(ls_names) + add_dk = vf.get("dk") add_fk = vf.get("fk") fk_alg = 2 if "fka" in vf else 1 + if add_dk: + zs = self.gen_fk(2, self.args.dk_salt, abspath, 0, 0)[:add_dk] + ls_ret["dk"] = cgv["dk"] = zs dirs = [] files = [] @@ -4081,6 +4129,12 @@ class HttpCli(object): href += "/" if self.args.no_zip: margin = "DIR" + elif add_dk: + zs = absreal(fspath) + margin = 'zip' % ( + quotep(href), + self.gen_fk(2, self.args.dk_salt, zs, 0, 0)[:add_dk], + ) else: margin = 'zip' % (quotep(href),) elif fn in hist: @@ -4121,6 +4175,11 @@ class HttpCli(object): 0 if ANYWIN else inf.st_ino, )[:add_fk], ) + elif add_dk and is_dir: + href = "%s?k=%s" % ( + quotep(href), + self.gen_fk(2, self.args.dk_salt, fspath, 0, 0)[:add_dk], + ) else: href = quotep(href) @@ -4139,6 +4198,9 @@ class HttpCli(object): files.append(item) item["rd"] = rem + if is_dk and not 
vf.get("dks"): + dirs = [] + if ( self.cookies.get("idxh") == "y" and "ls" not in self.uparam diff --git a/copyparty/web/baguettebox.js b/copyparty/web/baguettebox.js index 066073f8..2d6b3bfa 100644 --- a/copyparty/web/baguettebox.js +++ b/copyparty/web/baguettebox.js @@ -392,8 +392,7 @@ window.baguetteBox = (function () { } function dlpic() { - var url = findfile()[3].href; - url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache'; + var url = addq(findfile()[3].href, 'cache'); dl_file(url); } @@ -641,7 +640,7 @@ window.baguetteBox = (function () { options.captions.call(currentGallery, imageElement) : imageElement.getAttribute('data-caption') || imageElement.title; - imageSrc += imageSrc.indexOf('?') < 0 ? '?cache' : '&cache'; + imageSrc = addq(imageSrc, 'cache'); if (is_vid && index != currentIndex) return; // no preload diff --git a/copyparty/web/browser.js b/copyparty/web/browser.js index f49a017a..c45018db 100644 --- a/copyparty/web/browser.js +++ b/copyparty/web/browser.js @@ -1355,7 +1355,7 @@ function set_files_html(html) { var ACtx = !IPHONE && (window.AudioContext || window.webkitAudioContext), noih = /[?&]v\b/.exec('' + location), hash0 = location.hash, - mp; + dk, mp; var mpl = (function () { @@ -1475,7 +1475,7 @@ var mpl = (function () { if (!c) return url; - return url + (url.indexOf('?') < 0 ? '?' : '&') + 'th=' + (can_ogg ? 'opus' : 'caf'); + return addq(url, 'th=') + (can_ogg ? 'opus' : 'caf'); }; r.pp = function () { @@ -1526,7 +1526,7 @@ var mpl = (function () { } if (cover) { - cover += (cover.indexOf('?') === -1 ? '?' : '&') + 'th=j'; + cover = addq(cover, 'th=j'); tags.artwork = [{ "src": cover, type: "image/jpeg" }]; } } @@ -1616,8 +1616,8 @@ function MPlayer() { link = tds[1].getElementsByTagName('a'); link = link[link.length - 1]; - var url = noq_href(link), - m = re_audio.exec(url); + var url = link.getAttribute('href'), + m = re_audio.exec(url.split('?')[0]); if (m) { var tid = link.getAttribute('id'); @@ -1711,8 +1711,7 @@ function MPlayer() { } r.preload = function (url, full) { - url = mpl.acode(url); - url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=987'; + url = addq(mpl.acode(url), 'cache=987'); mpl.preload_url = full ? url : null; var t0 = Date.now(); @@ -2302,8 +2301,7 @@ function dl_song() { return toast.inf(10, L.f_dls); } - var url = mp.tracks[mp.au.tid]; - url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=987'; + var url = addq(mp.tracks[mp.au.tid], 'cache=987'); dl_file(url); } @@ -2952,8 +2950,7 @@ function play(tid, is_ev, seek) { widget.open(); } - var url = mpl.acode(mp.tracks[tid]); - url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=987'; + var url = addq(mpl.acode(mp.tracks[tid]), 'cache=987'); if (mp.au.rsrc == url) mp.au.currentTime = 0; @@ -4100,7 +4097,7 @@ var showfile = (function () { }; r.active = function () { - return location.search.indexOf('doc=') + 1; + return !!/[?&]doc=/.exec(location.search); }; r.getlang = function (fn) { @@ -4141,12 +4138,15 @@ var showfile = (function () { }; r.show = function (url, no_push) { - var xhr = new XHR(); + var xhr = new XHR(), + m = /[?&](k=[^&]+)/.exec(url); + + url = url.split('?')[0] + (m ? '?' 
+ m[1] : ''); xhr.url = url; xhr.fname = uricom_dec(url.split('/').pop()); xhr.no_push = no_push; xhr.ts = Date.now(); - xhr.open('GET', url.split('?')[0], true); + xhr.open('GET', url, true); xhr.onprogress = loading; xhr.onload = xhr.onerror = load_cb; xhr.send(); @@ -4184,14 +4184,14 @@ var showfile = (function () { var url = doc[0], lnh = doc[1], txt = doc[2], - name = url.split('/').pop(), + name = url.split('?')[0].split('/').pop(), tname = uricom_dec(name), lang = r.getlang(name), is_md = lang == 'md'; ebi('files').style.display = ebi('gfiles').style.display = ebi('lazy').style.display = ebi('pro').style.display = ebi('epi').style.display = 'none'; ebi('dldoc').setAttribute('href', url); - ebi('editdoc').setAttribute('href', url + (url.indexOf('?') > 0 ? '&' : '?') + 'edit'); + ebi('editdoc').setAttribute('href', addq(url, 'edit')); ebi('editdoc').style.display = (has(perms, 'write') && (is_md || has(perms, 'delete'))) ? '' : 'none'; var wr = ebi('bdoc'), @@ -4242,7 +4242,7 @@ var showfile = (function () { wintitle(tname + ' \u2014 '); document.documentElement.scrollTop = 0; var hfun = no_push ? hist_replace : hist_push; - hfun(get_evpath() + '?doc=' + url.split('/').pop()); + hfun(get_evpath() + '?doc=' + name); // can't dk: server wants dk and js needs fk qsr('#docname'); el = mknod('span', 'docname'); @@ -4441,7 +4441,7 @@ var thegrid = (function () { if (!force) return; - hist_push(get_evpath()); + hist_push(get_evpath() + (dk ? '?k=' + dk : '')); wintitle(); } @@ -4666,10 +4666,10 @@ var thegrid = (function () { ref = ao.getAttribute('id'), isdir = href.endsWith('/'), ac = isdir ? ' class="dir"' : '', - ihref = href; + ihref = ohref; if (r.thumbs) { - ihref += '?th=' + (have_webp ? 'w' : 'j'); + ihref = addq(ihref, 'th=') + (have_webp ? 'w' : 'j'); if (r.full) ihref += 'f' if (href == "#") @@ -4703,7 +4703,7 @@ var thegrid = (function () { } ihref = SR + '/.cpr/ico/' + ext; } - ihref += (ihref.indexOf('?') > 0 ? '&' : '?') + 'cache=i'; + ihref = addq(ihref, 'cache=i'); html.push('' + hek + ''; + url + kdk + '">' + hek + ''; if (res[kk]) { var subtree = parsetree(res[kk], url.slice(1)); @@ -6311,16 +6326,24 @@ var treectl = (function () { if (!e.state) return; - var url = new URL(e.state, "https://" + document.location.host); - var hbase = url.pathname; - var cbase = document.location.pathname; - if (url.search.indexOf('doc=') + 1 && hbase == cbase) + var url = new URL(e.state, "https://" + location.host), + req = url.pathname, + hbase = req, + cbase = location.pathname, + mdoc = /[?&]doc=/.exec('' + url), + mdk = /[?&](k=[^&]+)/.exec('' + url); + + if (mdoc && hbase == cbase) return showfile.show(hbase + showfile.sname(url.search), true); - r.goto(url.pathname, false, true); + if (mdk) + req += '?' + mdk[1]; + + r.goto(req, false, true); }; - hist_replace(get_evpath() + location.hash); + var evp = get_evpath() + (dk ? 
'?k=' + dk : ''); + hist_replace(evp + location.hash); r.onscroll = onscroll; return r; })(); @@ -6945,11 +6968,11 @@ var arcfmt = (function () { if (!/^(zip|tar|pax|tgz|txz)$/.exec(txt)) continue; - var ofs = href.lastIndexOf('?'); - if (ofs < 0) + var m = /(.*[?&])(tar|zip)([^&]*)(.*)$/.exec(href); + if (!m) throw new Error('missing arg in url'); - o.setAttribute("href", href.slice(0, ofs + 1) + arg); + o.setAttribute("href", m[1] + arg + m[4]); o.textContent = fmt.split('_')[0]; } ebi('selzip').textContent = fmt.split('_')[0]; @@ -7012,13 +7035,20 @@ var msel = (function () { vbase = get_evpath(); for (var a = 0, aa = links.length; a < aa; a++) { - var href = noq_href(links[a]).replace(/\/$/, ""), + var qhref = links[a].getAttribute('href'), + href = qhref.split('?')[0].replace(/\/$/, ""), item = {}; item.id = links[a].getAttribute('id'); item.sel = clgot(links[a].closest('tr'), 'sel'); item.vp = href.indexOf('/') !== -1 ? href : vbase + href; + if (dk) { + var m = /[?&](k=[^&]+)/.exec(qhref); + item.q = m ? '?' + m[1] : ''; + } + else item.q = ''; + r.all.push(item); if (item.sel) r.sel.push(item); @@ -7135,6 +7165,9 @@ var msel = (function () { frm = mknod('form'), txt = []; + if (dk) + arg += '&k=' + dk; + for (var a = 0; a < sel.length; a++) txt.push(vsplit(sel[a].vp)[1]); @@ -7159,7 +7192,7 @@ var msel = (function () { ev(e); var sel = r.getsel(); for (var a = 0; a < sel.length; a++) - dl_file(sel[a].vp); + dl_file(sel[a].vp + sel[a].q); }; r.render = function () { var tds = QSA('#files tbody td+td+td'), @@ -7635,7 +7668,7 @@ var unpost = (function () { function linklist() { var ret = [], - base = document.location.origin.replace(/\/$/, ''); + base = location.origin.replace(/\/$/, ''); for (var a = 0; a < r.files.length; a++) ret.push(base + r.files[a].vp); @@ -7812,8 +7845,9 @@ ebi('files').onclick = ebi('docul').onclick = function (e) { tgt = e.target.closest('a[hl]'); if (tgt) { var a = ebi(tgt.getAttribute('hl')), + href = a.getAttribute('href'), fun = function () { - showfile.show(noq_href(a), tgt.getAttribute('lang')); + showfile.show(href, tgt.getAttribute('lang')); }, szs = ft2dict(a.closest('tr'))[0].sz, sz = parseInt(szs.replace(/[, ]/g, '')); diff --git a/copyparty/web/util.js b/copyparty/web/util.js index d830565a..9ba3919c 100644 --- a/copyparty/web/util.js +++ b/copyparty/web/util.js @@ -681,6 +681,11 @@ function vjoin(p1, p2) { } +function addq(url, q) { + return url + (url.indexOf('?') < 0 ? '?' : '&') + (q === undefined ? '' : q); +} + + function uricom_enc(txt, do_fb_enc) { try { return encodeURIComponent(txt); @@ -1808,7 +1813,7 @@ function md_thumbs(md) { float = has(flags, 'l') ? 'left' : has(flags, 'r') ? 'right' : ''; if (!/[?&]cache/.exec(url)) - url += (url.indexOf('?') < 0 ? '?' 
: '&') + 'cache=i'; + url = addq(url, 'cache=i'); md[a] = '' + alt + '' + md[a].slice(o2 + 1); } diff --git a/tests/test_dots.py b/tests/test_dots.py index 5822dfdd..79714f57 100644 --- a/tests/test_dots.py +++ b/tests/test_dots.py @@ -4,6 +4,9 @@ from __future__ import print_function, unicode_literals import io import os +import time +import json +import pprint import shutil import tarfile import tempfile @@ -66,7 +69,9 @@ class TestHttpCli(unittest.TestCase): self.assertEqual(self.curl("?tar", "x")[1][:17], "\nJ2EOT") - # search + ## + ## search + up2k = Up2k(self) u2idx = U2idx(self) allvols = list(self.asrv.vfs.all_vols.values()) @@ -91,15 +96,55 @@ class TestHttpCli(unittest.TestCase): xe = "a/da/f4 a/f3 f0 t/f1" self.assertEqual(x, xe) + ## + ## dirkeys + + os.mkdir("v") + with open("v/f1.txt", "wb") as f: + f.write(b"a") + os.rename("a", "v/a") + os.rename(".b", "v/.b") + + vcfg = [ + ".::r.,u1:g,u2:c,dk", + "v/a:v/a:r.,u1:g,u2:c,dk", + "v/.b:v/.b:r.,u1:g,u2:c,dk" + ] + self.args = Cfg(v=vcfg, a=["u1:u1", "u2:u2"]) + self.asrv = AuthSrv(self.args, self.log) + zj = json.loads(self.curl("?ls", "u1")[1]) + url = "?k=" + zj["dk"] + # should descend into folders, but not other volumes: + self.assertEqual(self.tardir(url, "u2"), "f0 t/f1 v/f1.txt") + + zj = json.loads(self.curl("v?ls", "u1")[1]) + url = "v?k=" + zj["dk"] + self.assertEqual(self.tarsel(url, "u2", ["f1.txt", "a", ".b"]), "f1.txt") + def tardir(self, url, uname): - h, b = self.curl("/" + url + "?tar", uname, True) + top = url.split("?")[0] + top = ("top" if not top else top.lstrip(".").split("/")[0]) + "/" + url += ("&" if "?" in url else "?") + "tar" + h, b = self.curl(url, uname, True) tar = tarfile.open(fileobj=io.BytesIO(b), mode="r|").getnames() - top = ("top" if not url else url.lstrip(".").split("/")[0]) + "/" - assert len(tar) == len([x for x in tar if x.startswith(top)]) + if len(tar) != len([x for x in tar if x.startswith(top)]): + raise Exception("bad-prefix:", tar) return " ".join([x[len(top):] for x in tar]) - def curl(self, url, uname, binary=False): - conn = tu.VHttpConn(self.args, self.asrv, self.log, hdr(url, uname)) + def tarsel(self, url, uname, sel): + url += ("&" if "?" in url else "?") + "tar" + zs = '--XD\r\nContent-Disposition: form-data; name="act"\r\n\r\nzip\r\n--XD\r\nContent-Disposition: form-data; name="files"\r\n\r\n' + zs += "\r\n".join(sel) + '\r\n--XD--\r\n' + zb = zs.encode("utf-8") + hdr = "POST /%s HTTP/1.1\r\nPW: %s\r\nConnection: close\r\nContent-Type: multipart/form-data; boundary=XD\r\nContent-Length: %d\r\n\r\n" + req = (hdr % (url, uname, len(zb))).encode("utf-8") + zb + h, b = self.curl("/" + url, uname, True, req) + tar = tarfile.open(fileobj=io.BytesIO(b), mode="r|").getnames() + return " ".join(tar) + + def curl(self, url, uname, binary=False, req=b""): + req = req or hdr(url, uname) + conn = tu.VHttpConn(self.args, self.asrv, self.log, req) HttpCli(conn).run() if binary: h, b = conn.s._reply.split(b"\r\n\r\n", 1) diff --git a/tests/util.py b/tests/util.py index 4593b848..afd374af 100644 --- a/tests/util.py +++ b/tests/util.py @@ -146,6 +146,7 @@ class Cfg(Namespace): E=E, dbd="wal", fk_salt="a" * 16, + dk_salt="b" * 16, lang="eng", log_badpwd=1, logout=573, @@ -248,4 +249,4 @@ class VHttpConn(object): self.thumbcli = None self.u2fh = FHC() - self.get_u2idx = self.hsrv.get_u2idx \ No newline at end of file + self.get_u2idx = self.hsrv.get_u2idx
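
to make the dirkey mechanism in this patch easier to follow, here is a minimal standalone sketch of the idea it builds on: derive an unguessable key from `--dk-salt` plus the folder's absolute filesystem path, truncate it to the length given by the `dk` volflag (8 by default), and only let a visitor with just the `g` (get) permission in when the `?k=` query parameter matches. this is NOT copyparty's actual `gen_fk()` / `_use_dirkey()` implementation -- the helper names, the sha512/base64 combination and the salt/path separator below are illustrative assumptions.

```python
# illustrative sketch only; not copyparty's exact key derivation
import base64
import hashlib


def derive_dirkey(dk_salt: str, abs_path: str, dk_len: int = 8) -> str:
    # hash the salt + absolute filesystem path, base64-encode, then truncate;
    # dk_len stands in for the numeric value of the dk volflag (default 8)
    zb = (dk_salt + "\n" + abs_path).encode("utf-8", "replace")
    return base64.urlsafe_b64encode(hashlib.sha512(zb).digest()).decode("ascii")[:dk_len]


def check_dirkey(req_key: str, dk_salt: str, abs_path: str, dk_len: int = 8) -> bool:
    # a 'g'-only visitor is granted read-access to the folder
    # only if the ?k= parameter matches the derived key
    return req_key == derive_dirkey(dk_salt, abs_path, dk_len)


if __name__ == "__main__":
    salt = "b" * 16  # same shape as the dk_salt used in tests/util.py
    key = derive_dirkey(salt, "/srv/share/photos")
    print("share this link: /share/photos?k=" + key)
    print("key accepted:", check_dirkey(key, salt, "/srv/share/photos"))
```

note how the key only depends on the salt and the folder's path, which is exactly the limitation listed under the new dirkeys section in README.md: editing the folder's contents does not rotate the key, but renaming the folder or changing `--dk-salt` does.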