unsuccessful attempt at dirkeys (#64)
This commit is contained in: parent 0c50ea1757, commit 10bc2d9205

README.md (16 lines changed)
@@ -91,6 +91,7 @@ turn almost any device into a file server with resumable uploads/downloads using
 * [gotchas](#gotchas) - behavior that might be unexpected
 * [cors](#cors) - cross-site request config
 * [filekeys](#filekeys) - prevent filename bruteforcing
+* [dirkeys](#dirkeys) - share specific folders in a volume
 * [password hashing](#password-hashing) - you can hash passwords
 * [https](#https) - both HTTP and HTTPS are accepted
 * [recovering from crashes](#recovering-from-crashes)

@@ -1767,12 +1768,25 @@ cors can be configured with `--acao` and `--acam`, or the protections entirely d

 prevent filename bruteforcing

-volflag `c,fk` generates filekeys (per-file accesskeys) for all files; users which have full read-access (permission `r`) will then see URLs with the correct filekey `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404
+volflag `fk` generates filekeys (per-file accesskeys) for all files; users which have full read-access (permission `r`) will then see URLs with the correct filekey `?k=...` appended to the end, and `g` users must provide that URL including the correct key to avoid a 404

 by default, filekeys are generated based on salt (`--fk-salt`) + filesystem-path + file-size + inode (if not windows); add volflag `fka` to generate slightly weaker filekeys which will not be invalidated if the file is edited (only salt + path)

 permissions `wG` (write + upget) lets users upload files and receive their own filekeys, still without being able to see other uploads

+### dirkeys
+
+share specific folders in a volume without giving away full read-access to the rest -- the visitor only needs the `g` (get) permission to view the link
+
+volflag `dk` generates dirkeys (per-directory accesskeys) for all folders, granting read-access to that folder; by default only that folder itself, no subfolders
+
+volflag `dks` lets people enter subfolders as well, and also enables download-as-zip/tar
+
+dirkeys are generated based on another salt (`--dk-salt`) + filesystem-path and have a few limitations:
+* the key does not change if the contents of the folder is modified
+* if you need a new dirkey, either change the salt or rename the folder
+* linking to a textfile (so it opens in the textfile viewer) is not possible if recipient doesn't have read-access
+

 ## password hashing

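(editor's note, not part of the diff) As a rough illustration of the dirkey idea described above -- this is a hedged sketch only, not copyparty's actual gen_fk algorithm; the hash and encoding are assumptions -- a per-directory key is just a salted digest of the folder's filesystem path, truncated to the volume's `dk` length and appended to the link as `?k=...`:

    # hedged sketch; copyparty really derives the key via gen_fk(2, --dk-salt, abspath, 0, 0)
    import base64, hashlib

    def sketch_dirkey(dk_salt, abspath, dk_len=8):
        digest = hashlib.sha512((dk_salt + "\n" + abspath).encode("utf-8")).digest()
        return base64.urlsafe_b64encode(digest).decode("ascii")[:dk_len]

    # a visitor holding only the `g` permission would open something like
    #   https://example.com/photos/?k=<key>   (hostname and path are made up)
    print(sketch_dirkey("my-dk-salt", "/srv/photos"))

Enabling the feature is an ordinary volflag; the test added later in this commit uses volume definitions such as `.::r.,u1:g,u2:c,dk`, i.e. user u1 gets full read while u2 only gets `g` plus dirkeys.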
@@ -261,6 +261,19 @@ def get_fk_salt() -> str:
     return ret.decode("utf-8")

+
+def get_dk_salt() -> str:
+    fp = os.path.join(E.cfg, "dk-salt.txt")
+    try:
+        with open(fp, "rb") as f:
+            ret = f.read().strip()
+    except:
+        ret = base64.b64encode(os.urandom(30))
+        with open(fp, "wb") as f:
+            f.write(ret + b"\n")
+
+    return ret.decode("utf-8")
+

 def get_ah_salt() -> str:
     fp = os.path.join(E.cfg, "ah-salt.txt")
     try:

@@ -1086,13 +1099,14 @@ def add_safety(ap):
     ap2.add_argument("--acam", metavar="V[,V]", type=u, default="GET,HEAD", help="Access-Control-Allow-Methods; list of methods to accept from offsite ('*' behaves like \033[33m--acao\033[0m's description)")


-def add_salt(ap, fk_salt, ah_salt):
+def add_salt(ap, fk_salt, dk_salt, ah_salt):
     ap2 = ap.add_argument_group('salting options')
     ap2.add_argument("--ah-alg", metavar="ALG", type=u, default="none", help="account-pw hashing algorithm; one of these, best to worst: \033[32margon2 scrypt sha2 none\033[0m (each optionally followed by alg-specific comma-sep. config)")
     ap2.add_argument("--ah-salt", metavar="SALT", type=u, default=ah_salt, help="account-pw salt; ignored if \033[33m--ah-alg\033[0m is none (default)")
     ap2.add_argument("--ah-gen", metavar="PW", type=u, default="", help="generate hashed password for \033[33mPW\033[0m, or read passwords from STDIN if \033[33mPW\033[0m is [\033[32m-\033[0m]")
     ap2.add_argument("--ah-cli", action="store_true", help="launch an interactive shell which hashes passwords without ever storing or displaying the original passwords")
     ap2.add_argument("--fk-salt", metavar="SALT", type=u, default=fk_salt, help="per-file accesskey salt; used to generate unpredictable URLs for hidden files")
+    ap2.add_argument("--dk-salt", metavar="SALT", type=u, default=dk_salt, help="per-directory accesskey salt; used to generate unpredictable URLs to share folders with users who only have the 'get' permission")
     ap2.add_argument("--warksalt", metavar="SALT", type=u, default="hunter2", help="up2k file-hash salt; serves no purpose, no reason to change this (but delete all databases if you do)")


@@ -1270,6 +1284,7 @@ def run_argparse(
     cert_path = os.path.join(E.cfg, "cert.pem")

     fk_salt = get_fk_salt()
+    dk_salt = get_dk_salt()
     ah_salt = get_ah_salt()

     # alpine peaks at 5 threads for some reason,

@@ -1299,7 +1314,7 @@ def run_argparse(
     add_webdav(ap)
     add_smb(ap)
     add_safety(ap)
-    add_salt(ap, fk_salt, ah_salt)
+    add_salt(ap, fk_salt, dk_salt, ah_salt)
     add_optouts(ap)
     add_shutdown(ap)
     add_yolo(ap)

@@ -551,7 +551,12 @@ class VFS(object):
         # no vfs nodes in the list of real inodes
         real = [x for x in real if x[0] not in self.nodes]

+        dbv = self.dbv or self
         for name, vn2 in sorted(self.nodes.items()):
+            if vn2.dbv == dbv and self.flags.get("dk"):
+                virt_vis[name] = vn2
+                continue
+
             ok = False
             zx = vn2.axs
             axs = [zx.uread, zx.uwrite, zx.umove, zx.udel, zx.uget]

@@ -1406,6 +1411,13 @@ class AuthSrv(object):
                 vol.flags["fk"] = int(fk) if fk is not True else 8
                 have_fk = True

+            dk = vol.flags.get("dk")
+            dks = vol.flags.get("dks")
+            if dks and not dk:
+                dk = dks
+            if dk:
+                vol.flags["dk"] = int(dk) if dk is not True else 8
+
         if have_fk and re.match(r"^[0-9\.]+$", self.args.fk_salt):
             self.log("filekey salt: {}".format(self.args.fk_salt))

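(editor's note, not part of the diff) The AuthSrv hunk above normalizes the volflags: `dks` implies `dk`, a bare flag becomes the default key length of 8, and a numeric value is kept as the truncation length. A stand-alone sketch of that rule, assumed equivalent to the code path above:

    # hedged sketch of the dk/dks normalization added in AuthSrv
    def norm_dirkey_flags(flags):
        dk = flags.get("dk")
        dks = flags.get("dks")
        if dks and not dk:
            dk = dks                    # dks alone also turns dirkeys on
        if dk:
            flags["dk"] = int(dk) if dk is not True else 8   # default key length 8
        return flags

    print(norm_dirkey_flags({"dks": True}))   # {'dks': True, 'dk': 8}
    print(norm_dirkey_flags({"dk": "16"}))    # {'dk': 16}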
@@ -498,7 +498,7 @@ class HttpCli(object):
             self.can_admin,
             self.can_dot,
         ) = (
-            avn.can_access("", self.uname) if avn else [False] * 7
+            avn.can_access("", self.uname) if avn else [False] * 8
         )
         self.avn = avn
         self.vn = vn

@@ -1894,7 +1894,12 @@ class HttpCli(object):

         v = self.uparam[k]

-        vn, rem = self.asrv.vfs.get(self.vpath, self.uname, True, False)
+        if self._use_dirkey():
+            vn = self.vn
+            rem = self.rem
+        else:
+            vn, rem = self.asrv.vfs.get(self.vpath, self.uname, True, False)

         zs = self.parser.require("files", 1024 * 1024)
         if not zs:
             raise Pebkac(422, "need files list")

@@ -2770,6 +2775,27 @@ class HttpCli(object):

         return file_lastmod, True

+    def _use_dirkey(self, ap: str = "") -> bool:
+        if self.can_read or not self.can_get:
+            return False
+
+        req = self.uparam.get("k") or ""
+        if not req:
+            return False
+
+        dk_len = self.vn.flags.get("dk")
+        if not dk_len:
+            return False
+
+        ap = ap or self.vn.canonical(self.rem)
+        zs = self.gen_fk(2, self.args.dk_salt, ap, 0, 0)[:dk_len]
+        if req == zs:
+            return True
+
+        t = "wrong dirkey, want %s, got %s\n vp: %s\n ap: %s"
+        self.log(t % (zs, req, self.req, ap), 6)
+        return False
+
     def _expand(self, txt: str, phs: list[str]) -> str:
         for ph in phs:
             if ph.startswith("hdr."):
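(editor's note, not part of the diff) In short, `_use_dirkey` grants directory access to a `g`-only visitor exactly when the `?k=` parameter equals the key re-derived from `--dk-salt` and the folder's absolute path, truncated to the volume's `dk` length. A compact sketch of that gate, with a stand-in `derive()` for whatever gen_fk actually computes:

    # hedged sketch of the gate; derive() is a placeholder, not HttpCli.gen_fk()
    def use_dirkey(can_read, can_get, req_k, dk_len, dk_salt, abspath, derive):
        if can_read or not can_get:
            return False    # full readers need no key; users without `g` never qualify
        if not req_k or not dk_len:
            return False
        return req_k == derive(dk_salt, abspath)[:dk_len]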
@@ -3446,7 +3472,7 @@ class HttpCli(object):

            dst = dst[len(top) + 1 :]

-        ret = self.gen_tree(top, dst)
+        ret = self.gen_tree(top, dst, self.uparam.get("k", ""))
         if self.is_vproxied:
             parents = self.args.R.split("/")
             for parent in reversed(parents):

@@ -3456,18 +3482,25 @@ class HttpCli(object):
         self.reply(zs.encode("utf-8"), mime="application/json")
         return True

-    def gen_tree(self, top: str, target: str) -> dict[str, Any]:
+    def gen_tree(self, top: str, target: str, dk: str) -> dict[str, Any]:
         ret: dict[str, Any] = {}
         excl = None
         if target:
             excl, target = (target.split("/", 1) + [""])[:2]
-            sub = self.gen_tree("/".join([top, excl]).strip("/"), target)
+            sub = self.gen_tree("/".join([top, excl]).strip("/"), target, dk)
             ret["k" + quotep(excl)] = sub

         vfs = self.asrv.vfs
+        dk_sz = False
+        if dk:
+            vn, rem = vfs.get(top, self.uname, False, False)
+            if vn.flags.get("dks") and self._use_dirkey(vn.canonical(rem)):
+                dk_sz = vn.flags.get("dk")
+
         dots = False
+        fsroot = ""
         try:
-            vn, rem = vfs.get(top, self.uname, True, False)
+            vn, rem = vfs.get(top, self.uname, not dk_sz, False)
             fsroot, vfs_ls, vfs_virt = vn.ls(
                 rem,
                 self.uname,

@@ -3483,15 +3516,20 @@ class HttpCli(object):
                 if d1 == top:
                     vfs_virt[d2] = vfs # typechk, value never read

-        dirs = []
+        dirs = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]

-        dirnames = [x[0] for x in vfs_ls if stat.S_ISDIR(x[1].st_mode)]
-
         if not dots or "dots" not in self.uparam:
-            dirnames = exclude_dotfiles(dirnames)
+            dirs = exclude_dotfiles(dirs)

-        for fn in [x for x in dirnames if x != excl]:
-            dirs.append(quotep(fn))
+        dirs = [quotep(x) for x in dirs if x != excl]
+
+        if dk_sz and fsroot:
+            kdirs = []
+            for dn in dirs:
+                ap = os.path.join(fsroot, dn)
+                zs = self.gen_fk(2, self.args.dk_salt, ap, 0, 0)[:dk_sz]
+                kdirs.append(dn + "?k=" + zs)
+            dirs = kdirs

         for x in vfs_virt:
             if x != excl:

@@ -3744,6 +3782,7 @@ class HttpCli(object):
         self.out_headers.pop("X-Robots-Tag", None)

         is_dir = stat.S_ISDIR(st.st_mode)
+        is_dk = False
         fk_pass = False
         icur = None
         if is_dir and (e2t or e2d):

@@ -3751,47 +3790,48 @@ class HttpCli(object):
            if idx and hasattr(idx, "p_end"):
                icur = idx.get_cur(dbv.realpath)

-        if self.can_read:
-            th_fmt = self.uparam.get("th")
-            if th_fmt is not None:
-                if is_dir:
-                    vrem = vrem.rstrip("/")
-                    if icur and vrem:
-                        q = "select fn from cv where rd=? and dn=?"
-                        crd, cdn = vrem.rsplit("/", 1) if "/" in vrem else ("", vrem)
-                        # no mojibake support:
-                        try:
-                            cfn = icur.execute(q, (crd, cdn)).fetchone()
-                            if cfn:
-                                fn = cfn[0]
-                                fp = os.path.join(abspath, fn)
-                                if bos.path.exists(fp):
-                                    vrem = "{}/{}".format(vrem, fn).strip("/")
-                                    is_dir = False
-                        except:
-                            pass
-                    else:
-                        for fn in self.args.th_covers:
+        th_fmt = self.uparam.get("th")
+        if th_fmt is not None and (
+            self.can_read or (self.can_get and vn.flags.get("dk"))
+        ):
+            if is_dir:
+                vrem = vrem.rstrip("/")
+                if icur and vrem:
+                    q = "select fn from cv where rd=? and dn=?"
+                    crd, cdn = vrem.rsplit("/", 1) if "/" in vrem else ("", vrem)
+                    # no mojibake support:
+                    try:
+                        cfn = icur.execute(q, (crd, cdn)).fetchone()
+                        if cfn:
+                            fn = cfn[0]
                             fp = os.path.join(abspath, fn)
                             if bos.path.exists(fp):
                                 vrem = "{}/{}".format(vrem, fn).strip("/")
                                 is_dir = False
-                                break
+                    except:
+                        pass
+                else:
+                    for fn in self.args.th_covers:
+                        fp = os.path.join(abspath, fn)
+                        if bos.path.exists(fp):
+                            vrem = "{}/{}".format(vrem, fn).strip("/")
+                            is_dir = False
+                            break

            if is_dir:
                return self.tx_ico("a.folder")

            thp = None
            if self.thumbcli:
                thp = self.thumbcli.get(dbv, vrem, int(st.st_mtime), th_fmt)

            if thp:
                return self.tx_file(thp)

            if th_fmt == "p":
                raise Pebkac(404)

            return self.tx_ico(rem)

        elif self.can_get and self.avn:
            axs = self.avn.axs

@@ -3835,7 +3875,8 @@ class HttpCli(object):
                )[: vn.flags["fk"]]
                got = self.uparam.get("k")
                if got != correct:
-                    self.log("wrong filekey, want {}, got {}".format(correct, got))
+                    t = "wrong filekey, want %s, got %s\n vp: %s\n ap: %s"
+                    self.log(t % (correct, got, self.req, abspath), 6)
                    return self.tx_404()

            if (

@@ -3851,8 +3892,11 @@ class HttpCli(object):

            return self.tx_file(abspath)

-        elif is_dir and not self.can_read and not self.can_write:
-            return self.tx_404(True)
+        elif is_dir and not self.can_read:
+            if self._use_dirkey(abspath):
+                is_dk = True
+            elif not self.can_write:
+                return self.tx_404(True)

        srv_info = []

@@ -3874,7 +3918,7 @@ class HttpCli(object):
        srv_infot = "</span> // <span>".join(srv_info)

        perms = []
-        if self.can_read:
+        if self.can_read or is_dk:
            perms.append("read")
        if self.can_write:
            perms.append("write")

@@ -3999,7 +4043,7 @@ class HttpCli(object):
        if not self.conn.hsrv.prism:
            j2a["no_prism"] = True

-        if not self.can_read:
+        if not self.can_read and not is_dk:
            if is_ls:
                return self.tx_ls(ls_ret)

@@ -4052,8 +4096,12 @@ class HttpCli(object):
        ):
            ls_names = exclude_dotfiles(ls_names)

+        add_dk = vf.get("dk")
        add_fk = vf.get("fk")
        fk_alg = 2 if "fka" in vf else 1
+        if add_dk:
+            zs = self.gen_fk(2, self.args.dk_salt, abspath, 0, 0)[:add_dk]
+            ls_ret["dk"] = cgv["dk"] = zs

        dirs = []
        files = []

@@ -4081,6 +4129,12 @@ class HttpCli(object):
                href += "/"
                if self.args.no_zip:
                    margin = "DIR"
+                elif add_dk:
+                    zs = absreal(fspath)
+                    margin = '<a href="%s?k=%s&zip" rel="nofollow">zip</a>' % (
+                        quotep(href),
+                        self.gen_fk(2, self.args.dk_salt, zs, 0, 0)[:add_dk],
+                    )
                else:
                    margin = '<a href="%s?zip" rel="nofollow">zip</a>' % (quotep(href),)
            elif fn in hist:

@@ -4121,6 +4175,11 @@ class HttpCli(object):
                        0 if ANYWIN else inf.st_ino,
                    )[:add_fk],
                )
+            elif add_dk and is_dir:
+                href = "%s?k=%s" % (
+                    quotep(href),
+                    self.gen_fk(2, self.args.dk_salt, fspath, 0, 0)[:add_dk],
+                )
            else:
                href = quotep(href)

@@ -4139,6 +4198,9 @@ class HttpCli(object):
                files.append(item)
            item["rd"] = rem

+        if is_dk and not vf.get("dks"):
+            dirs = []
+
        if (
            self.cookies.get("idxh") == "y"
            and "ls" not in self.uparam

@@ -392,8 +392,7 @@ window.baguetteBox = (function () {
    }

    function dlpic() {
-        var url = findfile()[3].href;
-        url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache';
+        var url = addq(findfile()[3].href, 'cache');
        dl_file(url);
    }

@@ -641,7 +640,7 @@ window.baguetteBox = (function () {
            options.captions.call(currentGallery, imageElement) :
            imageElement.getAttribute('data-caption') || imageElement.title;

-        imageSrc += imageSrc.indexOf('?') < 0 ? '?cache' : '&cache';
+        imageSrc = addq(imageSrc, 'cache');

        if (is_vid && index != currentIndex)
            return; // no preload

@@ -1355,7 +1355,7 @@ function set_files_html(html) {
var ACtx = !IPHONE && (window.AudioContext || window.webkitAudioContext),
    noih = /[?&]v\b/.exec('' + location),
    hash0 = location.hash,
-    mp;
+    dk, mp;


var mpl = (function () {

@@ -1475,7 +1475,7 @@ var mpl = (function () {
        if (!c)
            return url;

-        return url + (url.indexOf('?') < 0 ? '?' : '&') + 'th=' + (can_ogg ? 'opus' : 'caf');
+        return addq(url, 'th=') + (can_ogg ? 'opus' : 'caf');
    };

    r.pp = function () {

@@ -1526,7 +1526,7 @@ var mpl = (function () {
        }

        if (cover) {
-            cover += (cover.indexOf('?') === -1 ? '?' : '&') + 'th=j';
+            cover = addq(cover, 'th=j');
            tags.artwork = [{ "src": cover, type: "image/jpeg" }];
        }
    }

@@ -1616,8 +1616,8 @@ function MPlayer() {
        link = tds[1].getElementsByTagName('a');

        link = link[link.length - 1];
-        var url = noq_href(link),
-            m = re_audio.exec(url);
+        var url = link.getAttribute('href'),
+            m = re_audio.exec(url.split('?')[0]);

        if (m) {
            var tid = link.getAttribute('id');

@@ -1711,8 +1711,7 @@ function MPlayer() {
    }

    r.preload = function (url, full) {
-        url = mpl.acode(url);
-        url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=987';
+        url = addq(mpl.acode(url), 'cache=987');
        mpl.preload_url = full ? url : null;
        var t0 = Date.now();

@@ -2302,8 +2301,7 @@ function dl_song() {
        return toast.inf(10, L.f_dls);
    }

-    var url = mp.tracks[mp.au.tid];
-    url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=987';
+    var url = addq(mp.tracks[mp.au.tid], 'cache=987');
    dl_file(url);
}

@@ -2952,8 +2950,7 @@ function play(tid, is_ev, seek) {
        widget.open();
    }

-    var url = mpl.acode(mp.tracks[tid]);
-    url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=987';
+    var url = addq(mpl.acode(mp.tracks[tid]), 'cache=987');

    if (mp.au.rsrc == url)
        mp.au.currentTime = 0;

@@ -4100,7 +4097,7 @@ var showfile = (function () {
    };

    r.active = function () {
-        return location.search.indexOf('doc=') + 1;
+        return !!/[?&]doc=/.exec(location.search);
    };

    r.getlang = function (fn) {

@@ -4141,12 +4138,15 @@ var showfile = (function () {
    };

    r.show = function (url, no_push) {
-        var xhr = new XHR();
+        var xhr = new XHR(),
+            m = /[?&](k=[^&]+)/.exec(url);
+
+        url = url.split('?')[0] + (m ? '?' + m[1] : '');
        xhr.url = url;
        xhr.fname = uricom_dec(url.split('/').pop());
        xhr.no_push = no_push;
        xhr.ts = Date.now();
-        xhr.open('GET', url.split('?')[0], true);
+        xhr.open('GET', url, true);
        xhr.onprogress = loading;
        xhr.onload = xhr.onerror = load_cb;
        xhr.send();

@@ -4184,14 +4184,14 @@ var showfile = (function () {
        var url = doc[0],
            lnh = doc[1],
            txt = doc[2],
-            name = url.split('/').pop(),
+            name = url.split('?')[0].split('/').pop(),
            tname = uricom_dec(name),
            lang = r.getlang(name),
            is_md = lang == 'md';

        ebi('files').style.display = ebi('gfiles').style.display = ebi('lazy').style.display = ebi('pro').style.display = ebi('epi').style.display = 'none';
        ebi('dldoc').setAttribute('href', url);
-        ebi('editdoc').setAttribute('href', url + (url.indexOf('?') > 0 ? '&' : '?') + 'edit');
+        ebi('editdoc').setAttribute('href', addq(url, 'edit'));
        ebi('editdoc').style.display = (has(perms, 'write') && (is_md || has(perms, 'delete'))) ? '' : 'none';

        var wr = ebi('bdoc'),

@@ -4242,7 +4242,7 @@ var showfile = (function () {
        wintitle(tname + ' \u2014 ');
        document.documentElement.scrollTop = 0;
        var hfun = no_push ? hist_replace : hist_push;
-        hfun(get_evpath() + '?doc=' + url.split('/').pop());
+        hfun(get_evpath() + '?doc=' + name); // can't dk: server wants dk and js needs fk

        qsr('#docname');
        el = mknod('span', 'docname');

@@ -4441,7 +4441,7 @@ var thegrid = (function () {
            if (!force)
                return;

-            hist_push(get_evpath());
+            hist_push(get_evpath() + (dk ? '?k=' + dk : ''));
            wintitle();
        }

@@ -4666,10 +4666,10 @@ var thegrid = (function () {
                ref = ao.getAttribute('id'),
                isdir = href.endsWith('/'),
                ac = isdir ? ' class="dir"' : '',
-                ihref = href;
+                ihref = ohref;

            if (r.thumbs) {
-                ihref += '?th=' + (have_webp ? 'w' : 'j');
+                ihref = addq(ihref, 'th=') + (have_webp ? 'w' : 'j');
                if (r.full)
                    ihref += 'f'
                if (href == "#")

@@ -4703,7 +4703,7 @@ var thegrid = (function () {
                }
                ihref = SR + '/.cpr/ico/' + ext;
            }
-            ihref += (ihref.indexOf('?') > 0 ? '&' : '?') + 'cache=i';
+            ihref = addq(ihref, 'cache=i');

            html.push('<a href="' + ohref + '" ref="' + ref +
                '"' + ac + ' ttt="' + esc(name) + '"><img style="height:' +

@@ -5722,12 +5722,15 @@ var treectl = (function () {
    };

    function get_tree(top, dst, rst) {
-        var xhr = new XHR();
+        var xhr = new XHR(),
+            m = /[?&](k=[^&]+)/.exec(dst),
+            k = m ? '&' + m[1] : dk ? '&k=' + dk : '';
+
        xhr.top = top;
        xhr.dst = dst;
        xhr.rst = rst;
        xhr.ts = Date.now();
-        xhr.open('GET', dst + '?tree=' + top + (r.dots ? '&dots' : ''), true);
+        xhr.open('GET', addq(dst, 'tree=') + top + (r.dots ? '&dots' : '') + k, true);
        xhr.onload = xhr.onerror = recvtree;
        xhr.send();
        enspin('#tree');

@@ -5804,14 +5807,20 @@ var treectl = (function () {

    function reload_tree() {
        var cdir = r.nextdir || get_vpath(),
+            cevp = get_evpath(),
            links = QSA('#treeul a+a'),
            nowrap = QS('#tree.nowrap') && QS('#hovertree.on'),
            act = null;

        for (var a = 0, aa = links.length; a < aa; a++) {
-            var href = uricom_dec(links[a].getAttribute('href')),
+            var qhref = links[a].getAttribute('href'),
+                ehref = qhref.split('?')[0],
+                href = uricom_dec(ehref),
                cl = '';

+            if (dk && ehref == cevp && !/[?&]k=/.exec(qhref))
+                links[a].setAttribute('href', addq(qhref, 'k=') + dk);
+
            if (href == cdir) {
                act = links[a];
                cl = 'hl';

@@ -5904,12 +5913,15 @@ var treectl = (function () {
    }

    r.reqls = function (url, hpush, back) {
-        var xhr = new XHR();
+        var xhr = new XHR(),
+            m = /[?&](k=[^&]+)/.exec(url),
+            k = m ? '&' + m[1] : dk ? '&k=' + dk : '';
+
        xhr.top = url.split('?')[0];
        xhr.back = back
        xhr.hpush = hpush;
        xhr.ts = Date.now();
-        xhr.open('GET', xhr.top + '?ls' + (r.dots ? '&dots' : ''), true);
+        xhr.open('GET', xhr.top + '?ls' + (r.dots ? '&dots' : '') + k, true);
        xhr.onload = xhr.onerror = recvls;
        xhr.send();

@@ -5967,6 +5979,7 @@ var treectl = (function () {

        read_dsort(res.dsort);
        dfull = res.dfull;
+        dk = res.dk;

        srvinf = res.srvinf;
        try {

@@ -5975,14 +5988,14 @@ var treectl = (function () {
        catch (ex) { }

        if (this.hpush && !showfile.active())
-            hist_push(this.top);
+            hist_push(this.top + (dk ? '?k=' + dk : ''));

        if (!this.back) {
            var dirs = [];
            for (var a = 0; a < res.dirs.length; a++)
                dirs.push(res.dirs[a].href.split('/')[0].split('?')[0]);

-            rendertree({ "a": dirs }, this.ts, ".", get_evpath());
+            rendertree({ "a": dirs }, this.ts, ".", get_evpath() + (dk ? '?k=' + dk : ''));
        }

        r.gentab(this.top, res);

@@ -6083,7 +6096,7 @@ var treectl = (function () {
            if (lang) {
                showfile.files.push({ 'id': id, 'name': fname });
                if (lang == 'md')
-                    tn.href += tn.href.indexOf('?') < 0 ? '?v' : '&v';
+                    tn.href = addq(tn.href, 'v');
            }

            if (tn.lead == '-')

@@ -6159,7 +6172,7 @@ var treectl = (function () {
            url = url.href;
        var mt = m[0] == 'a' ? 'audio' : /\.(webm|mkv)($|\?)/i.exec(url) ? 'video' : 'image'
        if (mt == 'image') {
-            url += url.indexOf('?') < 0 ? '?cache' : '&cache';
+            url = addq(url, 'cache');
            console.log(url);
            new Image().src = url;
        }

@@ -6262,7 +6275,9 @@ var treectl = (function () {
        keys.sort(function (a, b) { return a.localeCompare(b); });
        for (var a = 0; a < keys.length; a++) {
            var kk = keys[a],
-                ks = kk.slice(1),
+                m = /(\?k=[^\n]+)/.exec(kk),
+                kdk = m ? m[1] : '',
+                ks = kk.replace(kdk, '').slice(1),
                ded = ks.endsWith('\n'),
                k = uricom_sdec(ded ? ks.replace(/\n$/, '') : ks),
                hek = esc(k[0]),

@@ -6270,7 +6285,7 @@ var treectl = (function () {
                url = '/' + (top ? top + uek : uek) + '/',
                sym = res[kk] ? '-' : '+',
                link = '<a href="#">' + sym + '</a><a href="' +
-                    url + '">' + hek + '</a>';
+                    url + kdk + '">' + hek + '</a>';

            if (res[kk]) {
                var subtree = parsetree(res[kk], url.slice(1));

@@ -6311,16 +6326,24 @@ var treectl = (function () {
        if (!e.state)
            return;

-        var url = new URL(e.state, "https://" + document.location.host);
-        var hbase = url.pathname;
-        var cbase = document.location.pathname;
-        if (url.search.indexOf('doc=') + 1 && hbase == cbase)
+        var url = new URL(e.state, "https://" + location.host),
+            req = url.pathname,
+            hbase = req,
+            cbase = location.pathname,
+            mdoc = /[?&]doc=/.exec('' + url),
+            mdk = /[?&](k=[^&]+)/.exec('' + url);
+
+        if (mdoc && hbase == cbase)
            return showfile.show(hbase + showfile.sname(url.search), true);

-        r.goto(url.pathname, false, true);
+        if (mdk)
+            req += '?' + mdk[1];
+
+        r.goto(req, false, true);
    };

-    hist_replace(get_evpath() + location.hash);
+    var evp = get_evpath() + (dk ? '?k=' + dk : '');
+    hist_replace(evp + location.hash);
    r.onscroll = onscroll;
    return r;
})();

@@ -6945,11 +6968,11 @@ var arcfmt = (function () {
            if (!/^(zip|tar|pax|tgz|txz)$/.exec(txt))
                continue;

-            var ofs = href.lastIndexOf('?');
-            if (ofs < 0)
+            var m = /(.*[?&])(tar|zip)([^&]*)(.*)$/.exec(href);
+            if (!m)
                throw new Error('missing arg in url');

-            o.setAttribute("href", href.slice(0, ofs + 1) + arg);
+            o.setAttribute("href", m[1] + arg + m[4]);
            o.textContent = fmt.split('_')[0];
        }
        ebi('selzip').textContent = fmt.split('_')[0];

@@ -7012,13 +7035,20 @@ var msel = (function () {
            vbase = get_evpath();

        for (var a = 0, aa = links.length; a < aa; a++) {
-            var href = noq_href(links[a]).replace(/\/$/, ""),
+            var qhref = links[a].getAttribute('href'),
+                href = qhref.split('?')[0].replace(/\/$/, ""),
                item = {};

            item.id = links[a].getAttribute('id');
            item.sel = clgot(links[a].closest('tr'), 'sel');
            item.vp = href.indexOf('/') !== -1 ? href : vbase + href;

+            if (dk) {
+                var m = /[?&](k=[^&]+)/.exec(qhref);
+                item.q = m ? '?' + m[1] : '';
+            }
+            else item.q = '';
+
            r.all.push(item);
            if (item.sel)
                r.sel.push(item);

@@ -7135,6 +7165,9 @@ var msel = (function () {
            frm = mknod('form'),
            txt = [];

+        if (dk)
+            arg += '&k=' + dk;
+
        for (var a = 0; a < sel.length; a++)
            txt.push(vsplit(sel[a].vp)[1]);

@@ -7159,7 +7192,7 @@ var msel = (function () {
        ev(e);
        var sel = r.getsel();
        for (var a = 0; a < sel.length; a++)
-            dl_file(sel[a].vp);
+            dl_file(sel[a].vp + sel[a].q);
    };
    r.render = function () {
        var tds = QSA('#files tbody td+td+td'),

@@ -7635,7 +7668,7 @@ var unpost = (function () {

    function linklist() {
        var ret = [],
-            base = document.location.origin.replace(/\/$/, '');
+            base = location.origin.replace(/\/$/, '');

        for (var a = 0; a < r.files.length; a++)
            ret.push(base + r.files[a].vp);

@@ -7812,8 +7845,9 @@ ebi('files').onclick = ebi('docul').onclick = function (e) {
    tgt = e.target.closest('a[hl]');
    if (tgt) {
        var a = ebi(tgt.getAttribute('hl')),
+            href = a.getAttribute('href'),
            fun = function () {
-                showfile.show(noq_href(a), tgt.getAttribute('lang'));
+                showfile.show(href, tgt.getAttribute('lang'));
            },
            szs = ft2dict(a.closest('tr'))[0].sz,
            sz = parseInt(szs.replace(/[, ]/g, ''));

@@ -681,6 +681,11 @@ function vjoin(p1, p2) {
}


+function addq(url, q) {
+    return url + (url.indexOf('?') < 0 ? '?' : '&') + (q === undefined ? '' : q);
+}
+
+
function uricom_enc(txt, do_fb_enc) {
    try {
        return encodeURIComponent(txt);

@@ -1808,7 +1813,7 @@ function md_thumbs(md) {
        float = has(flags, 'l') ? 'left' : has(flags, 'r') ? 'right' : '';

        if (!/[?&]cache/.exec(url))
-            url += (url.indexOf('?') < 0 ? '?' : '&') + 'cache=i';
+            url = addq(url, 'cache=i');

        md[a] = '<a href="' + url + '" class="mdth mdth' + float.slice(0, 1) + '"><img src="' + url + '&th=w" alt="' + alt + '" /></a>' + md[a].slice(o2 + 1);
    }

@@ -4,6 +4,9 @@ from __future__ import print_function, unicode_literals

 import io
 import os
+import time
+import json
+import pprint
 import shutil
 import tarfile
 import tempfile

@@ -66,7 +69,9 @@ class TestHttpCli(unittest.TestCase):

         self.assertEqual(self.curl("?tar", "x")[1][:17], "\nJ2EOT")

-        # search
+        ##
+        ## search
+
         up2k = Up2k(self)
         u2idx = U2idx(self)
         allvols = list(self.asrv.vfs.all_vols.values())

@@ -91,15 +96,55 @@ class TestHttpCli(unittest.TestCase):
         xe = "a/da/f4 a/f3 f0 t/f1"
         self.assertEqual(x, xe)

+        ##
+        ## dirkeys
+
+        os.mkdir("v")
+        with open("v/f1.txt", "wb") as f:
+            f.write(b"a")
+        os.rename("a", "v/a")
+        os.rename(".b", "v/.b")
+
+        vcfg = [
+            ".::r.,u1:g,u2:c,dk",
+            "v/a:v/a:r.,u1:g,u2:c,dk",
+            "v/.b:v/.b:r.,u1:g,u2:c,dk"
+        ]
+        self.args = Cfg(v=vcfg, a=["u1:u1", "u2:u2"])
+        self.asrv = AuthSrv(self.args, self.log)
+        zj = json.loads(self.curl("?ls", "u1")[1])
+        url = "?k=" + zj["dk"]
+        # should descend into folders, but not other volumes:
+        self.assertEqual(self.tardir(url, "u2"), "f0 t/f1 v/f1.txt")
+
+        zj = json.loads(self.curl("v?ls", "u1")[1])
+        url = "v?k=" + zj["dk"]
+        self.assertEqual(self.tarsel(url, "u2", ["f1.txt", "a", ".b"]), "f1.txt")
+
     def tardir(self, url, uname):
-        h, b = self.curl("/" + url + "?tar", uname, True)
+        top = url.split("?")[0]
+        top = ("top" if not top else top.lstrip(".").split("/")[0]) + "/"
+        url += ("&" if "?" in url else "?") + "tar"
+        h, b = self.curl(url, uname, True)
         tar = tarfile.open(fileobj=io.BytesIO(b), mode="r|").getnames()
-        top = ("top" if not url else url.lstrip(".").split("/")[0]) + "/"
-        assert len(tar) == len([x for x in tar if x.startswith(top)])
+        if len(tar) != len([x for x in tar if x.startswith(top)]):
+            raise Exception("bad-prefix:", tar)
         return " ".join([x[len(top):] for x in tar])

-    def curl(self, url, uname, binary=False):
-        conn = tu.VHttpConn(self.args, self.asrv, self.log, hdr(url, uname))
+    def tarsel(self, url, uname, sel):
+        url += ("&" if "?" in url else "?") + "tar"
+        zs = '--XD\r\nContent-Disposition: form-data; name="act"\r\n\r\nzip\r\n--XD\r\nContent-Disposition: form-data; name="files"\r\n\r\n'
+        zs += "\r\n".join(sel) + '\r\n--XD--\r\n'
+        zb = zs.encode("utf-8")
+        hdr = "POST /%s HTTP/1.1\r\nPW: %s\r\nConnection: close\r\nContent-Type: multipart/form-data; boundary=XD\r\nContent-Length: %d\r\n\r\n"
+        req = (hdr % (url, uname, len(zb))).encode("utf-8") + zb
+        h, b = self.curl("/" + url, uname, True, req)
+        tar = tarfile.open(fileobj=io.BytesIO(b), mode="r|").getnames()
+        return " ".join(tar)
+
+    def curl(self, url, uname, binary=False, req=b""):
+        req = req or hdr(url, uname)
+        conn = tu.VHttpConn(self.args, self.asrv, self.log, req)
         HttpCli(conn).run()
         if binary:
             h, b = conn.s._reply.split(b"\r\n\r\n", 1)

@@ -146,6 +146,7 @@ class Cfg(Namespace):
             E=E,
             dbd="wal",
             fk_salt="a" * 16,
+            dk_salt="b" * 16,
             lang="eng",
             log_badpwd=1,
             logout=573,

@@ -248,4 +249,4 @@ class VHttpConn(object):
         self.thumbcli = None
         self.u2fh = FHC()

         self.get_u2idx = self.hsrv.get_u2idx