more optimizations,
* 5% less cpu load from clients fetching thumbnails
* and slight improvement to up2k stuff

commit 7d64879ba8
parent bb715704b7

copyparty/__init__.py
@@ -19,6 +19,7 @@ if True:
     from typing import Any, Callable

 PY2 = sys.version_info < (3,)
+PY36 = sys.version_info > (3, 6)
 if not PY2:
     unicode: Callable[[Any], str] = str
 else:

copyparty/__main__.py
@@ -27,6 +27,7 @@ from .__init__ import (
     EXE,
     MACOS,
     PY2,
+    PY36,
     VT100,
     WINDOWS,
     E,
@@ -54,6 +55,7 @@ from .util import (
     Daemon,
     align_tab,
     ansi_re,
+    b64enc,
     dedent,
     min_ex,
     pybin,
@@ -267,7 +269,7 @@ def get_fk_salt() -> str:
         with open(fp, "rb") as f:
             ret = f.read().strip()
     except:
-        ret = base64.b64encode(os.urandom(18))
+        ret = b64enc(os.urandom(18))
         with open(fp, "wb") as f:
             f.write(ret + b"\n")
@@ -280,7 +282,7 @@ def get_dk_salt() -> str:
         with open(fp, "rb") as f:
             ret = f.read().strip()
     except:
-        ret = base64.b64encode(os.urandom(30))
+        ret = b64enc(os.urandom(30))
         with open(fp, "wb") as f:
             f.write(ret + b"\n")
@@ -293,7 +295,7 @@ def get_ah_salt() -> str:
         with open(fp, "rb") as f:
             ret = f.read().strip()
     except:
-        ret = base64.b64encode(os.urandom(18))
+        ret = b64enc(os.urandom(18))
         with open(fp, "wb") as f:
             f.write(ret + b"\n")
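
All three salt generators switch from base64.b64encode to the new b64enc helper. The output is byte-identical (b64enc keeps the standard alphabet and only skips a layer of call overhead), so salt files written by older versions keep loading unchanged. A quick standalone check of that equivalence, not part of the diff:

    import base64
    import binascii
    import os

    def b64enc(bs):
        # same bytes as base64.b64encode, one less layer of calls
        return binascii.b2a_base64(bs, newline=False)

    zb = os.urandom(18)
    assert b64enc(zb) == base64.b64encode(zb)
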
@@ -1759,7 +1761,7 @@ def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
         print("error: python2 cannot --smb")
         return

-    if sys.version_info < (3, 6):
+    if not PY36:
        al.no_scandir = True

    if not hasattr(os, "sendfile"):

copyparty/authsrv.py
@@ -855,6 +855,7 @@ class AuthSrv(object):
         self.idp_accs: dict[str, list[str]] = {}  # username->groupnames
         self.idp_usr_gh: dict[str, str] = {}  # username->group-header-value (cache)

+        self.hid_cache: dict[str, str] = {}
         self.mutex = threading.Lock()
         self.reload()
@@ -1550,8 +1551,8 @@ class AuthSrv(object):
             if s_pw:
                 # gotta reuse the "account" for all shares with this pw,
                 # so do a light scramble as this appears in the web-ui
-                zs = ub64enc(hashlib.sha512(s_pw.encode("utf-8")).digest())[4:16]
-                sun = "s_%s" % (zs.decode("utf-8"),)
+                zb = hashlib.sha512(s_pw.encode("utf-8")).digest()
+                sun = "s_%s" % (ub64enc(zb)[4:16].decode("ascii"),)
                 acct[sun] = s_pw
             else:
                 sun = "*"
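
Behavior is unchanged in this hunk: the digest is split out of the oneliner, and the decode becomes "ascii" (base64 output is pure ascii, so the stricter codec costs nothing). The synthetic account name remains a stable 12-character slice of the password's sha512; in isolation, roughly:

    import base64
    import hashlib

    def share_account_name(s_pw):
        # stable pseudonym for a share-password; this shows up in the
        # web-ui, so it must not reveal the password itself
        zb = hashlib.sha512(s_pw.encode("utf-8")).digest()
        return "s_%s" % (base64.urlsafe_b64encode(zb)[4:16].decode("ascii"),)
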
@@ -1656,8 +1657,12 @@ class AuthSrv(object):
         promote = []
         demote = []
         for vol in vfs.all_vols.values():
-            zb = hashlib.sha512(afsenc(vol.realpath)).digest()
-            hid = base64.b32encode(zb).decode("ascii").lower()
+            hid = self.hid_cache.get(vol.realpath)
+            if not hid:
+                zb = hashlib.sha512(afsenc(vol.realpath)).digest()
+                hid = base64.b32encode(zb).decode("ascii").lower()
+                self.hid_cache[vol.realpath] = hid
+
             vflag = vol.flags.get("hist")
             if vflag == "-":
                 pass
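
reload() runs this loop over every volume, and the hist-id is a pure function of vol.realpath, so the sha512+base32 was being recomputed on every config reload; hid_cache now pays that cost once per path. The memoized derivation in isolation (a sketch; utf-8 stands in for copyparty's afsenc):

    import base64
    import hashlib

    hid_cache = {}  # realpath -> hist-id

    def hid_for(realpath):
        hid = hid_cache.get(realpath)
        if not hid:
            zb = hashlib.sha512(realpath.encode("utf-8")).digest()
            hid = base64.b32encode(zb).decode("ascii").lower()
            hid_cache[realpath] = hid
        return hid
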
@@ -2286,7 +2291,7 @@ class AuthSrv(object):
         q = "insert into us values (?,?,?)"
         for uname in self.acct:
             if uname not in ases:
-                sid = ub64enc(os.urandom(blen)).decode("utf-8")
+                sid = ub64enc(os.urandom(blen)).decode("ascii")
                 cur.execute(q, (uname, sid, int(time.time())))
                 ases[uname] = sid
                 n.append(uname)

copyparty/httpcli.py
@@ -2,7 +2,6 @@
 from __future__ import print_function, unicode_literals

 import argparse  # typechk
-import base64
 import calendar
 import copy
 import errno
@@ -58,6 +57,7 @@ from .util import (
     absreal,
     alltrace,
     atomic_move,
+    b64dec,
     exclude_dotfiles,
     formatdate,
     fsenc,
@@ -503,7 +503,7 @@ class HttpCli(object):
         ):
             try:
                 zb = zso.split(" ")[1].encode("ascii")
-                zs = base64.b64decode(zb).decode("utf-8")
+                zs = b64dec(zb).decode("utf-8")
                 # try "pwd", "x:pwd", "pwd:x"
                 for bauth in [zs] + zs.split(":", 1)[::-1]:
                     if bauth in self.asrv.sesa:
|
@ -2506,7 +2506,7 @@ class HttpCli(object):
|
||||||
logpwd = ""
|
logpwd = ""
|
||||||
elif self.args.log_badpwd == 2:
|
elif self.args.log_badpwd == 2:
|
||||||
zb = hashlib.sha512(pwd.encode("utf-8", "replace")).digest()
|
zb = hashlib.sha512(pwd.encode("utf-8", "replace")).digest()
|
||||||
logpwd = "%" + base64.b64encode(zb[:12]).decode("utf-8")
|
logpwd = "%" + ub64enc(zb[:12]).decode("ascii")
|
||||||
|
|
||||||
if pwd != "x":
|
if pwd != "x":
|
||||||
self.log("invalid password: {}".format(logpwd), 3)
|
self.log("invalid password: {}".format(logpwd), 3)
|
||||||
|
@@ -5364,7 +5364,7 @@ class HttpCli(object):
             fmt = vn.flags.get("og_th", "j")
             th_base = ujoin(url_base, quotep(thumb))
             query = "th=%s&cache" % (fmt,)
-            query = ub64enc(query.encode("utf-8")).decode("utf-8")
+            query = ub64enc(query.encode("utf-8")).decode("ascii")
             # discord looks at file extension, not content-type...
             query += "/th.jpg" if "j" in fmt else "/th.webp"
             j2a["og_thumb"] = "%s/.uqe/%s" % (th_base, query)
@@ -5373,7 +5373,7 @@ class HttpCli(object):
         j2a["og_file"] = file
         if og_fn:
             og_fn_q = quotep(og_fn)
-            query = ub64enc(b"raw").decode("utf-8")
+            query = ub64enc(b"raw").decode("ascii")
             query += "/%s" % (og_fn_q,)
             j2a["og_url"] = ujoin(url_base, og_fn_q)
             j2a["og_raw"] = j2a["og_url"] + "/.uqe/" + query

copyparty/httpsrv.py
@@ -1,7 +1,6 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals

-import base64
 import math
 import os
 import re
@@ -75,6 +74,7 @@ from .util import (
     spack,
     start_log_thrs,
     start_stackmon,
+    ub64enc,
 )

 if TYPE_CHECKING:
@@ -543,8 +543,8 @@ class HttpSrv(object):
         except:
             pass

-        v = base64.urlsafe_b64encode(spack(b">xxL", int(v)))
-        self.cb_v = v.decode("ascii")[-4:]
+        # spack gives 4 lsb, take 3 lsb, get 4 ch
+        self.cb_v = ub64enc(spack(b">L", int(v))[1:]).decode("ascii")
         self.cb_ts = time.time()
         return self.cb_v
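
The new oneliner leans on base64 arithmetic: spack(b">L", v) packs the value into 4 big-endian bytes, [1:] keeps the 3 least-significant bytes, and 3 bytes always encode to exactly 4 base64 characters with no padding; that reproduces the same 4-character cache-buster the old code got by encoding 6 padded bytes and slicing. A standalone check, with struct.pack standing in for copyparty's spack:

    import base64
    import struct

    v = 1723900000  # e.g. a unix timestamp
    old = base64.urlsafe_b64encode(struct.pack(">xxL", v)).decode("ascii")[-4:]
    new = base64.urlsafe_b64encode(struct.pack(">L", v)[1:]).decode("ascii")
    assert new == old and len(new) == 4  # 3 bytes -> 4 chars, no "="
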

copyparty/svchub.py
@@ -2,7 +2,6 @@
 from __future__ import print_function, unicode_literals

 import argparse
-import base64
 import errno
 import gzip
 import logging
@@ -67,6 +66,7 @@ from .util import (
     pybin,
     start_log_thrs,
     start_stackmon,
+    ub64enc,
 )

 if TYPE_CHECKING:
@@ -1297,5 +1297,5 @@ class SvcHub(object):
         zs = "{}\n{}".format(VERSIONS, alltrace())
         zb = zs.encode("utf-8", "replace")
         zb = gzip.compress(zb)
-        zs = base64.b64encode(zb).decode("ascii")
+        zs = ub64enc(zb).decode("ascii")
         self.log("stacks", zs)

copyparty/thumbsrv.py
@@ -1,7 +1,6 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals

-import base64
 import hashlib
 import logging
 import os
@@ -27,6 +26,7 @@ from .util import (
     min_ex,
     runcmd,
     statdir,
+    ub64enc,
     vsplit,
     wrename,
     wunlink,
@@ -109,6 +109,9 @@ except:
     HAVE_VIPS = False


+th_dir_cache = {}
+
+
 def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -> str:
     # base16 = 16 = 256
     # b64-lc = 38 = 1444
@@ -122,14 +125,20 @@ def thumb_path(histpath: str, rem: str, mtime: float, fmt: str, ffa: set[str]) -> str:
     if ext in ffa and fmt[:2] in ("wf", "jf"):
         fmt = fmt.replace("f", "")

-    rd += "\n" + fmt
-    h = hashlib.sha512(afsenc(rd)).digest()
-    b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
-    rd = ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64
+    dcache = th_dir_cache
+    rd_key = rd + "\n" + fmt
+    rd = dcache.get(rd_key)
+    if not rd:
+        h = hashlib.sha512(afsenc(rd_key)).digest()
+        b64 = ub64enc(h).decode("ascii")[:24]
+        rd = ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64
+        if len(dcache) > 9001:
+            dcache.clear()
+        dcache[rd_key] = rd

     # could keep original filenames but this is safer re pathlen
     h = hashlib.sha512(afsenc(fn)).digest()
-    fn = base64.urlsafe_b64encode(h).decode("ascii")[:24]
+    fn = ub64enc(h).decode("ascii")[:24]

     if fmt in ("opus", "caf", "mp3"):
         cat = "ac"
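
This hunk is where most of the advertised thumbnail savings come from: thumb_path runs once per thumbnail request, and it was recomputing a sha512 over the directory path every time just to rebuild the same fanout path (the two lowercased lead characters give the 38x38 = 1444-way fanout from the comment above). The cache keys on directory+format, and the crude clear() at 9001 entries bounds memory without any LRU bookkeeping. The pattern in isolation (a sketch; utf-8 stands in for afsenc):

    import base64
    import hashlib

    th_dir_cache = {}  # "dir\nfmt" -> fanout path

    def thumb_dir(rd, fmt):
        rd_key = rd + "\n" + fmt
        path = th_dir_cache.get(rd_key)
        if not path:
            h = hashlib.sha512(rd_key.encode("utf-8")).digest()
            b64 = base64.urlsafe_b64encode(h).decode("ascii")[:24]
            path = ("%s/%s/" % (b64[:2], b64[2:4])).lower() + b64
            if len(th_dir_cache) > 9001:
                th_dir_cache.clear()  # full reset instead of LRU tracking
            th_dir_cache[rd_key] = path
        return path
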

copyparty/up2k.py
@@ -1,7 +1,6 @@
 # coding: utf-8
 from __future__ import print_function, unicode_literals

-import base64
 import errno
 import gzip
 import hashlib
@@ -61,6 +60,7 @@ from .util import (
     sfsenc,
     spack,
     statdir,
+    ub64enc,
     unhumanize,
     vjoin,
     vsplit,
@@ -1156,7 +1156,7 @@ class Up2k(object):
         zsl = [x[len(prefix) :] for x in zsl]
         zsl.sort()
         zb = hashlib.sha1("\n".join(zsl).encode("utf-8", "replace")).digest()
-        vcfg = base64.urlsafe_b64encode(zb[:18]).decode("ascii")
+        vcfg = ub64enc(zb[:18]).decode("ascii")

         c = cur.execute("select v from kv where k = 'volcfg'")
         try:
@@ -1425,7 +1425,7 @@ class Up2k(object):

         zh.update(cv.encode("utf-8", "replace"))
         zh.update(spack(b"<d", cst.st_mtime))
-        dhash = base64.urlsafe_b64encode(zh.digest()[:12]).decode("ascii")
+        dhash = ub64enc(zh.digest()[:12]).decode("ascii")
         sql = "select d from dh where d=? and +h=?"
         try:
             c = db.c.execute(sql, (rd, dhash))
@@ -2431,7 +2431,7 @@ class Up2k(object):
     def _log_sqlite_incompat(self, db_path, t0) -> None:
         txt = t0 or ""
         digest = hashlib.sha512(db_path.encode("utf-8", "replace")).digest()
-        stackname = base64.urlsafe_b64encode(digest[:9]).decode("utf-8")
+        stackname = ub64enc(digest[:9]).decode("ascii")
         stackpath = os.path.join(E.cfg, "stack-%s.txt" % (stackname,))

         t = " the filesystem at %s may not support locking, or is otherwise incompatible with sqlite\n\n %s\n\n"
@@ -4458,8 +4458,7 @@ class Up2k(object):
             rem -= len(buf)

         digest = hashobj.digest()[:33]
-        digest = base64.urlsafe_b64encode(digest)
-        ret.append(digest.decode("utf-8"))
+        ret.append(ub64enc(digest).decode("ascii"))

         return ret, st
@@ -4923,11 +4922,10 @@ def up2k_wark_from_hashlist(salt: str, filesize: int, hashes: list[str]) -> str:
     vstr = "\n".join(values)

     wark = hashlib.sha512(vstr.encode("utf-8")).digest()[:33]
-    wark = base64.urlsafe_b64encode(wark)
-    return wark.decode("ascii")
+    return ub64enc(wark).decode("ascii")


 def up2k_wark_from_metadata(salt: str, sz: int, lastmod: int, rd: str, fn: str) -> str:
     ret = sfsenc("%s\n%d\n%d\n%s\n%s" % (salt, lastmod, sz, rd, fn))
-    ret = base64.urlsafe_b64encode(hashlib.sha512(ret).digest())
+    ret = ub64enc(hashlib.sha512(ret).digest())
     return ("#%s" % (ret.decode("ascii"),))[:44]
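
Both wark (file identifier) builders sit on the up2k indexing hot path; the change is just the codec swap plus dropping a temporary. The [:33] truncation is deliberate: 33 bytes is a multiple of 3, so the base64 comes out at exactly 44 characters with no "=" padding. Roughly (the exact list of values joined into vstr is simplified here):

    import base64
    import hashlib

    def wark_from_hashlist(salt, filesize, hashes):
        # copyparty joins the salt, filesize and chunk-hashes with newlines
        vstr = "\n".join([salt, str(filesize)] + hashes)
        wark = hashlib.sha512(vstr.encode("utf-8")).digest()[:33]
        return base64.urlsafe_b64encode(wark).decode("ascii")  # 44 chars
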

copyparty/util.py
@@ -3,7 +3,7 @@ from __future__ import print_function, unicode_literals

 import argparse
 import base64
-import contextlib
+import binascii
 import errno
 import hashlib
 import hmac
|
@ -30,13 +30,10 @@ from collections import Counter
|
||||||
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network
|
||||||
from queue import Queue
|
from queue import Queue
|
||||||
|
|
||||||
from .__init__ import ANYWIN, EXE, MACOS, PY2, TYPE_CHECKING, VT100, WINDOWS
|
from .__init__ import ANYWIN, EXE, MACOS, PY2, PY36, TYPE_CHECKING, VT100, WINDOWS
|
||||||
from .__version__ import S_BUILD_DT, S_VERSION
|
from .__version__ import S_BUILD_DT, S_VERSION
|
||||||
from .stolen import surrogateescape
|
from .stolen import surrogateescape
|
||||||
|
|
||||||
ub64dec = base64.urlsafe_b64decode
|
|
||||||
ub64enc = base64.urlsafe_b64encode
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
|
@@ -64,7 +61,7 @@ if PY2:


 if sys.version_info >= (3, 7) or (
-    sys.version_info >= (3, 6) and platform.python_implementation() == "CPython"
+    PY36 and platform.python_implementation() == "CPython"
 ):
     ODict = dict
 else:
@@ -212,7 +209,7 @@ else:
     FS_ENCODING = sys.getfilesystemencoding()


-SYMTIME = sys.version_info > (3, 6) and os.utime in os.supports_follow_symlinks
+SYMTIME = PY36 and os.utime in os.supports_follow_symlinks

 META_NOBOTS = '<meta name="robots" content="noindex, nofollow">\n'
@@ -484,6 +481,38 @@ VERSIONS = (
 )


+try:
+    _b64_enc_tl = bytes.maketrans(b'+/', b'-_')
+    _b64_dec_tl = bytes.maketrans(b'-_', b'+/')
+
+    def ub64enc(bs: bytes) -> bytes:
+        x = binascii.b2a_base64(bs, newline=False)
+        return x.translate(_b64_enc_tl)
+
+    def ub64dec(bs: bytes) -> bytes:
+        bs = bs.translate(_b64_dec_tl)
+        return binascii.a2b_base64(bs)
+
+    def b64enc(bs: bytes) -> bytes:
+        return binascii.b2a_base64(bs, newline=False)
+
+    def b64dec(bs: bytes) -> bytes:
+        return binascii.a2b_base64(bs)
+
+    zb = b">>>????"
+    zb2 = base64.urlsafe_b64encode(zb)
+    if zb2 != ub64enc(zb) or zb != ub64dec(zb2):
+        raise Exception("bad smoke")
+
+except Exception as ex:
+    ub64enc = base64.urlsafe_b64encode  # type: ignore
+    ub64dec = base64.urlsafe_b64decode  # type: ignore
+    b64enc = base64.b64encode  # type: ignore
+    b64dec = base64.b64decode  # type: ignore
+    if not PY36:
+        print("using fallback base64 codec due to %r" % (ex,))
+
+
 class Daemon(threading.Thread):
     def __init__(
         self,
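
This block is the heart of the commit. base64.urlsafe_b64encode is itself just b64encode plus a translate(), but each call goes through extra function layers and an altchars check; calling binascii directly with prebuilt translation tables shaves that overhead off every chunk-hash, session-id, filekey and thumbnail-path encode. The smoke test guards the fast path, and any failure (notably pythons too old for binascii's newline= keyword, hence the PY36 check on the warning) falls back to the stdlib functions. A standalone micro-benchmark sketch of the difference, where the numbers will vary by interpreter:

    import base64
    import binascii
    import os
    import timeit

    _enc_tl = bytes.maketrans(b"+/", b"-_")

    def ub64enc(bs):
        # urlsafe base64 without the base64-module wrapper overhead
        return binascii.b2a_base64(bs, newline=False).translate(_enc_tl)

    buf = os.urandom(33)  # same size as an up2k chunk-hash digest
    n = 1_000_000
    t1 = timeit.timeit(lambda: base64.urlsafe_b64encode(buf), number=n)
    t2 = timeit.timeit(lambda: ub64enc(buf), number=n)
    print("stdlib %.2fs vs binascii %.2fs" % (t1, t2))
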
@@ -1028,7 +1057,7 @@ class MTHash(object):
             ofs += len(buf)

         bdig = hashobj.digest()[:33]
-        udig = base64.urlsafe_b64encode(bdig).decode("utf-8")
+        udig = ub64enc(bdig).decode("ascii")
         return nch, udig, ofs0, chunk_sz
@@ -1054,7 +1083,7 @@ class HMaccas(object):
             self.cache = {}

         zb = hmac.new(self.key, msg, hashlib.sha512).digest()
-        zs = base64.urlsafe_b64encode(zb)[: self.retlen].decode("utf-8")
+        zs = ub64enc(zb)[: self.retlen].decode("ascii")
         self.cache[msg] = zs
         return zs
@@ -1459,8 +1488,7 @@ def ren_open(fname: str, *args: Any, **kwargs: Any) -> tuple[typing.IO[Any], str]:

         if not b64:
             zs = ("%s\n%s" % (orig_name, suffix)).encode("utf-8", "replace")
-            zs = hashlib.sha512(zs).digest()[:12]
-            b64 = base64.urlsafe_b64encode(zs).decode("utf-8")
+            b64 = ub64enc(hashlib.sha512(zs).digest()[:12]).decode("ascii")

         badlen = len(fname)
         while len(fname) >= badlen:
@@ -1766,9 +1794,8 @@ def rand_name(fdir: str, fn: str, rnd: int) -> str:

         nc = rnd + extra
         nb = (6 + 6 * nc) // 8
-        zb = os.urandom(nb)
-        zb = base64.urlsafe_b64encode(zb)
-        fn = zb[:nc].decode("utf-8") + ext
+        zb = ub64enc(os.urandom(nb))
+        fn = zb[:nc].decode("ascii") + ext
         ok = not os.path.exists(fsenc(os.path.join(fdir, fn)))

     return fn
@@ -1781,7 +1808,7 @@ def gen_filekey(alg: int, salt: str, fspath: str, fsize: int, inode: int) -> str:
         zs = "%s %s" % (salt, fspath)

     zb = zs.encode("utf-8", "replace")
-    return base64.urlsafe_b64encode(hashlib.sha512(zb).digest()).decode("ascii")
+    return ub64enc(hashlib.sha512(zb).digest()).decode("ascii")


 def gen_filekey_dbg(
@@ -2263,12 +2290,12 @@ w8enc = _w8enc3 if not PY2 else _w8enc2

 def w8b64dec(txt: str) -> str:
     """decodes base64(filesystem-bytes) to wtf8"""
-    return w8dec(base64.urlsafe_b64decode(txt.encode("ascii")))
+    return w8dec(ub64dec(txt.encode("ascii")))


 def w8b64enc(txt: str) -> str:
     """encodes wtf8 to base64(filesystem-bytes)"""
-    return base64.urlsafe_b64encode(w8enc(txt)).decode("ascii")
+    return ub64enc(w8enc(txt)).decode("ascii")


 if not PY2 and WINDOWS:
@@ -2644,8 +2671,7 @@ def hashcopy(
         if slp:
             time.sleep(slp)

-    digest = hashobj.digest()[:33]
-    digest_b64 = base64.urlsafe_b64encode(digest).decode("utf-8")
+    digest_b64 = ub64enc(hashobj.digest()[:33]).decode("ascii")

     return tlen, hashobj.hexdigest(), digest_b64