mirror of https://github.com/9001/copyparty.git (synced 2025-09-28 12:42:26 -06:00)
speed
commit 74821a38ad
parent 19a4c45389
bin/u2c.py (10 changed lines)
@@ -1,8 +1,8 @@
 #!/usr/bin/env python3
 from __future__ import print_function, unicode_literals

-S_VERSION = "2.12"
-S_BUILD_DT = "2025-08-26"
+S_VERSION = "2.13"
+S_BUILD_DT = "2025-09-05"

 """
 u2c.py: upload to copyparty
@@ -590,9 +590,10 @@ def undns(url):

 def _scd(err, top):
     """non-recursive listing of directory contents, along with stat() info"""
+    top_ = os.path.join(top, b"")
     with os.scandir(top) as dh:
         for fh in dh:
-            abspath = os.path.join(top, fh.name)
+            abspath = top_ + fh.name
             try:
                 yield [abspath, fh.stat()]
             except Exception as ex:

@@ -601,8 +602,9 @@ def _scd(err, top):

 def _lsd(err, top):
     """non-recursive listing of directory contents, along with stat() info"""
+    top_ = os.path.join(top, b"")
     for name in os.listdir(top):
-        abspath = os.path.join(top, name)
+        abspath = top_ + name
         try:
             yield [abspath, os.stat(abspath)]
         except Exception as ex:
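The pattern in _scd() and _lsd() is the core of this commit: os.path.join() is called once per directory to produce a separator-terminated prefix, and each entry's absolute path is then built by plain concatenation, saving one Python-level join call per file. A minimal standalone sketch of the equivalence and the intended speedup (directory name and file count are made up; nothing here touches the filesystem):

import os
import timeit

top = b"/var/tmp"                        # bytes paths, like the u2c.py helpers
names = [b"file%d" % i for i in range(10000)]

top_ = os.path.join(top, b"")            # appends the separator exactly once

# both spellings produce the same path for every entry
assert all(os.path.join(top, n) == top_ + n for n in names)

t_join = timeit.timeit(lambda: [os.path.join(top, n) for n in names], number=100)
t_cat = timeit.timeit(lambda: [top_ + n for n in names], number=100)
print("join each: %.3fs   concat: %.3fs" % (t_join, t_cat))

The same join-once / concatenate-later rewrite appears in the server-side hunks below (statdir, rmdirs, the HttpCli and HttpSrv hunks, and the E.mod_ resource helpers).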
@@ -111,6 +111,7 @@ class EnvParams(object):
     def __init__(self) -> None:
         self.t0 = time.time()
         self.mod = ""
+        self.mod_ = ""
         self.cfg = ""
         self.scfg = True

@@ -247,6 +247,7 @@ def init_E(EE: EnvParams) -> None:
     E.mod = os.path.dirname(os.path.realpath(__file__))
     if E.mod.endswith("__init__"):
         E.mod = os.path.dirname(E.mod)
+    E.mod_ = os.path.join(E.mod, "")

     try:
         p = os.environ.get("XDG_CONFIG_HOME")
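Same idea applied to the module directory: init_E() stores a separator-terminated copy of E.mod in E.mod_, so every later resource lookup is a single string concatenation instead of an os.path.join() call. A rough sketch of the shape of this, with the surrounding copyparty details stripped out (only E.mod, E.mod_, init_E and has_resource are taken from the diff; the rest is illustrative):

import os

class EnvParams(object):
    def __init__(self):
        self.mod = ""    # module directory, e.g. ".../site-packages/copyparty"
        self.mod_ = ""   # same directory with a trailing separator

def init_E(E):
    E.mod = os.path.dirname(os.path.realpath(__file__))
    E.mod_ = os.path.join(E.mod, "")   # join once at startup

def has_resource(E, name):
    # hot path: no join per lookup, just concatenation
    return os.path.exists(E.mod_ + name)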
@@ -1255,7 +1255,7 @@ class HttpCli(object):

         res_path = "web/" + self.vpath[5:]
         if res_path in RES:
-            ap = os.path.join(self.E.mod, res_path)
+            ap = self.E.mod_ + res_path
             if bos.path.exists(ap) or bos.path.exists(ap + ".gz"):
                 return self.tx_file(ap)
         else:

@@ -5408,8 +5408,9 @@ class HttpCli(object):

         if dk_sz and fsroot:
             kdirs = []
+            fsroot_ = os.path.join(fsroot, "")
             for dn in dirs:
-                ap = os.path.join(fsroot, dn)
+                ap = fsroot_ + dn
                 zs = self.gen_fk(2, self.args.dk_salt, ap, 0, 0)[:dk_sz]
                 kdirs.append(dn + "?k=" + zs)
             dirs = kdirs
@@ -571,7 +571,7 @@ class HttpSrv(object):

         v = self.E.t0
         try:
-            with os.scandir(os.path.join(self.E.mod, "web")) as dh:
+            with os.scandir(self.E.mod_ + "web") as dh:
                 for fh in dh:
                     inf = fh.stat()
                     v = max(v, inf.st_mtime)
@@ -414,10 +414,11 @@ class Up2k(object):

         ret: list[tuple[int, str, int, int, int]] = []
         userset = set([(uname or "\n"), "*"])
+        e_d = {}
         n = 1000
         try:
             for ptop, tab2 in self.registry.items():
-                cfg = self.flags.get(ptop, {}).get("u2abort", 1)
+                cfg = self.flags.get(ptop, e_d).get("u2abort", 1)
                 if not cfg:
                     continue
                 addr = (ip or "\n") if cfg in (1, 2) else ""
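The e_d = {} lines serve the same allocation-avoidance goal: a literal {} used as a .get() fallback (or as a throwaway argument, as in the db_add() call further down) allocates a fresh dict on every evaluation, whereas one preallocated empty dict can be shared for as long as it is never mutated. A self-contained sketch of the difference (keys and loop sizes are invented for illustration):

import timeit

flags = {"/some/volume": {"u2abort": 2}}
ptops = ["/some/volume", "/missing/volume"] * 5000

def literal_default():
    # a new empty dict is built on every lookup, hit or miss
    return [flags.get(p, {}).get("u2abort", 1) for p in ptops]

def shared_default():
    e_d = {}  # allocated once; safe to share because it is only read
    return [flags.get(p, e_d).get("u2abort", 1) for p in ptops]

assert literal_default() == shared_default()
print("literal:", timeit.timeit(literal_default, number=50))
print("shared: ", timeit.timeit(shared_default, number=50))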
@@ -1138,7 +1139,7 @@ class Up2k(object):
         ft = "\033[0;32m{}{:.0}"
         ff = "\033[0;35m{}{:.0}"
         fv = "\033[0;36m{}:\033[90m{}"
-        zs = "ext_th_d html_head put_name2 mv_re_r mv_re_t rm_re_r rm_re_t srch_re_dots srch_re_nodot zipmax zipmaxn_v zipmaxs_v"
+        zs = "du_iwho ext_th_d html_head put_name2 mv_re_r mv_re_t rm_re_r rm_re_t srch_re_dots srch_re_nodot zipmax zipmaxn_v zipmaxs_v"
         fx = set(zs.split())
         fd = vf_bmap()
         fd.update(vf_cmap())

@@ -1493,6 +1494,7 @@ class Up2k(object):
         files: list[tuple[int, int, str]] = []
         fat32 = True
         cv = vcv = acv = ""
+        e_d = {}

         th_cvd = self.args.th_coversd
         th_cvds = self.args.th_coversd_set

@@ -1730,7 +1732,7 @@ class Up2k(object):
             un = ""

         # skip upload hooks by not providing vflags
-        self.db_add(db.c, {}, rd, fn, lmod, sz, "", "", wark, wark, "", un, ip, at)
+        self.db_add(db.c, e_d, rd, fn, lmod, sz, "", "", wark, wark, "", un, ip, at)
         db.n += 1
         db.nf += 1
         tfa += 1
@@ -3189,8 +3189,9 @@ def statdir(
     else:
         src = "listdir"
         fun: Any = os.lstat if lstat else os.stat
+        btop_ = os.path.join(btop, b"")
         for name in os.listdir(btop):
-            abspath = os.path.join(btop, name)
+            abspath = btop_ + name
             try:
                 yield (fsdec(name), fun(abspath))
             except Exception as ex:
@@ -3229,7 +3230,9 @@ def rmdirs(

     stats = statdir(logger, scandir, lstat, top, False)
     dirs = [x[0] for x in stats if stat.S_ISDIR(x[1].st_mode)]
-    dirs = [os.path.join(top, x) for x in dirs]
+    if dirs:
+        top_ = os.path.join(top, "")
+        dirs = [top_ + x for x in dirs]
     ok = []
     ng = []
     for d in reversed(dirs):
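rmdirs() gets the join-once treatment plus a small early-out: the prefix is only computed when statdir() actually returned subdirectories. A before/after sketch of just that list-building step, with made-up data (variable names follow the hunk):

import os

top = "/srv/media"
dirs = ["a", "b", "c"]        # relative names, as produced by statdir()

before = [os.path.join(top, x) for x in dirs]   # old: one join per entry

if dirs:                                        # new: join once, then concatenate
    top_ = os.path.join(top, "")
    dirs = [top_ + x for x in dirs]

assert dirs == before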
@@ -4205,7 +4208,7 @@ def _pkg_resource_exists(pkg: str, name: str) -> bool:


 def stat_resource(E: EnvParams, name: str):
-    path = os.path.join(E.mod, name)
+    path = E.mod_ + name
     if os.path.exists(path):
         return os.stat(fsenc(path))
     return None

@@ -4252,7 +4255,7 @@ def _has_resource(name: str):


 def has_resource(E: EnvParams, name: str):
-    return _has_resource(name) or os.path.exists(os.path.join(E.mod, name))
+    return _has_resource(name) or os.path.exists(E.mod_ + name)


 def load_resource(E: EnvParams, name: str, mode="rb") -> IO[bytes]:

@@ -4277,7 +4280,7 @@ def load_resource(E: EnvParams, name: str, mode="rb") -> IO[bytes]:
         stream = codecs.getreader(enc)(stream)
         return stream

-    ap = os.path.join(E.mod, name)
+    ap = E.mod_ + name

     if PY2:
         return codecs.open(ap, "r", encoding=enc)  # type: ignore