pkgres:
* pyz: yeet the resource tar which is now pointless thanks to pkgres
* cache impresource stuff because pyz lookups are Extremely slow
* prefer tx_file when possible for slightly better performance
* use hardcoded list of expected resources instead of dynamic discovery at runtime; much simpler and probably safer
* fix some forgotten resources (copying.txt, insecure.pem)
* fix loading jinja templates on windows
This commit is contained in:
parent a462a644fb
commit d866841c19
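The second bullet is the heart of this commit: when copyparty runs as a .pyz, every importlib.resources lookup re-walks the zipapp's contents, which is slow enough to matter on every request. The sketch below only illustrates that memoization idea under assumed names (find_resource, _files_root and _res_cache are invented for the example); the actual implementation is the _find_impresource_cold / _find_impresource_warm pair in the util.py hunk further down.

    import importlib.resources as impresources

    _files_root = None  # cached importlib.resources.files(pkg) handle
    _res_cache = {}     # resource name -> Traversable

    def find_resource(pkg, name):
        # pay the zip-index walk once per package, then once per name
        global _files_root
        if _files_root is None:
            _files_root = impresources.files(pkg)
        try:
            return _res_cache[name]
        except KeyError:
            if len(_res_cache) > 999:
                _res_cache.clear()  # crude size cap, same spirit as the real code
            ret = _files_root.joinpath(name)
            _res_cache[name] = ret
            return ret
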
@@ -16,7 +16,9 @@ except:
     TYPE_CHECKING = False

 if True:
-    from typing import Any, Callable
+    from types import ModuleType
+    from typing import Any, Callable, Optional

 PY2 = sys.version_info < (3,)
 PY36 = sys.version_info > (3, 6)

@@ -51,10 +53,63 @@ try:
 except:
     CORES = (os.cpu_count() if hasattr(os, "cpu_count") else 0) or 2


+# all embedded resources to be retrievable over http
+zs = """
+web/a/partyfuse.py
+web/a/u2c.py
+web/a/webdav-cfg.bat
+web/baguettebox.js
+web/browser.css
+web/browser.html
+web/browser.js
+web/browser2.html
+web/cf.html
+web/copyparty.gif
+web/dd/2.png
+web/dd/3.png
+web/dd/4.png
+web/dd/5.png
+web/deps/busy.mp3
+web/deps/easymde.css
+web/deps/easymde.js
+web/deps/marked.js
+web/deps/mini-fa.css
+web/deps/mini-fa.woff
+web/deps/prism.css
+web/deps/prism.js
+web/deps/prismd.css
+web/deps/scp.woff2
+web/deps/sha512.ac.js
+web/deps/sha512.hw.js
+web/md.css
+web/md.html
+web/md.js
+web/md2.css
+web/md2.js
+web/mde.css
+web/mde.html
+web/mde.js
+web/msg.css
+web/msg.html
+web/shares.css
+web/shares.html
+web/shares.js
+web/splash.css
+web/splash.html
+web/splash.js
+web/svcs.html
+web/svcs.js
+web/ui.css
+web/up2k.js
+web/util.js
+web/w.hash.js
+"""
+RES = set(zs.strip().split("\n"))
+
+
 class EnvParams(object):
     def __init__(self) -> None:
-        self.pkg = None
+        self.pkg: Optional[ModuleType] = None
         self.t0 = time.time()
         self.mod = ""
         self.cfg = ""

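With the hardcoded RES set above, deciding whether a request maps to an embedded asset becomes a plain set lookup instead of the old walk_resources() discovery pass. A rough sketch of that check, assuming the RES set from the hunk above (is_embedded_asset is an invented name for illustration; the real dispatch is in the .cpr handler hunk later in this diff):

    def is_embedded_asset(vpath: str) -> bool:
        # ".cpr/browser.css" -> "web/browser.css"
        if not vpath.startswith(".cpr/"):
            return False
        return "web/" + vpath[len(".cpr/"):] in RES
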
@@ -58,6 +58,7 @@ from .util import (
     b64enc,
     dedent,
     has_resource,
+    load_resource,
     min_ex,
     pybin,
     termsize,

@@ -217,6 +218,7 @@ def init_E(EE: EnvParams) -> None:

         raise Exception("could not find a writable path for config")

+    assert __package__  # !rm
     E.pkg = sys.modules[__package__]
     E.mod = os.path.dirname(os.path.realpath(__file__))
     if E.mod.endswith("__init__"):

@@ -520,14 +522,18 @@ def sfx_tpoke(top: str):


 def showlic() -> None:
-    p = os.path.join(E.mod, "res", "COPYING.txt")
-    if not os.path.exists(p):
+    try:
+        with load_resource(E, "res/COPYING.txt") as f:
+            buf = f.read()
+    except:
+        buf = b""
+
+    if buf:
+        print(buf.decode("utf-8", "replace"))
+    else:
         print("no relevant license info to display")
         return

-    with open(p, "rb") as f:
-        print(f.read().decode("utf-8", "replace"))
-

 def get_sects():
     return [

@@ -1567,16 +1573,13 @@ def run_argparse(
     return ret


-def main(argv: Optional[list[str]] = None, rsrc: Optional[str] = None) -> None:
+def main(argv: Optional[list[str]] = None) -> None:
     time.strptime("19970815", "%Y%m%d")  # python#7980
     if WINDOWS:
         os.system("rem")  # enables colors

     init_E(E)

-    if rsrc:  # pyz
-        E.mod = rsrc
-
     if argv is None:
         argv = sys.argv

@@ -7,7 +7,7 @@ import shutil
 import time

 from .__init__ import ANYWIN
-from .util import Netdev, runcmd, wrename, wunlink
+from .util import Netdev, load_resource, runcmd, wrename, wunlink

 HAVE_CFSSL = not os.environ.get("PRTY_NO_CFSSL")

@@ -29,13 +29,15 @@ def ensure_cert(log: "RootLogger", args) -> None:

    i feel awful about this and so should they
    """
-    cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
+    with load_resource(args.E, "res/insecure.pem") as f:
+        cert_insec = f.read()
     cert_appdata = os.path.join(args.E.cfg, "cert.pem")
     if not os.path.isfile(args.cert):
         if cert_appdata != args.cert:
             raise Exception("certificate file does not exist: " + args.cert)

-        shutil.copy(cert_insec, args.cert)
+        with open(args.cert, "wb") as f:
+            f.write(cert_insec)

     with open(args.cert, "rb") as f:
         buf = f.read()

@@ -50,7 +52,9 @@ def ensure_cert(log: "RootLogger", args) -> None:
         raise Exception(m + "private key must appear before server certificate")

     try:
-        if filecmp.cmp(args.cert, cert_insec):
+        with open(args.cert, "rb") as f:
+            active_cert = f.read()
+        if active_cert == cert_insec:
             t = "using default TLS certificate; https will be insecure:\033[36m {}"
             log("cert", t.format(args.cert), 3)
     except:

@@ -151,14 +155,22 @@ def _gen_srv(log: "RootLogger", args, netdevs: dict[str, Netdev]):
             raise Exception("no useable cert found")

         expired = time.time() + args.crt_sdays * 60 * 60 * 24 * 0.5 > expiry
-        cert_insec = os.path.join(args.E.mod, "res/insecure.pem")
+        if expired:
+            raise Exception("old server-cert has expired")
+
         for n in names:
             if n not in inf["sans"]:
                 raise Exception("does not have {}".format(n))
-        if expired:
-            raise Exception("old server-cert has expired")
-        if not filecmp.cmp(args.cert, cert_insec):
+
+        with load_resource(args.E, "res/insecure.pem") as f:
+            cert_insec = f.read()
+
+        with open(args.cert, "rb") as f:
+            active_cert = f.read()
+
+        if active_cert and active_cert != cert_insec:
             return

     except Exception as ex:
         log("cert", "will create new server-cert; {}".format(ex))

@@ -32,7 +32,7 @@ try:
 except:
     pass

-from .__init__ import ANYWIN, PY2, TYPE_CHECKING, EnvParams, unicode
+from .__init__ import ANYWIN, PY2, RES, TYPE_CHECKING, EnvParams, unicode
 from .__version__ import S_VERSION
 from .authsrv import VFS  # typechk
 from .bos import bos

@@ -67,8 +67,8 @@ from .util import (
     get_df,
     get_spd,
     guess_mime,
-    gzip_orig_sz,
     gzip_file_orig_sz,
+    gzip_orig_sz,
     has_resource,
     hashcopy,
     hidedir,

@@ -1097,13 +1097,17 @@ class HttpCli(object):
             if self.vpath == ".cpr/metrics":
                 return self.conn.hsrv.metrics.tx(self)

-            static_path = os.path.join("web", self.vpath[5:])
-            if static_path in self.conn.hsrv.statics:
-                return self.tx_res(static_path)
+            res_path = "web/" + self.vpath[5:]
+            if res_path in RES:
+                ap = os.path.join(self.E.mod, res_path)
+                if bos.path.exists(ap) or bos.path.exists(ap + ".gz"):
+                    return self.tx_file(ap)
+                else:
+                    return self.tx_res(res_path)

-            if not undot(static_path).startswith("web"):
+            if res_path != undot(res_path):
                 t = "malicious user; attempted path traversal [{}] => [{}]"
-                self.log(t.format(self.vpath, static_path), 1)
+                self.log(t.format(self.vpath, res_path), 1)
                 self.cbonk(self.conn.hsrv.gmal, self.req, "trav", "path traversal")

             self.tx_404()

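The hunk above is the "prefer tx_file when possible" bullet: when the asset also exists as a regular file next to the module (sfx or source checkout), it is served through the normal file path, and only a pure .pyz falls back to streaming it through the package-resource reader. Condensed for clarity, with the names (RES, bos, tx_file, tx_res) taken from the hunk itself:

    res_path = "web/" + self.vpath[5:]  # ".cpr/..." -> "web/..."
    if res_path in RES:
        ap = os.path.join(self.E.mod, res_path)
        if bos.path.exists(ap) or bos.path.exists(ap + ".gz"):
            return self.tx_file(ap)  # plain file on disk (or its .gz); fastest path
        return self.tx_res(res_path)  # embedded-only; read through importlib.resources
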
@@ -3415,6 +3419,7 @@ class HttpCli(object):
             self.args.s_wr_slp,
             not self.args.no_poll,
         )
+        res.close()

         if remains > 0:
             logmsg += " \033[31m" + unicode(file_sz - remains) + "\033[0m"

@@ -66,7 +66,6 @@ from .util import (
     Magician,
     Netdev,
     NetMap,
-    absreal,
     build_netmap,
     has_resource,
     ipnorm,

@@ -76,9 +75,7 @@ from .util import (
     spack,
     start_log_thrs,
     start_stackmon,
-    stat_resource,
     ub64enc,
-    walk_resources,
 )

 if TYPE_CHECKING:

@@ -96,7 +93,7 @@ if not hasattr(socket, "AF_UNIX"):


 def load_jinja2_resource(E: EnvParams, name: str):
-    return load_resource(E, os.path.join("web", name), "r").read()
+    return load_resource(E, "web/" + name, "r").read()


 class HttpSrv(object):

@@ -174,15 +171,12 @@ class HttpSrv(object):
             "cf",
         ]
         self.j2 = {x: env.get_template(x + ".html") for x in jn}
-        self.prism = has_resource(self.E, os.path.join("web", "deps", "prism.js.gz"))
+        self.prism = has_resource(self.E, "web/deps/prism.js.gz")

         self.ipa_nm = build_netmap(self.args.ipa)
         self.xff_nm = build_netmap(self.args.xff_src)
         self.xff_lan = build_netmap("lan")

-        self.statics: set[str] = set()
-        self._build_statics()
-
         self.ptn_cc = re.compile(r"[\x00-\x1f]")
         self.ptn_hsafe = re.compile(r"[\x00-\x1f<>\"'&]")

@@ -216,14 +210,6 @@ class HttpSrv(object):
         except:
             pass

-    def _build_statics(self) -> None:
-        for dp, _, df in walk_resources(self.E, "web"):
-            for fn in df:
-                ap = os.path.join(dp, fn)
-                self.statics.add(ap)
-                if ap.endswith(".gz"):
-                    self.statics.add(ap[:-3])
-
     def set_netdevs(self, netdevs: dict[str, Netdev]) -> None:
         ips = set()
         for ip, _ in self.bound:

@@ -543,20 +529,10 @@ class HttpSrv(object):

         v = self.E.t0
         try:
-            for (base, dirs, files) in walk_resources(self.E, "web"):
-                inf = stat_resource(self.E, base)
-                if inf:
+            with os.scandir(os.path.join(self.E.mod, "web")) as dh:
+                for fh in dh:
+                    inf = fh.stat()
                     v = max(v, inf.st_mtime)
-                for d in dirs:
-                    inf = stat_resource(self.E, os.path.join(base, d))
-                    if inf:
-                        v = max(v, inf.st_mtime)
-                for f in files:
-                    inf = stat_resource(self.E, os.path.join(base, e))
-                    if inf:
-                        v = max(v, inf.st_mtime)
-                # only do top-level
-                break
         except:
             pass

@@ -134,7 +134,7 @@ if True: # pylint: disable=using-constant-test
     from collections.abc import Callable, Iterable

     import typing
-    from typing import Any, Generator, Optional, Pattern, Protocol, Union
+    from typing import Any, Generator, IO, Optional, Pattern, Protocol, Union

     try:
         from typing import LiteralString

@@ -3420,6 +3420,7 @@ def gzip_orig_sz(fn: str) -> int:
     with open(fsenc(fn), "rb") as f:
         return gzip_file_orig_sz(f)

+
 def gzip_file_orig_sz(f) -> int:
     start = f.tell()
     f.seek(-4, 2)

@@ -3582,144 +3583,95 @@ def _pkg_resource_exists(pkg: str, name: str) -> bool:
     except NotImplementedError:
         return False


 def stat_resource(E: EnvParams, name: str):
     path = os.path.join(E.mod, name)
     if os.path.exists(path):
         return os.stat(fsenc(path))
     return None


-def has_resource(E: EnvParams, name: str):
-    if impresources:
-        try:
-            resources = impresources.files(E.pkg)
-        except ImportError:
-            pass
-        else:
-            res = resources.joinpath(name)
-            if res.is_file() or res.is_dir():
-                return True
-
-    if pkg_resources:
-        if _pkg_resource_exists(E.pkg.__name__, name):
-            return True
-
-    return os.path.exists(os.path.join(E.mod, name))
+def _find_impresource_cold(E: EnvParams, name: str):
+    global _rescache_imp, _find_impresource
+
+    assert impresources  # !rm
+    try:
+        _rescache_imp = impresources.files(E.pkg)
+    except ImportError:
+        return None
+
+    _find_impresource = _find_impresource_warm
+    return _find_impresource(E, name)
+
+
+def _find_impresource_warm(E: EnvParams, name: str):
+    if not _rescache_imp:
+        return None
+
+    try:
+        return _rescache_res[name]
+    except:
+        if len(_rescache_res) > 999:
+            _rescache_res.clear()
+        ret = _rescache_imp.joinpath(name)
+        _rescache_res[name] = ret
+        return ret
+
+
+_find_impresource = _find_impresource_cold
+_rescache_imp = None
+_rescache_has = {}
+_rescache_res = {}
+
+
+def _has_resource(E: EnvParams, name: str):
+    try:
+        return _rescache_has[name]
+    except:
+        pass
+
+    if len(_rescache_has) > 999:
+        _rescache_has.clear()
+
+    if impresources:
+        res = _find_impresource(E, name)
+        if res and res.is_file():
+            _rescache_has[name] = True
+            return True
+
+    if pkg_resources:
+        if _pkg_resource_exists(E.pkg.__name__, name):
+            _rescache_has[name] = True
+            return True
+
+    _rescache_has[name] = False
+    return False
+
+
+def has_resource(E: EnvParams, name: str):
+    return _has_resource(E, name) or os.path.exists(os.path.join(E.mod, name))


-def load_resource(E: EnvParams, name: str, mode="rb"):
+def load_resource(E: EnvParams, name: str, mode="rb") -> IO[bytes]:
+    enc = None if "b" in mode else "utf-8"
+
     if impresources:
-        try:
-            resources = impresources.files(E.pkg)
-        except ImportError:
-            pass
-        else:
-            res = resources.joinpath(name)
-            if res.is_file():
+        res = _find_impresource(E, name)
+        if res and res.is_file():
+            if enc:
+                return res.open(mode, encoding=enc)
+            else:
+                # throws if encoding= is mentioned at all
                 return res.open(mode)

     if pkg_resources:
-        if _pkg_resource_exists(E.pkg.__name__, name) and not pkg_resources.resource_isdir(E.pkg.__name__, name):
+        if _pkg_resource_exists(E.pkg.__name__, name):
             stream = pkg_resources.resource_stream(E.pkg.__name__, name)
-            if 'b' not in mode:
-                stream = io.TextIOWrapper(stream)
+            if enc:
+                stream = io.TextIOWrapper(stream, encoding=enc)
             return stream

-    return open(os.path.join(E.mod, name), mode)
+    return open(os.path.join(E.mod, name), mode, encoding=enc)
-
-
-def walk_resources(E: EnvParams, name: str):
-    def walk_idirs(base, r):
-        queue = [(base, r)]
-        while queue:
-            (b, r) = queue.pop(0)
-            d = []
-            f = []
-            for e in r.iterdir():
-                if e.is_dir():
-                    d.append(e.name)
-                    queue.append((os.path.join(b, e.name), e))
-                elif e.is_file():
-                    f.append(e.name)
-            yield (b, d, f)
-
-    def walk_pdirs(base):
-        queue = [base]
-        while queue:
-            b = queue.pop(0)
-            d = []
-            f = []
-            for e in pkg_resources.resource_listdir(E.pkg.__name__, b):
-                if pkg_resources.resource_isdir(E.pkg.__name__, e):
-                    d.append(e)
-                    queue.append(os.path.join(b, e))
-                else:
-                    f.append(e)
-            yield (b, d, f)
-
-    if impresources:
-        try:
-            iresources = impresources.files(E.pkg)
-        except ImportError:
-            iresources = None
-    else:
-        iresources = None
-
-    base_path = os.path.join(E.mod, name)
-
-    def walk_single(base, dirs, files, normalize_base=False, skip_ires=False, skip_pres=False):
-        if normalize_base:
-            if base != base_path:
-                relbase = os.path.relpath(base, base_path)
-            else:
-                relbase = name
-        else:
-            relbase = base
-
-        ires_dirs = []
-        if not skip_ires and iresources:
-            iresbase = iresources.joinpath(relbase)
-            if iresbase.is_dir():
-                for ientry in iresbase.iterdir():
-                    if ientry.is_dir() and ientry.name not in dirs:
-                        dirs.append(ientry.name)
-                        ires_dirs.append(ientry.name)
-                    elif ientry.is_file() and ientry.name not in files:
-                        files.append(ientry.name)
-
-        pres_dirs = []
-        if not skip_pres and _pkg_resource_exists(E.pkg.__name__, relbase) and pkg_resources.resource_isdir(E.pkg.__name__, relbase):
-            for pentry in pkg_resources.resource_listdir(E.pkg.__name__, relbase):
-                ppath = os.path.join(relbase, pentry)
-                if pkg_resources.resource_isdir(E.pkg.__name__, ppath):
-                    if pentry not in dirs:
-                        dirs.append(pentry)
-                        pres_dirs.append(pentry)
-                else:
-                    if pentry not in files:
-                        files.append(pentry)
-
-        yield (base, dirs + ires_dirs + pres_dirs, files)
-        for d in ires_dirs:
-            for (ibase, idirs, ifiles) in walk_idirs(os.path.join(relbase, d), iresources.joinpath(relbase, d)):
-                yield from walk_single(ibase, idirs, ifiles, normalize_base=False, skip_ires=True, skip_pres=skip_pres)
-        for d in pres_dirs:
-            for (pbase, pdirs, pfiles) in walk_pdirs(os.path.join(relbase, d)):
-                yield (pbase, pdirs, pfiles)
-
-    normalize_base = False
-    skip_ires = skip_pres = False
-    if os.path.isdir(base_path):
-        walker = os.walk(base_path)
-        normalize_base = True
-    elif iresources and iresources.joinpath(name).is_dir():
-        walker = walk_idirs(name, iresources.joinpath(name))
-        skip_ires = True
-    elif pkg_resources and _pkg_resource_exists(E.pkg.__name__, name) and pkg_resources.resource_isdir(E.pkg.__name__, name):
-        walker = walk_pdirs(name)
-        skip_pres = True
-
-    for (base, dirs, files) in walker:
-        yield from walk_single(base, dirs, files, normalize_base=normalize_base, skip_ires=skip_ires, skip_pres=skip_pres)


 class Pebkac(Exception):

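For callers, the util.py rewrite above leaves a small uniform surface: has_resource() to probe for an embedded or on-disk asset, and load_resource() to open it as bytes (the default) or as utf-8 text with mode "r". A short usage sketch mirroring how showlic() and load_jinja2_resource() use it elsewhere in this diff; E is the EnvParams instance and the local variable names are only for illustration:

    # binary read (default "rb"), as in showlic()
    with load_resource(E, "res/COPYING.txt") as f:
        license_txt = f.read().decode("utf-8", "replace")

    # text read: "r" wraps the underlying stream as utf-8
    if has_resource(E, "web/splash.html"):
        with load_resource(E, "web/splash.html", "r") as f:
            splash_tpl = f.read()
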
@@ -42,12 +42,6 @@ ver="$(cat ../sfx/ver)"
 mkdir -p ../dist
 pyz_out=../dist/copyparty.pyz

-echo creating z.tar
-( cd copyparty
-tar -cf z.tar "${targs[@]}" --numeric-owner web res
-rm -rf web res
-)
-
 echo creating loader
 sed -r 's/^(VER = ).*/\1"'"$ver"'"/; s/^(STAMP = ).*/\1'$(date +%s)/ \
 <../scripts/ziploader.py \

@@ -492,8 +492,8 @@ iawk '/^def /{s=0}/^def generate_lorem_ipsum/{s=1}!s' j2/jinja2/utils.py
 iawk '/^(class|def) /{s=0}/^(class InternationalizationExtension|def _make_new_n?gettext)/{s=1}!s' j2/jinja2/ext.py
 iawk '/^[^ ]/{s=0}/^def babel_extract/{s=1}!s' j2/jinja2/ext.py
 ised '/InternationalizationExtension/d' j2/jinja2/ext.py
-iawk '/^class/{s=0}/^class (Package|Dict|Function|Prefix|Choice|Module)Loader/{s=1}!s' j2/jinja2/loaders.py
-sed -ri '/^from .bccache | (Package|Dict|Function|Prefix|Choice|Module)Loader$/d' j2/jinja2/__init__.py
+iawk '/^class/{s=0}/^class (Package|Dict|Prefix|Choice|Module)Loader/{s=1}!s' j2/jinja2/loaders.py
+sed -ri '/^from .bccache | (Package|Dict|Prefix|Choice|Module)Loader$/d' j2/jinja2/__init__.py
 rm -f j2/jinja2/async* j2/jinja2/{bccache,sandbox}.py
 cat > j2/jinja2/_identifier.py <<'EOF'
 import re

@@ -77,11 +77,14 @@ excl=(
 email._header_value_parser
 email.header
 email.parser
+importlib.resources
+importlib_resources
 inspect
 multiprocessing
 packaging
 pdb
 pickle
+pkg_resources
 PIL.EpsImagePlugin
 pyftpdlib.prefork
 urllib.request

@@ -1,11 +1,6 @@
 #!/usr/bin/env python3

-import atexit
-import os
-import platform
 import sys
-import tarfile
-import tempfile
 import time
 import traceback

@@ -23,20 +18,6 @@ def msg(*a, **ka):
     print(*a, **ka)


-def utime(top):
-    # avoid cleaners
-    files = [os.path.join(dp, p) for dp, dd, df in os.walk(top) for p in dd + df]
-    try:
-        while True:
-            t = int(time.time())
-            for f in [top] + files:
-                os.utime(f, (t, t))
-
-            time.sleep(78123)
-    except Exception as ex:
-        print("utime:", ex, f)
-
-
 def confirm(rv):
     msg()
     msg("retcode", rv if rv else traceback.format_exc())

@@ -51,47 +32,17 @@ def confirm(rv):


 def run():
-    import copyparty
     from copyparty.__main__ import main as cm

-    td = tempfile.TemporaryDirectory(prefix="")
-    atexit.register(td.cleanup)
-    rsrc = td.name
-
-    try:
-        from importlib.resources import files
-
-        f = files(copyparty).joinpath("z.tar").open("rb")
-    except:
-        from importlib.resources import open_binary
-
-        f = open_binary("copyparty", "z.tar")
-
-    with tarfile.open(fileobj=f) as tf:
-        try:
-            tf.extractall(rsrc, filter="tar")
-        except TypeError:
-            tf.extractall(rsrc)  # nosec (archive is safe)
-
-    f.close()
-    f = None
-
-    msg(" rsrc dir:", rsrc)
-    msg()
-
-    sys.argv.append("--sfx-tpoke=" + rsrc)
-
-    cm(rsrc=rsrc)
+    cm()


 def main():
-    sysver = str(sys.version).replace("\n", "\n" + " " * 18)
     pktime = time.strftime("%Y-%m-%d, %H:%M:%S", time.gmtime(STAMP))
     msg()
-    msg(" this is: copyparty", VER)
-    msg(" packed at:", pktime, "UTC,", STAMP)
-    msg("python bin:", sys.executable)
-    msg("python ver:", platform.python_implementation(), sysver)
+    msg("build-time:", pktime, "UTC,", STAMP)
+    msg("python-bin:", sys.executable)
+    msg()

     try:
         run()