add config explainer + generator (#20)

ed 2023-02-05 22:09:17 +00:00
parent 5095d17e81
commit 99cc434779
4 changed files with 385 additions and 117 deletions

copyparty/__main__.py

@@ -27,6 +27,7 @@ from .__init__ import ANYWIN, CORES, PY2, VT100, WINDOWS, E, EnvParams, unicode
from .__version__ import CODENAME, S_BUILD_DT, S_VERSION
from .authsrv import expand_config_file, re_vol
from .svchub import SvcHub
from .cfg import flagcats
from .util import (
IMPLICATIONS,
JINJA_VER,
@@ -53,8 +54,9 @@ try:
except:
HAVE_SSL = False
printed: list[str] = []
u = unicode
printed: list[str] = []
zsid = uuid.uuid4().urn[4:]
class RiceFormatter(argparse.HelpFormatter):
@@ -366,6 +368,7 @@ def args_from_cfg(cfg_path: str) -> list[str]:
continue
if not ln.startswith("-"):
skip = True
continue
if skip:
@@ -495,79 +498,9 @@ def get_sects():
"""
volflags are appended to volume definitions, for example,
to create a write-only volume with the \033[33mnodupe\033[0m and \033[32mnosub\033[0m flags:
\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub
\033[0muploads, general:
\033[36mnodupe\033[35m rejects existing files (instead of symlinking them)
\033[36mhardlink\033[35m does dedup with hardlinks instead of symlinks
\033[36mneversymlink\033[35m disables symlink fallback; full copy instead
\033[36mcopydupes\033[35m disables dedup, always saves full copies of dupes
\033[36mnosub\033[35m forces all uploads into the top folder of the vfs
\033[36mmagic$\033[35m enables filetype detection for nameless uploads
\033[36mgz\033[35m allows server-side gzip of uploads with ?gz (also c,xz)
\033[36mpk\033[35m forces server-side compression, optional arg: xz,9
\033[0mupload rules:
\033[36mmaxn=250,600\033[35m max 250 uploads over 15min
\033[36mmaxb=1g,300\033[35m max 1 GiB over 5min (suffixes: b, k, m, g)
\033[36mrand\033[35m force randomized filenames, 9 chars long by default
\033[36mnrand=N\033[35m randomized filenames are N chars long
\033[36msz=1k-3m\033[35m allow filesizes between 1 KiB and 3MiB
\033[36mdf=1g\033[35m ensure 1 GiB free disk space
\033[0mupload rotation:
(moves all uploads into the specified folder structure)
\033[36mrotn=100,3\033[35m 3 levels of subfolders with 100 entries in each
\033[36mrotf=%Y-%m/%d-%H\033[35m date-formatted organizing
\033[36mlifetime=3600\033[35m uploads are deleted after 1 hour
\033[0mdatabase, general:
\033[36me2d\033[35m sets -e2d (all -e2* args can be set using ce2* volflags)
\033[36md2ts\033[35m disables metadata collection for existing files
\033[36md2ds\033[35m disables onboot indexing, overrides -e2ds*
\033[36md2t\033[35m disables metadata collection, overrides -e2t*
\033[36md2v\033[35m disables file verification, overrides -e2v*
\033[36md2d\033[35m disables all database stuff, overrides -e2*
\033[36mhist=/tmp/cdb\033[35m puts thumbnails and indexes at that location
\033[36mscan=60\033[35m scan for new files every 60sec, same as --re-maxage
\033[36mnohash=\\.iso$\033[35m skips hashing file contents if path matches *.iso
\033[36mnoidx=\\.iso$\033[35m fully ignores the contents at paths matching *.iso
\033[36mnoforget$\033[35m don't forget files when deleted from disk
\033[36mdbd=[acid|swal|wal|yolo]\033[35m database speed-durability tradeoff
\033[36mxlink$\033[35m cross-volume dupe detection / linking
\033[36mxdev\033[35m do not descend into other filesystems
\033[36mxvol\033[35m skip symlinks leaving the volume root
\033[36mdotsrch\033[35m show dotfiles in search results
\033[36mnodotsrch\033[35m hide dotfiles in search results (default)
\033[0mdatabase, audio tags:
"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...
\033[36mmtp=.bpm=f,audio-bpm.py\033[35m uses the "audio-bpm.py" program to
generate ".bpm" tags from uploads (f = overwrite tags)
\033[36mmtp=ahash,vhash=media-hash.py\033[35m collects two tags at once
\033[0mthumbnails:
\033[36mdthumb\033[35m disables all thumbnails
\033[36mdvthumb\033[35m disables video thumbnails
\033[36mdathumb\033[35m disables audio thumbnails (spectrograms)
\033[36mdithumb\033[35m disables image thumbnails
\033[0mclient and ux:
\033[36mhtml_head=TXT\033[35m includes TXT in the <head>
\033[36mrobots\033[35m allows indexing by search engines (default)
\033[36mnorobots\033[35m kindly asks search engines to leave
\033[36mno_sb_md\033[35m disable js sandbox for markdown files
\033[36mno_sb_lg\033[35m disable js sandbox for prologue/epilogue
\033[36msb_md\033[35m enable js sandbox for markdown files (default)
\033[36msb_lg\033[35m enable js sandbox for prologue/epilogue (default)
\033[36mmd_sbf\033[35m list of markdown-sandbox safeguards to disable
\033[36mlg_sbf\033[35m list of *logue-sandbox safeguards to disable
\033[0mothers:
\033[36mfk=8\033[35m generates per-file accesskeys,
which will then be required at the "g" permission
\033[0m"""
),
\033[35m-v /mnt/inc:/inc:w\033[33m:c,nodupe\033[32m:c,nosub"""
)
+ build_flags_desc(),
],
[
"hooks",
@@ -669,6 +602,17 @@ def get_sects():
]
def build_flags_desc():
ret = ""
for grp, flags in flagcats.items():
ret += "\n\n\033[0m" + grp
for k, v in flags.items():
v = v.replace("\n", "\n ")
ret += "\n \033[36m{}\033[35m {}".format(k, v)
return ret + "\033[0m"
# fmt: off
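For reference, build_flags_desc renders each flagcats group (defined in the new copyparty/cfg.py below) into the volflags help text; with the ANSI color codes stripped, one group would come out roughly like this (spacing approximated):

    uploads, general
      nodupe rejects existing files (instead of symlinking them)
      hardlink does dedup with hardlinks instead of symlinks
      nosub forces all uploads into the top folder of the vfs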
@@ -786,7 +730,7 @@ def add_zc_ssdp(ap):
ap2.add_argument("--zs-off", metavar="NETS", type=u, default="", help="disable zeroconf on the comma-separated list of subnets and/or interface names/indexes")
ap2.add_argument("--zsv", action="store_true", help="verbose SSDP")
ap2.add_argument("--zsl", metavar="PATH", type=u, default="/?hc", help="location to include in the url (or a complete external URL), for example [\033[32mpriv/?pw=hunter2\033[0m] (goes directly to /priv/ with password hunter2) or [\033[32m?hc=priv&pw=hunter2\033[0m] (shows mounting options for /priv/ with password)")
ap2.add_argument("--zsid", metavar="UUID", type=u, default=uuid.uuid4().urn[4:], help="USN (device identifier) to announce")
ap2.add_argument("--zsid", metavar="UUID", type=u, default=zsid, help="USN (device identifier) to announce")
def add_ftp(ap):
@@ -878,7 +822,7 @@ def add_shutdown(ap):
ap2 = ap.add_argument_group('shutdown options')
ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")
ap2.add_argument("--ign-ebind-all", action="store_true", help="continue running even if it's impossible to receive connections at all")
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; for example [\033[32midx\033[0m] will do volume indexing + metadata analysis")
ap2.add_argument("--exit", metavar="WHEN", type=u, default="", help="shutdown after WHEN has finished; [\033[32mcfg\033[0m] config parsing, [\033[32midx\033[0m] volscan + multimedia indexing")
def add_logging(ap):
@@ -1001,6 +945,8 @@ def add_ui(ap, retry):
def add_debug(ap):
ap2 = ap.add_argument_group('debug options')
ap2.add_argument("--vc", action="store_true", help="verbose config file parser (explain config)")
ap2.add_argument("--cgen", action="store_true", help="generate config file from current config (best-effort; probably buggy)")
ap2.add_argument("--no-sendfile", action="store_true", help="disable sendfile; instead using a traditional file read loop")
ap2.add_argument("--no-scandir", action="store_true", help="disable scandir; instead using listdir + stat on each file")
ap2.add_argument("--no-fastboot", action="store_true", help="wait for up2k indexing before starting the httpd")
@@ -1173,6 +1119,7 @@ def main(argv: Optional[list[str]] = None) -> None:
for fmtr in [RiceFormatter, RiceFormatter, Dodge11874, BasicDodge11874]:
try:
al = run_argparse(argv, fmtr, retry, nc)
dal = run_argparse([], fmtr, retry, nc)
break
except SystemExit:
raise
@@ -1182,6 +1129,7 @@ def main(argv: Optional[list[str]] = None) -> None:
try:
assert al # type: ignore
assert dal # type: ignore
al.E = E # __init__ is not shared when oxidized
except:
sys.exit(1)
@@ -1287,7 +1235,7 @@ def main(argv: Optional[list[str]] = None) -> None:
# signal.signal(signal.SIGINT, sighandler)
SvcHub(al, argv, "".join(printed)).run()
SvcHub(al, dal, argv, "".join(printed)).run()
if __name__ == "__main__":
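Why main() now runs the parser a second time with an empty argv: dal holds nothing but the parser defaults, so downstream code (cgen in particular) can tell user-chosen settings apart from defaults. A minimal sketch of the idea, not the literal code:

    al = run_argparse(argv, fmtr, retry, nc)  # the effective settings
    dal = run_argparse([], fmtr, retry, nc)   # same parser, no argv -> pure defaults

    # anything that differs from the defaults was chosen by the user,
    # which is exactly what cgen() wants to write back out
    changed = {k: v for k, v in vars(al).items() if getattr(dal, k, None) != v}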

copyparty/authsrv.py

@@ -14,6 +14,7 @@ from datetime import datetime
from .__init__ import ANYWIN, TYPE_CHECKING, WINDOWS
from .bos import bos
from .cfg import vf_bmap, vf_vmap, vf_cmap, onedash, flagdescs, permdescs
from .util import (
IMPLICATIONS,
META_NOBOTS,
@@ -653,11 +654,15 @@ class AuthSrv(object):
args: argparse.Namespace,
log_func: Optional["RootLogger"],
warn_anonwrite: bool = True,
dargs: Optional[argparse.Namespace] = None,
) -> None:
self.args = args
self.dargs = dargs or args
self.log_func = log_func
self.warn_anonwrite = warn_anonwrite
self.line_ctr = 0
self.indent = ""
self.desc = []
self.mutex = threading.Lock()
self.reload()
@@ -705,6 +710,31 @@ class AuthSrv(object):
daxs[dst] = AXS()
mflags[dst] = {}
def _e(self, desc: str) -> None:
if not self.args.vc or not self.line_ctr:
return
if not desc and not self.indent:
self.log("")
return
desc = desc.replace("[", "[\033[0m").replace("]", "\033[90m]")
self.log(" >>> {}{}".format(self.indent, desc), "90")
def _l(self, ln: str, c: int) -> None:
if not self.args.vc or not self.line_ctr:
return
if c < 10:
c += 30
t = "\033[97m{:4} \033[{}m{}{}"
self.log(t.format(self.line_ctr, c, self.indent, ln))
def _el(self, ln: str, c: int, desc: str) -> None:
self._e(desc)
self._l(ln, c)
def _parse_config_file(
self,
fp: str,
@@ -714,33 +744,59 @@ class AuthSrv(object):
mflags: dict[str, dict[str, Any]],
mount: dict[str, str],
) -> None:
skip = False
vol_src = None
vol_dst = None
new_blk = True
self.desc = []
self.line_ctr = 0
expand_config_file(cfg_lines, fp, "")
if self.args.vc:
lns = ["{:4}: {}".format(n, s) for n, s in enumerate(cfg_lines, 1)]
self.log("expanded config file (unprocessed):\n" + "\n".join(lns))
for ln in cfg_lines:
self.line_ctr += 1
if not ln and vol_src is not None:
self.indent = ""
self._e("└─end of settings for volume at URL [/{}]".format(vol_dst))
if vol_dst is None:
t = "no URL provided for filesystem path [{}]"
raise Exception(t.format(vol_src))
vol_src = None
vol_dst = None
if skip:
if not ln:
skip = False
if not ln:
new_blk = True
continue
if not ln or ln.startswith("#"):
if ln.startswith("#"):
continue
if vol_src is None:
if ln.startswith("u "):
u, p = ln[2:].split(":", 1)
self._e("")
self._el(ln, 5, "account [{}], password [{}]:".format(u, p))
acct[u] = p
elif ln.startswith("-"):
skip = True # argv
else:
self._e("")
try:
ck, cv = ln.split(" ", 1)
t = "argument [{}] with value [{}]"
self._el(ln, 6, t.format(ck, cv))
except:
self._el(ln, 6, "argument [{}]".format(ln))
elif new_blk:
self._e("")
self._e("┌─share filesystem path [{}]:".format(ln))
self.indent = ""
self._l(ln, 3)
vol_src = ln
else:
raise Exception("unexpected line: {}".format(ln))
new_blk = False
continue
if vol_src and vol_dst is None:
@@ -754,6 +810,9 @@ class AuthSrv(object):
# cfg files override arguments and previous files
vol_src = absreal(vol_src)
vol_dst = vol_dst.strip("/")
self._e("[{}]".format(vol_src))
self._e("")
self._el(ln, 2, "at URL [/{}]:".format(vol_dst))
self._map_volume(vol_src, vol_dst, mount, daxs, mflags)
continue
@@ -768,7 +827,12 @@ class AuthSrv(object):
self.log(t, 1)
assert vol_dst is not None
self._e("")
self._read_vol_str(lvl, uname, daxs[vol_dst], mflags[vol_dst])
self._l(ln, 2)
self._e("")
self.line_ctr = 0
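To make the parser states above concrete, here is a hypothetical config fragment of the kind _parse_config_file walks line by line (account, paths and permissions are all invented):

    u ed:hunter2

    /mnt/music
    /music
    rw ed
    c e2d

with --vc enabled, each of these lines is echoed with its line number plus a dim annotation such as '>>> account [ed], password [hunter2]:' or '>>> ┌─share filesystem path [/mnt/music]:', built by the _e/_l/_el helpers above.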
def _read_vol_str(
self, lvl: str, uname: str, axs: AXS, flags: dict[str, Any]
@@ -807,6 +871,9 @@ class AuthSrv(object):
("G", axs.upget),
]: # b bb bbb
if ch in lvl:
t = "add permission [{}] for user [{}] -- {}"
desc = permdescs.get(ch, "?")
self._e(t.format(ch, un, desc))
al.add(un)
def _read_volflag(
@@ -816,7 +883,13 @@ class AuthSrv(object):
value: Union[str, bool, list[str]],
is_list: bool,
) -> None:
desc = flagdescs.get(name, "?").replace("\n", " ")
if name not in ["mtp", "xbu", "xau", "xbr", "xar", "xbd", "xad", "xm"]:
if value is True:
t = "add volflag [{}] = {} ({})"
else:
t = "add volflag [{}] = [{}] ({})"
self._e(t.format(name, value, desc))
flags[name] = value
return
@@ -829,6 +902,7 @@ class AuthSrv(object):
vals += [value]
flags[name] = vals
self._e("volflag [{}] += {} ({})".format(name, vals, desc))
def reload(self) -> None:
"""
@@ -1118,32 +1192,7 @@ class AuthSrv(object):
if ptn:
vol.flags[vf] = re.compile(ptn)
for k in (
"dotsrch",
"e2t",
"e2ts",
"e2tsr",
"e2v",
"e2vu",
"e2vp",
"hardlink",
"magic",
"no_sb_md",
"no_sb_lg",
"rand",
"xdev",
"xlink",
"xvol",
):
if getattr(self.args, k):
vol.flags[k] = True
for ga, vf in (
("never_symlink", "neversymlink"),
("no_dedup", "copydupes"),
("no_dupe", "nodupe"),
("no_forget", "noforget"),
):
for ga, vf in vf_bmap().items():
if getattr(self.args, ga):
vol.flags[vf] = True
@@ -1155,10 +1204,7 @@ class AuthSrv(object):
if ve in vol.flags:
vol.flags.pop(vd, None)
for ga, vf in (
("lg_sbf", "lg_sbf"),
("md_sbf", "md_sbf"),
):
for ga, vf in vf_vmap().items():
if vf not in vol.flags:
vol.flags[vf] = getattr(self.args, ga)
@@ -1507,6 +1553,122 @@ class AuthSrv(object):
if not flag_r:
sys.exit(0)
def cgen(self) -> None:
ret = [
"## WARNING:",
"## there will probably be mistakes in",
"## commandline-args (and maybe volflags)",
"",
]
csv = set("i p".split())
lst = set("mtp xbu xau xbr xar xbd xad xm c".split())
askip = set("a v c vc cgen".split())
# keymap from argv to vflag
amap = vf_bmap()
amap.update(vf_vmap())
amap.update(vf_cmap())
vmap = {v: k for k, v in amap.items()}
args = {k: v for k, v in vars(self.args).items()}
pops = []
for k1, k2 in IMPLICATIONS:
if args.get(k1):
pops.append(k2)
for pop in pops:
args.pop(pop, None)
if args:
ret.append("# add commandline args")
for k, v in args.items():
if k in askip:
continue
if k in csv:
v = ",".join([str(za) for za in v])
try:
v2 = getattr(self.dargs, k)
if v == v2:
continue
except:
continue
dk = ("-" if k in onedash else "--") + k.replace("_", "-")
if k in lst:
for ve in v:
ret.append("{} {}".format(dk, ve))
else:
if v is True:
ret.append(dk)
elif v not in (False, None, ""):
ret.append("{} {}".format(dk, v))
ret.append("")
if self.acct:
ret.append("# add accounts")
for u, p in self.acct.items():
ret.append("u {}:{}".format(u, p))
ret.append("")
for vol in self.vfs.all_vols.values():
ret.append("# add volume [/{}]".format(vol.vpath))
ret.append(vol.realpath)
ret.append("/" + vol.vpath)
perms = {
"r": "uread",
"w": "uwrite",
"m": "umove",
"d": "udel",
"g": "uget",
"G": "upget",
}
users = {}
for pkey in perms.values():
for uname in getattr(vol.axs, pkey):
try:
users[uname] += 1
except:
users[uname] = 1
users = {v: k for k, v in users.items()}
for _, uname in sorted(users.items()):
pstr = ""
for pchar, pkey in perms.items():
if pchar == "g" and "G" in perms:
continue
if uname in getattr(vol.axs, pkey):
pstr += pchar
if uname == "*":
uname = ""
ret.append("{} {}".format(pstr, uname).rstrip(" "))
trues = []
vals = []
for k, v in sorted(vol.flags.items()):
try:
ak = vmap[k]
if getattr(self.args, ak) is v:
continue
except:
pass
if k in lst:
for ve in v:
vals.append("c {}={}".format(k, ve))
elif v is True:
trues.append(k)
elif v is not False:
vals.append("c {}={}".format(k, v))
pops = []
for k1, k2 in IMPLICATIONS:
if k1 in trues:
pops.append(k2)
trues = [x for x in trues if x not in pops]
if trues:
ret.append("c " + ",".join(trues))
ret.extend(vals)
ret.append("")
self.log("generated config:\n\n" + "\n".join(ret))
def expand_config_file(ret: list[str], fp: str, ipath: str) -> None:
"""expand all % file includes"""
@@ -1519,7 +1681,7 @@ def expand_config_file(ret: list[str], fp: str, ipath: str) -> None:
if os.path.isdir(fp):
for fn in sorted(os.listdir(fp)):
fp2 = os.path.join(fp, fn)
if not fp2.endswith(".conf") or fp in ipath:
if not fp2.endswith(".conf") or fp2 in ipath:
continue
expand_config_file(ret, fp2, ipath)

copyparty/cfg.py (new file, 144 lines)

@@ -0,0 +1,144 @@
# coding: utf-8
from __future__ import print_function, unicode_literals
# awk -F\" '/add_argument\("-[^-]/{print(substr($2,2))}' copyparty/__main__.py | sort | tr '\n' ' '
zs = "a c e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vp e2vu ed emp i j lo mcr mte mth mtm mtp nb nc nid nih nw p q s ss sss v z zv"
onedash = set(zs.split())
def vf_bmap() -> dict[str, str]:
"""argv-to-volflag: simple bools"""
ret = {
"never_symlink": "neversymlink",
"no_dedup": "copydupes",
"no_dupe": "nodupe",
"no_forget": "noforget",
}
for k in (
"dotsrch",
"e2t",
"e2ts",
"e2tsr",
"e2v",
"e2vu",
"e2vp",
"hardlink",
"magic",
"no_sb_md",
"no_sb_lg",
"rand",
"xdev",
"xlink",
"xvol",
):
ret[k] = k
return ret
def vf_vmap() -> dict[str, str]:
"""argv-to-volflag: simple values"""
ret = {}
for k in ("lg_sbf", "md_sbf"):
ret[k] = k
return ret
def vf_cmap() -> dict[str, str]:
"""argv-to-volflag: complex/lists"""
ret = {}
for k in ("dbd", "html_head", "mte", "mth", "nrand"):
ret[k] = k
return ret
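These three maps are merged and inverted by cgen() so that volflag names can be translated back to their argv spellings, roughly:

    amap = vf_bmap()
    amap.update(vf_vmap())
    amap.update(vf_cmap())
    vmap = {v: k for k, v in amap.items()}  # volflag name -> argv name

    # e.g. vmap["neversymlink"] == "never_symlink"
    #      vmap["copydupes"] == "no_dedup"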
permdescs = {
"r": "read; list folder contents, download files",
"w": 'write; upload files; need "r" to see the uploads',
"m": 'move; move files and folders; need "w" at destination',
"d": "delete; permanently delete files and folders",
"g": "get; download files, but cannot see folder contents",
"G": 'upget; same as "g" but can see filekeys of their own uploads',
}
flagcats = {
"uploads, general": {
"nodupe": "rejects existing files (instead of symlinking them)",
"hardlink": "does dedup with hardlinks instead of symlinks",
"neversymlink": "disables symlink fallback; full copy instead",
"copydupes": "disables dedup, always saves full copies of dupes",
"daw": "enable full WebDAV write support (dangerous);\nPUT-operations will now \033[1;31mOVERWRITE\033[0;35m existing files",
"nosub": "forces all uploads into the top folder of the vfs",
"magic": "enables filetype detection for nameless uploads",
"gz": "allows server-side gzip of uploads with ?gz (also c,xz)",
"pk": "forces server-side compression, optional arg: xz,9",
},
"upload rules": {
"maxn=250,600": "max 250 uploads over 15min",
"maxb=1g,300": "max 1 GiB over 5min (suffixes: b, k, m, g)",
"rand": "force randomized filenames, 9 chars long by default",
"nrand=N": "randomized filenames are N chars long",
"sz=1k-3m": "allow filesizes between 1 KiB and 3MiB",
"df=1g": "ensure 1 GiB free disk space",
},
"upload rotation\n(moves all uploads into the specified folder structure)": {
"rotn=100,3": "3 levels of subfolders with 100 entries in each",
"rotf=%Y-%m/%d-%H": "date-formatted organizing",
"lifetime=3600": "uploads are deleted after 1 hour",
},
"database, general": {
"e2d": "sets -e2d (all -e2* args can be set using ce2* volflags)",
"d2ts": "disables metadata collection for existing files",
"d2ds": "disables onboot indexing, overrides -e2ds*",
"d2t": "disables metadata collection, overrides -e2t*",
"d2v": "disables file verification, overrides -e2v*",
"d2d": "disables all database stuff, overrides -e2*",
"hist=/tmp/cdb": "puts thumbnails and indexes at that location",
"scan=60": "scan for new files every 60sec, same as --re-maxage",
"nohash=\\.iso$": "skips hashing file contents if path matches *.iso",
"noidx=\\.iso$": "fully ignores the contents at paths matching *.iso",
"noforget": "don't forget files when deleted from disk",
"dbd=[acid|swal|wal|yolo]": "database speed-durability tradeoff",
"xlink": "cross-volume dupe detection / linking",
"xdev": "do not descend into other filesystems",
"xvol": "skip symlinks leaving the volume root",
"dotsrch": "show dotfiles in search results",
"nodotsrch": "hide dotfiles in search results (default)",
},
'database, audio tags\n"mte", "mth", "mtp", "mtm" all work the same as -mte, -mth, ...': {
"mtp=.bpm=f,audio-bpm.py": 'uses the "audio-bpm.py" program to\ngenerate ".bpm" tags from uploads (f = overwrite tags)',
"mtp=ahash,vhash=media-hash.py": "collects two tags at once",
},
"thumbnails": {
"dthumb": "disables all thumbnails",
"dvthumb": "disables video thumbnails",
"dathumb": "disables audio thumbnails (spectrograms)",
"dithumb": "disables image thumbnails",
},
"event hooks\n(better explained in --help-hooks)": {
"xbu=CMD": "execute CMD before a file upload starts",
"xau=CMD": "execute CMD after a file upload finishes",
"xbr=CMD": "execute CMD before a file rename/move",
"xar=CMD": "execute CMD after a file rename/move",
"xbd=CMD": "execute CMD before a file delete",
"xad=CMD": "execute CMD after a file delete",
"xm=CMD": "execute CMD on message",
},
"client and ux": {
"html_head=TXT": "includes TXT in the <head>",
"robots": "allows indexing by search engines (default)",
"norobots": "kindly asks search engines to leave",
"no_sb_md": "disable js sandbox for markdown files",
"no_sb_lg": "disable js sandbox for prologue/epilogue",
"sb_md": "enable js sandbox for markdown files (default)",
"sb_lg": "enable js sandbox for prologue/epilogue (default)",
"md_sbf": "list of markdown-sandbox safeguards to disable",
"lg_sbf": "list of *logue-sandbox safeguards to disable",
},
"others": {
"fk=8": 'generates per-file accesskeys,\nwhich will then be required at the "g" permission'
},
}
flagdescs = {k.split("=")[0]: v for tab in flagcats.values() for k, v in tab.items()}
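flagdescs flattens flagcats into a plain name-to-description lookup, dropping anything after "=" in the keys; _read_volflag uses it for its explanations. For instance (illustrative):

    flagdescs["nodupe"]  # "rejects existing files (instead of symlinking them)"
    flagdescs["maxn"]    # "max 250 uploads over 15min"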

copyparty/svchub.py

@@ -67,8 +67,15 @@ class SvcHub(object):
put() can return a queue (if want_reply=True) which has a blocking get() with the response.
"""
def __init__(self, args: argparse.Namespace, argv: list[str], printed: str) -> None:
def __init__(
self,
args: argparse.Namespace,
dargs: argparse.Namespace,
argv: list[str],
printed: str,
) -> None:
self.args = args
self.dargs = dargs
self.argv = argv
self.E: EnvParams = args.E
self.logf: Optional[typing.TextIO] = None
@@ -155,7 +162,14 @@
args.log_fk = re.compile(args.log_fk)
# initiate all services to manage
self.asrv = AuthSrv(self.args, self.log)
self.asrv = AuthSrv(self.args, self.log, dargs=self.dargs)
if args.cgen:
self.asrv.cgen()
if args.exit == "cfg":
sys.exit(0)
if args.ls:
self.asrv.dbg_ls()
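With the hub wiring above, one plausible way to migrate an argv-only setup to a config file is to let the new generator print it and stop right after config parsing (the account and volume spec are made-up examples):

    python -m copyparty -a ed:hunter2 -v /mnt/music:music:rw,ed --cgen --exit cfg

the result appears in the log as 'generated config:' followed by text in the same shape as sketched after cgen() above.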