mirror of https://github.com/9001/copyparty.git
text-editor: optional EOL conversion; closes #513
parent bcc3b1568e
commit 8b31ed8816
@@ -1528,6 +1528,7 @@ def add_db_metadata(ap):
 def add_txt(ap):
     ap2 = ap.add_argument_group("textfile options")
     ap2.add_argument("--md-hist", metavar="TXT", type=u, default="s", help="where to store old version of markdown files; [\033[32ms\033[0m]=subfolder, [\033[32mv\033[0m]=volume-histpath, [\033[32mn\033[0m]=nope/disabled (volflag=md_hist)")
+    ap2.add_argument("--txt-eol", metavar="TYPE", type=u, default="", help="enable EOL conversion when writing documents; supported: CRLF, LF (volflag=txt_eol)")
     ap2.add_argument("-mcr", metavar="SEC", type=int, default=60, help="the textfile editor will check for serverside changes every \033[33mSEC\033[0m seconds")
     ap2.add_argument("-emp", action="store_true", help="enable markdown plugins -- neat but dangerous, big XSS risk")
     ap2.add_argument("--exp", action="store_true", help="enable textfile expansion -- replace {{self.ip}} and such; see \033[33m--help-exp\033[0m (volflag=exp)")
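
For context, the new switch is off by default (empty value) and accepts LF or CRLF; a hedged usage sketch, assuming the usual copyparty launch style and a placeholder volume path:

# normalize every document saved through the text editor to LF:
#   python3 -m copyparty --txt-eol lf -v srv/docs::rw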
@@ -111,6 +111,7 @@ def vf_vmap() -> dict[str, str]:
         "tail_tmax",
         "tail_who",
         "tcolor",
+        "txt_eol",
         "unlist",
         "u2abort",
         "u2ts",
@@ -322,6 +323,7 @@ flagcats = {
         "exp": "enable textfile expansion; see --help-exp",
         "exp_md": "placeholders to expand in markdown files; see --help",
         "exp_lg": "placeholders to expand in prologue/epilogue; see --help",
+        "txt_eol=lf": "enable EOL conversion when writing docs (LF or CRLF)",
     },
     "tailing": {
         "notail": "disable ?tail (download a growing file continuously)",
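
The flagcats entry above documents the per-volume form; a hedged sketch assuming copyparty's usual -v volume syntax (path and mountpoint are placeholders):

# CRLF for this volume only; other volumes keep whatever the client sent:
#   python3 -m copyparty -v srv/notes:notes:rw:c,txt_eol=crlf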
@@ -62,6 +62,7 @@ from .util import (
     alltrace,
     atomic_move,
     b64dec,
+    eol_conv,
     exclude_dotfiles,
     formatdate,
     fsenc,
@@ -3679,6 +3680,9 @@ class HttpCli(object):
         if p_field != "body":
             raise Pebkac(400, "expected body, got {}".format(p_field))

+        if "txt_eol" in vfs.flags:
+            p_data = eol_conv(p_data, vfs.flags["txt_eol"])
+
         xbu = vfs.flags.get("xbu")
         if xbu:
             if not runhook(
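
Since p_data is a generator of byte chunks from the request body, wrapping it in eol_conv keeps the save path streaming: chunks are rewritten on the fly instead of buffering the whole document. A minimal sketch of the same pattern, using the eol_conv helper added below (fake_body is a hypothetical stand-in for the parsed multipart body):

def fake_body():
    # hypothetical chunk source; the real reader yields body chunks like this
    yield b"first line\r\n"
    yield b"second line\n"

chunks = eol_conv(fake_body(), "lf")  # still a generator, nothing converted yet
with open("doc.md", "wb") as f:
    for buf in chunks:
        f.write(buf)  # file ends up with LF-only line endings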
@@ -2982,6 +2982,18 @@ def justcopy(
     return tlen, "checksum-disabled", "checksum-disabled"


+def eol_conv(
+    fin: Generator[bytes, None, None],
+    conv: str
+) -> Generator[bytes, None, None]:
+    crlf = conv.lower() == "crlf"
+    for buf in fin:
+        buf = buf.replace(b"\r", b"")
+        if crlf:
+            buf = buf.replace(b"\n", b"\r\n")
+        yield buf
+
+
 def hashcopy(
     fin: Generator[bytes, None, None],
     fout: Union[typing.BinaryIO, typing.IO[Any]],
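
To illustrate eol_conv: every CR is stripped from each chunk, and only in CRLF mode are the remaining LFs re-expanded, so mixed line endings normalize either way; and because lone \r bytes are dropped no matter where a chunk boundary falls, a CRLF pair split across two chunks is still handled correctly. A quick demo (plain Python, not part of the commit):

chunks = [b"one\r\n", b"two\n", b"three\r", b"\nfour\n"]  # mixed EOLs; the last two chunks split a CRLF pair

print(b"".join(eol_conv(iter(chunks), "lf")))
# b'one\ntwo\nthree\nfour\n'

print(b"".join(eol_conv(iter(chunks), "crlf")))
# b'one\r\ntwo\r\nthree\r\nfour\r\n'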
@@ -164,7 +164,7 @@ class Cfg(Namespace):
         ex = "ctl_re db_act forget_ip idp_cookie idp_store k304 loris no304 nosubtle re_maxage rproxy rsp_jtr rsp_slp s_wr_slp snap_wri theme themes turbo u2ow zipmaxn zipmaxs"
         ka.update(**{k: 0 for k in ex.split()})

-        ex = "ah_alg bname chmod_f chpw_db doctitle df exit favico idp_h_usr ipa html_head lg_sba lg_sbf log_fk md_sba md_sbf name og_desc og_site og_th og_title og_title_a og_title_v og_title_i shr tcolor textfiles unlist vname xff_src zipmaxt R RS SR"
+        ex = "ah_alg bname chmod_f chpw_db doctitle df exit favico idp_h_usr ipa html_head lg_sba lg_sbf log_fk md_sba md_sbf name og_desc og_site og_th og_title og_title_a og_title_v og_title_i shr tcolor textfiles txt_eol unlist vname xff_src zipmaxt R RS SR"
         ka.update(**{k: "" for k in ex.split()})

         ex = "ban_403 ban_404 ban_422 ban_pw ban_pwc ban_url spinner"