mirror of https://github.com/9001/copyparty.git (synced 2025-08-17 09:02:15 -06:00)

add webdav write support + fix http 200/201
parent 81d896be9f
commit 20eeacaac3

@@ -723,7 +723,7 @@ on windows xp/7/8/10, connect using the explorer UI:
on windows 7/8/10, connect using command prompt (`wark`=password):
* `net use w: http://192.168.123.1:3923/ wark /user:a`

on windows 7/8/10, disable wpad for performance:
on windows (xp or later), disable wpad for performance:
* control panel -> [network and internet] -> [internet options] -> [connections] tab -> [lan settings] -> automatically detect settings: Nope

known issues:

@@ -633,7 +633,8 @@ def run_argparse(argv: list[str], formatter: Any, retry: bool) -> argparse.Names
ap2.add_argument("--ftp-pr", metavar="P-P", type=u, help="the range of TCP ports to use for passive connections, for example \033[32m12000-13000")

ap2 = ap.add_argument_group('WebDAV options')
ap2.add_argument("--dav", action="store_true", help="enable webdav")
ap2.add_argument("--dav", action="store_true", help="enable webdav; read-only even if user has write-access")
ap2.add_argument("--daw", action="store_true", help="enable full write support. \033[1;31mNB!\033[0m This has side-effects -- PUT-operations will now \033[1;31mOVERWRITE\033[0m existing files, rather than inventing new filenames to avoid loss of data. You might want to instead set this as a volflag where needed. By not setting this flag, uploaded files can get written to a filename which the client does not expect (which might be okay, depending on client)")
ap2.add_argument("--dav-nr", action="store_true", help="reject depth:infinite requests (recursive file listing); breaks spec compliance and some clients, which might be a good thing since depth:infinite is extremely server-heavy")

ap2 = ap.add_argument_group('opt-outs')

@@ -1215,6 +1215,18 @@ class AuthSrv(object):
self.log(t.format(mtp), 1)
errors = True

have_daw = False
for vol in vfs.all_vols.values():
daw = vol.flags.get("daw") or self.args.daw
if daw:
vol.flags["daw"] = True
have_daw = True

if have_daw and not self.args.dav:
t = 'volume "/{}" has volflag "daw" (webdav write-access), but argument --dav is not set'
self.log(t, 1)
errors = True

if errors:
sys.exit(1)

copyparty/dxml.py (new file, 75 lines)

@@ -0,0 +1,75 @@
import sys
import importlib
import xml.etree.ElementTree as ET

from .__init__ import PY2


try:
from typing import Any, Optional
except:
pass


def get_ET() -> ET.XMLParser:
pn = "xml.etree.ElementTree"
cn = "_elementtree"

cmod = sys.modules.pop(cn, None)
if not cmod:
return ET.XMLParser # type: ignore

pmod = sys.modules.pop(pn)
sys.modules[cn] = None # type: ignore

ret = importlib.import_module(pn)
for name, mod in ((pn, pmod), (cn, cmod)):
if mod:
sys.modules[name] = mod
else:
sys.modules.pop(name, None)

sys.modules["xml.etree"].ElementTree = pmod # type: ignore
ret.ParseError = ET.ParseError # type: ignore
return ret.XMLParser # type: ignore


XMLParser: ET.XMLParser = get_ET()


class DXMLParser(XMLParser): # type: ignore
def __init__(self) -> None:
tb = ET.TreeBuilder()
super(DXMLParser, self).__init__(target=tb)

p = self._parser if PY2 else self.parser
p.StartDoctypeDeclHandler = self.nope
p.EntityDeclHandler = self.nope
p.UnparsedEntityDeclHandler = self.nope
p.ExternalEntityRefHandler = self.nope

def nope(self, *a: Any, **ka: Any) -> None:
raise BadXML("{}, {}".format(a, ka))


class BadXML(Exception):
pass


def parse_xml(txt: str) -> ET.Element:
parser = DXMLParser()
parser.feed(txt)
return parser.close() # type: ignore


def mktnod(name: str, text: str) -> ET.Element:
el = ET.Element(name)
el.text = text
return el


def mkenod(name: str, sub_el: Optional[ET.Element] = None) -> ET.Element:
el = ET.Element(name)
if sub_el:
el.append(sub_el)
return el
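
A rough usage sketch for the new module (my addition, assuming copyparty is importable the way tests/test_dxml.py imports it): get_ET() above hides the _elementtree C accelerator while re-importing ElementTree, so XMLParser ends up being the pure-python implementation whose expat handlers DXMLParser can override -- any doctype or entity declaration is then rejected with BadXML instead of being expanded.

    from copyparty.dxml import BadXML, mkenod, mktnod, parse_xml

    # ordinary (namespaced) XML parses like regular ElementTree
    el = parse_xml('<propfind xmlns="DAV:"><prop><name/><href/></prop></propfind>')
    print([y.tag for y in el.findall("./{DAV:}prop/*")])  # ['{DAV:}name', '{DAV:}href']

    # doctype / entity declarations (billion-laughs, XXE, ...) are refused outright
    try:
        parse_xml('<!DOCTYPE x [<!ENTITY a "boom">]><l>&a;</l>')
    except BadXML:
        print("rejected, as intended")

    # helpers for building reply documents: mktnod = element with text,
    # mkenod = element wrapping an optional child element
    node = mkenod("D:lockroot", mktnod("D:href", "/foo/bar.txt"))
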
@@ -401,6 +401,12 @@ class HttpCli(object):
return self.handle_options() and self.keepalive
elif self.mode == "PROPFIND":
return self.handle_propfind() and self.keepalive
elif self.mode == "PROPPATCH":
return self.handle_proppatch() and self.keepalive
elif self.mode == "LOCK":
return self.handle_lock() and self.keepalive
elif self.mode == "UNLOCK":
return self.handle_unlock() and self.keepalive
else:
raise Pebkac(400, 'invalid HTTP mode "{0}"'.format(self.mode))

@@ -666,6 +672,9 @@ class HttpCli(object):
return self.tx_browser()

def handle_propfind(self) -> bool:
if self.do_log:
self.log("PFIND " + self.req)

if not self.args.dav:
raise Pebkac(405, "WebDAV is disabled in server config")

@@ -676,8 +685,10 @@ class HttpCli(object):
self.uparam["h"] = ""

enc = "windows-31j"
enc = "shift_jis"
from .dxml import parse_xml

# enc = "windows-31j"
# enc = "shift_jis"
enc = "utf-8"
uenc = enc.upper()

@@ -689,25 +700,9 @@ class HttpCli(object):
if not rbuf or len(buf) >= 32768:
break

props_lst: list[str] = []
props_xml = buf.decode(enc, "replace")
# dont want defusedxml just for this
ptn = re.compile("<(?:[^ :]+:)?([^ =/>]+)")
in_prop = False
for ln in props_xml.replace(">", "\n").split("\n"):
m = ptn.search(ln)
if not m:
continue

tag = m.group(1).lower()
if tag == "prop":
in_prop = not in_prop
continue

if not in_prop:
continue

props_lst.append(tag)
xroot = parse_xml(buf.decode(enc, "replace"))
xtag = next(x for x in xroot if x.tag.split("}")[-1] == "prop")
props_lst = [y.tag.split("}")[-1] for y in xtag]
else:
props_lst = [
"contentclass",

@@ -830,7 +825,122 @@ class HttpCli(object):
self.send_chunk("", enc, 0x800)
return True

def send_chunk(self, txt: str, enc: str, bmax: int):
def handle_proppatch(self) -> bool:
if self.do_log:
self.log("PPATCH " + self.req)

if not self.args.dav:
raise Pebkac(405, "WebDAV is disabled in server config")

if not self.can_write:
self.log("{} tried to proppatch [{}]".format(self.uname, self.vpath))
raise Pebkac(401, "authenticate")

from .dxml import parse_xml, mkenod, mktnod
from xml.etree import ElementTree as ET

vn, rem = self.asrv.vfs.get(self.vpath, self.uname, False, False)
# abspath = vn.dcanonical(rem)

buf = b""
for rbuf in self.get_body_reader()[0]:
buf += rbuf
if not rbuf or len(buf) >= 128 * 1024:
break

txt = buf.decode("ascii", "replace").lower()
enc = self.get_xml_enc(txt)
uenc = enc.upper()

txt = buf.decode(enc, "replace")
ET.register_namespace("D", "DAV:")
xroot = mkenod("D:orz")
xroot.insert(0, parse_xml(txt))
xprop = xroot.find(r"./{DAV:}propertyupdate/{DAV:}set/{DAV:}prop")
assert xprop
for el in xprop:
el.clear()

txt = """<multistatus xmlns="DAV:"><response><propstat><status>HTTP/1.1 403 Forbidden</status></propstat></response></multistatus>"""
xroot = parse_xml(txt)

el = xroot.find(r"./{DAV:}response")
assert el
e2 = mktnod("D:href", "/" + self.vpath)
el.insert(0, e2)

el = xroot.find(r"./{DAV:}response/{DAV:}propstat")
assert el
el.insert(0, xprop)

ret = '<?xml version="1.0" encoding="{}"?>\n'.format(uenc)
ret += ET.tostring(xroot).decode("utf-8")

self.reply(ret.encode(enc, "replace"), 207, "text/xml; charset=" + enc)
return True

def handle_lock(self) -> bool:
if self.do_log:
self.log("LOCK " + self.req)

if not self.args.dav:
raise Pebkac(405, "WebDAV is disabled in server config")

if not self.can_write:
self.log("{} tried to lock [{}]".format(self.uname, self.vpath))
raise Pebkac(401, "authenticate")

from .dxml import parse_xml, mkenod, mktnod
from xml.etree import ElementTree as ET

buf = b""
for rbuf in self.get_body_reader()[0]:
buf += rbuf
if not rbuf or len(buf) >= 128 * 1024:
break

txt = buf.decode("ascii", "replace").lower()
enc = self.get_xml_enc(txt)
uenc = enc.upper()

txt = buf.decode(enc, "replace")
ET.register_namespace("D", "DAV:")
lk = parse_xml(txt)
assert lk.tag == "{DAV:}lockinfo"

if not lk.find(r"./{DAV:}depth"):
lk.append(mktnod("D:depth", "infinity"))

lk.append(mkenod("D:timeout", mktnod("D:href", "Second-3600")))
lk.append(mkenod("D:locktoken", mktnod("D:href", "56709")))
lk.append(mkenod("D:lockroot", mktnod("D:href", "/foo/bar.txt")))

lk2 = mkenod("D:activelock")
xroot = mkenod("D:prop", mkenod("D:lockdiscovery", lk2))
for a in lk:
lk2.append(a)

ret = '<?xml version="1.0" encoding="{}"?>\n'.format(uenc)
ret += ET.tostring(xroot).decode("utf-8")

self.reply(ret.encode(enc, "replace"), 207, "text/xml; charset=" + enc)
return True

def handle_unlock(self) -> bool:
if self.do_log:
self.log("UNLOCK " + self.req)

if not self.args.dav:
raise Pebkac(405, "WebDAV is disabled in server config")

if not self.can_write:
self.log("{} tried to lock [{}]".format(self.uname, self.vpath))
raise Pebkac(401, "authenticate")

self.send_headers(None, 204)
return True

def send_chunk(self, txt: str, enc: str, bmax: int) -> str:
orig_len = len(txt)
buf = txt[:bmax].encode(enc, "replace")[:bmax]
try:

@@ -875,13 +985,17 @@ class HttpCli(object):
def handle_put(self) -> bool:
self.log("PUT " + self.req)

if not self.can_write:
t = "{} does not have write-access here"
raise Pebkac(403, t.format(self.uname))

if self.headers.get("expect", "").lower() == "100-continue":
try:
self.s.sendall(b"HTTP/1.1 100 Continue\r\n\r\n")
except:
raise Pebkac(400, "client d/c before 100 continue")

return self.handle_stash()
return self.handle_stash(True)

def handle_post(self) -> bool:
self.log("POST " + self.req)

@@ -893,7 +1007,7 @@ class HttpCli(object):
raise Pebkac(400, "client d/c before 100 continue")

if "raw" in self.uparam:
return self.handle_stash()
return self.handle_stash(False)

ctype = self.headers.get("content-type", "").lower()
if not ctype:

@@ -911,10 +1025,10 @@ class HttpCli(object):
if "application/x-www-form-urlencoded" in ctype:
opt = self.args.urlform
if "stash" in opt:
return self.handle_stash()
return self.handle_stash(False)

if "save" in opt:
post_sz, _, _, _, path, _ = self.dump_to_file()
post_sz, _, _, _, path, _ = self.dump_to_file(False)
self.log("urlform: {} bytes, {}".format(post_sz, path))
elif "print" in opt:
reader, _ = self.get_body_reader()

@@ -946,6 +1060,21 @@ class HttpCli(object):
raise Pebkac(405, "don't know how to handle POST({})".format(ctype))

def get_xml_enc(self, txt) -> str:
ofs = txt[:512].find(' encoding="')
enc = ""
if ofs + 1:
enc = txt[ofs + 6 :].split('"')[1]
else:
enc = self.headers.get("content-type", "").lower()
ofs = enc.find("charset=")
if ofs + 1:
enc = enc[ofs + 4 :].split("=")[1].split(";")[0].strip("\"'")
else:
enc = ""

return enc or "utf-8"
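
For reference, a standalone re-sketch (hypothetical helper, not copyparty code) of the precedence get_xml_enc applies: the encoding attribute of the XML declaration wins, then the charset parameter of the Content-Type header, then utf-8.

    def sketch_xml_enc(body_head: str, content_type: str) -> str:
        ofs = body_head.find(' encoding="')
        if ofs + 1:  # found an XML declaration with an encoding attribute
            return body_head[ofs + 11 :].split('"')[0]
        ofs = content_type.find("charset=")
        if ofs + 1:  # fall back to the Content-Type charset
            return content_type[ofs + 8 :].split(";")[0].strip("\"'")
        return "utf-8"  # last resort

    print(sketch_xml_enc('<?xml version="1.0" encoding="Shift_JIS"?><a/>', ""))  # Shift_JIS
    print(sketch_xml_enc("<propfind/>", 'text/xml; charset="utf-8"'))  # utf-8
    print(sketch_xml_enc("<propfind/>", ""))  # utf-8
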

def get_body_reader(self) -> tuple[Generator[bytes, None, None], int]:
if "chunked" in self.headers.get("transfer-encoding", "").lower():
return read_socket_chunked(self.sr), -1

@@ -957,7 +1086,7 @@ class HttpCli(object):
else:
return read_socket(self.sr, remains), remains

def dump_to_file(self) -> tuple[int, str, str, int, str, str]:
def dump_to_file(self, is_put) -> tuple[int, str, str, int, str, str]:
# post_sz, sha_hex, sha_b64, remains, path, url
reader, remains = self.get_body_reader()
vfs, rem = self.asrv.vfs.get(self.vpath, self.uname, False, True)

@@ -1041,6 +1170,9 @@ class HttpCli(object):
if rnd and not self.args.nw:
fn = self.rand_name(fdir, fn, rnd)

if is_put and "daw" in vfs.flags:
params["overwrite"] = "a"

with ren_open(fn, *open_a, **params) as zfw:
f, fn = zfw["orz"]
path = os.path.join(fdir, fn)

@@ -1111,8 +1243,8 @@ class HttpCli(object):

return post_sz, sha_hex, sha_b64, remains, path, url

def handle_stash(self) -> bool:
post_sz, sha_hex, sha_b64, remains, path, url = self.dump_to_file()
def handle_stash(self, is_put) -> bool:
post_sz, sha_hex, sha_b64, remains, path, url = self.dump_to_file(is_put)
spd = self._spd(post_sz)
t = "{} wrote {}/{} bytes to {} # {}"
self.log(t.format(spd, post_sz, remains, path, sha_b64[:28])) # 21

@@ -1125,7 +1257,8 @@ class HttpCli(object):
else:
t = "{}\n{}\n{}\n{}\n".format(post_sz, sha_b64, sha_hex[:56], url)

self.reply(t.encode("utf-8"))
h = {"Location": url} if is_put else {}
self.reply(t.encode("utf-8"), 201, headers=h)
return True

def bakflip(self, f: typing.BinaryIO, ofs: int, sz: int, sha: str) -> None:

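
The practical effect of the change above: a raw PUT (the stash / webdav upload path) now answers 201 Created with a Location header pointing at the stored file, instead of a bare 200. A quick way to observe it, assuming a local instance on the default port 3923 where the user has write access (hypothetical filename):

    import http.client

    conn = http.client.HTTPConnection("127.0.0.1", 3923)
    conn.request("PUT", "/hello.txt", body=b"hello\n")
    resp = conn.getresponse()
    # expected after this commit: 201 Created plus the resulting file's URL
    print(resp.status, resp.reason, resp.getheader("Location"))
    print(resp.read().decode("utf-8"))  # size, hashes and url, as before
    conn.close()
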
@@ -1560,7 +1693,7 @@ class HttpCli(object):

vpath = "{}/{}".format(self.vpath, sanitized).lstrip("/")
self.out_headers["X-New-Dir"] = quotep(sanitized)
self.redirect(vpath)
self.redirect(vpath, status=201)
return True

def handle_new_md(self) -> bool:

@@ -1801,7 +1934,7 @@ class HttpCli(object):
except Exception as ex:
suf = "\nfailed to write the upload report: {}".format(ex)

sc = 400 if errmsg else 200
sc = 400 if errmsg else 201
if want_url:
msg = "\n".join([x["url"] for x in jmsg["files"]])
if errmsg:

@@ -151,6 +151,7 @@ META_NOBOTS = '<meta name="robots" content="noindex, nofollow">'

HTTPCODE = {
200: "OK",
201: "Created",
204: "No Content",
206: "Partial Content",
207: "Multi-Status",

@@ -182,6 +183,7 @@ IMPLICATIONS = [
["e2vu", "e2v"],
["e2vp", "e2v"],
["e2v", "e2d"],
["daw", "dav"],
]
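
The new ["daw", "dav"] pair makes the global --daw switch turn on --dav as well. A tiny standalone sketch (hypothetical helper, not the actual copyparty resolver) of how such an implications table behaves:

    IMPLICATIONS = [["daw", "dav"]]

    def apply_implications(enabled):
        # keep looping until no rule adds anything new, so chained rules resolve too
        changed = True
        while changed:
            changed = False
            for flag, implied in IMPLICATIONS:
                if flag in enabled and implied not in enabled:
                    enabled.add(implied)
                    changed = True
        return enabled

    print(sorted(apply_implications({"daw"})))  # ['dav', 'daw']
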
@@ -993,12 +995,20 @@ def ren_open(
fun = kwargs.pop("fun", open)
fdir = kwargs.pop("fdir", None)
suffix = kwargs.pop("suffix", None)
overwrite = kwargs.pop("overwrite", None)

if fname == os.devnull:
with fun(fname, *args, **kwargs) as f:
yield {"orz": (f, fname)}
return

if overwrite:
assert fdir
fpath = os.path.join(fdir, fname)
with fun(fsenc(fpath), *args, **kwargs) as f:
yield {"orz": (f, fname)}
return

if suffix:
ext = fname.split(".")[-1]
if len(ext) < 7:

@@ -6145,7 +6145,7 @@ var msel = (function () {

xhrchk(this, L.fd_xe1, L.fd_xe2);

if (this.status !== 200) {
if (this.status !== 201) {
sf.textContent = 'error: ' + this.responseText;
return;
}

@@ -6192,7 +6192,7 @@ var msel = (function () {
function cb() {
xhrchk(this, L.fsm_xe1, L.fsm_xe2);

if (this.status !== 200) {
if (this.status !== 201) {
sf.textContent = 'error: ' + this.responseText;
return;
}

@@ -9,6 +9,8 @@ python3 ../scripts/strip_hints/a.py

pids=()
for py in python{2,3}; do
[ ${1:0:6} = python ] && [ $1 != $py ] && continue

PYTHONPATH=
[ $py = python2 ] && PYTHONPATH=../scripts/py2
export PYTHONPATH

@@ -11,6 +11,7 @@ copyparty/broker_mp.py,
copyparty/broker_mpw.py,
copyparty/broker_thr.py,
copyparty/broker_util.py,
copyparty/dxml.py,
copyparty/fsutil.py,
copyparty/ftpd.py,
copyparty/httpcli.py,

@@ -144,11 +144,11 @@ def tc1(vflags):
files={"f": (d.replace("/", "") + ".h264", vid)},
)
c = r.status_code
if c == 200 and p not in ["w", "rw"]:
if c == 201 and p not in ["w", "rw"]:
raise Exception("post {} with perm {} at {}".format(c, p, d))
elif c == 403 and p not in ["r"]:
raise Exception("post {} with perm {} at {}".format(c, p, d))
elif c not in [200, 403]:
elif c not in [201, 403]:
raise Exception("post {} with perm {} at {}".format(c, p, d))

cpp.clean()

tests/test_dxml.py (new file, 139 lines)

@@ -0,0 +1,139 @@
#!/usr/bin/env python3
# coding: utf-8
from __future__ import print_function, unicode_literals

import re
import unittest

from xml.etree import ElementTree as ET
from copyparty.dxml import parse_xml, BadXML, mkenod, mktnod

ET.register_namespace("D", "DAV:")


def _parse(txt):
try:
parse_xml(txt)
raise Exception("unsafe")
except BadXML:
pass


class TestDXML(unittest.TestCase):
def test1(self):
txt = r"""<!DOCTYPE qbe [
<!ENTITY a "nice_bakuretsu">
]>
<l>&a;&a;&a;&a;&a;&a;&a;&a;&a;</l>"""
_parse(txt)
ET.fromstring(txt)

def test2(self):
txt = r"""<!DOCTYPE ext [
<!ENTITY ee SYSTEM "file:///bin/bash">
]>
<root>&ee;</root>"""
_parse(txt)
try:
ET.fromstring(txt)
raise Exception("unsafe2")
except ET.ParseError:
pass

def test3(self):
txt = r"""<?xml version="1.0" ?>
<propfind xmlns="DAV:">
<prop>
<name/>
<href/>
</prop>
</propfind>
"""
txt = txt.replace("\n", "\r\n")
ET.fromstring(txt)
el = parse_xml(txt)
self.assertListEqual(
[y.tag for y in el.findall(r"./{DAV:}prop/*")],
[r"{DAV:}name", r"{DAV:}href"],
)

def test4(self):
txt = r"""<?xml version="1.0" encoding="utf-8" ?>
<D:propertyupdate xmlns:D="DAV:" xmlns:Z="urn:schemas-microsoft-com:">
<D:set>
<D:prop>
<Z:Win32CreationTime>Thu, 20 Oct 2022 02:16:33 GMT</Z:Win32CreationTime>
<Z:Win32LastAccessTime>Thu, 20 Oct 2022 02:16:35 GMT</Z:Win32LastAccessTime>
<Z:Win32LastModifiedTime>Thu, 20 Oct 2022 02:16:33 GMT</Z:Win32LastModifiedTime>
<Z:Win32FileAttributes>00000000</Z:Win32FileAttributes>
</D:prop>
</D:set>
</D:propertyupdate>"""

ref = r"""<?xml version="1.0" encoding="UTF-8"?>
<D:multistatus xmlns:D="DAV:">
<D:response>
<D:href>/d1/foo.txt</D:href>
<D:propstat>
<D:prop>
<Win32CreationTime xmlns="urn:schemas-microsoft-com:"></Win32CreationTime>
<Win32LastAccessTime xmlns="urn:schemas-microsoft-com:"></Win32LastAccessTime>
<Win32LastModifiedTime xmlns="urn:schemas-microsoft-com:"></Win32LastModifiedTime>
<Win32FileAttributes xmlns="urn:schemas-microsoft-com:"></Win32FileAttributes>
</D:prop>
<D:status>HTTP/1.1 403 Forbidden</D:status>
</D:propstat>
</D:response>
</D:multistatus>"""

txt = re.sub("\n +", "\n", txt)
root = mkenod("a")
root.insert(0, parse_xml(txt))
prop = root.find(r"./{DAV:}propertyupdate/{DAV:}set/{DAV:}prop")
assert prop
for el in prop:
el.clear()

res = ET.tostring(prop).decode("utf-8")
want = """<D:prop xmlns:D="DAV:" xmlns:ns1="urn:schemas-microsoft-com:">
<ns1:Win32CreationTime /><ns1:Win32LastAccessTime /><ns1:Win32LastModifiedTime /><ns1:Win32FileAttributes /></D:prop>
"""
self.assertEqual(res, want)

def test5(self):
txt = r"""<?xml version="1.0" encoding="utf-8" ?>
<D:lockinfo xmlns:D="DAV:">
<D:lockscope><D:exclusive/></D:lockscope>
<D:locktype><D:write/></D:locktype>
<D:owner><D:href>DESKTOP-FRS9AO2\ed</D:href></D:owner>
</D:lockinfo>"""

ref = r"""<?xml version="1.0" encoding="utf-8"?>
<D:prop xmlns:D="DAV:"><D:lockdiscovery><D:activelock>
<D:locktype><D:write/></D:locktype>
<D:lockscope><D:exclusive/></D:lockscope>
<D:depth>infinity</D:depth>
<D:owner><D:href>DESKTOP-FRS9AO2\ed</D:href></D:owner>
<D:timeout>Second-3600</D:timeout>
<D:locktoken><D:href>1666199679</D:href></D:locktoken>
<D:lockroot><D:href>/d1/foo.txt</D:href></D:lockroot>
</D:activelock></D:lockdiscovery></D:prop>"""

txt = re.sub("\n +", "\n", txt)
ns = {"": "DAV:"}
lk = parse_xml(txt)
self.assertEqual(lk.tag, "{DAV:}lockinfo")

if not lk.find(r"./{DAV:}depth"):
lk.append(mktnod("D:depth", "infinity"))

lk.append(mkenod("D:timeout", mktnod("D:href", "Second-3600")))
lk.append(mkenod("D:locktoken", mktnod("D:href", "56709")))
lk.append(mkenod("D:lockroot", mktnod("D:href", "/foo/bar.txt")))

lk2 = mkenod("D:activelock")
root = mkenod("D:prop", mkenod("D:lockdiscovery", lk2))
for a in lk:
lk2.append(a)

print(ET.tostring(root).decode("utf-8"))

@@ -139,7 +139,7 @@ class TestHttpCli(unittest.TestCase):

# stash
h, ret = self.put(url)
res = h.startswith("HTTP/1.1 200 ")
res = h.startswith("HTTP/1.1 201 ")
self.assertEqual(res, wok)

def can_rw(self, fp):

@@ -171,9 +171,12 @@ class TestHttpCli(unittest.TestCase):
def put(self, url):
buf = "PUT /{0} HTTP/1.1\r\nCookie: cppwd=o\r\nConnection: close\r\nContent-Length: {1}\r\n\r\nok {0}\n"
buf = buf.format(url, len(url) + 4).encode("utf-8")
print("PUT -->", buf)
conn = tu.VHttpConn(self.args, self.asrv, self.log, buf)
HttpCli(conn).run()
return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
ret = conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)
print("PUT <--", ret)
return ret

def curl(self, url, binary=False):
conn = tu.VHttpConn(self.args, self.asrv, self.log, hdr(url))

@@ -185,5 +188,4 @@ class TestHttpCli(unittest.TestCase):
return conn.s._reply.decode("utf-8").split("\r\n\r\n", 1)

def log(self, src, msg, c=0):
# print(repr(msg))
pass
print(msg)

@@ -98,7 +98,7 @@ class Cfg(Namespace):
def __init__(self, a=None, v=None, c=None):
ka = {}

ex = "e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp xdev xvol ed emp force_js ihead magic no_acode no_athumb no_del no_logues no_mv no_readme no_robots no_scandir no_thumb no_vthumb no_zip nid nih nw"
ex = "e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp dav daw xdev xvol ed emp force_js ihead magic no_acode no_athumb no_del no_logues no_mv no_readme no_robots no_scandir no_thumb no_vthumb no_zip nid nih nw"
ka.update(**{k: False for k in ex.split()})

ex = "no_rescan no_sendfile no_voldump plain_ip"