Mirror of https://github.com/9001/copyparty.git (synced 2025-08-17 09:02:15 -06:00)
improve http206 and fuse-client

commit 2e33c8d222 (parent db5f07f164)
bin/copyparty-fuse.py: 48 changed lines (Executable file → Normal file)
@@ -22,7 +22,9 @@ from urllib.parse import quote_from_bytes as quote
 try:
     from fuse import FUSE, FuseOSError, Operations
 except:
-    print("\n could not import fuse;\n pip install fusepy\n")
+    print(
+        "\n could not import fuse; these may help:\n python3 -m pip install --user fusepy\n apt install libfuse\n modprobe fuse"
+    )
     raise
 
 
@@ -34,9 +36,7 @@ usage:
 
 dependencies:
   sudo apk add fuse-dev
-  python3 -m venv ~/pe/ve.fusepy
-  . ~/pe/ve.fusepy/bin/activate
-  pip install fusepy
+  python3 -m pip install --user fusepy
 
 
   MB/s
@@ -60,20 +60,21 @@ def boring_log(msg):
 def rice_tid():
     tid = threading.current_thread().ident
     c = struct.unpack(b"B" * 5, struct.pack(b">Q", tid)[-5:])
-    return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c)
+    return "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m"
 
 
 def fancy_log(msg):
-    print("{}\033[0m {}\n".format(rice_tid(), msg), end="")
+    print("{} {}\n".format(rice_tid(), msg), end="")
 
 
 def null_log(msg):
     pass
 
 
-log = boring_log
+info = fancy_log
 log = fancy_log
-log = threadless_log
+dbg = fancy_log
+log = null_log
 dbg = null_log
 
 
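For reference (not part of the commit): rice_tid() renders each byte of the thread id as a hex pair on a 256-color background; in the escape sequence, 1;37 is bold white text, 48;5;N picks background color N, and \033[0m resets. Moving the reset into rice_tid() means callers such as fancy_log() no longer have to append it themselves. A standalone illustration of the same escape codes, using made-up byte values:

    # illustration only; any five bytes of a thread id would do
    c = (0x1f, 0xa2, 0x03, 0x7c, 0x55)
    colored = "".join("\033[1;37;48;5;{}m{:02x}".format(x, x) for x in c) + "\033[0m"
    print(colored)  # five hex pairs, each on its own colored background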
@@ -118,7 +119,7 @@ class Gateway(object):
         try:
             return self.conns[tid]
         except:
-            log("new conn [{}] [{}]".format(self.web_host, self.web_port))
+            info("new conn [{}] [{}]".format(self.web_host, self.web_port))
 
             conn = http.client.HTTPConnection(self.web_host, self.web_port, timeout=260)
 
@@ -152,7 +153,7 @@ class Gateway(object):
         if r.status != 200:
             self.closeconn()
             raise Exception(
-                "http error {} reading dir {} in {:x}".format(
+                "http error {} reading dir {} in {}".format(
                     r.status, web_path, rice_tid()
                 )
             )
@@ -161,14 +162,14 @@ class Gateway(object):
 
     def download_file_range(self, path, ofs1, ofs2):
         web_path = "/" + "/".join([self.web_root, path])
-        hdr_range = "bytes={}-{}".format(ofs1, ofs2)
+        hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
         log("downloading {}".format(hdr_range))
 
         r = self.sendreq("GET", self.quotep(web_path), headers={"Range": hdr_range})
         if r.status != http.client.PARTIAL_CONTENT:
             self.closeconn()
             raise Exception(
-                "http error {} reading file {} range {} in {:x}".format(
+                "http error {} reading file {} range {} in {}".format(
                     r.status, web_path, hdr_range, rice_tid()
                 )
             )
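A side note on the range change above (not part of the commit): HTTP Range headers are inclusive at both ends, so a half-open byte window [ofs1, ofs2) has to be sent as bytes=ofs1-(ofs2-1). With this change the callers further down pass exclusive end offsets and download_file_range() does the -1 itself, which is what makes the 206 responses line up. A minimal sketch of that convention, with a hypothetical helper name:

    # hypothetical helper mirroring the convention the diff adopts:
    # callers pass the exclusive end offset, the header gets the inclusive one
    def range_header(ofs1, ofs2):
        return {"Range": "bytes={}-{}".format(ofs1, ofs2 - 1)}

    # first KiB of a file -> "bytes=0-1023"
    assert range_header(0, 1024) == {"Range": "bytes=0-1023"}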
@@ -246,14 +247,14 @@ class CPPF(Operations):
         self.filecache = []
         self.filecache_mtx = threading.Lock()
 
-        log("up")
+        info("up")
 
     def clean_dircache(self):
         """not threadsafe"""
         now = time.time()
         cutoff = 0
         for cn in self.dircache:
-            if cn.ts - now > 1:
+            if now - cn.ts > 1:
                 cutoff += 1
             else:
                 break
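The sign flip in clean_dircache() is the actual bugfix here: cn.ts records when the entry was cached, so cn.ts - now is negative for any past entry and the old test never expired anything. A tiny sketch of the corrected age check (names are illustrative, not the commit's code):

    import time

    def is_expired(entry_ts, ttl=1):
        # an entry is stale once it is more than `ttl` seconds old
        return time.time() - entry_ts > ttl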
@@ -398,7 +399,7 @@ class CPPF(Operations):
                 )
             )
 
-            buf = self.gw.download_file_range(path, h_ofs, h_end - 1)
+            buf = self.gw.download_file_range(path, h_ofs, h_end)
             ret = buf[-buf_ofs:] + cdr
 
         elif car:
@@ -416,7 +417,7 @@ class CPPF(Operations):
                 )
             )
 
-            buf = self.gw.download_file_range(path, h_ofs, h_end - 1)
+            buf = self.gw.download_file_range(path, h_ofs, h_end)
             ret = car + buf[:buf_ofs]
 
         else:
@@ -438,7 +439,7 @@ class CPPF(Operations):
                 )
             )
 
-            buf = self.gw.download_file_range(path, h_ofs, h_end - 1)
+            buf = self.gw.download_file_range(path, h_ofs, h_end)
             ret = buf[buf_ofs:buf_end]
 
             cn = CacheNode([path, h_ofs], buf)
@@ -472,13 +473,16 @@ class CPPF(Operations):
         log("read {} @ {} len {} end {}".format(path, offset, length, ofs2))
 
         file_sz = self.getattr(path)["st_size"]
-        if ofs2 >= file_sz:
-            ofs2 = file_sz - 1
-            log("truncate to len {} end {}".format((ofs2 - offset) + 1, ofs2))
+        if ofs2 > file_sz:
+            ofs2 = file_sz
+            log("truncate to len {} end {}".format(ofs2 - offset, ofs2))
 
+        if file_sz == 0 or offset >= ofs2:
+            return b""
+
         # toggle cache here i suppose
         # return self.get_cached_file(path, offset, ofs2, file_sz)
-        return self.gw.download_file_range(path, offset, ofs2 - 1)
+        return self.gw.download_file_range(path, offset, ofs2)
 
     def getattr(self, path, fh=None):
         path = path.strip("/")
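The pattern in the four hunks above is the same: offsets are kept as half-open ranges [start, end) all the way down, and download_file_range() is the only place that converts to the inclusive form the Range header wants. read() also gains a guard so empty files and reads past end-of-file return b"" instead of requesting an impossible range. A small sketch of the clamping logic (illustrative names, not the commit's code):

    def clamp_read(offset, length, file_sz):
        # clamp the half-open window [offset, offset + length) to the file size
        end = min(offset + length, file_sz)
        if file_sz == 0 or offset >= end:
            return None  # nothing to fetch; caller returns b""
        return offset, end

    assert clamp_read(4096, 4096, 6000) == (4096, 6000)  # truncated at EOF
    assert clamp_read(8192, 4096, 6000) is None          # entirely past EOF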
@@ -495,7 +499,7 @@ class CPPF(Operations):
 
         cn = self.get_cached_dir(dirpath)
         if cn:
-            # log('cache ok')
+            log("cache ok")
             dents = cn.data
         else:
             log("cache miss")

@@ -80,8 +80,9 @@ class HttpSrv(object):
                 "%s %s" % addr,
                 "shut_rdwr err:\n {}\n {}".format(repr(sck), ex),
             )
-            if ex.errno not in [10038, 107, 57, 9]:
+            if ex.errno not in [10038, 10054, 107, 57, 9]:
                 # 10038 No longer considered a socket
+                # 10054 Foribly closed by remote
                 # 107 Transport endpoint not connected
                 # 57 Socket is not connected
                 # 9 Bad file descriptor
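For reference: 10038 and 10054 are Winsock codes (WSAENOTSOCK and WSAECONNRESET), while 107, 57 and 9 are ENOTCONN on Linux, ENOTCONN on BSD/macOS, and EBADF respectively; adding 10054 means a peer forcibly resetting the connection on Windows is now also ignored here.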
@@ -309,18 +309,7 @@ def get_boundary(headers):
 def read_header(sr):
     ret = b""
     while True:
-        if ret.endswith(b"\r\n\r\n"):
-            break
-        elif ret.endswith(b"\r\n\r"):
-            n = 1
-        elif ret.endswith(b"\r\n"):
-            n = 2
-        elif ret.endswith(b"\r"):
-            n = 3
-        else:
-            n = 4
-
-        buf = sr.recv(n)
+        buf = sr.recv(1024)
         if not buf:
             if not ret:
                 return None
@@ -332,11 +321,15 @@ def read_header(sr):
             )
 
         ret += buf
+        ofs = ret.find(b"\r\n\r\n")
+        if ofs < 0:
+            if len(ret) > 1024 * 64:
+                raise Pebkac(400, "header 2big")
+            else:
+                continue
 
-        if len(ret) > 1024 * 64:
-            raise Pebkac(400, "header 2big")
-
-        return ret[:-4].decode("utf-8", "surrogateescape").split("\r\n")
+        sr.unrecv(ret[ofs + 4 :])
+        return ret[:ofs].decode("utf-8", "surrogateescape").split("\r\n")
 
 
 def undot(path):
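The rewrite replaces the old byte-at-a-time header read (1-4 byte recv() calls depending on how much of \r\n\r\n had already been seen) with bulk 1 KiB reads; anything received past the end of the header is handed back via sr.unrecv() so the request body is not lost. A self-contained sketch of that strategy, using a toy push-back stream in place of copyparty's socket reader and ValueError in place of Pebkac:

    import io

    class ToyStream:
        """stand-in for the socket reader; recv/unrecv mirror the names in the diff"""

        def __init__(self, data):
            self.f = io.BytesIO(data)
            self.back = b""

        def recv(self, n):
            if self.back:
                buf, self.back = self.back[:n], self.back[n:]
                return buf
            return self.f.read(n)

        def unrecv(self, buf):
            # push bytes back so the next recv() returns them first
            self.back = buf + self.back

    def read_header(sr):
        ret = b""
        while True:
            buf = sr.recv(1024)  # bulk reads instead of 1-4 bytes at a time
            if not buf:
                return None
            ret += buf
            ofs = ret.find(b"\r\n\r\n")
            if ofs < 0:
                if len(ret) > 1024 * 64:
                    raise ValueError("header 2big")
                continue
            sr.unrecv(ret[ofs + 4:])  # give back whatever belongs to the body
            return ret[:ofs].decode("utf-8", "surrogateescape").split("\r\n")

    sr = ToyStream(b"GET / HTTP/1.1\r\nHost: x\r\n\r\nhello")
    print(read_header(sr))  # ['GET / HTTP/1.1', 'Host: x']
    print(sr.recv(1024))    # b'hello' -- the body bytes survived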