Mirror of https://github.com/9001/copyparty.git (synced 2025-10-02 14:42:28 -06:00)
Clean up scripts with shellcheck and ruff
This commit contains a variety of non-destructive fixes and formatting changes recommended by shellcheck and by ruff's check/format: shell variables and command substitutions are double-quoted to prevent word-splitting on spaces, long lines in the python files are reflowed, and unused imports are removed.
parent abffda5474
commit 433798a09d
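
For context, a minimal sketch of the word-splitting behaviour that shellcheck warns about (e.g. SC2046) and that the added double quotes in the hunks below prevent; the directory name and the printf demo are hypothetical, not part of the repository:

#!/bin/sh
# hypothetical directory whose name contains a space
mkdir -p "/tmp/copy party" && cd "/tmp/copy party" || exit 1

# unquoted: each $(pwd) is split on the space, so the command
# receives four mangled path fragments instead of two paths
printf '<%s> ' --bind $(pwd) $(pwd); echo

# quoted: each "$(pwd)" stays a single argument, as intended
printf '<%s> ' --bind "$(pwd)" "$(pwd)"; echo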
@@ -1,5 +1,5 @@
 #!/bin/sh
-# usage: ./bubbleparty.sh ./copyparty-sfx.py ....
+# usage: ./bubbleparty.sh ./copyparty-sfx.py ...
 bwrap \
     --unshare-all \
     --ro-bind /usr /usr \
@@ -9,11 +9,11 @@ bwrap \
     --dev-bind /dev /dev \
     --dir /tmp \
     --dir /var \
-    --bind $(pwd) $(pwd) \
+    --bind "$(pwd)" "$(pwd)" \
     --share-net \
     --die-with-parent \
     --file 11 /etc/passwd \
     --file 12 /etc/group \
     "$@" \
-    11< <(getent passwd $(id -u) 65534) \
-    12< <(getent group $(id -g) 65534)
+    11< <(getent passwd "$(id -u)" 65534) \
+    12< <(getent group "$(id -g)" 65534)

@@ -79,7 +79,6 @@ def compare(n1, d1, n2, d2, verbose):
     miss = {}
     nmiss = 0
     for w1s, k, v in d1.execute("select * from mt"):
-
         n += 1
         if n % 100_000 == 0:
             m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m"
@@ -102,9 +101,7 @@ def compare(n1, d1, n2, d2, verbose):

         v2 = None
         if w2:
-            v2 = d2.execute(
-                "select v from mt where w = ? and +k = ?", (w2[:16], k)
-            ).fetchone()
+            v2 = d2.execute("select v from mt where w = ? and +k = ?", (w2[:16], k)).fetchone()
             if v2:
                 v2 = v2[0]

@@ -3,7 +3,6 @@
 import re
 import sys
 import json
-import time
 import base64
 import hashlib
 import subprocess as sp
@@ -138,7 +138,7 @@ def freg_conv(pd):
     }

     if [x for x in vid_id if x not in string.ascii_letters + string.digits + "_-"]:
-        print(f"malicious json", file=sys.stderr)
+        print("malicious json", file=sys.stderr)
         return

     basepath = os.path.dirname(sys.argv[1])

@@ -80,9 +80,7 @@ def print(*args, **kwargs):

 def termsafe(txt):
     try:
-        return txt.encode(sys.stdout.encoding, "backslashreplace").decode(
-            sys.stdout.encoding
-        )
+        return txt.encode(sys.stdout.encoding, "backslashreplace").decode(sys.stdout.encoding)
     except:
         return txt.encode(sys.stdout.encoding, "replace").decode(sys.stdout.encoding)

@@ -361,9 +359,7 @@ class Gateway(object):
         except:
             tid = threading.current_thread().ident
             dbg(
-                "\033[1;37;44mbad conn {:x}\n {} {}\n {}\033[0m".format(
-                    tid, meth, path, c.rx_path if c else "(null)"
-                )
+                "\033[1;37;44mbad conn {:x}\n {} {}\n {}\033[0m".format(tid, meth, path, c.rx_path if c else "(null)")
             )

             self.closeconn(c)
@@ -390,11 +386,7 @@ class Gateway(object):
         c = self.sendreq("GET", web_path, {})
         if c.rx.status != 200:
             self.closeconn(c)
-            log(
-                "http error {} reading dir {} in {}".format(
-                    c.rx.status, web_path, rice_tid()
-                )
-            )
+            log("http error {} reading dir {} in {}".format(c.rx.status, web_path, rice_tid()))
             raise FuseOSError(errno.ENOENT)

         if not c.rx.getheader("Content-Type", "").startswith("text/html"):
@@ -435,19 +427,13 @@ class Gateway(object):

         web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
         hdr_range = "bytes={}-".format(ofs1)
-        info(
-            "DL {:4.0f}K\033[36m{:>9}-{:<9}\033[0m{}".format(
-                (ofs2 - ofs1) / 1024.0, ofs1, ofs2 - 1, hexler(path)
-            )
-        )
+        info("DL {:4.0f}K\033[36m{:>9}-{:<9}\033[0m{}".format((ofs2 - ofs1) / 1024.0, ofs1, ofs2 - 1, hexler(path)))

         c = self.sendreq("GET", web_path, {"Range": hdr_range})
         if c.rx.status != http.client.PARTIAL_CONTENT:
             self.closeconn(c)
             raise Exception(
-                "http error {} reading file {} range {} in {}".format(
-                    c.rx.status, web_path, hdr_range, rice_tid()
-                )
+                "http error {} reading file {} range {} in {}".format(c.rx.status, web_path, hdr_range, rice_tid())
             )

         ret = c.rx.read(ofs2 - ofs1)
@@ -644,9 +630,7 @@ class CPPF(Operations):

         if get1 >= cache1 and get2 <= cache2:
             # keep cache entry alive by moving it to the end
-            self.filecache = (
-                self.filecache[:ncn] + self.filecache[ncn + 1 :] + [cn]
-            )
+            self.filecache = self.filecache[:ncn] + self.filecache[ncn + 1 :] + [cn]
             buf_ofs = get1 - cache1
             buf_end = buf_ofs + (get2 - get1)
             dbg(
@@ -729,32 +713,20 @@ class CPPF(Operations):

             buf_ofs = get1 - h_ofs

-            dbg(
-                "<cache> cdr {}, car {}:{} |{}| [{}:]".format(
-                    len(cdr), h_ofs, h_end, h_end - h_ofs, buf_ofs
-                )
-            )
+            dbg("<cache> cdr {}, car {}:{} |{}| [{}:]".format(len(cdr), h_ofs, h_end, h_end - h_ofs, buf_ofs))

             buf, c = self.gw.download_file_range(path, h_ofs, h_end)
             if len(buf) == h_end - h_ofs:
                 ret = buf[buf_ofs:] + cdr
             else:
                 ret = buf[get1 - h_ofs :]
-                info(
-                    "remote truncated {}:{} to |{}|, will return |{}|".format(
-                        h_ofs, h_end, len(buf), len(ret)
-                    )
-                )
+                info("remote truncated {}:{} to |{}|, will return |{}|".format(h_ofs, h_end, len(buf), len(ret)))

         elif car:
             h_ofs = get1 + len(car)
             buf_ofs = (get2 - get1) - len(car)

-            dbg(
-                "<cache> car {}, cdr {}:{} |{}| [:{}]".format(
-                    len(car), h_ofs, get2, get2 - h_ofs, buf_ofs
-                )
-            )
+            dbg("<cache> car {}, cdr {}:{} |{}| [:{}]".format(len(car), h_ofs, get2, get2 - h_ofs, buf_ofs))

             buf, c = self.gw.download_file_range(path, h_ofs, get2)
             ret = car + buf[:buf_ofs]
@@ -771,11 +743,7 @@ class CPPF(Operations):
             buf_ofs = get1 - h_ofs
             buf_end = buf_ofs + get2 - get1

-            dbg(
-                "<cache> {}:{} |{}| [{}:{}]".format(
-                    h_ofs, get2, get2 - h_ofs, buf_ofs, buf_end
-                )
-            )
+            dbg("<cache> {}:{} |{}| [{}:{}]".format(h_ofs, get2, get2 - h_ofs, buf_ofs, buf_end))

             buf, c = self.gw.download_file_range(path, h_ofs, get2)
             ret = buf[buf_ofs:buf_end]
@@ -783,7 +751,10 @@ class CPPF(Operations):
         if c and c.cnode and len(c.cnode.data) + len(buf) < 1024 * 1024:
             dbg(
                 "cache: {}(@{}) + {}(@{})".format(
-                    len(c.cnode.data), c.cnode.tag[1], len(buf), buf_ofs, get1
+                    len(c.cnode.data),
+                    c.cnode.tag[1],
+                    len(buf),
+                    buf_ofs,
                 )
             )
             c.cnode.data += buf
@@ -828,11 +799,7 @@ class CPPF(Operations):
         path = path.strip("/")
         ofs2 = offset + length
         file_sz = self.getattr(path)["st_size"]
-        log(
-            "read {} |{}| {}:{} max {}".format(
-                hexler(path), length, offset, ofs2, file_sz
-            )
-        )
+        log("read {} |{}| {}:{} max {}".format(hexler(path), length, offset, ofs2, file_sz))
         if ofs2 > file_sz:
             ofs2 = file_sz
             log("truncate to |{}| :{}".format(ofs2 - offset, ofs2))
@@ -1005,9 +972,7 @@ class CPPF(Operations):
         raise FuseOSError(errno.ENOENT)


-class TheArgparseFormatter(
-    argparse.RawTextHelpFormatter, argparse.ArgumentDefaultsHelpFormatter
-):
+class TheArgparseFormatter(argparse.RawTextHelpFormatter, argparse.ArgumentDefaultsHelpFormatter):
     pass


@@ -1039,12 +1004,8 @@ def main():
         formatter_class=TheArgparseFormatter,
         epilog="example:" + ex_pre + ex_pre.join(examples),
     )
-    ap.add_argument(
-        "-cd", metavar="NUM_SECONDS", type=float, default=nd, help="directory cache"
-    )
-    ap.add_argument(
-        "-cf", metavar="NUM_BLOCKS", type=int, default=nf, help="file cache"
-    )
+    ap.add_argument("-cd", metavar="NUM_SECONDS", type=float, default=nd, help="directory cache")
+    ap.add_argument("-cf", metavar="NUM_BLOCKS", type=int, default=nf, help="file cache")
     ap.add_argument("-a", metavar="PASSWORD", help="password")
     ap.add_argument("-d", action="store_true", help="enable debug")
     ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")

@@ -1,6 +1,7 @@
 #!/usr/bin/env python3

 """partyfuse: remote copyparty as a local filesystem"""
+
 __author__ = "ed <copyparty@ocv.me>"
 __copyright__ = 2019
 __license__ = "MIT"
@@ -315,7 +316,7 @@ class Gateway(object):
         self.parse_html = self.parse_iis
         self.fsuf = ""
         self.dsuf = ""
-        self.re_2nl = re.compile(br"<br>|</pre>")
+        self.re_2nl = re.compile(rb"<br>|</pre>")
         self.re_row = re.compile(
             r'^ *([0-9]{1,2})/([0-9]{1,2})/([0-9]{4}) {1,2}([0-9]{1,2}:[0-9]{2}) ([AP]M) +(<dir>|[0-9]+) <A HREF="([^"]+)">([^<>]+)</A>$'
         )
@@ -744,9 +745,7 @@ class CPPF(Operations):

         if get1 >= cache1 and get2 <= cache2:
             # keep cache entry alive by moving it to the end
-            self.filecache = (
-                self.filecache[:ncn] + self.filecache[ncn + 1 :] + [cn]
-            )
+            self.filecache = self.filecache[:ncn] + self.filecache[ncn + 1 :] + [cn]
             buf_ofs = get1 - cache1
             buf_end = buf_ofs + (get2 - get1)
             dbg(
@@ -1106,9 +1105,7 @@ class CPPF(Operations):
         raise FuseOSError(errno.ENOENT)


-class TheArgparseFormatter(
-    argparse.RawTextHelpFormatter, argparse.ArgumentDefaultsHelpFormatter
-):
+class TheArgparseFormatter(argparse.RawTextHelpFormatter, argparse.ArgumentDefaultsHelpFormatter):
     pass


@@ -7,7 +7,6 @@ __copyright__ = 2020
 __license__ = "MIT"
 __url__ = "https://github.com/9001/copyparty/"

-import re
 import os
 import sys
 import time
@@ -20,7 +19,6 @@ import platform
 import threading
 import http.client # py2: httplib
 import urllib.parse
-from datetime import datetime
 from urllib.parse import quote_from_bytes as quote
 from urllib.parse import unquote_to_bytes as unquote

@@ -103,12 +101,7 @@ def get_tid():


 def html_dec(txt):
-    return (
-        txt.replace("&lt;", "<")
-        .replace("&gt;", ">")
-        .replace("&quot;", '"')
-        .replace("&amp;", "&")
-    )
+    return txt.replace("&lt;", "<").replace("&gt;", ">").replace("&quot;", '"').replace("&amp;", "&")


 def register_wtf8():
@@ -238,11 +231,7 @@ class Gateway(object):
         r = self.sendreq("GET", web_path)
         if r.status != 200:
             self.closeconn()
-            raise Exception(
-                "http error {} reading dir {} in {}".format(
-                    r.status, web_path, rice_tid()
-                )
-            )
+            raise Exception("http error {} reading dir {} in {}".format(r.status, web_path, rice_tid()))

         return self.parse_jls(r)

@@ -258,9 +247,7 @@ class Gateway(object):
         if r.status != http.client.PARTIAL_CONTENT:
             self.closeconn()
             raise Exception(
-                "http error {} reading file {} range {} in {}".format(
-                    r.status, web_path, hdr_range, rice_tid()
-                )
+                "http error {} reading file {} range {} in {}".format(r.status, web_path, hdr_range, rice_tid())
             )

         return r.read()
@@ -395,9 +382,7 @@ class CPPF(Fuse):

         if get1 >= cache1 and get2 <= cache2:
             # keep cache entry alive by moving it to the end
-            self.filecache = (
-                self.filecache[:ncn] + self.filecache[ncn + 1 :] + [cn]
-            )
+            self.filecache = self.filecache[:ncn] + self.filecache[ncn + 1 :] + [cn]
             buf_ofs = get1 - cache1
             buf_end = buf_ofs + (get2 - get1)
             dbg(
@@ -471,11 +456,7 @@ class CPPF(Fuse):

             buf_ofs = (get2 - get1) - len(cdr)

-            dbg(
-                "<cache> cdr {}, car {}-{}={} [-{}:]".format(
-                    len(cdr), h_ofs, h_end, h_end - h_ofs, buf_ofs
-                )
-            )
+            dbg("<cache> cdr {}, car {}-{}={} [-{}:]".format(len(cdr), h_ofs, h_end, h_end - h_ofs, buf_ofs))

             buf = self.gw.download_file_range(path, h_ofs, h_end)
             ret = buf[-buf_ofs:] + cdr
@@ -489,11 +470,7 @@ class CPPF(Fuse):

             buf_ofs = (get2 - get1) - len(car)

-            dbg(
-                "<cache> car {}, cdr {}-{}={} [:{}]".format(
-                    len(car), h_ofs, h_end, h_end - h_ofs, buf_ofs
-                )
-            )
+            dbg("<cache> car {}, cdr {}-{}={} [:{}]".format(len(car), h_ofs, h_end, h_end - h_ofs, buf_ofs))

             buf = self.gw.download_file_range(path, h_ofs, h_end)
             ret = car + buf[:buf_ofs]
@@ -511,11 +488,7 @@ class CPPF(Fuse):
             buf_ofs = get1 - h_ofs
             buf_end = buf_ofs + get2 - get1

-            dbg(
-                "<cache> {}-{}={} [{}:{}]".format(
-                    h_ofs, h_end, h_end - h_ofs, buf_ofs, buf_end
-                )
-            )
+            dbg("<cache> {}-{}={} [{}:{}]".format(h_ofs, h_end, h_end - h_ofs, buf_ofs, buf_end))

             buf = self.gw.download_file_range(path, h_ofs, h_end)
             ret = buf[buf_ofs:buf_end]
@@ -648,9 +621,7 @@ def main():
         print(" need argument: -o url=<...>")
         print(" need argument: mount-path")
         print("example:")
-        print(
-            " ./partyfuse2.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas"
-        )
+        print(" ./partyfuse2.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas")
         sys.exit(1)

     server.init2()

@@ -5,7 +5,7 @@ set -e
 #
 # assumption: these directories, and everything within, are owned by root
 sysdirs=(); for v in /bin /lib /lib32 /lib64 /sbin /usr /etc/alternatives ; do
-    [ -e $v ] && sysdirs+=($v)
+    [ -e "$v" ] && sysdirs+=("$v")
 done

 # error-handler
@@ -68,13 +68,13 @@ cpp="$1"; shift
 }
 trap - EXIT

-usr="$(getent passwd $uid | cut -d: -f1)"
+usr="$(getent passwd "$uid" | cut -d: -f1)"
 [ "$usr" ] || { echo "ERROR invalid username/uid $uid"; exit 1; }
-uid="$(getent passwd $uid | cut -d: -f3)"
+uid="$(getent passwd "$uid" | cut -d: -f3)"

-grp="$(getent group $gid | cut -d: -f1)"
+grp="$(getent group "$gid" | cut -d: -f1)"
 [ "$grp" ] || { echo "ERROR invalid groupname/gid $gid"; exit 1; }
-gid="$(getent group $gid | cut -d: -f3)"
+gid="$(getent group "$gid" | cut -d: -f3)"

 # debug/vis
 echo
@@ -106,8 +106,8 @@ while IFS= read -r v; do
 }
 i1=$(stat -c%D.%i "$v/" 2>/dev/null || echo a)
 i2=$(stat -c%D.%i "$jail$v/" 2>/dev/null || echo b)
-[ $i1 = $i2 ] && continue
-mount | grep -qF " $jail$v " && echo wtf $i1 $i2 $v && continue
+[ "$i1" = "$i2" ] && continue
+mount | grep -qF " $jail$v " && echo wtf "$i1" "$i2" "$v" && continue
 mkdir -p "$jail$v"
 mount --bind "$v" "$jail$v"
 done
@@ -130,7 +130,7 @@ cln() {
 done
 }
 rmdir "$jail/.prisonlock" || true
-exit $rv
+exit "$rv"
 }
 trap cln EXIT

@@ -141,7 +141,7 @@ chmod 777 "$jail/tmp"


 # create a dev
-(cd $jail; mkdir -p dev; cd dev
+(cd "$jail"; mkdir -p dev; cd dev
 [ -e null ] || mknod -m 666 null c 1 3
 [ -e zero ] || mknod -m 666 zero c 1 5
 [ -e random ] || mknod -m 444 random c 1 8
@@ -150,13 +150,14 @@ chmod 777 "$jail/tmp"


 # run copyparty
-export HOME="$(getent passwd $uid | cut -d: -f6)"
+HOME="$(getent passwd $uid | cut -d: -f6)"
+export HOME
 export USER="$usr"
 export LOGNAME="$USER"
 #echo "pybin [$pybin]"
 #echo "pyarg [$pyarg]"
 #echo "cpp [$cpp]"
-chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
+chroot --userspec="$uid:$gid" "$jail" "$pybin" "$pyarg" "$cpp" "$@" &
 p=$!
 trap 'kill -USR1 $p' USR1
 trap 'trap - INT TERM; kill $p' INT TERM

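The HOME change in the hunk above splits the assignment from the export, which is what shellcheck's SC2155 advice asks for: a combined export VAR="$(cmd)" reports the exit status of export and hides a failing command substitution. A minimal sketch of the difference, using a deliberately failing command and a hypothetical variable name:

#!/bin/sh
# combined: export itself succeeds, so the failure of $(false) is hidden
export VAL="$(false)"
echo "combined form exit status: $?"   # prints 0

# split: the bare assignment carries the exit status of $(false)
VAL="$(false)"
echo "split form exit status: $?"      # prints 1
export VAL
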
bin/u2c.py (36 changes)
@@ -199,9 +199,7 @@ class HCli(object):
         if ctype:
             hdrs["Content-Type"] = ctype
         if meth == "POST" and CLEN not in hdrs:
-            hdrs[CLEN] = (
-                0 if not body else body.len if hasattr(body, "len") else len(body)
-            )
+            hdrs[CLEN] = 0 if not body else body.len if hasattr(body, "len") else len(body)

         # large timeout for handshakes (safededup)
         conns = self.hconns if ctype == MJ else self.conns
@@ -910,11 +908,7 @@ def upload(fsl, stats, maxsz):
     sc, txt = web.req("POST", fsl.file.url, headers, fsl, MO)

     if sc == 400:
-        if (
-            "already being written" in txt
-            or "already got that" in txt
-            or "only sibling chunks" in txt
-        ):
+        if "already being written" in txt or "already got that" in txt or "only sibling chunks" in txt:
             fsl.file.nojoin = 1

     if sc >= 400:
@@ -1223,9 +1217,7 @@ class Ctl(object):
         while req:
             print("DELETING ~%s#%s" % (srd, len(req)))
             body = json.dumps(req).encode("utf-8")
-            sc, txt = web.req(
-                "POST", self.ar.url + "?delete", {}, body, MJ
-            )
+            sc, txt = web.req("POST", self.ar.url + "?delete", {}, body, MJ)
             if sc == 413 and "json 2big" in txt:
                 print(" (delete request too big; slicing...)")
                 req = req[: len(req) // 2]
@@ -1251,17 +1243,10 @@ class Ctl(object):
            with self.mutex:
                if (
                    self.hash_f - self.up_f == 1
-                    or (
-                        self.hash_b - self.up_b < 1024 * 1024 * 1024
-                        and self.hash_c - self.up_c < 512
-                    )
+                    or (self.hash_b - self.up_b < 1024 * 1024 * 1024 and self.hash_c - self.up_c < 512)
                ) and (
                    not self.ar.nh
-                    or (
-                        self.q_upload.empty()
-                        and self.q_handshake.empty()
-                        and not self.uploader_busy
-                    )
+                    or (self.q_upload.empty() and self.q_handshake.empty() and not self.uploader_busy)
                ):
                    break

@@ -1280,10 +1265,7 @@ class Ctl(object):
            if self.ar.wlist:
                vp = file.rel.decode("utf-8")
                if self.ar.chs:
-                    zsl = [
-                        "%s %d %d" % (zsii[0], n, zsii[1])
-                        for n, zsii in enumerate(file.cids)
-                    ]
+                    zsl = ["%s %d %d" % (zsii[0], n, zsii[1]) for n, zsii in enumerate(file.cids)]
                    print("chs: %s\n%s" % (vp, "\n".join(zsl)))
                zsl = [self.ar.wsalt, str(file.size)] + [x[0] for x in file.cids]
                zb = hashlib.sha512("\n".join(zsl).encode("utf-8")).digest()[:33]
@@ -1650,8 +1632,7 @@ source file/folder selection uses rsync syntax, meaning that:
         raise Exception("--safe is incompatible with " + str(errs))

     ar.files = [
-        os.path.abspath(os.path.realpath(x.encode("utf-8")))
-        + (x[-1:] if x[-1:] in ("\\", "/") else "").encode("utf-8")
+        os.path.abspath(os.path.realpath(x.encode("utf-8"))) + (x[-1:] if x[-1:] in ("\\", "/") else "").encode("utf-8")
         for x in ar.files
     ]

@@ -1667,8 +1648,7 @@ source file/folder selection uses rsync syntax, meaning that:

     if "https://" in ar.url.lower():
         try:
-            import ssl
-            import zipfile
+            pass
         except:
             t = "ERROR: https is not available for some reason; please use http"
             print("\n\n %s\n\n" % (t,))