mirror of https://github.com/9001/copyparty.git
synced 2025-08-17 09:02:15 -06:00
cleanup
This commit is contained in:
parent
b1693f95cb
commit
cba2e10d29
@@ -436,7 +436,7 @@ def disable_quickedit() -> None:
     if PY2:
         wintypes.LPDWORD = ctypes.POINTER(wintypes.DWORD)

     k32.GetStdHandle.errcheck = ecb  # type: ignore
     k32.GetConsoleMode.errcheck = ecb  # type: ignore
     k32.SetConsoleMode.errcheck = ecb  # type: ignore
     k32.GetConsoleMode.argtypes = (wintypes.HANDLE, wintypes.LPDWORD)
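The hunk above wires up ctypes prototypes for the Win32 console API: `errcheck` hooks error handling onto each call and `argtypes` declares the pointer parameter. A rough standalone sketch of the same idiom follows; `ecb` here is a stand-in for copyparty's own error callback, and the constants and handle choice are illustrative, not taken from the commit.

import ctypes
import sys
from ctypes import wintypes


def ecb(result, func, args):
    # stand-in error callback: raise on a falsy return value
    if not result:
        raise ctypes.WinError(ctypes.get_last_error())
    return args  # returning args unchanged keeps the normal return value


if sys.platform == "win32":
    k32 = ctypes.WinDLL("kernel32", use_last_error=True)
    LPDWORD = ctypes.POINTER(wintypes.DWORD)

    k32.GetStdHandle.restype = wintypes.HANDLE
    k32.GetStdHandle.errcheck = ecb
    k32.GetConsoleMode.errcheck = ecb
    k32.GetConsoleMode.argtypes = (wintypes.HANDLE, LPDWORD)

    STD_INPUT_HANDLE = -10
    mode = wintypes.DWORD(0)
    hin = k32.GetStdHandle(STD_INPUT_HANDLE)
    k32.GetConsoleMode(hin, ctypes.byref(mode))
    print("console mode flags: 0x%x" % mode.value)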
@@ -112,32 +112,30 @@ class HttpConn(object):
         return self.u2idx

     def _detect_https(self) -> bool:
-        method = None
-        if True:
         try:
             method = self.s.recv(4, socket.MSG_PEEK)
         except socket.timeout:
             return False
         except AttributeError:
             # jython does not support msg_peek; forget about https
             method = self.s.recv(4)
             self.sr = Util.Unrecv(self.s, self.log)
             self.sr.buf = method

             # jython used to do this, they stopped since it's broken
             # but reimplementing sendall is out of scope for now
             if not getattr(self.s, "sendall", None):
                 self.s.sendall = self.s.send  # type: ignore

         if len(method) != 4:
             err = "need at least 4 bytes in the first packet; got {}".format(
                 len(method)
             )
             if method:
                 self.log(err)

             self.s.send(b"HTTP/1.1 400 Bad Request\r\n\r\n" + err.encode("utf-8"))
             return False

         return not method or not bool(PTN_HTTP.match(method))
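The cleanup above only drops the `method = None` / `if True:` scaffolding; the logic stays the same: peek at the first four bytes with `MSG_PEEK` and treat anything that does not look like an HTTP verb as TLS. A self-contained sketch of that peek-then-classify idea, with an assumed stand-in for `PTN_HTTP` (the real pattern is defined elsewhere in copyparty):

import re
import socket

# assumed stand-in for PTN_HTTP: the first bytes of a plaintext HTTP request
PTN_HTTP = re.compile(br"[A-Z]{3}[A-Z ]")


def looks_like_tls(sck: socket.socket) -> bool:
    # peek so the bytes stay in the buffer for the real request parser
    try:
        head = sck.recv(4, socket.MSG_PEEK)
    except socket.timeout:
        return False
    except AttributeError:
        # no MSG_PEEK on this platform (jython); consume the bytes instead
        head = sck.recv(4)

    if len(head) != 4:
        return False

    return not PTN_HTTP.match(head)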
@@ -61,7 +61,7 @@ class Adapter(object):
         )


-if True:
+if True:  # pylint: disable=using-constant-test
     # Type of an IPv4 address (a string in "xxx.xxx.xxx.xxx" format)
     _IPv4Address = str

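The only change here is an inline pragma: pylint's using-constant-test warning is silenced for the intentional `if True:` block in the vendored adapter code. The comment silences the warning on that line only; a minimal, made-up example of the same pattern:

if True:  # pylint: disable=using-constant-test
    # the constant test is deliberate; the block only exists to group aliases
    _ExampleAlias = str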
@@ -419,50 +419,49 @@ class Up2k(object):
     def _check_lifetimes(self) -> float:
         now = time.time()
         timeout = now + 9001
-        if now:  # diff-golf
         for vp, vol in sorted(self.asrv.vfs.all_vols.items()):
             lifetime = vol.flags.get("lifetime")
             if not lifetime:
                 continue

             cur = self.cur.get(vol.realpath)
             if not cur:
                 continue

             nrm = 0
             deadline = time.time() - lifetime
             timeout = min(timeout, now + lifetime)
             q = "select rd, fn from up where at > 0 and at < ? limit 100"
             while True:
                 with self.mutex:
                     hits = cur.execute(q, (deadline,)).fetchall()

                 if not hits:
                     break

                 for rd, fn in hits:
                     if rd.startswith("//") or fn.startswith("//"):
                         rd, fn = s3dec(rd, fn)

                     fvp = ("%s/%s" % (rd, fn)).strip("/")
                     if vp:
                         fvp = "%s/%s" % (vp, fvp)

                     self._handle_rm(LEELOO_DALLAS, "", fvp, [], True)
                     nrm += 1

             if nrm:
                 self.log("{} files graduated in {}".format(nrm, vp))

             if timeout < 10:
                 continue

             q = "select at from up where at > 0 order by at limit 1"
             with self.mutex:
                 hits = cur.execute(q).fetchone()

             if hits:
                 timeout = min(timeout, now + lifetime - (now - hits[0]))

         return timeout
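Functionally, `_check_lifetimes` deletes expired uploads per volume and returns when the next scan should happen: at most 9001 seconds away, but no later than when the oldest surviving upload will expire. A hedged sketch of just that timeout arithmetic, with the volume and database plumbing replaced by plain values (`next_lifetime_scan` is a made-up name):

import time
from typing import Optional


def next_lifetime_scan(lifetime: float, oldest_upload_at: Optional[float]) -> float:
    # rough model of the return value for a single volume
    now = time.time()
    timeout = now + 9001  # default: look again in roughly 2.5 hours

    if lifetime:
        # never sleep past one full lifetime from now
        timeout = min(timeout, now + lifetime)
        if oldest_upload_at is not None:
            # the oldest file expires at oldest_upload_at + lifetime,
            # which equals the diff's now + lifetime - (now - hits[0])
            timeout = min(timeout, oldest_upload_at + lifetime)

    return timeout


# example: 1-hour lifetime, oldest upload made 50 minutes ago
# -> an absolute timestamp about 10 minutes in the future
print(next_lifetime_scan(3600, time.time() - 3000))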
@@ -1217,72 +1216,70 @@ class Up2k(object):
             abspath = os.path.join(cdir, fn)
             nohash = reh.search(abspath) if reh else False

-            if fn:  # diff-golf
             sql = "select w, mt, sz, at from up where rd = ? and fn = ?"
             try:
                 c = db.c.execute(sql, (rd, fn))
             except:
                 c = db.c.execute(sql, s3enc(self.mem_cur, rd, fn))

             in_db = list(c.fetchall())
             if in_db:
                 self.pp.n -= 1
                 dw, dts, dsz, at = in_db[0]
                 if len(in_db) > 1:
                     t = "WARN: multiple entries: [{}] => [{}] |{}|\n{}"
                     rep_db = "\n".join([repr(x) for x in in_db])
                     self.log(t.format(top, rp, len(in_db), rep_db))
                     dts = -1

                 if fat32 and abs(dts - lmod) == 1:
                     dts = lmod

                 if dts == lmod and dsz == sz and (nohash or dw[0] != "#" or not sz):
                     continue

                 t = "reindex [{}] => [{}] ({}/{}) ({}/{})".format(
                     top, rp, dts, lmod, dsz, sz
                 )
                 self.log(t)
                 self.db_rm(db.c, rd, fn, 0)
                 ret += 1
                 db.n += 1
                 in_db = []
             else:
                 at = 0

             self.pp.msg = "a{} {}".format(self.pp.n, abspath)

             if nohash or not sz:
                 wark = up2k_wark_from_metadata(self.salt, sz, lmod, rd, fn)
             else:
                 if sz > 1024 * 1024:
                     self.log("file: {}".format(abspath))

                 try:
                     hashes = self._hashlist_from_file(
                         abspath, "a{}, ".format(self.pp.n)
                     )
                 except Exception as ex:
                     self.log("hash: {} @ [{}]".format(repr(ex), abspath))
                     continue

                 if not hashes:
                     return -1

                 wark = up2k_wark_from_hashlist(self.salt, sz, hashes)

             # skip upload hooks by not providing vflags
             self.db_add(db.c, {}, rd, fn, lmod, sz, "", "", wark, "", "", "", at)
             db.n += 1
             ret += 1
             td = time.time() - db.t
             if db.n >= 4096 or td >= 60:
                 self.log("commit {} new files".format(db.n))
                 db.c.connection.commit()
                 db.n = 0
                 db.t = time.time()

         if not self.args.no_dhash:
             db.c.execute("delete from dh where d = ?", (drd,))  # type: ignore
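The indexing block above boils down to one decision: if the volume is flagged nohash (or the file is empty), the wark is derived from metadata only; otherwise the file is hashed. A simplified sketch of that branch with stand-in wark helpers; copyparty's real `up2k_wark_from_metadata` and `up2k_wark_from_hashlist` use their own chunking and encoding, so the functions below are illustrative only:

import hashlib


def wark_from_metadata(salt: str, sz: int, lmod: int, rd: str, fn: str) -> str:
    # stand-in: identity from path + size + mtime, no file reads;
    # the leading "#" mirrors the dw[0] != "#" check in the diff
    seed = "{}\n{}\n{}\n{}\n{}".format(salt, sz, lmod, rd, fn).encode("utf-8")
    return "#" + hashlib.sha512(seed).hexdigest()[:43]


def wark_from_contents(salt: str, abspath: str) -> str:
    # stand-in: one flat hash instead of a per-chunk hashlist
    h = hashlib.sha512(salt.encode("utf-8"))
    with open(abspath, "rb") as f:
        for blk in iter(lambda: f.read(1024 * 1024), b""):
            h.update(blk)
    return h.hexdigest()[:43]


def pick_wark(salt, abspath, sz, lmod, rd, fn, nohash):
    if nohash or not sz:
        return wark_from_metadata(salt, sz, lmod, rd, fn)
    return wark_from_contents(salt, abspath)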
@@ -1823,7 +1823,9 @@ def exclude_dotfiles(filepaths: list[str]) -> list[str]:
     return [x for x in filepaths if not x.split("/")[-1].startswith(".")]


-def odfusion(base: Union[ODict[str, bool], ODict["LiteralString", bool]], oth: str) -> ODict[str, bool]:
+def odfusion(
+    base: Union[ODict[str, bool], ODict["LiteralString", bool]], oth: str
+) -> ODict[str, bool]:
     # merge an "ordered set" (just a dict really) with another list of keys
     words0 = [x for x in oth.split(",") if x]
     words1 = [x for x in oth[1:].split(",") if x]
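`odfusion` itself only gets a reformatted signature here; the inline comment explains the idea: a dict doubles as an ordered set, and another comma-separated list of keys is merged into it. A minimal, simplified model of that merge (not the full copyparty semantics, which also distinguish the words0 / words1 split for prefixed arguments):

from collections import OrderedDict


def merge_ordered_set(base, oth):
    # hypothetical simplified merge: append any new comma-separated keys,
    # preserving the original insertion order
    ret = OrderedDict(base)
    for k in [x for x in oth.split(",") if x]:
        ret[k] = True
    return ret


od = OrderedDict([("grid", True), ("thumbs", True)])
print(list(merge_ordered_set(od, "thumbs,zip,")))  # ['grid', 'thumbs', 'zip']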
@@ -19,7 +19,7 @@ docker run --rm -it -u 1000 -p 3923:3923 -v /mnt/nas:/w -v $PWD/cfgdir:/cfg copy

 this example is also available as a podman-compatible [docker-compose yaml](https://github.com/9001/copyparty/blob/hovudstraum/docs/examples/docker/basic-docker-compose); example usage: `docker-compose up` (you may need to `systemctl enable --now podman.socket` or similar)

-i'm unfamiliar with docker-compose and alternatives so let me know if this section could be better 🙏
+i'm not very familiar with containers, so let me know if this section could be better 🙏


 ## configuration