drop one of the slowloris detectors
parent 89c9f45fd0
commit 7c76d08958

@@ -749,8 +749,7 @@ def run_argparse(
     ap2.add_argument("--ban-pw", metavar="N,W,B", type=u, default="9,60,1440", help="more than \033[33mN\033[0m wrong passwords in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes; disable with [\033[32mno\033[0m]")
     ap2.add_argument("--ban-404", metavar="N,W,B", type=u, default="no", help="hitting more than \033[33mN\033[0m 404's in \033[33mW\033[0m minutes = ban for \033[33mB\033[0m minutes (disabled by default since turbo-up2k counts as 404s)")
     ap2.add_argument("--aclose", metavar="MIN", type=int, default=10, help="if a client maxes out the server connection limit, downgrade it from connection:keep-alive to connection:close for MIN minutes (and also kill its active connections) -- disable with 0")
-    ap2.add_argument("--loris1", metavar="W,B", type=u, default="60,60", help="if a client takes more than W seconds to finish sending headers, ban it for B minutes; disable with [\033[32mno\033[0m]")
-    ap2.add_argument("--loris2", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for B minutes; disable with [\033[32m0\033[0m]")
+    ap2.add_argument("--loris", metavar="B", type=int, default=60, help="if a client maxes out the server connection limit without sending headers, ban it for B minutes; disable with [\033[32m0\033[0m]")
 
     ap2 = ap.add_argument_group('shutdown options')
     ap2.add_argument("--ign-ebind", action="store_true", help="continue running even if it's impossible to listen on some of the requested endpoints")

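The two tuning knobs collapse into a single --loris ban duration (in minutes). A minimal standalone sketch, assuming plain argparse rather than copyparty's real parser, of how the surviving option behaves:

# minimal sketch (not copyparty's actual parser) of the surviving option;
# only --loris remains: an integer ban duration in minutes, 0 disables it
import argparse

ap = argparse.ArgumentParser()
ap.add_argument("--loris", metavar="B", type=int, default=60,
                help="ban header-less connection-flooders for B minutes; 0 disables")

print(ap.parse_args([]).loris)                # 60 (default: one-hour ban)
print(ap.parse_args(["--loris", "0"]).loris)  # 0  (detector disabled)
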
@@ -38,7 +38,6 @@ from .util import (
     META_NOBOTS,
     MultipartParser,
     Pebkac,
-    Slowloris,
     UnrecvEOF,
     alltrace,
     atomic_move,

@@ -213,7 +212,7 @@ class HttpCli(object):
 
         try:
             self.s.settimeout(2)
-            headerlines = read_header(self.sr, self.args.loris1w)
+            headerlines = read_header(self.sr)
             self.in_hdr_recv = False
             if not headerlines:
                 return False

@@ -245,13 +244,6 @@ class HttpCli(object):
             self.loud_reply(unicode(ex), status=ex.code, headers=h, volsan=True)
             return self.keepalive
 
-        except Slowloris:
-            ip = ipnorm(self.ip)
-            self.conn.bans[ip] = int(time.time() + self.args.loris1b * 60)
-            t = "slowloris (infinite-headers): {} banned for {} min"
-            self.log(t.format(ip, self.args.loris1b), 1)
-            return False
-
         self.ua = self.headers.get("user-agent", "")
         self.is_rclone = self.ua.startswith("rclone/")
         self.is_ancient = self.ua.startswith("Mozilla/4.")

@@ -266,8 +266,8 @@ class HttpSrv(object):
                 continue
 
             t = "slowloris (idle-conn): {} banned for {} min"
-            self.log(self.name, t.format(ip, self.args.loris2, nclose), 1)
-            self.bans[ip] = int(time.time() + self.args.loris2 * 60)
+            self.log(self.name, t.format(ip, self.args.loris, nclose), 1)
+            self.bans[ip] = int(time.time() + self.args.loris * 60)
 
             if self.args.log_conn:
                 self.log(self.name, "|%sC-acc1" % ("-" * 2,), c="90")

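As the surviving idle-conn detector above shows, a ban is just an absolute unban timestamp stored per IP. A minimal sketch of that bookkeeping, assuming hypothetical helper names (ban, is_banned) rather than copyparty's API:

# minimal sketch of the ban bookkeeping seen above (helper names are hypothetical):
# bans maps a client IP to an absolute unban time in epoch seconds, so requests
# can be rejected while time.time() is still below the stored value
import time

bans: dict[str, int] = {}
loris = 60  # ban duration in minutes, the new --loris default

def ban(ip: str) -> None:
    bans[ip] = int(time.time() + loris * 60)

def is_banned(ip: str) -> bool:
    return bans.get(ip, 0) > time.time()

ban("203.0.113.7")
print(is_banned("203.0.113.7"))  # True for the next 60 minutes
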
@@ -290,12 +290,6 @@ class SvcHub(object):
 
     def _process_config(self) -> bool:
         al = self.args
-        if al.loris1 == "no":
-            al.loris1 = "0,0"
-
-        i1, i2 = al.loris1.split(",")
-        al.loris1w = int(i1)
-        al.loris1b = int(i2)
 
         al.zm_on = al.zm_on or al.z_on
         al.zs_on = al.zs_on or al.z_on

@@ -1253,7 +1253,7 @@ class MultipartParser(object):
         rfc1341/rfc1521/rfc2047/rfc2231/rfc2388/rfc6266/the-real-world
         (only the fallback non-js uploader relies on these filenames)
         """
-        for ln in read_header(self.sr, 0):
+        for ln in read_header(self.sr):
             self.log(ln)
 
             m = self.re_ctype.match(ln)

@@ -1453,12 +1453,12 @@ def get_boundary(headers: dict[str, str]) -> str:
     return m.group(2)
 
 
-def read_header(sr: Unrecv, loris: int) -> list[str]:
+def read_header(sr: Unrecv) -> list[str]:
     t0 = time.time()
     ret = b""
     while True:
-        if loris and time.time() - t0 > loris:
-            raise Slowloris()
+        if time.time() - t0 > 120:
+            return []
 
         try:
            ret += sr.recv(1024)

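With the configurable loris parameter gone, the header reader falls back to a fixed 120-second cap and signals failure with an empty list instead of raising Slowloris. A simplified sketch of that behavior; the recv callable and the header-splitting details are assumptions, not the exact util.read_header logic:

# simplified sketch of the new read_header behavior (not the exact implementation):
# give up after a hard-coded 120 seconds and return [], which callers treat as
# "no request received" and close the connection instead of banning the client
import time
from typing import Callable

def read_header_sketch(recv: Callable[[int], bytes]) -> list[str]:
    t0 = time.time()
    ret = b""
    while True:
        if time.time() - t0 > 120:
            return []  # previously: raise Slowloris()
        buf = recv(1024)
        if not buf:
            return []
        ret += buf
        if b"\r\n\r\n" in ret:
            break
    head = ret.split(b"\r\n\r\n", 1)[0]
    return head.decode("utf-8", "replace").split("\r\n")
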
@@ -2541,7 +2541,3 @@ class Pebkac(Exception):
 
     def __repr__(self) -> str:
         return "Pebkac({}, {})".format(self.code, repr(self.args))
-
-
-class Slowloris(Exception):
-    pass

@@ -107,7 +107,7 @@ class Cfg(Namespace):
         ex = "css_browser hist js_browser no_hash no_idx no_forget"
         ka.update(**{k: None for k in ex.split()})
 
-        ex = "re_maxage rproxy rsp_slp s_wr_slp theme themes turbo df loris1w loris1b loris2"
+        ex = "re_maxage rproxy rsp_slp s_wr_slp theme themes turbo df loris"
         ka.update(**{k: 0 for k in ex.split()})
 
        ex = "doctitle favico html_head mth textfiles log_fk"

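The test shim above defaults every name listed in ex to 0, so the renamed knob shows up as loris == 0 (disabled) in tests. A minimal sketch of that pattern, assuming a plain Namespace stands in for the real Cfg class:

# minimal sketch of the test-shim pattern above (plain Namespace instead of Cfg):
# every space-separated name in ex becomes an attribute defaulting to 0,
# so the renamed loris option is disabled by default in tests
from argparse import Namespace

ka: dict[str, int] = {}
ex = "re_maxage rproxy rsp_slp s_wr_slp theme themes turbo df loris"
ka.update(**{k: 0 for k in ex.split()})

cfg = Namespace(**ka)
assert cfg.loris == 0 and cfg.turbo == 0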