idp: hide login/logout UI + improve html_head handling

ed 2024-03-13 18:22:24 +00:00
parent a259704596
commit dbf1cbc8af
6 changed files with 21 additions and 22 deletions

View file

@@ -18,7 +18,6 @@ from .cfg import flagdescs, permdescs, vf_bmap, vf_cmap, vf_vmap
 from .pwhash import PWHash
 from .util import (
     IMPLICATIONS,
-    META_NOBOTS,
     SQLITE_VER,
     UNPLICATIONS,
     UTC,
@@ -1661,13 +1660,6 @@ class AuthSrv(object):
             if not vol.flags.get("robots"):
                 vol.flags["norobots"] = True
 
-        for vol in vfs.all_vols.values():
-            h = [vol.flags.get("html_head", self.args.html_head)]
-            if vol.flags.get("norobots"):
-                h.insert(0, META_NOBOTS)
-
-            vol.flags["html_head"] = "\n".join([x for x in h if x])
-
         for vol in vfs.all_vols.values():
             if self.args.no_vthumb:
                 vol.flags["dvthumb"] = True

View file

@@ -170,16 +170,12 @@ class HttpCli(object):
         self.can_dot = False
         self.out_headerlist: list[tuple[str, str]] = []
         self.out_headers: dict[str, str] = {}
-        self.html_head = " "
         # post
         self.parser: Optional[MultipartParser] = None
         # end placeholders
 
         self.bufsz = 1024 * 32
-        h = self.args.html_head
-        if self.args.no_robots:
-            h = META_NOBOTS + (("\n" + h) if h else "")
-        self.html_head = h
+        self.html_head = ""
 
     def log(self, msg: str, c: Union[int, str] = 0) -> None:
         ptn = self.asrv.re_pwd
@@ -231,8 +227,6 @@ class HttpCli(object):
             "Vary": "Origin, PW, Cookie",
             "Cache-Control": "no-store, max-age=0",
         }
-        if self.args.no_robots:
-            self.out_headers["X-Robots-Tag"] = "noindex, nofollow"
 
         if self.is_banned():
             return False
@@ -506,6 +500,7 @@ class HttpCli(object):
                 if idp_usr in self.asrv.vfs.aread:
                     self.uname = idp_usr
+                    self.html_head += "<script>var is_idp=1</script>\n"
                 else:
                     self.log("unknown username: [%s]" % (idp_usr), 1)
                     self.uname = "*"
@@ -559,6 +554,10 @@ class HttpCli(object):
         self.s.settimeout(self.args.s_tbody or None)
 
+        if "norobots" in vn.flags:
+            self.html_head += META_NOBOTS
+            self.out_headers["X-Robots-Tag"] = "noindex, nofollow"
+
         try:
             cors_k = self._cors()
             if self.mode in ("GET", "HEAD"):
@@ -3390,6 +3389,8 @@ class HttpCli(object):
             self.reply(zb, mime="text/plain; charset=utf-8")
             return True
 
+        self.html_head += self.vn.flags.get("html_head", "")
+
         html = self.j2s(
             "splash",
             this=self,
@@ -3839,11 +3840,9 @@ class HttpCli(object):
         e2d = "e2d" in vn.flags
         e2t = "e2t" in vn.flags
 
-        self.html_head = vn.flags.get("html_head", "")
-        if vn.flags.get("norobots") or "b" in self.uparam:
+        self.html_head += vn.flags.get("html_head", "")
+        if "b" in self.uparam:
             self.out_headers["X-Robots-Tag"] = "noindex, nofollow"
-        else:
-            self.out_headers.pop("X-Robots-Tag", None)
 
         is_dir = stat.S_ISDIR(st.st_mode)
         fk_pass = False
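
Taken together, the httpcli.py hunks turn html_head from a string that was baked per volume at startup into one that is assembled per request. A minimal standalone sketch of that ordering, assuming a hypothetical build_html_head helper (not a copyparty function) and reusing the flag and header names from the hunks above:

META_NOBOTS = '<meta name="robots" content="noindex, nofollow">\n'

def build_html_head(is_idp_user: bool, vol_flags: dict, out_headers: dict) -> str:
    # sketch only; mirrors the per-request ordering shown in the hunks above
    head = ""  # starts empty on every request instead of being precomputed at startup
    if is_idp_user:
        # lets the client-side JS know to hide the login/logout controls
        head += "<script>var is_idp=1</script>\n"
    if "norobots" in vol_flags:
        # robots handling now happens per request, as both a meta tag and a response header
        head += META_NOBOTS
        out_headers["X-Robots-Tag"] = "noindex, nofollow"
    # the volume's own html_head is appended last rather than overwriting the rest
    head += vol_flags.get("html_head", "")
    return head

The key difference from the old code is that each piece is appended with += rather than assigned, so the per-request additions (the is_idp marker, META_NOBOTS) and the per-volume html_head can coexist.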

View file

@@ -186,7 +186,7 @@ else:
 SYMTIME = sys.version_info > (3, 6) and os.utime in os.supports_follow_symlinks
 
-META_NOBOTS = '<meta name="robots" content="noindex, nofollow">'
+META_NOBOTS = '<meta name="robots" content="noindex, nofollow">\n'
 
 FFMPEG_URL = "https://www.gyan.dev/ffmpeg/builds/ffmpeg-git-full.7z"

View file

@@ -6634,7 +6634,7 @@ function apply_perms(res) {
 	ebi('acc_info').innerHTML = '<span id="srv_info2"><span>' + srvinf +
 		'</span></span><span' + aclass + axs + L.access + '</span>' + (acct != '*' ?
-			'<a href="' + SR + '/?pw=x">' + L.logout + acct + '</a>' :
+			'<a href="' + SR + '/?pw=x">' + (window.is_idp ? '' : L.logout) + acct + '</a>' :
 			'<a href="?h">Login</a>');
 
 	var o = QSA('#ops>a[data-perm]');

View file

@@ -49,6 +49,15 @@ for (var k in (d || {})) {
 		o[a].setAttribute("tt", d[k]);
 }
 
+try {
+	if (window.is_idp) {
+		var z = ['#l+div', '#l', '#c'];
+		for (var a = 0; a < z.length; a++)
+			QS(z[a]).style.display = 'none';
+	}
+}
+catch (ex) { }
+
 tt.init();
 var o = QS('input[name="cppwd"]');
 if (!ebi('c') && o.offsetTop + o.offsetHeight < window.innerHeight)

View file

@@ -10,7 +10,6 @@ a living list of upcoming features / fixes / changes, very roughly in order of p
 * [github issue #62](https://github.com/9001/copyparty/issues/62) - IdP / single-sign-on powered by a local identity provider service which is possibly hooked up to ldap or an oauth service
   * persist autogenerated volumes for db-init + nullmapping on next startup (`_map_volume` += `only_if_exist`)
   * sanchk that autogenerated volumes below inaccessible parent
-  * disable logout links if idp detected
 
 * download accelerator
   * definitely download chunks in parallel