mirror of https://github.com/9001/copyparty.git

index folder thumbs in db

parent e405fddf74
commit 2be2e9a0d8
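
Not part of the commit message, just orientation: the filesystem scanner now remembers each folder's cover image in a new cv table (rd = parent path of the folder, dn = folder name, fn = cover filename), and HttpCli answers folder-thumbnail requests from that table when the volume has a database, only falling back to stat'ing the --th-covers names otherwise. A minimal standalone sketch of the keying and lookup, reusing the schema and queries from the diff below with made-up example paths:

    # minimal sketch; "music/album1" and "cover.jpg" are made-up examples
    import sqlite3

    db = sqlite3.connect(":memory:")
    db.execute("create table cv (rd text, dn text, fn text)")  # schema from _add_cv_tab below
    db.execute("create index cv_i on cv(rd, dn)")

    # the volume-relative folder "music/album1" is keyed as rd="music", dn="album1"
    rd = "music/album1"
    crd, cdn = rd.rsplit("/", 1) if "/" in rd else ("", rd)
    db.execute("insert into cv values (?,?,?)", (crd, cdn, "cover.jpg"))

    # folder-thumbnail lookup, same query HttpCli runs for a ?th request on a folder
    print(db.execute("select fn from cv where rd=? and dn=?", (crd, cdn)).fetchone())
    # -> ('cover.jpg',)
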
copyparty/__main__.py
@@ -874,7 +874,7 @@ def add_thumbnail(ap):
     ap2.add_argument("--th-poke", metavar="SEC", type=int, default=300, help="activity labeling cooldown -- avoids doing keepalive pokes (updating the mtime) on thumbnail folders more often than SEC seconds")
     ap2.add_argument("--th-clean", metavar="SEC", type=int, default=43200, help="cleanup interval; 0=disabled")
     ap2.add_argument("--th-maxage", metavar="SEC", type=int, default=604800, help="max folder age -- folders which haven't been poked for longer than --th-poke seconds will get deleted every --th-clean seconds")
-    ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for")
+    ap2.add_argument("--th-covers", metavar="N,N", type=u, default="folder.png,folder.jpg,cover.png,cover.jpg", help="folder thumbnails to stat/look for; case-insensitive if -e2d")
     # https://pillow.readthedocs.io/en/stable/handbook/image-file-formats.html
     # https://github.com/libvips/libvips
     # ffmpeg -hide_banner -demuxers | awk '/^ D /{print$2}' | while IFS= read -r x; do ffmpeg -hide_banner -h demuxer=$x; done | grep -E '^Demuxer |extensions:'

copyparty/httpcli.py
@@ -3248,6 +3248,9 @@ class HttpCli(object):
         ):
             raise Pebkac(403)

+        e2d = "e2d" in vn.flags
+        e2t = "e2t" in vn.flags
+
         self.html_head = vn.flags.get("html_head", "")
         if vn.flags.get("norobots") or "b" in self.uparam:
             self.out_headers["X-Robots-Tag"] = "noindex, nofollow"
@@ -3255,16 +3258,37 @@ class HttpCli(object):
             self.out_headers.pop("X-Robots-Tag", None)

         is_dir = stat.S_ISDIR(st.st_mode)
+        icur = None
+        if e2t or (e2d and is_dir):
+            idx = self.conn.get_u2idx()
+            icur = idx.get_cur(dbv.realpath)
+
         if self.can_read:
             th_fmt = self.uparam.get("th")
             if th_fmt is not None:
                 if is_dir:
-                    for fn in self.args.th_covers.split(","):
-                        fp = os.path.join(abspath, fn)
-                        if bos.path.exists(fp):
-                            vrem = "{}/{}".format(vrem.rstrip("/"), fn).strip("/")
-                            is_dir = False
-                            break
+                    vrem = vrem.rstrip("/")
+                    if icur and vrem:
+                        q = "select fn from cv where rd=? and dn=?"
+                        crd, cdn = vrem.rsplit("/", 1) if "/" in vrem else ("", vrem)
+                        # no mojibake support:
+                        try:
+                            cfn = icur.execute(q, (crd, cdn)).fetchone()
+                            if cfn:
+                                fn = cfn[0]
+                                fp = os.path.join(abspath, fn)
+                                if bos.path.exists(fp):
+                                    vrem = "{}/{}".format(vrem, fn).strip("/")
+                                    is_dir = False
+                        except:
+                            pass
+                    else:
+                        for fn in self.args.th_covers:
+                            fp = os.path.join(abspath, fn)
+                            if bos.path.exists(fp):
+                                vrem = "{}/{}".format(vrem, fn).strip("/")
+                                is_dir = False
+                                break

                 if is_dir:
                     return self.tx_ico("a.folder")
@@ -3371,8 +3395,8 @@ class HttpCli(object):
             "taglist": [],
             "srvinf": srv_infot,
             "acct": self.uname,
-            "idx": ("e2d" in vn.flags),
-            "itag": ("e2t" in vn.flags),
+            "idx": e2d,
+            "itag": e2t,
             "lifetime": vn.flags.get("lifetime") or 0,
             "frand": bool(vn.flags.get("rand")),
             "perms": perms,
@@ -3391,8 +3415,8 @@ class HttpCli(object):
             "taglist": [],
             "def_hcols": [],
             "have_emp": self.args.emp,
-            "have_up2k_idx": ("e2d" in vn.flags),
-            "have_tags_idx": ("e2t" in vn.flags),
+            "have_up2k_idx": e2d,
+            "have_tags_idx": e2t,
             "have_acode": (not self.args.no_acode),
             "have_mv": (not self.args.no_mv),
             "have_del": (not self.args.no_del),
@@ -3468,11 +3492,6 @@ class HttpCli(object):
         if not self.args.ed or "dots" not in self.uparam:
             ls_names = exclude_dotfiles(ls_names)

-        icur = None
-        if "e2t" in vn.flags:
-            idx = self.conn.get_u2idx()
-            icur = idx.get_cur(dbv.realpath)
-
         add_fk = vn.flags.get("fk")

         dirs = []

copyparty/svchub.py
@@ -348,6 +348,8 @@ class SvcHub(object):
         if al.rsp_jtr:
             al.rsp_slp = 0.000001

+        al.th_covers = set(al.th_covers.split(","))
+
         return True

     def _setlimits(self) -> None:

copyparty/up2k.py
@@ -73,6 +73,9 @@ if True: # pylint: disable=using-constant-test
 if TYPE_CHECKING:
     from .svchub import SvcHub

+zs = "avif,avifs,bmp,gif,heic,heics,heif,heifs,ico,j2p,j2k,jp2,jpeg,jpg,jpx,png,tga,tif,tiff,webp"
+CV_EXTS = set(zs.split(","))
+

 class Dbw(object):
     def __init__(self, c: "sqlite3.Cursor", n: int, t: float) -> None:
@@ -945,6 +948,7 @@ class Up2k(object):
         unreg: list[str] = []
         files: list[tuple[int, int, str]] = []
         fat32 = True
+        cv = ""

         assert self.pp and self.mem_cur
         self.pp.msg = "a{} {}".format(self.pp.n, cdir)
@@ -1007,6 +1011,12 @@ class Up2k(object):
                 continue

             files.append((sz, lmod, iname))
+            liname = iname.lower()
+            if sz and (
+                iname in self.args.th_covers
+                or (not cv and liname.rsplit(".", 1)[-1] in CV_EXTS)
+            ):
+                cv = iname

         # folder of 1000 files = ~1 MiB RAM best-case (tiny filenames);
         # free up stuff we're done with before dhashing
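
Sidenote on the hunk above (not from the commit): a filename listed in --th-covers always wins, while any other nonempty image (extension in CV_EXTS) is only kept as a first-come fallback. A rough standalone sketch of that priority, assuming th_covers has already been split into a set as in the svchub.py hunk:

    # rough sketch of the cover-pick priority; files = (size, name) pairs in scan order
    zs = "avif,avifs,bmp,gif,heic,heics,heif,heifs,ico,j2p,j2k,jp2,jpeg,jpg,jpx,png,tga,tif,tiff,webp"
    CV_EXTS = set(zs.split(","))

    def pick_cover(files, th_covers):
        cv = ""
        for sz, name in files:
            lname = name.lower()
            if sz and (name in th_covers or (not cv and lname.rsplit(".", 1)[-1] in CV_EXTS)):
                cv = name
        return cv

    covers = {"folder.png", "folder.jpg", "cover.png", "cover.jpg"}
    print(pick_cover([(9, "live.png"), (7, "cover.jpg")], covers))  # cover.jpg; exact --th-covers name wins
    print(pick_cover([(9, "live.png"), (7, "band.jpg")], covers))   # live.png; first image is the fallback
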
@@ -1019,6 +1029,7 @@ class Up2k(object):
         zh = hashlib.sha1()
         _ = [zh.update(str(x).encode("utf-8", "replace")) for x in files]

+        zh.update(cv.encode("utf-8", "replace"))
         zh.update(spack(b"<d", cst.st_mtime))
         dhash = base64.urlsafe_b64encode(zh.digest()[:12]).decode("ascii")
         sql = "select d from dh where d = ? and h = ?"
@@ -1032,6 +1043,18 @@ class Up2k(object):
         if c.fetchone():
             return ret

+        if cv and rd:
+            # mojibake not supported (for performance / simplicity):
+            try:
+                q = "select * from cv where rd=? and dn=? and +fn=?"
+                crd, cdn = rd.rsplit("/", 1) if "/" in rd else ("", rd)
+                if not db.c.execute(q, (crd, cdn, cv)).fetchone():
+                    db.c.execute("delete from cv where rd=? and dn=?", (crd, cdn))
+                    db.c.execute("insert into cv values (?,?,?)", (crd, cdn, cv))
+                    db.n += 1
+            except Exception as ex:
+                self.log("cover {}/{} failed: {}".format(rd, cv, ex), 6)
+
         seen_files = set([x[2] for x in files]) # for dropcheck
         for sz, lmod, fn in files:
             if self.stop:
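
Sidenote (an assumption, not stated in the commit): the unary "+" in the "+fn=?" comparisons above and below is presumably the usual SQLite hint that keeps that term out of index selection, so the statements stick to the cv_i index on (rd, dn) rather than wanting an index that covers fn. A tiny standalone check of that reading:

    # tiny check of the "+fn" assumption; cv table and cv_i index as in _add_cv_tab
    import sqlite3

    db = sqlite3.connect(":memory:")
    db.execute("create table cv (rd text, dn text, fn text)")
    db.execute("create index cv_i on cv(rd, dn)")
    q = "explain query plan select * from cv where rd=? and dn=? and +fn=?"
    for row in db.execute(q, ("a", "b", "c")):
        print(row)  # expect something like: SEARCH cv USING INDEX cv_i (rd=? AND dn=?)
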
@@ -1228,6 +1251,18 @@ class Up2k(object):
         if n_rm2:
             self.log("forgetting {} shadowed deleted files".format(n_rm2))

+        # then covers
+        n_rm3 = 0
+        q = "delete from cv where rd=? and dn=? and +fn=?"
+        for crd, cdn, fn in cur.execute("select * from cv"):
+            ap = os.path.join(top, crd, cdn, fn)
+            if not bos.path.exists(ap):
+                c2.execute(q, (crd, cdn, fn))
+                n_rm3 += 1
+
+        if n_rm3:
+            self.log("forgetting {} deleted covers".format(n_rm3))
+
         c2.close()
         return n_rm + n_rm2

@@ -1380,6 +1415,7 @@ class Up2k(object):
             cur, _ = reg
             self._set_tagscan(cur, True)
             cur.execute("delete from dh")
+            cur.execute("delete from cv")
             cur.connection.commit()

     def _set_tagscan(self, cur: "sqlite3.Cursor", need: bool) -> bool:
@@ -1960,6 +1996,7 @@ class Up2k(object):

         if ver == DB_VER:
             try:
+                self._add_cv_tab(cur)
                 self._add_xiu_tab(cur)
                 self._add_dhash_tab(cur)
             except:
@@ -2055,6 +2092,7 @@ class Up2k(object):

         self._add_dhash_tab(cur)
         self._add_xiu_tab(cur)
+        self._add_cv_tab(cur)
         self.log("created DB at {}".format(db_path))
         return cur

@@ -2103,6 +2141,27 @@ class Up2k(object):

         cur.connection.commit()

+    def _add_cv_tab(self, cur: "sqlite3.Cursor") -> None:
+        # v5b -> v5c
+        try:
+            cur.execute("select rd, dn, fn from cv limit 1").fetchone()
+            return
+        except:
+            pass
+
+        for cmd in [
+            r"create table cv (rd text, dn text, fn text)",
+            r"create index cv_i on cv(rd, dn)",
+        ]:
+            cur.execute(cmd)
+
+        try:
+            cur.execute("delete from dh")
+        except:
+            pass
+
+        cur.connection.commit()
+
     def _job_volchk(self, cj: dict[str, Any]) -> None:
         if not self.register_vpath(cj["ptop"], cj["vcfg"]):
             if cj["ptop"] not in self.registry:
@@ -2824,6 +2883,16 @@ class Up2k(object):
         with self.rescan_cond:
             self.rescan_cond.notify_all()

+        if rd and sz and fn.lower() in self.args.th_covers:
+            # wasteful; db_add will re-index actual covers
+            # but that won't catch existing files
+            crd, cdn = rd.rsplit("/", 1) if "/" in rd else ("", rd)
+            try:
+                db.execute("delete from cv where rd=? and dn=?", (crd, cdn))
+                db.execute("insert into cv values (?,?,?)", (crd, cdn, fn))
+            except:
+                pass
+
     def handle_rm(self, uname: str, ip: str, vpaths: list[str], lim: list[int]) -> str:
         n_files = 0
         ok = {}

copyparty/web/browser.css
@@ -93,6 +93,7 @@
 	--g-fsel-bg: #d39;
 	--g-fsel-b1: #f4a;
 	--g-fsel-ts: #804;
+	--g-dfg: var(--srv-3);
 	--g-fg: var(--a-hil);
 	--g-bg: var(--bg-u2);
 	--g-b1: var(--bg-u4);
@@ -327,6 +328,7 @@ html.c {
 }
 html.cz {
 	--bgg: var(--bg-u2);
+	--srv-3: #fff;
 }
 html.cy {
 	--fg: #fff;
@@ -354,6 +356,7 @@ html.cy {
 	--chk-fg: #fd0;

 	--srv-1: #f00;
+	--srv-3: #fff;
 	--op-aa-bg: #fff;

 	--u2-b1-bg: #f00;
@@ -964,6 +967,9 @@ html.y #path a:hover {
 #ggrid>a.dir:before {
 	content: '📂';
 }
+#ggrid>a.dir>span {
+	color: var(--g-dfg);
+}
 #ggrid>a.au:before {
 	content: '💾';
 }
@@ -1010,6 +1016,9 @@ html.np_open #ggrid>a.au:before {
 	background: var(--g-sel-bg);
 	border-color: var(--g-sel-b1);
 }
+#ggrid>a.sel>span {
+	color: var(--g-sel-fg);
+}
 #ggrid>a.sel,
 #ggrid>a[tt].sel {
 	border-top: 1px solid var(--g-fsel-b1);