read media-tags from files (for display/searching)

ed 2021-03-01 02:50:10 +01:00
parent 9fdc5ee748
commit 8441206e26
14 changed files with 922 additions and 180 deletions


@ -82,7 +82,40 @@ path/name queries are space-separated, AND'ed together, and words are negated wi
* path: `shibayan -bossa` finds all files where one of the folders contain `shibayan` but filters out any results where `bossa` exists somewhere in the path
* name: `demetori styx` gives you [good stuff](https://www.youtube.com/watch?v=zGh0g14ZJ8I&list=PL3A147BD151EE5218&index=9)
## search configuration

searching relies on two databases: the up2k filetree (`-e2d`) and the metadata tags (`-e2t`); configuration can be done through arguments, volume flags, or a mix of both

through arguments:
* `-e2d` enables file indexing on upload
* `-e2ds` also scans writable folders on startup
* `-e2dsa` also scans all mounted volumes (including readonly ones)
* `-e2t` enables metadata indexing on upload
* `-e2ts` also scans for tags in all files that don't have tags yet
* `-e2tsr` also deletes all existing tags, doing a full reindex

the same arguments can be set as volume flags, in addition to `d2d` and `d2t` for disabling:
* `-v ~/music::ce2dsa:ce2tsr` does a full reindex of everything on startup
* `-v ~/music::cd2d` disables **all** indexing, even if any `-e2*` are on
* `-v ~/music::cd2t` disables all `-e2t*` (tags), does not affect `-e2d*`

`e2tsr` is probably always overkill, since `e2ds`/`e2dsa` would pick up any file modifications and cause `e2ts` to reindex those
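
for example, `-e2dsa -e2ts` is probably the most useful combination: scan all volumes on startup, read tags from any files that are missing them, and keep both indexes updated as new files are uploaded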
`-mte` decides which tags to index and display in the browser (and also the display order), this can be changed per-volume:
* `-v ~/music::cmte=title,artist` indexes and displays *title* followed by *artist*

if you add or remove a tag from `mte` you will need to run with `-e2tsr` once to rebuild the database, otherwise only new files will be affected

`-mtm` can be used to add or redefine a metadata mapping; say you have media files with `foo` and `bar` tags and you want them to display as `qux` in the browser (preferring `foo` if both are present), then do `-mtm qux=foo,bar` and now you can `-mte artist,title,qux`

see the beautiful mess of a dictionary in [mtag.py](https://github.com/9001/copyparty/blob/master/copyparty/mtag.py) for the default mappings (should cover mp3, opus, flac, m4a, wav, aif)

`--no-mutagen` disables mutagen and uses ffprobe instead, which...
* is about 20x slower than mutagen
* catches a few tags that mutagen doesn't
* avoids pulling any GPL code into copyparty
* more importantly runs ffprobe on incoming files, which is bad if your ffmpeg has a CVE
# client examples


@ -198,7 +198,7 @@ def main():
and "cflag" is config flags to set on this volume and "cflag" is config flags to set on this volume
list of cflags: list of cflags:
cnodupe rejects existing files (instead of symlinking them) "cnodupe" rejects existing files (instead of symlinking them)
example:\033[35m example:\033[35m
-a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m -a ed:hunter2 -v .::r:aed -v ../inc:dump:w:aed:cnodupe \033[36m
@ -239,9 +239,6 @@ def main():
ap.add_argument("-q", action="store_true", help="quiet") ap.add_argument("-q", action="store_true", help="quiet")
ap.add_argument("-ed", action="store_true", help="enable ?dots") ap.add_argument("-ed", action="store_true", help="enable ?dots")
ap.add_argument("-emp", action="store_true", help="enable markdown plugins") ap.add_argument("-emp", action="store_true", help="enable markdown plugins")
ap.add_argument("-e2d", action="store_true", help="enable up2k database")
ap.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
ap.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate") ap.add_argument("-mcr", metavar="SEC", type=int, default=60, help="md-editor mod-chk rate")
ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)") ap.add_argument("-nw", action="store_true", help="disable writes (benchmark)")
ap.add_argument("-nih", action="store_true", help="no info hostname") ap.add_argument("-nih", action="store_true", help="no info hostname")
@ -250,6 +247,18 @@ def main():
ap.add_argument("--urlform", type=str, default="print,get", help="how to handle url-forms") ap.add_argument("--urlform", type=str, default="print,get", help="how to handle url-forms")
ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt") ap.add_argument("--salt", type=str, default="hunter2", help="up2k file-hash salt")
ap2 = ap.add_argument_group('database options')
ap2.add_argument("-e2d", action="store_true", help="enable up2k database")
ap2.add_argument("-e2ds", action="store_true", help="enable up2k db-scanner, sets -e2d")
ap2.add_argument("-e2dsa", action="store_true", help="scan all folders (for search), sets -e2ds")
ap2.add_argument("-e2t", action="store_true", help="enable metadata indexing")
ap2.add_argument("-e2ts", action="store_true", help="enable metadata scanner, sets -e2t")
ap2.add_argument("-e2tsr", action="store_true", help="rescan all metadata, sets -e2ts")
ap2.add_argument("--no-mutagen", action="store_true", help="use ffprobe for tags instead")
ap2.add_argument("-mtm", metavar="M=t,t,t", action="append", type=str, help="add/replace metadata mapping")
ap2.add_argument("-mte", metavar="M,M,M", type=str, help="tags to index/display (comma-sep.)",
default="circle,album,.tn,artist,title,.dur,.q")
ap2 = ap.add_argument_group('SSL/TLS options') ap2 = ap.add_argument_group('SSL/TLS options')
ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls") ap2.add_argument("--http-only", action="store_true", help="disable ssl/tls")
ap2.add_argument("--https-only", action="store_true", help="disable plaintext") ap2.add_argument("--https-only", action="store_true", help="disable plaintext")
@ -257,14 +266,20 @@ def main():
ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers") ap2.add_argument("--ciphers", metavar="LIST", help="set allowed ciphers")
ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info") ap2.add_argument("--ssl-dbg", action="store_true", help="dump some tls info")
ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets") ap2.add_argument("--ssl-log", metavar="PATH", help="log master secrets")
al = ap.parse_args() al = ap.parse_args()
# fmt: on # fmt: on
if al.e2dsa: # propagate implications
al.e2ds = True for k1, k2 in [
["e2dsa", "e2ds"],
if al.e2ds: ["e2ds", "e2d"],
al.e2d = True ["e2tsr", "e2ts"],
["e2ts", "e2t"],
["e2t", "e2d"],
]:
if getattr(al, k1):
setattr(al, k2, True)
al.i = al.i.split(",") al.i = al.i.split(",")
try: try:
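
the new table-driven loop replaces the old pairwise `if al.e2dsa: al.e2ds = True` checks; a standalone sketch of the implication chain (using argparse.Namespace to stand in for the parsed args):

    from argparse import Namespace

    # -e2tsr alone should light up the whole chain down to -e2d
    al = Namespace(e2dsa=False, e2ds=False, e2d=False, e2tsr=True, e2ts=False, e2t=False)
    for k1, k2 in [
        ["e2dsa", "e2ds"],
        ["e2ds", "e2d"],
        ["e2tsr", "e2ts"],
        ["e2ts", "e2t"],
        ["e2t", "e2d"],
    ]:
        if getattr(al, k1):
            setattr(al, k2, True)

    print(al.e2ts, al.e2t, al.e2d)  # True True True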


@ -206,8 +206,11 @@ class AuthSrv(object):
if lvl in "wa": if lvl in "wa":
mwrite[vol_dst].append(uname) mwrite[vol_dst].append(uname)
if lvl == "c": if lvl == "c":
# config option, currently switches only cval = True
mflags[vol_dst][uname] = True if "=" in uname:
uname, cval = uname.split("=", 1)
mflags[vol_dst][uname] = cval
def reload(self):
    """
@ -248,12 +251,19 @@ class AuthSrv(object):
perms = perms.split(":")
for (lvl, uname) in [[x[0], x[1:]] for x in perms]:
    if lvl == "c":
        cval = True
        if "=" in uname:
            uname, cval = uname.split("=", 1)

        mflags[dst][uname] = cval
        continue

    if uname == "":
        uname = "*"

    if lvl in "ra":
        mread[dst].append(uname)

    if lvl in "wa":
        mwrite[dst].append(uname)
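
the `c`-flag syntax now carries optional values (example values, not from this commit): a volume like `-v ~/music:music:r:cmte=title,artist` reaches this loop as the perm strings `["r", "cmte=title,artist"]`; the `c` branch splits on `=` so the flag lands as `mflags["music"]["mte"] == "title,artist"`, while a bare switch such as `cnodupe` still stores `True`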
@ -268,6 +278,7 @@ class AuthSrv(object):
elif "" not in mount: elif "" not in mount:
# there's volumes but no root; make root inaccessible # there's volumes but no root; make root inaccessible
vfs = VFS(os.path.abspath("."), "") vfs = VFS(os.path.abspath("."), "")
vfs.flags["d2d"] = True
maxdepth = 0 maxdepth = 0
for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))): for dst in sorted(mount.keys(), key=lambda x: (x.count("/"), len(x))):
@ -300,15 +311,27 @@ class AuthSrv(object):
    )
    raise Exception("invalid config")

for vol in vfs.all_vols.values():
    if (self.args.e2ds and vol.uwrite) or self.args.e2dsa:
        vol.flags["e2ds"] = True

    if self.args.e2d:
        vol.flags["e2d"] = True

    for k in ["e2t", "e2ts", "e2tsr"]:
        if getattr(self.args, k):
            vol.flags[k] = True

    # default tag-list if unset
    if "mte" not in vol.flags:
        vol.flags["mte"] = self.args.mte
try:
    v, _ = vfs.get("/", "*", False, True)
    if self.warn_anonwrite and os.getcwd() == v.realpath:
        self.warn_anonwrite = False
        msg = "\033[31manyone can read/write the current directory: {}\033[0m"
        self.log(msg.format(v.realpath))
except Pebkac:
    self.warn_anonwrite = True


@ -222,6 +222,9 @@ class HttpCli(object):
    static_path = os.path.join(E.mod, "web/", self.vpath[5:])
    return self.tx_file(static_path)

if "tree" in self.uparam:
    return self.tx_tree()

# conditional redirect to single volumes
if self.vpath == "" and not self.uparam:
    nread = len(self.rvol)
@ -246,9 +249,6 @@ class HttpCli(object):
    self.vpath = None
    return self.tx_mounts()

if "tree" in self.uparam:
    return self.tx_tree()

return self.tx_browser()

def handle_options(self):
@ -428,7 +428,6 @@ class HttpCli(object):
body["ptop"] = vfs.realpath body["ptop"] = vfs.realpath
body["prel"] = rem body["prel"] = rem
body["addr"] = self.ip body["addr"] = self.ip
body["flag"] = vfs.flags
x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body) x = self.conn.hsrv.broker.put(True, "up2k.handle_json", body)
response = x.get() response = x.get()
@ -445,20 +444,31 @@ class HttpCli(object):
    vols.append([vfs.vpath, vfs.realpath, vfs.flags])

idx = self.conn.get_u2idx()
t0 = time.time()
if "srch" in body:
    # search by up2k hashlist
    vbody = copy.deepcopy(body)
    vbody["hash"] = len(vbody["hash"])
    self.log("qj: " + repr(vbody))
    hits = idx.fsearch(vols, body)
    self.log("q#: {} ({:.2f}s)".format(repr(hits), time.time() - t0))
    taglist = []
else:
    # search by query params
    self.log("qj: " + repr(body))
    hits, taglist = idx.search(vols, body)
    self.log("q#: {} ({:.2f}s)".format(len(hits), time.time() - t0))

order = []
cfg = self.args.mte.split(",")
for t in cfg:
    if t in taglist:
        order.append(t)

for t in taglist:
    if t not in order:
        order.append(t)

r = json.dumps({"hits": hits, "tag_order": order}).encode("utf-8")
self.reply(r, mime="application/json")
return True
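
the tag-order merge above is easy to try in isolation (values hypothetical): tags listed in `-mte` come first in their configured order, then any other tags seen in the hits are appended:

    cfg = "circle,album,artist,title".split(",")
    taglist = ["title", "genre", "artist"]  # tag keys present in the search results

    order = [t for t in cfg if t in taglist]
    order += [t for t in taglist if t not in order]
    print(order)  # ['artist', 'title', 'genre']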
@ -1186,6 +1196,11 @@ class HttpCli(object):
is_ls = "ls" in self.uparam

icur = None
if "e2t" in vn.flags:
    idx = self.conn.get_u2idx()
    icur = idx.get_cur(vn.realpath)
dirs = []
files = []
for fn in vfs_ls:
@ -1241,6 +1256,31 @@ class HttpCli(object):
        dirs.append(item)
    else:
        files.append(item)

    item["rd"] = rem

taglist = {}
for f in files:
    fn = f["name"]
    rd = f["rd"]
    del f["rd"]

    if icur:
        q = "select w from up where rd = ? and fn = ?"
        r = icur.execute(q, (rd, fn)).fetchone()
        if not r:
            continue

        w = r[0][:16]
        tags = {}
        for k, v in icur.execute("select k, v from mt where w = ?", (w,)):
            taglist[k] = True
            tags[k] = v

        f["tags"] = tags

if icur:
    taglist = [k for k in self.args.mte.split(",") if k in taglist]
    for f in dirs:
        f["tags"] = {}
srv_info = []
@ -1293,6 +1333,7 @@ class HttpCli(object):
"srvinf": srv_info, "srvinf": srv_info,
"perms": perms, "perms": perms,
"logues": logues, "logues": logues,
"taglist": taglist,
} }
ret = json.dumps(ret) ret = json.dumps(ret)
self.reply(ret.encode("utf-8", "replace"), mime="application/json") self.reply(ret.encode("utf-8", "replace"), mime="application/json")
@ -1309,7 +1350,10 @@ class HttpCli(object):
files=dirs,
ts=ts,
perms=json.dumps(perms),
taglist=taglist,
tag_order=json.dumps(self.args.mte.split(",")),
have_up2k_idx=("e2d" in vn.flags),
have_tags_idx=("e2t" in vn.flags),
logues=logues,
title=html_escape(self.vpath),
srv_info=srv_info,


@ -20,10 +20,12 @@ except ImportError:
you do not have jinja2 installed,\033[33m
choose one of these:\033[0m
  * apt install python-jinja2
  * {} -m pip install --user jinja2
  * (try another python version, if you have one)
  * (try copyparty.sfx instead)
""".format(
        os.path.basename(sys.executable)
    )
)
sys.exit(1)

copyparty/mtag.py (new file, 305 lines)

@ -0,0 +1,305 @@
# coding: utf-8
from __future__ import print_function, unicode_literals

from math import fabs
import re
import os
import sys
import shutil
import subprocess as sp

from .__init__ import PY2, WINDOWS
from .util import fsenc, fsdec


class MTag(object):
    def __init__(self, log_func, args):
        self.log_func = log_func
        self.usable = True
        mappings = args.mtm
        backend = "ffprobe" if args.no_mutagen else "mutagen"

        if backend == "mutagen":
            self.get = self.get_mutagen
            try:
                import mutagen
            except:
                self.log("could not load mutagen, trying ffprobe instead")
                backend = "ffprobe"

        if backend == "ffprobe":
            self.get = self.get_ffprobe
            # about 20x slower
            if PY2:
                cmd = ["ffprobe", "-version"]
                try:
                    sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
                except:
                    self.usable = False
            else:
                if not shutil.which("ffprobe"):
                    self.usable = False

        if not self.usable:
            msg = "\033[31mneed mutagen or ffprobe to read media tags so please run this:\n {} -m pip install --user mutagen \033[0m"
            self.log(msg.format(os.path.basename(sys.executable)))
            return

        # https://picard-docs.musicbrainz.org/downloads/MusicBrainz_Picard_Tag_Map.html
        tagmap = {
            "album": ["album", "talb", "\u00a9alb", "original-album", "toal"],
            "artist": [
                "artist",
                "tpe1",
                "\u00a9art",
                "composer",
                "performer",
                "arranger",
                "\u00a9wrt",
                "tcom",
                "tpe3",
                "original-artist",
                "tope",
            ],
            "title": ["title", "tit2", "\u00a9nam"],
            "circle": [
                "album-artist",
                "tpe2",
                "aart",
                "conductor",
                "organization",
                "band",
            ],
            ".tn": ["tracknumber", "trck", "trkn", "track"],
            "genre": ["genre", "tcon", "\u00a9gen"],
            "date": [
                "original-release-date",
                "release-date",
                "date",
                "tdrc",
                "\u00a9day",
                "original-date",
                "original-year",
                "tyer",
                "tdor",
                "tory",
                "year",
                "creation-time",
            ],
            ".bpm": ["bpm", "tbpm", "tmpo", "tbp"],
            "key": ["initial-key", "tkey", "key"],
            "comment": ["comment", "comm", "\u00a9cmt", "comments", "description"],
        }

        if mappings:
            for k, v in [x.split("=") for x in mappings]:
                tagmap[k] = v.split(",")

        self.tagmap = {}
        for k, vs in tagmap.items():
            vs2 = []
            for v in vs:
                if "-" not in v:
                    vs2.append(v)
                    continue

                vs2.append(v.replace("-", " "))
                vs2.append(v.replace("-", "_"))
                vs2.append(v.replace("-", ""))

            self.tagmap[k] = vs2

        self.rmap = {
            v: [n, k] for k, vs in self.tagmap.items() for n, v in enumerate(vs)
        }
        # self.get = self.compare

    def log(self, msg):
        self.log_func("mtag", msg)

    def normalize_tags(self, ret, md):
        for k, v in dict(md).items():
            if not v:
                continue

            k = k.lower().split("::")[0].strip()
            mk = self.rmap.get(k)
            if not mk:
                continue

            pref, mk = mk
            if mk not in ret or ret[mk][0] > pref:
                ret[mk] = [pref, v[0]]

        # take first value
        ret = {k: str(v[1]).strip() for k, v in ret.items()}

        # track 3/7 => track 3
        for k, v in ret.items():
            if k[0] == ".":
                v = v.split("/")[0].strip().lstrip("0")
                ret[k] = v or 0

        return ret

    def compare(self, abspath):
        if abspath.endswith(".au"):
            return {}

        print("\n" + abspath)
        r1 = self.get_mutagen(abspath)
        r2 = self.get_ffprobe(abspath)

        keys = {}
        for d in [r1, r2]:
            for k in d.keys():
                keys[k] = True

        diffs = []
        l1 = []
        l2 = []
        for k in sorted(keys.keys()):
            if k in [".q", ".dur"]:
                continue  # lenient

            v1 = r1.get(k)
            v2 = r2.get(k)
            if v1 == v2:
                print(" ", k, v1)
            elif v1 != "0000":  # ffprobe date=0
                diffs.append(k)
                print(" 1", k, v1)
                print(" 2", k, v2)

            if v1:
                l1.append(k)

            if v2:
                l2.append(k)

        if diffs:
            raise Exception()

        return r1

    def get_mutagen(self, abspath):
        import mutagen

        try:
            md = mutagen.File(abspath, easy=True)
            x = md.info.length
        except Exception as ex:
            return {}

        ret = {}
        try:
            dur = int(md.info.length)
            try:
                q = int(md.info.bitrate / 1024)
            except:
                q = int((os.path.getsize(abspath) / dur) / 128)

            ret[".dur"] = [0, dur]
            ret[".q"] = [0, q]
        except:
            pass

        return self.normalize_tags(ret, md)

    def get_ffprobe(self, abspath):
        cmd = ["ffprobe", "-hide_banner", "--", fsenc(abspath)]
        p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
        r = p.communicate()
        txt = r[1].decode("utf-8", "replace")
        txt = [x.rstrip("\r") for x in txt.split("\n")]

        """
        note:
          tags which contain newline will be truncated on first \n,
          ffmpeg emits \n and spacepads the : to align visually
        note:
          the Stream ln always mentions Audio: if audio
          the Stream ln usually has kb/s, is more accurate
          the Duration ln always has kb/s
          the Metadata: after Chapter may contain BPM info,
            title : Tempo: 126.0

        Input #0, wav,
          Metadata:
            date : <OK>
          Duration:
            Chapter #
            Metadata:
              title : <NG>

        Input #0, mp3,
          Metadata:
            album : <OK>
          Duration:
            Stream #0:0: Audio:
            Stream #0:1: Video:
              Metadata:
                comment : <NG>
        """

        ptn_md_beg = re.compile("^( +)Metadata:$")
        ptn_md_kv = re.compile("^( +)([^:]+) *: (.*)")
        ptn_dur = re.compile("^ *Duration: ([^ ]+)(, |$)")
        ptn_br1 = re.compile("^ *Duration: .*, bitrate: ([0-9]+) kb/s(, |$)")
        ptn_br2 = re.compile("^ *Stream.*: Audio:.* ([0-9]+) kb/s(, |$)")
        ptn_audio = re.compile("^ *Stream .*: Audio: ")
        ptn_au_parent = re.compile("^ *(Input #|Stream .*: Audio: )")

        ret = {}
        md = {}
        in_md = False
        is_audio = False
        au_parent = False
        for ln in txt:
            m = ptn_md_kv.match(ln)
            if m and in_md and len(m.group(1)) == in_md:
                _, k, v = [x.strip() for x in m.groups()]
                if k != "" and v != "":
                    md[k] = [v]

                continue
            else:
                in_md = False

            m = ptn_md_beg.match(ln)
            if m and au_parent:
                in_md = len(m.group(1)) + 2
                continue

            au_parent = bool(ptn_au_parent.search(ln))

            if ptn_audio.search(ln):
                is_audio = True

            m = ptn_dur.search(ln)
            if m:
                sec = 0
                tstr = m.group(1)
                if tstr.lower() != "n/a":
                    try:
                        tf = tstr.split(",")[0].split(".")[0].split(":")
                        for f in tf:
                            sec *= 60
                            sec += int(f)
                    except:
                        self.log(
                            "\033[33minvalid timestr from ffmpeg: [{}]".format(tstr)
                        )

                ret[".dur"] = sec

            m = ptn_br1.search(ln)
            if m:
                ret[".q"] = m.group(1)

            m = ptn_br2.search(ln)
            if m:
                ret[".q"] = m.group(1)

        if not is_audio:
            return {}

        ret = {k: [0, v] for k, v in ret.items()}
        return self.normalize_tags(ret, md)
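
two quick sketches of the above (names, paths and values hypothetical): first the tag-preference rule from normalize_tags: `artist` sits at index 0 of the "artist" list and `performer` at index 4, so the lower index wins when a file carries both:

    rmap = {"artist": [0, "artist"], "performer": [4, "artist"]}
    md = {"Performer": ["nobody"], "ARTIST": ["demetori"]}

    ret = {}
    for k, v in md.items():
        pref, mk = rmap[k.lower()]
        if mk not in ret or ret[mk][0] > pref:
            ret[mk] = [pref, v[0]]

    print({k: v[1] for k, v in ret.items()})  # {'artist': 'demetori'}

and second, a minimal end-to-end use of MTag, with a Namespace mirroring the new argparse options:

    from argparse import Namespace
    from copyparty.mtag import MTag

    mt = MTag(lambda src, msg: print(src, msg), Namespace(mtm=None, no_mutagen=False))
    if mt.usable:
        print(mt.get("/music/demetori/styx.mp3"))
        # e.g. {'artist': '...', 'title': '...', '.dur': '213', '.q': '320'}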


@ -39,14 +39,6 @@ class SvcHub(object):
self.tcpsrv = TcpSrv(self)
self.up2k = Up2k(self)

if self.args.e2ds:
    auth = AuthSrv(self.args, self.log, False)
    vols = auth.vfs.all_vols.values()
    if not self.args.e2dsa:
        vols = [x for x in vols if x.uwrite]

    self.up2k.build_indexes(vols)

# decide which worker impl to use
if self.check_mp_enable():
    from .broker_mp import BrokerMp as Broker


@ -37,7 +37,19 @@ class U2idx(object):
fsize = body["size"]
fhash = body["hash"]
wark = up2k_wark_from_hashlist(self.args.salt, fsize, fhash)

return self.run_query(vols, "w = ?", [wark], "", [])

def get_cur(self, ptop):
    cur = self.cur.get(ptop)
    if cur:
        return cur

    cur = _open(ptop)
    if not cur:
        return None

    self.cur[ptop] = cur
    return cur
def search(self, vols, body):
    """search by query params"""
@ -45,53 +57,74 @@ class U2idx(object):
    return []

qobj = {}
_conv_sz(qobj, body, "sz_min", "up.sz >= ?")
_conv_sz(qobj, body, "sz_max", "up.sz <= ?")
_conv_dt(qobj, body, "dt_min", "up.mt >= ?")
_conv_dt(qobj, body, "dt_max", "up.mt <= ?")

for seg, dk in [["path", "up.rd"], ["name", "up.fn"]]:
    if seg in body:
        _conv_txt(qobj, body, seg, dk)

uq, uv = _sqlize(qobj)

tq = ""
tv = []
qobj = {}
if "tags" in body:
    _conv_txt(qobj, body, "tags", "mt.v")
    tq, tv = _sqlize(qobj)

return self.run_query(vols, uq, uv, tq, tv)
def run_query(self, vols, uq, uv, tq, tv):
    self.log("qs: {} {} , {} {}".format(uq, repr(uv), tq, repr(tv)))

    ret = []
    lim = 1000
    taglist = {}
    for (vtop, ptop, flags) in vols:
        cur = self.get_cur(ptop)
        if not cur:
            continue

        if not tq:
            if not uq:
                q = "select * from up"
                v = ()
            else:
                q = "select * from up where " + uq
                v = tuple(uv)
        else:
            # naive assumption: tags first
            q = "select up.* from up inner join mt on substr(up.w,1,16) = mt.w where {}"
            q = q.format(" and ".join([tq, uq]) if uq else tq)
            v = tuple(tv + uv)

        sret = []
        c = cur.execute(q, v)
        for hit in c:
            w, ts, sz, rd, fn = hit
            lim -= 1
            if lim <= 0:
                break

            rp = os.path.join(vtop, rd, fn).replace("\\", "/")
            sret.append({"ts": int(ts), "sz": sz, "rp": rp, "w": w[:16]})

        for hit in sret:
            w = hit["w"]
            del hit["w"]
            tags = {}
            q = "select k, v from mt where w = ?"
            for k, v in cur.execute(q, (w,)):
                taglist[k] = True
                tags[k] = v

            hit["tags"] = tags

        ret.extend(sret)

    return ret, taglist.keys()
def _open(ptop):
@ -146,3 +179,13 @@ def _conv_txt(q, body, k, sql):
qk = "{} {} like {}?{}".format(sql, inv, head, tail) qk = "{} {} like {}?{}".format(sql, inv, head, tail)
q[qk + "\n" + v] = u8safe(v) q[qk + "\n" + v] = u8safe(v)
def _sqlize(qobj):
    keys = []
    values = []
    for k, v in sorted(qobj.items()):
        keys.append(k.split("\n")[0])
        values.append(v)

    return " and ".join(keys), values


@ -3,6 +3,7 @@ from __future__ import print_function, unicode_literals
import re
import os
import sys
import time
import math
import json
@ -12,6 +13,7 @@ import shutil
import base64
import hashlib
import threading
import traceback

from copy import deepcopy
from .__init__ import WINDOWS
@ -27,6 +29,8 @@ from .util import (
    w8b64enc,
    w8b64dec,
)
from .mtag import MTag
from .authsrv import AuthSrv

try:
    HAVE_SQLITE3 = True
@ -55,12 +59,14 @@ class Up2k(object):
# state
self.mutex = threading.Lock()
self.registry = {}
self.entags = {}
self.flags = {}
self.cur = {}
self.mem_cur = None
if HAVE_SQLITE3:
    # mojibake detector
    self.mem_cur = self._orz(":memory:")
    self.mem_cur.execute(r"create table a (b text)")

if WINDOWS:
@ -70,10 +76,9 @@ class Up2k(object):
thr.daemon = True
thr.start()

self.mtag = MTag(self.log_func, self.args)
if not self.mtag.usable:
    self.mtag = None

# static
self.r_hash = re.compile("^[0-9a-zA-Z_-]{43}$")
@ -81,6 +86,15 @@ class Up2k(object):
if self.persist and not HAVE_SQLITE3:
    self.log("could not initialize sqlite3, will use in-memory registry only")

# this is kinda jank
auth = AuthSrv(self.args, self.log, False)
self.init_indexes(auth)

if self.persist:
    thr = threading.Thread(target=self._snapshot)
    thr.daemon = True
    thr.start()
def log(self, msg):
    self.log_func("up2k", msg + "\033[K")
@ -119,7 +133,49 @@ class Up2k(object):
    return ret

def init_indexes(self, auth):
    self.pp = ProgressPrinter()
    vols = auth.vfs.all_vols.values()
    t0 = time.time()
    needed_mutagen = False

    # e2ds(a) volumes first,
    # also covers tags where e2ts is set
    for vol in vols:
        en = {}
        if "mte" in vol.flags:
            en = {k: True for k in vol.flags["mte"].split(",")}

        self.entags[vol.realpath] = en
        if "e2ds" in vol.flags:
            r = self._build_file_index(vol, vols)
            if not r:
                needed_mutagen = True

    # open the rest + do any e2ts(a)
    for vol in vols:
        r = self.register_vpath(vol.realpath, vol.flags)
        if not r or "e2ts" not in vol.flags:
            continue

        cur, db_path, sz0 = r
        n_add, n_rm, success = self._build_tags_index(vol.realpath)
        if not success:
            needed_mutagen = True

        if n_add or n_rm:
            self.vac(cur, db_path, n_add, n_rm, sz0)

    self.pp.end = True
    msg = "{} volumes in {:.2f} sec"
    self.log(msg.format(len(vols), time.time() - t0))

    if needed_mutagen:
        msg = "\033[31mcould not read tags because no backends are available (mutagen or ffprobe)\033[0m"
        self.log(msg)
def register_vpath(self, ptop, flags):
with self.mutex:
    if ptop in self.registry:
        return None
@ -138,8 +194,9 @@ class Up2k(object):
    m = [m] + self._vis_reg_progress(reg)
    self.log("\n".join(m))

self.flags[ptop] = flags
self.registry[ptop] = reg
if not self.persist or not HAVE_SQLITE3 or "d2d" in flags:
    return None
try:
@ -152,48 +209,55 @@ class Up2k(object):
        return None

    try:
        sz0 = 0
        if os.path.exists(db_path):
            sz0 = os.path.getsize(db_path) // 1024

        cur = self._open_db(db_path)
        self.cur[ptop] = cur
        return [cur, db_path, sz0]
    except:
        msg = "cannot use database at [{}]:\n{}"
        self.log(msg.format(ptop, traceback.format_exc()))

    return None

def _build_file_index(self, vol, all_vols):
    do_vac = False
    top = vol.realpath
    reg = self.register_vpath(top, vol.flags)
    if not reg:
        return

    _, db_path, sz0 = reg
    dbw = [reg[0], 0, time.time()]
    self.pp.n = next(dbw[0].execute("select count(w) from up"))[0]

    # can be symlink so don't `and d.startswith(top)`
    excl = set([d.realpath for d in all_vols if d != vol])
    n_add = self._build_dir(dbw, top, excl, top)
    n_rm = self._drop_lost(dbw[0], top)
    if dbw[1]:
        self.log("commit {} new files".format(dbw[1]))
        dbw[0].connection.commit()

    n_add, n_rm, success = self._build_tags_index(vol.realpath)

    dbw[0].connection.commit()
    if n_add or n_rm or do_vac:
        self.vac(dbw[0], db_path, n_add, n_rm, sz0)

    return success

def vac(self, cur, db_path, n_add, n_rm, sz0):
    sz1 = os.path.getsize(db_path) // 1024
    cur.execute("vacuum")
    sz2 = os.path.getsize(db_path) // 1024
    msg = "{} new, {} del, {} kB vacced, {} kB gain, {} kB now".format(
        n_add, n_rm, sz1 - sz2, sz2 - sz0, sz2
    )
    self.log(msg)
def _build_dir(self, dbw, top, excl, cdir):
    try:
        inodes = [fsdec(x) for x in os.listdir(fsenc(cdir))]
@ -298,39 +362,144 @@ class Up2k(object):
    return len(rm)

def _build_tags_index(self, ptop):
    entags = self.entags[ptop]
    flags = self.flags[ptop]
    cur = self.cur[ptop]
    n_add = 0
    n_rm = 0
    n_buf = 0
    last_write = time.time()

    if "e2tsr" in flags:
        n_rm = cur.execute("select count(w) from mt").fetchone()[0]
        if n_rm:
            self.log("discarding {} media tags for a full rescan".format(n_rm))
            cur.execute("delete from mt")
        else:
            self.log("volume has e2tsr but there are no media tags to discard")

    # integrity: drop tags for tracks that were deleted
    if "e2t" in flags:
        drops = []
        c2 = cur.connection.cursor()
        up_q = "select w from up where substr(w,1,16) = ?"
        for (w,) in cur.execute("select w from mt"):
            if not c2.execute(up_q, (w,)).fetchone():
                drops.append(w)

        c2.close()
        if drops:
            msg = "discarding media tags for {} deleted files"
            self.log(msg.format(len(drops)))
            n_rm += len(drops)
            for w in drops:
                cur.execute("delete from mt where w = ?", (w,))

    # bail if a volume flag disables indexing
    if "d2t" in flags or "d2d" in flags:
        return n_add, n_rm, True

    # add tags for new files
    if "e2ts" in flags:
        if not self.mtag:
            return n_add, n_rm, False

        c2 = cur.connection.cursor()
        n_left = cur.execute("select count(w) from up").fetchone()[0]
        for w, rd, fn in cur.execute("select w, rd, fn from up"):
            n_left -= 1
            q = "select w from mt where w = ?"
            if c2.execute(q, (w[:16],)).fetchone():
                continue

            abspath = os.path.join(ptop, rd, fn)
            self.pp.msg = "c{} {}".format(n_left, abspath)
            tags = self.mtag.get(abspath)
            tags = {k: v for k, v in tags.items() if k in entags}
            if not tags:
                # indicate scanned without tags
                tags = {"x": 0}

            for k, v in tags.items():
                q = "insert into mt values (?,?,?)"
                c2.execute(q, (w[:16], k, v))
                n_add += 1
                n_buf += 1

            td = time.time() - last_write
            if n_buf >= 4096 or td >= 60:
                self.log("commit {} new tags".format(n_buf))
                cur.connection.commit()
                last_write = time.time()
                n_buf = 0

        c2.close()

    return n_add, n_rm, True

def _orz(self, db_path):
    return sqlite3.connect(db_path, check_same_thread=False).cursor()
def _open_db(self, db_path):
    existed = os.path.exists(db_path)
    cur = self._orz(db_path)
    try:
        ver = self._read_ver(cur)
    except:
        ver = None

    if not existed:
        return self._create_db(db_path, cur)

    orig_ver = ver
    if not ver or ver < 3:
        bak = "{}.bak.{:x}.v{}".format(db_path, int(time.time()), ver)
        db = cur.connection
        cur.close()
        db.close()
        msg = "creating new DB (old is bad); backup: {}"
        if ver:
            msg = "creating backup before upgrade: {}"

        self.log(msg.format(bak))
        shutil.copy2(db_path, bak)
        cur = self._orz(db_path)

    if ver == 1:
        cur = self._upgrade_v1(cur, db_path)
        if cur:
            ver = 2

    if ver == 2:
        cur = self._create_v3(cur)
        ver = self._read_ver(cur) if cur else None

    if ver == 3:
        if orig_ver != ver:
            cur.connection.commit()
            cur.execute("vacuum")
            cur.connection.commit()

        try:
            nfiles = next(cur.execute("select count(w) from up"))[0]
            self.log("OK: {} |{}|".format(db_path, nfiles))
            return cur
        except Exception as ex:
            self.log("WARN: could not list files, DB corrupt?\n " + repr(ex))

    if cur:
        db = cur.connection
        cur.close()
        db.close()

    return self._create_db(db_path, None)

def _create_db(self, db_path, cur):
    if not cur:
        cur = self._orz(db_path)

    # sqlite is variable-width only, no point in using char/nchar/varchar
    self._create_v2(cur)
    self._create_v3(cur)
    cur.connection.commit()
    self.log("created DB at {}".format(db_path))
    return cur
@ -348,24 +517,45 @@ class Up2k(object):
def _create_v2(self, cur):
    for cmd in [
        r"create table up (w text, mt int, sz int, rd text, fn text)",
        r"create index up_rd on up(rd)",
        r"create index up_fn on up(fn)",
    ]:
        cur.execute(cmd)

    return cur
def _create_v3(self, cur):
    """
    collision in 2^(n/2) files where n = bits (6 bits/ch)
     10*6/2 = 2^30 =               1'073'741'824, 24.1mb idx
     12*6/2 = 2^36 =              68'719'476'736, 24.8mb idx
     16*6/2 = 2^48 =         281'474'976'710'656, 26.1mb idx
    """
    for c, ks in [["drop table k", "isv"], ["drop index up_", "w"]]:
        for k in ks:
            try:
                cur.execute(c + k)
            except:
                pass

    for cmd in [
        r"create index up_w on up(substr(w,1,16))",
        r"create table mt (w text, k text, v int)",
        r"create index mt_w on mt(w)",
        r"create index mt_k on mt(k)",
        r"create index mt_v on mt(v)",
        r"create table kv (k text, v int)",
        r"insert into kv values ('sver', 3)",
    ]:
        cur.execute(cmd)

    return cur
def _upgrade_v1(self, odb, db_path):
    self.log("\033[33mupgrading v1 to v2:\033[0m {}".format(db_path))
    npath = db_path + ".next"
    if os.path.exists(npath):
        os.unlink(npath)

    ndb = self._orz(npath)
    self._create_v2(ndb)

    c = odb.execute("select * from up")
@ -377,14 +567,10 @@ class Up2k(object):
    ndb.connection.commit()
    ndb.connection.close()
    odb.connection.close()
    atomic_move(npath, db_path)
    return self._orz(db_path)
def handle_json(self, cj):
    self.register_vpath(cj["ptop"])
    cj["name"] = sanitize_fn(cj["name"])
    cj["poke"] = time.time()
    wark = self._get_wark(cj)
@ -394,7 +580,10 @@ class Up2k(object):
cur = self.cur.get(cj["ptop"], None)
reg = self.registry[cj["ptop"]]
if cur:
    cur = cur.execute(
        r"select * from up where substr(w,1,16) = ? and w = ?",
        (wark[:16], wark,),
    )
    for _, dtime, dsize, dp_dir, dp_fn in cur:
        if dp_dir.startswith("//") or dp_fn.startswith("//"):
            dp_dir, dp_fn = self.w8dec(dp_dir, dp_fn)
@ -407,7 +596,6 @@ class Up2k(object):
"prel": dp_dir, "prel": dp_dir,
"vtop": cj["vtop"], "vtop": cj["vtop"],
"ptop": cj["ptop"], "ptop": cj["ptop"],
"flag": cj["flag"],
"size": dsize, "size": dsize,
"lmod": dtime, "lmod": dtime,
"hash": [], "hash": [],
@ -444,7 +632,7 @@ class Up2k(object):
err = "partial upload exists at a different location; please resume uploading here instead:\n" err = "partial upload exists at a different location; please resume uploading here instead:\n"
err += "/" + vsrc + " " err += "/" + vsrc + " "
raise Pebkac(400, err) raise Pebkac(400, err)
elif "nodupe" in job["flag"]: elif "nodupe" in self.flags[job["ptop"]]:
self.log("dupe-reject:\n {0}\n {1}".format(src, dst)) self.log("dupe-reject:\n {0}\n {1}".format(src, dst))
err = "upload rejected, file already exists:\n/" + vsrc + " " err = "upload rejected, file already exists:\n/" + vsrc + " "
raise Pebkac(400, err) raise Pebkac(400, err)
@ -474,7 +662,6 @@ class Up2k(object):
"vtop", "vtop",
"ptop", "ptop",
"prel", "prel",
"flag",
"name", "name",
"size", "size",
"lmod", "lmod",
@ -603,7 +790,7 @@ class Up2k(object):
def db_add(self, db, wark, rd, fn, ts, sz):
    sql = "insert into up values (?,?,?,?,?)"
    v = (wark, int(ts), sz, rd, fn)
    try:
        db.execute(sql, v)
    except:


@ -46,7 +46,7 @@ body {
  display: none;
}
#files {
  border-spacing: 0;
  margin-top: 2em;
  z-index: 1;
  position: relative;
@ -94,6 +94,13 @@ a,
  margin: 0;
  padding: 0 .5em;
}
#files td {
  border-bottom: 1px solid #111;
  max-width: 30em;
}
#files tr+tr td {
  border-top: 1px solid #383838;
}
#files tbody td:nth-child(3) {
  font-family: monospace;
  font-size: 1.3em;
@ -111,6 +118,7 @@ a,
#files tbody tr:last-child td {
  padding-bottom: 1.3em;
  border-bottom: .5em solid #444;
  white-space: nowrap;
}
#files thead th[style] {
  width: auto !important;
@ -400,14 +408,13 @@ input[type="checkbox"]:checked+label {
  color: #fff;
}
#files td div a {
  display: inline-block;
  white-space: nowrap;
}
#files td div a:last-child {
  width: 100%;
}
#files td div {
  display: table;
  border-collapse: collapse;
  width: 100%;
}


@ -26,7 +26,11 @@
</div>

<div id="op_search" class="opview">
  {%- if have_tags_idx %}
  <table id="srch_form" class="tags"></table>
  {%- else %}
  <table id="srch_form"></table>
  {%- endif %}
  <div id="srch_q"></div>
</div>

{%- include 'upload.html' %}
@ -55,7 +59,14 @@
<tr>
  <th></th>
  <th>File Name</th>
  <th sort="int">Size</th>
  {%- for k in taglist %}
  {%- if k.startswith('.') %}
  <td sort="int">{{ k[1:] }}</td>
  {%- else %}
  <td>{{ k[0]|upper }}{{ k[1:] }}</td>
  {%- endif %}
  {%- endfor %}
  <th>T</th>
  <th>Date</th>
</tr>
@ -63,7 +74,13 @@
<tbody>
  {%- for f in files %}
  <tr><td>{{ f.lead }}</td><td><a href="{{ f.href }}">{{ f.name|e }}</a></td><td>{{ f.sz }}</td>
  {%- if f.tags is defined %}
  {%- for k in taglist %}
  <td>{{ f.tags[k] }}</td>
  {%- endfor %}
  {%- endif %}
  <td>{{ f.ext }}</td><td>{{ f.dt }}</td></tr>
  {%- endfor %}
</tbody>
@ -87,6 +104,9 @@
  </div>
</div>

<script>
var tag_order_cfg = {{ tag_order }};
</script>

<script src="/.cpr/util.js{{ ts }}"></script>
<script src="/.cpr/browser.js{{ ts }}"></script>
<script src="/.cpr/up2k.js{{ ts }}"></script>


@ -472,7 +472,7 @@ function play(tid, call_depth) {
o.setAttribute('id', 'thx_js');
if (window.history && history.replaceState) {
    var nurl = (document.location + '').split('#')[0] + '#' + oid;
    history.replaceState(ebi('files').innerHTML, nurl, nurl);
}
else {
    document.location.hash = oid;
@ -591,6 +591,12 @@ function autoplay_blocked() {
["name", "name", "name contains &nbsp; (negate with -nope)", "46"] ["name", "name", "name contains &nbsp; (negate with -nope)", "46"]
] ]
]; ];
if (document.querySelector('#srch_form.tags'))
sconf.push(["tags",
["tags", "tags", "tags contains", "46"]
]);
var html = [];
var orig_html = null;
for (var a = 0; a < sconf.length; a++) {
@ -653,6 +659,9 @@ function autoplay_blocked() {
    return;
}

var res = JSON.parse(this.responseText),
    tagord = res.tag_order;

var ofiles = ebi('files');
if (ofiles.getAttribute('ts') > this.ts)
    return;
@ -660,10 +669,11 @@ function autoplay_blocked() {
ebi('path').style.display = 'none';
ebi('tree').style.display = 'none';

var html = mk_files_header(tagord);
html.push('<tbody>');
html.push('<tr><td>-</td><td colspan="42"><a href="#" id="unsearch">close search results</a></td></tr>');
for (var a = 0; a < res.hits.length; a++) {
    var r = res.hits[a],
        ts = parseInt(r.ts),
        sz = esc(r.sz + ''),
        rp = esc(r.rp + ''),
@ -674,14 +684,29 @@ function autoplay_blocked() {
        ext = '%';

    links = links.join('');

    var nodes = ['<tr><td>-</td><td><div>' + links + '</div>', sz];
    for (var b = 0; b < tagord.length; b++) {
        var k = tagord[b],
            v = r.tags[k] || "";

        if (k[0] == '.')
            k = k.slice(1);

        if (k == "dur") {
            var sv = s2ms(v);
            nodes[nodes.length - 1] += '</td><td sortv="' + v + '">' + sv;
            continue;
        }

        nodes.push(v);
    }

    nodes = nodes.concat([ext, unix2iso(ts)]);
    html.push(nodes.join('</td><td>'));
    html.push('</td></tr>');
}

if (!orig_html)
    orig_html = ebi('files').innerHTML;

ofiles.innerHTML = html.join('\n');
ofiles.setAttribute("ts", this.ts);
reload_browser();
@ -692,7 +717,7 @@ function autoplay_blocked() {
ev(e);
ebi('path').style.display = 'inline-block';
ebi('tree').style.display = 'block';
ebi('files').innerHTML = orig_html;
orig_html = null;
reload_browser();
}
@ -851,17 +876,34 @@ function autoplay_blocked() {
ebi('srv_info').innerHTML = '<span>' + res.srvinf + '</span>';

var nodes = res.dirs.concat(res.files);
var top = this.top;
var html = mk_files_header(res.taglist);
html.push('<tbody>');
for (var a = 0; a < nodes.length; a++) {
    var r = nodes[a],
        ln = ['<tr><td>' + r.lead + '</td><td><a href="' +
            top + r.href + '">' + esc(decodeURIComponent(r.href)) + '</a>', r.sz];

    for (var b = 0; b < res.taglist.length; b++) {
        var k = res.taglist[b],
            v = (r.tags || {})[k] || "";

        if (k[0] == '.')
            k = k.slice(1);

        if (k == "dur") {
            var sv = s2ms(v);
            ln[ln.length - 1] += '</td><td sortv="' + v + '">' + sv;
            continue;
        }

        ln.push(v);
    }

    ln = ln.concat([r.ext, unix2iso(r.ts)]).join('</td><td>');
    html.push(ln + '</td></tr>');
}
html.push('</tbody>');
html = html.join('\n');
ebi('files').innerHTML = html;
history.pushState(html, this.top, this.top);
apply_perms(res.perms);
despin('#files');
@ -924,7 +966,7 @@ function autoplay_blocked() {
window.onpopstate = function (e) {
    console.log(e.url + ' ,, ' + ((e.state + '').slice(0, 64)));
    if (e.state) {
        ebi('files').innerHTML = e.state;
        reload_tree();
        reload_browser();
    }
@ -932,7 +974,7 @@ function autoplay_blocked() {
if (window.history && history.pushState) {
    var u = get_vpath();
    history.replaceState(ebi('files').innerHTML, u, u);
}
})();
@ -989,6 +1031,28 @@ function apply_perms(perms) {
}

function mk_files_header(taglist) {
    var html = ['<thead>', '<th></th>', '<th>File Name</th>', '<th sort="int">Size</th>'];
    for (var a = 0; a < taglist.length; a++) {
        var tag = taglist[a];
        var c1 = tag.slice(0, 1).toUpperCase();
        tag = c1 + tag.slice(1);

        if (c1 == '.')
            tag = '<th sort="int">' + tag.slice(1);
        else
            tag = '<th>' + tag;

        html.push(tag + '</th>');
    }
    html = html.concat([
        '<th>T</th>',
        '<th>Date</th>',
        '</thead>',
    ]);
    return html;
}
function reload_browser(not_mp) {
    makeSortable(ebi('files'));
@ -1012,6 +1076,7 @@ function reload_browser(not_mp) {
    hsz = sz.replace(/\B(?=(\d{3})+(?!\d))/g, " ");

oo[a].textContent = hsz;
oo[a].setAttribute("sortv", sz);
}

if (!not_mp) {


@ -772,13 +772,13 @@ function up2k_init(have_crypto) {
if (!response.name) {
    var msg = '';
    var smsg = '';
    if (!response || !response.hits || !response.hits.length) {
        msg = 'not found on server';
        smsg = '404';
    }
    else {
        smsg = 'found';
        var hit = response.hits[0],
            msg = linksplit(hit.rp).join(''),
            tr = unix2iso(hit.ts),
            tu = unix2iso(t.lmod),

@ -76,7 +76,7 @@ function import_js(url, cb) {
function sortTable(table, col) {
    var tb = table.tBodies[0],
        th = table.tHead.rows[0].cells,
        tr = Array.prototype.slice.call(tb.rows, 0),
        i, reverse = th[col].className == 'sort1' ? -1 : 1;
@ -90,11 +90,11 @@ function sortTable(table, col) {
if (!b.cells[col])
    return 1;

var v1 = a.cells[col].getAttribute('sortv') || a.cells[col].textContent.trim();
var v2 = b.cells[col].getAttribute('sortv') || b.cells[col].textContent.trim();

if (stype == 'int') {
    v1 = parseInt(v1.replace(/,/g, '')) || 0;
    v2 = parseInt(v2.replace(/,/g, '')) || 0;
    return reverse * (v1 - v2);
}
return reverse * (v1.localeCompare(v2));
@ -225,6 +225,12 @@ function unix2iso(ts) {
}

function s2ms(s) {
    var m = Math.floor(s / 60);
    return m + ":" + ("0" + (s - m * 60)).slice(-2);
}
function has(haystack, needle) {
    for (var a = 0; a < haystack.length; a++)
        if (haystack[a] == needle)