mirror of
https://github.com/9001/copyparty.git
synced 2025-08-17 00:52:16 -06:00
add RSS feed output; closes #109
This commit is contained in:
parent
a7e2a0c981
commit
7ffd805a03
25
README.md
25
README.md
|
@ -47,6 +47,7 @@ turn almost any device into a file server with resumable uploads/downloads using
|
|||
* [file manager](#file-manager) - cut/paste, rename, and delete files/folders (if you have permission)
|
||||
* [shares](#shares) - share a file or folder by creating a temporary link
|
||||
* [batch rename](#batch-rename) - select some files and press `F2` to bring up the rename UI
|
||||
* [rss feeds](#rss-feeds) - monitor a folder with your RSS reader
|
||||
* [media player](#media-player) - plays almost every audio format there is
|
||||
* [audio equalizer](#audio-equalizer) - and [dynamic range compressor](https://en.wikipedia.org/wiki/Dynamic_range_compression)
|
||||
* [fix unreliable playback on android](#fix-unreliable-playback-on-android) - due to phone / app settings
|
||||
|
@ -845,6 +846,30 @@ or a mix of both:
|
|||
the metadata keys you can use in the format field are the ones in the file-browser table header (whatever is collected with `-mte` and `-mtp`)
|
||||
|
||||
|
||||
## rss feeds
|
||||
|
||||
monitor a folder with your RSS reader, optionally recursive
|
||||
|
||||
must be enabled per-volume with volflag `rss` or globally with `--rss`
|
||||
|
||||
the feed includes itunes metadata for use with podcast readers such as [AntennaPod](https://antennapod.org/)
|
||||
|
||||
a feed example: https://cd.ocv.me/a/d2/d22/?rss&fext=mp3
|
||||
|
||||
url parameters:
|
||||
|
||||
* `pw=hunter2` for password auth
|
||||
* `recursive` to also include subfolders
|
||||
* `title=foo` changes the feed title (default: folder name)
|
||||
* `fext=mp3,opus` only include mp3 and opus files (default: all)
|
||||
* `nf=30` only show the first 30 results (default: 250)
|
||||
* `sort=m` sort by mtime (file last-modified), newest first (default)
|
||||
* `u` = upload-time; NOTE: non-uploaded files have upload-time `0`
|
||||
* `n` = filename
|
||||
* `s` = filesize
|
||||
* uppercase = reverse-sort; `M` = oldest file first
|
||||
|
||||
|
||||
## media player
|
||||
|
||||
plays almost every audio format there is (if the server has FFmpeg installed for on-demand transcoding)
|
||||
|
|
|
@ -1357,6 +1357,14 @@ def add_transcoding(ap):
|
|||
ap2.add_argument("--ac-maxage", metavar="SEC", type=int, default=86400, help="delete cached transcode output after \033[33mSEC\033[0m seconds")
|
||||
|
||||
|
||||
def add_rss(ap):
|
||||
ap2 = ap.add_argument_group('RSS options')
|
||||
ap2.add_argument("--rss", action="store_true", help="enable RSS output (experimental)")
|
||||
ap2.add_argument("--rss-nf", metavar="HITS", type=int, default=250, help="default number of files to return (url-param 'nf')")
|
||||
ap2.add_argument("--rss-fext", metavar="E,E", type=u, default="", help="default list of file extensions to include (url-param 'fext'); blank=all")
|
||||
ap2.add_argument("--rss-sort", metavar="ORD", type=u, default="m", help="default sort order (url-param 'sort'); [\033[32mm\033[0m]=last-modified [\033[32mu\033[0m]=upload-time [\033[32mn\033[0m]=filename [\033[32ms\033[0m]=filesize; Uppercase=oldest-first. Note that upload-time is 0 for non-uploaded files")
|
||||
|
||||
|
||||
def add_db_general(ap, hcores):
|
||||
noidx = APPLESAN_TXT if MACOS else ""
|
||||
ap2 = ap.add_argument_group('general db options')
|
||||
|
@ -1526,6 +1534,7 @@ def run_argparse(
|
|||
add_db_metadata(ap)
|
||||
add_thumbnail(ap)
|
||||
add_transcoding(ap)
|
||||
add_rss(ap)
|
||||
add_ftp(ap)
|
||||
add_webdav(ap)
|
||||
add_tftp(ap)
|
||||
|
|
|
@ -46,6 +46,7 @@ def vf_bmap() -> dict[str, str]:
|
|||
"og_no_head",
|
||||
"og_s_title",
|
||||
"rand",
|
||||
"rss",
|
||||
"xdev",
|
||||
"xlink",
|
||||
"xvol",
|
||||
|
|
|
@ -131,6 +131,8 @@ LOGUES = [[0, ".prologue.html"], [1, ".epilogue.html"]]
|
|||
|
||||
READMES = [[0, ["preadme.md", "PREADME.md"]], [1, ["readme.md", "README.md"]]]
|
||||
|
||||
RSS_SORT = {"m": "mt", "u": "at", "n": "fn", "s": "sz"}
|
||||
|
||||
|
||||
class HttpCli(object):
|
||||
"""
|
||||
|
@ -1201,8 +1203,146 @@ class HttpCli(object):
|
|||
if "h" in self.uparam:
|
||||
return self.tx_mounts()
|
||||
|
||||
if "rss" in self.uparam:
|
||||
return self.tx_rss()
|
||||
|
||||
return self.tx_browser()
|
||||
|
||||
def tx_rss(self) -> bool:
|
||||
if self.do_log:
|
||||
self.log("RSS %s @%s" % (self.req, self.uname))
|
||||
|
||||
if not self.can_read:
|
||||
return self.tx_404()
|
||||
|
||||
vn = self.vn
|
||||
if not vn.flags.get("rss"):
|
||||
raise Pebkac(405, "RSS is disabled in server config")
|
||||
|
||||
rem = self.rem
|
||||
idx = self.conn.get_u2idx()
|
||||
if not idx or not hasattr(idx, "p_end"):
|
||||
if not HAVE_SQLITE3:
|
||||
raise Pebkac(500, "sqlite3 not found on server; rss is disabled")
|
||||
raise Pebkac(500, "server busy, cannot generate rss; please retry in a bit")
|
||||
|
||||
uv = [rem]
|
||||
if "recursive" in self.uparam:
|
||||
uq = "up.rd like ?||'%'"
|
||||
else:
|
||||
uq = "up.rd == ?"
|
||||
|
||||
zs = str(self.uparam.get("fext", self.args.rss_fext))
|
||||
if zs in ("True", "False"):
|
||||
zs = ""
|
||||
if zs:
|
||||
zsl = []
|
||||
for ext in zs.split(","):
|
||||
zsl.append("+up.fn like '%.'||?")
|
||||
uv.append(ext)
|
||||
uq += " and ( %s )" % (" or ".join(zsl),)
|
||||
|
||||
zs1 = self.uparam.get("sort", self.args.rss_sort)
|
||||
zs2 = zs1.lower()
|
||||
zs = RSS_SORT.get(zs2)
|
||||
if not zs:
|
||||
raise Pebkac(400, "invalid sort key; must be m/u/n/s")
|
||||
|
||||
uq += " order by up." + zs
|
||||
if zs1 == zs2:
|
||||
uq += " desc"
|
||||
|
||||
nmax = int(self.uparam.get("nf") or self.args.rss_nf)
|
||||
|
||||
hits = idx.run_query(self.uname, [self.vn], uq, uv, False, False, nmax)[0]
|
||||
|
||||
pw = self.ouparam.get("pw")
|
||||
if pw:
|
||||
q_pw = "?pw=%s" % (pw,)
|
||||
a_pw = "&pw=%s" % (pw,)
|
||||
for i in hits:
|
||||
i["rp"] += a_pw if "?" in i["rp"] else q_pw
|
||||
else:
|
||||
q_pw = a_pw = ""
|
||||
|
||||
title = self.uparam.get("title") or self.vpath.split("/")[-1]
|
||||
etitle = html_escape(title, True, True)
|
||||
|
||||
baseurl = "%s://%s%s" % (
|
||||
"https" if self.is_https else "http",
|
||||
self.host,
|
||||
self.args.SRS,
|
||||
)
|
||||
feed = "%s%s" % (baseurl, self.req[1:])
|
||||
efeed = html_escape(feed, True, True)
|
||||
edirlink = efeed.split("?")[0] + q_pw
|
||||
|
||||
ret = [
|
||||
"""\
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:itunes="http://www.itunes.com/dtds/podcast-1.0.dtd" xmlns:content="http://purl.org/rss/1.0/modules/content/">
|
||||
\t<channel>
|
||||
\t\t<atom:link href="%s" rel="self" type="application/rss+xml" />
|
||||
\t\t<title>%s</title>
|
||||
\t\t<description></description>
|
||||
\t\t<link>%s</link>
|
||||
\t\t<generator>copyparty-1</generator>
|
||||
"""
|
||||
% (efeed, etitle, edirlink)
|
||||
]
|
||||
|
||||
q = "select fn from cv where rd=? and dn=?"
|
||||
crd, cdn = rem.rsplit("/", 1) if "/" in rem else ("", rem)
|
||||
try:
|
||||
cfn = idx.cur[self.vn.realpath].execute(q, (crd, cdn)).fetchone()[0]
|
||||
bos.stat(os.path.join(vn.canonical(rem), cfn))
|
||||
cv_url = "%s%s?th=jf%s" % (baseurl, vjoin(self.vpath, cfn), a_pw)
|
||||
cv_url = html_escape(cv_url, True, True)
|
||||
zs = """\
|
||||
\t\t<image>
|
||||
\t\t\t<url>%s</url>
|
||||
\t\t\t<title>%s</title>
|
||||
\t\t\t<link>%s</link>
|
||||
\t\t</image>
|
||||
"""
|
||||
ret.append(zs % (cv_url, etitle, edirlink))
|
||||
except:
|
||||
pass
|
||||
|
||||
for i in hits:
|
||||
iurl = html_escape("%s%s" % (baseurl, i["rp"]), True, True)
|
||||
title = unquotep(i["rp"].split("?")[0].split("/")[-1])
|
||||
title = html_escape(title, True, True)
|
||||
tag_t = str(i["tags"].get("title") or "")
|
||||
tag_a = str(i["tags"].get("artist") or "")
|
||||
desc = "%s - %s" % (tag_a, tag_t) if tag_t and tag_a else (tag_t or tag_a)
|
||||
desc = html_escape(desc, True, True) if desc else title
|
||||
mime = html_escape(guess_mime(title))
|
||||
lmod = formatdate(i["ts"])
|
||||
zsa = (iurl, iurl, title, desc, lmod, iurl, mime, i["sz"])
|
||||
zs = (
|
||||
"""\
|
||||
\t\t<item>
|
||||
\t\t\t<guid>%s</guid>
|
||||
\t\t\t<link>%s</link>
|
||||
\t\t\t<title>%s</title>
|
||||
\t\t\t<description>%s</description>
|
||||
\t\t\t<pubDate>%s</pubDate>
|
||||
\t\t\t<enclosure url="%s" type="%s" length="%d"/>
|
||||
"""
|
||||
% zsa
|
||||
)
|
||||
dur = i["tags"].get(".dur")
|
||||
if dur:
|
||||
zs += "\t\t\t<itunes:duration>%d</itunes:duration>\n" % (dur,)
|
||||
ret.append(zs + "\t\t</item>\n")
|
||||
|
||||
ret.append("\t</channel>\n</rss>\n")
|
||||
bret = "".join(ret).encode("utf-8", "replace")
|
||||
self.reply(bret, 200, "text/xml; charset=utf-8")
|
||||
self.log("rss: %d hits, %d bytes" % (len(hits), len(bret)))
|
||||
return True
|
||||
|
||||
def handle_propfind(self) -> bool:
|
||||
if self.do_log:
|
||||
self.log("PFIND %s @%s" % (self.req, self.uname))
|
||||
|
|
|
@ -95,7 +95,7 @@ class U2idx(object):
|
|||
uv: list[Union[str, int]] = [wark[:16], wark]
|
||||
|
||||
try:
|
||||
return self.run_query(uname, vols, uq, uv, False, 99999)[0]
|
||||
return self.run_query(uname, vols, uq, uv, False, True, 99999)[0]
|
||||
except:
|
||||
raise Pebkac(500, min_ex())
|
||||
|
||||
|
@ -301,7 +301,7 @@ class U2idx(object):
|
|||
q += " lower({}) {} ? ) ".format(field, oper)
|
||||
|
||||
try:
|
||||
return self.run_query(uname, vols, q, va, have_mt, lim)
|
||||
return self.run_query(uname, vols, q, va, have_mt, True, lim)
|
||||
except Exception as ex:
|
||||
raise Pebkac(500, repr(ex))
|
||||
|
||||
|
@ -312,6 +312,7 @@ class U2idx(object):
|
|||
uq: str,
|
||||
uv: list[Union[str, int]],
|
||||
have_mt: bool,
|
||||
sort: bool,
|
||||
lim: int,
|
||||
) -> tuple[list[dict[str, Any]], list[str], bool]:
|
||||
if self.args.srch_dbg:
|
||||
|
@ -458,6 +459,7 @@ class U2idx(object):
|
|||
done_flag.append(True)
|
||||
self.active_id = ""
|
||||
|
||||
if sort:
|
||||
ret.sort(key=itemgetter("rp"))
|
||||
|
||||
return ret, list(taglist.keys()), lim < 0 and not clamped
|
||||
|
|
|
@ -122,7 +122,7 @@ class Cfg(Namespace):
|
|||
def __init__(self, a=None, v=None, c=None, **ka0):
|
||||
ka = {}
|
||||
|
||||
ex = "chpw daw dav_auth dav_inf dav_mac dav_rt e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp early_ban ed emp exp force_js getmod grid gsel hardlink ih ihead magic hardlink_only nid nih no_acode no_athumb no_clone no_dav no_db_ip no_del no_dirsz no_dupe no_lifetime no_logues no_mv no_pipe no_poll no_readme no_robots no_sb_md no_sb_lg no_scandir no_tarcmp no_thumb no_vthumb no_zip nrand nw og og_no_head og_s_title q rand re_dirsz smb srch_dbg stats uqe vague_403 vc ver write_uplog xdev xlink xvol zs"
|
||||
ex = "chpw daw dav_auth dav_inf dav_mac dav_rt e2d e2ds e2dsa e2t e2ts e2tsr e2v e2vu e2vp early_ban ed emp exp force_js getmod grid gsel hardlink ih ihead magic hardlink_only nid nih no_acode no_athumb no_clone no_dav no_db_ip no_del no_dirsz no_dupe no_lifetime no_logues no_mv no_pipe no_poll no_readme no_robots no_sb_md no_sb_lg no_scandir no_tarcmp no_thumb no_vthumb no_zip nrand nw og og_no_head og_s_title q rand re_dirsz rss smb srch_dbg stats uqe vague_403 vc ver write_uplog xdev xlink xvol zs"
|
||||
ka.update(**{k: False for k in ex.split()})
|
||||
|
||||
ex = "dedup dotpart dotsrch hook_v no_dhash no_fastboot no_fpool no_htp no_rescan no_sendfile no_ses no_snap no_up_list no_voldump re_dhash plain_ip"
|
||||
|
|
Loading…
Reference in a new issue