mirror of https://github.com/9001/copyparty.git
synced 2025-08-21 10:52:20 -06:00

Compare commits

No commits in common. "hovudstraum" and "v1.1.5" have entirely different histories.

hovudstraum ... v1.1.5
33
.github/ISSUE_TEMPLATE/bug_report.md
vendored
|
@ -8,42 +8,33 @@ assignees: '9001'

---

NOTE:
**please use english, or include an english translation.** aside from that,
all of the below are optional, consider them as inspiration, delete and rewrite at will, thx md

### Describe the bug
**Describe the bug**
a description of what the bug is

### To Reproduce
**To Reproduce**
List of steps to reproduce the issue, or, if it's hard to reproduce, then at least a detailed explanation of what you did to run into it

### Expected behavior
**Expected behavior**
a description of what you expected to happen

### Screenshots
**Screenshots**
if applicable, add screenshots to help explain your problem, such as the kickass crashpage :^)

### Server details (if you are using docker/podman)
remove the ones that are not relevant:
* **server OS / version:**
* **how you're running copyparty:** (docker/podman/something-else)
* **docker image:** (variant, version, and arch if you know)
* **copyparty arguments and/or config-file:**

**Server details**
if the issue is possibly on the server-side, then mention some of the following:
* server OS / version:
* python version:
* copyparty arguments:
* filesystem (`lsblk -f` on linux):

### Server details (if you're NOT using docker/podman)
remove the ones that are not relevant:
* **server OS / version:**
* **what copyparty did you grab:** (sfx/exe/pip/arch/...)
* **how you're running it:** (in a terminal, as a systemd-service, ...)
* run copyparty with `--version` and grab the last 3 lines (they start with `copyparty`, `CPython`, `sqlite`) and paste them below this line:
* **copyparty arguments and/or config-file:**

### Client details
**Client details**
if the issue is possibly on the client-side, then mention some of the following:
* the device type and model:
* OS version:
* browser version:

### Additional context
**Additional context**
any other context about the problem here
|
2
.github/ISSUE_TEMPLATE/feature_request.md
vendored
|
@ -7,8 +7,6 @@ assignees: '9001'
|
|||
|
||||
---

NOTE:
**please use english, or include an english translation.** aside from that,
all of the below are optional, consider them as inspiration, delete and rewrite at will

**is your feature request related to a problem? Please describe.**
|
|
2
.github/pull_request_template.md
vendored
|
@ -1,2 +0,0 @@
|
|||
To show that your contribution is compatible with the MIT License, please include the following text somewhere in this PR description:
This PR complies with the DCO; https://developercertificate.org/
|
31
.gitignore
vendored
|
@ -5,44 +5,23 @@ __pycache__/
|
|||
MANIFEST.in
|
||||
MANIFEST
|
||||
copyparty.egg-info/
|
||||
buildenv/
|
||||
build/
|
||||
dist/
|
||||
sfx/
|
||||
py2/
|
||||
.venv/
|
||||
|
||||
/buildenv/
|
||||
/build/
|
||||
/dist/
|
||||
/py2/
|
||||
/sfx*
|
||||
/pyz/
|
||||
/unt/
|
||||
/log/
|
||||
|
||||
# ide
|
||||
*.sublime-workspace
|
||||
|
||||
# winmerge
|
||||
*.bak
|
||||
|
||||
# apple pls
|
||||
.DS_Store
|
||||
|
||||
# derived
|
||||
copyparty/res/COPYING.txt
|
||||
copyparty/web/deps/
|
||||
srv/
|
||||
scripts/docker/i/
|
||||
scripts/deps-docker/uncomment.py
|
||||
contrib/package/arch/pkg/
|
||||
contrib/package/arch/src/
|
||||
|
||||
# state/logs
|
||||
up.*.txt
|
||||
.hist/
|
||||
scripts/docker/*.out
|
||||
scripts/docker/*.err
|
||||
/perf.*
|
||||
|
||||
# nix build output link
|
||||
result
|
||||
|
||||
# IDEA config
|
||||
.idea/
|
||||
|
|
8
.vscode/launch.json
vendored
|
@ -8,18 +8,14 @@
|
|||
"module": "copyparty",
|
||||
"console": "integratedTerminal",
|
||||
"cwd": "${workspaceFolder}",
|
||||
"justMyCode": false,
|
||||
"env": {
|
||||
"PYDEVD_DISABLE_FILE_VALIDATION": "1",
|
||||
"PYTHONWARNINGS": "always", //error
|
||||
},
|
||||
"args": [
|
||||
//"-nw",
|
||||
"-ed",
|
||||
"-emp",
|
||||
"-e2dsa",
|
||||
"-e2ts",
|
||||
"-mtp=.bpm=f,bin/mtag/audio-bpm.py",
|
||||
"-mtp",
|
||||
".bpm=f,bin/mtag/audio-bpm.py",
|
||||
"-aed:wark",
|
||||
"-vsrv::r:rw,ed:c,dupe",
|
||||
"-vdist:dist:r"
|
||||
|
|
20
.vscode/launch.py
vendored
Executable file → Normal file
|
@ -1,5 +1,3 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# takes arguments from launch.json
|
||||
# is used by no_dbg in tasks.json
|
||||
# launches 10x faster than mspython debugpy
|
||||
|
@ -11,15 +9,15 @@ import sys
|
|||
|
||||
print(sys.executable)
|
||||
|
||||
import json5
|
||||
import shlex
|
||||
import jstyleson
|
||||
import subprocess as sp
|
||||
|
||||
|
||||
with open(".vscode/launch.json", "r", encoding="utf-8") as f:
|
||||
tj = f.read()
|
||||
|
||||
oj = json5.loads(tj)
|
||||
oj = jstyleson.loads(tj)
|
||||
argv = oj["configurations"][0]["args"]
|
||||
|
||||
try:
|
||||
|
@ -30,18 +28,8 @@ except:
|
|||
|
||||
argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv]
|
||||
|
||||
sfx = ""
|
||||
if len(sys.argv) > 1 and os.path.isfile(sys.argv[1]):
|
||||
sfx = sys.argv[1]
|
||||
sys.argv = [sys.argv[0]] + sys.argv[2:]
|
||||
|
||||
argv += sys.argv[1:]
|
||||
|
||||
if sfx:
|
||||
argv = [sys.executable, sfx] + argv
|
||||
sp.check_call(argv)
|
||||
elif re.search(" -j ?[0-9]", " ".join(argv)):
|
||||
argv = [sys.executable, "-Wa", "-m", "copyparty"] + argv
|
||||
if re.search(" -j ?[0-9]", " ".join(argv)):
|
||||
argv = [sys.executable, "-m", "copyparty"] + argv
|
||||
sp.check_call(argv)
|
||||
else:
|
||||
sys.path.insert(0, os.getcwd())
|
||||
|
|
28
.vscode/settings.json
vendored
|
@ -22,10 +22,8 @@
|
|||
"terminal.ansiBrightCyan": "#9cf0ed",
|
||||
"terminal.ansiBrightWhite": "#ffffff",
|
||||
},
|
||||
"python.terminal.activateEnvironment": false,
|
||||
"python.analysis.enablePytestSupport": false,
|
||||
"python.analysis.typeCheckingMode": "standard",
|
||||
"python.testing.pytestEnabled": false,
|
||||
"python.testing.nosetestsEnabled": false,
|
||||
"python.testing.unittestEnabled": true,
|
||||
"python.testing.unittestArgs": [
|
||||
"-v",
|
||||
|
@ -34,16 +32,28 @@
|
|||
"-p",
|
||||
"test_*.py"
|
||||
],
|
||||
// python3 -m isort --py=27 --profile=black ~/dev/copyparty/{copyparty,tests}/*.py && python3 -m black -t py27 ~/dev/copyparty/{copyparty,tests,bin}/*.py $(find ~/dev/copyparty/copyparty/stolen -iname '*.py')
|
||||
"editor.formatOnSave": false,
|
||||
"python.linting.pylintEnabled": true,
|
||||
"python.linting.flake8Enabled": true,
|
||||
"python.linting.banditEnabled": true,
|
||||
"python.linting.flake8Args": [
|
||||
"--max-line-length=120",
|
||||
"--ignore=E722,F405,E203,W503,W293,E402",
|
||||
],
|
||||
"python.linting.banditArgs": [
|
||||
"--ignore=B104"
|
||||
],
|
||||
"python.formatting.provider": "black",
|
||||
"editor.formatOnSave": true,
|
||||
"[html]": {
|
||||
"editor.formatOnSave": false,
|
||||
"editor.autoIndent": "keep",
|
||||
},
|
||||
"[css]": {
|
||||
"editor.formatOnSave": false,
|
||||
},
|
||||
"files.associations": {
|
||||
"*.makefile": "makefile"
|
||||
},
|
||||
"python.formatting.blackArgs": [
|
||||
"-t",
|
||||
"py27"
|
||||
],
|
||||
"python.linting.enabled": true,
|
||||
"python.pythonPath": "/usr/bin/python3"
|
||||
}
|
1
.vscode/tasks.json
vendored
|
@ -11,7 +11,6 @@
|
|||
"type": "shell",
|
||||
"command": "${config:python.pythonPath}",
|
||||
"args": [
|
||||
"-Wa", //-We
|
||||
".vscode/launch.py"
|
||||
]
|
||||
}
|
||||
|
|
|
@ -1,58 +1,3 @@
|
|||
* **found a bug?** [create an issue!](https://github.com/9001/copyparty/issues) or let me know in the [discord](https://discord.gg/25J8CdTT6G) :>
* **fixed a bug?** create a PR or post a patch! big thx in advance :>
* **have a cool idea?** let's discuss it! anywhere's fine, you choose.
* do something cool

but please:


# do not use AI / LLM when writing code

copyparty is 100% organic, free-range, human-written software!

> ⚠ you are now entering a no-copilot zone

the *only* place where LLM/AI *may* be accepted is for [localization](https://github.com/9001/copyparty/tree/hovudstraum/docs/rice#translations) if you are fluent and have confirmed that the translation is accurate.

sorry for the harsh tone, but this is important to me 🙏


# contribution ideas


## documentation

I think we can agree that the documentation leaves a LOT to be desired. I've realized I'm not exactly qualified for this 😅 but maybe the [soon-to-come setup GUI](https://github.com/9001/copyparty/issues/57) will make this more manageable. The best documentation is the one that never had to be written, right? :> so I suppose we can give this a wait-and-see approach for a bit longer.


## crazy ideas & features

assuming they won't cause too many problems or side-effects :>

i think someone was working on a way to list directories over DNS for example...

if you wanna have a go at coding it up yourself then maybe mention the idea on discord before you get too far, otherwise just go nuts 👍


## others

aside from documentation and ideas, some other things that would be cool to have some help with are:

* **translations** -- the copyparty web-UI has translations for english and norwegian at the top of [browser.js](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/web/browser.js); if you'd like to add a translation for another language then that'd be welcome! and if that language has a grammar that doesn't fit into the way the strings are assembled, then we'll fix that as we go :>
  * but please note that support for [RTL (Right-to-Left) languages](https://en.wikipedia.org/wiki/Right-to-left_script) is currently not planned, since the javascript is a bit too jank for that
* **UI ideas** -- at some point I was thinking of rewriting the UI in react/preact/something-not-vanilla-javascript, but I'll admit the comfiness of not having any build stage combined with raw performance has kinda convinced me otherwise :p but I'd be very open to ideas on how the UI could be improved, or be more intuitive.
* **docker improvements** -- I don't really know what I'm doing when it comes to containers, so I'm sure there's *huge* room for improvement here, mainly regarding how you're supposed to use the container with kubernetes / docker-compose / any of the other popular ways to do things. At some point I swear I'll start learning about docker so I can pick up clach04's [docker-compose draft](https://github.com/9001/copyparty/issues/38) and learn how that stuff ticks, unless someone beats me to it!
* **packaging** for various linux distributions -- this could either be as simple as just plopping the sfx.py in the right place and calling that from systemd (the archlinux package [originally did this](https://github.com/9001/copyparty/pull/18)); maybe with a small config-file which would cause copyparty to load settings from `/etc/copyparty.d` (like the [archlinux package](https://github.com/9001/copyparty/tree/hovudstraum/contrib/package/arch) does with `copyparty.conf`), or it could be a proper installation of the copyparty python package into /usr/lib or similar (the archlinux package [eventually went for this approach](https://github.com/9001/copyparty/pull/26))
  * [fpm](https://github.com/jordansissel/fpm) can probably help with the technical part of it, but someone needs to handle distro relations :-)
* **software integration** -- I'm sure there's a lot of usecases where copyparty could complement something else, or the other way around, so any ideas or any work in this regard would be dope. This doesn't necessarily have to be code inside copyparty itself;
  * [hooks](https://github.com/9001/copyparty/tree/hovudstraum/bin/hooks) -- these are small programs which are called by copyparty when certain things happen (files are uploaded, someone hits a 404, etc.), and could be a fun way to add support for more usecases
  * [parser plugins](https://github.com/9001/copyparty/tree/hovudstraum/bin/mtag) -- if you want to have copyparty analyze and index metadata for some oddball file-formats, then additional plugins would be neat :> (a rough sketch of the general idea follows below)
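to make the parser-plugin idea a bit more concrete, here's a *very* rough sketch -- not an official template, and the filename/flag in the comment are made up; the one thing it assumes (based on the bundled plugins such as audio-bpm.py) is that an mtp plugin receives the filepath as its only argument and prints the tag value to stdout:

```py
#!/usr/bin/env python3
# hypothetical mtp plugin: counts the lines in an uploaded textfile
# and prints that count as the tag value; copyparty would be pointed
# at it with something like  -mtp .lines=bin/mtag/linecount.py  (illustrative)

import sys


def main():
    fp = sys.argv[1]  # the file to analyze, passed in by copyparty
    with open(fp, "rb") as f:
        n = sum(1 for _ in f)
    print(n)  # the tag value is whatever lands on stdout


if __name__ == "__main__":
    main()
```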
really tho, send a PR or an issue or whatever, all appreciated, anything goes, just behave aight
|
||||
|
|
2
LICENSE
|
@ -1,6 +1,6 @@
|
|||
MIT License
|
||||
|
||||
Copyright (c) 2019 ed <oss@ocv.me>
|
||||
Copyright (c) 2019 ed
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
|
|
|
@ -1,9 +0,0 @@
|
|||
# Security Policy

if you hit something extra juicy pls let me know on any of the following

* email -- `copyparty@ocv.ze` except `ze` should be `me`
* [mastodon dm](https://layer8.space/@tripflag) -- `@tripflag@layer8.space`
* [github private vulnerability report](https://github.com/9001/copyparty/security/advisories/new), wow that form is complicated
* [twitter dm](https://twitter.com/tripflag) (if im somehow not banned yet)

no bug bounties sorry! all i can offer is greetz in the release notes
|
|
@ -1,45 +1,43 @@
|
|||
# [`u2c.py`](u2c.py)
|
||||
# [`up2k.py`](up2k.py)
|
||||
* command-line up2k client [(webm)](https://ocv.me/stuff/u2cli.webm)
|
||||
* file uploads, file-search, autoresume of aborted/broken uploads
|
||||
* sync local folder to server
|
||||
* generally faster than browsers
|
||||
* faster than browsers
|
||||
* if something breaks just restart it
|
||||
|
||||
|
||||
# [`partyjournal.py`](partyjournal.py)
|
||||
produces a chronological list of all uploads by collecting info from up2k databases and the filesystem
|
||||
* outputs a standalone html file
|
||||
* optional mapping from IP-addresses to nicknames
|
||||
|
||||
|
||||
# [`partyfuse.py`](partyfuse.py)
|
||||
# [`copyparty-fuse.py`](copyparty-fuse.py)
|
||||
* mount a copyparty server as a local filesystem (read-only)
|
||||
* **supports Windows!** -- expect `194 MiB/s` sequential read
|
||||
* **supports Linux** -- expect `600 MiB/s` sequential read
|
||||
* **supports Linux** -- expect `117 MiB/s` sequential read
|
||||
* **supports macos** -- expect `85 MiB/s` sequential read
|
||||
|
||||
filecache is default-on for windows and macos;
|
||||
* macos readsize is 64kB, so speed ~32 MiB/s without the cache
|
||||
* windows readsize varies by software; explorer=1M, pv=32k
|
||||
|
||||
note that copyparty should run with `-ed` to enable dotfiles (hidden otherwise)
|
||||
|
||||
and consider using [../docs/rclone.md](../docs/rclone.md) instead; usually a bit faster, especially on windows
|
||||
also consider using [../docs/rclone.md](../docs/rclone.md) instead for 5x performance
|
||||
|
||||
|
||||
## to run this on windows:
|
||||
* install [winfsp](https://github.com/billziss-gh/winfsp/releases/latest) and [python 3](https://www.python.org/downloads/)
|
||||
* [x] add python 3.x to PATH (it asks during install)
|
||||
* `python -m pip install --user fusepy` (or grab a copy of `fuse.py` from the `connect` page on your copyparty, and keep it in the same folder)
|
||||
* `python ./partyfuse.py n: http://192.168.1.69:3923/`
|
||||
* `python -m pip install --user fusepy`
|
||||
* `python ./copyparty-fuse.py n: http://192.168.1.69:3923/`
|
||||
|
||||
10% faster in [msys2](https://www.msys2.org/), 700% faster if debug prints are enabled:
|
||||
* `pacman -S mingw64/mingw-w64-x86_64-python{,-pip}`
|
||||
* `/mingw64/bin/python3 -m pip install --user fusepy`
|
||||
* `/mingw64/bin/python3 ./partyfuse.py [...]`
|
||||
* `/mingw64/bin/python3 ./copyparty-fuse.py [...]`
|
||||
|
||||
you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releases/latest), let me know if you [figure out how](https://github.com/dokan-dev/dokany/wiki/FUSE)
|
||||
(winfsp's sshfs leaks, doesn't look like winfsp itself does, should be fine)
|
||||
|
||||
|
||||
|
||||
# [`partyfuse2.py`](partyfuse2.py)
|
||||
# [`copyparty-fuse🅱️.py`](copyparty-fuseb.py)
|
||||
* mount a copyparty server as a local filesystem (read-only)
|
||||
* does the same thing except more correct, `samba` approves
|
||||
* **supports Linux** -- expect `18 MiB/s` (wait what)
|
||||
|
@ -47,7 +45,7 @@ you could replace winfsp with [dokan](https://github.com/dokan-dev/dokany/releas
|
|||
|
||||
|
||||
|
||||
# [`partyfuse-streaming.py`](partyfuse-streaming.py)
|
||||
# [`copyparty-fuse-streaming.py`](copyparty-fuse-streaming.py)
|
||||
* pretend this doesn't exist
|
||||
|
||||
|
||||
|
@ -78,6 +76,3 @@ cd /mnt/nas/music/.hist
|
|||
# [`prisonparty.sh`](prisonparty.sh)
|
||||
* run copyparty in a chroot, preventing any accidental file access
|
||||
* creates bindmounts for /bin, /lib, and so on, see `sysdirs=`
|
||||
|
||||
# [`bubbleparty.sh`](bubbleparty.sh)
|
||||
* run copyparty in an isolated process, preventing any accidental file access and more
|
||||
|
|
|
@ -1,19 +0,0 @@
|
|||
#!/bin/sh
|
||||
# usage: ./bubbleparty.sh ./copyparty-sfx.py ....
|
||||
bwrap \
|
||||
--unshare-all \
|
||||
--ro-bind /usr /usr \
|
||||
--ro-bind /bin /bin \
|
||||
--ro-bind /lib /lib \
|
||||
--ro-bind /etc/resolv.conf /etc/resolv.conf \
|
||||
--dev-bind /dev /dev \
|
||||
--dir /tmp \
|
||||
--dir /var \
|
||||
--bind $(pwd) $(pwd) \
|
||||
--share-net \
|
||||
--die-with-parent \
|
||||
--file 11 /etc/passwd \
|
||||
--file 12 /etc/group \
|
||||
"$@" \
|
||||
11< <(getent passwd $(id -u) 65534) \
|
||||
12< <(getent group $(id -g) 65534)
|
|
@ -1,7 +1,7 @@
|
|||
#!/usr/bin/env python3
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
"""partyfuse-streaming: remote copyparty as a local filesystem"""
|
||||
"""copyparty-fuse-streaming: remote copyparty as a local filesystem"""
|
||||
__author__ = "ed <copyparty@ocv.me>"
|
||||
__copyright__ = 2020
|
||||
__license__ = "MIT"
|
||||
|
@ -12,7 +12,7 @@ __url__ = "https://github.com/9001/copyparty/"
|
|||
mount a copyparty server (local or remote) as a filesystem
|
||||
|
||||
usage:
|
||||
python partyfuse-streaming.py http://192.168.1.69:3923/ ./music
|
||||
python copyparty-fuse-streaming.py http://192.168.1.69:3923/ ./music
|
||||
|
||||
dependencies:
|
||||
python3 -m pip install --user fusepy
|
||||
|
@ -21,7 +21,7 @@ dependencies:
|
|||
+ on Windows: https://github.com/billziss-gh/winfsp/releases/latest
|
||||
|
||||
this was a mistake:
|
||||
fork of partyfuse.py with a streaming cache rather than readahead,
|
||||
fork of copyparty-fuse.py with a streaming cache rather than readahead,
|
||||
thought this was gonna be way faster (and it kind of is)
|
||||
except the overhead of reopening connections on trunc totally kills it
|
||||
"""
|
||||
|
@ -42,7 +42,6 @@ import threading
|
|||
import traceback
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
import calendar
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
@ -62,12 +61,12 @@ except:
|
|||
else:
|
||||
libfuse = "apt install libfuse\n modprobe fuse"
|
||||
|
||||
m = """\033[33m
|
||||
could not import fuse; these may help:
|
||||
{} -m pip install --user fusepy
|
||||
{}
|
||||
\033[0m"""
|
||||
print(m.format(sys.executable, libfuse))
|
||||
print(
|
||||
"\n could not import fuse; these may help:"
|
||||
+ "\n python3 -m pip install --user fusepy\n "
|
||||
+ libfuse
|
||||
+ "\n"
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
|
@ -154,7 +153,7 @@ def dewin(txt):
|
|||
class RecentLog(object):
|
||||
def __init__(self):
|
||||
self.mtx = threading.Lock()
|
||||
self.f = None # open("partyfuse.log", "wb")
|
||||
self.f = None # open("copyparty-fuse.log", "wb")
|
||||
self.q = []
|
||||
|
||||
thr = threading.Thread(target=self.printer)
|
||||
|
@ -185,9 +184,9 @@ class RecentLog(object):
|
|||
print("".join(q), end="")
|
||||
|
||||
|
||||
# [windows/cmd/cpy3] python dev\copyparty\bin\partyfuse.py q: http://192.168.1.159:1234/
|
||||
# [windows/cmd/msys2] C:\msys64\mingw64\bin\python3 dev\copyparty\bin\partyfuse.py q: http://192.168.1.159:1234/
|
||||
# [windows/mty/msys2] /mingw64/bin/python3 /c/Users/ed/dev/copyparty/bin/partyfuse.py q: http://192.168.1.159:1234/
|
||||
# [windows/cmd/cpy3] python dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/
|
||||
# [windows/cmd/msys2] C:\msys64\mingw64\bin\python3 dev\copyparty\bin\copyparty-fuse.py q: http://192.168.1.159:1234/
|
||||
# [windows/mty/msys2] /mingw64/bin/python3 /c/Users/ed/dev/copyparty/bin/copyparty-fuse.py q: http://192.168.1.159:1234/
|
||||
#
|
||||
# [windows] find /q/music/albums/Phant*24bit -printf '%s %p\n' | sort -n | tail -n 8 | sed -r 's/^[0-9]+ //' | while IFS= read -r x; do dd if="$x" of=/dev/null bs=4k count=8192 & done
|
||||
# [alpine] ll t; for x in t/2020_0724_16{2,3}*; do dd if="$x" of=/dev/null bs=4k count=10240 & done
|
||||
|
@ -496,7 +495,7 @@ class Gateway(object):
|
|||
ts = 60 * 60 * 24 * 2
|
||||
try:
|
||||
sz = int(fsize)
|
||||
ts = calendar.timegm(time.strptime(fdate, "%Y-%m-%d %H:%M:%S"))
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
except:
|
||||
info("bad HTML or OS [{}] [{}]".format(fdate, fsize))
|
||||
# python cannot strptime(1959-01-01) on windows
|
File diff suppressed because it is too large
Load diff
|
@ -1,7 +1,7 @@
|
|||
#!/usr/bin/env python3
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
"""partyfuse2: remote copyparty as a local filesystem"""
|
||||
"""copyparty-fuseb: remote copyparty as a local filesystem"""
|
||||
__author__ = "ed <copyparty@ocv.me>"
|
||||
__copyright__ = 2020
|
||||
__license__ = "MIT"
|
||||
|
@ -11,18 +11,14 @@ import re
|
|||
import os
|
||||
import sys
|
||||
import time
|
||||
import json
|
||||
import stat
|
||||
import errno
|
||||
import struct
|
||||
import codecs
|
||||
import platform
|
||||
import threading
|
||||
import http.client # py2: httplib
|
||||
import urllib.parse
|
||||
from datetime import datetime
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
try:
|
||||
import fuse
|
||||
|
@ -32,19 +28,9 @@ try:
|
|||
if not hasattr(fuse, "__version__"):
|
||||
raise Exception("your fuse-python is way old")
|
||||
except:
|
||||
if WINDOWS:
|
||||
libfuse = "install https://github.com/billziss-gh/winfsp/releases/latest"
|
||||
elif MACOS:
|
||||
libfuse = "install https://osxfuse.github.io/"
|
||||
else:
|
||||
libfuse = "apt install libfuse\n modprobe fuse"
|
||||
|
||||
m = """\033[33m
|
||||
could not import fuse; these may help:
|
||||
{} -m pip install --user fuse-python
|
||||
{}
|
||||
\033[0m"""
|
||||
print(m.format(sys.executable, libfuse))
|
||||
print(
|
||||
"\n could not import fuse; these may help:\n python3 -m pip install --user fuse-python\n apt install libfuse\n modprobe fuse\n"
|
||||
)
|
||||
raise
|
||||
|
||||
|
||||
|
@ -52,22 +38,18 @@ except:
|
|||
mount a copyparty server (local or remote) as a filesystem
|
||||
|
||||
usage:
|
||||
python ./partyfuse2.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas
|
||||
python ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas
|
||||
|
||||
dependencies:
|
||||
sudo apk add fuse-dev python3-dev
|
||||
python3 -m pip install --user fuse-python
|
||||
|
||||
fork of partyfuse.py based on fuse-python which
|
||||
fork of copyparty-fuse.py based on fuse-python which
|
||||
appears to be more compliant than fusepy? since this works with samba
|
||||
(probably just my garbage code tbh)
|
||||
"""
|
||||
|
||||
|
||||
WINDOWS = sys.platform == "win32"
|
||||
MACOS = platform.system() == "Darwin"
|
||||
|
||||
|
||||
def threadless_log(msg):
|
||||
print(msg + "\n", end="")
|
||||
|
||||
|
@ -111,41 +93,6 @@ def html_dec(txt):
|
|||
)
|
||||
|
||||
|
||||
def register_wtf8():
|
||||
def wtf8_enc(text):
|
||||
return str(text).encode("utf-8", "surrogateescape"), len(text)
|
||||
|
||||
def wtf8_dec(binary):
|
||||
return bytes(binary).decode("utf-8", "surrogateescape"), len(binary)
|
||||
|
||||
def wtf8_search(encoding_name):
|
||||
return codecs.CodecInfo(wtf8_enc, wtf8_dec, name="wtf-8")
|
||||
|
||||
codecs.register(wtf8_search)
|
||||
|
||||
|
||||
bad_good = {}
|
||||
good_bad = {}
|
||||
|
||||
|
||||
def enwin(txt):
|
||||
return "".join([bad_good.get(x, x) for x in txt])
|
||||
|
||||
for bad, good in bad_good.items():
|
||||
txt = txt.replace(bad, good)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
def dewin(txt):
|
||||
return "".join([good_bad.get(x, x) for x in txt])
|
||||
|
||||
for bad, good in bad_good.items():
|
||||
txt = txt.replace(good, bad)
|
||||
|
||||
return txt
|
||||
|
||||
|
||||
class CacheNode(object):
|
||||
def __init__(self, tag, data):
|
||||
self.tag = tag
|
||||
|
@ -168,9 +115,8 @@ class Stat(fuse.Stat):
|
|||
|
||||
|
||||
class Gateway(object):
|
||||
def __init__(self, base_url, pw):
|
||||
def __init__(self, base_url):
|
||||
self.base_url = base_url
|
||||
self.pw = pw
|
||||
|
||||
ui = urllib.parse.urlparse(base_url)
|
||||
self.web_root = ui.path.strip("/")
|
||||
|
@ -189,7 +135,8 @@ class Gateway(object):
|
|||
self.conns = {}
|
||||
|
||||
def quotep(self, path):
|
||||
path = path.encode("wtf-8")
|
||||
# TODO: mojibake support
|
||||
path = path.encode("utf-8", "ignore")
|
||||
return quote(path, safe="/")
|
||||
|
||||
def getconn(self, tid=None):
|
||||
|
@ -212,29 +159,20 @@ class Gateway(object):
|
|||
except:
|
||||
pass
|
||||
|
||||
def sendreq(self, *args, **ka):
|
||||
def sendreq(self, *args, **kwargs):
|
||||
tid = get_tid()
|
||||
if self.pw:
|
||||
ck = "cppwd=" + self.pw
|
||||
try:
|
||||
ka["headers"]["Cookie"] = ck
|
||||
except:
|
||||
ka["headers"] = {"Cookie": ck}
|
||||
try:
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **ka)
|
||||
c.request(*list(args), **kwargs)
|
||||
return c.getresponse()
|
||||
except:
|
||||
self.closeconn(tid)
|
||||
c = self.getconn(tid)
|
||||
c.request(*list(args), **ka)
|
||||
c.request(*list(args), **kwargs)
|
||||
return c.getresponse()
|
||||
|
||||
def listdir(self, path):
|
||||
if bad_good:
|
||||
path = dewin(path)
|
||||
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots&ls"
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?dots"
|
||||
r = self.sendreq("GET", web_path)
|
||||
if r.status != 200:
|
||||
self.closeconn()
|
||||
|
@ -244,12 +182,9 @@ class Gateway(object):
|
|||
)
|
||||
)
|
||||
|
||||
return self.parse_jls(r)
|
||||
return self.parse_html(r)
|
||||
|
||||
def download_file_range(self, path, ofs1, ofs2):
|
||||
if bad_good:
|
||||
path = dewin(path)
|
||||
|
||||
web_path = self.quotep("/" + "/".join([self.web_root, path])) + "?raw"
|
||||
hdr_range = "bytes={}-{}".format(ofs1, ofs2 - 1)
|
||||
log("downloading {}".format(hdr_range))
|
||||
|
@ -265,27 +200,40 @@ class Gateway(object):
|
|||
|
||||
return r.read()
|
||||
|
||||
def parse_jls(self, datasrc):
|
||||
rsp = b""
|
||||
def parse_html(self, datasrc):
|
||||
ret = []
|
||||
remainder = b""
|
||||
ptn = re.compile(
|
||||
r"^<tr><td>(-|DIR)</td><td><a [^>]+>([^<]+)</a></td><td>([^<]+)</td><td>([^<]+)</td></tr>$"
|
||||
)
|
||||
|
||||
while True:
|
||||
buf = datasrc.read(1024 * 32)
|
||||
buf = remainder + datasrc.read(4096)
|
||||
# print('[{}]'.format(buf.decode('utf-8')))
|
||||
if not buf:
|
||||
break
|
||||
|
||||
rsp += buf
|
||||
remainder = b""
|
||||
endpos = buf.rfind(b"\n")
|
||||
if endpos >= 0:
|
||||
remainder = buf[endpos + 1 :]
|
||||
buf = buf[:endpos]
|
||||
|
||||
rsp = json.loads(rsp.decode("utf-8"))
|
||||
ret = []
|
||||
for statfun, nodes in [
|
||||
[self.stat_dir, rsp["dirs"]],
|
||||
[self.stat_file, rsp["files"]],
|
||||
]:
|
||||
for n in nodes:
|
||||
fname = unquote(n["href"].split("?")[0]).rstrip(b"/").decode("wtf-8")
|
||||
if bad_good:
|
||||
fname = enwin(fname)
|
||||
lines = buf.decode("utf-8").split("\n")
|
||||
for line in lines:
|
||||
m = ptn.match(line)
|
||||
if not m:
|
||||
# print(line)
|
||||
continue
|
||||
|
||||
ret.append([fname, statfun(n["ts"], n["sz"]), 0])
|
||||
ftype, fname, fsize, fdate = m.groups()
|
||||
fname = html_dec(fname)
|
||||
ts = datetime.strptime(fdate, "%Y-%m-%d %H:%M:%S").timestamp()
|
||||
sz = int(fsize)
|
||||
if ftype == "-":
|
||||
ret.append([fname, self.stat_file(ts, sz), 0])
|
||||
else:
|
||||
ret.append([fname, self.stat_dir(ts, sz), 0])
|
||||
|
||||
return ret
|
||||
|
||||
|
@ -314,7 +262,6 @@ class CPPF(Fuse):
|
|||
Fuse.__init__(self, *args, **kwargs)
|
||||
|
||||
self.url = None
|
||||
self.pw = None
|
||||
|
||||
self.dircache = []
|
||||
self.dircache_mtx = threading.Lock()
|
||||
|
@ -324,7 +271,7 @@ class CPPF(Fuse):
|
|||
|
||||
def init2(self):
|
||||
# TODO figure out how python-fuse wanted this to go
|
||||
self.gw = Gateway(self.url, self.pw) # .decode('utf-8'))
|
||||
self.gw = Gateway(self.url) # .decode('utf-8'))
|
||||
info("up")
|
||||
|
||||
def clean_dircache(self):
|
||||
|
@ -589,8 +536,6 @@ class CPPF(Fuse):
|
|||
|
||||
def getattr(self, path):
|
||||
log("getattr [{}]".format(path))
|
||||
if WINDOWS:
|
||||
path = enwin(path) # windows occasionally decodes f0xx to xx
|
||||
|
||||
path = path.strip("/")
|
||||
try:
|
||||
|
@ -623,25 +568,9 @@ class CPPF(Fuse):
|
|||
|
||||
def main():
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
register_wtf8()
|
||||
if WINDOWS:
|
||||
os.system("rem")
|
||||
|
||||
for ch in '<>:"\\|?*':
|
||||
# microsoft maps illegal characters to f0xx
|
||||
# (e000 to f8ff is basic-plane private-use)
|
||||
bad_good[ch] = chr(ord(ch) + 0xF000)
|
||||
|
||||
for n in range(0, 0x100):
|
||||
# map surrogateescape to another private-use area
|
||||
bad_good[chr(n + 0xDC00)] = chr(n + 0xF100)
|
||||
|
||||
for k, v in bad_good.items():
|
||||
good_bad[v] = k
|
||||
|
||||
server = CPPF()
|
||||
server.parser.add_option(mountopt="url", metavar="BASE_URL", default=None)
|
||||
server.parser.add_option(mountopt="pw", metavar="PASSWORD", default=None)
|
||||
server.parse(values=server, errex=1)
|
||||
if not server.url or not str(server.url).startswith("http"):
|
||||
print("\nerror:")
|
||||
|
@ -649,7 +578,7 @@ def main():
|
|||
print(" need argument: mount-path")
|
||||
print("example:")
|
||||
print(
|
||||
" ./partyfuse2.py -f -o allow_other,auto_unmount,nonempty,pw=wark,url=http://192.168.1.69:3923 /mnt/nas"
|
||||
" ./copyparty-fuseb.py -f -o allow_other,auto_unmount,nonempty,url=http://192.168.1.69:3923 /mnt/nas"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
112
bin/dbtool.py
|
@ -8,10 +8,7 @@ import sqlite3
|
|||
import argparse
|
||||
|
||||
DB_VER1 = 3
|
||||
DB_VER2 = 5
|
||||
|
||||
BY_PATH = None
|
||||
NC = None
|
||||
DB_VER2 = 4
|
||||
|
||||
|
||||
def die(msg):
|
||||
|
@ -60,13 +57,8 @@ def compare(n1, d1, n2, d2, verbose):
|
|||
if rd.split("/", 1)[0] == ".hist":
|
||||
continue
|
||||
|
||||
if BY_PATH:
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
hit = d2.execute(q, (rd, fn)).fetchone()
|
||||
else:
|
||||
q = "select w from up where substr(w,1,16) = ? and +w = ?"
|
||||
hit = d2.execute(q, (w1[:16], w1)).fetchone()
|
||||
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
hit = d2.execute(q, (rd, fn)).fetchone()
|
||||
if not hit:
|
||||
miss += 1
|
||||
if verbose:
|
||||
|
@ -78,32 +70,27 @@ def compare(n1, d1, n2, d2, verbose):
|
|||
n = 0
|
||||
miss = {}
|
||||
nmiss = 0
|
||||
for w1s, k, v in d1.execute("select * from mt"):
|
||||
for w1, k, v in d1.execute("select * from mt"):
|
||||
|
||||
n += 1
|
||||
if n % 100_000 == 0:
|
||||
m = f"\033[36mchecked {n:,} of {nt:,} tags in {n1} against {n2}, so far {nmiss} missing tags\033[0m"
|
||||
print(m)
|
||||
|
||||
q = "select w, rd, fn from up where substr(w,1,16) = ?"
|
||||
w1, rd, fn = d1.execute(q, (w1s,)).fetchone()
|
||||
q = "select rd, fn from up where substr(w,1,16) = ?"
|
||||
rd, fn = d1.execute(q, (w1,)).fetchone()
|
||||
if rd.split("/", 1)[0] == ".hist":
|
||||
continue
|
||||
|
||||
if BY_PATH:
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
w2 = d2.execute(q, (rd, fn)).fetchone()
|
||||
else:
|
||||
q = "select w from up where substr(w,1,16) = ? and +w = ?"
|
||||
w2 = d2.execute(q, (w1s, w1)).fetchone()
|
||||
|
||||
q = "select substr(w,1,16) from up where rd = ? and fn = ?"
|
||||
w2 = d2.execute(q, (rd, fn)).fetchone()
|
||||
if w2:
|
||||
w2 = w2[0]
|
||||
|
||||
v2 = None
|
||||
if w2:
|
||||
v2 = d2.execute(
|
||||
"select v from mt where w = ? and +k = ?", (w2[:16], k)
|
||||
"select v from mt where w = ? and +k = ?", (w2, k)
|
||||
).fetchone()
|
||||
if v2:
|
||||
v2 = v2[0]
|
||||
|
@ -137,7 +124,7 @@ def compare(n1, d1, n2, d2, verbose):
|
|||
|
||||
for k, v in sorted(miss.items()):
|
||||
if v:
|
||||
print(f"{n1} has {v:7} more {k:<7} tags than {n2}")
|
||||
print(f"{n1} has {v:6} more {k:<6} tags than {n2}")
|
||||
|
||||
print(f"in total, {nmiss} missing tags in {n2}\n")
|
||||
|
||||
|
@ -145,75 +132,47 @@ def compare(n1, d1, n2, d2, verbose):
|
|||
def copy_mtp(d1, d2, tag, rm):
|
||||
nt = next(d1.execute("select count(w) from mt where k = ?", (tag,)))[0]
|
||||
n = 0
|
||||
ncopy = 0
|
||||
nskip = 0
|
||||
for w1s, k, v in d1.execute("select * from mt where k = ?", (tag,)):
|
||||
ndone = 0
|
||||
for w1, k, v in d1.execute("select * from mt where k = ?", (tag,)):
|
||||
n += 1
|
||||
if n % 25_000 == 0:
|
||||
m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ncopy} copied, {nskip} skipped\033[0m"
|
||||
m = f"\033[36m{n:,} of {nt:,} tags checked, so far {ndone} copied\033[0m"
|
||||
print(m)
|
||||
|
||||
q = "select w, rd, fn from up where substr(w,1,16) = ?"
|
||||
w1, rd, fn = d1.execute(q, (w1s,)).fetchone()
|
||||
q = "select rd, fn from up where substr(w,1,16) = ?"
|
||||
rd, fn = d1.execute(q, (w1,)).fetchone()
|
||||
if rd.split("/", 1)[0] == ".hist":
|
||||
continue
|
||||
|
||||
if BY_PATH:
|
||||
q = "select w from up where rd = ? and fn = ?"
|
||||
w2 = d2.execute(q, (rd, fn)).fetchone()
|
||||
else:
|
||||
q = "select w from up where substr(w,1,16) = ? and +w = ?"
|
||||
w2 = d2.execute(q, (w1s, w1)).fetchone()
|
||||
|
||||
q = "select substr(w,1,16) from up where rd = ? and fn = ?"
|
||||
w2 = d2.execute(q, (rd, fn)).fetchone()
|
||||
if not w2:
|
||||
continue
|
||||
|
||||
w2s = w2[0][:16]
|
||||
hit = d2.execute("select v from mt where w = ? and +k = ?", (w2s, k)).fetchone()
|
||||
w2 = w2[0]
|
||||
hit = d2.execute("select v from mt where w = ? and +k = ?", (w2, k)).fetchone()
|
||||
if hit:
|
||||
hit = hit[0]
|
||||
|
||||
if hit != v:
|
||||
if NC and hit is not None:
|
||||
nskip += 1
|
||||
continue
|
||||
|
||||
ncopy += 1
|
||||
ndone += 1
|
||||
if hit is not None:
|
||||
d2.execute("delete from mt where w = ? and +k = ?", (w2s, k))
|
||||
d2.execute("delete from mt where w = ? and +k = ?", (w2, k))
|
||||
|
||||
d2.execute("insert into mt values (?,?,?)", (w2s, k, v))
|
||||
d2.execute("insert into mt values (?,?,?)", (w2, k, v))
|
||||
if rm:
|
||||
d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w2s,))
|
||||
d2.execute("delete from mt where w = ? and +k = 't:mtp'", (w2,))
|
||||
|
||||
d2.commit()
|
||||
print(f"copied {ncopy} {tag} tags over, skipped {nskip}")
|
||||
|
||||
|
||||
def examples():
|
||||
print(
|
||||
"""
|
||||
# clearing the journal
|
||||
./dbtool.py up2k.db
|
||||
|
||||
# copy tags ".bpm" and "key" from old.db to up2k.db, and remove the mtp flag from matching files (so copyparty won't run any mtps on it)
|
||||
./dbtool.py -ls up2k.db
|
||||
./dbtool.py -src old.db up2k.db -cmp
|
||||
./dbtool.py -src old.v3 up2k.db -rm-mtp-flag -copy key
|
||||
./dbtool.py -src old.v3 up2k.db -rm-mtp-flag -copy .bpm -vac
|
||||
|
||||
"""
|
||||
)
|
||||
print(f"copied {ndone} {tag} tags over")
|
||||
|
||||
|
||||
def main():
|
||||
global NC, BY_PATH # pylint: disable=global-statement
|
||||
os.system("")
|
||||
print()
|
||||
|
||||
ap = argparse.ArgumentParser()
|
||||
ap.add_argument("db", help="database to work on")
|
||||
ap.add_argument("-h2", action="store_true", help="show examples")
|
||||
ap.add_argument("-src", metavar="DB", type=str, help="database to copy from")
|
||||
|
||||
ap2 = ap.add_argument_group("informational / read-only stuff")
|
||||
|
@ -226,29 +185,11 @@ def main():
|
|||
ap2.add_argument(
|
||||
"-rm-mtp-flag",
|
||||
action="store_true",
|
||||
help="when an mtp tag is copied over, also mark that file as done, so copyparty won't run any mtps on those files",
|
||||
help="when an mtp tag is copied over, also mark that as done, so copyparty won't run mtp on it",
|
||||
)
|
||||
ap2.add_argument("-vac", action="store_true", help="optimize DB")
|
||||
|
||||
ap2 = ap.add_argument_group("behavior modifiers")
|
||||
ap2.add_argument(
|
||||
"-nc",
|
||||
action="store_true",
|
||||
help="no-clobber; don't replace/overwrite existing tags",
|
||||
)
|
||||
ap2.add_argument(
|
||||
"-by-path",
|
||||
action="store_true",
|
||||
help="match files based on location rather than warks (content-hash), use this if the databases have different wark salts",
|
||||
)
|
||||
|
||||
ar = ap.parse_args()
|
||||
if ar.h2:
|
||||
examples()
|
||||
return
|
||||
|
||||
NC = ar.nc
|
||||
BY_PATH = ar.by_path
|
||||
|
||||
for v in [ar.db, ar.src]:
|
||||
if v and not os.path.exists(v):
|
||||
|
@ -282,8 +223,7 @@ def main():
|
|||
if ver == "corrupt":
|
||||
die("{} database appears to be corrupt, sorry")
|
||||
|
||||
iver = int(ver)
|
||||
if iver < DB_VER1 or iver > DB_VER2:
|
||||
if ver < DB_VER1 or ver > DB_VER2:
|
||||
m = f"{n} db is version {ver}, this tool only supports versions between {DB_VER1} and {DB_VER2}, please upgrade it with copyparty first"
|
||||
die(m)
|
||||
|
||||
|
|
|
@ -1,37 +0,0 @@
|
|||
replace the standard 404 / 403 responses with plugins


# usage

load plugins either globally with `--on404 ~/dev/copyparty/bin/handlers/sorry.py` or for a specific volume with `:c,on404=~/handlers/sorry.py`


# api

each plugin must define a `main()` which takes 3 arguments;

* `cli` is an instance of [copyparty/httpcli.py](https://github.com/9001/copyparty/blob/hovudstraum/copyparty/httpcli.py) (the monstrosity itself)
* `vn` is the VFS which overlaps with the requested URL, and
* `rem` is the URL remainder below the VFS mountpoint
  * so `vn.vpath + rem` == `cli.vpath` == original request
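to make the api a bit more tangible, here's a barebones sketch of a plugin -- not one of the bundled examples, just an illustration (the reply text is made up), but the `cli.reply(...)` call and the `"true"` return value follow the same pattern as the real examples listed below:

```py
# sketch of an on404 plugin; illustrative only

def main(cli, vn, rem):
    # cli.vpath is the original request-path (no leading slash),
    # and vn.canonical(rem) would be the filesystem path it maps to
    msg = "nothing at /%s -- maybe check the parent folder?" % (cli.vpath,)
    cli.reply(msg.encode("utf-8"), 404, "text/plain")
    return "true"  # tells copyparty the request has been handled
```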
# examples

## on404

* [redirect.py](redirect.py) sends an HTTP 301 or 302, redirecting the client to another page/file
* [randpic.py](randpic.py) redirects `/foo/bar/randpic.jpg` to a random pic in `/foo/bar/`
* [sorry.py](answer.py) replies with a custom message instead of the usual 404
* [nooo.py](nooo.py) replies with an endless noooooooooooooo
* [never404.py](never404.py) 100% guarantee that 404 will never be a thing again as it automatically creates dummy files whenever necessary
* [caching-proxy.py](caching-proxy.py) transforms copyparty into a squid/varnish knockoff

## on403

* [ip-ok.py](ip-ok.py) disables security checks if client-ip is 1.2.3.4


# notes

* on403 only works for trivial stuff (basic http access) since I haven't been able to think of any good usecases for it (was just easy to add while doing on404)
|
|
@ -1,36 +0,0 @@
|
|||
# assume each requested file exists on another webserver and
|
||||
# download + mirror them as they're requested
|
||||
# (basically pretend we're warnish)
|
||||
|
||||
import os
|
||||
import requests
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from copyparty.httpcli import HttpCli
|
||||
|
||||
|
||||
def main(cli: "HttpCli", vn, rem):
|
||||
url = "https://mirrors.edge.kernel.org/alpine/" + rem
|
||||
abspath = os.path.join(vn.realpath, rem)
|
||||
|
||||
# sneaky trick to preserve a requests-session between downloads
|
||||
# so it doesn't have to spend ages reopening https connections;
|
||||
# luckily we can stash it inside the copyparty client session,
|
||||
# name just has to be definitely unused so "hacapo_req_s" it is
|
||||
req_s = getattr(cli.conn, "hacapo_req_s", None) or requests.Session()
|
||||
setattr(cli.conn, "hacapo_req_s", req_s)
|
||||
|
||||
try:
|
||||
os.makedirs(os.path.dirname(abspath), exist_ok=True)
|
||||
with req_s.get(url, stream=True, timeout=69) as r:
|
||||
r.raise_for_status()
|
||||
with open(abspath, "wb", 64 * 1024) as f:
|
||||
for buf in r.iter_content(chunk_size=64 * 1024):
|
||||
f.write(buf)
|
||||
except:
|
||||
os.unlink(abspath)
|
||||
return "false"
|
||||
|
||||
return "retry"
|
|
@ -1,6 +0,0 @@
|
|||
# disable permission checks and allow access if client-ip is 1.2.3.4
|
||||
|
||||
|
||||
def main(cli, vn, rem):
|
||||
if cli.ip == "1.2.3.4":
|
||||
return "allow"
|
|
@ -1,11 +0,0 @@
|
|||
# create a dummy file and let copyparty return it
|
||||
|
||||
|
||||
def main(cli, vn, rem):
|
||||
print("hello", cli.ip)
|
||||
|
||||
abspath = vn.canonical(rem)
|
||||
with open(abspath, "wb") as f:
|
||||
f.write(b"404? not on MY watch!")
|
||||
|
||||
return "retry"
|
|
@ -1,16 +0,0 @@
|
|||
# reply with an endless "noooooooooooooooooooooooo"
|
||||
|
||||
|
||||
def say_no():
|
||||
yield b"n"
|
||||
while True:
|
||||
yield b"o" * 4096
|
||||
|
||||
|
||||
def main(cli, vn, rem):
|
||||
cli.send_headers(None, 404, "text/plain")
|
||||
|
||||
for chunk in say_no():
|
||||
cli.s.sendall(chunk)
|
||||
|
||||
return "false"
|
|
@ -1,35 +0,0 @@
|
|||
import os
|
||||
import random
|
||||
from urllib.parse import quote
|
||||
|
||||
|
||||
# assuming /foo/bar/ is a valid URL but /foo/bar/randpic.png does not exist,
|
||||
# hijack the 404 with a redirect to a random pic in that folder
|
||||
#
|
||||
# thx to lia & kipu for the idea
|
||||
|
||||
|
||||
def main(cli, vn, rem):
|
||||
req_fn = rem.split("/")[-1]
|
||||
if not cli.can_read or not req_fn.startswith("randpic"):
|
||||
return
|
||||
|
||||
req_abspath = vn.canonical(rem)
|
||||
req_ap_dir = os.path.dirname(req_abspath)
|
||||
files_in_dir = os.listdir(req_ap_dir)
|
||||
|
||||
if "." in req_fn:
|
||||
file_ext = "." + req_fn.split(".")[-1]
|
||||
files_in_dir = [x for x in files_in_dir if x.lower().endswith(file_ext)]
|
||||
|
||||
if not files_in_dir:
|
||||
return
|
||||
|
||||
selected_file = random.choice(files_in_dir)
|
||||
|
||||
req_url = "/".join([vn.vpath, rem]).strip("/")
|
||||
req_dir = req_url.rsplit("/", 1)[0]
|
||||
new_url = "/".join([req_dir, quote(selected_file)]).strip("/")
|
||||
|
||||
cli.reply(b"redirecting...", 302, headers={"Location": "/" + new_url})
|
||||
return "true"
|
|
@ -1,52 +0,0 @@
|
|||
# if someone hits a 404, redirect them to another location
|
||||
|
||||
|
||||
def send_http_302_temporary_redirect(cli, new_path):
|
||||
"""
|
||||
replies with an HTTP 302, which is a temporary redirect;
|
||||
"new_path" can be any of the following:
|
||||
- "http://a.com/" would redirect to another website,
|
||||
- "/foo/bar" would redirect to /foo/bar on the same server;
|
||||
note the leading '/' in the location which is important
|
||||
"""
|
||||
cli.reply(b"redirecting...", 302, headers={"Location": new_path})
|
||||
|
||||
|
||||
def send_http_301_permanent_redirect(cli, new_path):
|
||||
"""
|
||||
replies with an HTTP 301, which is a permanent redirect;
|
||||
otherwise identical to send_http_302_temporary_redirect
|
||||
"""
|
||||
cli.reply(b"redirecting...", 301, headers={"Location": new_path})
|
||||
|
||||
|
||||
def send_errorpage_with_redirect_link(cli, new_path):
|
||||
"""
|
||||
replies with a website explaining that the page has moved;
|
||||
"new_path" must be an absolute location on the same server
|
||||
but without a leading '/', so for example "foo/bar"
|
||||
would redirect to "/foo/bar"
|
||||
"""
|
||||
cli.redirect(new_path, click=False, msg="this page has moved")
|
||||
|
||||
|
||||
def main(cli, vn, rem):
|
||||
"""
|
||||
this is the function that gets called by copyparty;
|
||||
note that vn.vpath and cli.vpath does not have a leading '/'
|
||||
so we're adding the slash in the debug messages below
|
||||
"""
|
||||
print(f"this client just hit a 404: {cli.ip}")
|
||||
print(f"they were accessing this volume: /{vn.vpath}")
|
||||
print(f"and the original request-path (straight from the URL) was /{cli.vpath}")
|
||||
print(f"...which resolves to the following filesystem path: {vn.canonical(rem)}")
|
||||
|
||||
new_path = "/foo/bar/"
|
||||
print(f"will now redirect the client to {new_path}")
|
||||
|
||||
# uncomment one of these:
|
||||
send_http_302_temporary_redirect(cli, new_path)
|
||||
#send_http_301_permanent_redirect(cli, new_path)
|
||||
#send_errorpage_with_redirect_link(cli, new_path)
|
||||
|
||||
return "true"
|
|
@ -1,7 +0,0 @@
|
|||
# sends a custom response instead of the usual 404
|
||||
|
||||
|
||||
def main(cli, vn, rem):
|
||||
msg = f"sorry {cli.ip} but {cli.vpath} doesn't exist"
|
||||
|
||||
return str(cli.reply(msg.encode("utf-8"), 404, "text/plain"))
|
|
@ -1,37 +0,0 @@
|
|||
standalone programs which are executed by copyparty when an event happens (upload, file rename, delete, ...)

these programs either take zero arguments, or a filepath (the affected file), or a json message with filepath + additional info

run copyparty with `--help-hooks` for usage details / hook type explanations (xm/xbu/xau/xiu/xbc/xac/xbr/xar/xbd/xad/xban)

> **note:** in addition to event hooks (the stuff described here), copyparty has another api to run your programs/scripts while providing way more information such as audio tags / video codecs / etc and optionally daisychaining data between scripts in a processing pipeline; if that's what you want then see [mtp plugins](../mtag/) instead

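before diving into the bundled examples below, here's roughly what the simplest possible hook could look like -- an illustrative sketch only (the filename and logfile name are made up), assuming the basic non-json convention where the hook gets the affected filepath as its single argument:

```py
#!/usr/bin/env python3
# hypothetical xau (execute-after-upload) hook; illustration only.
# would be enabled with something like  --xau bin/hooks/log-upload.py  (made-up path)

import sys
import time


def main():
    fp = sys.argv[1]  # filesystem path of the file that was just uploaded
    ts = time.strftime("%Y-%m-%d %H:%M:%S")
    # append a line to a logfile in copyparty's working directory
    with open("uploads.log", "a", encoding="utf-8") as f:
        f.write("%s %s\n" % (ts, fp))


if __name__ == "__main__":
    main()
```
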
# after upload
* [notify.py](notify.py) shows a desktop notification ([example](https://user-images.githubusercontent.com/241032/215335767-9c91ed24-d36e-4b6b-9766-fb95d12d163f.png))
* [notify2.py](notify2.py) uses the json API to show more context
* [image-noexif.py](image-noexif.py) removes image exif by overwriting / directly editing the uploaded file
* [discord-announce.py](discord-announce.py) announces new uploads on discord using webhooks ([example](https://user-images.githubusercontent.com/241032/215304439-1c1cb3c8-ec6f-4c17-9f27-81f969b1811a.png))
* [reject-mimetype.py](reject-mimetype.py) rejects uploads unless the mimetype is acceptable
* [into-the-cache-it-goes.py](into-the-cache-it-goes.py) avoids bugs in caching proxies by immediately downloading each file that is uploaded
* [podcast-normalizer.py](podcast-normalizer.py) creates a second file with dynamic-range-compression whenever an audio file is uploaded
  * good example of the `idx` [hook effect](https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#hook-effects) to tell copyparty about additional files to scan/index

# upload batches
these are `--xiu` hooks; unlike `xbu` and `xau` (which get executed on every single file), `xiu` hooks are given a list of recent uploads on STDIN after the server has gone idle for N seconds, reducing server load + providing more context
* [xiu.py](xiu.py) is a "minimal" example showing a list of filenames + total filesize
* [xiu-sha.py](xiu-sha.py) produces a sha512 checksum list in the volume root

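and a rough sketch of the batch variant -- note that the exact STDIN format is an assumption here (json is a guess); `--help-hooks` and the bundled xiu.py are the authoritative reference:

```py
#!/usr/bin/env python3
# hypothetical xiu (batch) hook; illustration only -- see xiu.py for the real deal.
# prints a one-line summary of the batch it was handed on STDIN.

import json
import sys


def main():
    raw = sys.stdin.read()
    try:
        # assumption: the batch arrives as a json list of upload-info objects
        items = json.loads(raw)
        print("batch of %d uploads" % (len(items),))
    except Exception:
        # fall back to treating it as one entry per line
        print("batch of %d lines" % (len(raw.splitlines()),))


if __name__ == "__main__":
    main()
```
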
# before upload
* [reject-extension.py](reject-extension.py) rejects uploads if they match a list of file extensions
* [reloc-by-ext.py](reloc-by-ext.py) redirects an upload to another destination based on the file extension
  * good example of the `reloc` [hook effect](https://github.com/9001/copyparty/blob/hovudstraum/docs/devnotes.md#hook-effects)


# on message
* [wget.py](wget.py) lets you download files by POSTing URLs to copyparty
* [qbittorrent-magnet.py](qbittorrent-magnet.py) starts downloading a torrent if you post a magnet url
* [usb-eject.py](usb-eject.py) adds web-UI buttons to safe-remove usb flashdrives shared through copyparty
* [msg-log.py](msg-log.py) is a guestbook; logs messages to a doc in the same folder
|
@ -1,77 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import json
|
||||
import requests
|
||||
from copyparty.util import humansize, quotep
|
||||
|
||||
|
||||
_ = r"""
|
||||
announces a new upload on discord
|
||||
|
||||
example usage as global config:
|
||||
--xau f,t5,j,bin/hooks/discord-announce.py
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
f = fork; don't delay other hooks while this is running
|
||||
t5 = timeout if it's still running after 5 sec
|
||||
j = this hook needs upload information as json (not just the filename)
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=f,t5,j,bin/hooks/discord-announce.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params explained above)
|
||||
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/inc]
|
||||
srv/inc
|
||||
accs:
|
||||
r: *
|
||||
rw: ed
|
||||
flags:
|
||||
xau: f,t5,j,bin/hooks/discord-announce.py
|
||||
|
||||
replace "xau" with "xbu" to announce Before upload starts instead of After completion
|
||||
|
||||
# how to discord:
|
||||
first create the webhook url; https://support.discord.com/hc/en-us/articles/228383668-Intro-to-Webhooks
|
||||
then use this to design your message: https://discohook.org/
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
WEBHOOK = "https://discord.com/api/webhooks/1234/base64"
|
||||
WEBHOOK = "https://discord.com/api/webhooks/1066830390280597718/M1TDD110hQA-meRLMRhdurych8iyG35LDoI1YhzbrjGP--BXNZodZFczNVwK4Ce7Yme5"
|
||||
|
||||
# read info from copyparty
|
||||
inf = json.loads(sys.argv[1])
|
||||
vpath = inf["vp"]
|
||||
filename = vpath.split("/")[-1]
|
||||
url = f"https://{inf['host']}/{quotep(vpath)}"
|
||||
|
||||
# compose the message to discord
|
||||
j = {
|
||||
"title": filename,
|
||||
"url": url,
|
||||
"description": url.rsplit("/", 1)[0],
|
||||
"color": 0x449900,
|
||||
"fields": [
|
||||
{"name": "Size", "value": humansize(inf["sz"])},
|
||||
{"name": "User", "value": inf["user"]},
|
||||
{"name": "IP", "value": inf["ip"]},
|
||||
],
|
||||
}
|
||||
|
||||
for v in j["fields"]:
|
||||
v["inline"] = True
|
||||
|
||||
r = requests.post(WEBHOOK, json={"embeds": [j]})
|
||||
print(f"discord: {r}\n", end="")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,72 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
|
||||
|
||||
_ = r"""
|
||||
remove exif tags from uploaded images; the eventhook edition of
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/image-noexif.py
|
||||
|
||||
dependencies:
|
||||
exiftool / perl-Image-ExifTool
|
||||
|
||||
being an upload hook, this will take effect after upload completion
|
||||
but before copyparty has hashed/indexed the file, which means that
|
||||
copyparty will never index the original file, so deduplication will
|
||||
not work as expected... which is mostly OK but ehhh
|
||||
|
||||
note: modifies the file in-place, so don't set the `f` (fork) flag
|
||||
|
||||
example usages; either as global config (all volumes) or as volflag:
|
||||
--xau bin/hooks/image-noexif.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=bin/hooks/image-noexif.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
explained:
|
||||
share fs-path srv/inc at /inc (readable by all, read-write for user ed)
|
||||
running this xau (execute-after-upload) plugin for all uploaded files
|
||||
"""
|
||||
|
||||
|
||||
# filetypes to process; ignores everything else
|
||||
EXTS = ("jpg", "jpeg", "avif", "heif", "heic")
|
||||
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
def main():
|
||||
fp = sys.argv[1]
|
||||
ext = fp.lower().split(".")[-1]
|
||||
if ext not in EXTS:
|
||||
return
|
||||
|
||||
cwd, fn = os.path.split(fp)
|
||||
os.chdir(cwd)
|
||||
f1 = fsenc(fn)
|
||||
cmd = [
|
||||
b"exiftool",
|
||||
b"-exif:all=",
|
||||
b"-iptc:all=",
|
||||
b"-xmp:all=",
|
||||
b"-P",
|
||||
b"-overwrite_original",
|
||||
b"--",
|
||||
f1,
|
||||
]
|
||||
sp.check_output(cmd)
|
||||
print("image-noexif: stripped")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except:
|
||||
pass
|
|
@ -1,140 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import json
|
||||
import shutil
|
||||
import platform
|
||||
import subprocess as sp
|
||||
from urllib.parse import quote
|
||||
|
||||
|
||||
_ = r"""
|
||||
try to avoid race conditions in caching proxies
|
||||
(primarily cloudflare, but probably others too)
|
||||
by means of the most obvious solution possible:
|
||||
|
||||
just as each file has finished uploading, use
|
||||
the server's external URL to download the file
|
||||
so that it ends up in the cache, warm and snug
|
||||
|
||||
this intentionally delays the upload response
|
||||
as it waits for the file to finish downloading
|
||||
before copyparty is allowed to return the URL
|
||||
|
||||
NOTE: you must edit this script before use,
|
||||
replacing https://example.com with your URL
|
||||
|
||||
NOTE: if the files are only accessible with a
|
||||
password and/or filekey, you must also add
|
||||
a cromulent password in the PASSWORD field
|
||||
|
||||
NOTE: needs either wget, curl, or "requests":
|
||||
python3 -m pip install --user -U requests
|
||||
|
||||
|
||||
example usage as global config:
|
||||
--xau j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
j = this hook needs upload information as json (not just the filename)
|
||||
t10 = abort download and continue if it takes longer than 10sec
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with params explained above)
|
||||
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/inc]
|
||||
srv/inc
|
||||
accs:
|
||||
r: *
|
||||
rw: ed
|
||||
flags:
|
||||
xau: j,t10,bin/hooks/into-the-cache-it-goes.py
|
||||
"""
|
||||
|
||||
|
||||
# replace this with your site's external URL
|
||||
# (including the :portnumber if necessary)
|
||||
SITE_URL = "https://example.com"
|
||||
|
||||
# if downloading is protected by passwords or filekeys,
|
||||
# specify a valid password between the quotes below:
|
||||
PASSWORD = ""
|
||||
|
||||
# if file is larger than this, skip download
|
||||
MAX_MEGABYTES = 8
|
||||
|
||||
# =============== END OF CONFIG ===============
|
||||
|
||||
|
||||
WINDOWS = platform.system() == "Windows"
|
||||
|
||||
|
||||
def main():
|
||||
fun = download_with_python
|
||||
if shutil.which("curl"):
|
||||
fun = download_with_curl
|
||||
elif shutil.which("wget"):
|
||||
fun = download_with_wget
|
||||
|
||||
inf = json.loads(sys.argv[1])
|
||||
|
||||
if inf["sz"] > 1024 * 1024 * MAX_MEGABYTES:
|
||||
print("[into-the-cache] file is too large; will not download")
|
||||
return
|
||||
|
||||
file_url = "/"
|
||||
if inf["vp"]:
|
||||
file_url += inf["vp"] + "/"
|
||||
file_url += inf["ap"].replace("\\", "/").split("/")[-1]
|
||||
file_url = SITE_URL.rstrip("/") + quote(file_url, safe=b"/")
|
||||
|
||||
print("[into-the-cache] %s(%s)" % (fun.__name__, file_url))
|
||||
fun(file_url, PASSWORD.strip())
|
||||
|
||||
print("[into-the-cache] Download OK")
|
||||
|
||||
|
||||
def download_with_curl(url, pw):
|
||||
cmd = ["curl"]
|
||||
|
||||
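# copyparty accepts the password in the "PW" request header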
if pw:
|
||||
cmd += ["-HPW:%s" % (pw,)]
|
||||
|
||||
nah = sp.DEVNULL
|
||||
sp.check_call(cmd + [url], stdout=nah, stderr=nah)
|
||||
|
||||
|
||||
def download_with_wget(url, pw):
|
||||
cmd = ["wget", "-O"]
|
||||
|
||||
cmd += ["nul" if WINDOWS else "/dev/null"]
|
||||
|
||||
if pw:
|
||||
cmd += ["--header=PW:%s" % (pw,)]
|
||||
|
||||
nah = sp.DEVNULL
|
||||
sp.check_call(cmd + [url], stdout=nah, stderr=nah)
|
||||
|
||||
|
||||
def download_with_python(url, pw):
|
||||
import requests
|
||||
|
||||
headers = {}
|
||||
if pw:
|
||||
headers["PW"] = pw
|
||||
|
||||
with requests.get(url, headers=headers, stream=True) as r:
|
||||
r.raise_for_status()
|
||||
for _ in r.iter_content(chunk_size=1024 * 256):
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,136 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# coding: utf-8
|
||||
# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
|
||||
try:
|
||||
from datetime import datetime, timezone
|
||||
except:
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
_ = r"""
|
||||
use copyparty as a dumb messaging server / guestbook thing;
|
||||
accepts guestbook entries from 📟 (message-to-server-log) in the web-ui
|
||||
initially contributed by @clach04 in https://github.com/9001/copyparty/issues/35 (thanks!)
|
||||
|
||||
example usage as global config:
|
||||
python copyparty-sfx.py --xm j,bin/hooks/msg-log.py
|
||||
|
||||
parameters explained,
|
||||
xm = execute on message (📟)
|
||||
j = this hook needs message information as json (not just the message-text)
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
python copyparty-sfx.py -v srv/log:log:r:c,xm=j,bin/hooks/msg-log.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/log as volume /log, readable by everyone,
|
||||
running this plugin on all messages with the params explained above)
|
||||
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/log]
|
||||
srv/log
|
||||
accs:
|
||||
r: *
|
||||
flags:
|
||||
xm: j,bin/hooks/msg-log.py
|
||||
"""
|
||||
|
||||
|
||||
# output filename
|
||||
FILENAME = os.environ.get("COPYPARTY_MESSAGE_FILENAME", "") or "README.md"
|
||||
|
||||
# set True to write in descending order (newest message at top of file);
|
||||
# note that this becomes very slow/expensive as the file gets bigger
|
||||
DESCENDING = True
|
||||
|
||||
# the message template; the following parameters are provided by copyparty and can be referenced below:
|
||||
# 'ap' = absolute filesystem path where the message was posted
|
||||
# 'vp' = virtual path (URL 'path') where the message was posted
|
||||
# 'mt' = 'at' = unix-timestamp when the message was posted
|
||||
# 'datetime' = ISO-8601 time when the message was posted
|
||||
# 'sz' = message size in bytes
|
||||
# 'host' = the server hostname which the user was accessing (URL 'host')
|
||||
# 'user' = username (if logged in), otherwise '*'
|
||||
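# 'ip' = the IP address the message was sent from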
# 'txt' = the message text itself
|
||||
# (uncomment the print(msg_info) to see if additional information has been introduced by copyparty since this was written)
|
||||
TEMPLATE = """
|
||||
🕒 %(datetime)s, 👤 %(user)s @ %(ip)s
|
||||
%(txt)s
|
||||
"""
|
||||
|
||||
|
||||
def write_ascending(filepath, msg_text):
|
||||
with open(filepath, "a", encoding="utf-8", errors="replace") as outfile:
|
||||
outfile.write(msg_text)
|
||||
|
||||
|
||||
def write_descending(filepath, msg_text):
|
||||
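# grab a crude cross-process lock by creating a directory (os.mkdir is atomic), retrying for ~1.6 sec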
lockpath = filepath + ".lock"
|
||||
got_it = False
|
||||
for _ in range(16):
|
||||
try:
|
||||
os.mkdir(lockpath)
|
||||
got_it = True
|
||||
break
|
||||
except:
|
||||
time.sleep(0.1)
|
||||
continue
|
||||
|
||||
if not got_it:
|
||||
return sys.exit(1)
|
||||
|
||||
try:
|
||||
oldpath = filepath + ".old"
|
||||
os.rename(filepath, oldpath)
|
||||
with open(oldpath, "r", encoding="utf-8", errors="replace") as infile, open(
|
||||
filepath, "w", encoding="utf-8", errors="replace"
|
||||
) as outfile:
|
||||
outfile.write(msg_text)
|
||||
while True:
|
||||
buf = infile.read(4096)
|
||||
if not buf:
|
||||
break
|
||||
outfile.write(buf)
|
||||
finally:
|
||||
try:
|
||||
os.unlink(oldpath)
|
||||
except:
|
||||
pass
|
||||
os.rmdir(lockpath)
|
||||
|
||||
|
||||
def main(argv=None):
|
||||
if argv is None:
|
||||
argv = sys.argv
|
||||
|
||||
msg_info = json.loads(sys.argv[1])
|
||||
# print(msg_info)
|
||||
|
||||
try:
|
||||
dt = datetime.fromtimestamp(msg_info["at"], timezone.utc)
|
||||
except:
|
||||
dt = datetime.utcfromtimestamp(msg_info["at"])
|
||||
|
||||
msg_info["datetime"] = dt.strftime("%Y-%m-%d, %H:%M:%S")
|
||||
|
||||
msg_text = TEMPLATE % msg_info
|
||||
|
||||
filepath = os.path.join(msg_info["ap"], FILENAME)
|
||||
|
||||
if DESCENDING and os.path.exists(filepath):
|
||||
write_descending(filepath, msg_text)
|
||||
else:
|
||||
write_ascending(filepath, msg_text)
|
||||
|
||||
print(msg_text)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,66 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
from plyer import notification
|
||||
|
||||
|
||||
_ = r"""
|
||||
show os notification on upload; works on windows, linux, macos, android
|
||||
|
||||
dependencies:
|
||||
windows: python3 -m pip install --user -U plyer
|
||||
linux: python3 -m pip install --user -U plyer
|
||||
macos: python3 -m pip install --user -U plyer pyobjus
|
||||
android: just termux and termux-api
|
||||
|
||||
example usages; either as global config (all volumes) or as volflag:
|
||||
--xau f,bin/hooks/notify.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=f,bin/hooks/notify.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
f = fork so it doesn't block uploads
|
||||
"""
|
||||
|
||||
|
||||
try:
|
||||
from copyparty.util import humansize
|
||||
except:
|
||||
|
||||
def humansize(n):
|
||||
return n
|
||||
|
||||
|
||||
def main():
|
||||
fp = sys.argv[1]
|
||||
dp, fn = os.path.split(fp)
|
||||
try:
|
||||
sz = humansize(os.path.getsize(fp))
|
||||
except:
|
||||
sz = "?"
|
||||
|
||||
msg = "{} ({})\n📁 {}".format(fn, sz, dp)
|
||||
title = "File received"
|
||||
|
||||
if "com.termux" in sys.executable:
|
||||
sp.run(["termux-notification", "-t", title, "-c", msg])
|
||||
return
|
||||
|
||||
icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""
|
||||
notification.notify(
|
||||
title=title,
|
||||
message=msg,
|
||||
app_icon=icon,
|
||||
timeout=10,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,73 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
from datetime import datetime, timezone
|
||||
from plyer import notification
|
||||
|
||||
|
||||
_ = r"""
|
||||
same as notify.py but with additional info (uploader, ...)
|
||||
and also supports --xm (notify on 📟 message)
|
||||
|
||||
example usages; either as global config (all volumes) or as volflag:
|
||||
--xm f,j,bin/hooks/notify2.py
|
||||
--xau f,j,bin/hooks/notify2.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xm=f,j,bin/hooks/notify2.py
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=f,j,bin/hooks/notify2.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads / msgs with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
f = fork so it doesn't block uploads
|
||||
j = provide json instead of filepath list
|
||||
"""
|
||||
|
||||
|
||||
try:
|
||||
from copyparty.util import humansize
|
||||
except:
|
||||
|
||||
def humansize(n):
|
||||
return n
|
||||
|
||||
|
||||
def main():
|
||||
inf = json.loads(sys.argv[1])
|
||||
fp = inf["ap"]
|
||||
sz = humansize(inf["sz"])
|
||||
dp, fn = os.path.split(fp)
|
||||
dt = datetime.fromtimestamp(inf["mt"], timezone.utc)
|
||||
mt = dt.strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
msg = f"{fn} ({sz})\n📁 {dp}"
|
||||
title = "File received"
|
||||
icon = "emblem-documents-symbolic" if sys.platform == "linux" else ""
|
||||
|
||||
if inf.get("txt"):
|
||||
msg = inf["txt"]
|
||||
title = "Message received"
|
||||
icon = "mail-unread-symbolic" if sys.platform == "linux" else ""
|
||||
|
||||
msg += f"\n👤 {inf['user']} ({inf['ip']})\n🕒 {mt}"
|
||||
|
||||
if "com.termux" in sys.executable:
|
||||
sp.run(["termux-notification", "-t", title, "-c", msg])
|
||||
return
|
||||
|
||||
notification.notify(
|
||||
title=title,
|
||||
message=msg,
|
||||
app_icon=icon,
|
||||
timeout=10,
|
||||
)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,121 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
|
||||
|
||||
_ = r"""
|
||||
sends all uploaded audio files through an aggressive
|
||||
dynamic-range-compressor to even out the volume levels
|
||||
|
||||
dependencies:
|
||||
ffmpeg
|
||||
|
||||
being an xau hook, this gets eXecuted After Upload completion
|
||||
but before copyparty has started hashing/indexing the file, so
|
||||
we'll create a second normalized copy in a subfolder and tell
|
||||
copyparty to hash/index that additional file as well
|
||||
|
||||
example usage as global config:
|
||||
-e2d -e2t --xau j,c1,bin/hooks/podcast-normalizer.py
|
||||
|
||||
parameters explained,
|
||||
e2d/e2t = enable database and metadata indexing
|
||||
xau = execute after upload
|
||||
j = this hook needs upload information as json (not just the filename)
|
||||
c1 = this hook returns json on stdout, so tell copyparty to read that
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc/pods:inc/pods:r:rw,ed:c,xau=j,c1,bin/hooks/podcast-normalizer.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share fs-path srv/inc/pods at URL /inc/pods,
|
||||
readable by all, read-write for user ed,
|
||||
running this xau (exec-after-upload) plugin for all uploaded files)
|
||||
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/inc/pods]
|
||||
srv/inc/pods
|
||||
accs:
|
||||
r: *
|
||||
rw: ed
|
||||
flags:
|
||||
e2d # enables file indexing
|
||||
e2t # metadata tags too
|
||||
xau: j,c1,bin/hooks/podcast-normalizer.py
|
||||
|
||||
"""
|
||||
|
||||
########################################################################
|
||||
### CONFIG
|
||||
|
||||
# filetypes to process; ignores everything else
|
||||
EXTS = "mp3 flac ogg oga opus m4a aac wav wma"
|
||||
|
||||
# the name of the subdir to put the normalized files in
|
||||
SUBDIR = "normalized"
|
||||
|
||||
########################################################################
|
||||
|
||||
|
||||
# try to enable support for crazy filenames
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
def main():
|
||||
# read info from copyparty
|
||||
inf = json.loads(sys.argv[1])
|
||||
vpath = inf["vp"]
|
||||
abspath = inf["ap"]
|
||||
|
||||
# check if the file-extension is on the to-be-processed list
|
||||
ext = abspath.lower().split(".")[-1]
|
||||
if ext not in EXTS.split():
|
||||
return
|
||||
|
||||
# jump into the folder where the file was uploaded
|
||||
# and create the subfolder to place the normalized copy inside
|
||||
dirpath, filename = os.path.split(abspath)
|
||||
os.chdir(fsenc(dirpath))
|
||||
os.makedirs(SUBDIR, exist_ok=True)
|
||||
|
||||
# the input and output filenames to give ffmpeg
|
||||
fname_in = fsenc(f"./{filename}")
|
||||
fname_out = fsenc(f"{SUBDIR}/{filename}.opus")
|
||||
|
||||
# fmt: off
|
||||
# create and run the ffmpeg command
|
||||
cmd = [
|
||||
b"ffmpeg",
|
||||
b"-nostdin",
|
||||
b"-hide_banner",
|
||||
b"-i", fname_in,
|
||||
b"-af", b"dynaudnorm=f=100:g=9", # the normalizer config
|
||||
b"-c:a", b"libopus",
|
||||
b"-b:a", b"128k",
|
||||
fname_out,
|
||||
]
|
||||
# fmt: on
|
||||
sp.check_output(cmd)
|
||||
|
||||
# and finally, tell copyparty about the new file
|
||||
# so it appears in the database and rss-feed:
|
||||
vpath = f"{SUBDIR}/{filename}.opus"
|
||||
print(json.dumps({"idx": {"vp": [vpath]}}))
|
||||
|
||||
# (it's fine to give it a relative path like that; it gets
|
||||
# resolved relative to the folder the file was uploaded into)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except Exception as ex:
|
||||
print("podcast-normalizer failed; %r" % (ex,))
|
|
@ -1,128 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
# coding: utf-8
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
|
||||
|
||||
_ = r"""
|
||||
start downloading a torrent by POSTing a magnet URL to copyparty,
|
||||
for example using 📟 (message-to-server-log) in the web-ui
|
||||
|
||||
by default it will download the torrent to the folder you were in
|
||||
when you pasted the magnet into the message-to-server-log field
|
||||
|
||||
you can optionally specify another location by adding a whitespace
|
||||
after the magnet URL followed by the name of the subfolder to DL into,
|
||||
or for example "anime/airing" would download to /srv/media/anime/airing
|
||||
because the keyword "anime" is in the DESTS config below
|
||||
|
||||
needs python3
|
||||
|
||||
example usage as global config (not a good idea):
|
||||
python copyparty-sfx.py --xm aw,f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||
|
||||
parameters explained,
|
||||
xm = execute on message (📟)
|
||||
aw = only users with write-access can use this
|
||||
f = fork; don't delay other hooks while this is running
|
||||
j = provide message information as json (not just the text)
|
||||
t60 = abort if qbittorrent has to think about it for more than 1 min
|
||||
|
||||
example usage as a volflag (per-volume config, much better):
|
||||
-v srv/qb:qb:A,ed:c,xm=aw,f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/qb as volume /qb with Admin for user 'ed',
|
||||
running this plugin on all messages with the params explained above)
|
||||
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/qb]
|
||||
srv/qb
|
||||
accs:
|
||||
A: ed
|
||||
flags:
|
||||
xm: aw,f,j,t60,bin/hooks/qbittorrent-magnet.py
|
||||
|
||||
the volflag examples only kick in if you send the torrent magnet
|
||||
while you're in the /qb folder (or any folder below there)
|
||||
"""
|
||||
|
||||
|
||||
# list of usernames to allow
|
||||
ALLOWLIST = [ "ed", "morpheus" ]
|
||||
|
||||
|
||||
# list of destination aliases to translate into full filesystem
|
||||
# paths; takes effect if the first folder component in the
|
||||
# custom download location matches anything in this dict
|
||||
DESTS = {
|
||||
"iso": "/srv/pub/linux-isos",
|
||||
"anime": "/srv/media/anime",
|
||||
}
|
||||
|
||||
|
||||
def main():
|
||||
inf = json.loads(sys.argv[1])
|
||||
url = inf["txt"]
|
||||
if not url.lower().startswith("magnet:?"):
|
||||
# not a magnet, abort
|
||||
return
|
||||
|
||||
if inf["user"] not in ALLOWLIST:
|
||||
print("🧲 denied for user", inf["user"])
|
||||
return
|
||||
|
||||
# might as well run the command inside the filesystem folder
|
||||
# which matches the URL that the magnet message was sent to
|
||||
os.chdir(inf["ap"])
|
||||
|
||||
# is there a custom download location in the url?
|
||||
dst = ""
|
||||
if " " in url:
|
||||
url, dst = url.split(" ", 1)
|
||||
|
||||
# is the location in the predefined list of locations?
|
||||
parts = dst.replace("\\", "/").split("/")
|
||||
if parts[0] in DESTS:
|
||||
dst = os.path.join(DESTS[parts[0]], *(parts[1:]))
|
||||
|
||||
else:
|
||||
# nope, so download to the current folder instead;
|
||||
# comment the dst line below to instead use the default
|
||||
# download location from your qbittorrent settings
|
||||
dst = inf["ap"]
|
||||
pass
|
||||
|
||||
# archlinux has a -nox suffix for qbittorrent if headless
|
||||
# so check if we should be using that
|
||||
if shutil.which("qbittorrent-nox"):
|
||||
torrent_bin = "qbittorrent-nox"
|
||||
else:
|
||||
torrent_bin = "qbittorrent"
|
||||
|
||||
# the command to add a new torrent, adjust if necessary
|
||||
cmd = [torrent_bin, url]
|
||||
if dst:
|
||||
cmd += ["--save-path=%s" % (dst,)]
|
||||
|
||||
# if copyparty and qbittorrent are running as different users
|
||||
# you may have to do something like the following
|
||||
# (assuming qbittorrent* is nopasswd-allowed in sudoers):
|
||||
#
|
||||
# cmd = ["sudo", "-u", "qbitter"] + cmd
|
||||
|
||||
print("🧲", cmd)
|
||||
|
||||
try:
|
||||
sp.check_call(cmd)
|
||||
except:
|
||||
print("🧲 FAILED TO ADD", url)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
@ -1,35 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
|
||||
|
||||
_ = r"""
|
||||
reject file uploads by file extension
|
||||
|
||||
example usage as global config:
|
||||
--xbu c,bin/hooks/reject-extension.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xbu=c,bin/hooks/reject-extension.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xbu = execute before upload
|
||||
c = check result, reject upload if error
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
bad = "exe scr com pif bat ps1 jar msi"
|
||||
|
||||
ext = sys.argv[1].split(".")[-1]
|
||||
|
||||
sys.exit(1 if ext in bad.split() else 0)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,44 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import magic
|
||||
|
||||
|
||||
_ = r"""
|
||||
reject file uploads by mimetype
|
||||
|
||||
dependencies (linux, macos):
|
||||
python3 -m pip install --user -U python-magic
|
||||
|
||||
dependencies (windows):
|
||||
python3 -m pip install --user -U python-magic-bin
|
||||
|
||||
example usage as global config:
|
||||
--xau c,bin/hooks/reject-mimetype.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xau=c,bin/hooks/reject-mimetype.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xau = execute after upload
|
||||
c = check result, reject upload if error
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
ok = ["image/jpeg", "image/png"]
|
||||
|
||||
mt = magic.from_file(sys.argv[1], mime=True)
|
||||
|
||||
print(mt)
|
||||
|
||||
sys.exit(1 if mt not in ok else 0)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,130 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
|
||||
_ = r"""
|
||||
relocate/redirect incoming uploads according to file extension or name
|
||||
|
||||
example usage as global config:
|
||||
--xbu j,c1,bin/hooks/reloc-by-ext.py
|
||||
|
||||
parameters explained,
|
||||
xbu = execute before upload
|
||||
j = this hook needs upload information as json (not just the filename)
|
||||
c1 = this hook returns json on stdout, so tell copyparty to read that
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xbu=j,c1,bin/hooks/reloc-by-ext.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all uploads with the params explained above)
|
||||
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/inc]
|
||||
srv/inc
|
||||
accs:
|
||||
r: *
|
||||
rw: ed
|
||||
flags:
|
||||
xbu: j,c1,bin/hooks/reloc-by-ext.py
|
||||
|
||||
note: this could also work as an xau hook (after-upload), but
|
||||
because it doesn't need to read the file contents it's better
|
||||
as xbu (before-upload) since that's safer / less buggy,
|
||||
and only xbu works with up2k (dragdrop into browser)
|
||||
"""
|
||||
|
||||
|
||||
PICS = "avif bmp gif heic heif jpeg jpg jxl png psd qoi tga tif tiff webp"
|
||||
VIDS = "3gp asf avi flv mkv mov mp4 mpeg mpeg2 mpegts mpg mpg2 nut ogm ogv rm ts vob webm wmv"
|
||||
MUSIC = "aac aif aiff alac amr ape dfpwm flac m4a mp3 ogg opus ra tak tta wav wma wv"
|
||||
|
||||
|
||||
def main():
|
||||
inf = json.loads(sys.argv[1])
|
||||
vdir, fn = os.path.split(inf["vp"])
|
||||
|
||||
try:
|
||||
fn, ext = fn.rsplit(".", 1)
|
||||
except:
|
||||
# no file extension; pretend it's "bin"
|
||||
ext = "bin"
|
||||
|
||||
ext = ext.lower()
|
||||
|
||||
# this function must end by printing the action to perform;
|
||||
# that's handled by the print(json.dumps(... at the bottom
|
||||
#
|
||||
# the action can contain the following keys:
|
||||
# "vp" is the folder URL to move the upload to,
|
||||
# "ap" is the filesystem-path to move it to (but "vp" is safer),
|
||||
# "fn" overrides the final filename to use
|
||||
|
||||
##
|
||||
## some example actions to take; pick one by
|
||||
## selecting it inside the print at the end:
|
||||
##
|
||||
|
||||
# move all uploads to one specific folder
|
||||
into_junk = {"vp": "/junk"}
|
||||
|
||||
# create a subfolder named after the filetype and move it into there
|
||||
into_subfolder = {"vp": ext}
|
||||
|
||||
# move it into a toplevel folder named after the filetype
|
||||
into_toplevel = {"vp": "/" + ext}
|
||||
|
||||
# move it into a filetype-named folder next to the target folder
|
||||
into_sibling = {"vp": "../" + ext}
|
||||
|
||||
# move images into "/just/pics", vids into "/just/vids",
|
||||
# music into "/just/tunes", and anything else as-is
|
||||
if ext in PICS.split():
|
||||
by_category = {"vp": "/just/pics"}
|
||||
elif ext in VIDS.split():
|
||||
by_category = {"vp": "/just/vids"}
|
||||
elif ext in MUSIC.split():
|
||||
by_category = {"vp": "/just/tunes"}
|
||||
else:
|
||||
by_category = {} # no action
|
||||
|
||||
# now choose the default effect to apply; can be any of these:
|
||||
# into_junk into_subfolder into_toplevel into_sibling by_category
|
||||
effect = into_sibling
|
||||
|
||||
##
|
||||
## but we can keep going, adding more specific rules
|
||||
## which can take precedence, replacing the fallback
|
||||
## effect we just specified:
|
||||
##
|
||||
|
||||
fn = fn.lower() # lowercase filename to make this easier
|
||||
|
||||
if "screenshot" in fn:
|
||||
effect = {"vp": "/ss"}
|
||||
if "mpv_" in fn:
|
||||
effect = {"vp": "/anishots"}
|
||||
elif "debian" in fn or "biebian" in fn:
|
||||
effect = {"vp": "/linux-ISOs"}
|
||||
elif re.search(r"ep(isode |\.)?[0-9]", fn):
|
||||
effect = {"vp": "/podcasts"}
|
||||
|
||||
# regex lets you grab a part of the matching
|
||||
# text and use that in the upload path:
|
||||
m = re.search(r"\b(op|ed)([^a-z]|$)", fn)
|
||||
if m:
|
||||
# the regex matched; use "anime-op" or "anime-ed"
|
||||
effect = {"vp": "/anime-" + m[1]}
|
||||
|
||||
# aaand DO IT
|
||||
print(json.dumps({"reloc": effect}))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,62 +0,0 @@
|
|||
// see usb-eject.py for usage
|
||||
|
||||
function usbclick() {
|
||||
var o = QS('#treeul a[dst="/usb/"]') || QS('#treepar a[dst="/usb/"]');
|
||||
if (o)
|
||||
o.click();
|
||||
}
|
||||
|
||||
function eject_cb() {
|
||||
var t = ('' + this.responseText).trim();
|
||||
if (t.indexOf('can be safely unplugged') < 0 && t.indexOf('Device can be removed') < 0)
|
||||
return toast.err(30, 'usb eject failed:\n\n' + t);
|
||||
|
||||
toast.ok(5, esc(t.replace(/ - /g, '\n\n')).trim());
|
||||
usbclick(); setTimeout(usbclick, 10);
|
||||
};
|
||||
|
||||
function add_eject_2(a) {
|
||||
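// only add the eject button for entries directly below /usb/ (href like /usb/label/)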
var aw = a.getAttribute('href').split(/\//g);
|
||||
if (aw.length != 4 || aw[3])
|
||||
return;
|
||||
|
||||
var v = aw[2],
|
||||
k = 'umount_' + v;
|
||||
|
||||
for (var b = 0; b < 9; b++) {
|
||||
var o = ebi(k);
|
||||
if (!o)
|
||||
break;
|
||||
o.parentNode.removeChild(o);
|
||||
}
|
||||
|
||||
a.appendChild(mknod('span', k, '⏏'), a);
|
||||
o = ebi(k);
|
||||
o.style.cssText = 'position:absolute; right:1em; margin-top:-.2em; font-size:1.3em';
|
||||
o.onclick = function (e) {
|
||||
ev(e);
|
||||
var xhr = new XHR();
|
||||
xhr.open('POST', get_evpath(), true);
|
||||
xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded;charset=UTF-8');
|
||||
xhr.send('msg=' + uricom_enc(':usb-eject:' + v + ':'));
|
||||
xhr.onload = xhr.onerror = eject_cb;
|
||||
toast.inf(10, "ejecting " + v + "...");
|
||||
};
|
||||
};
|
||||
|
||||
function add_eject() {
|
||||
var o = QSA('#treeul a[href^="/usb/"]') || QSA('#treepar a[href^="/usb/"]');
|
||||
for (var a = o.length - 1; a > 0; a--)
|
||||
add_eject_2(o[a]);
|
||||
};
|
||||
|
||||
(function() {
|
||||
var f0 = treectl.rendertree;
|
||||
treectl.rendertree = function (res, ts, top0, dst, rst) {
|
||||
var ret = f0(res, ts, top0, dst, rst);
|
||||
add_eject();
|
||||
return ret;
|
||||
};
|
||||
})();
|
||||
|
||||
setTimeout(add_eject, 50);
|
|
@ -1,62 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import stat
|
||||
import subprocess as sp
|
||||
import sys
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
"""
|
||||
if you've found yourself using copyparty to serve flashdrives on a LAN
|
||||
and your only wish is that the web-UI had a button to unmount / safely
|
||||
remove those flashdrives, then boy howdy are you in the right place :D
|
||||
|
||||
put usb-eject.js in the webroot (or somewhere else http-accessible)
|
||||
then run copyparty with these args:
|
||||
|
||||
-v /run/media/egon:/usb:A:c,hist=/tmp/junk
|
||||
--xm=c1,bin/hooks/usb-eject.py
|
||||
--js-browser=/usb-eject.js
|
||||
|
||||
which does the following respectively,
|
||||
|
||||
* share all of /run/media/egon as /usb with admin for everyone
|
||||
and put the histpath somewhere it won't cause trouble
|
||||
* run the usb-eject hook with stdout redirect to the web-ui
|
||||
* add the complementary usb-eject.js to the browser
|
||||
|
||||
"""
|
||||
|
||||
|
||||
MOUNT_BASE = b"/run/media/egon/"
|
||||
|
||||
|
||||
def main():
|
||||
try:
|
||||
label = sys.argv[1].split(":usb-eject:")[1].split(":")[0]
|
||||
mp = MOUNT_BASE + unquote(label)
|
||||
# print("ejecting [%s]... " % (mp,), end="")
|
||||
mp = os.path.abspath(os.path.realpath(mp))
|
||||
st = os.lstat(mp)
|
||||
if not stat.S_ISDIR(st.st_mode) or not mp.startswith(MOUNT_BASE):
|
||||
raise Exception("not a regular directory")
|
||||
|
||||
# if you're running copyparty as root (thx for the faith)
|
||||
# you'll need something like this to make dbus talkative
|
||||
cmd = b"sudo -u egon DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/1000/bus gio mount -e"
|
||||
|
||||
# but if copyparty and the ui-session is running
|
||||
# as the same user (good) then this is plenty
|
||||
cmd = b"gio mount -e"
|
||||
|
||||
cmd = cmd.split(b" ") + [mp]
|
||||
ret = sp.check_output(cmd).decode("utf-8", "replace")
|
||||
print(ret.strip() or (label + " can be safely unplugged"))
|
||||
|
||||
except Exception as ex:
|
||||
print("unmount failed: %r" % (ex,))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,77 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import subprocess as sp
|
||||
|
||||
|
||||
_ = r"""
|
||||
use copyparty as a file downloader by POSTing URLs as
|
||||
application/x-www-form-urlencoded (for example using the
|
||||
📟 message-to-server-log in the web-ui)
|
||||
|
||||
example usage as global config:
|
||||
--xm aw,f,j,t3600,bin/hooks/wget.py
|
||||
|
||||
parameters explained,
|
||||
xm = execute on message-to-server-log
|
||||
aw = only users with write-access can use this
|
||||
f = fork; don't delay other hooks while this is running
|
||||
j = provide message information as json (not just the text)
|
||||
c3 = mute all output
|
||||
t3600 = timeout and abort download after 1 hour
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xm=aw,f,j,t3600,bin/hooks/wget.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on all messages with the params explained above)
|
||||
|
||||
example usage as a volflag in a copyparty config file:
|
||||
[/inc]
|
||||
srv/inc
|
||||
accs:
|
||||
r: *
|
||||
rw: ed
|
||||
flags:
|
||||
xm: aw,f,j,t3600,bin/hooks/wget.py
|
||||
|
||||
the volflag examples only kick in if you send the message
|
||||
while you're in the /inc folder (or any folder below there)
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
inf = json.loads(sys.argv[1])
|
||||
url = inf["txt"]
|
||||
if "://" not in url:
|
||||
url = "https://" + url
|
||||
|
||||
proto = url.split("://")[0].lower()
|
||||
if proto not in ("http", "https", "ftp", "ftps"):
|
||||
raise Exception("bad proto {}".format(proto))
|
||||
|
||||
os.chdir(inf["ap"])
|
||||
|
||||
name = url.split("?")[0].split("/")[-1]
|
||||
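# drop a placeholder file so the ongoing download is visible in the folder listing (deleted when done)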
tfn = "-- DOWNLOADING " + name
|
||||
print(f"{tfn}\n", end="")
|
||||
open(tfn, "wb").close()
|
||||
|
||||
cmd = ["wget", "--trust-server-names", "-nv", "--", url]
|
||||
|
||||
try:
|
||||
sp.check_call(cmd)
|
||||
except:
|
||||
t = "-- FAILED TO DONWLOAD " + name
|
||||
print(f"{t}\n", end="")
|
||||
open(t, "wb").close()
|
||||
|
||||
os.unlink(tfn)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,111 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
|
||||
|
||||
_ = r"""
|
||||
this hook will produce a single sha512 file which
|
||||
covers all recent uploads (plus metadata comments)
|
||||
|
||||
use this with --xiu, which makes copyparty buffer
|
||||
uploads until server is idle, providing file infos
|
||||
on stdin (filepaths or json)
|
||||
|
||||
example usage as global config:
|
||||
--xiu i5,j,bin/hooks/xiu-sha.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xiu=i5,j,bin/hooks/xiu-sha.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on batches of uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xiu = execute after uploads...
|
||||
i5 = ...after volume has been idle for 5sec
|
||||
j = provide json instead of filepath list
|
||||
|
||||
note the "f" (fork) flag is not set, so this xiu
|
||||
will block other xiu hooks while it's running
|
||||
"""
|
||||
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p
|
||||
|
||||
|
||||
UTC = timezone.utc
|
||||
|
||||
|
||||
def humantime(ts):
|
||||
return datetime.fromtimestamp(ts, UTC).strftime("%Y-%m-%d %H:%M:%S")
|
||||
|
||||
|
||||
def find_files_root(inf):
|
||||
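# number of leading characters shared by the uploads' directory-paths (used to shorten paths in the report)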
di = 9000
|
||||
for f1, f2 in zip(inf, inf[1:]):
|
||||
p1 = f1["ap"].replace("\\", "/").rsplit("/", 1)[0]
|
||||
p2 = f2["ap"].replace("\\", "/").rsplit("/", 1)[0]
|
||||
di = min(len(p1), len(p2), di)
|
||||
di = next((i for i in range(di) if p1[i] != p2[i]), di)
|
||||
|
||||
return di + 1
|
||||
|
||||
|
||||
def find_vol_root(inf):
|
||||
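# length of the absolute-path prefix up to the volume root (abspath minus the virtual path)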
return len(inf[0]["ap"][: -len(inf[0]["vp"])])
|
||||
|
||||
|
||||
def main():
|
||||
zb = sys.stdin.buffer.read()
|
||||
zs = zb.decode("utf-8", "replace")
|
||||
inf = json.loads(zs)
|
||||
|
||||
# root directory (where to put the sha512 file);
|
||||
# di = find_files_root(inf) # next to the file closest to volume root
|
||||
di = find_vol_root(inf) # top of the entire volume
|
||||
|
||||
ret = []
|
||||
total_sz = 0
|
||||
for md in inf:
|
||||
ap = md["ap"]
|
||||
rp = ap[di:]
|
||||
total_sz += md["sz"]
|
||||
fsize = "{:,}".format(md["sz"])
|
||||
mtime = humantime(md["mt"])
|
||||
up_ts = humantime(md["at"])
|
||||
|
||||
h = hashlib.sha512()
|
||||
with open(fsenc(md["ap"]), "rb", 512 * 1024) as f:
|
||||
while True:
|
||||
buf = f.read(512 * 1024)
|
||||
if not buf:
|
||||
break
|
||||
|
||||
h.update(buf)
|
||||
|
||||
cksum = h.hexdigest()
|
||||
meta = " | ".join([md["wark"], up_ts, mtime, fsize, md["ip"]])
|
||||
ret.append("# {}\n{} *{}".format(meta, cksum, rp))
|
||||
|
||||
ret.append("# {} files, {} bytes total".format(len(inf), total_sz))
|
||||
ret.append("")
|
||||
ftime = datetime.now(UTC).strftime("%Y-%m%d-%H%M%S.%f")
|
||||
fp = "{}xfer-{}.sha512".format(inf[0]["ap"][:di], ftime)
|
||||
with open(fsenc(fp), "wb") as f:
|
||||
f.write("\n".join(ret).encode("utf-8", "replace"))
|
||||
|
||||
print("wrote checksums to {}".format(fp))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,50 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import json
|
||||
import sys
|
||||
|
||||
|
||||
_ = r"""
|
||||
this hook prints absolute filepaths + total size
|
||||
|
||||
use this with --xiu, which makes copyparty buffer
|
||||
uploads until server is idle, providing file infos
|
||||
on stdin (filepaths or json)
|
||||
|
||||
example usage as global config:
|
||||
--xiu i1,j,bin/hooks/xiu.py
|
||||
|
||||
example usage as a volflag (per-volume config):
|
||||
-v srv/inc:inc:r:rw,ed:c,xiu=i1,j,bin/hooks/xiu.py
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
|
||||
(share filesystem-path srv/inc as volume /inc,
|
||||
readable by everyone, read-write for user 'ed',
|
||||
running this plugin on batches of uploads with the params listed below)
|
||||
|
||||
parameters explained,
|
||||
xiu = execute after uploads...
|
||||
i1 = ...after volume has been idle for 1sec
|
||||
j = provide json instead of filepath list
|
||||
|
||||
note the "f" (fork) flag is not set, so this xiu
|
||||
will block other xiu hooks while it's running
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
zb = sys.stdin.buffer.read()
|
||||
zs = zb.decode("utf-8", "replace")
|
||||
inf = json.loads(zs)
|
||||
|
||||
total_sz = 0
|
||||
for upload in inf:
|
||||
sz = upload["sz"]
|
||||
total_sz += sz
|
||||
print("{:9} {}".format(sz, upload["ap"]))
|
||||
|
||||
print("{} files, {} bytes total".format(len(inf), total_sz))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,9 +1,5 @@
|
|||
standalone programs which take an audio file as argument
|
||||
|
||||
you may want to forget about all this fancy complicated stuff and just use [event hooks](../hooks/) instead (which doesn't need `-e2ts` or ffmpeg)
|
||||
|
||||
----
|
||||
|
||||
**NOTE:** these all require `-e2ts` to be functional, meaning you need to do at least one of these: `apt install ffmpeg` or `pip3 install mutagen`
|
||||
|
||||
some of these rely on libraries which are not MIT-compatible
|
||||
|
@ -21,19 +17,6 @@ these do not have any problematic dependencies at all:
|
|||
* [cksum.py](./cksum.py) computes various checksums
|
||||
* [exe.py](./exe.py) grabs metadata from .exe and .dll files (example for retrieving multiple tags with one parser)
|
||||
* [wget.py](./wget.py) lets you download files by POSTing URLs to copyparty
|
||||
* also available as an [event hook](../hooks/wget.py)
|
||||
|
||||
|
||||
## dangerous plugins
|
||||
|
||||
plugins in this section should only be used with appropriate precautions:
|
||||
|
||||
* [very-bad-idea.py](./very-bad-idea.py) combined with [meadup.js](https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js) converts copyparty into a janky yet extremely flexible chromecast clone
|
||||
* also adds a virtual keyboard by @steinuil to the basic-upload tab for comfy couch crowd control
|
||||
* anything uploaded through the [android app](https://github.com/9001/party-up) (files or links) are executed on the server, meaning anyone can infect your PC with malware... so protect this with a password and keep it on a LAN!
|
||||
* [kamelåså](https://github.com/steinuil/kameloso) is a much better (and MUCH safer) alternative to this plugin
|
||||
* powered by [chicken-curry-banana-pineapple-peanut pizza](https://a.ocv.me/pub/g/i/2025/01/298437ce-8351-4c8c-861c-fa131d217999.jpg?cache) so you know it's good
|
||||
* and, unlike this plugin, kamelåså even has windows support (nice)
|
||||
|
||||
|
||||
# dependencies
|
||||
|
@ -43,7 +26,7 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ
|
|||
*alternatively* (or preferably) use packages from your distro instead, then you'll need at least these:
|
||||
|
||||
* from distro: `numpy vamp-plugin-sdk beatroot-vamp mixxx-keyfinder ffmpeg`
|
||||
* from pip: `keyfinder vamp`
|
||||
* from pypy: `keyfinder vamp`
|
||||
|
||||
|
||||
# usage from copyparty
|
||||
|
@ -59,7 +42,7 @@ run [`install-deps.sh`](install-deps.sh) to build/install most dependencies requ
|
|||
* `mtp` modules will not run if a file has existing tags in the db, so clear out the tags with `-e2tsr` the first time you launch with new `mtp` options
|
||||
|
||||
|
||||
## usage with volflags
|
||||
## usage with volume-flags
|
||||
|
||||
instead of affecting all volumes, you can set the options for just one volume like so:
|
||||
|
||||
|
|
|
@ -16,10 +16,6 @@ dep: ffmpeg
|
|||
"""
|
||||
|
||||
|
||||
# save beat timestamps to ".beats/filename.txt"
|
||||
SAVE = False
|
||||
|
||||
|
||||
def det(tf):
|
||||
# fmt: off
|
||||
sp.check_call([
|
||||
|
@ -27,11 +23,12 @@ def det(tf):
|
|||
b"-nostdin",
|
||||
b"-hide_banner",
|
||||
b"-v", b"fatal",
|
||||
b"-ss", b"13",
|
||||
b"-y", b"-i", fsenc(sys.argv[1]),
|
||||
b"-map", b"0:a:0",
|
||||
b"-ac", b"1",
|
||||
b"-ar", b"22050",
|
||||
b"-t", b"360",
|
||||
b"-t", b"300",
|
||||
b"-f", b"f32le",
|
||||
fsenc(tf)
|
||||
])
|
||||
|
@ -50,29 +47,10 @@ def det(tf):
|
|||
print(c["list"][0]["label"].split(" ")[0])
|
||||
return
|
||||
|
||||
# throws if detection failed:
|
||||
beats = [float(x["timestamp"]) for x in cl]
|
||||
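# estimate BPM from the spacing between detected beats, keeping roughly the 20th..75th percentile of intervals to discard outliers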
bds = [b - a for a, b in zip(beats, beats[1:])]
|
||||
bds.sort()
|
||||
n0 = int(len(bds) * 0.2)
|
||||
n1 = int(len(bds) * 0.75) + 1
|
||||
bds = bds[n0:n1]
|
||||
bpm = sum(bds)
|
||||
bpm = round(60 * (len(bds) / bpm), 2)
|
||||
print(f"{bpm:.2f}")
|
||||
|
||||
if SAVE:
|
||||
fdir, fname = os.path.split(sys.argv[1])
|
||||
bdir = os.path.join(fdir, ".beats")
|
||||
try:
|
||||
os.mkdir(fsenc(bdir))
|
||||
except:
|
||||
pass
|
||||
|
||||
fp = os.path.join(bdir, fname) + ".txt"
|
||||
with open(fsenc(fp), "wb") as f:
|
||||
txt = "\n".join([f"{x:.2f}" for x in beats])
|
||||
f.write(txt.encode("utf-8"))
|
||||
# throws if detection failed:
|
||||
bpm = float(cl[-1]["timestamp"] - cl[1]["timestamp"])
|
||||
bpm = round(60 * ((len(cl) - 1) / bpm), 2)
|
||||
print(f"{bpm:.2f}")
|
||||
|
||||
|
||||
def main():
|
||||
|
|
|
@ -2,15 +2,11 @@
|
|||
|
||||
import sys
|
||||
import json
|
||||
import zlib
|
||||
import struct
|
||||
import base64
|
||||
import hashlib
|
||||
|
||||
try:
|
||||
from zlib_ng import zlib_ng as zlib
|
||||
except:
|
||||
import zlib
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
@ -21,7 +17,7 @@ except:
|
|||
|
||||
"""
|
||||
calculates various checksums for uploads,
|
||||
usage: -mtp crc32,md5,sha1,sha256b=ad,bin/mtag/cksum.py
|
||||
usage: -mtp crc32,md5,sha1,sha256b=bin/mtag/cksum.py
|
||||
"""
|
||||
|
||||
|
||||
|
|
|
@ -1,61 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
fetch latest msg from guestbook and return as tag
|
||||
|
||||
example copyparty config to use this:
|
||||
--urlform save,get -vsrv/hello:hello:w:c,e2ts,mtp=guestbook=t10,ad,p,bin/mtag/guestbook-read.py:mte=+guestbook
|
||||
|
||||
explained:
|
||||
for realpath srv/hello (served at /hello), write-only for everyone,
|
||||
enable file analysis on upload (e2ts),
|
||||
use mtp plugin "bin/mtag/guestbook-read.py" to provide metadata tag "guestbook",
|
||||
do this on all uploads regardless of extension,
|
||||
t10 = 10 seconds timeout for each download,
|
||||
ad = parse file regardless if FFmpeg thinks it is audio or not
|
||||
p = request upload info as json on stdin (need ip)
|
||||
mte=+guestbook enabled indexing of that tag for this volume
|
||||
|
||||
PS: this requires e2ts to be functional,
|
||||
meaning you need to do at least one of these:
|
||||
* apt install ffmpeg
|
||||
* pip3 install mutagen
|
||||
"""
|
||||
|
||||
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
import sys
|
||||
|
||||
|
||||
# set 0 to allow infinite msgs from one IP,
|
||||
# other values delete older messages to make space,
|
||||
# so 1 only keeps latest msg
|
||||
NUM_MSGS_TO_KEEP = 1
|
||||
|
||||
|
||||
def main():
|
||||
fp = os.path.abspath(sys.argv[1])
|
||||
fdir = os.path.dirname(fp)
|
||||
|
||||
zb = sys.stdin.buffer.read()
|
||||
zs = zb.decode("utf-8", "replace")
|
||||
md = json.loads(zs)
|
||||
|
||||
ip = md["up_ip"]
|
||||
|
||||
# can put the database inside `fdir` if you'd like,
|
||||
# by default it saves to PWD:
|
||||
# os.chdir(fdir)
|
||||
|
||||
db = sqlite3.connect("guestbook.db3")
|
||||
with db:
|
||||
t = "select msg from gb where ip = ? order by ts desc"
|
||||
r = db.execute(t, (ip,)).fetchone()
|
||||
if r:
|
||||
print(r[0])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,111 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
store messages from users in an sqlite database
|
||||
which can be read from another mtp for example
|
||||
|
||||
takes input from application/x-www-form-urlencoded POSTs,
|
||||
for example using the message/pager function on the website
|
||||
|
||||
example copyparty config to use this:
|
||||
--urlform save,get -vsrv/hello:hello:w:c,e2ts,mtp=xgb=ebin,t10,ad,p,bin/mtag/guestbook.py:mte=+xgb
|
||||
|
||||
explained:
|
||||
for realpath srv/hello (served at /hello), write-only for everyone,
|
||||
enable file analysis on upload (e2ts),
|
||||
use mtp plugin "bin/mtag/guestbook.py" to provide metadata tag "xgb",
|
||||
do this on all uploads with the file extension "bin",
|
||||
t300 = 300 seconds timeout for each download,
|
||||
ad = parse file regardless if FFmpeg thinks it is audio or not
|
||||
p = request upload info as json on stdin
|
||||
mte=+xgb enabled indexing of that tag for this volume
|
||||
|
||||
PS: this requires e2ts to be functional,
|
||||
meaning you need to do at least one of these:
|
||||
* apt install ffmpeg
|
||||
* pip3 install mutagen
|
||||
"""
|
||||
|
||||
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
import sys
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
# set 0 to allow infinite msgs from one IP,
|
||||
# other values delete older messages to make space,
|
||||
# so 1 only keeps latest msg
|
||||
NUM_MSGS_TO_KEEP = 1
|
||||
|
||||
|
||||
def main():
|
||||
fp = os.path.abspath(sys.argv[1])
|
||||
fdir = os.path.dirname(fp)
|
||||
fname = os.path.basename(fp)
|
||||
if not fname.startswith("put-") or not fname.endswith(".bin"):
|
||||
raise Exception("not a post file")
|
||||
|
||||
zb = sys.stdin.buffer.read()
|
||||
zs = zb.decode("utf-8", "replace")
|
||||
md = json.loads(zs)
|
||||
|
||||
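# read the urlencoded POST body, rejecting anything larger than 4 KiB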
buf = b""
|
||||
with open(fp, "rb") as f:
|
||||
while True:
|
||||
b = f.read(4096)
|
||||
buf += b
|
||||
if len(buf) > 4096:
|
||||
raise Exception("too big")
|
||||
|
||||
if not b:
|
||||
break
|
||||
|
||||
if not buf:
|
||||
raise Exception("file is empty")
|
||||
|
||||
buf = unquote(buf.replace(b"+", b" "))
|
||||
txt = buf.decode("utf-8")
|
||||
|
||||
if not txt.startswith("msg="):
|
||||
raise Exception("does not start with msg=")
|
||||
|
||||
ip = md["up_ip"]
|
||||
ts = md["up_at"]
|
||||
txt = txt[4:]
|
||||
|
||||
# can put the database inside `fdir` if you'd like,
|
||||
# by default it saves to PWD:
|
||||
# os.chdir(fdir)
|
||||
|
||||
db = sqlite3.connect("guestbook.db3")
|
||||
try:
|
||||
db.execute("select 1 from gb").fetchone()
|
||||
except:
|
||||
with db:
|
||||
db.execute("create table gb (ip text, ts real, msg text)")
|
||||
db.execute("create index gb_ip on gb(ip)")
|
||||
|
||||
with db:
|
||||
if NUM_MSGS_TO_KEEP == 1:
|
||||
t = "delete from gb where ip = ?"
|
||||
db.execute(t, (ip,))
|
||||
|
||||
t = "insert into gb values (?,?,?)"
|
||||
db.execute(t, (ip, ts, txt))
|
||||
|
||||
if NUM_MSGS_TO_KEEP > 1:
|
||||
t = "select ts from gb where ip = ? order by ts desc"
|
||||
hits = db.execute(t, (ip,)).fetchall()
|
||||
|
||||
if len(hits) > NUM_MSGS_TO_KEEP:
|
||||
lim = hits[NUM_MSGS_TO_KEEP][0]
|
||||
t = "delete from gb where ip = ? and ts <= ?"
|
||||
db.execute(t, (ip, lim))
|
||||
|
||||
print(txt)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -43,6 +43,7 @@ PS: this requires e2ts to be functional,
|
|||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import filecmp
|
||||
import subprocess as sp
|
||||
|
||||
|
@ -61,7 +62,7 @@ def main():
|
|||
|
||||
os.chdir(cwd)
|
||||
f1 = fsenc(fn)
|
||||
f2 = fsenc(os.path.join(b"noexif", fn))
|
||||
f2 = os.path.join(b"noexif", f1)
|
||||
cmd = [
|
||||
b"exiftool",
|
||||
b"-exif:all=",
|
||||
|
@ -89,7 +90,4 @@ def main():
|
|||
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except:
|
||||
pass
|
||||
main()
|
||||
|
|
|
@ -4,10 +4,8 @@ set -e
|
|||
|
||||
# install dependencies for audio-*.py
|
||||
#
|
||||
# linux/alpine: requires gcc g++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-dev py3-{wheel,pip} py3-numpy{,-dev}
|
||||
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3,libsndfile1}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
|
||||
# linux/fedora: requires gcc gcc-c++ make cmake patchelf {python3,ffmpeg,fftw,libsndfile}-devel python3-numpy vamp-plugin-sdk qm-vamp-plugins
|
||||
# linux/arch: requires gcc make cmake patchelf python3 ffmpeg fftw libsndfile python-{numpy,wheel,pip,setuptools}
|
||||
# linux/alpine: requires {python3,ffmpeg,fftw}-dev py3-{wheel,pip} py3-numpy{,-dev} vamp-sdk-dev patchelf cmake
|
||||
# linux/debian: requires libav{codec,device,filter,format,resample,util}-dev {libfftw3,python3}-dev python3-{numpy,pip} vamp-{plugin-sdk,examples} patchelf cmake
|
||||
# win64: requires msys2-mingw64 environment
|
||||
# macos: requires macports
|
||||
#
|
||||
|
@ -22,8 +20,6 @@ set -e
|
|||
# modifies the keyfinder python lib to load the .so in ~/pe
|
||||
|
||||
|
||||
export FORCE_COLOR=1
|
||||
|
||||
linux=1
|
||||
|
||||
win=
|
||||
|
@ -60,7 +56,6 @@ hash -r
|
|||
command -v python3 && pybin=python3 || pybin=python
|
||||
}
|
||||
|
||||
$pybin -c 'import numpy' ||
|
||||
$pybin -m pip install --user numpy
|
||||
|
||||
|
||||
|
@ -106,11 +101,8 @@ export -f dl_files
|
|||
|
||||
|
||||
github_tarball() {
|
||||
rm -rf g
|
||||
mkdir g
|
||||
cd g
|
||||
dl_text "$1" |
|
||||
tee ../json |
|
||||
tee json |
|
||||
(
|
||||
# prefer jq if available
|
||||
jq -r '.tarball_url' ||
|
||||
|
@ -119,11 +111,8 @@ github_tarball() {
|
|||
awk -F\" '/"tarball_url": "/ {print$4}'
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
head -n 1 |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
mv * ../tgz
|
||||
cd ..
|
||||
}
|
||||
|
||||
|
||||
|
@ -138,7 +127,6 @@ gitlab_tarball() {
|
|||
tr \" '\n' | grep -E '\.tar\.gz$' | head -n 1
|
||||
) |
|
||||
tee /dev/stderr |
|
||||
head -n 1 |
|
||||
tr -d '\r' | tr '\n' '\0' |
|
||||
tee links |
|
||||
xargs -0 bash -c 'dl_files "$@"' _
|
||||
|
@ -150,27 +138,20 @@ install_keyfinder() {
|
|||
# use msys2 in mingw-w64 mode
|
||||
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python}
|
||||
|
||||
[ -e $HOME/pe/keyfinder ] && {
|
||||
echo found a keyfinder build in ~/pe, skipping
|
||||
return
|
||||
}
|
||||
|
||||
cd "$td"
|
||||
github_tarball https://api.github.com/repos/mixxxdj/libkeyfinder/releases/latest
|
||||
ls -al
|
||||
|
||||
tar -xf tgz
|
||||
rm tgz
|
||||
tar -xf mixxxdj-libkeyfinder-*
|
||||
rm -- *.tar.gz
|
||||
cd mixxxdj-libkeyfinder*
|
||||
|
||||
h="$HOME"
|
||||
so="lib/libkeyfinder.so"
|
||||
memes=(-DBUILD_TESTING=OFF)
|
||||
memes=()
|
||||
|
||||
[ $win ] &&
|
||||
so="bin/libkeyfinder.dll" &&
|
||||
h="$(printf '%s\n' "$USERPROFILE" | tr '\\' '/')" &&
|
||||
memes+=(-G "MinGW Makefiles")
|
||||
memes+=(-G "MinGW Makefiles" -DBUILD_TESTING=OFF)
|
||||
|
||||
[ $mac ] &&
|
||||
so="lib/libkeyfinder.dylib"
|
||||
|
@ -189,14 +170,11 @@ install_keyfinder() {
|
|||
exit 1
|
||||
}
|
||||
|
||||
x=${-//[^x]/}; set -x; cat /etc/alpine-release
|
||||
# rm -rf /Users/ed/Library/Python/3.9/lib/python/site-packages/*keyfinder*
|
||||
CFLAGS="-I$h/pe/keyfinder/include -I/opt/local/include -I/usr/include/ffmpeg" \
|
||||
CXXFLAGS="-I$h/pe/keyfinder/include -I/opt/local/include -I/usr/include/ffmpeg" \
|
||||
CFLAGS="-I$h/pe/keyfinder/include -I/opt/local/include" \
|
||||
LDFLAGS="-L$h/pe/keyfinder/lib -L$h/pe/keyfinder/lib64 -L/opt/local/lib" \
|
||||
PKG_CONFIG_PATH="/c/msys64/mingw64/lib/pkgconfig:$h/pe/keyfinder/lib/pkgconfig" \
|
||||
PKG_CONFIG_PATH=/c/msys64/mingw64/lib/pkgconfig \
|
||||
$pybin -m pip install --user keyfinder
|
||||
[ "$x" ] || set +x
|
||||
|
||||
pypath="$($pybin -c 'import keyfinder; print(keyfinder.__file__)')"
|
||||
for pyso in "${pypath%/*}"/*.so; do
|
||||
|
@ -228,41 +206,17 @@ install_vamp() {
|
|||
# use msys2 in mingw-w64 mode
|
||||
# pacman -S --needed mingw-w64-x86_64-{ffmpeg,python,python-pip,vamp-plugin-sdk}
|
||||
|
||||
$pybin -m pip install --user vamp || {
|
||||
printf '\n\033[7malright, trying something else...\033[0m\n'
|
||||
$pybin -m pip install --user --no-build-isolation vamp
|
||||
}
|
||||
$pybin -m pip install --user vamp
|
||||
|
||||
cd "$td"
|
||||
echo '#include <vamp-sdk/Plugin.h>' | g++ -x c++ -c -o /dev/null - || [ -e ~/pe/vamp-sdk ] || {
|
||||
printf '\033[33mcould not find the vamp-sdk, building from source\033[0m\n'
|
||||
(dl_files yolo https://ocv.me/mirror/vamp-plugin-sdk-2.10.0.tar.gz)
|
||||
sha512sum -c <(
|
||||
echo "153b7f2fa01b77c65ad393ca0689742d66421017fd5931d216caa0fcf6909355fff74706fabbc062a3a04588a619c9b515a1dae00f21a57afd97902a355c48ed -"
|
||||
) <vamp-plugin-sdk-2.10.0.tar.gz
|
||||
tar -xf vamp-plugin-sdk-2.10.0.tar.gz
|
||||
rm -- *.tar.gz
|
||||
ls -al
|
||||
cd vamp-plugin-sdk-*
|
||||
printf '%s\n' "int main(int argc, char **argv) { return 0; }" > host/vamp-simple-host.cpp
|
||||
./configure --disable-programs --prefix=$HOME/pe/vamp-sdk
|
||||
make -j1 install
|
||||
}
|
||||
|
||||
cd "$td"
|
||||
have_beatroot || {
|
||||
printf '\033[33mcould not find the vamp beatroot plugin, building from source\033[0m\n'
|
||||
(dl_files yolo https://ocv.me/mirror/beatroot-vamp-v1.0.tar.gz)
|
||||
(dl_files yolo https://code.soundsoftware.ac.uk/attachments/download/885/beatroot-vamp-v1.0.tar.gz)
|
||||
sha512sum -c <(
|
||||
echo "1f444d1d58ccf565c0adfe99f1a1aa62789e19f5071e46857e2adfbc9d453037bc1c4dcb039b02c16240e9b97f444aaff3afb625c86aa2470233e711f55b6874 -"
|
||||
) <beatroot-vamp-v1.0.tar.gz
|
||||
tar -xf beatroot-vamp-v1.0.tar.gz
|
||||
rm -- *.tar.gz
|
||||
cd beatroot-vamp-v1.0
|
||||
[ -e ~/pe/vamp-sdk ] &&
|
||||
sed -ri 's`^(CFLAGS :=.*)`\1 -I'$HOME'/pe/vamp-sdk/include`' Makefile.linux ||
|
||||
sed -ri 's`^(CFLAGS :=.*)`\1 -I/usr/include/vamp-sdk`' Makefile.linux
|
||||
make -f Makefile.linux -j4 LDFLAGS="-L$HOME/pe/vamp-sdk/lib -L/usr/lib64"
|
||||
make -f Makefile.linux -j4
|
||||
# /home/ed/vamp /home/ed/.vamp /usr/local/lib/vamp
|
||||
mkdir ~/vamp
|
||||
cp -pv beatroot-vamp.* ~/vamp/
|
||||
|
@ -276,7 +230,6 @@ install_vamp() {
|
|||
|
||||
# not in use because it kinda segfaults, also no windows support
|
||||
install_soundtouch() {
|
||||
cd "$td"
|
||||
gitlab_tarball https://gitlab.com/api/v4/projects/soundtouch%2Fsoundtouch/releases
|
||||
|
||||
tar -xvf soundtouch-*
|
||||
|
|
|
@ -1,38 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess as sp
|
||||
|
||||
|
||||
"""
|
||||
mtp test -- opens a text editor
|
||||
|
||||
usage:
|
||||
-vsrv/v1:v1:r:c,mte=+x1:c,mtp=x1=ad,p,bin/mtag/mousepad.py
|
||||
|
||||
explained:
|
||||
c,mte: list of tags to index in this volume
|
||||
c,mtp: add new tag provider
|
||||
x1: dummy tag to provide
|
||||
ad: dontcare if audio or not
|
||||
p: priority 1 (run after initial tag-scan with ffprobe or mutagen)
|
||||
"""
|
||||
|
||||
|
||||
def main():
|
||||
env = os.environ.copy()
|
||||
env["DISPLAY"] = ":0.0"
|
||||
|
||||
if False:
|
||||
# open the uploaded file
|
||||
fp = sys.argv[-1]
|
||||
else:
|
||||
# display stdin contents (`oth_tags`)
|
||||
fp = "/dev/stdin"
|
||||
|
||||
p = sp.Popen(["/usr/bin/mousepad", fp])
|
||||
p.communicate()
|
||||
|
||||
|
||||
main()
|
|
@ -1,76 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
import json
|
||||
import os
|
||||
import subprocess as sp
|
||||
import sys
|
||||
import time
|
||||
|
||||
try:
|
||||
from copyparty.util import fsenc
|
||||
except:
|
||||
|
||||
def fsenc(p):
|
||||
return p.encode("utf-8")
|
||||
|
||||
|
||||
_ = r"""
|
||||
first checks the tag "vidchk" which must be "ok" to continue,
|
||||
then uploads all files to some cloud storage (RCLONE_REMOTE)
|
||||
and DELETES THE ORIGINAL FILES if rclone returns 0 ("success")
|
||||
|
||||
deps:
|
||||
rclone
|
||||
|
||||
usage:
|
||||
-mtp x2=t43200,ay,p2,bin/mtag/rclone-upload.py
|
||||
|
||||
explained:
|
||||
t43200: timeout 12h
|
||||
ay: only process files which contain audio (including video with audio)
|
||||
p2: set priority 2 (after vidchk's suggested priority of 1),
|
||||
so the output of vidchk will be passed in here
|
||||
|
||||
complete usage example as vflags along with vidchk:
|
||||
-vsrv/vidchk:vidchk:r:rw,ed:c,e2dsa,e2ts,mtp=vidchk=t600,p,bin/mtag/vidchk.py:c,mtp=rupload=t43200,ay,p2,bin/mtag/rclone-upload.py:c,mte=+vidchk,rupload
|
||||
|
||||
setup: see https://rclone.org/drive/
|
||||
|
||||
if you wanna use this script standalone / separately from copyparty,
|
||||
either set CONDITIONAL_UPLOAD False or provide the following stdin:
|
||||
{"vidchk":"ok"}
|
||||
"""
|
||||
|
||||
|
||||
RCLONE_REMOTE = "notmybox"
|
||||
CONDITIONAL_UPLOAD = True
|
||||
|
||||
|
||||
def main():
|
||||
fp = sys.argv[1]
|
||||
if CONDITIONAL_UPLOAD:
|
||||
zb = sys.stdin.buffer.read()
|
||||
zs = zb.decode("utf-8", "replace")
|
||||
md = json.loads(zs)
|
||||
|
||||
chk = md.get("vidchk", None)
|
||||
if chk != "ok":
|
||||
print(f"vidchk={chk}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
dst = f"{RCLONE_REMOTE}:".encode("utf-8")
|
||||
cmd = [b"rclone", b"copy", b"--", fsenc(fp), dst]
|
||||
|
||||
t0 = time.time()
|
||||
try:
|
||||
sp.check_call(cmd)
|
||||
except:
|
||||
print("rclone failed", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
print(f"{time.time() - t0:.1f} sec")
|
||||
os.unlink(fsenc(fp))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@@ -1,21 +0,0 @@
// ==UserScript==
// @name         twitter-unmute
// @namespace    http://ocv.me/
// @version      0.1
// @description  memes
// @author       ed <irc.rizon.net>
// @match        https://twitter.com/*
// @icon         https://www.google.com/s2/favicons?domain=twitter.com
// @grant        GM_addStyle
// ==/UserScript==

function grunnur() {
    setInterval(function () {
        //document.querySelector('div[aria-label="Unmute"]').click();
        document.querySelector('video').muted = false;
    }, 200);
}

var scr = document.createElement('script');
scr.textContent = '(' + grunnur.toString() + ')();';
(document.head || document.getElementsByTagName('head')[0]).appendChild(scr);
@ -1,210 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
WARNING -- DANGEROUS PLUGIN --
|
||||
if someone is able to upload files to a copyparty which is
|
||||
running this plugin, they can execute malware on your machine
|
||||
so please keep this on a LAN and protect it with a password
|
||||
|
||||
here is a MUCH BETTER ALTERNATIVE (which also works on Windows):
|
||||
https://github.com/steinuil/kameloso
|
||||
|
||||
----------------------------------------------------------------------
|
||||
|
||||
use copyparty as a chromecast replacement:
|
||||
* post a URL and it will open in the default browser
|
||||
* upload a file and it will open in the default application
|
||||
* the `key` command simulates keyboard input
|
||||
* the `x` command executes other xdotool commands
|
||||
* the `c` command executes arbitrary unix commands
|
||||
|
||||
the android app makes it a breeze to post pics and links:
|
||||
https://github.com/9001/party-up/releases
|
||||
|
||||
iOS devices can use the web-UI or the shortcut instead:
|
||||
https://github.com/9001/copyparty#ios-shortcuts
|
||||
|
||||
example copyparty config to use this;
|
||||
lets the user "kevin" with password "hunter2" use this plugin:
|
||||
-a kevin:hunter2 --urlform save,get -v.::w,kevin:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,c0,bin/mtag/very-bad-idea.py
|
||||
|
||||
recommended deps:
|
||||
apt install xdotool libnotify-bin mpv
|
||||
python3 -m pip install --user -U streamlink yt-dlp
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/contrib/plugins/meadup.js
|
||||
|
||||
and you probably want `twitter-unmute.user.js` from the res folder
|
||||
|
||||
|
||||
-----------------------------------------------------------------------
|
||||
-- startup script:
|
||||
-----------------------------------------------------------------------
|
||||
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# create qr code
|
||||
ip=$(ip r | awk '/^default/{print$(NF-2)}'); echo http://$ip:3923/ | qrencode -o - -s 4 >/dev/shm/cpp-qr.png
|
||||
/usr/bin/feh -x /dev/shm/cpp-qr.png &
|
||||
|
||||
# reposition and make topmost (with janky raspbian support)
|
||||
( sleep 0.5
|
||||
xdotool search --name cpp-qr.png windowactivate --sync windowmove 1780 0
|
||||
wmctrl -r :ACTIVE: -b toggle,above || true
|
||||
|
||||
ps aux | grep -E 'sleep[ ]7\.27' ||
|
||||
while true; do
|
||||
w=$(xdotool getactivewindow)
|
||||
xdotool search --name cpp-qr.png windowactivate windowraise windowfocus
|
||||
xdotool windowactivate $w
|
||||
xdotool windowfocus $w
|
||||
sleep 7.27 || break
|
||||
done &
|
||||
xeyes # distraction window to prevent ^w from closing the qr-code
|
||||
) &
|
||||
|
||||
# bail if copyparty is already running
|
||||
ps aux | grep -E '[3] copy[p]arty' && exit 0
|
||||
|
||||
# dumb chrome wrapper to allow autoplay
|
||||
cat >/usr/local/bin/chromium-browser <<'EOF'
|
||||
#!/bin/bash
|
||||
set -e
|
||||
/usr/bin/chromium-browser --autoplay-policy=no-user-gesture-required "$@"
|
||||
EOF
|
||||
chmod 755 /usr/local/bin/chromium-browser
|
||||
|
||||
# start the server
|
||||
# note 1: replace hunter2 with a better password to access the server
|
||||
# note 2: replace `-v.::rw` with `-v.::w` to disallow retrieving uploaded stuff
|
||||
cd ~/Downloads; python3 copyparty-sfx.py -a kevin:hunter2 --urlform save,get -v.::rw,kevin:c,e2d,e2t,mte=+a1:c,mtp=a1=ad,kn,very-bad-idea.py
|
||||
|
||||
"""
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import shutil
|
||||
import subprocess as sp
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
from urllib.parse import quote
|
||||
|
||||
have_mpv = shutil.which("mpv")
|
||||
have_vlc = shutil.which("vlc")
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) > 2 and sys.argv[1] == "x":
|
||||
# invoked on commandline for testing;
|
||||
# python3 very-bad-idea.py x msg=https://youtu.be/dQw4w9WgXcQ
|
||||
txt = " ".join(sys.argv[2:])
|
||||
txt = quote(txt.replace(" ", "+"))
|
||||
return open_post(txt.encode("utf-8"))
|
||||
|
||||
fp = os.path.abspath(sys.argv[1])
|
||||
with open(fp, "rb") as f:
|
||||
txt = f.read(4096)
|
||||
|
||||
if txt.startswith(b"msg="):
|
||||
open_post(txt)
|
||||
else:
|
||||
open_url(fp)
|
||||
|
||||
|
||||
def open_post(txt):
|
||||
txt = unquote(txt.replace(b"+", b" ")).decode("utf-8")[4:]
|
||||
try:
|
||||
k, v = txt.split(" ", 1)
|
||||
except:
|
||||
return open_url(txt)
|
||||
|
||||
if k == "key":
|
||||
sp.call(["xdotool", "key"] + v.split(" "))
|
||||
elif k == "x":
|
||||
sp.call(["xdotool"] + v.split(" "))
|
||||
elif k == "c":
|
||||
env = os.environ.copy()
|
||||
while " " in v:
|
||||
v1, v2 = v.split(" ", 1)
|
||||
if "=" not in v1:
|
||||
break
|
||||
|
||||
ek, ev = v1.split("=", 1)
|
||||
env[ek] = ev
|
||||
v = v2
|
||||
|
||||
sp.call(v.split(" "), env=env)
|
||||
else:
|
||||
open_url(txt)
|
||||
|
||||
|
||||
def open_url(txt):
|
||||
ext = txt.rsplit(".")[-1].lower()
|
||||
sp.call(["notify-send", "--", txt])
|
||||
if ext not in ["jpg", "jpeg", "png", "gif", "webp"]:
|
||||
# sp.call(["wmctrl", "-c", ":ACTIVE:"]) # closes the active window correctly
|
||||
sp.call(["killall", "vlc"])
|
||||
sp.call(["killall", "mpv"])
|
||||
sp.call(["killall", "feh"])
|
||||
time.sleep(0.5)
|
||||
for _ in range(20):
|
||||
sp.call(["xdotool", "key", "ctrl+w"]) # closes the open tab correctly
|
||||
# else:
|
||||
# sp.call(["xdotool", "getactivewindow", "windowminimize"]) # minimizes the focused windo
|
||||
|
||||
# mpv is probably smart enough to use streamlink automatically
|
||||
if try_mpv(txt):
|
||||
print("mpv got it")
|
||||
return
|
||||
|
||||
# or maybe streamlink would be a good choice to open this
|
||||
if try_streamlink(txt):
|
||||
print("streamlink got it")
|
||||
return
|
||||
|
||||
# nope,
|
||||
# close any error messages:
|
||||
sp.call(["xdotool", "search", "--name", "Error", "windowclose"])
|
||||
# sp.call(["xdotool", "key", "ctrl+alt+d"]) # doesnt work at all
|
||||
# sp.call(["xdotool", "keydown", "--delay", "100", "ctrl+alt+d"])
|
||||
# sp.call(["xdotool", "keyup", "ctrl+alt+d"])
|
||||
sp.call(["xdg-open", txt])
|
||||
|
||||
|
||||
def try_mpv(url):
|
||||
t0 = time.time()
|
||||
try:
|
||||
print("trying mpv...")
|
||||
sp.check_call(["mpv", "--fs", url])
|
||||
return True
|
||||
except:
|
||||
# if it ran for 15 sec it probably succeeded and terminated
|
||||
t = time.time()
|
||||
return t - t0 > 15
|
||||
|
||||
|
||||
def try_streamlink(url):
|
||||
t0 = time.time()
|
||||
try:
|
||||
import streamlink
|
||||
|
||||
print("trying streamlink...")
|
||||
streamlink.Streamlink().resolve_url(url)
|
||||
|
||||
if have_mpv:
|
||||
args = "-m streamlink -p mpv -a --fs"
|
||||
else:
|
||||
args = "-m streamlink"
|
||||
|
||||
cmd = [sys.executable] + args.split() + [url, "best"]
|
||||
t0 = time.time()
|
||||
sp.check_call(cmd)
|
||||
return True
|
||||
except:
|
||||
# if it ran for 10 sec it probably succeeded and terminated
|
||||
t = time.time()
|
||||
return t - t0 > 10
|
||||
|
||||
|
||||
main()
|
|
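A quick way to exercise this plugin from another machine, without the android app or the web-ui, is to POST a `msg=` form value the same way the message/pager function does. The sketch below assumes the example account from the docstring (kevin:hunter2) and a made-up LAN address; both are placeholders.

#!/usr/bin/env python3
# post a URL to a copyparty running very-bad-idea.py, the same way the
# web-ui message/pager function does; server address and login are placeholders
# taken from the example config in the docstring above
import requests

SERVER = "http://192.168.1.9:3923/"  # assumed LAN address of the media-pc
URL = "https://youtu.be/dQw4w9WgXcQ"

# --urlform save,get makes copyparty store the posted message as a file,
# which then gets handed to very-bad-idea.py through the mtp hook
r = requests.post(
    SERVER,
    data={"msg": URL},
    headers={"Cookie": "cppwd=hunter2"},  # same cookie-auth style as up2k.py uses
)
r.raise_for_status()
print("sent", URL)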
@@ -1,131 +0,0 @@
#!/usr/bin/env python3

import json
import re
import os
import sys
import subprocess as sp

try:
    from copyparty.util import fsenc
except:

    def fsenc(p):
        return p.encode("utf-8")


_ = r"""
inspects video files for errors and such
plus stores a bunch of metadata to filename.ff.json

usage:
  -mtp vidchk=t600,ay,p,bin/mtag/vidchk.py

explained:
  t600: timeout 10min
    ay: only process files which contain audio (including video with audio)
     p: set priority 1 (lowest priority after initial ffprobe/mutagen for base tags),
        makes copyparty feed base tags into this script as json

if you wanna use this script standalone / separately from copyparty,
provide the video resolution on stdin as json: {"res":"1920x1080"}
"""


FAST = True  # parse entire file at container level
# FAST = False  # fully decode audio and video streams


# warnings to ignore
harmless = re.compile(
    r"Unsupported codec with id |Could not find codec parameters.*Attachment:|analyzeduration"
    + r"|timescale not set"
)


def wfilter(lines):
    return [x for x in lines if x.strip() and not harmless.search(x)]


def errchk(so, se, rc, dbg):
    if dbg:
        with open(dbg, "wb") as f:
            f.write(b"so:\n" + so + b"\nse:\n" + se + b"\n")

    if rc:
        err = (so + se).decode("utf-8", "replace").split("\n", 1)
        err = wfilter(err) or err
        return f"ERROR {rc}: {err[0]}"

    if se:
        err = se.decode("utf-8", "replace").split("\n", 1)
        err = wfilter(err)
        if err:
            return f"Warning: {err[0]}"

    return None


def main():
    fp = sys.argv[1]
    zb = sys.stdin.buffer.read()
    zs = zb.decode("utf-8", "replace")
    md = json.loads(zs)

    fdir = os.path.dirname(os.path.realpath(fp))
    flag = os.path.join(fdir, ".processed")
    if os.path.exists(flag):
        return "already processed"

    try:
        w, h = [int(x) for x in md["res"].split("x")]
        if not w + h:
            raise Exception()
    except:
        return "could not determine resolution"

    # grab streams/format metadata + 2 seconds of frames at the start and end
    zs = "ffprobe -hide_banner -v warning -of json -show_streams -show_format -show_packets -show_data_hash crc32 -read_intervals %+2,999999%+2"
    cmd = zs.encode("ascii").split(b" ") + [fsenc(fp)]
    p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
    so, se = p.communicate()

    # spaces to tabs, drops filesize from 69k to 48k
    so = b"\n".join(
        [
            b"\t" * int((len(x) - len(x.lstrip())) / 4) + x.lstrip()
            for x in (so or b"").split(b"\n")
        ]
    )
    with open(fsenc(f"{fp}.ff.json"), "wb") as f:
        f.write(so)

    err = errchk(so, se, p.returncode, f"{fp}.vidchk")
    if err:
        return err

    if max(w, h) < 1280 and min(w, h) < 720:
        return "resolution too small"

    zs = (
        "ffmpeg -y -hide_banner -nostdin -v warning"
        + " -err_detect +crccheck+bitstream+buffer+careful+compliant+aggressive+explode"
        + " -xerror -i"
    )

    cmd = zs.encode("ascii").split(b" ") + [fsenc(fp)]

    if FAST:
        zs = "-c copy -f null -"
    else:
        zs = "-vcodec rawvideo -acodec pcm_s16le -f null -"

    cmd += zs.encode("ascii").split(b" ")

    p = sp.Popen(cmd, stdout=sp.PIPE, stderr=sp.PIPE)
    so, se = p.communicate()
    return errchk(so, se, p.returncode, f"{fp}.vidchk")


if __name__ == "__main__":
    print(main() or "ok")
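To run vidchk standalone, as its docstring suggests, feed it the resolution it would otherwise get from copyparty's ffprobe pass; a minimal sketch (assumes ffmpeg/ffprobe are installed, and the filename is a placeholder):

#!/usr/bin/env python3
# run vidchk.py standalone on a single file, feeding it the resolution
# it would normally receive from copyparty as a base tag
import json
import subprocess as sp

video = "clip.mkv"  # placeholder filename
md = json.dumps({"res": "1920x1080"})  # the only base tag vidchk needs

p = sp.run(
    ["python3", "bin/mtag/vidchk.py", video],
    input=md.encode("utf-8"),
    capture_output=True,
)
# vidchk prints "ok", a warning, or an error description
print(p.stdout.decode("utf-8", "replace").strip())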
@@ -1,11 +1,6 @@
#!/usr/bin/env python3

"""
DEPRECATED -- replaced by event hooks;
https://github.com/9001/copyparty/blob/hovudstraum/bin/hooks/wget.py

---

use copyparty as a file downloader by POSTing URLs as
application/x-www-form-urlencoded (for example using the
message/pager function on the website)

@@ -65,10 +60,6 @@ def main():
    if "://" not in url:
        url = "https://" + url

    proto = url.split("://")[0].lower()
    if proto not in ("http", "https", "ftp", "ftps"):
        raise Exception("bad proto {}".format(proto))

    os.chdir(fdir)

    name = url.split("?")[0].split("/")[-1]
@ -1,177 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
partyjournal.py: chronological history of uploads
|
||||
2021-12-31, v0.1, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/partyjournal.py
|
||||
|
||||
produces a chronological list of all uploads,
|
||||
by collecting info from up2k databases and the filesystem
|
||||
|
||||
specify subnet `192.168.1.*` with argument `.=192.168.1.`,
|
||||
affecting all successive mappings
|
||||
|
||||
usage:
|
||||
./partyjournal.py > partyjournal.html .=192.168.1. cart=125 steen=114 steen=131 sleepy=121 fscarlet=144 ed=101 ed=123
|
||||
|
||||
"""
|
||||
|
||||
import sys
|
||||
import base64
|
||||
import sqlite3
|
||||
import argparse
|
||||
from datetime import datetime, timezone
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
|
||||
|
||||
FS_ENCODING = sys.getfilesystemencoding()
|
||||
UTC = timezone.utc
|
||||
|
||||
|
||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||
pass
|
||||
|
||||
|
||||
##
|
||||
## snibbed from copyparty
|
||||
|
||||
|
||||
def s3dec(v):
|
||||
if not v.startswith("//"):
|
||||
return v
|
||||
|
||||
v = base64.urlsafe_b64decode(v.encode("ascii")[2:])
|
||||
return v.decode(FS_ENCODING, "replace")
|
||||
|
||||
|
||||
def quotep(txt):
|
||||
btxt = txt.encode("utf-8", "replace")
|
||||
quot1 = quote(btxt, safe=b"/")
|
||||
quot1 = quot1.encode("ascii")
|
||||
quot2 = quot1.replace(b" ", b"+")
|
||||
return quot2.decode("utf-8", "replace")
|
||||
|
||||
|
||||
def html_escape(s, quote=False, crlf=False):
    """html.escape but also newlines"""
    s = s.replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
    if quote:
        s = s.replace('"', "&quot;").replace("'", "&#39;")
    if crlf:
        s = s.replace("\r", "&#13;").replace("\n", "&#10;")

    return s
|
||||
|
||||
|
||||
## end snibs
|
||||
##
|
||||
|
||||
|
||||
def main():
|
||||
ap = argparse.ArgumentParser(formatter_class=APF)
|
||||
ap.add_argument("who", nargs="*")
|
||||
ar = ap.parse_args()
|
||||
|
||||
imap = {}
|
||||
subnet = ""
|
||||
for v in ar.who:
|
||||
if "=" not in v:
|
||||
raise Exception("bad who: " + v)
|
||||
|
||||
k, v = v.split("=")
|
||||
if k == ".":
|
||||
subnet = v
|
||||
continue
|
||||
|
||||
imap["{}{}".format(subnet, v)] = k
|
||||
|
||||
print(repr(imap), file=sys.stderr)
|
||||
|
||||
print(
|
||||
"""\
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head><meta charset="utf-8"><style>
|
||||
|
||||
html, body {
|
||||
color: #ccc;
|
||||
background: #222;
|
||||
font-family: sans-serif;
|
||||
}
|
||||
a {
|
||||
color: #fc5;
|
||||
}
|
||||
td, th {
|
||||
padding: .2em .5em;
|
||||
border: 1px solid #999;
|
||||
border-width: 0 1px 1px 0;
|
||||
white-space: nowrap;
|
||||
}
|
||||
td:nth-child(1),
|
||||
td:nth-child(2),
|
||||
td:nth-child(3) {
|
||||
font-family: monospace, monospace;
|
||||
text-align: right;
|
||||
}
|
||||
tr:first-child {
|
||||
position: sticky;
|
||||
top: -1px;
|
||||
}
|
||||
th {
|
||||
background: #222;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
</style></head><body><table><tr>
|
||||
<th>wark</th>
|
||||
<th>time</th>
|
||||
<th>size</th>
|
||||
<th>who</th>
|
||||
<th>link</th>
|
||||
</tr>"""
|
||||
)
|
||||
|
||||
db_path = ".hist/up2k.db"
|
||||
conn = sqlite3.connect(db_path)
|
||||
q = r"pragma table_info(up)"
|
||||
inf = conn.execute(q).fetchall()
|
||||
cols = [x[1] for x in inf]
|
||||
print("<!-- " + str(cols) + " -->")
|
||||
# ['w', 'mt', 'sz', 'rd', 'fn', 'ip', 'at']
|
||||
|
||||
q = r"select * from up order by case when at > 0 then at else mt end"
|
||||
for w, mt, sz, rd, fn, ip, at in conn.execute(q):
|
||||
link = "/".join([s3dec(x) for x in [rd, fn] if x])
|
||||
if fn.startswith("put-") and sz < 4096:
|
||||
try:
|
||||
with open(link, "rb") as f:
|
||||
txt = f.read().decode("utf-8", "replace")
|
||||
except:
|
||||
continue
|
||||
|
||||
if txt.startswith("msg="):
|
||||
txt = txt.encode("utf-8", "replace")
|
||||
txt = unquote(txt.replace(b"+", b" "))
|
||||
link = txt.decode("utf-8")[4:]
|
||||
|
||||
sz = "{:,}".format(sz)
|
||||
dt = datetime.fromtimestamp(at if at > 0 else mt, UTC)
|
||||
v = [
|
||||
w[:16],
|
||||
dt.strftime("%Y-%m-%d %H:%M:%S"),
|
||||
sz,
|
||||
imap.get(ip, ip),
|
||||
]
|
||||
|
||||
row = "<tr>\n "
|
||||
row += "\n ".join(["<td>{}</th>".format(x) for x in v])
|
||||
row += '\n <td><a href="{}">{}</a></td>'.format(link, html_escape(link))
|
||||
row += "\n</tr>"
|
||||
print(row)
|
||||
|
||||
print("</table></body></html>")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
bin/prisonparty.sh (118 lines changed; Executable file → Normal file)

@@ -4,40 +4,23 @@ set -e
|
|||
# runs copyparty (or any other program really) in a chroot
|
||||
#
|
||||
# assumption: these directories, and everything within, are owned by root
|
||||
sysdirs=(); for v in /bin /lib /lib32 /lib64 /sbin /usr /etc/alternatives ; do
|
||||
[ -e $v ] && sysdirs+=($v)
|
||||
done
|
||||
sysdirs=( /bin /lib /lib32 /lib64 /sbin /usr )
|
||||
|
||||
|
||||
# error-handler
|
||||
help() { cat <<'EOF'
|
||||
|
||||
usage:
|
||||
./prisonparty.sh <ROOTDIR> <USER|UID> <GROUP|GID> [VOLDIR [VOLDIR...]] -- python3 copyparty-sfx.py [...]
|
||||
./prisonparty.sh <ROOTDIR> <UID> <GID> [VOLDIR [VOLDIR...]] -- copyparty-sfx.py [...]"
|
||||
|
||||
example:
|
||||
./prisonparty.sh /var/lib/copyparty-jail cpp cpp /mnt/nas/music -- python3 copyparty-sfx.py -v /mnt/nas/music::rwmd
|
||||
|
||||
example for running straight from source (instead of using an sfx):
|
||||
PYTHONPATH=$PWD ./prisonparty.sh /var/lib/copyparty-jail cpp cpp /mnt/nas/music -- python3 -um copyparty -v /mnt/nas/music::rwmd
|
||||
|
||||
note that if you have python modules installed as --user (such as bpm/key detectors),
|
||||
you should add /home/foo/.local as a VOLDIR
|
||||
./prisonparty.sh /var/lib/copyparty-jail 1000 1000 /mnt/nas/music -- copyparty-sfx.py -v /mnt/nas/music::rwmd"
|
||||
|
||||
EOF
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
errs=
|
||||
for c in awk chroot dirname getent lsof mknod mount realpath sed sort stat uniq; do
|
||||
command -v $c >/dev/null || {
|
||||
echo ERROR: command not found: $c
|
||||
errs=1
|
||||
}
|
||||
done
|
||||
[ $errs ] && exit 1
|
||||
|
||||
|
||||
# read arguments
|
||||
trap help EXIT
|
||||
jail="$(realpath "$1")"; shift
|
||||
|
@ -49,37 +32,20 @@ while true; do
|
|||
v="$1"; shift
|
||||
[ "$v" = -- ] && break # end of volumes
|
||||
[ "$#" -eq 0 ] && break # invalid usage
|
||||
vols+=( "$(realpath "$v" || echo "$v")" )
|
||||
vols+=( "$(realpath "$v")" )
|
||||
done
|
||||
pybin="$1"; shift
|
||||
pybin="$(command -v "$pybin")"
|
||||
pyarg=
|
||||
while true; do
|
||||
v="$1"
|
||||
[ "${v:0:1}" = - ] || break
|
||||
pyarg="$pyarg $v"
|
||||
shift
|
||||
done
|
||||
pybin="$(realpath "$pybin")"
|
||||
cpp="$1"; shift
|
||||
[ -d "$cpp" ] && cppdir="$PWD" || {
|
||||
# sfx, not module
|
||||
cpp="$(realpath "$cpp")"
|
||||
cppdir="$(dirname "$cpp")"
|
||||
}
|
||||
cpp="$(realpath "$cpp")"
|
||||
cppdir="$(dirname "$cpp")"
|
||||
trap - EXIT
|
||||
|
||||
usr="$(getent passwd $uid | cut -d: -f1)"
|
||||
[ "$usr" ] || { echo "ERROR invalid username/uid $uid"; exit 1; }
|
||||
uid="$(getent passwd $uid | cut -d: -f3)"
|
||||
|
||||
grp="$(getent group $gid | cut -d: -f1)"
|
||||
[ "$grp" ] || { echo "ERROR invalid groupname/gid $gid"; exit 1; }
|
||||
gid="$(getent group $gid | cut -d: -f3)"
|
||||
|
||||
# debug/vis
|
||||
echo
|
||||
echo "chroot-dir = $jail"
|
||||
echo "user:group = $uid:$gid ($usr:$grp)"
|
||||
echo "user:group = $uid:$gid"
|
||||
echo " copyparty = $cpp"
|
||||
echo
|
||||
printf '\033[33m%s\033[0m\n' "copyparty can access these folders and all their subdirectories:"
|
||||
|
@ -94,45 +60,23 @@ echo
|
|||
|
||||
# remove any trailing slashes
|
||||
jail="${jail%/}"
|
||||
cppdir="${cppdir%/}"
|
||||
|
||||
|
||||
# bind-mount system directories and volumes
|
||||
for a in {1..30}; do mkdir "$jail/.prisonlock" && break; sleep 0.1; done
|
||||
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | sed -r 's`/$``' | LC_ALL=C sort | uniq |
|
||||
printf '%s\n' "${sysdirs[@]}" "${vols[@]}" | LC_ALL=C sort |
|
||||
while IFS= read -r v; do
|
||||
[ -e "$v" ] || {
|
||||
printf '\033[1;31mfolder does not exist:\033[0m %s\n' "$v"
|
||||
# printf '\033[1;31mfolder does not exist:\033[0m %s\n' "/$v"
|
||||
continue
|
||||
}
|
||||
i1=$(stat -c%D.%i "$v/" 2>/dev/null || echo a)
|
||||
i2=$(stat -c%D.%i "$jail$v/" 2>/dev/null || echo b)
|
||||
i1=$(stat -c%D.%i "$v" 2>/dev/null || echo a)
|
||||
i2=$(stat -c%D.%i "$jail$v" 2>/dev/null || echo b)
|
||||
[ $i1 = $i2 ] && continue
|
||||
mount | grep -qF " $jail$v " && echo wtf $i1 $i2 $v && continue
|
||||
|
||||
mkdir -p "$jail$v"
|
||||
mount --bind "$v" "$jail$v"
|
||||
done
|
||||
rmdir "$jail/.prisonlock" || true
|
||||
|
||||
|
||||
cln() {
|
||||
trap - EXIT
|
||||
wait -f -n $p && rv=0 || rv=$?
|
||||
cd /
|
||||
echo "stopping chroot..."
|
||||
for a in {1..30}; do mkdir "$jail/.prisonlock" && break; sleep 0.1; done
|
||||
lsof "$jail" 2>/dev/null | grep -F "$jail" &&
|
||||
echo "chroot is in use; will not unmount" ||
|
||||
{
|
||||
mount | grep -F " on $jail" |
|
||||
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
||||
LC_ALL=C sort -r | while IFS= read -r v; do
|
||||
umount "$v" && echo "umount OK: $v"
|
||||
done
|
||||
}
|
||||
rmdir "$jail/.prisonlock" || true
|
||||
exit $rv
|
||||
}
|
||||
trap cln EXIT
|
||||
|
||||
|
||||
# create a tmp
|
||||
|
@ -140,24 +84,16 @@ mkdir -p "$jail/tmp"
|
|||
chmod 777 "$jail/tmp"
|
||||
|
||||
|
||||
# create a dev
|
||||
(cd $jail; mkdir -p dev; cd dev
|
||||
[ -e null ] || mknod -m 666 null c 1 3
|
||||
[ -e zero ] || mknod -m 666 zero c 1 5
|
||||
[ -e random ] || mknod -m 444 random c 1 8
|
||||
[ -e urandom ] || mknod -m 444 urandom c 1 9
|
||||
)
|
||||
|
||||
|
||||
# run copyparty
|
||||
export HOME="$(getent passwd $uid | cut -d: -f6)"
|
||||
export USER="$usr"
|
||||
export LOGNAME="$USER"
|
||||
#echo "pybin [$pybin]"
|
||||
#echo "pyarg [$pyarg]"
|
||||
#echo "cpp [$cpp]"
|
||||
chroot --userspec=$uid:$gid "$jail" "$pybin" $pyarg "$cpp" "$@" &
|
||||
p=$!
|
||||
trap 'kill -USR1 $p' USR1
|
||||
trap 'trap - INT TERM; kill $p' INT TERM
|
||||
wait
|
||||
/sbin/chroot --userspec=$uid:$gid "$jail" "$pybin" "$cpp" "$@" && rv=0 || rv=$?
|
||||
|
||||
|
||||
# cleanup if not in use
|
||||
lsof "$jail" | grep -qF "$jail" &&
|
||||
echo "chroot is in use, will not cleanup" ||
|
||||
{
|
||||
mount | grep -qF " on $jail" |
|
||||
awk '{sub(/ type .*/,"");sub(/.* on /,"");print}' |
|
||||
LC_ALL=C sort -r | tee /dev/stderr | tr '\n' '\0' | xargs -r0 umount
|
||||
}
|
||||
exit $rv
|
||||
|
|
bin/u2c.py (1718 lines changed): file diff suppressed because it is too large
@ -1,99 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
unforget.py: rebuild db from logfiles
|
||||
2022-09-07, v0.1, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/unforget.py
|
||||
|
||||
only makes sense if running copyparty with --no-forget
|
||||
(e.g. immediately shifting uploads to other storage)
|
||||
|
||||
usage:
|
||||
xz -d < log | ./unforget.py .hist/up2k.db
|
||||
|
||||
"""
|
||||
|
||||
import re
|
||||
import sys
|
||||
import json
|
||||
import base64
|
||||
import sqlite3
|
||||
import argparse
|
||||
|
||||
|
||||
FS_ENCODING = sys.getfilesystemencoding()
|
||||
|
||||
|
||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||
pass
|
||||
|
||||
|
||||
mem_cur = sqlite3.connect(":memory:").cursor()
|
||||
mem_cur.execute(r"create table a (b text)")
|
||||
|
||||
|
||||
def s3enc(rd: str, fn: str) -> tuple[str, str]:
|
||||
ret: list[str] = []
|
||||
for v in [rd, fn]:
|
||||
try:
|
||||
mem_cur.execute("select * from a where b = ?", (v,))
|
||||
ret.append(v)
|
||||
except:
|
||||
wtf8 = v.encode(FS_ENCODING, "surrogateescape")
|
||||
ret.append("//" + base64.urlsafe_b64encode(wtf8).decode("ascii"))
|
||||
|
||||
return ret[0], ret[1]
|
||||
|
||||
|
||||
def main():
|
||||
ap = argparse.ArgumentParser()
|
||||
ap.add_argument("db")
|
||||
ar = ap.parse_args()
|
||||
|
||||
db = sqlite3.connect(ar.db).cursor()
|
||||
ptn_times = re.compile(r"no more chunks, setting times \(([0-9]+)")
|
||||
at = 0
|
||||
ctr = 0
|
||||
|
||||
for ln in [x.decode("utf-8", "replace").rstrip() for x in sys.stdin.buffer]:
|
||||
if "no more chunks, setting times (" in ln:
|
||||
m = ptn_times.search(ln)
|
||||
if m:
|
||||
at = int(m.group(1))
|
||||
|
||||
if '"hash": []' in ln:
|
||||
try:
|
||||
ofs = ln.find("{")
|
||||
j = json.loads(ln[ofs:])
|
||||
except:
|
||||
continue
|
||||
|
||||
w = j["wark"]
|
||||
if db.execute("select w from up where w = ?", (w,)).fetchone():
|
||||
continue
|
||||
|
||||
# PYTHONPATH=/home/ed/dev/copyparty/ python3 -m copyparty -e2dsa -v foo:foo:rwmd,ed -aed:wark --no-forget
|
||||
# 05:34:43.845 127.0.0.1 42496 no more chunks, setting times (1662528883, 1658001882)
|
||||
# 05:34:43.863 127.0.0.1 42496 {"name": "f\"2", "purl": "/foo/bar/baz/", "size": 1674, "lmod": 1658001882, "sprs": true, "hash": [], "wark": "LKIWpp2jEAh9dH3fu-DobuURFGEKlODXDGTpZ1otMhUg"}
|
||||
# | w | mt | sz | rd | fn | ip | at |
|
||||
# | LKIWpp2jEAh9dH3fu-DobuURFGEKlODXDGTpZ1otMhUg | 1658001882 | 1674 | bar/baz | f"2 | 127.0.0.1 | 1662528883 |
|
||||
|
||||
rd, fn = s3enc(j["purl"].strip("/"), j["name"])
|
||||
ip = ln.split(" ")[1].split("m")[-1]
|
||||
|
||||
q = "insert into up values (?,?,?,?,?,?,?)"
|
||||
v = (w, int(j["lmod"]), int(j["size"]), rd, fn, ip, at)
|
||||
db.execute(q, v)
|
||||
ctr += 1
|
||||
if ctr % 1024 == 1023:
|
||||
print(f"{ctr} commit...")
|
||||
db.connection.commit()
|
||||
|
||||
if ctr:
|
||||
db.connection.commit()
|
||||
|
||||
print(f"unforgot {ctr} files")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
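The `s3enc` helper above is the write-side counterpart of `s3dec` in partyjournal.py; a small sketch of how the two round-trip a filename that is not valid UTF-8 (the sample bytes are made up):

# round-trip demo of the "//" + base64 encoding used by s3enc above
# and decoded by s3dec in partyjournal.py; the sample filename is made up
import base64
import sys

FS_ENCODING = sys.getfilesystemencoding()

raw = b"song-\xff\xfe.mp3"  # not valid utf-8
fn = raw.decode(FS_ENCODING, "surrogateescape")

# s3enc: values that sqlite refuses to store as text get base64'd with a "//" prefix
wtf8 = fn.encode(FS_ENCODING, "surrogateescape")
enc = "//" + base64.urlsafe_b64encode(wtf8).decode("ascii")

# s3dec: strip the "//" marker and decode back (lossy "replace", like partyjournal does)
dec = base64.urlsafe_b64decode(enc.encode("ascii")[2:]).decode(FS_ENCODING, "replace")

print(enc)
print(repr(dec))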
bin/up2k.py (830 lines added; Executable file)

@@ -0,0 +1,830 @@
|
|||
#!/usr/bin/env python3
|
||||
from __future__ import print_function, unicode_literals
|
||||
|
||||
"""
|
||||
up2k.py: upload to copyparty
|
||||
2021-11-28, v0.13, ed <irc.rizon.net>, MIT-Licensed
|
||||
https://github.com/9001/copyparty/blob/hovudstraum/bin/up2k.py
|
||||
|
||||
- dependencies: requests
|
||||
- supports python 2.6, 2.7, and 3.3 through 3.10
|
||||
|
||||
- almost zero error-handling
|
||||
- but if something breaks just try again and it'll autoresume
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import stat
|
||||
import math
|
||||
import time
|
||||
import atexit
|
||||
import signal
|
||||
import base64
|
||||
import hashlib
|
||||
import argparse
|
||||
import platform
|
||||
import threading
|
||||
import requests
|
||||
import datetime
|
||||
|
||||
|
||||
# from copyparty/__init__.py
|
||||
PY2 = sys.version_info[0] == 2
|
||||
if PY2:
|
||||
from Queue import Queue
|
||||
from urllib import unquote
|
||||
from urllib import quote
|
||||
|
||||
sys.dont_write_bytecode = True
|
||||
bytes = str
|
||||
else:
|
||||
from queue import Queue
|
||||
from urllib.parse import unquote_to_bytes as unquote
|
||||
from urllib.parse import quote_from_bytes as quote
|
||||
|
||||
unicode = str
|
||||
|
||||
VT100 = platform.system() != "Windows"
|
||||
|
||||
|
||||
req_ses = requests.Session()
|
||||
|
||||
|
||||
class File(object):
|
||||
"""an up2k upload task; represents a single file"""
|
||||
|
||||
def __init__(self, top, rel, size, lmod):
|
||||
self.top = top # type: bytes
|
||||
self.rel = rel.replace(b"\\", b"/") # type: bytes
|
||||
self.size = size # type: int
|
||||
self.lmod = lmod # type: float
|
||||
|
||||
self.abs = os.path.join(top, rel) # type: bytes
|
||||
self.name = self.rel.split(b"/")[-1].decode("utf-8", "replace") # type: str
|
||||
|
||||
# set by get_hashlist
|
||||
self.cids = [] # type: list[tuple[str, int, int]] # [ hash, ofs, sz ]
|
||||
self.kchunks = {} # type: dict[str, tuple[int, int]] # hash: [ ofs, sz ]
|
||||
|
||||
# set by handshake
|
||||
self.ucids = [] # type: list[str] # chunks which need to be uploaded
|
||||
self.wark = None # type: str
|
||||
self.url = None # type: str
|
||||
|
||||
# set by upload
|
||||
self.up_b = 0 # type: int
|
||||
self.up_c = 0 # type: int
|
||||
|
||||
# m = "size({}) lmod({}) top({}) rel({}) abs({}) name({})\n"
|
||||
# eprint(m.format(self.size, self.lmod, self.top, self.rel, self.abs, self.name))
|
||||
|
||||
|
||||
class FileSlice(object):
|
||||
"""file-like object providing a fixed window into a file"""
|
||||
|
||||
def __init__(self, file, cid):
|
||||
# type: (File, str) -> FileSlice
|
||||
|
||||
self.car, self.len = file.kchunks[cid]
|
||||
self.cdr = self.car + self.len
|
||||
self.ofs = 0 # type: int
|
||||
self.f = open(file.abs, "rb", 512 * 1024)
|
||||
self.f.seek(self.car)
|
||||
|
||||
# https://stackoverflow.com/questions/4359495/what-is-exactly-a-file-like-object-in-python
|
||||
# IOBase, RawIOBase, BufferedIOBase
|
||||
funs = "close closed __enter__ __exit__ __iter__ isatty __next__ readable seekable writable"
|
||||
try:
|
||||
for fun in funs.split():
|
||||
setattr(self, fun, getattr(self.f, fun))
|
||||
except:
|
||||
pass # py27 probably
|
||||
|
||||
def tell(self):
|
||||
return self.ofs
|
||||
|
||||
def seek(self, ofs, wh=0):
|
||||
if wh == 1:
|
||||
ofs = self.ofs + ofs
|
||||
elif wh == 2:
|
||||
ofs = self.len + ofs # provided ofs is negative
|
||||
|
||||
if ofs < 0:
|
||||
ofs = 0
|
||||
elif ofs >= self.len:
|
||||
ofs = self.len - 1
|
||||
|
||||
self.ofs = ofs
|
||||
self.f.seek(self.car + ofs)
|
||||
|
||||
def read(self, sz):
|
||||
sz = min(sz, self.len - self.ofs)
|
||||
ret = self.f.read(sz)
|
||||
self.ofs += len(ret)
|
||||
return ret
|
||||
|
||||
|
||||
_print = print
|
||||
|
||||
|
||||
def eprint(*a, **ka):
|
||||
ka["file"] = sys.stderr
|
||||
ka["end"] = ""
|
||||
if not PY2:
|
||||
ka["flush"] = True
|
||||
|
||||
_print(*a, **ka)
|
||||
if PY2 or not VT100:
|
||||
sys.stderr.flush()
|
||||
|
||||
|
||||
def flushing_print(*a, **ka):
|
||||
_print(*a, **ka)
|
||||
if "flush" not in ka:
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
if not VT100:
|
||||
print = flushing_print
|
||||
|
||||
|
||||
def termsize():
|
||||
import os
|
||||
|
||||
env = os.environ
|
||||
|
||||
def ioctl_GWINSZ(fd):
|
||||
try:
|
||||
import fcntl, termios, struct, os
|
||||
|
||||
cr = struct.unpack("hh", fcntl.ioctl(fd, termios.TIOCGWINSZ, "1234"))
|
||||
except:
|
||||
return
|
||||
return cr
|
||||
|
||||
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
|
||||
if not cr:
|
||||
try:
|
||||
fd = os.open(os.ctermid(), os.O_RDONLY)
|
||||
cr = ioctl_GWINSZ(fd)
|
||||
os.close(fd)
|
||||
except:
|
||||
pass
|
||||
if not cr:
|
||||
try:
|
||||
cr = (env["LINES"], env["COLUMNS"])
|
||||
except:
|
||||
cr = (25, 80)
|
||||
return int(cr[1]), int(cr[0])
|
||||
|
||||
|
||||
class CTermsize(object):
|
||||
def __init__(self):
|
||||
self.ev = False
|
||||
self.margin = None
|
||||
self.g = None
|
||||
self.w, self.h = termsize()
|
||||
|
||||
try:
|
||||
signal.signal(signal.SIGWINCH, self.ev_sig)
|
||||
except:
|
||||
return
|
||||
|
||||
thr = threading.Thread(target=self.worker)
|
||||
thr.daemon = True
|
||||
thr.start()
|
||||
|
||||
def worker(self):
|
||||
while True:
|
||||
time.sleep(0.5)
|
||||
if not self.ev:
|
||||
continue
|
||||
|
||||
self.ev = False
|
||||
self.w, self.h = termsize()
|
||||
|
||||
if self.margin is not None:
|
||||
self.scroll_region(self.margin)
|
||||
|
||||
def ev_sig(self, *a, **ka):
|
||||
self.ev = True
|
||||
|
||||
def scroll_region(self, margin):
|
||||
self.margin = margin
|
||||
if margin is None:
|
||||
self.g = None
|
||||
eprint("\033[s\033[r\033[u")
|
||||
else:
|
||||
self.g = 1 + self.h - margin
|
||||
m = "{0}\033[{1}A".format("\n" * margin, margin)
|
||||
eprint("{0}\033[s\033[1;{1}r\033[u".format(m, self.g - 1))
|
||||
|
||||
|
||||
ss = CTermsize()
|
||||
|
||||
|
||||
def _scd(err, top):
|
||||
"""non-recursive listing of directory contents, along with stat() info"""
|
||||
with os.scandir(top) as dh:
|
||||
for fh in dh:
|
||||
abspath = os.path.join(top, fh.name)
|
||||
try:
|
||||
yield [abspath, fh.stat()]
|
||||
except:
|
||||
err.append(abspath)
|
||||
|
||||
|
||||
def _lsd(err, top):
|
||||
"""non-recursive listing of directory contents, along with stat() info"""
|
||||
for name in os.listdir(top):
|
||||
abspath = os.path.join(top, name)
|
||||
try:
|
||||
yield [abspath, os.stat(abspath)]
|
||||
except:
|
||||
err.append(abspath)
|
||||
|
||||
|
||||
if hasattr(os, "scandir"):
|
||||
statdir = _scd
|
||||
else:
|
||||
statdir = _lsd
|
||||
|
||||
|
||||
def walkdir(err, top):
|
||||
"""recursive statdir"""
|
||||
for ap, inf in sorted(statdir(err, top)):
|
||||
if stat.S_ISDIR(inf.st_mode):
|
||||
try:
|
||||
for x in walkdir(err, ap):
|
||||
yield x
|
||||
except:
|
||||
err.append(ap)
|
||||
else:
|
||||
yield ap, inf
|
||||
|
||||
|
||||
def walkdirs(err, tops):
|
||||
"""recursive statdir for a list of tops, yields [top, relpath, stat]"""
|
||||
sep = "{0}".format(os.sep).encode("ascii")
|
||||
for top in tops:
|
||||
if top[-1:] == sep:
|
||||
stop = top.rstrip(sep)
|
||||
else:
|
||||
stop = os.path.dirname(top)
|
||||
|
||||
if os.path.isdir(top):
|
||||
for ap, inf in walkdir(err, top):
|
||||
yield stop, ap[len(stop) :].lstrip(sep), inf
|
||||
else:
|
||||
d, n = top.rsplit(sep, 1)
|
||||
yield d, n, os.stat(top)
|
||||
|
||||
|
||||
# mostly from copyparty/util.py
|
||||
def quotep(btxt):
|
||||
quot1 = quote(btxt, safe=b"/")
|
||||
if not PY2:
|
||||
quot1 = quot1.encode("ascii")
|
||||
|
||||
return quot1.replace(b" ", b"+")
|
||||
|
||||
|
||||
# from copyparty/util.py
|
||||
def humansize(sz, terse=False):
|
||||
"""picks a sensible unit for the given extent"""
|
||||
for unit in ["B", "KiB", "MiB", "GiB", "TiB"]:
|
||||
if sz < 1024:
|
||||
break
|
||||
|
||||
sz /= 1024.0
|
||||
|
||||
ret = " ".join([str(sz)[:4].rstrip("."), unit])
|
||||
|
||||
if not terse:
|
||||
return ret
|
||||
|
||||
return ret.replace("iB", "").replace(" ", "")
|
||||
|
||||
|
||||
# from copyparty/up2k.py
|
||||
def up2k_chunksize(filesize):
|
||||
"""gives The correct chunksize for up2k hashing"""
|
||||
chunksize = 1024 * 1024
|
||||
stepsize = 512 * 1024
|
||||
while True:
|
||||
for mul in [1, 2]:
|
||||
nchunks = math.ceil(filesize * 1.0 / chunksize)
|
||||
if nchunks <= 256 or chunksize >= 32 * 1024 * 1024:
|
||||
return chunksize
|
||||
|
||||
chunksize += stepsize
|
||||
stepsize *= mul
|
||||
|
||||
|
||||
# mostly from copyparty/up2k.py
|
||||
def get_hashlist(file, pcb):
|
||||
# type: (File, any) -> None
|
||||
"""generates the up2k hashlist from file contents, inserts it into `file`"""
|
||||
|
||||
chunk_sz = up2k_chunksize(file.size)
|
||||
file_rem = file.size
|
||||
file_ofs = 0
|
||||
ret = []
|
||||
with open(file.abs, "rb", 512 * 1024) as f:
|
||||
while file_rem > 0:
|
||||
hashobj = hashlib.sha512()
|
||||
chunk_sz = chunk_rem = min(chunk_sz, file_rem)
|
||||
while chunk_rem > 0:
|
||||
buf = f.read(min(chunk_rem, 64 * 1024))
|
||||
if not buf:
|
||||
raise Exception("EOF at " + str(f.tell()))
|
||||
|
||||
hashobj.update(buf)
|
||||
chunk_rem -= len(buf)
|
||||
|
||||
digest = hashobj.digest()[:33]
|
||||
digest = base64.urlsafe_b64encode(digest).decode("utf-8")
|
||||
|
||||
ret.append([digest, file_ofs, chunk_sz])
|
||||
file_ofs += chunk_sz
|
||||
file_rem -= chunk_sz
|
||||
|
||||
if pcb:
|
||||
pcb(file, file_ofs)
|
||||
|
||||
file.cids = ret
|
||||
file.kchunks = {}
|
||||
for k, v1, v2 in ret:
|
||||
file.kchunks[k] = [v1, v2]
|
||||
|
||||
|
||||
def handshake(req_ses, url, file, pw, search):
|
||||
# type: (requests.Session, str, File, any, bool) -> List[str]
|
||||
"""
|
||||
performs a handshake with the server; reply is:
|
||||
if search, a list of search results
|
||||
otherwise, a list of chunks to upload
|
||||
"""
|
||||
|
||||
req = {
|
||||
"hash": [x[0] for x in file.cids],
|
||||
"name": file.name,
|
||||
"lmod": file.lmod,
|
||||
"size": file.size,
|
||||
}
|
||||
if search:
|
||||
req["srch"] = 1
|
||||
|
||||
headers = {"Content-Type": "text/plain"} # wtf ed
|
||||
if pw:
|
||||
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||
|
||||
if file.url:
|
||||
url = file.url
|
||||
elif b"/" in file.rel:
|
||||
url += quotep(file.rel.rsplit(b"/", 1)[0]).decode("utf-8", "replace")
|
||||
|
||||
while True:
|
||||
try:
|
||||
r = req_ses.post(url, headers=headers, json=req)
|
||||
break
|
||||
except:
|
||||
eprint("handshake failed, retrying: {0}\n".format(file.name))
|
||||
time.sleep(1)
|
||||
|
||||
try:
|
||||
r = r.json()
|
||||
except:
|
||||
raise Exception(r.text)
|
||||
|
||||
if search:
|
||||
return r["hits"]
|
||||
|
||||
try:
|
||||
pre, url = url.split("://")
|
||||
pre += "://"
|
||||
except:
|
||||
pre = ""
|
||||
|
||||
file.url = pre + url.split("/")[0] + r["purl"]
|
||||
file.name = r["name"]
|
||||
file.wark = r["wark"]
|
||||
|
||||
return r["hash"]
|
||||
|
||||
|
||||
def upload(req_ses, file, cid, pw):
|
||||
# type: (requests.Session, File, str, any) -> None
|
||||
"""upload one specific chunk, `cid` (a chunk-hash)"""
|
||||
|
||||
headers = {
|
||||
"X-Up2k-Hash": cid,
|
||||
"X-Up2k-Wark": file.wark,
|
||||
"Content-Type": "application/octet-stream",
|
||||
}
|
||||
if pw:
|
||||
headers["Cookie"] = "=".join(["cppwd", pw])
|
||||
|
||||
f = FileSlice(file, cid)
|
||||
try:
|
||||
r = req_ses.post(file.url, headers=headers, data=f)
|
||||
if not r:
|
||||
raise Exception(repr(r))
|
||||
|
||||
_ = r.content
|
||||
finally:
|
||||
f.f.close()
|
||||
|
||||
|
||||
class Daemon(threading.Thread):
|
||||
def __init__(self, *a, **ka):
|
||||
threading.Thread.__init__(self, *a, **ka)
|
||||
self.daemon = True
|
||||
|
||||
|
||||
class Ctl(object):
|
||||
"""
|
||||
this will be the coordinator which runs everything in parallel
|
||||
(hashing, handshakes, uploads) but right now it's p dumb
|
||||
"""
|
||||
|
||||
def __init__(self, ar):
|
||||
self.ar = ar
|
||||
ar.files = [
|
||||
os.path.abspath(os.path.realpath(x.encode("utf-8")))
|
||||
+ (x[-1:] if x[-1:] == os.sep else "").encode("utf-8")
|
||||
for x in ar.files
|
||||
]
|
||||
ar.url = ar.url.rstrip("/") + "/"
|
||||
if "://" not in ar.url:
|
||||
ar.url = "http://" + ar.url
|
||||
|
||||
eprint("\nscanning {0} locations\n".format(len(ar.files)))
|
||||
|
||||
nfiles = 0
|
||||
nbytes = 0
|
||||
err = []
|
||||
for _, _, inf in walkdirs(err, ar.files):
|
||||
nfiles += 1
|
||||
nbytes += inf.st_size
|
||||
|
||||
if err:
|
||||
eprint("\n# failed to access {0} paths:\n".format(len(err)))
|
||||
for x in err:
|
||||
eprint(x.decode("utf-8", "replace") + "\n")
|
||||
|
||||
eprint("^ failed to access those {0} paths ^\n\n".format(len(err)))
|
||||
if not ar.ok:
|
||||
eprint("aborting because --ok is not set\n")
|
||||
return
|
||||
|
||||
eprint("found {0} files, {1}\n\n".format(nfiles, humansize(nbytes)))
|
||||
self.nfiles = nfiles
|
||||
self.nbytes = nbytes
|
||||
|
||||
if ar.td:
|
||||
requests.packages.urllib3.disable_warnings()
|
||||
req_ses.verify = False
|
||||
if ar.te:
|
||||
req_ses.verify = ar.te
|
||||
|
||||
self.filegen = walkdirs([], ar.files)
|
||||
if ar.safe:
|
||||
self.safe()
|
||||
else:
|
||||
self.fancy()
|
||||
|
||||
def safe(self):
|
||||
"""minimal basic slow boring fallback codepath"""
|
||||
search = self.ar.s
|
||||
for nf, (top, rel, inf) in enumerate(self.filegen):
|
||||
file = File(top, rel, inf.st_size, inf.st_mtime)
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
|
||||
print("{0} {1}\n hash...".format(self.nfiles - nf, upath))
|
||||
get_hashlist(file, None)
|
||||
|
||||
burl = self.ar.url[:12] + self.ar.url[8:].split("/")[0] + "/"
|
||||
while True:
|
||||
print(" hs...")
|
||||
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
|
||||
if search:
|
||||
if hs:
|
||||
for hit in hs:
|
||||
print(" found: {0}{1}".format(burl, hit["rp"]))
|
||||
else:
|
||||
print(" NOT found")
|
||||
break
|
||||
|
||||
file.ucids = hs
|
||||
if not hs:
|
||||
break
|
||||
|
||||
print("{0} {1}".format(self.nfiles - nf, upath))
|
||||
ncs = len(hs)
|
||||
for nc, cid in enumerate(hs):
|
||||
print(" {0} up {1}".format(ncs - nc, cid))
|
||||
upload(req_ses, file, cid, self.ar.a)
|
||||
|
||||
print(" ok!")
|
||||
|
||||
def fancy(self):
|
||||
self.hash_f = 0
|
||||
self.hash_c = 0
|
||||
self.hash_b = 0
|
||||
self.up_f = 0
|
||||
self.up_c = 0
|
||||
self.up_b = 0
|
||||
self.up_br = 0
|
||||
self.hasher_busy = 1
|
||||
self.handshaker_busy = 0
|
||||
self.uploader_busy = 0
|
||||
|
||||
self.t0 = time.time()
|
||||
self.t0_up = None
|
||||
self.spd = None
|
||||
|
||||
self.mutex = threading.Lock()
|
||||
self.q_handshake = Queue() # type: Queue[File]
|
||||
self.q_recheck = Queue() # type: Queue[File] # partial upload exists [...]
|
||||
self.q_upload = Queue() # type: Queue[tuple[File, str]]
|
||||
|
||||
self.st_hash = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
self.st_up = [None, "(idle, starting...)"] # type: tuple[File, int]
|
||||
if VT100:
|
||||
atexit.register(self.cleanup_vt100)
|
||||
ss.scroll_region(3)
|
||||
|
||||
Daemon(target=self.hasher).start()
|
||||
for _ in range(self.ar.j):
|
||||
Daemon(target=self.handshaker).start()
|
||||
Daemon(target=self.uploader).start()
|
||||
|
||||
idles = 0
|
||||
while idles < 3:
|
||||
time.sleep(0.07)
|
||||
with self.mutex:
|
||||
if (
|
||||
self.q_handshake.empty()
|
||||
and self.q_upload.empty()
|
||||
and not self.hasher_busy
|
||||
and not self.handshaker_busy
|
||||
and not self.uploader_busy
|
||||
):
|
||||
idles += 1
|
||||
else:
|
||||
idles = 0
|
||||
|
||||
if VT100:
|
||||
maxlen = ss.w - len(str(self.nfiles)) - 14
|
||||
txt = "\033[s\033[{0}H".format(ss.g)
|
||||
for y, k, st, f in [
|
||||
[0, "hash", self.st_hash, self.hash_f],
|
||||
[1, "send", self.st_up, self.up_f],
|
||||
]:
|
||||
txt += "\033[{0}H{1}:".format(ss.g + y, k)
|
||||
file, arg = st
|
||||
if not file:
|
||||
txt += " {0}\033[K".format(arg)
|
||||
else:
|
||||
if y:
|
||||
p = 100 * file.up_b / file.size
|
||||
else:
|
||||
p = 100 * arg / file.size
|
||||
|
||||
name = file.abs.decode("utf-8", "replace")[-maxlen:]
|
||||
if "/" in name:
|
||||
name = "\033[36m{0}\033[0m/{1}".format(*name.rsplit("/", 1))
|
||||
|
||||
m = "{0:6.1f}% {1} {2}\033[K"
|
||||
txt += m.format(p, self.nfiles - f, name)
|
||||
|
||||
txt += "\033[{0}H ".format(ss.g + 2)
|
||||
else:
|
||||
txt = " "
|
||||
|
||||
if not self.up_br:
|
||||
spd = self.hash_b / (time.time() - self.t0)
|
||||
eta = (self.nbytes - self.hash_b) / (spd + 1)
|
||||
else:
|
||||
spd = self.up_br / (time.time() - self.t0_up)
|
||||
spd = self.spd = (self.spd or spd) * 0.9 + spd * 0.1
|
||||
eta = (self.nbytes - self.up_b) / (spd + 1)
|
||||
|
||||
spd = humansize(spd)
|
||||
eta = str(datetime.timedelta(seconds=int(eta)))
|
||||
left = humansize(self.nbytes - self.up_b)
|
||||
tail = "\033[K\033[u" if VT100 else "\r"
|
||||
|
||||
m = "eta: {0} @ {1}/s, {2} left".format(eta, spd, left)
|
||||
eprint(txt + "\033]0;{0}\033\\\r{1}{2}".format(m, m, tail))
|
||||
|
||||
def cleanup_vt100(self):
|
||||
ss.scroll_region(None)
|
||||
eprint("\033[J\033]0;\033\\")
|
||||
|
||||
def cb_hasher(self, file, ofs):
|
||||
self.st_hash = [file, ofs]
|
||||
|
||||
def hasher(self):
|
||||
prd = None
|
||||
ls = {}
|
||||
for top, rel, inf in self.filegen:
|
||||
if self.ar.z:
|
||||
rd = os.path.dirname(rel)
|
||||
if prd != rd:
|
||||
prd = rd
|
||||
headers = {}
|
||||
if self.ar.a:
|
||||
headers["Cookie"] = "=".join(["cppwd", self.ar.a])
|
||||
|
||||
ls = {}
|
||||
try:
|
||||
print(" ls ~{0}".format(rd.decode("utf-8", "replace")))
|
||||
r = req_ses.get(
|
||||
self.ar.url.encode("utf-8") + quotep(rd) + b"?ls",
|
||||
headers=headers,
|
||||
)
|
||||
for f in r.json()["files"]:
|
||||
rfn = f["href"].split("?")[0].encode("utf-8", "replace")
|
||||
ls[unquote(rfn)] = f
|
||||
except:
|
||||
print(" mkdir ~{0}".format(rd.decode("utf-8", "replace")))
|
||||
|
||||
rf = ls.get(os.path.basename(rel), None)
|
||||
if rf and rf["sz"] == inf.st_size and abs(rf["ts"] - inf.st_mtime) <= 1:
|
||||
self.nfiles -= 1
|
||||
self.nbytes -= inf.st_size
|
||||
continue
|
||||
|
||||
file = File(top, rel, inf.st_size, inf.st_mtime)
|
||||
while True:
|
||||
with self.mutex:
|
||||
if (
|
||||
self.hash_b - self.up_b < 1024 * 1024 * 128
|
||||
and self.hash_c - self.up_c < 64
|
||||
and (
|
||||
not self.ar.nh
|
||||
or (
|
||||
self.q_upload.empty()
|
||||
and self.q_handshake.empty()
|
||||
and not self.uploader_busy
|
||||
)
|
||||
)
|
||||
):
|
||||
break
|
||||
|
||||
time.sleep(0.05)
|
||||
|
||||
get_hashlist(file, self.cb_hasher)
|
||||
with self.mutex:
|
||||
self.hash_f += 1
|
||||
self.hash_c += len(file.cids)
|
||||
self.hash_b += file.size
|
||||
|
||||
self.q_handshake.put(file)
|
||||
|
||||
self.hasher_busy = 0
|
||||
self.st_hash = [None, "(finished)"]
|
||||
|
||||
def handshaker(self):
|
||||
search = self.ar.s
|
||||
q = self.q_handshake
|
||||
burl = self.ar.url[:8] + self.ar.url[8:].split("/")[0] + "/"
|
||||
while True:
|
||||
file = q.get()
|
||||
if not file:
|
||||
if q == self.q_handshake:
|
||||
q = self.q_recheck
|
||||
q.put(None)
|
||||
continue
|
||||
|
||||
self.q_upload.put(None)
|
||||
break
|
||||
|
||||
with self.mutex:
|
||||
self.handshaker_busy += 1
|
||||
|
||||
upath = file.abs.decode("utf-8", "replace")
|
||||
|
||||
try:
|
||||
hs = handshake(req_ses, self.ar.url, file, self.ar.a, search)
|
||||
except Exception as ex:
|
||||
if q == self.q_handshake and "<pre>partial upload exists" in str(ex):
|
||||
self.q_recheck.put(file)
|
||||
hs = []
|
||||
else:
|
||||
raise
|
||||
|
||||
if search:
|
||||
if hs:
|
||||
for hit in hs:
|
||||
m = "found: {0}\n {1}{2}\n"
|
||||
print(m.format(upath, burl, hit["rp"]), end="")
|
||||
else:
|
||||
print("NOT found: {0}\n".format(upath), end="")
|
||||
|
||||
with self.mutex:
|
||||
self.up_f += 1
|
||||
self.up_c += len(file.cids)
|
||||
self.up_b += file.size
|
||||
self.handshaker_busy -= 1
|
||||
|
||||
continue
|
||||
|
||||
with self.mutex:
|
||||
if not hs:
|
||||
# all chunks done
|
||||
self.up_f += 1
|
||||
self.up_c += len(file.cids) - file.up_c
|
||||
self.up_b += file.size - file.up_b
|
||||
|
||||
if hs and file.up_c:
|
||||
# some chunks failed
|
||||
self.up_c -= len(hs)
|
||||
file.up_c -= len(hs)
|
||||
for cid in hs:
|
||||
sz = file.kchunks[cid][1]
|
||||
self.up_b -= sz
|
||||
file.up_b -= sz
|
||||
|
||||
file.ucids = hs
|
||||
self.handshaker_busy -= 1
|
||||
|
||||
if not hs:
|
||||
kw = "uploaded" if file.up_b else " found"
|
||||
print("{0} {1}".format(kw, upath))
|
||||
for cid in hs:
|
||||
self.q_upload.put([file, cid])
|
||||
|
||||
def uploader(self):
|
||||
while True:
|
||||
task = self.q_upload.get()
|
||||
if not task:
|
||||
self.st_up = [None, "(finished)"]
|
||||
break
|
||||
|
||||
with self.mutex:
|
||||
self.uploader_busy += 1
|
||||
self.t0_up = self.t0_up or time.time()
|
||||
|
||||
file, cid = task
|
||||
try:
|
||||
upload(req_ses, file, cid, self.ar.a)
|
||||
except:
|
||||
eprint("upload failed, retrying: {0} #{1}\n".format(file.name, cid[:8]))
|
||||
pass # handshake will fix it
|
||||
|
||||
with self.mutex:
|
||||
sz = file.kchunks[cid][1]
|
||||
file.ucids = [x for x in file.ucids if x != cid]
|
||||
if not file.ucids:
|
||||
self.q_handshake.put(file)
|
||||
|
||||
self.st_up = [file, cid]
|
||||
file.up_b += sz
|
||||
self.up_b += sz
|
||||
self.up_br += sz
|
||||
file.up_c += 1
|
||||
self.up_c += 1
|
||||
self.uploader_busy -= 1
|
||||
|
||||
|
||||
class APF(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
|
||||
pass
|
||||
|
||||
|
||||
def main():
|
||||
time.strptime("19970815", "%Y%m%d") # python#7980
|
||||
if not VT100:
|
||||
os.system("rem") # enables colors
|
||||
|
||||
# fmt: off
|
||||
ap = app = argparse.ArgumentParser(formatter_class=APF, epilog="""
|
||||
NOTE:
|
||||
source file/folder selection uses rsync syntax, meaning that:
|
||||
"foo" uploads the entire folder to URL/foo/
|
||||
"foo/" uploads the CONTENTS of the folder into URL/
|
||||
""")
|
||||
|
||||
ap.add_argument("url", type=unicode, help="server url, including destination folder")
|
||||
ap.add_argument("files", type=unicode, nargs="+", help="files and/or folders to process")
|
||||
ap.add_argument("-a", metavar="PASSWORD", help="password")
|
||||
ap.add_argument("-s", action="store_true", help="file-search (disables upload)")
|
||||
ap.add_argument("--ok", action="store_true", help="continue even if some local files are inaccessible")
|
||||
ap = app.add_argument_group("performance tweaks")
|
||||
ap.add_argument("-j", type=int, metavar="THREADS", default=4, help="parallel connections")
|
||||
ap.add_argument("-nh", action="store_true", help="disable hashing while uploading")
|
||||
ap.add_argument("--safe", action="store_true", help="use simple fallback approach")
|
||||
ap.add_argument("-z", action="store_true", help="ZOOMIN' (skip uploading files if they exist at the destination with the ~same last-modified timestamp, so same as yolo / turbo with date-chk but even faster)")
|
||||
ap = app.add_argument_group("tls")
|
||||
ap.add_argument("-te", metavar="PEM_FILE", help="certificate to expect/verify")
|
||||
ap.add_argument("-td", action="store_true", help="disable certificate check")
|
||||
# fmt: on
|
||||
|
||||
Ctl(app.parse_args())
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
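The growth curve of `up2k_chunksize` above is easier to see by just printing it for a few sizes; a minimal sketch that reuses the same logic:

#!/usr/bin/env python3
# print the chunksize that up2k would pick for a few example filesizes,
# reusing the same logic as up2k_chunksize() in the listing above
import math


def up2k_chunksize(filesize):
    chunksize = 1024 * 1024
    stepsize = 512 * 1024
    while True:
        for mul in [1, 2]:
            nchunks = math.ceil(filesize * 1.0 / chunksize)
            if nchunks <= 256 or chunksize >= 32 * 1024 * 1024:
                return chunksize

            chunksize += stepsize
            stepsize *= mul


for sz in [1, 256 * 1024 * 1024, 4 * 1024 * 1024 * 1024, 256 * 1024 * 1024 * 1024]:
    print("%16d bytes -> %6.1f MiB chunks" % (sz, up2k_chunksize(sz) / 1024 / 1024))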
@@ -1,76 +0,0 @@
#!/usr/bin/env python3

import sys
import zmq

"""
zmq-recv.py: demo zmq receiver
2025-01-22, v1.0, ed <irc.rizon.net>, MIT-Licensed
https://github.com/9001/copyparty/blob/hovudstraum/bin/zmq-recv.py

basic zmq-server to receive events from copyparty; try one of
the below and then "send a message to serverlog" in the web-ui:

1) dumb fire-and-forget to any and all listeners;
   run this script with "sub" and run copyparty with this:
   --xm zmq:pub:tcp://*:5556

2) one lucky listener gets the message, blocks if no listeners:
   run this script with "pull" and run copyparty with this:
   --xm t3,zmq:push:tcp://*:5557

3) blocking syn/ack mode, client must ack each message;
   run this script with "rep" and run copyparty with this:
   --xm t3,zmq:req:tcp://localhost:5555

note: to conditionally block uploads based on message contents,
use rep_server to answer with "return 1" and run copyparty with
--xau t3,c,zmq:req:tcp://localhost:5555
"""


ctx = zmq.Context()


def sub_server():
    # PUB/SUB allows any number of servers/clients, and
    # messages are fire-and-forget
    sck = ctx.socket(zmq.SUB)
    sck.connect("tcp://localhost:5556")
    sck.setsockopt_string(zmq.SUBSCRIBE, "")
    while True:
        print("copyparty says %r" % (sck.recv_string(),))


def pull_server():
    # PUSH/PULL allows any number of servers/clients, and
    # each message is sent to exactly one PULL client
    sck = ctx.socket(zmq.PULL)
    sck.connect("tcp://localhost:5557")
    while True:
        print("copyparty says %r" % (sck.recv_string(),))


def rep_server():
    # REP/REQ is a server/client pair where each message must be
    # acked by the other before another message can be sent, so
    # copyparty will do a blocking-wait for the ack
    sck = ctx.socket(zmq.REP)
    sck.bind("tcp://*:5555")
    while True:
        print("copyparty says %r" % (sck.recv_string(),))
        reply = b"thx"
        # reply = b"return 1"  # non-zero to block an upload
        sck.send(reply)


mode = sys.argv[1].lower() if len(sys.argv) > 1 else ""

if mode == "sub":
    sub_server()
elif mode == "pull":
    pull_server()
elif mode == "rep":
    rep_server()
else:
    print("specify mode as first argument: SUB | PULL | REP")
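To exercise the receiver above without a running copyparty, a throwaway publisher on the same port as the `sub` example is enough; a minimal sketch using pyzmq:

#!/usr/bin/env python3
# zmq-send.py: throwaway PUB sender for testing zmq-recv.py in "sub" mode
# (binds the endpoint that copyparty would publish on with --xm zmq:pub:tcp://*:5556)
import time

import zmq

ctx = zmq.Context()
sck = ctx.socket(zmq.PUB)
sck.bind("tcp://*:5556")

time.sleep(0.5)  # PUB/SUB slow-joiner: give subscribers a moment to connect
sck.send_string("hello from a fake copyparty")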
@ -1,6 +1,3 @@
|
|||
### [`plugins/`](plugins/)
|
||||
* example extensions
|
||||
|
||||
### [`copyparty.bat`](copyparty.bat)
|
||||
* launches copyparty with no arguments (anon read+write within same folder)
|
||||
* intended for windows machines with no python.exe in PATH
|
||||
|
@ -12,46 +9,23 @@
|
|||
* assumes the webserver and copyparty is running on the same server/IP
|
||||
* modify `10.13.1.1` as necessary if you wish to support browsers without javascript
|
||||
|
||||
### [`sharex.sxcu`](sharex.sxcu) - Windows screenshot uploader
|
||||
* [sharex](https://getsharex.com/) config file to upload screenshots and grab the URL
|
||||
* `RequestURL`: full URL to the target folder
|
||||
* `pw`: password (remove the `pw` line if anon-write)
|
||||
* the `act:bput` thing is optional since copyparty v1.9.29
|
||||
* using an older sharex version, maybe sharex v12.1.1 for example? dw fam i got your back 👉😎👉 [`sharex12.sxcu`](sharex12.sxcu)
|
||||
|
||||
### [`ishare.iscu`](ishare.iscu) - MacOS screenshot uploader
|
||||
* [ishare](https://isharemac.app/) config file to upload screenshots and grab the URL
|
||||
### [`sharex.sxcu`](sharex.sxcu)
|
||||
* sharex config file to upload screenshots and grab the URL
|
||||
* `RequestURL`: full URL to the target folder
|
||||
* `pw`: password (remove the `pw` line if anon-write)
|
||||
|
||||
### [`flameshot.sh`](flameshot.sh) - Linux screenshot uploader
|
||||
* takes a screenshot with [flameshot](https://flameshot.org/) on Linux, uploads it, and writes the URL to clipboard
|
||||
|
||||
### [`send-to-cpp.contextlet.json`](send-to-cpp.contextlet.json)
|
||||
* browser integration, kind of? custom rightclick actions and stuff
|
||||
* rightclick a pic and send it to copyparty straight from your browser
|
||||
* for the [contextlet](https://addons.mozilla.org/en-US/firefox/addon/contextlets/) firefox extension
|
||||
|
||||
### [`media-osd-bgone.ps1`](media-osd-bgone.ps1)
|
||||
* disables the [windows OSD popup](https://user-images.githubusercontent.com/241032/122821375-0e08df80-d2dd-11eb-9fd9-184e8aacf1d0.png) (the thing on the left) which appears every time you hit media hotkeys to adjust volume or change song while playing music with the copyparty web-ui, or most other audio players really
|
||||
however if your copyparty is behind a reverse-proxy, you may want to use [`sharex-html.sxcu`](sharex-html.sxcu) instead:
|
||||
* `RequestURL`: full URL to the target folder
|
||||
* `URL`: full URL to the root folder (with trailing slash) followed by `$regex:1|1$`
|
||||
* `pw`: password (remove `Parameters` if anon-write)
|
||||
|
||||
### [`explorer-nothumbs-nofoldertypes.reg`](explorer-nothumbs-nofoldertypes.reg)
|
||||
* disables thumbnails and folder-type detection in windows explorer
|
||||
* makes it way faster (especially for slow/networked locations (such as partyfuse))
|
||||
|
||||
### [`webdav-cfg.reg`](webdav-cfg.bat)
|
||||
* improves the native webdav support in windows;
|
||||
* removes the 47.6 MiB filesize limit when downloading from webdav
|
||||
* optionally enables webdav basic-auth over plaintext http
|
||||
* optionally helps disable wpad, removing the 10sec latency
|
||||
* makes it way faster (especially for slow/networked locations (such as copyparty-fuse))
|
||||
|
||||
### [`cfssl.sh`](cfssl.sh)
|
||||
* creates CA and server certificates using cfssl
|
||||
* give a 3rd argument to install it to your copyparty config
|
||||
* systemd service at [`systemd/cfssl.service`](systemd/cfssl.service)
|
||||
|
||||
### [`zfs-tune.py`](zfs-tune.py)
|
||||
* optimizes databases for optimal performance when stored on a zfs filesystem; also see [openzfs docs](https://openzfs.github.io/openzfs-docs/Performance%20and%20Tuning/Workload%20Tuning.html#database-workloads) and specifically the SQLite subsection
|
||||
|
||||
# OS integration
|
||||
init-scripts to start copyparty as a service
|
||||
|
@ -61,10 +35,5 @@ init-scripts to start copyparty as a service
|
|||
* [`openrc/copyparty`](openrc/copyparty)


# Reverse-proxy

copyparty supports running behind another webserver

* [`apache/copyparty.conf`](apache/copyparty.conf)
* [`haproxy/copyparty.conf`](haproxy/copyparty.conf)
* [`lighttpd/subdomain.conf`](lighttpd/subdomain.conf)
* [`lighttpd/subpath.conf`](lighttpd/subpath.conf)
* [`nginx/copyparty.conf`](nginx/copyparty.conf) -- recommended
* [`traefik/copyparty.yaml`](traefik/copyparty.yaml)

copyparty has basic support for running behind another webserver

* [`nginx/copyparty.conf`](nginx/copyparty.conf)

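regardless of which webserver you pick, the nginx example further down recommends a few copyparty arguments whenever it sits behind a proxy; a minimal sketch of such an invocation (the sfx filename is just an example):

```bash
# -nc 512       must match/exceed the proxy's max number of concurrent clients
# --http-only   lower latency on initial connection (TLS is terminated by the proxy)
# -i 127.0.0.1  only accept connections from the local proxy
python3 copyparty-sfx.py -nc 512 --http-only -i 127.0.0.1
```
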
@ -1,29 +0,0 @@
|
|||
# if you would like to use unix-sockets (recommended),
|
||||
# you must run copyparty with one of the following:
|
||||
#
|
||||
# -i unix:777:/dev/shm/party.sock
|
||||
# -i unix:777:/dev/shm/party.sock,127.0.0.1
|
||||
#
|
||||
# if you are doing location-based proxying (such as `/stuff` below)
|
||||
# you must run copyparty with --rp-loc=stuff
|
||||
#
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
|
||||
|
||||
LoadModule proxy_module modules/mod_proxy.so
|
||||
|
||||
RequestHeader set "X-Forwarded-Proto" expr=%{REQUEST_SCHEME}
|
||||
# NOTE: do not specify ProxyPassReverse
|
||||
|
||||
|
||||
##
|
||||
## then, enable one of the below:
|
||||
|
||||
# use subdomain proxying to unix-socket (best)
|
||||
ProxyPass "/" "unix:///dev/shm/party.sock|http://whatever/"
|
||||
|
||||
# use subdomain proxying to 127.0.0.1 (slower)
|
||||
#ProxyPass "/" "http://127.0.0.1:3923/"
|
||||
|
||||
# use subpath proxying to 127.0.0.1 (slow and maybe buggy)
|
||||
#ProxyPass "/stuff" "http://127.0.0.1:3923/stuff"
|
|
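tying the two comments in the apache config above together, this is roughly how copyparty would be started for the location-based (`/stuff`) unix-socket setup; a sketch only, assuming the sfx is named copyparty-sfx.py:

```bash
# unix-socket listener + subpath prefix matching the ProxyPass example above
python3 copyparty-sfx.py -i unix:777:/dev/shm/party.sock --rp-loc=stuff
```
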
@ -1,44 +1,14 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
cat >/dev/null <<'EOF'
|
||||
|
||||
NOTE: copyparty is now able to do this automatically;
|
||||
however you may wish to use this script instead if
|
||||
you have specific needs (or if copyparty breaks)
|
||||
|
||||
this script generates a new self-signed TLS certificate and
|
||||
replaces the default insecure one that comes with copyparty
|
||||
|
||||
as it is trivial to impersonate a copyparty server using the
|
||||
default certificate, it is highly recommended to do this
|
||||
|
||||
this will create a self-signed CA, and a Server certificate
|
||||
which gets signed by that CA -- you can run it multiple times
|
||||
with different server-FQDNs / IPs to create additional certs
|
||||
for all your different servers / (non-)copyparty services
|
||||
|
||||
EOF
|
||||
|
||||
|
||||
# ca-name and server-fqdn
|
||||
ca_name="$1"
|
||||
srv_fqdn="$2"
|
||||
|
||||
[ -z "$srv_fqdn" ] && { cat <<'EOF'
|
||||
need arg 1: ca name
|
||||
need arg 2: server fqdn and/or IPs, comma-separated
|
||||
optional arg 3: if set, write cert into copyparty cfg
|
||||
|
||||
example:
|
||||
./cfssl.sh PartyCo partybox.local y
|
||||
EOF
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
||||
command -v cfssljson 2>/dev/null || {
|
||||
echo please install cfssl and try again
|
||||
[ -z "$srv_fqdn" ] && {
|
||||
echo "need arg 1: ca name"
|
||||
echo "need arg 2: server fqdn"
|
||||
echo "optional arg 3: if set, write cert into copyparty cfg"
|
||||
exit 1
|
||||
}
|
||||
|
||||
|
@ -89,14 +59,12 @@ show() {
|
|||
}
|
||||
show ca.pem
|
||||
show "$srv_fqdn.pem"
|
||||
echo
|
||||
echo "successfully generated new certificates"
|
||||
|
||||
|
||||
# write cert into copyparty config
|
||||
[ -z "$3" ] || {
|
||||
mkdir -p ~/.config/copyparty
|
||||
cat "$srv_fqdn".{key,pem} ca.pem >~/.config/copyparty/cert.pem
|
||||
echo "successfully replaced copyparty certificate"
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# take a screenshot with flameshot and send it to copyparty;
|
||||
# the image url will be placed on your clipboard
|
||||
|
||||
password=wark
|
||||
url=https://a.ocv.me/up/
|
||||
filename=$(date +%Y-%m%d-%H%M%S).png
|
||||
|
||||
flameshot gui -s -r |
|
||||
curl -T- $url$filename?pw=$password |
|
||||
tail -n 1 |
|
||||
xsel -ib
|
|
@ -1,24 +0,0 @@
|
|||
# this config is essentially two separate examples;
|
||||
#
|
||||
# foo1 connects to copyparty using tcp, and
|
||||
# foo2 uses unix-sockets for 27% higher performance
|
||||
#
|
||||
# to use foo2 you must run copyparty with one of the following:
|
||||
#
|
||||
# -i unix:777:/dev/shm/party.sock
|
||||
# -i unix:777:/dev/shm/party.sock,127.0.0.1
|
||||
|
||||
defaults
|
||||
mode http
|
||||
option forwardfor
|
||||
timeout connect 1s
|
||||
timeout client 610s
|
||||
timeout server 610s
|
||||
|
||||
listen foo1
|
||||
bind *:8081
|
||||
server srv1 127.0.0.1:3923 maxconn 512
|
||||
|
||||
listen foo2
|
||||
bind *:8082
|
||||
server srv1 /dev/shm/party.sock maxconn 512
|
|
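once haproxy is up, the two example listeners can be spot-checked from the same box; a small sketch (ports taken from the config above):

```bash
# foo1 proxies over tcp (port 8081), foo2 over the unix-socket (port 8082)
curl -sI http://127.0.0.1:8081/ | head -n1
curl -sI http://127.0.0.1:8082/ | head -n1
```
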
@ -3,7 +3,7 @@
|
|||
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>💾🎉 redirect</title>
|
||||
<title>⇆🎉 redirect</title>
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<style>
|
||||
|
||||
|
@ -26,8 +26,8 @@ a {
|
|||
<script>
|
||||
|
||||
var a = document.getElementById('redir'),
|
||||
proto = location.protocol.indexOf('https') === 0 ? 'https' : 'http',
|
||||
loc = location.hostname || '127.0.0.1',
|
||||
proto = window.location.protocol.indexOf('https') === 0 ? 'https' : 'http',
|
||||
loc = window.location.hostname || '127.0.0.1',
|
||||
port = a.getAttribute('href').split(':').pop().split('/')[0],
|
||||
url = proto + '://' + loc + ':' + port + '/';
|
||||
|
||||
|
@ -35,7 +35,7 @@ a.setAttribute('href', url);
|
|||
document.getElementById('desc').innerHTML = 'redirecting to';
|
||||
|
||||
setTimeout(function() {
|
||||
location.href = url;
|
||||
window.location.href = url;
|
||||
}, 500);
|
||||
|
||||
</script>
|
||||
|
|
Binary file not shown.
|
@ -1,10 +0,0 @@
|
|||
{
|
||||
"Name": "copyparty",
|
||||
"RequestURL": "http://127.0.0.1:3923/screenshots/",
|
||||
"Headers": {
|
||||
"pw": "PUT_YOUR_PASSWORD_HERE_MY_DUDE",
|
||||
"accept": "json"
|
||||
},
|
||||
"FileFormName": "f",
|
||||
"ResponseURL": "{{fileurl}}"
|
||||
}
|
|
@ -1,24 +0,0 @@
|
|||
# example usage for benchmarking:
|
||||
#
|
||||
# taskset -c 1 lighttpd -Df ~/dev/copyparty/contrib/lighttpd/subdomain.conf
|
||||
#
|
||||
# lighttpd can connect to copyparty using either tcp (127.0.0.1)
|
||||
# or a unix-socket, but unix-sockets are 37% faster because
|
||||
# lighttpd doesn't reuse tcp connections, so we're doing unix-sockets
|
||||
#
|
||||
# this means we must run copyparty with one of the following:
|
||||
#
|
||||
# -i unix:777:/dev/shm/party.sock
|
||||
# -i unix:777:/dev/shm/party.sock,127.0.0.1
|
||||
#
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
|
||||
server.port = 80
|
||||
server.document-root = "/var/empty"
|
||||
server.upload-dirs = ( "/dev/shm", "/tmp" )
|
||||
server.modules = ( "mod_proxy" )
|
||||
proxy.forwarded = ( "for" => 1, "proto" => 1 )
|
||||
proxy.server = ( "" => ( ( "host" => "/dev/shm/party.sock" ) ) )
|
||||
|
||||
# if you really need to use tcp instead of unix-sockets, do this instead:
|
||||
#proxy.server = ( "" => ( ( "host" => "127.0.0.1", "port" => "3923" ) ) )
|
|
@ -1,31 +0,0 @@
|
|||
# example usage for benchmarking:
|
||||
#
|
||||
# taskset -c 1 lighttpd -Df ~/dev/copyparty/contrib/lighttpd/subpath.conf
|
||||
#
|
||||
# lighttpd can connect to copyparty using either tcp (127.0.0.1)
|
||||
# or a unix-socket, but unix-sockets are 37% faster because
|
||||
# lighttpd doesn't reuse tcp connections, so we're doing unix-sockets
|
||||
#
|
||||
# this means we must run copyparty with one of the following:
|
||||
#
|
||||
# -i unix:777:/dev/shm/party.sock
|
||||
# -i unix:777:/dev/shm/party.sock,127.0.0.1
|
||||
#
|
||||
# also since this example proxies a subpath instead of the
|
||||
# recommended subdomain-proxying, we must also specify this:
|
||||
#
|
||||
# --rp-loc files
|
||||
#
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
|
||||
server.port = 80
|
||||
server.document-root = "/var/empty"
|
||||
server.upload-dirs = ( "/dev/shm", "/tmp" )
|
||||
server.modules = ( "mod_proxy" )
|
||||
$HTTP["url"] =~ "^/files" {
|
||||
proxy.forwarded = ( "for" => 1, "proto" => 1 )
|
||||
proxy.server = ( "" => ( ( "host" => "/dev/shm/party.sock" ) ) )
|
||||
|
||||
# if you really need to use tcp instead of unix-sockets, do this instead:
|
||||
#proxy.server = ( "" => ( ( "host" => "127.0.0.1", "port" => "3923" ) ) )
|
||||
}
|
|
@ -1,104 +0,0 @@
|
|||
# media-osd-bgone.ps1: disable media-control OSD on win10do
|
||||
# v1.1, 2021-06-25, ed <irc.rizon.net>, MIT-licensed
|
||||
# https://github.com/9001/copyparty/blob/hovudstraum/contrib/media-osd-bgone.ps1
|
||||
#
|
||||
# locates the first window that looks like the media OSD and minimizes it;
|
||||
# doing this once after each reboot should do the trick
|
||||
# (adjust the width/height filter if it doesn't work)
|
||||
#
|
||||
# ---------------------------------------------------------------------
|
||||
#
|
||||
# tip: save the following as "media-osd-bgone.bat" next to this script:
|
||||
# start cmd /c "powershell -command ""set-executionpolicy -scope process bypass; .\media-osd-bgone.ps1"" & ping -n 2 127.1 >nul"
|
||||
#
|
||||
# then create a shortcut to that bat-file and move the shortcut here:
|
||||
# %appdata%\Microsoft\Windows\Start Menu\Programs\Startup
|
||||
#
|
||||
# and now this will autorun on bootup
|
||||
|
||||
|
||||
Add-Type -TypeDefinition @"
|
||||
using System;
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Diagnostics;
|
||||
using System.Runtime.InteropServices;
|
||||
using System.Windows.Forms;
|
||||
|
||||
namespace A {
|
||||
public class B : Control {
|
||||
|
||||
[DllImport("user32.dll")]
|
||||
static extern void keybd_event(byte bVk, byte bScan, uint dwFlags, int dwExtraInfo);
|
||||
|
||||
[DllImport("user32.dll", SetLastError = true)]
|
||||
static extern IntPtr FindWindowEx(IntPtr hwndParent, IntPtr hwndChildAfter, string lpszClass, string lpszWindow);
|
||||
|
||||
[DllImport("user32.dll", SetLastError=true)]
|
||||
static extern bool GetWindowRect(IntPtr hwnd, out RECT lpRect);
|
||||
|
||||
[DllImport("user32.dll")]
|
||||
static extern bool ShowWindow(IntPtr hWnd, int nCmdShow);
|
||||
|
||||
[StructLayout(LayoutKind.Sequential)]
|
||||
public struct RECT {
|
||||
public int x;
|
||||
public int y;
|
||||
public int x2;
|
||||
public int y2;
|
||||
}
|
||||
|
||||
bool fa() {
|
||||
RECT r;
|
||||
IntPtr it = IntPtr.Zero;
|
||||
while ((it = FindWindowEx(IntPtr.Zero, it, "NativeHWNDHost", "")) != IntPtr.Zero) {
|
||||
if (FindWindowEx(it, IntPtr.Zero, "DirectUIHWND", "") == IntPtr.Zero)
|
||||
continue;
|
||||
|
||||
if (!GetWindowRect(it, out r))
|
||||
continue;
|
||||
|
||||
int w = r.x2 - r.x + 1;
|
||||
int h = r.y2 - r.y + 1;
|
||||
|
||||
Console.WriteLine("[*] hwnd {0:x} @ {1}x{2} sz {3}x{4}", it, r.x, r.y, w, h);
|
||||
if (h != 141)
|
||||
continue;
|
||||
|
||||
ShowWindow(it, 6);
|
||||
Console.WriteLine("[+] poof");
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
void fb() {
|
||||
keybd_event((byte)Keys.VolumeMute, 0, 0, 0);
|
||||
keybd_event((byte)Keys.VolumeMute, 0, 2, 0);
|
||||
Thread.Sleep(500);
|
||||
keybd_event((byte)Keys.VolumeMute, 0, 0, 0);
|
||||
keybd_event((byte)Keys.VolumeMute, 0, 2, 0);
|
||||
|
||||
while (true) {
|
||||
if (fa()) {
|
||||
break;
|
||||
}
|
||||
Console.WriteLine("[!] not found");
|
||||
Thread.Sleep(1000);
|
||||
}
|
||||
this.Invoke((MethodInvoker)delegate {
|
||||
Application.Exit();
|
||||
});
|
||||
}
|
||||
|
||||
public void Run() {
|
||||
Console.WriteLine("[+] hi");
|
||||
new Thread(new ThreadStart(fb)).Start();
|
||||
Application.Run();
|
||||
Console.WriteLine("[+] bye");
|
||||
}
|
||||
}
|
||||
}
|
||||
"@ -ReferencedAssemblies System.Windows.Forms
|
||||
|
||||
(New-Object -TypeName A.B).Run()
|
|
@ -1,103 +1,39 @@
|
|||
# look for "max clients:" when starting copyparty, as nginx should
|
||||
# not accept more consecutive clients than what copyparty is able to;
|
||||
# when running copyparty behind a reverse proxy,
|
||||
# the following arguments are recommended:
|
||||
#
|
||||
# -nc 512 important, see next paragraph
|
||||
# --http-only lower latency on initial connection
|
||||
# -i 127.0.0.1 only accept connections from nginx
|
||||
#
|
||||
# -nc must match or exceed the webserver's max number of concurrent clients;
|
||||
# nginx default is 512 (worker_processes 1, worker_connections 512)
|
||||
#
|
||||
# ======================================================================
|
||||
#
|
||||
# to reverse-proxy a specific path/subpath/location below a domain
|
||||
# (rather than a complete subdomain), for example "/qw/er", you must
|
||||
# run copyparty with --rp-loc /qw/as and also change the following:
|
||||
# location / {
|
||||
# proxy_pass http://cpp_tcp;
|
||||
# to this:
|
||||
# location /qw/er/ {
|
||||
# proxy_pass http://cpp_tcp/qw/er/;
|
||||
#
|
||||
# ======================================================================
|
||||
#
|
||||
# rarely, in some extreme usecases, it can be good to add -j0
|
||||
# (40'000 requests per second, or 20gbps upload/download in parallel)
|
||||
# but this is usually counterproductive and slightly buggy
|
||||
#
|
||||
# ======================================================================
|
||||
#
|
||||
# on fedora/rhel, remember to setsebool -P httpd_can_network_connect 1
|
||||
#
|
||||
# ======================================================================
|
||||
#
|
||||
# if you are behind cloudflare (or another CDN/WAF/protection service),
|
||||
# remember to reject all connections which are not coming from your
|
||||
# protection service -- for cloudflare in particular, you can
|
||||
# generate the list of permitted IP ranges like so:
|
||||
# (curl -s https://www.cloudflare.com/ips-v{4,6} | sed 's/^/allow /; s/$/;/'; echo; echo "deny all;") > /etc/nginx/cloudflare-only.conf
|
||||
#
|
||||
# and then enable it below by uncommenting the cloudflare-only.conf line
|
||||
#
|
||||
# ======================================================================
|
||||
# you may also consider adding -j0 for CPU-intensive configurations
|
||||
# (not that i can really think of any good examples)
|
||||
|
||||
|
||||
upstream cpp_tcp {
|
||||
# alternative 1: connect to copyparty using tcp;
|
||||
# cpp_uds is slightly faster and more secure, but
|
||||
# cpp_tcp is easier to setup and "just works"
|
||||
# ...you should however restrict copyparty to only
|
||||
# accept connections from nginx by adding these args:
|
||||
# -i 127.0.0.1
|
||||
|
||||
server 127.0.0.1:3923 fail_timeout=1s;
|
||||
upstream cpp {
|
||||
server 127.0.0.1:3923;
|
||||
keepalive 1;
|
||||
}
|
||||
|
||||
|
||||
upstream cpp_uds {
|
||||
# alternative 2: unix-socket, aka. "unix domain socket";
|
||||
# 5-10% faster, and better isolation from other software,
|
||||
# but there must be at least one unix-group which both
|
||||
# nginx and copyparty is a member of; if that group is
|
||||
# "www" then run copyparty with the following args:
|
||||
# -i unix:770:www:/dev/shm/party.sock
|
||||
|
||||
server unix:/dev/shm/party.sock fail_timeout=1s;
|
||||
keepalive 1;
|
||||
}
|
||||
|
||||
|
||||
server {
|
||||
listen 443 ssl;
|
||||
listen [::]:443 ssl;
|
||||
|
||||
server_name fs.example.com;
|
||||
|
||||
# uncomment the following line to reject non-cloudflare connections, ensuring client IPs cannot be spoofed:
|
||||
#include /etc/nginx/cloudflare-only.conf;
|
||||
|
||||
location / {
|
||||
# recommendation: replace cpp_tcp with cpp_uds below
|
||||
proxy_pass http://cpp_tcp;
|
||||
proxy_pass http://cpp;
|
||||
proxy_redirect off;
|
||||
# disable buffering (next 4 lines)
|
||||
proxy_http_version 1.1;
|
||||
client_max_body_size 0;
|
||||
proxy_buffering off;
|
||||
proxy_request_buffering off;
|
||||
# improve download speed from 600 to 1500 MiB/s
|
||||
proxy_buffers 32 8k;
|
||||
proxy_buffer_size 16k;
|
||||
proxy_busy_buffers_size 24k;
|
||||
|
||||
proxy_set_header Connection "Keep-Alive";
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
# NOTE: with cloudflare you want this X-Forwarded-For instead:
|
||||
#proxy_set_header X-Forwarded-For $http_cf_connecting_ip;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_set_header Connection "Keep-Alive";
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
# default client_max_body_size (1M) blocks uploads larger than 256 MiB
|
||||
client_max_body_size 1024M;
|
||||
client_header_timeout 610m;
|
||||
client_body_timeout 610m;
|
||||
send_timeout 610m;
|
||||
|
|
|
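to actually use the cpp_uds upstream, nginx and copyparty must share a unix group as the comments above mention; a rough sketch, assuming nginx runs as user "nginx" and using "www" as the shared group (both are assumptions, adjust to your distro):

```bash
groupadd -f www        # create the shared group if it doesn't exist
usermod -aG www nginx  # let the nginx worker user read the socket
python3 copyparty-sfx.py -i unix:770:www:/dev/shm/party.sock
```
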
@ -1,375 +0,0 @@
|
|||
{
|
||||
config,
|
||||
pkgs,
|
||||
lib,
|
||||
...
|
||||
}:
|
||||
with lib;
|
||||
let
|
||||
mkKeyValue =
|
||||
key: value:
|
||||
if value == true then
|
||||
# sets with a true boolean value are coerced to just the key name
|
||||
key
|
||||
else if value == false then
|
||||
# or omitted completely when false
|
||||
""
|
||||
else
|
||||
(generators.mkKeyValueDefault { inherit mkValueString; } ": " key value);
|
||||
|
||||
mkAttrsString = value: (generators.toKeyValue { inherit mkKeyValue; } value);
|
||||
|
||||
mkValueString =
|
||||
value:
|
||||
if isList value then
|
||||
(concatStringsSep "," (map mkValueString value))
|
||||
else if isAttrs value then
|
||||
"\n" + (mkAttrsString value)
|
||||
else
|
||||
(generators.mkValueStringDefault { } value);
|
||||
|
||||
mkSectionName = value: "[" + (escape [ "[" "]" ] value) + "]";
|
||||
|
||||
mkSection = name: attrs: ''
|
||||
${mkSectionName name}
|
||||
${mkAttrsString attrs}
|
||||
'';
|
||||
|
||||
mkVolume = name: attrs: ''
|
||||
${mkSectionName name}
|
||||
${attrs.path}
|
||||
${mkAttrsString {
|
||||
accs = attrs.access;
|
||||
flags = attrs.flags;
|
||||
}}
|
||||
'';
|
||||
|
||||
passwordPlaceholder = name: "{{password-${name}}}";
|
||||
|
||||
accountsWithPlaceholders = mapAttrs (name: attrs: passwordPlaceholder name);
|
||||
|
||||
configStr = ''
|
||||
${mkSection "global" cfg.settings}
|
||||
${mkSection "accounts" (accountsWithPlaceholders cfg.accounts)}
|
||||
${concatStringsSep "\n" (mapAttrsToList mkVolume cfg.volumes)}
|
||||
'';
|
||||
|
||||
cfg = config.services.copyparty;
|
||||
configFile = pkgs.writeText "copyparty.conf" configStr;
|
||||
runtimeConfigPath = "/run/copyparty/copyparty.conf";
|
||||
externalCacheDir = "/var/cache/copyparty";
|
||||
externalStateDir = "/var/lib/copyparty";
|
||||
defaultShareDir = "${externalStateDir}/data";
|
||||
in
|
||||
{
|
||||
options.services.copyparty = {
|
||||
enable = mkEnableOption "web-based file manager";
|
||||
|
||||
package = mkOption {
|
||||
type = types.package;
|
||||
default = pkgs.copyparty;
|
||||
defaultText = "pkgs.copyparty";
|
||||
description = ''
|
||||
Package of the application to run, exposed for overriding purposes.
|
||||
'';
|
||||
};
|
||||
|
||||
mkHashWrapper = mkOption {
|
||||
type = types.bool;
|
||||
default = true;
|
||||
description = ''
|
||||
Make a shell script wrapper called 'copyparty-hash' with all options set here,
|
||||
that launches the hashing cli.
|
||||
'';
|
||||
};
|
||||
|
||||
user = mkOption {
|
||||
type = types.str;
|
||||
default = "copyparty";
|
||||
description = ''
|
||||
The user that copyparty will run under.
|
||||
|
||||
If changed from default, you are responsible for making sure the user exists.
|
||||
'';
|
||||
};
|
||||
|
||||
group = mkOption {
|
||||
type = types.str;
|
||||
default = "copyparty";
|
||||
description = ''
|
||||
The group that copyparty will run under.
|
||||
|
||||
If changed from default, you are responsible for making sure the group exists.
|
||||
'';
|
||||
};
|
||||
|
||||
openFilesLimit = mkOption {
|
||||
default = 4096;
|
||||
type = types.either types.int types.str;
|
||||
description = "Number of files to allow copyparty to open.";
|
||||
};
|
||||
|
||||
settings = mkOption {
|
||||
type = types.attrs;
|
||||
description = ''
|
||||
Global settings to apply.
|
||||
Directly maps to values in the [global] section of the copyparty config.
|
||||
Cannot set "c" or "hist", those are set by this module.
|
||||
See `${getExe cfg.package} --help` for more details.
|
||||
'';
|
||||
default = {
|
||||
i = "127.0.0.1";
|
||||
no-reload = true;
|
||||
hist = externalCacheDir;
|
||||
};
|
||||
example = literalExpression ''
|
||||
{
|
||||
i = "0.0.0.0";
|
||||
no-reload = true;
|
||||
hist = ${externalCacheDir};
|
||||
}
|
||||
'';
|
||||
};
|
||||
|
||||
accounts = mkOption {
|
||||
type = types.attrsOf (
|
||||
types.submodule (
|
||||
{ ... }:
|
||||
{
|
||||
options = {
|
||||
passwordFile = mkOption {
|
||||
type = types.str;
|
||||
description = ''
|
||||
Runtime file path to a file containing the user password.
|
||||
Must be readable by the copyparty user.
|
||||
'';
|
||||
example = "/run/keys/copyparty/ed";
|
||||
};
|
||||
};
|
||||
}
|
||||
)
|
||||
);
|
||||
description = ''
|
||||
A set of copyparty accounts to create.
|
||||
'';
|
||||
default = { };
|
||||
example = literalExpression ''
|
||||
{
|
||||
ed.passwordFile = "/run/keys/copyparty/ed";
|
||||
};
|
||||
'';
|
||||
};
|
||||
|
||||
volumes = mkOption {
|
||||
type = types.attrsOf (
|
||||
types.submodule (
|
||||
{ ... }:
|
||||
{
|
||||
options = {
|
||||
path = mkOption {
|
||||
type = types.path;
|
||||
description = ''
|
||||
Path of a directory to share.
|
||||
'';
|
||||
};
|
||||
access = mkOption {
|
||||
type = types.attrs;
|
||||
description = ''
|
||||
Attribute list of permissions and the users to apply them to.
|
||||
|
||||
The key must be a string containing any combination of allowed permission:
|
||||
"r" (read): list folder contents, download files
|
||||
"w" (write): upload files; need "r" to see the uploads
|
||||
"m" (move): move files and folders; need "w" at destination
|
||||
"d" (delete): permanently delete files and folders
|
||||
"g" (get): download files, but cannot see folder contents
|
||||
"G" (upget): "get", but can see filekeys of their own uploads
|
||||
"h" (html): "get", but folders return their index.html
|
||||
"a" (admin): can see uploader IPs, config-reload
|
||||
|
||||
For example: "rwmd"
|
||||
|
||||
The value must be one of:
|
||||
an account name, defined in `accounts`
|
||||
a list of account names
|
||||
"*", which means "any account"
|
||||
'';
|
||||
example = literalExpression ''
|
||||
{
|
||||
# wG = write-upget = see your own uploads only
|
||||
wG = "*";
|
||||
# read-write-modify-delete for users "ed" and "k"
|
||||
rwmd = ["ed" "k"];
|
||||
};
|
||||
'';
|
||||
};
|
||||
flags = mkOption {
|
||||
type = types.attrs;
|
||||
description = ''
|
||||
Attribute list of volume flags to apply.
|
||||
See `${getExe cfg.package} --help-flags` for more details.
|
||||
'';
|
||||
example = literalExpression ''
|
||||
{
|
||||
# "fk" enables filekeys (necessary for upget permission) (4 chars long)
|
||||
fk = 4;
|
||||
# scan for new files every 60sec
|
||||
scan = 60;
|
||||
# volflag "e2d" enables the uploads database
|
||||
e2d = true;
|
||||
# "d2t" disables multimedia parsers (in case the uploads are malicious)
|
||||
d2t = true;
|
||||
# skips hashing file contents if path matches *.iso
|
||||
nohash = "\.iso$";
|
||||
};
|
||||
'';
|
||||
default = { };
|
||||
};
|
||||
};
|
||||
}
|
||||
)
|
||||
);
|
||||
description = "A set of copyparty volumes to create";
|
||||
default = {
|
||||
"/" = {
|
||||
path = defaultShareDir;
|
||||
access = {
|
||||
r = "*";
|
||||
};
|
||||
};
|
||||
};
|
||||
example = literalExpression ''
|
||||
{
|
||||
"/" = {
|
||||
path = ${defaultShareDir};
|
||||
access = {
|
||||
# wG = write-upget = see your own uploads only
|
||||
wG = "*";
|
||||
# read-write-modify-delete for users "ed" and "k"
|
||||
rwmd = ["ed" "k"];
|
||||
};
|
||||
};
|
||||
};
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable (
|
||||
let
|
||||
command = "${getExe cfg.package} -c ${runtimeConfigPath}";
|
||||
in
|
||||
{
|
||||
systemd.services.copyparty = {
|
||||
description = "http file sharing hub";
|
||||
wantedBy = [ "multi-user.target" ];
|
||||
|
||||
environment = {
|
||||
PYTHONUNBUFFERED = "true";
|
||||
XDG_CONFIG_HOME = externalStateDir;
|
||||
};
|
||||
|
||||
preStart =
|
||||
let
|
||||
replaceSecretCommand =
|
||||
name: attrs:
|
||||
"${getExe pkgs.replace-secret} '${passwordPlaceholder name}' '${attrs.passwordFile}' ${runtimeConfigPath}";
|
||||
in
|
||||
''
|
||||
set -euo pipefail
|
||||
install -m 600 ${configFile} ${runtimeConfigPath}
|
||||
${concatStringsSep "\n" (mapAttrsToList replaceSecretCommand cfg.accounts)}
|
||||
'';
|
||||
|
||||
serviceConfig = {
|
||||
Type = "simple";
|
||||
ExecStart = command;
|
||||
# Hardening options
|
||||
User = cfg.user;
|
||||
Group = cfg.group;
|
||||
RuntimeDirectory = [ "copyparty" ];
|
||||
RuntimeDirectoryMode = "0700";
|
||||
StateDirectory = [ "copyparty" ];
|
||||
StateDirectoryMode = "0700";
|
||||
CacheDirectory = lib.mkIf (cfg.settings ? hist) [ "copyparty" ];
|
||||
CacheDirectoryMode = lib.mkIf (cfg.settings ? hist) "0700";
|
||||
WorkingDirectory = externalStateDir;
|
||||
BindReadOnlyPaths = [
|
||||
"/nix/store"
|
||||
"-/etc/resolv.conf"
|
||||
"-/etc/nsswitch.conf"
|
||||
"-/etc/group"
|
||||
"-/etc/hosts"
|
||||
"-/etc/localtime"
|
||||
] ++ (mapAttrsToList (k: v: "-${v.passwordFile}") cfg.accounts);
|
||||
BindPaths =
|
||||
(if cfg.settings ? hist then [ cfg.settings.hist ] else [ ])
|
||||
++ [ externalStateDir ]
|
||||
++ (mapAttrsToList (k: v: v.path) cfg.volumes);
|
||||
# ProtectSystem = "strict";
|
||||
# Note that unlike what 'ro' implies,
|
||||
# this actually makes it impossible to read anything in the root FS,
|
||||
# except for things explicitly mounted via `RuntimeDirectory`, `StateDirectory`, `CacheDirectory`, and `BindReadOnlyPaths`.
|
||||
# This is because TemporaryFileSystem creates a *new* *empty* filesystem for the process, so only bindmounts are visible.
|
||||
TemporaryFileSystem = "/:ro";
|
||||
PrivateTmp = true;
|
||||
PrivateDevices = true;
|
||||
ProtectKernelTunables = true;
|
||||
ProtectControlGroups = true;
|
||||
RestrictSUIDSGID = true;
|
||||
PrivateMounts = true;
|
||||
ProtectKernelModules = true;
|
||||
ProtectKernelLogs = true;
|
||||
ProtectHostname = true;
|
||||
ProtectClock = true;
|
||||
ProtectProc = "invisible";
|
||||
ProcSubset = "pid";
|
||||
RestrictNamespaces = true;
|
||||
RemoveIPC = true;
|
||||
UMask = "0077";
|
||||
LimitNOFILE = cfg.openFilesLimit;
|
||||
NoNewPrivileges = true;
|
||||
LockPersonality = true;
|
||||
RestrictRealtime = true;
|
||||
MemoryDenyWriteExecute = true;
|
||||
};
|
||||
};
|
||||
|
||||
# ensure volumes exist:
|
||||
systemd.tmpfiles.settings."copyparty" = (
|
||||
lib.attrsets.mapAttrs' (
|
||||
name: value:
|
||||
lib.attrsets.nameValuePair (value.path) {
|
||||
d = {
|
||||
# the ":" prefix means tmpfiles won't change these values if the directory already exists.
|
||||
group = ":${cfg.group}";
|
||||
user = ":${cfg.user}";
|
||||
mode = ":755";
|
||||
};
|
||||
}
|
||||
) cfg.volumes
|
||||
);
|
||||
|
||||
users.groups.copyparty = lib.mkIf (cfg.user == "copyparty" && cfg.group == "copyparty") { };
|
||||
users.users.copyparty = lib.mkIf (cfg.user == "copyparty" && cfg.group == "copyparty") {
|
||||
description = "Service user for copyparty";
|
||||
group = "copyparty";
|
||||
home = externalStateDir;
|
||||
isSystemUser = true;
|
||||
};
|
||||
environment.systemPackages = lib.mkIf cfg.mkHashWrapper [
|
||||
(pkgs.writeShellScriptBin "copyparty-hash" ''
|
||||
set -a # automatically export variables
|
||||
# set same environment variables as the systemd service
|
||||
${lib.pipe config.systemd.services.copyparty.environment [
|
||||
(lib.filterAttrs (n: v: v != null && n != "PATH"))
|
||||
(lib.mapAttrs (_: v: "${v}"))
|
||||
(lib.toShellVars)
|
||||
]}
|
||||
PATH=${config.systemd.services.copyparty.environment.PATH}:$PATH
|
||||
|
||||
exec ${command} --ah-cli
|
||||
'')
|
||||
];
|
||||
}
|
||||
);
|
||||
}
|
|
@ -14,5 +14,5 @@ name="$SVCNAME"
|
|||
command_background=true
|
||||
pidfile="/var/run/$SVCNAME.pid"
|
||||
|
||||
command="/usr/bin/python3 /usr/local/bin/copyparty-sfx.py"
|
||||
command="/usr/bin/python /usr/local/bin/copyparty-sfx.py"
|
||||
command_args="-q -v /mnt::rw"
|
||||
|
|
|
@ -1,48 +0,0 @@
|
|||
# Maintainer: icxes <dev.null@need.moe>
|
||||
# Contributor: Morgan Adamiec <morganamilo@archlinux.org>
|
||||
# NOTE: You generally shouldn't use this PKGBUILD on Arch, as it is mainly for testing purposes. Install copyparty using pacman instead.
|
||||
|
||||
pkgname=copyparty
|
||||
pkgver="1.19.4"
|
||||
pkgrel=1
|
||||
pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
|
||||
arch=("any")
|
||||
url="https://github.com/9001/${pkgname}"
|
||||
license=('MIT')
|
||||
depends=("bash" "python" "lsof" "python-jinja")
|
||||
makedepends=("python-wheel" "python-setuptools" "python-build" "python-installer" "make" "pigz")
|
||||
optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
|
||||
"cfssl: generate TLS certificates on startup"
|
||||
"python-mutagen: music tags (alternative)"
|
||||
"python-pillow: thumbnails for images"
|
||||
"python-pyvips: thumbnails for images (higher quality, faster, uses more ram)"
|
||||
"libkeyfinder: detection of musical keys"
|
||||
"python-pyopenssl: ftps functionality"
|
||||
"python-pyzmq: send zeromq messages from event-hooks"
|
||||
"python-argon2-cffi: hashed passwords in config"
|
||||
)
|
||||
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
|
||||
backup=("etc/${pkgname}/copyparty.conf" )
|
||||
sha256sums=("b0e84a78eb2701cb7447b6023afcec280c550617dde67b6f0285bb23483111eb")
|
||||
|
||||
build() {
|
||||
cd "${srcdir}/${pkgname}-${pkgver}/copyparty/web"
|
||||
make
|
||||
|
||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||
python -m build --wheel --no-isolation
|
||||
}
|
||||
|
||||
package() {
|
||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||
python -m installer --destdir="$pkgdir" dist/*.whl
|
||||
|
||||
install -dm755 "${pkgdir}/etc/${pkgname}"
|
||||
install -Dm755 "bin/prisonparty.sh" "${pkgdir}/usr/bin/prisonparty"
|
||||
install -Dm644 "contrib/systemd/${pkgname}.conf" "${pkgdir}/etc/${pkgname}/copyparty.conf"
|
||||
install -Dm644 "contrib/systemd/${pkgname}@.service" "${pkgdir}/usr/lib/systemd/system/${pkgname}@.service"
|
||||
install -Dm644 "contrib/systemd/${pkgname}-user.service" "${pkgdir}/usr/lib/systemd/user/${pkgname}.service"
|
||||
install -Dm644 "contrib/systemd/prisonparty@.service" "${pkgdir}/usr/lib/systemd/system/prisonparty@.service"
|
||||
install -Dm644 "contrib/systemd/index.md" "${pkgdir}/var/lib/${pkgname}-jail/README.md"
|
||||
install -Dm644 "LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
|
||||
}
|
|
@ -1,44 +0,0 @@
|
|||
# Contributor: Beethoven <beethovenisadog@protonmail.com>
|
||||
|
||||
|
||||
pkgname=copyparty
|
||||
pkgver=1.19.4
|
||||
pkgrel=1
|
||||
pkgdesc="File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++"
|
||||
arch=("any")
|
||||
url="https://github.com/9001/${pkgname}"
|
||||
license=('MIT')
|
||||
depends=("bash" "python3" "lsof" "python3-jinja2")
|
||||
makedepends=("python3-wheel" "python3-setuptools" "python3-build" "python3-installer" "make" "pigz")
|
||||
optdepends=("ffmpeg: thumbnails for videos, images (slower) and audio, music tags"
|
||||
"golang-cfssl: generate TLS certificates on startup"
|
||||
"python3-mutagen: music tags (alternative)"
|
||||
"python3-pil: thumbnails for images"
|
||||
"python3-openssl: ftps functionality"
|
||||
"python3-zmq: send zeromq messages from event-hooks"
|
||||
"python3-argon2: hashed passwords in config"
|
||||
)
|
||||
source=("https://github.com/9001/${pkgname}/releases/download/v${pkgver}/${pkgname}-${pkgver}.tar.gz")
|
||||
backup=("/etc/${pkgname}.d/init" )
|
||||
sha256sums=("b0e84a78eb2701cb7447b6023afcec280c550617dde67b6f0285bb23483111eb")
|
||||
|
||||
build() {
|
||||
cd "${srcdir}/${pkgname}-${pkgver}/copyparty/web"
|
||||
make
|
||||
|
||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||
python -m build --wheel --no-isolation
|
||||
}
|
||||
|
||||
package() {
|
||||
cd "${srcdir}/${pkgname}-${pkgver}"
|
||||
python -m installer --destdir="$pkgdir" dist/*.whl
|
||||
|
||||
install -dm755 "${pkgdir}/etc/${pkgname}.d"
|
||||
install -Dm755 "bin/prisonparty.sh" "${pkgdir}/usr/bin/prisonparty"
|
||||
install -Dm644 "contrib/package/makedeb-mpr/${pkgname}.conf" "${pkgdir}/etc/${pkgname}.d/init"
|
||||
install -Dm644 "contrib/package/makedeb-mpr/${pkgname}.service" "${pkgdir}/usr/lib/systemd/system/${pkgname}.service"
|
||||
install -Dm644 "contrib/package/makedeb-mpr/prisonparty.service" "${pkgdir}/usr/lib/systemd/system/prisonparty.service"
|
||||
install -Dm644 "contrib/package/makedeb-mpr/index.md" "${pkgdir}/var/lib/${pkgname}-jail/README.md"
|
||||
install -Dm644 "LICENSE" "${pkgdir}/usr/share/licenses/${pkgname}/LICENSE"
|
||||
}
|
|
@ -1,7 +0,0 @@
|
|||
## import all *.conf files from the current folder (/etc/copyparty.d)
|
||||
% ./
|
||||
|
||||
# add additional .conf files to this folder;
|
||||
# see example config files for reference:
|
||||
# https://github.com/9001/copyparty/blob/hovudstraum/docs/example.conf
|
||||
# https://github.com/9001/copyparty/tree/hovudstraum/docs/copyparty.d
|
|
@ -1,32 +0,0 @@
|
|||
# this will start `/usr/bin/copyparty-sfx.py`
|
||||
# and read config from `/etc/copyparty.d/*.conf`
|
||||
#
|
||||
# you probably want to:
|
||||
# change "User=cpp" and "/home/cpp/" to another user
|
||||
#
|
||||
# unless you add -q to disable logging, you may want to remove the
|
||||
# following line to allow buffering (slightly better performance):
|
||||
# Environment=PYTHONUNBUFFERED=x
|
||||
|
||||
[Unit]
|
||||
Description=copyparty file server
|
||||
|
||||
[Service]
|
||||
Type=notify
|
||||
SyslogIdentifier=copyparty
|
||||
Environment=PYTHONUNBUFFERED=x
|
||||
WorkingDirectory=/var/lib/copyparty-jail
|
||||
ExecReload=/bin/kill -s USR1 $MAINPID
|
||||
|
||||
# user to run as + where the TLS certificate is (if any)
|
||||
User=cpp
|
||||
Environment=XDG_CONFIG_HOME=/home/cpp/.config
|
||||
|
||||
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
|
||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
# run copyparty
|
||||
ExecStart=/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
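after installing this unit into your systemd unit directory and creating the `cpp` user it refers to, enabling it is the usual systemd routine; a sketch, assuming the file was saved as copyparty.service:

```bash
# assumption: the unit was installed as copyparty.service
systemctl daemon-reload
systemctl enable --now copyparty
journalctl -fu copyparty   # follow the logs
```
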
@ -1,3 +0,0 @@
|
|||
this is `/var/lib/copyparty-jail`, the fallback webroot when copyparty has not yet been configured
|
||||
|
||||
please add some `*.conf` files to `/etc/copyparty.d/`
|
|
@ -1,33 +0,0 @@
|
|||
# this will start `/usr/bin/copyparty-sfx.py`
|
||||
# in a chroot, preventing accidental access elsewhere,
|
||||
# and read copyparty config from `/etc/copyparty.d/*.conf`
|
||||
#
|
||||
# expose additional filesystem locations to copyparty
|
||||
# by listing them between the last `cpp` and `--`
|
||||
#
|
||||
# `cpp cpp` = user/group to run copyparty as; can be IDs (1000 1000)
|
||||
#
|
||||
# unless you add -q to disable logging, you may want to remove the
|
||||
# following line to allow buffering (slightly better performance):
|
||||
# Environment=PYTHONUNBUFFERED=x
|
||||
|
||||
[Unit]
|
||||
Description=copyparty file server
|
||||
|
||||
[Service]
|
||||
SyslogIdentifier=prisonparty
|
||||
Environment=PYTHONUNBUFFERED=x
|
||||
WorkingDirectory=/var/lib/copyparty-jail
|
||||
ExecReload=/bin/kill -s USR1 $MAINPID
|
||||
|
||||
# stop systemd-tmpfiles-clean.timer from deleting copyparty while it's running
|
||||
ExecStartPre=+/bin/bash -c 'mkdir -p /run/tmpfiles.d/ && echo "x /tmp/pe-copyparty*" > /run/tmpfiles.d/copyparty.conf'
|
||||
|
||||
# run copyparty
|
||||
ExecStart=/bin/bash /usr/bin/prisonparty /var/lib/copyparty-jail cpp cpp \
|
||||
/etc/copyparty.d \
|
||||
-- \
|
||||
/usr/bin/python3 /usr/bin/copyparty -c /etc/copyparty.d/init
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
|
@ -1,118 +0,0 @@
|
|||
{
|
||||
lib,
|
||||
buildPythonApplication,
|
||||
fetchurl,
|
||||
util-linux,
|
||||
python,
|
||||
setuptools,
|
||||
jinja2,
|
||||
impacket,
|
||||
pyopenssl,
|
||||
cfssl,
|
||||
argon2-cffi,
|
||||
pillow,
|
||||
pyvips,
|
||||
pyzmq,
|
||||
ffmpeg,
|
||||
mutagen,
|
||||
pyftpdlib,
|
||||
magic,
|
||||
partftpy,
|
||||
fusepy, # for partyfuse
|
||||
|
||||
# use argon2id-hashed passwords in config files (sha2 is always available)
|
||||
withHashedPasswords ? true,
|
||||
|
||||
# generate TLS certificates on startup (pointless when reverse-proxied)
|
||||
withCertgen ? false,
|
||||
|
||||
# create thumbnails with Pillow; faster than FFmpeg / MediaProcessing
|
||||
withThumbnails ? true,
|
||||
|
||||
# create thumbnails with PyVIPS; even faster, uses more memory
|
||||
# -- can be combined with Pillow to support more filetypes
|
||||
withFastThumbnails ? false,
|
||||
|
||||
# enable FFmpeg; thumbnails for most filetypes (also video and audio), extract audio metadata, transcode audio to opus
|
||||
# -- possibly dangerous if you allow anonymous uploads, since FFmpeg has a huge attack surface
|
||||
# -- can be combined with Thumbnails and/or FastThumbnails, since FFmpeg is slower than both
|
||||
withMediaProcessing ? true,
|
||||
|
||||
# if MediaProcessing is not enabled, you probably want this instead (less accurate, but much safer and faster)
|
||||
withBasicAudioMetadata ? false,
|
||||
|
||||
# send ZeroMQ messages from event-hooks
|
||||
withZeroMQ ? true,
|
||||
|
||||
# enable FTP server
|
||||
withFTP ? true,
|
||||
|
||||
# enable FTPS support in the FTP server
|
||||
withFTPS ? false,
|
||||
|
||||
# enable TFTP server
|
||||
withTFTP ? false,
|
||||
|
||||
# samba/cifs server; dangerous and buggy, enable if you really need it
|
||||
withSMB ? false,
|
||||
|
||||
# enables filetype detection for nameless uploads
|
||||
withMagic ? false,
|
||||
|
||||
# extra packages to add to the PATH
|
||||
extraPackages ? [ ],
|
||||
|
||||
# function that accepts a python packageset and returns a list of packages to
|
||||
# be added to the python venv. useful for scripts and such that require
|
||||
# additional dependencies
|
||||
extraPythonPackages ? (_p: [ ]),
|
||||
|
||||
}:
|
||||
|
||||
let
|
||||
pinData = lib.importJSON ./pin.json;
|
||||
runtimeDeps = ([ util-linux ] ++ extraPackages ++ lib.optional withMediaProcessing ffmpeg);
|
||||
in
|
||||
buildPythonApplication {
|
||||
pname = "copyparty";
|
||||
inherit (pinData) version;
|
||||
src = fetchurl {
|
||||
inherit (pinData) url hash;
|
||||
};
|
||||
dependencies =
|
||||
[
|
||||
jinja2
|
||||
fusepy
|
||||
]
|
||||
++ lib.optional withSMB impacket
|
||||
++ lib.optional withFTP pyftpdlib
|
||||
++ lib.optional withFTPS pyopenssl
|
||||
++ lib.optional withTFTP partftpy
|
||||
++ lib.optional withCertgen cfssl
|
||||
++ lib.optional withThumbnails pillow
|
||||
++ lib.optional withFastThumbnails pyvips
|
||||
++ lib.optional withMediaProcessing ffmpeg
|
||||
++ lib.optional withBasicAudioMetadata mutagen
|
||||
++ lib.optional withHashedPasswords argon2-cffi
|
||||
++ lib.optional withZeroMQ pyzmq
|
||||
++ lib.optional withMagic magic
|
||||
++ (extraPythonPackages python.pkgs);
|
||||
makeWrapperArgs = [ "--prefix PATH : ${lib.makeBinPath runtimeDeps}" ];
|
||||
|
||||
pyproject = true;
|
||||
build-system = [
|
||||
setuptools
|
||||
];
|
||||
meta = {
|
||||
description = "Turn almost any device into a file server";
|
||||
longDescription = ''
|
||||
Portable file server with accelerated resumable uploads, dedup, WebDAV,
|
||||
FTP, TFTP, zeroconf, media indexer, thumbnails++ all in one file, no deps
|
||||
'';
|
||||
homepage = "https://github.com/9001/copyparty";
|
||||
changelog = "https://github.com/9001/copyparty/releases/tag/v${pinData.version}";
|
||||
license = lib.licenses.mit;
|
||||
mainProgram = "copyparty";
|
||||
sourceProvenance = [ lib.sourceTypes.fromSource ];
|
||||
};
|
||||
}
|
|
@ -1,5 +0,0 @@
|
|||
{
|
||||
"url": "https://github.com/9001/copyparty/releases/download/v1.19.4/copyparty-1.19.4.tar.gz",
|
||||
"version": "1.19.4",
|
||||
"hash": "sha256-sOhKeOsnAct0R7YCOvzsKAxVBhfd5ntvAoW7I0gxEes="
|
||||
}
|
|
@ -1,78 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Update the Nix package pin
|
||||
#
|
||||
# Usage: ./update.sh [PATH]
|
||||
# When the [PATH] is not set, it will fetch the latest release from the repo.
|
||||
# With [PATH] set, it will hash the given file and generate the URL,
|
||||
# based on the version contained within the file
|
||||
|
||||
import base64
|
||||
import json
|
||||
import hashlib
|
||||
import sys
|
||||
import tarfile
|
||||
from pathlib import Path
|
||||
|
||||
OUTPUT_FILE = Path("pin.json")
|
||||
TARGET_ASSET = lambda version: f"copyparty-{version}.tar.gz"
|
||||
HASH_TYPE = "sha256"
|
||||
LATEST_RELEASE_URL = "https://api.github.com/repos/9001/copyparty/releases/latest"
|
||||
DOWNLOAD_URL = lambda version: f"https://github.com/9001/copyparty/releases/download/v{version}/{TARGET_ASSET(version)}"
|
||||
|
||||
|
||||
def get_formatted_hash(binary):
|
||||
hasher = hashlib.new("sha256")
|
||||
hasher.update(binary)
|
||||
asset_hash = hasher.digest()
|
||||
encoded_hash = base64.b64encode(asset_hash).decode("ascii")
|
||||
return f"{HASH_TYPE}-{encoded_hash}"
|
||||
|
||||
|
||||
def version_from_tar_gz(path):
|
||||
with tarfile.open(path) as tarball:
|
||||
release_name = tarball.getmembers()[0].name
|
||||
prefix = "copyparty-"
|
||||
|
||||
if release_name.startswith(prefix):
|
||||
return release_name.replace(prefix, "")
|
||||
raise ValueError("version not found in provided file")
|
||||
|
||||
|
||||
def remote_release_pin():
|
||||
import requests
|
||||
|
||||
response = requests.get(LATEST_RELEASE_URL).json()
|
||||
version = response["tag_name"].lstrip("v")
|
||||
asset_info = [a for a in response["assets"] if a["name"] == TARGET_ASSET(version)][0]
|
||||
download_url = asset_info["browser_download_url"]
|
||||
asset = requests.get(download_url)
|
||||
formatted_hash = get_formatted_hash(asset.content)
|
||||
|
||||
result = {"url": download_url, "version": version, "hash": formatted_hash}
|
||||
return result
|
||||
|
||||
|
||||
def local_release_pin(path):
|
||||
version = version_from_tar_gz(path)
|
||||
download_url = DOWNLOAD_URL(version)
|
||||
formatted_hash = get_formatted_hash(path.read_bytes())
|
||||
|
||||
result = {"url": download_url, "version": version, "hash": formatted_hash}
|
||||
return result
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) > 1:
|
||||
asset_path = Path(sys.argv[1])
|
||||
result = local_release_pin(asset_path)
|
||||
else:
|
||||
result = remote_release_pin()
|
||||
|
||||
print(result)
|
||||
json_result = json.dumps(result, indent=4)
|
||||
OUTPUT_FILE.write_text(json_result)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
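the docstring above describes both modes of the pin-update script; a usage sketch, keeping the filename from the script's own usage note (the tarball name is just an example):

```bash
# refresh pin.json from the latest github release (needs the python "requests" package)
./update.sh

# or pin a locally-built release tarball instead
./update.sh copyparty-1.19.4.tar.gz
```
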
@ -1,11 +0,0 @@
|
|||
final: prev: {
|
||||
copyparty = final.python3.pkgs.callPackage ./copyparty {
|
||||
ffmpeg = final.ffmpeg-full;
|
||||
};
|
||||
|
||||
python3 = prev.python3.override {
|
||||
packageOverrides = pyFinal: pyPrev: {
|
||||
partftpy = pyFinal.callPackage ./partftpy { };
|
||||
};
|
||||
};
|
||||
}
|
|
@ -1,30 +0,0 @@
|
|||
{
|
||||
lib,
|
||||
buildPythonPackage,
|
||||
fetchurl,
|
||||
setuptools,
|
||||
}:
|
||||
let
|
||||
pinData = lib.importJSON ./pin.json;
|
||||
in
|
||||
|
||||
buildPythonPackage rec {
|
||||
pname = "partftpy";
|
||||
inherit (pinData) version;
|
||||
pyproject = true;
|
||||
|
||||
src = fetchurl {
|
||||
inherit (pinData) url hash;
|
||||
};
|
||||
|
||||
build-system = [ setuptools ];
|
||||
|
||||
pythonImportsCheck = [ "partftpy.TftpServer" ];
|
||||
|
||||
meta = {
|
||||
description = "Pure Python TFTP library (copyparty edition)";
|
||||
homepage = "https://github.com/9001/partftpy";
|
||||
changelog = "https://github.com/9001/partftpy/releases/tag/${version}";
|
||||
license = lib.licenses.mit;
|
||||
};
|
||||
}
|
|
@ -1,5 +0,0 @@
|
|||
{
|
||||
"url": "https://github.com/9001/partftpy/releases/download/v0.4.0/partftpy-0.4.0.tar.gz",
|
||||
"version": "0.4.0",
|
||||
"hash": "sha256-5Q2zyuJ892PGZmb+YXg0ZPW/DK8RDL1uE0j5HPd4We0="
|
||||
}
|
|
@ -1,50 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
# Update the Nix package pin
|
||||
#
|
||||
# Usage: ./update.sh
|
||||
|
||||
import base64
|
||||
import json
|
||||
import hashlib
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
OUTPUT_FILE = Path("pin.json")
|
||||
TARGET_ASSET = lambda version: f"partftpy-{version}.tar.gz"
|
||||
HASH_TYPE = "sha256"
|
||||
LATEST_RELEASE_URL = "https://api.github.com/repos/9001/partftpy/releases/latest"
|
||||
|
||||
|
||||
def get_formatted_hash(binary):
|
||||
hasher = hashlib.new("sha256")
|
||||
hasher.update(binary)
|
||||
asset_hash = hasher.digest()
|
||||
encoded_hash = base64.b64encode(asset_hash).decode("ascii")
|
||||
return f"{HASH_TYPE}-{encoded_hash}"
|
||||
|
||||
|
||||
def remote_release_pin():
|
||||
import requests
|
||||
|
||||
response = requests.get(LATEST_RELEASE_URL).json()
|
||||
version = response["tag_name"].lstrip("v")
|
||||
asset_info = [a for a in response["assets"] if a["name"] == TARGET_ASSET(version)][0]
|
||||
download_url = asset_info["browser_download_url"]
|
||||
asset = requests.get(download_url)
|
||||
formatted_hash = get_formatted_hash(asset.content)
|
||||
|
||||
result = {"url": download_url, "version": version, "hash": formatted_hash}
|
||||
return result
|
||||
|
||||
|
||||
def main():
|
||||
result = remote_release_pin()
|
||||
|
||||
print(result)
|
||||
json_result = json.dumps(result, indent=4)
|
||||
OUTPUT_FILE.write_text(json_result)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,62 +0,0 @@
|
|||
Name: copyparty
|
||||
Version: $pkgver
|
||||
Release: $pkgrel
|
||||
License: MIT
|
||||
Group: Utilities
|
||||
URL: https://github.com/9001/copyparty
|
||||
Source0: copyparty-$pkgver.tar.gz
|
||||
Summary: File server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++
|
||||
BuildArch: noarch
|
||||
BuildRequires: python3, python3-devel, pyproject-rpm-macros, python-setuptools, python-wheel, make
|
||||
Requires: python3, (python3-jinja2 or python-jinja2), lsof
|
||||
Recommends: ffmpeg, (golang-github-cloudflare-cfssl or cfssl), python-mutagen, python-pillow, python-pyvips
|
||||
Recommends: qm-vamp-plugins, python-argon2-cffi, (python-pyopenssl or pyopenssl), python-impacket
|
||||
|
||||
%description
|
||||
Portable file server with accelerated resumable uploads, dedup, WebDAV, FTP, TFTP, zeroconf, media indexer, thumbnails++ all in one file, no deps
|
||||
|
||||
See release at https://github.com/9001/copyparty/releases
|
||||
|
||||
%global debug_package %{nil}
|
||||
|
||||
%generate_buildrequires
|
||||
%pyproject_buildrequires
|
||||
|
||||
%prep
|
||||
%setup -q
|
||||
|
||||
%build
|
||||
cd "copyparty/web"
|
||||
make
|
||||
cd -
|
||||
%pyproject_wheel
|
||||
|
||||
%install
|
||||
mkdir -p %{buildroot}%{_bindir}
|
||||
mkdir -p %{buildroot}%{_libdir}/systemd/{system,user}
|
||||
mkdir -p %{buildroot}/etc/%{name}
|
||||
mkdir -p %{buildroot}/var/lib/%{name}-jail
|
||||
mkdir -p %{buildroot}%{_datadir}/licenses/%{name}
|
||||
|
||||
%pyproject_install
|
||||
%pyproject_save_files copyparty
|
||||
|
||||
install -m 0755 bin/prisonparty.sh %{buildroot}%{_bindir}/prisonparty.sh
|
||||
install -m 0644 contrib/systemd/%{name}.conf %{buildroot}/etc/%{name}/%{name}.conf
|
||||
install -m 0644 contrib/systemd/%{name}@.service %{buildroot}%{_libdir}/systemd/system/%{name}@.service
|
||||
install -m 0644 contrib/systemd/%{name}-user.service %{buildroot}%{_libdir}/systemd/user/%{name}.service
|
||||
install -m 0644 contrib/systemd/prisonparty@.service %{buildroot}%{_libdir}/systemd/system/prisonparty@.service
|
||||
install -m 0644 contrib/systemd/index.md %{buildroot}/var/lib/%{name}-jail/README.md
|
||||
install -m 0644 LICENSE %{buildroot}%{_datadir}/licenses/%{name}/LICENSE
|
||||
|
||||
%files -n copyparty -f %{pyproject_files}
|
||||
%license LICENSE
|
||||
%{_bindir}/copyparty
|
||||
%{_bindir}/partyfuse
|
||||
%{_bindir}/u2c
|
||||
%{_bindir}/prisonparty.sh
|
||||
/etc/%{name}/%{name}.conf
|
||||
%{_libdir}/systemd/system/%{name}@.service
|
||||
%{_libdir}/systemd/user/%{name}.service
|
||||
%{_libdir}/systemd/system/prisonparty@.service
|
||||
/var/lib/%{name}-jail/README.md
|
|
@ -1,41 +0,0 @@
|
|||
# example resource files

these can be provided to copyparty to tweak things


## example `.epilogue.html`
save one of these as `.epilogue.html` inside a folder to customize it:

* [`minimal-up2k.html`](minimal-up2k.html) will [simplify the upload ui](https://user-images.githubusercontent.com/241032/118311195-dd6ca380-b4ef-11eb-86f3-75a3ff2e1332.png)


## example browser-js
point `--js-browser` to one of these by URL:

* [`minimal-up2k.js`](minimal-up2k.js) is similar to the above `minimal-up2k.html` except it applies globally to all write-only folders
* [`quickmove.js`](quickmove.js) adds a hotkey to move selected files into a subfolder
* [`up2k-hooks.js`](up2k-hooks.js) lets you specify a ruleset for files to skip uploading
* [`up2k-hook-ytid.js`](up2k-hook-ytid.js) is a more specific example checking youtube-IDs against some API


## example any-js
point `--js-browser` and/or `--js-other` to one of these by URL:

* [`banner.js`](banner.js) shows a very enterprise [legal-banner](https://github.com/user-attachments/assets/8ae8e087-b209-449c-b08d-74e040f0284b)


## example browser-css
point `--css-browser` to one of these by URL:

* [`browser-icons.css`](browser-icons.css) adds filetype icons


## meadup.js

* turns copyparty into a chromecast, just more flexible (and probably way more buggy)
* usage: put the js somewhere in the webroot and `--js-browser /memes/meadup.js`

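a short sketch of wiring these resources up on the command line; the plugin paths are placeholders inside your webroot, and the `-v /mnt::rw` volume is just the same example volume the openrc config earlier uses:

```bash
# serve /mnt and load one of the example plugins plus the css tweaks
python3 -m copyparty -v /mnt::rw \
  --js-browser /.res/quickmove.js \
  --css-browser /.res/browser-icons.css
```
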
@ -1,93 +0,0 @@
|
|||
(function() {
|
||||
|
||||
// usage: copy this to '.banner.js' in your webroot,
|
||||
// and run copyparty with the following arguments:
|
||||
// --js-browser /.banner.js --js-other /.banner.js
|
||||
|
||||
|
||||
|
||||
// had to pick the most chuuni one as the default
|
||||
var bannertext = '' +
|
||||
'<h3>You are accessing a U.S. Government (USG) Information System (IS) that is provided for USG-authorized use only.</h3>' +
|
||||
'<p>By using this IS (which includes any device attached to this IS), you consent to the following conditions:</p>' +
|
||||
'<ul>' +
|
||||
'<li>The USG routinely intercepts and monitors communications on this IS for purposes including, but not limited to, penetration testing, COMSEC monitoring, network operations and defense, personnel misconduct (PM), law enforcement (LE), and counterintelligence (CI) investigations.</li>' +
|
||||
'<li>At any time, the USG may inspect and seize data stored on this IS.</li>' +
|
||||
'<li>Communications using, or data stored on, this IS are not private, are subject to routine monitoring, interception, and search, and may be disclosed or used for any USG-authorized purpose.</li>' +
|
||||
'<li>This IS includes security measures (e.g., authentication and access controls) to protect USG interests -- not for your personal benefit or privacy.</li>' +
|
||||
'<li>Notwithstanding the above, using this IS does not constitute consent to PM, LE or CI investigative searching or monitoring of the content of privileged communications, or work product, related to personal representation or services by attorneys, psychotherapists, or clergy, and their assistants. Such communications and work product are private and confidential. See User Agreement for details.</li>' +
|
||||
'</ul>';
|
||||
|
||||
|
||||
|
||||
// fancy div to insert into pages
|
||||
function bannerdiv(border) {
|
||||
var ret = mknod('div', null, bannertext);
|
||||
if (border)
|
||||
ret.setAttribute("style", "border:1em solid var(--fg); border-width:.3em 0; margin:3em 0");
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
||||
|
||||
// keep all of these false and then selectively enable them in the if-blocks below
|
||||
var show_msgbox = false,
|
||||
login_top = false,
|
||||
top = false,
|
||||
bottom = false,
|
||||
top_bordered = false,
|
||||
bottom_bordered = false;
|
||||
|
||||
if (QS("h1#cc") && QS("a#k")) {
|
||||
// this is the controlpanel
|
||||
// (you probably want to keep just one of these enabled)
|
||||
show_msgbox = true;
|
||||
login_top = true;
|
||||
bottom = true;
|
||||
}
|
||||
else if (ebi("swin") && ebi("smac")) {
|
||||
// this is the connect-page, same deal here
|
||||
show_msgbox = true;
|
||||
top_bordered = true;
|
||||
bottom_bordered = true;
|
||||
}
|
||||
else if (ebi("op_cfg") || ebi("div#mw") ) {
|
||||
// we're running in the main filebrowser (op_cfg) or markdown-viewer/editor (div#mw),
|
||||
// fragile pages which break if you do something too fancy
|
||||
show_msgbox = true;
|
||||
}
|
||||
|
||||
|
||||
|
||||
// shows a fullscreen messagebox; works on all pages
|
||||
if (show_msgbox) {
|
||||
var now = Math.floor(Date.now() / 1000),
|
||||
last_shown = sread("bannerts") || 0;
|
||||
|
||||
// 60 * 60 * 17 = 17 hour cooldown
|
||||
if (now - last_shown > 60 * 60 * 17) {
|
||||
swrite("bannerts", now);
|
||||
modal.confirm(bannertext, null, function () {
|
||||
location = 'https://this-page-intentionally-left-blank.org/';
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// show a message on the page footer; only works on the connect-page
|
||||
if (top || top_bordered) {
|
||||
var dst = ebi('wrap');
|
||||
dst.insertBefore(bannerdiv(top_bordered), dst.firstChild);
|
||||
}
|
||||
|
||||
// show a message on the page footer; only works on the controlpanel and connect-page
|
||||
if (bottom || bottom_bordered) {
|
||||
ebi('wrap').appendChild(bannerdiv(bottom_bordered));
|
||||
}
|
||||
|
||||
// show a message on the top of the page; only works on the controlpanel
|
||||
if (login_top) {
|
||||
var dst = QS('h1');
|
||||
dst.parentNode.insertBefore(bannerdiv(false), dst);
|
||||
}
|
||||
|
||||
})();
|
|
@ -1,117 +0,0 @@
// USAGE:
//   place this file somewhere in the webroot and then
//   python3 -m copyparty --js-browser /.res/graft-thumbs.js
//
// DESCRIPTION:
//   this is a gridview plugin which, for each file in a folder,
//   looks for another file with the same filename (but with a
//   different file extension)
//
//   if one of those files is an image and the other is not,
//   then this plugin assumes the image is a "sidecar thumbnail"
//   for the other file, and it will graft the image thumbnail
//   onto the non-image file (for example an mp3)
//
//   optional feature 1, default-enabled:
//     the image-file is then hidden from the directory listing
//
//   optional feature 2, default-enabled:
//     when clicking the audio file, the image will also open


(function() {

    // `graft_thumbs` assumes the gridview has just been rendered;
    // it looks for sidecars, and transplants those thumbnails onto
    // the other file with the same basename (filename sans extension)

    var graft_thumbs = function () {
        if (!thegrid.en)
            return; // not in grid mode

        var files = msel.getall(),
            pairs = {};

        console.log(files);

        for (var a = 0; a < files.length; a++) {
            var file = files[a],
                is_pic = /\.(jpe?g|png|gif|webp)$/i.exec(file.vp),
                is_audio = re_au_all.exec(file.vp),
                basename = file.vp.replace(/\.[^\.]+$/, ""),
                entry = pairs[basename];

            if (!entry)
                // first time seeing this basename; create a new entry in pairs
                entry = pairs[basename] = {};

            if (is_pic)
                entry.thumb = file;
            else if (is_audio)
                entry.audio = file;
        }

        var basenames = Object.keys(pairs);
        for (var a = 0; a < basenames.length; a++)
            (function(a) {
                var pair = pairs[basenames[a]];

                if (!pair.thumb || !pair.audio)
                    return; // not a matching pair of files

                var img_thumb = QS('#ggrid a[ref="' + pair.thumb.id + '"] img[onload]'),
                    img_audio = QS('#ggrid a[ref="' + pair.audio.id + '"] img[onload]');

                if (!img_thumb || !img_audio)
                    return; // something's wrong... let's bail

                // alright, graft the thumb...
                img_audio.src = img_thumb.src;

                // ...and hide the sidecar
                img_thumb.closest('a').style.display = 'none';

                // ...and add another onclick-handler to the audio,
                // so it also opens the pic while playing the song
                img_audio.addEventListener('click', function() {
                    img_thumb.click();
                    return false; // let it bubble to the next listener
                });

            })(a);
    };

    // ...and then the trick! near the end of loadgrid,
    // thegrid.bagit is called to initialize the baguettebox
    // (image/video gallery); this is the perfect function to
    // "hook" (hijack) so we can run our code :^)

    // need to grab a backup of the original function first,
    var orig_func = thegrid.bagit;

    // and then replace it with our own:
    thegrid.bagit = function (isrc) {

        if (isrc !== '#ggrid')
            // we only want to modify the grid, so
            // let the original function handle this one
            return orig_func(isrc);

        graft_thumbs();

        // when changing directories, the grid is
        // rendered before msel returns the correct
        // filenames, so schedule another run:
        setTimeout(graft_thumbs, 1);

        // and finally, call the original thegrid.bagit function
        return orig_func(isrc);
    };

    if (ls0) {
        // the server included an initial listing json (ls0),
        // so the grid has already been rendered without our hook
        graft_thumbs();
    }

})();
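The wrap-and-delegate trick near the end (back up thegrid.bagit, swap in a replacement, then call the original) is the same pattern most of these browser plugins use to piggyback on copyparty's own functions; a minimal sketch of it in isolation, where some_obj, render and my_extra_work are placeholder names rather than copyparty identifiers:

    // generic sketch of the hook pattern used above (placeholder names)
    var orig_render = some_obj.render;
    some_obj.render = function () {
        var ret = orig_render.apply(this, arguments); // run the original first
        my_extra_work(); // then post-process whatever it produced
        return ret; // and keep the original return value intact
    };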
@ -1,506 +0,0 @@
// USAGE:
//   place this file somewhere in the webroot and then
//   python3 -m copyparty --js-browser /memes/meadup.js
//
// FEATURES:
//   * adds an onscreen keyboard for operating a media center remotely,
//     relies on https://github.com/9001/copyparty/blob/hovudstraum/bin/mtag/very-bad-idea.py
//   * adds an interactive anime girl (if you can find the dependencies)

var hambagas = [
    "https://www.youtube.com/watch?v=pFA3KGp4GuU"
];

// keybaord,
// onscreen keyboard by @steinuil
function initKeybaord(BASE_URL, HAMBAGA, consoleLog, consoleError) {
    document.querySelector('.keybaord-container').innerHTML = `
    <div class="keybaord-body">
        <div class="keybaord-row keybaord-row-1">
            <div class="keybaord-key" data-keybaord-key="Escape">esc</div>
            <div class="keybaord-key" data-keybaord-key="F1">F1</div>
            <div class="keybaord-key" data-keybaord-key="F2">F2</div>
            <div class="keybaord-key" data-keybaord-key="F3">F3</div>
            <div class="keybaord-key" data-keybaord-key="F4">F4</div>
            <div class="keybaord-key" data-keybaord-key="F5">F5</div>
            <div class="keybaord-key" data-keybaord-key="F6">F6</div>
            <div class="keybaord-key" data-keybaord-key="F7">F7</div>
            <div class="keybaord-key" data-keybaord-key="F8">F8</div>
            <div class="keybaord-key" data-keybaord-key="F9">F9</div>
            <div class="keybaord-key" data-keybaord-key="F10">F10</div>
            <div class="keybaord-key" data-keybaord-key="F11">F11</div>
            <div class="keybaord-key" data-keybaord-key="F12">F12</div>
            <div class="keybaord-key" data-keybaord-key="Insert">ins</div>
            <div class="keybaord-key" data-keybaord-key="Delete">del</div>
        </div>
        <div class="keybaord-row keybaord-row-2">
            <div class="keybaord-key" data-keybaord-key="\`">\`</div>
            <div class="keybaord-key" data-keybaord-key="1">1</div>
            <div class="keybaord-key" data-keybaord-key="2">2</div>
            <div class="keybaord-key" data-keybaord-key="3">3</div>
            <div class="keybaord-key" data-keybaord-key="4">4</div>
            <div class="keybaord-key" data-keybaord-key="5">5</div>
            <div class="keybaord-key" data-keybaord-key="6">6</div>
            <div class="keybaord-key" data-keybaord-key="7">7</div>
            <div class="keybaord-key" data-keybaord-key="8">8</div>
            <div class="keybaord-key" data-keybaord-key="9">9</div>
            <div class="keybaord-key" data-keybaord-key="0">0</div>
            <div class="keybaord-key" data-keybaord-key="-">-</div>
            <div class="keybaord-key" data-keybaord-key="=">=</div>
            <div class="keybaord-key keybaord-backspace" data-keybaord-key="BackSpace">backspace</div>
        </div>
        <div class="keybaord-row keybaord-row-3">
            <div class="keybaord-key keybaord-tab" data-keybaord-key="Tab">tab</div>
            <div class="keybaord-key" data-keybaord-key="q">q</div>
            <div class="keybaord-key" data-keybaord-key="w">w</div>
            <div class="keybaord-key" data-keybaord-key="e">e</div>
            <div class="keybaord-key" data-keybaord-key="r">r</div>
            <div class="keybaord-key" data-keybaord-key="t">t</div>
            <div class="keybaord-key" data-keybaord-key="y">y</div>
            <div class="keybaord-key" data-keybaord-key="u">u</div>
            <div class="keybaord-key" data-keybaord-key="i">i</div>
            <div class="keybaord-key" data-keybaord-key="o">o</div>
            <div class="keybaord-key" data-keybaord-key="p">p</div>
            <div class="keybaord-key" data-keybaord-key="[">[</div>
            <div class="keybaord-key" data-keybaord-key="]">]</div>
            <div class="keybaord-key keybaord-enter" data-keybaord-key="Return">enter</div>
        </div>
        <div class="keybaord-row keybaord-row-4">
            <div class="keybaord-key keybaord-capslock" data-keybaord-key="HAMBAGA">🍔</div>
            <div class="keybaord-key" data-keybaord-key="a">a</div>
            <div class="keybaord-key" data-keybaord-key="s">s</div>
            <div class="keybaord-key" data-keybaord-key="d">d</div>
            <div class="keybaord-key" data-keybaord-key="f">f</div>
            <div class="keybaord-key" data-keybaord-key="g">g</div>
            <div class="keybaord-key" data-keybaord-key="h">h</div>
            <div class="keybaord-key" data-keybaord-key="j">j</div>
            <div class="keybaord-key" data-keybaord-key="k">k</div>
            <div class="keybaord-key" data-keybaord-key="l">l</div>
            <div class="keybaord-key" data-keybaord-key=";">;</div>
            <div class="keybaord-key" data-keybaord-key="'">'</div>
            <div class="keybaord-key keybaord-backslash" data-keybaord-key="\\">\\</div>
        </div>
        <div class="keybaord-row keybaord-row-5">
            <div class="keybaord-key keybaord-lshift" data-keybaord-key="Shift_L">shift</div>
            <div class="keybaord-key" data-keybaord-key="\\">\\</div>
            <div class="keybaord-key" data-keybaord-key="z">z</div>
            <div class="keybaord-key" data-keybaord-key="x">x</div>
            <div class="keybaord-key" data-keybaord-key="c">c</div>
            <div class="keybaord-key" data-keybaord-key="v">v</div>
            <div class="keybaord-key" data-keybaord-key="b">b</div>
            <div class="keybaord-key" data-keybaord-key="n">n</div>
            <div class="keybaord-key" data-keybaord-key="m">m</div>
            <div class="keybaord-key" data-keybaord-key=",">,</div>
            <div class="keybaord-key" data-keybaord-key=".">.</div>
            <div class="keybaord-key" data-keybaord-key="/">/</div>
            <div class="keybaord-key keybaord-rshift" data-keybaord-key="Shift_R">shift</div>
        </div>
        <div class="keybaord-row keybaord-row-6">
            <div class="keybaord-key keybaord-lctrl" data-keybaord-key="Control_L">ctrl</div>
            <div class="keybaord-key keybaord-super" data-keybaord-key="Meta_L">win</div>
            <div class="keybaord-key keybaord-alt" data-keybaord-key="Alt_L">alt</div>
            <div class="keybaord-key keybaord-spacebar" data-keybaord-key="space">space</div>
            <div class="keybaord-key keybaord-altgr" data-keybaord-key="Alt_R">altgr</div>
            <div class="keybaord-key keybaord-what" data-keybaord-key="Menu">menu</div>
            <div class="keybaord-key keybaord-rctrl" data-keybaord-key="Control_R">ctrl</div>
        </div>
        <div class="keybaord-row">
            <div class="keybaord-key" data-keybaord-key="XF86AudioLowerVolume">🔉</div>
            <div class="keybaord-key" data-keybaord-key="XF86AudioRaiseVolume">🔊</div>
            <div class="keybaord-key" data-keybaord-key="Left">⬅️</div>
            <div class="keybaord-key" data-keybaord-key="Down">⬇️</div>
            <div class="keybaord-key" data-keybaord-key="Up">⬆️</div>
            <div class="keybaord-key" data-keybaord-key="Right">➡️</div>
            <div class="keybaord-key" data-keybaord-key="Page_Up">PgUp</div>
            <div class="keybaord-key" data-keybaord-key="Page_Down">PgDn</div>
            <div class="keybaord-key" data-keybaord-key="Home">🏠</div>
            <div class="keybaord-key" data-keybaord-key="End">End</div>
        </div>
    </div>
    `;

    function arraySample(array) {
        return array[Math.floor(Math.random() * array.length)];
    }

    function sendMessage(msg) {
        return fetch(BASE_URL, {
            method: "POST",
            headers: {
                "Content-Type": "application/x-www-form-urlencoded;charset=UTF-8",
            },
            body: "msg=" + encodeURIComponent(msg),
        }).then(
            (r) => r.text(), // so the response body shows up in network tab
            (err) => consoleError(err)
        );
    }

    const MODIFIER_ON_CLASS = "keybaord-modifier-on";
    const KEY_DATASET = "data-keybaord-key";
    const KEY_CLASS = "keybaord-key";

    const modifiers = new Set();

    function toggleModifier(button, key) {
        button.classList.toggle(MODIFIER_ON_CLASS);
        if (modifiers.has(key)) {
            modifiers.delete(key);
        } else {
            modifiers.add(key);
        }
    }

    function popModifiers() {
        let modifierString = "";

        modifiers.forEach((mod) => {
            document.querySelector("[" + KEY_DATASET + "='" + mod + "']")
                .classList.remove(MODIFIER_ON_CLASS);

            modifierString += mod + "+";
        });

        modifiers.clear();

        return modifierString;
    }

    Array.from(document.querySelectorAll("." + KEY_CLASS)).forEach((button) => {
        const key = button.dataset.keybaordKey;

        button.addEventListener("click", (ev) => {
            switch (key) {
                case "HAMBAGA":
                    sendMessage(arraySample(HAMBAGA));
                    break;

                case "Shift_L":
                case "Shift_R":
                case "Control_L":
                case "Control_R":
                case "Meta_L":
                case "Alt_L":
                case "Alt_R":
                    toggleModifier(button, key);
                    break;

                default: {
                    const keyWithModifiers = popModifiers() + key;

                    consoleLog(keyWithModifiers);

                    sendMessage("key " + keyWithModifiers)
                        .then(() => consoleLog(keyWithModifiers + " OK"));
                }
            }
        });
    });
}


// keybaord integration
(function () {
    var o = mknod('div');
    clmod(o, 'keybaord-container', 1);
    ebi('op_msg').appendChild(o);

    o = mknod('style');
    o.innerHTML = `
    .keybaord-body {
        display: flex;
        flex-flow: column nowrap;
        margin: .6em 0;
    }
    .keybaord-row {
        display: flex;
    }
    .keybaord-key {
        border: 1px solid rgba(128,128,128,0.2);
        width: 41px;
        height: 40px;
        display: flex;
        justify-content: center;
        align-items: center;
    }
    .keybaord-key:active {
        background-color: lightgrey;
    }
    .keybaord-key.keybaord-modifier-on {
        background-color: lightblue;
    }
    .keybaord-key.keybaord-backspace { width: 82px; }
    .keybaord-key.keybaord-tab { width: 55px; }
    .keybaord-key.keybaord-enter { width: 69px; }
    .keybaord-key.keybaord-capslock { width: 80px; }
    .keybaord-key.keybaord-backslash { width: 88px; }
    .keybaord-key.keybaord-lshift { width: 65px; }
    .keybaord-key.keybaord-rshift { width: 103px; }
    .keybaord-key.keybaord-lctrl { width: 55px; }
    .keybaord-key.keybaord-super { width: 55px; }
    .keybaord-key.keybaord-alt { width: 55px; }
    .keybaord-key.keybaord-altgr { width: 55px; }
    .keybaord-key.keybaord-what { width: 55px; }
    .keybaord-key.keybaord-rctrl { width: 55px; }
    .keybaord-key.keybaord-spacebar { width: 302px; }
    `;
    document.head.appendChild(o);

    initKeybaord('/', hambagas,
        (msg) => { toast.inf(2, msg.toString()) },
        (msg) => { toast.err(30, msg.toString()) });
})();


// live2d (dumb pointless meme)
// dependencies for this part are not tracked in git
// so delete this section if you wanna use this file
// (or supply your own l2d model and js)
(function () {
    var o = mknod('link');
    o.setAttribute('rel', 'stylesheet');
    o.setAttribute('href', "/bad-memes/pio.css");
    document.head.appendChild(o);

    o = mknod('style');
    o.innerHTML = '.pio-container{text-shadow:none;z-index:1}';
    document.head.appendChild(o);

    o = mknod('div');
    clmod(o, 'pio-container', 1);
    o.innerHTML = '<div class="pio-action"></div><canvas id="pio" width="280" height="500"></canvas>';
    document.body.appendChild(o);

    var remaining = 3;
    for (var a of ['pio', 'l2d', 'fireworks']) {
        import_js(`/bad-memes/${a}.js`, function () {
            if (remaining --> 1)
                return;

            o = mknod('script');
            o.innerHTML = 'var pio = new Paul_Pio({"selector":[],"mode":"fixed","hidden":false,"content":{"close":"ok bye"},"model":["/bad-memes/sagiri/model.json"]});';
            document.body.appendChild(o);
        });
    }
})();
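Every key click above ends up as a plain copyparty message-POST: sendMessage() urlencodes the text into a `msg=` form body against BASE_URL ('/'), and keys are prefixed with "key", so ctrl followed by the right-arrow becomes "key Control_L+Right". The equivalent request done by hand, assuming the target volume runs very-bad-idea.py to interpret the message:

    // what the onscreen keyboard sends for ctrl + right-arrow;
    // same as sendMessage("key Control_L+Right") above
    fetch('/', {
        method: 'POST',
        headers: { 'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8' },
        body: 'msg=' + encodeURIComponent('key Control_L+Right'),
    });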
@ -1,76 +0,0 @@
/*

makes the up2k ui REALLY minimal by hiding a bunch of stuff

almost the same as minimal-up2k.html except this one...:

-- applies to every write-only folder when used with --js-browser

-- only applies if javascript is enabled

-- doesn't hide the total upload ETA display

-- looks slightly better


========================
== USAGE INSTRUCTIONS ==

1. create a volume which anyone can read from (if you haven't already)
2. copy this file into that volume, so anyone can download it
3. enable the plugin by telling the webbrowser to load this file;
   assuming the URL to the public volume is /res/, and
   assuming you're using config-files, then add this to your config:

   [global]
     js-browser: /res/minimal-up2k.js

   alternatively, if you're not using config-files, then
   add the following commandline argument instead:
   --js-browser=/res/minimal-up2k.js

*/

var u2min = `
<style>

#ops, #path, #tree, #files, #wfp,
#u2conf td.c+.c, #u2cards, #srch_dz, #srch_zd {
    display: none !important;
}
#u2conf {margin:5em auto 0 auto !important}
#u2conf.ww {width:70em}
#u2conf.w {width:50em}
#u2conf.w .c,
#u2conf.w #u2btn_cw {text-align:left}
#u2conf.w #u2btn_cw {width:70%}
#u2etaw {margin:3em auto}
#u2etaw.w {
    text-align: center;
    margin: -3.5em auto 5em auto;
}
#u2etaw.w #u2etas {margin-right:-37em}
#u2etaw.w #u2etas.o {margin-top:-2.2em}
#u2etaw.ww {margin:-1em auto}
#u2etaw.ww #u2etas {padding-left:4em}
#u2etas {
    background: none !important;
    border: none !important;
}
#wrap {margin-left:2em !important}
.logue {
    border: none !important;
    margin: 2em auto !important;
}
.logue:before {content:'' !important}

</style>

<a href="#" onclick="this.parentNode.innerHTML='';">show advanced options</a>
`;

if (!has(perms, 'read')) {
    var e2 = mknod('div');
    e2.innerHTML = u2min;
    ebi('wrap').insertBefore(e2, QS('#wfp'));
}
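This version only kicks in when the viewer cannot read the folder (that is the `!has(perms, 'read')` check at the end), which is what limits it to write-only volumes. If you would rather force the minimal UI everywhere, a sketch of that variation is simply the same injection without the permission gate:

    // variation: always inject the minimal upload UI, not only in write-only folders
    var e2 = mknod('div');
    e2.innerHTML = u2min;
    ebi('wrap').insertBefore(e2, QS('#wfp'));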
@ -1,140 +0,0 @@
"use strict";


// USAGE:
//   place this file somewhere in the webroot,
//   for example in a folder named ".res" to hide it, and then
//   python3 copyparty-sfx.py -v .::A --js-browser /.res/quickmove.js
//
// DESCRIPTION:
//   the command above launches copyparty with one single volume;
//   ".::A" = current folder as webroot, and everyone has Admin
//
//   the plugin adds hotkey "W" which moves all selected files
//   into a subfolder named "foobar" inside the current folder


(function() {

    var action_to_perform = ask_for_confirmation_and_then_move;
    // this decides what the new hotkey should do;
    // ask_for_confirmation_and_then_move = show a yes/no box,
    // move_selected_files = just move the files immediately

    var move_destination = "foobar";
    // this is the target folder to move files to;
    // by default it is a subfolder of the current folder,
    // but it can also be an absolute path like "/foo/bar"

    // ===
    // === END OF CONFIG
    // ===

    var main_hotkey_handler, // copyparty's original hotkey handler
        plugin_enabler, // timer to engage this plugin when safe
        files_to_move; // list of files to move

    function ask_for_confirmation_and_then_move() {
        var num_files = msel.getsel().length,
            msg = "move the selected " + num_files + " files?";

        if (!num_files)
            return toast.warn(2, 'no files were selected to be moved');

        modal.confirm(msg, move_selected_files, null);
    }

    function move_selected_files() {
        var selection = msel.getsel();

        if (!selection.length)
            return toast.warn(2, 'no files were selected to be moved');

        if (thegrid.bbox) {
            // close image/video viewer
            thegrid.bbox = null;
            baguetteBox.destroy();
        }

        files_to_move = [];
        for (var a = 0; a < selection.length; a++)
            files_to_move.push(selection[a].vp);

        move_next_file();
    }

    function move_next_file() {
        var num_files = files_to_move.length,
            filepath = files_to_move.pop(),
            filename = vsplit(filepath)[1];

        toast.inf(10, "moving " + num_files + " files...\n\n" + filename);

        var dst = move_destination;

        if (!dst.endsWith('/'))
            // must have a trailing slash, so add it
            dst += '/';

        if (!dst.startsWith('/'))
            // destination is a relative path, so prefix current folder path
            dst = get_evpath() + dst;

        // and finally append the filename
        dst += '/' + filename;

        // prepare the move-request to be sent
        var xhr = new XHR();
        xhr.onload = xhr.onerror = function() {
            if (this.status !== 201)
                return toast.err(30, 'move failed: ' + esc(this.responseText));

            if (files_to_move.length)
                return move_next_file(); // still more files to go

            toast.ok(1, 'move OK');
            treectl.goto(); // reload the folder contents
        };
        xhr.open('POST', filepath + '?move=' + dst);
        xhr.send();
    }

    function our_hotkey_handler(e) {
        // bail if either ALT, CTRL, or SHIFT is pressed
        if (e.altKey || e.shiftKey || e.isComposing || ctrl(e))
            return main_hotkey_handler(e); // let copyparty handle this keystroke

        var key_name = (e.code || e.key) + '',
            ae = document.activeElement,
            aet = ae && ae != document.body ? ae.nodeName.toLowerCase() : '';

        // check the current aet (active element type),
        // only continue if one of the following currently has input focus:
        // nothing | link | button | table-row | table-cell | div | text
        if (aet && !/^(a|button|tr|td|div|pre)$/.test(aet))
            return main_hotkey_handler(e); // let copyparty handle this keystroke

        if (key_name == 'KeyW') {
            // okay, this one's for us... do the thing
            action_to_perform();
            return ev(e);
        }

        return main_hotkey_handler(e); // let copyparty handle this keystroke
    }

    function enable_plugin() {
        if (!window.hotkeys_attached)
            return console.log('quickmove is waiting for the page to finish loading');

        clearInterval(plugin_enabler);
        main_hotkey_handler = document.onkeydown;
        document.onkeydown = our_hotkey_handler;
        console.log('quickmove is now enabled');
    }

    // copyparty doesn't enable its hotkeys until the page
    // has finished loading, so we'll wait for that too
    plugin_enabler = setInterval(enable_plugin, 100);

})();
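Both config values at the top are meant to be edited in place; for example, to skip the yes/no prompt and always move files into one fixed absolute path (the path below is just an illustration), you would set:

    // example config: move immediately, into a fixed absolute destination
    var action_to_perform = move_selected_files;
    var move_destination = "/sorted/incoming";

Each file then gets moved with a `POST <filepath>?move=<destination>` request, which is what move_next_file() sends and what its 201 status-check expects back.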
@ -1,208 +0,0 @@
/* untz untz untz untz */

(function () {

    var can, ctx, W, H, fft, buf, bars, barw, pv,
        hue = 0,
        ibeat = 0,
        beats = [9001],
        beats_url = '',
        uofs = 0,
        ops = ebi('ops'),
        raving = false,
        recalc = 0,
        cdown = 0,
        FC = 0.9,
        css = `<style>

#fft {
    position: fixed;
    top: 0;
    left: 0;
    z-index: -1;
}
body {
    box-shadow: inset 0 0 0 white;
}
#ops>a,
#path>a {
    display: inline-block;
}
/*
body.untz {
    animation: untz-body 200ms ease-out;
}
@keyframes untz-body {
    0% {inset 0 0 20em white}
    100% {inset 0 0 0 white}
}
*/
:root, html.a, html.b, html.c, html.d, html.e {
    --row-alt: rgba(48,52,78,0.2);
}
#files td {
    background: none;
}

</style>`;

    QS('body').appendChild(mknod('div', null, css));

    function rave_load() {
        console.log('rave_load');
        can = mknod('canvas', 'fft');
        QS('body').appendChild(can);
        ctx = can.getContext('2d');

        fft = new AnalyserNode(actx, {
            "fftSize": 2048,
            "maxDecibels": 0,
            "smoothingTimeConstant": 0.7,
        });
        ibeat = 0;
        beats = [9001];
        buf = new Uint8Array(fft.frequencyBinCount);
        bars = buf.length * FC;
        afilt.filters.push(fft);
        if (!raving) {
            raving = true;
            raver();
        }
        beats_url = mp.au.src.split('?')[0].replace(/(.*\/)(.*)/, '$1.beats/$2.txt');
        console.log("reading beats from", beats_url);
        var xhr = new XHR();
        xhr.open('GET', beats_url, true);
        xhr.onload = readbeats;
        xhr.url = beats_url;
        xhr.send();
    }

    function rave_unload() {
        qsr('#fft');
        can = null;
    }

    function readbeats() {
        if (this.url != beats_url)
            return console.log('old beats??', this.url, beats_url);

        var sbeats = this.responseText.replace(/\r/g, '').split(/\n/g);
        if (sbeats.length < 3)
            return;

        beats = [];
        for (var a = 0; a < sbeats.length; a++)
            beats.push(parseFloat(sbeats[a]));

        var end = beats.slice(-2),
            t = end[1],
            d = t - end[0];

        while (d > 0.1 && t < 1200)
            beats.push(t += d);
    }

    function hrand() {
        return Math.random() - 0.5;
    }

    function raver() {
        if (!can) {
            raving = false;
            return;
        }

        requestAnimationFrame(raver);
        if (!mp || !mp.au || mp.au.paused)
            return;

        if (--uofs >= 0) {
            document.body.style.marginLeft = hrand() * uofs + 'px';
            ebi('tree').style.marginLeft = hrand() * uofs + 'px';
            for (var a of QSA('#ops>a, #path>a, #pctl>a'))
                a.style.transform = 'translate(' + hrand() * uofs * 1 + 'px, ' + hrand() * uofs * 0.7 + 'px) rotate(' + Math.random() * uofs * 0.7 + 'deg)'
        }

        if (--recalc < 0) {
            recalc = 60;
            var tree = ebi('tree'),
                x = tree.style.display == 'none' ? 0 : tree.offsetWidth;

            //W = can.width = window.innerWidth - x;
            //H = can.height = window.innerHeight;
            //H = ebi('widget').offsetTop;
            W = can.width = bars;
            H = can.height = 512;
            barw = 1; //parseInt(0.8 + W / bars);
            can.style.left = x + 'px';
            can.style.width = (window.innerWidth - x) + 'px';
            can.style.height = ebi('widget').offsetTop + 'px';
        }

        //if (--cdown == 1)
        //    clmod(ops, 'untz');

        fft.getByteFrequencyData(buf);

        var imax = 0, vmax = 0;
        for (var a = 10; a < 50; a++)
            if (vmax < buf[a]) {
                vmax = buf[a];
                imax = a;
            }

        hue = hue * 0.93 + imax * 0.07;

        ctx.fillStyle = 'rgba(0,0,0,0)';
        ctx.fillRect(0, 0, W, H);
        ctx.clearRect(0, 0, W, H);
        ctx.fillStyle = 'hsla(' + (hue * 2.5) + ',100%,50%,0.7)';

        var x = 0, mul = (H / 256) * 0.5;
        for (var a = 0; a < buf.length * FC; a++) {
            var v = buf[a] * mul * (1 + 0.69 * a / buf.length);
            ctx.fillRect(x, H - v, barw, v);
            x += barw;
        }

        var t = mp.au.currentTime + 0.05;

        if (ibeat >= beats.length || beats[ibeat] > t)
            return;

        while (ibeat < beats.length && beats[ibeat++] < t)
            continue;

        return untz();

        var cv = 0;
        for (var a = 0; a < 128; a++)
            cv += buf[a];

        if (cv - pv > 1000) {
            console.log(pv, cv, cv - pv);
            if (cdown < 0) {
                clmod(ops, 'untz', 1);
                cdown = 20;
            }
        }
        pv = cv;
    }

    function untz() {
        console.log('untz');
        uofs = 14;
        document.body.animate([
            { boxShadow: 'inset 0 0 1em #f0c' },
            { boxShadow: 'inset 0 0 20em #f0c', offset: 0.2 },
            { boxShadow: 'inset 0 0 0 #f0c' },
        ], { duration: 200, iterations: 1 });
    }

    afilt.plugs.push({
        "en": true,
        "load": rave_load,
        "unload": rave_unload
    });

})();
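The beat-flash relies on a pre-computed sidecar per song: rave_load() rewrites the audio URL so that /music/song.opus is looked up as /music/.beats/song.opus.txt, and readbeats() expects that file to contain nothing but beat timestamps in seconds, one per line (it extrapolates past the end using the spacing of the last two entries). A small sketch of that URL mapping on its own:

    // same rewrite as rave_load() performs; given an audio URL,
    // this returns where the plugin will look for its beat timestamps
    function beats_url_for(audio_url) {
        return audio_url.split('?')[0].replace(/(.*\/)(.*)/, '$1.beats/$2.txt');
    }
    // beats_url_for('/music/song.opus') -> '/music/.beats/song.opus.txt'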
@ -1,297 +0,0 @@
// way more specific example --
// assumes all files dropped into the uploader have a youtube-id somewhere in the filename,
// locates the youtube-ids and passes them to an API which returns a list of IDs which should be uploaded
//
// also tries to find the youtube-id in the embedded metadata
//
// assumes copyparty is behind nginx as /ytq is a standalone service which must be rproxied in place

function up2k_namefilter(good_files, nil_files, bad_files, hooks) {
    var passthru = up2k.uc.fsearch;
    if (passthru)
        return hooks[0](good_files, nil_files, bad_files, hooks.slice(1));

    a_up2k_namefilter(good_files, nil_files, bad_files, hooks).then(() => { });
}

// ebi('op_up2k').appendChild(mknod('input','unick'));

function bstrpos(buf, ptn) {
    var ofs = 0,
        ch0 = ptn[0],
        sz = buf.byteLength;

    while (true) {
        ofs = buf.indexOf(ch0, ofs);
        if (ofs < 0 || ofs >= sz)
            return -1;

        for (var a = 1; a < ptn.length; a++)
            if (buf[ofs + a] !== ptn[a])
                break;

        if (a === ptn.length)
            return ofs;

        ++ofs;
    }
}

async function a_up2k_namefilter(good_files, nil_files, bad_files, hooks) {
    var t0 = Date.now(),
        yt_ids = new Set(),
        textdec = new TextDecoder('latin1'),
        md_ptn = new TextEncoder().encode('youtube.com/watch?v='),
        file_ids = [], // all IDs found for each good_files
        md_only = [], // `${id} ${fn}` where ID was only found in metadata
        mofs = 0,
        mnchk = 0,
        mfile = '',
        myid = localStorage.getItem('ytid_t0');

    if (!myid)
        localStorage.setItem('ytid_t0', myid = Date.now());

    for (var a = 0; a < good_files.length; a++) {
        var [fobj, name] = good_files[a],
            cname = name, // will clobber
            sz = fobj.size,
            ids = [],
            fn_ids = [],
            md_ids = [],
            id_ok = false,
            m;

        // all IDs found in this file
        file_ids.push(ids);

        // look for ID in filename; reduce the
        // metadata-scan intensity if the id looks safe
        m = /[\[(-]([\w-]{11})[\])]?\.(?:mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name);
        id_ok = !!m;

        while (true) {
            // fuzzy catch-all;
            // some ytdl fork did %(title)-%(id).%(ext) ...
            m = /(?:^|[^\w])([\w-]{11})(?:$|[^\w-])/.exec(cname);
            if (!m)
                break;

            cname = cname.replace(m[1], '');
            yt_ids.add(m[1]);
            fn_ids.unshift(m[1]);
        }

        // look for IDs in video metadata,
        if (/\.(mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name)) {
            toast.show('inf r', 0, `analyzing file ${a + 1} / ${good_files.length} :\n${name}\n\nhave analysed ${++mnchk} files in ${(Date.now() - t0) / 1000} seconds, ${humantime((good_files.length - (a + 1)) * (((Date.now() - t0) / 1000) / mnchk))} remaining,\n\nbiggest offset so far is ${mofs}, in this file:\n\n${mfile}`);

            // check first and last 128 MiB;
            // pWxOroN5WCo.mkv @ 6edb98 (6.92M)
            // Nf-nN1wF5Xo.mp4 @ 4a98034 (74.6M)
            var chunksz = 1024 * 1024 * 2, // byte
                aspan = id_ok ? 128 : 512; // MiB

            aspan = parseInt(Math.min(sz / 2, aspan * 1024 * 1024) / chunksz) * chunksz;
            if (!aspan)
                aspan = Math.min(sz, chunksz);

            for (var side = 0; side < 2; side++) {
                var ofs = side ? Math.max(0, sz - aspan) : 0,
                    nchunks = aspan / chunksz;

                for (var chunk = 0; chunk < nchunks; chunk++) {
                    var bchunk = await fobj.slice(ofs, ofs + chunksz + 16).arrayBuffer(),
                        uchunk = new Uint8Array(bchunk, 0, bchunk.byteLength),
                        bofs = bstrpos(uchunk, md_ptn),
                        absofs = Math.min(ofs + bofs, (sz - ofs) + bofs),
                        txt = bofs < 0 ? '' : textdec.decode(uchunk.subarray(bofs)),
                        m;

                    //console.log(`side ${ side }, chunk ${ chunk }, ofs ${ ofs }, bchunk ${ bchunk.byteLength }, txt ${ txt.length }`);
                    while (true) {
                        // mkv/webm have [a-z] immediately after url
                        m = /(youtube\.com\/watch\?v=[\w-]{11})/.exec(txt);
                        if (!m)
                            break;

                        txt = txt.replace(m[1], '');
                        m = m[1].slice(-11);

                        console.log(`found ${m} @${bofs}, ${name} `);
                        yt_ids.add(m);
                        if (!has(fn_ids, m) && !has(md_ids, m)) {
                            md_ids.push(m);
                            md_only.push(`${m} ${name}`);
                        }
                        else
                            // id appears several times; make it preferred
                            md_ids.unshift(m);

                        // bail after next iteration
                        chunk = nchunks - 1;
                        side = 9;

                        if (mofs < absofs) {
                            mofs = absofs;
                            mfile = name;
                        }
                    }
                    ofs += chunksz;
                    if (ofs >= sz)
                        break;
                }
            }
        }

        for (var yi of md_ids)
            ids.push(yi);

        for (var yi of fn_ids)
            if (!has(ids, yi))
                ids.push(yi);
    }

    if (md_only.length)
        console.log('recovered the following youtube-IDs by inspecting metadata:\n\n' + md_only.join('\n'));
    else if (yt_ids.size)
        console.log('did not discover any additional youtube-IDs by inspecting metadata; all the IDs also existed in the filenames');
    else
        console.log('failed to find any youtube-IDs at all, sorry');

    if (false) {
        var msg = `finished analysing ${mnchk} files in ${(Date.now() - t0) / 1000} seconds,\n\nbiggest offset was ${mofs} in this file:\n\n${mfile}`,
            mfun = function () { toast.ok(0, msg); };

        mfun();
        setTimeout(mfun, 200);

        return hooks[0]([], [], [], hooks.slice(1));
    }

    var el = ebi('unick'), unick = el ? el.value : '';
    if (unick) {
        console.log(`sending uploader nickname [${unick}]`);
        fetch(document.location, {
            method: 'POST',
            headers: { 'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8' },
            body: 'msg=' + encodeURIComponent(unick)
        });
    }

    toast.inf(5, `running query for ${yt_ids.size} youtube-IDs...`);

    var xhr = new XHR();
    xhr.open('POST', '/ytq', true);
    xhr.setRequestHeader('Content-Type', 'text/plain');
    xhr.onload = xhr.onerror = function () {
        if (this.status != 200)
            return toast.err(0, `sorry, database query failed ;_;\n\nplease let us know so we can look at it, thx!!\n\nerror ${this.status}: ${(this.response && this.response.err) || this.responseText}`);

        process_id_list(this.responseText);
    };
    xhr.send(Array.from(yt_ids).join('\n'));

    function process_id_list(txt) {
        var wanted_ids = new Set(txt.trim().split('\n')),
            name_id = {},
            wanted_names = new Set(), // basenames with a wanted ID -- not including relpath
            wanted_names_scoped = {}, // basenames with a wanted ID -> list of dirs to search under
            wanted_files = new Set(); // filedrops

        for (var a = 0; a < good_files.length; a++) {
            var name = good_files[a][1];
            for (var b = 0; b < file_ids[a].length; b++)
                if (wanted_ids.has(file_ids[a][b])) {
                    // let the next stage handle this to prevent dupes
                    //wanted_files.add(good_files[a]);

                    var m = /(.*)\.(mp4|webm|mkv|flv|opus|ogg|mp3|m4a|aac)$/i.exec(name);
                    if (!m)
                        continue;

                    var [rd, fn] = vsplit(m[1]);

                    if (fn in wanted_names_scoped)
                        wanted_names_scoped[fn].push(rd);
                    else
                        wanted_names_scoped[fn] = [rd];

                    wanted_names.add(fn);
                    name_id[m[1]] = file_ids[a][b];

                    break;
                }
        }

        // add all files with the same basename as each explicitly wanted file
        // (infojson/chatlog/etc when ID was discovered from metadata)
        for (var a = 0; a < good_files.length; a++) {
            var [rd, name] = vsplit(good_files[a][1]);
            for (var b = 0; b < 3; b++) {
                name = name.replace(/\.[^\.]+$/, '');
                if (!wanted_names.has(name))
                    continue;

                var vid_fp = false;
                for (var c of wanted_names_scoped[name])
                    if (rd.startsWith(c))
                        vid_fp = c + name;

                if (!vid_fp)
                    continue;

                var subdir = name_id[vid_fp];
                subdir = `v${subdir.slice(0, 1)}/${subdir}-${myid}`;
                var newpath = subdir + '/' + good_files[a][1].split(/\//g).pop();

                // check if this file is a dupe
                for (var c of good_files)
                    if (c[1] == newpath)
                        newpath = null;

                if (!newpath)
                    break;

                good_files[a][1] = newpath;
                wanted_files.add(good_files[a]);
                break;
            }
        }

        function upload_filtered() {
            if (!wanted_files.size)
                return modal.alert('Good news -- turns out we already have all those.\n\nBut thank you for checking in!');

            hooks[0](Array.from(wanted_files), nil_files, bad_files, hooks.slice(1));
        }

        function upload_all() {
            hooks[0](good_files, nil_files, bad_files, hooks.slice(1));
        }

        var n_skip = good_files.length - wanted_files.size,
            msg = `you added ${good_files.length} files; ${good_files.length == n_skip ? 'all' : n_skip} of them were skipped --\neither because we already have them,\nor because there is no youtube-ID in your filenames.\n\n<code>OK</code> / <code>Enter</code> = continue uploading just the ${wanted_files.size} files we definitely need\n\n<code>Cancel</code> / <code>ESC</code> = override the filter; upload ALL the files you added`;

        if (!n_skip)
            upload_filtered();
        else
            modal.confirm(msg, upload_filtered, upload_all);
    };
}

up2k_hooks.push(function () {
    up2k.gotallfiles.unshift(up2k_namefilter);
});

// persist/restore nickname field if present
setInterval(function () {
    var o = ebi('unick');
    if (!o || document.activeElement == o)
        return;

    o.oninput = function () {
        localStorage.setItem('unick', o.value);
    };
    o.value = localStorage.getItem('unick') || '';
}, 1000);
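None of the /ytq backend ships with copyparty; the contract the code above assumes is just newline-separated youtube-IDs posted as text/plain, with the reply being the subset of IDs that should still be uploaded. A rough Node sketch of a mock service honoring that contract (the `have` set stands in for whatever database you actually query), to be reverse-proxied as /ytq next to copyparty:

    // minimal mock of the assumed /ytq contract; not a real service
    const http = require('http');
    const have = new Set(['dQw4w9WgXcQ']); // placeholder for a real lookup

    http.createServer((req, res) => {
        let body = '';
        req.on('data', (c) => { body += c; });
        req.on('end', () => {
            // reply with the IDs we do NOT have yet, one per line
            const wanted = body.trim().split('\n').filter((id) => id && !have.has(id));
            res.writeHead(200, { 'Content-Type': 'text/plain' });
            res.end(wanted.join('\n'));
        });
    }).listen(8021);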
@ -1,45 +0,0 @@
// hooks into up2k

function up2k_namefilter(good_files, nil_files, bad_files, hooks) {
    // is called when stuff is dropped into the browser,
    // after iterating through the directory tree and discovering all files,
    // before the upload confirmation dialogue is shown

    // good_files will successfully upload
    // nil_files are empty files and will show an alert in the final hook
    // bad_files are unreadable and cannot be uploaded
    var file_lists = [good_files, nil_files, bad_files];

    // build a list of filenames
    var filenames = [];
    for (var lst of file_lists)
        for (var ent of lst)
            filenames.push(ent[1]);

    toast.inf(5, "running database query...");

    // simulate delay while passing the list to some api for checking
    setTimeout(function () {

        // only keep webm files as an example
        var new_lists = [];
        for (var lst of file_lists) {
            var keep = [];
            new_lists.push(keep);

            for (var ent of lst)
                if (/\.webm$/.test(ent[1]))
                    keep.push(ent);
        }

        // finally, call the next hook in the chain
        [good_files, nil_files, bad_files] = new_lists;
        hooks[0](good_files, nil_files, bad_files, hooks.slice(1));

    }, 1000);
}

// register
up2k_hooks.push(function () {
    up2k.gotallfiles.unshift(up2k_namefilter);
});
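The setTimeout above only fakes the round-trip to an API; wiring in a real one mostly means posting `filenames` somewhere and filtering on the reply. A rough sketch, where /check and its reply format (a JSON array of filenames to keep) are made-up placeholders for your own service:

    // hypothetical variant: ask a real endpoint which filenames to keep
    fetch('/check', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(filenames),
    }).then((r) => r.json()).then(function (keep_names) {
        var keep = new Set(keep_names),
            new_lists = file_lists.map((lst) =>
                lst.filter((ent) => keep.has(ent[1])));

        [good_files, nil_files, bad_files] = new_lists;
        hooks[0](good_files, nil_files, bad_files, hooks.slice(1));
    });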
@ -10,7 +10,7 @@ name="copyparty"
rcvar="copyparty_enable"
copyparty_user="copyparty"
copyparty_args="-e2dsa -v /storage:/storage:r" # change as you see fit
copyparty_command="/usr/local/bin/python3.9 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
copyparty_command="/usr/local/bin/python3.8 /usr/local/copyparty/copyparty-sfx.py ${copyparty_args}"
pidfile="/var/run/copyparty/${name}.pid"
command="/usr/sbin/daemon"
command_args="-P ${pidfile} -r -f ${copyparty_command}"
Some files were not shown because too many files have changed in this diff.