Compare commits
244 commits
| Author | SHA1 | Date | |
|---|---|---|---|
| a913c11230 | |||
|
|
9d1e169472 | ||
|
|
4f03a3b709 | ||
|
|
e8d6d054d0 | ||
|
|
ab2210f02d | ||
|
|
5256785b9a | ||
|
|
4460c9c26d | ||
| f87a2f52e1 | |||
| 627ac3645a | |||
| 2020a2395a | |||
|
|
fd734c5a7b | ||
|
|
d86c290c25 | ||
|
|
838d0b0a74 | ||
|
|
ddcc1626f8 | ||
|
|
fbf02025e0 | ||
|
|
a899663ddc | ||
|
|
d2fda68afd | ||
|
|
9fb6993e1b | ||
|
|
4bae78d419 | ||
|
|
f3821628e3 | ||
|
|
b4c6a90fe8 | ||
|
|
a37d60d741 | ||
|
|
71757f6562 | ||
|
|
892628d16d | ||
|
|
2a1b6e2873 | ||
|
|
756cfb6798 | ||
|
|
698ee181b4 | ||
|
|
c80aec05de | ||
|
|
69622b3321 | ||
|
|
1e6332039f | ||
|
|
eb15e04d24 | ||
|
|
25461b75f7 | ||
|
|
4a35cbd23d | ||
|
|
1cdc247c63 | ||
|
|
4c63243692 | ||
|
|
7b9a0e6d71 | ||
|
|
5873e40484 | ||
|
|
f8abae1f58 | ||
|
|
4912e1782d | ||
|
|
6ed678dfa6 | ||
| 7c0deab8c5 | |||
| 00fcffe5b9 | |||
| 246e864ce4 | |||
| c4bcfb70df | |||
| cf8010a33e | |||
| 4c657591a7 | |||
| 6ae0635da7 | |||
| 11dbfe75b9 | |||
| 58ee25bfbb | |||
|
|
fd87664b9e | ||
|
|
731863af9c | ||
|
|
5b72099802 | ||
|
|
c67848d473 | ||
|
|
a454af32f5 | ||
|
|
e557bbcd9d | ||
| b20576da2e | |||
| dee0e01af9 | |||
|
|
e488cc0a42 | ||
|
|
be21f914f4 | ||
|
|
b9eeebdfd7 | ||
|
|
c79e8fb086 | ||
| 8da6785c46 | |||
| 3deb267333 | |||
| 0d7bbab384 | |||
| aafe0f8a81 | |||
| 24faec8de2 | |||
| 816d2332ab | |||
| a4a547e76e | |||
|
|
6cec7d39d6 | ||
|
|
3fbf7e03cf | ||
|
|
31d277ae34 | ||
|
|
3487e93128 | ||
| 93b513d09c | |||
|
|
6443db213a | ||
|
|
263c915d40 | ||
|
|
388d67f5b3 | ||
|
|
6adf0be349 | ||
|
|
2516206a31 | ||
|
|
9311d603fb | ||
|
|
34df23e468 | ||
|
|
1029aa97d2 | ||
|
|
ebd22c07d1 | ||
|
|
97b2dffcb9 | ||
|
|
613135a402 | ||
|
|
120a86953e | ||
|
|
8d0bf2d2de | ||
|
|
32ec7a3446 | ||
|
|
9fffc49aaa | ||
|
|
32a2a0e5fa | ||
|
|
5608f974a3 | ||
|
|
f280dfa26f | ||
|
|
0326330d66 | ||
| d35705912f | |||
|
|
74fa8f5581 | ||
|
|
967e2d0e71 | ||
|
|
ee7e6d87f2 | ||
| e326285085 | |||
|
|
80817ac65e | ||
|
|
5f4083888d | ||
|
|
eb08a3fff2 | ||
|
|
d6209837b3 | ||
|
|
59b524741d | ||
| e941f8c7c1 | |||
| b147d2b19d | |||
| d65758d8f7 | |||
| f5ed0e2e66 | |||
| 3b74ab8623 | |||
| c971f297a5 | |||
| 720b51d08e | |||
| 27b725e382 | |||
|
|
86d62173ff | ||
|
|
cbae0760d0 | ||
|
|
1fed47d0e0 | ||
|
|
712a629d84 | ||
|
|
84ad11452e | ||
|
|
ae17ad49ff | ||
|
|
e2f9315c07 | ||
|
|
fdd6bb5f1a | ||
|
|
7936c01316 | ||
|
|
d92f246c56 | ||
|
|
8f166ed705 | ||
|
|
b44292650e | ||
| 68c79595fd | |||
|
|
be7ce02295 | ||
|
|
b50028cf73 | ||
|
|
82dd0b290a | ||
|
|
981997a621 | ||
|
|
126ac6e0e7 | ||
|
|
3e3baa089b | ||
|
|
25d27edddb | ||
|
|
300744b577 | ||
|
|
ac94214ee6 | ||
|
|
a2d156aa22 | ||
|
|
31d5f556f0 | ||
| 83832f4f53 | |||
|
|
3175b5aa25 | ||
|
|
6c1afa42d2 | ||
| 3a04241e3b | |||
| 353c23c6cd | |||
| 70877306af | |||
| 9abce8876a | |||
| 22d1b08456 | |||
|
|
453ab11fb2 | ||
|
|
0aeeaeb973 | ||
|
|
aac6086ca6 | ||
| 078c73ee2c | |||
| 8fd8e7153e | |||
| 90e18796cf | |||
|
|
6ad9f87a93 | ||
|
|
6d241a18da | ||
|
|
d81b8a9e14 | ||
|
|
5d8cdd6416 | ||
| f5d78b374c | |||
|
|
272799da62 | ||
|
|
85171750f1 | ||
|
|
eaad13e422 | ||
|
|
6ef13aef47 | ||
|
|
ef029d23db | ||
| 8fd2fb995f | |||
| a5683966a8 | |||
|
|
798beec97b | ||
|
|
e6ce7751a9 | ||
| 0b049c3621 | |||
|
|
7d480c67d1 | ||
|
|
8352c1f49d | ||
| c7d75ca0d3 | |||
| e3dd94813c | |||
| bc68761b1d | |||
| c144bc118d | |||
| b9e70c29ef | |||
|
|
dff532ac72 | ||
|
|
810e3b1201 | ||
| 5e4475d61e | |||
| d96c6f438d | |||
|
|
9d19dbab99 | ||
|
|
ffeb70f787 | ||
|
|
865cfabf88 | ||
|
|
271110c1a5 | ||
|
|
180a6ba962 | ||
|
|
9bbebab1a2 | ||
|
|
33dbca5b3a | ||
| c2e9af76a5 | |||
| 703db7eaef | |||
| 0140643761 | |||
| d4a86697d9 | |||
|
|
6b0d4296d9 | ||
|
|
6f971f10cf | ||
|
|
1bff36b990 | ||
|
|
7a5c28a12a | ||
|
|
631f0e817b | ||
| 949d641715 | |||
|
|
9b99a7f902 | ||
|
|
9f05b19b6b | ||
|
|
74cfb2e0db | ||
|
|
dcd664ffbc | ||
|
|
534124cae2 | ||
|
|
c3195b2011 | ||
|
|
d1050dab76 | ||
|
|
dc95f95738 | ||
|
|
086d0a052b | ||
|
|
54fd8379ad | ||
|
|
b344c80ad2 | ||
|
|
264202c7b3 | ||
|
|
4e321f4f47 | ||
|
|
be8846bd89 | ||
|
|
b1397e1670 | ||
|
|
0afe1ab4b0 | ||
|
|
57dc812b70 | ||
|
|
e91d7c3291 | ||
|
|
bf5e2f205e | ||
|
|
53f866b583 | ||
| 467e75e3b1 | |||
| 8532f789ac | |||
| 679c4e1403 | |||
|
|
d635a39141 | ||
| 8da0828b4a | |||
| ccde26725f | |||
|
|
7e6efb2356 | ||
|
|
bd6dda2cd0 | ||
|
|
e7d76bb194 | ||
|
|
318ee6ee17 | ||
|
|
f86a88ca19 | ||
|
|
d38ca268c4 | ||
|
|
0cb2807667 | ||
| 862fba5ac5 | |||
|
|
47ac4ee817 | ||
| 9d12c7c00c | |||
|
|
b7107a9e33 | ||
| 8857c98eaf | |||
| c3dd582659 | |||
| b3a0833d30 | |||
| 300302d432 | |||
|
|
0907521971 | ||
|
|
4744ae4328 | ||
|
|
2b076e59c1 | ||
|
|
6878e4de60 | ||
| 9d8583314b | |||
| 290171d006 | |||
| 4cfbef3b09 | |||
| 27eaddaa05 | |||
| 31e25efbfb | |||
| c588c3f674 | |||
| b5e17f131a | |||
| 2c34bb32ca |
279 changed files with 9130 additions and 6382 deletions
|
|
@ -1,110 +0,0 @@
|
|||
labels:
|
||||
platform: linux/amd64
|
||||
|
||||
when:
|
||||
event:
|
||||
- push
|
||||
- tag
|
||||
branch:
|
||||
- develop
|
||||
- stable
|
||||
|
||||
variables:
|
||||
- &scw-secrets
|
||||
SCW_ACCESS_KEY:
|
||||
from_secret: SCW_ACCESS_KEY
|
||||
SCW_SECRET_KEY:
|
||||
from_secret: SCW_SECRET_KEY
|
||||
SCW_DEFAULT_ORGANIZATION_ID:
|
||||
from_secret: SCW_DEFAULT_ORGANIZATION_ID
|
||||
- &setup-hex "mix local.hex --force && mix local.rebar --force"
|
||||
- &on-stable
|
||||
when:
|
||||
event:
|
||||
- push
|
||||
- tag
|
||||
branch:
|
||||
- stable
|
||||
|
||||
- &tag-build "export BUILD_TAG=$${CI_COMMIT_TAG:-\"$CI_COMMIT_BRANCH\"} && export PLEROMA_BUILD_BRANCH=$BUILD_TAG"
|
||||
|
||||
- &clean "(rm -rf release || true) && (rm -rf _build || true) && (rm -rf /root/.mix)"
|
||||
- &mix-clean "mix deps.clean --all && mix clean"
|
||||
|
||||
steps:
|
||||
# Canonical amd64
|
||||
debian-bookworm:
|
||||
image: hexpm/elixir:1.15.4-erlang-26.0.2-debian-bookworm-20230612
|
||||
environment:
|
||||
MIX_ENV: prod
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
commands:
|
||||
- apt-get update && apt-get install -y cmake libmagic-dev rclone zip imagemagick libmagic-dev git build-essential g++ wget
|
||||
- *clean
|
||||
- echo "import Config" > config/prod.secret.exs
|
||||
- *setup-hex
|
||||
- *tag-build
|
||||
- mix deps.get --only prod
|
||||
- mix release --path release
|
||||
- zip akkoma-amd64.zip -r release
|
||||
|
||||
release-debian-bookworm:
|
||||
image: akkoma/releaser
|
||||
environment: *scw-secrets
|
||||
commands:
|
||||
- export SOURCE=akkoma-amd64.zip
|
||||
# AMD64
|
||||
- export DEST=scaleway:akkoma-updates/$${CI_COMMIT_TAG:-"$CI_COMMIT_BRANCH"}/akkoma-amd64.zip
|
||||
- /bin/sh /entrypoint.sh
|
||||
# Ubuntu jammy (currently compatible)
|
||||
- export DEST=scaleway:akkoma-updates/$${CI_COMMIT_TAG:-"$CI_COMMIT_BRANCH"}/akkoma-amd64-ubuntu-jammy.zip
|
||||
- /bin/sh /entrypoint.sh
|
||||
|
||||
debian-bullseye:
|
||||
image: hexpm/elixir:1.15.4-erlang-26.0.2-debian-bullseye-20230612
|
||||
environment:
|
||||
MIX_ENV: prod
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
commands:
|
||||
- apt-get update && apt-get install -y cmake libmagic-dev rclone zip imagemagick libmagic-dev git build-essential g++ wget
|
||||
- *clean
|
||||
- echo "import Config" > config/prod.secret.exs
|
||||
- *setup-hex
|
||||
- *tag-build
|
||||
- mix deps.get --only prod
|
||||
- mix release --path release
|
||||
- zip akkoma-amd64-debian-bullseye.zip -r release
|
||||
|
||||
release-debian-bullseye:
|
||||
image: akkoma/releaser
|
||||
environment: *scw-secrets
|
||||
commands:
|
||||
- export SOURCE=akkoma-amd64-debian-bullseye.zip
|
||||
# AMD64
|
||||
- export DEST=scaleway:akkoma-updates/$${CI_COMMIT_TAG:-"$CI_COMMIT_BRANCH"}/akkoma-amd64-debian-bullseye.zip
|
||||
- /bin/sh /entrypoint.sh
|
||||
|
||||
# Canonical amd64-musl
|
||||
musl:
|
||||
image: hexpm/elixir:1.15.4-erlang-26.0.2-alpine-3.18.2
|
||||
<<: *on-stable
|
||||
environment:
|
||||
MIX_ENV: prod
|
||||
commands:
|
||||
- apk add git gcc g++ musl-dev make cmake file-dev rclone wget zip imagemagick
|
||||
- *clean
|
||||
- *setup-hex
|
||||
- *mix-clean
|
||||
- *tag-build
|
||||
- mix deps.get --only prod
|
||||
- mix release --path release
|
||||
- zip akkoma-amd64-musl.zip -r release
|
||||
|
||||
release-musl:
|
||||
image: akkoma/releaser
|
||||
<<: *on-stable
|
||||
environment: *scw-secrets
|
||||
commands:
|
||||
- export SOURCE=akkoma-amd64-musl.zip
|
||||
- export DEST=scaleway:akkoma-updates/$${CI_COMMIT_TAG:-"$CI_COMMIT_BRANCH"}/akkoma-amd64-musl.zip
|
||||
- /bin/sh /entrypoint.sh
|
||||
|
|
@ -1,84 +0,0 @@
|
|||
labels:
|
||||
platform: linux/arm64
|
||||
|
||||
when:
|
||||
event:
|
||||
- push
|
||||
- tag
|
||||
branch:
|
||||
- develop
|
||||
- stable
|
||||
|
||||
variables:
|
||||
- &scw-secrets
|
||||
SCW_ACCESS_KEY:
|
||||
from_secret: SCW_ACCESS_KEY
|
||||
SCW_SECRET_KEY:
|
||||
from_secret: SCW_SECRET_KEY
|
||||
SCW_DEFAULT_ORGANIZATION_ID:
|
||||
from_secret: SCW_DEFAULT_ORGANIZATION_ID
|
||||
- &setup-hex "mix local.hex --force && mix local.rebar --force"
|
||||
- &on-stable
|
||||
when:
|
||||
event:
|
||||
- push
|
||||
- tag
|
||||
branch:
|
||||
- stable
|
||||
|
||||
- &tag-build "export BUILD_TAG=$${CI_COMMIT_TAG:-\"$CI_COMMIT_BRANCH\"} && export PLEROMA_BUILD_BRANCH=$BUILD_TAG"
|
||||
|
||||
- &clean "(rm -rf release || true) && (rm -rf _build || true) && (rm -rf /root/.mix)"
|
||||
- &mix-clean "mix deps.clean --all && mix clean"
|
||||
|
||||
steps:
|
||||
# Canonical arm64
|
||||
debian-bookworm:
|
||||
image: hexpm/elixir:1.15.4-erlang-26.0.2-debian-bookworm-20230612
|
||||
environment:
|
||||
MIX_ENV: prod
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
commands:
|
||||
- apt-get update && apt-get install -y cmake libmagic-dev rclone zip imagemagick libmagic-dev git build-essential g++ wget
|
||||
- *clean
|
||||
- echo "import Config" > config/prod.secret.exs
|
||||
- *setup-hex
|
||||
- *tag-build
|
||||
- mix deps.get --only prod
|
||||
- mix release --path release
|
||||
- zip akkoma-arm64.zip -r release
|
||||
|
||||
release-debian-bookworm:
|
||||
image: akkoma/releaser:arm64
|
||||
environment: *scw-secrets
|
||||
commands:
|
||||
- export SOURCE=akkoma-arm64.zip
|
||||
- export DEST=scaleway:akkoma-updates/$${CI_COMMIT_TAG:-"$CI_COMMIT_BRANCH"}/akkoma-arm64-ubuntu-jammy.zip
|
||||
- /bin/sh /entrypoint.sh
|
||||
- export DEST=scaleway:akkoma-updates/$${CI_COMMIT_TAG:-"$CI_COMMIT_BRANCH"}/akkoma-arm64.zip
|
||||
- /bin/sh /entrypoint.sh
|
||||
|
||||
# Canonical arm64-musl
|
||||
musl:
|
||||
image: hexpm/elixir:1.15.4-erlang-26.0.2-alpine-3.18.2
|
||||
<<: *on-stable
|
||||
environment:
|
||||
MIX_ENV: prod
|
||||
commands:
|
||||
- apk add git gcc g++ musl-dev make cmake file-dev rclone wget zip imagemagick
|
||||
- *clean
|
||||
- *setup-hex
|
||||
- *mix-clean
|
||||
- *tag-build
|
||||
- mix deps.get --only prod
|
||||
- mix release --path release
|
||||
- zip akkoma-arm64-musl.zip -r release
|
||||
|
||||
release-musl:
|
||||
image: akkoma/releaser:arm64
|
||||
<<: *on-stable
|
||||
environment: *scw-secrets
|
||||
commands:
|
||||
- export SOURCE=akkoma-arm64-musl.zip
|
||||
- export DEST=scaleway:akkoma-updates/$${CI_COMMIT_TAG:-"$CI_COMMIT_BRANCH"}/akkoma-arm64-musl.zip
|
||||
- /bin/sh /entrypoint.sh
|
||||
|
|
@ -1,9 +1,6 @@
|
|||
labels:
|
||||
platform: linux/amd64
|
||||
|
||||
depends_on:
|
||||
- build-amd64
|
||||
|
||||
when:
|
||||
event:
|
||||
- push
|
||||
|
|
|
|||
104
.woodpecker/publish.yml
Normal file
104
.woodpecker/publish.yml
Normal file
|
|
@ -0,0 +1,104 @@
|
|||
when:
|
||||
event:
|
||||
- push
|
||||
- tag
|
||||
branch:
|
||||
- develop
|
||||
- stable
|
||||
evaluate: 'SKIP_DEVELOP != "YES" || CI_COMMIT_BRANCH != "develop"'
|
||||
|
||||
matrix:
|
||||
include:
|
||||
# Canonical amd64
|
||||
- ARCH: amd64
|
||||
SUFFIX:
|
||||
IMG_VAR: debian-bookworm-20230612
|
||||
UBUNTU_EXPORT: YES
|
||||
|
||||
# old debian variant of amd64
|
||||
- ARCH: amd64
|
||||
SUFFIX: -debian-bullseye
|
||||
IMG_VAR: debian-bullseye-20230612
|
||||
|
||||
# Canonical amd64-musl
|
||||
- ARCH: amd64
|
||||
SUFFIX: -musl
|
||||
IMG_VAR: alpine-3.18.2
|
||||
SKIP_DEVELOP: YES
|
||||
|
||||
# Canonical arm64
|
||||
- ARCH: arm64
|
||||
SUFFIX:
|
||||
RELEASER_TAG: :arm64
|
||||
IMG_VAR: debian-bookworm-20230612
|
||||
UBUNTU_EXPORT: YES
|
||||
|
||||
# Canonical arm64-musl
|
||||
- ARCH: arm64
|
||||
SUFFIX: -musl
|
||||
RELEASER_TAG: :arm64
|
||||
IMG_VAR: alpine-3.18.2
|
||||
SKIP_DEVELOP: YES
|
||||
|
||||
labels:
|
||||
platform: linux/${ARCH}
|
||||
|
||||
steps:
|
||||
# Canonical amd64
|
||||
build:
|
||||
image: hexpm/elixir:1.15.4-erlang-26.0.2-${IMG_VAR}
|
||||
environment:
|
||||
MIX_ENV: prod
|
||||
DEBIAN_FRONTEND: noninteractive
|
||||
commands: |
|
||||
# install deps
|
||||
case "${IMG_VAR}" in
|
||||
debian*)
|
||||
apt-get update && apt-get install -y \
|
||||
cmake libmagic-dev rclone zip git wget \
|
||||
build-essential g++ imagemagick libmagic-dev
|
||||
;;
|
||||
alpine*)
|
||||
apk add git gcc g++ musl-dev make cmake file-dev rclone wget zip imagemagick
|
||||
;;
|
||||
*)
|
||||
echo "No package manager defined for ${BASE_IMG}!"
|
||||
exit 1
|
||||
esac
|
||||
|
||||
# clean leftovers
|
||||
rm -rf release
|
||||
rm -rf _build
|
||||
rm -rf /root/.mix
|
||||
|
||||
# setup
|
||||
echo "import Config" > config/prod.secret.exs
|
||||
mix local.hex --force
|
||||
mix local.rebar --force
|
||||
export BUILD_TAG=$${CI_COMMIT_TAG:-\"$CI_COMMIT_BRANCH\"}
|
||||
export PLEROMA_BUILD_BRANCH=$BUILD_TAG
|
||||
|
||||
# actually build and zip up
|
||||
mix deps.get --only prod
|
||||
mix release --path release
|
||||
zip akkoma-${ARCH}${SUFFIX}.zip -r release
|
||||
|
||||
release:
|
||||
image: akkoma/releaser${RELEASER_TAG}
|
||||
environment:
|
||||
SCW_ACCESS_KEY:
|
||||
from_secret: SCW_ACCESS_KEY
|
||||
SCW_SECRET_KEY:
|
||||
from_secret: SCW_SECRET_KEY
|
||||
SCW_DEFAULT_ORGANIZATION_ID:
|
||||
from_secret: SCW_DEFAULT_ORGANIZATION_ID
|
||||
commands: |
|
||||
export SOURCE=akkoma-${ARCH}${SUFFIX}.zip
|
||||
export DEST=scaleway:akkoma-updates/$${CI_COMMIT_TAG:-"$CI_COMMIT_BRANCH"}/$${SOURCE}
|
||||
/bin/sh /entrypoint.sh
|
||||
|
||||
if [ "${UBUNTU_EXPORT}" = "YES" ] ; then
|
||||
# Ubuntu jammy (currently compatible with our default debian builds)
|
||||
export DEST=scaleway:akkoma-updates/$${CI_COMMIT_TAG:-"$CI_COMMIT_BRANCH"}/akkoma-${ARCH}-ubuntu-jammy.zip
|
||||
/bin/sh /entrypoint.sh
|
||||
fi
|
||||
|
|
@ -1,18 +1,20 @@
|
|||
labels:
|
||||
platform: linux/amd64
|
||||
|
||||
when:
|
||||
- event: pull_request
|
||||
|
||||
matrix:
|
||||
# test the lowest and highest versions
|
||||
include:
|
||||
- ELIXIR_VERSION: 1.14
|
||||
- ELIXIR_VERSION: 1.15
|
||||
OTP_VERSION: 25
|
||||
LINT: NO
|
||||
- ELIXIR_VERSION: 1.18
|
||||
OTP_VERSION: 27
|
||||
PLATFORM: linux/amd64
|
||||
- ELIXIR_VERSION: 1.19
|
||||
OTP_VERSION: 28
|
||||
LINT: YES
|
||||
PLATFORM: linux/arm64
|
||||
|
||||
labels:
|
||||
platform: ${PLATFORM}
|
||||
|
||||
services:
|
||||
postgres:
|
||||
|
|
@ -33,6 +35,7 @@ steps:
|
|||
DB_HOST: postgres
|
||||
LINT: ${LINT}
|
||||
commands:
|
||||
- sh -c 'uname -a && cat /etc/os-release || :'
|
||||
- mix local.hex --force
|
||||
- mix local.rebar --force
|
||||
- mix deps.get
|
||||
|
|
|
|||
109
CHANGELOG.md
109
CHANGELOG.md
|
|
@ -4,7 +4,114 @@ All notable changes to this project will be documented in this file.
|
|||
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||
|
||||
## Unreleased
|
||||
## 2026.03
|
||||
|
||||
### BREAKING
|
||||
- Elixir 1.14 is no longer supported, and it's EOL! Upgrade to Elixir 1.15+
|
||||
- `account` entities in API responses now only contain a cut down version of their servers nodeinfo.
|
||||
TEMPORARILY a config option is provided to serve the full nodeinfo data again.
|
||||
HOWEVER this option WILL be removed soon. If you encounter any issues with third-party clients fixed
|
||||
by using this setting, tell us so we can include all actually needed keys by default.
|
||||
|
||||
### REMOVED
|
||||
|
||||
### Added
|
||||
- Mastodon-compatible translation endpoints are now supported too;
|
||||
the older Akkoma endpoints are deprecated but no immediate plans for removal
|
||||
- `GET pleroma/conversation/:id/statuses` now supports `with_muted`
|
||||
- `POST /api/v1/statuses` accepts and now prefers the Mastodon-compatible `quoted_status_id` parameter for quoting a post
|
||||
- `status` API entities now expose non-shallow quotes in a manner also compatible with Mastodon clients
|
||||
- support for WebFinger backlinks in ActivityPub actors (FEP-2c59)
|
||||
|
||||
### Fixed
|
||||
- pinning, muting or unmuting a status one is not allowed to access no longer leaks its content
|
||||
- revoking a favourite on a post one lost access to no longer leaks its content
|
||||
- user info updates again are actively federated to other servers;
|
||||
this was accidentally broken in the previous release
|
||||
- it is no longer possible to reference posts one cannot access when reporting another user
|
||||
- streamed relationship updates no longer leak follow* counts for users who chose to hide their counts
|
||||
- WebFinger data and user nicknames no longer allow non-consensual associations
|
||||
- Correctly set up custom WebFinger domains work again
|
||||
- fix paths of emojis added or updated at runtime and remove emoji from runtime when deleting an entire pack without requiring a full emoji reload
|
||||
- fix retraction of remote emoji reaction when id is not present or its domain differs from image host
|
||||
- fix AP ids declared with the canonical type being ignored in XML WebFinger responses
|
||||
- fix many, many bugs in the conversations API family
|
||||
- notifications about muted entities are no longer streamed out
|
||||
- non-UTF-8 usernames no longer lead to internal server errors in API endpoints
|
||||
- when SimplePolicy rules are configured but the MRF not enabled, its rules no longer interfere with fetching
|
||||
- fixed remote follow counter refresh on user (re)fetch
|
||||
- remote users whose follow* counts are private are now actually shown as such in API instead of representing them with public zero counters
|
||||
- fix local follow* collections counting and including AP IDs of deleted users
|
||||
|
||||
### Changed
|
||||
- `PATCH /api/v1/pleroma/conversations/:id` now accepts update parameters via JSON body too
|
||||
- it is now possible to quote local and one’s own private posts provided a compatible scope is used
|
||||
- on final activity failures the error log now includes the affected activity
|
||||
- improved performance of `GET api/v1/custom_emoji`
|
||||
- outgoing HTTP requests now accept compressed responses
|
||||
- the system CA certificate store is now used by default
|
||||
- when refreshing remote follow* stats all fetch-related errors are now treated as stats being private;
|
||||
this avoids spurious error logs and better matches the intent of implementations serving fallback HTML responses on the AP collection endpoints
|
||||
|
||||
|
||||
## 2025.12
|
||||
|
||||
### REMOVED
|
||||
- DEPRECATE `/api/v1/timelines/direct`.
|
||||
Technically this was already deprecated, given we extend mastodon 2.7.2 API
|
||||
and Mastodon already deprecated it in 2.6.0 before removing it in 3.0.0.
|
||||
But now we have concrete plans to remove this endpoint in a coming release.
|
||||
The few remaining users should switch to the conversations API.
|
||||
- DEPRECATE `config :pleroma, :instance, skip_thread_containment: false`.
|
||||
It is due to be removed in one of the next releases if no strong arguments for keeping it are brought up.
|
||||
It is already semi-broken for large threads and conflicts with pending optimisation and cleanup work.
|
||||
- support for `exclude_visibilities` in timeline and notification endpoints has been dropped
|
||||
- support for list visibility / list addressing has been dropped due to lack of usage, maintenance burden and redundancy with the still supported explicit-addressing feature
|
||||
- support for conversations addressing has been dropped due to lack of usage, maintenance burden and being mostly redundant with explicit addressing
|
||||
- per-visibility status counters have been dropped from `/api/v1/pleroma/admin/stats`
|
||||
due to unreasonably perf costs added on most database operations.
|
||||
For now, the response still contains the fields, but with stubbed-out values.
|
||||
|
||||
### Added
|
||||
- status responses include two new fields for ActivityPub cross-referencing: `akkoma.quote_apid` and `akkoma.in_reply_to_apid`
|
||||
- attempting to reply to an already deleted post will return an error
|
||||
(in akkoma-fe the error will be shown and your draft message retained so you can decide
|
||||
for yourself whether to discard it or copy and repost as a, now intentional, new thread)
|
||||
- the notification endpoint now supports the `types` parameter for filtering added in vanilla Mastodon
|
||||
- the mute endpoint now supports the `duration` parameter added in vanilla Mastodon
|
||||
(fixes temporary mutes created via e.g. Husky)
|
||||
|
||||
### Fixed
|
||||
- replies and quotes to unresolvable posts now fill out IDs for replied to
|
||||
status, user or quoted status with a 404-ing ID to make them recognisable as
|
||||
replies/quotes instead of pretending they’re root posts
|
||||
- querying a status using the ID of a non-post AP activity no longer displays
|
||||
a duplicate of the post referenced by said activity with mangled author information
|
||||
- fix users being able to interact (like, emoji react, ...) with posts they cannot access
|
||||
- fix AP fetches of local non-Create, non-Undo activities exposing the raw, unsanitised content of the referenced object
|
||||
- the above two combined allowed local users to gain access to private posts
|
||||
of user they do not follow, but follow a follower of the author.
|
||||
(remote users and other scenarios were to our knowledge not able to achieve this due to other restrictions)
|
||||
- fix RSS and Atom feeds of hashtag timelines potentially exposing more information than Mastodon API when restricting unauthenticated API access
|
||||
- fix mentioning and sending DMs to users with non-ASCII-alphanumerical usernames
|
||||
- correctly hide and show inlined fallback links for quotes from Mastodon instances
|
||||
- API requests with multiple unsupported parameters now will ignore all of them up to a certain limit.
|
||||
If there are too many unsupported parameters this is indicated in the returned error message.
|
||||
- expose generic type of attachment via Masto API if remote did not send a full MIME type but indicated a generic one
|
||||
(the \*oma-specific full mime type field in the API response remains generic however, since we don't have this info)
|
||||
- add back the default banner image we advertise in Masto API
|
||||
- correctly redirect `/users/:nickname.rss` to the RSS instead of Atom feed
|
||||
|
||||
### Changed
|
||||
- deprecated the `included_types` parameter in the notification endpoint; replaced by `types`
|
||||
- deprecated the `expires_in` parameter in the mute endpoint; replaced by `duration`
|
||||
- optimised emoji addition and removal
|
||||
- emoji reloading now happens asynchronously so you won't run into timeout issues with many emoji and/or a slow disk
|
||||
- upgraded all of our dependencies; this should reduce issues when running akkoma with OTP28
|
||||
- prefer "summary" over "name" for the attachment alt text of incoming ActivityPub documents;
|
||||
this fixes alt text federation from GtS and Honk
|
||||
- slightly improve index overhead for the users table
|
||||
|
||||
|
||||
## 2025.10
|
||||
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@
|
|||
## Supported FEPs
|
||||
|
||||
- [FEP-67ff: FEDERATION](https://codeberg.org/fediverse/fep/src/branch/main/fep/67ff/fep-67ff.md)
|
||||
- [FEP-2c59: Discovery of a Webfinger address from an ActivityPub actor](https://codeberg.org/fediverse/fep/src/branch/main/fep/2c59/fep-2c59.md)
|
||||
- [FEP-dc88: Formatting Mathematics](https://codeberg.org/fediverse/fep/src/branch/main/fep/dc88/fep-dc88.md)
|
||||
- [FEP-f1d5: NodeInfo in Fediverse Software](https://codeberg.org/fediverse/fep/src/branch/main/fep/f1d5/fep-f1d5.md)
|
||||
- [FEP-fffd: Proxy Objects](https://codeberg.org/fediverse/fep/src/branch/main/fep/fffd/fep-fffd.md)
|
||||
|
|
@ -37,6 +38,21 @@ Depending on instance configuration the same may be true for GET requests.
|
|||
We set the optional extension term `htmlMfm: true` when using content type "text/x.misskeymarkdown".
|
||||
Incoming messages containing `htmlMfm: true` will not have their content re-parsed.
|
||||
|
||||
## WebFinger
|
||||
|
||||
Akkoma requires WebFinger implementations to respond to queries about a given user both when
|
||||
`acct:user@domain` or the canonical ActivityPub id of the actor is passed as the `resource`.
|
||||
|
||||
Akkoma strongly encourages ActivityPub implementations to include
|
||||
a FEP-2c59-compliant WebFinger backlink in their actor documents.
|
||||
|
||||
Without FEP-2c59 and if different domains are used for ActivityPub and the Webfinger subject,
|
||||
Akkoma relies on the presence of a host-meta LRDD template on the ActivityPub domain
|
||||
or a HTTP redirect from the ActivityPub domain’s `/.well-known/webfinger` to an equivalent endpoint
|
||||
on the domain used in the `subject` to discover and validate the domain association.
|
||||
Without FEP-2c59 Akkoma may not become aware of changes to the
|
||||
preferred WebFinger `subject` domain for already discovered users.
|
||||
|
||||
## Nodeinfo
|
||||
|
||||
Akkoma provides many additional entries in its nodeinfo response,
|
||||
|
|
|
|||
|
|
@ -1,3 +1,2 @@
|
|||
./build.sh 1.14-otp25 1.14.3-erlang-25.3.2-alpine-3.18.0
|
||||
./build.sh 1.15-otp25 1.15.8-erlang-25.3.2.18-alpine-3.19.7
|
||||
./build.sh 1.18-otp27 1.18.2-erlang-27.2.4-alpine-3.19.7
|
||||
./build.sh 1.15-otp25 1.15.8-erlang-25.3.2.18-alpine-3.22.2
|
||||
./build.sh 1.19-otp28 1.19-erlang-28.0-alpine-3.23.2
|
||||
|
|
|
|||
|
|
@ -51,6 +51,11 @@ config :pleroma, Pleroma.Repo,
|
|||
queue_target: 20_000,
|
||||
migration_lock: nil
|
||||
|
||||
# password hash strength
|
||||
config :argon2_elixir,
|
||||
t_cost: 8,
|
||||
parallelism: 2
|
||||
|
||||
config :pleroma, Pleroma.Captcha,
|
||||
enabled: true,
|
||||
seconds_valid: 300,
|
||||
|
|
@ -244,6 +249,7 @@ config :pleroma, :instance,
|
|||
remote_post_retention_days: 90,
|
||||
skip_thread_containment: true,
|
||||
limit_to_local_content: :unauthenticated,
|
||||
filter_embedded_nodeinfo: true,
|
||||
user_bio_length: 5000,
|
||||
user_name_length: 100,
|
||||
max_account_fields: 10,
|
||||
|
|
@ -776,7 +782,9 @@ config :pleroma, :frontends,
|
|||
available: %{
|
||||
"pleroma-fe" => %{
|
||||
"name" => "pleroma-fe",
|
||||
"git" => "https://akkoma.dev/AkkomaGang/pleroma-fe",
|
||||
"blind_trust" => true,
|
||||
"git" => "https://akkoma.dev/AkkomaGang/akkoma-fe",
|
||||
"bugtracker" => "https://akkoma.dev/AkkomaGang/akkoma-fe/issues",
|
||||
"build_url" =>
|
||||
"https://akkoma-updates.s3-website.fr-par.scw.cloud/frontend/${ref}/akkoma-fe.zip",
|
||||
"ref" => "stable",
|
||||
|
|
@ -785,7 +793,9 @@ config :pleroma, :frontends,
|
|||
# Mastodon-Fe cannot be set as a primary - this is only here so we can update this seperately
|
||||
"mastodon-fe" => %{
|
||||
"name" => "mastodon-fe",
|
||||
"blind_trust" => true,
|
||||
"git" => "https://akkoma.dev/AkkomaGang/masto-fe",
|
||||
"bugtracker" => "https://akkoma.dev/AkkomaGang/masto-fe/issues",
|
||||
"build_url" =>
|
||||
"https://akkoma-updates.s3-website.fr-par.scw.cloud/frontend/${ref}/masto-fe.zip",
|
||||
"build_dir" => "distribution",
|
||||
|
|
@ -793,7 +803,9 @@ config :pleroma, :frontends,
|
|||
},
|
||||
"fedibird-fe" => %{
|
||||
"name" => "fedibird-fe",
|
||||
"blind_trust" => true,
|
||||
"git" => "https://akkoma.dev/AkkomaGang/fedibird-fe",
|
||||
"bugtracker" => "https://akkoma.dev/AkkomaGang/fedibird-fe/issues",
|
||||
"build_url" =>
|
||||
"https://akkoma-updates.s3-website.fr-par.scw.cloud/frontend/${ref}/fedibird-fe.zip",
|
||||
"build_dir" => "distribution",
|
||||
|
|
@ -801,7 +813,9 @@ config :pleroma, :frontends,
|
|||
},
|
||||
"admin-fe" => %{
|
||||
"name" => "admin-fe",
|
||||
"blind_trust" => true,
|
||||
"git" => "https://akkoma.dev/AkkomaGang/admin-fe",
|
||||
"bugtracker" => "https://akkoma.dev/AkkomaGang/admin-fe/issues",
|
||||
"build_url" =>
|
||||
"https://akkoma-updates.s3-website.fr-par.scw.cloud/frontend/${ref}/admin-fe.zip",
|
||||
"ref" => "stable"
|
||||
|
|
@ -809,10 +823,31 @@ config :pleroma, :frontends,
|
|||
# For developers - enables a swagger frontend to view the openapi spec
|
||||
"swagger-ui" => %{
|
||||
"name" => "swagger-ui",
|
||||
"blind_trust" => true,
|
||||
"git" => "https://github.com/swagger-api/swagger-ui",
|
||||
# API spec definitions are part of the backend (and the swagger-ui build outdated)
|
||||
"bugtracker" => "https://akkoma.dev/AkkomaGang/akkoma/issues",
|
||||
"build_url" => "https://akkoma-updates.s3-website.fr-par.scw.cloud/frontend/swagger-ui.zip",
|
||||
"build_dir" => "dist",
|
||||
"ref" => "stable"
|
||||
},
|
||||
# Third-party frontends
|
||||
"pleroma-fe-vanilla" => %{
|
||||
"name" => "pleroma-fe-vanilla",
|
||||
"git" => "https://git.pleroma.social/pleroma/pleroma-fe/",
|
||||
"build_url" =>
|
||||
"https://git.pleroma.social/pleroma/pleroma-fe/-/jobs/artifacts/${ref}/download?job=build",
|
||||
"ref" => "develop",
|
||||
"build_dir" => "dist",
|
||||
"bugtracker" => "https://git.pleroma.social/pleroma/pleroma-fe/-/issues"
|
||||
},
|
||||
"pl-fe" => %{
|
||||
"name" => "pl-fe",
|
||||
"git" => "https://codeberg.org/mkljczk/pl-fe",
|
||||
"build_url" => "https://pl.mkljczk.pl/pl-fe.zip",
|
||||
"ref" => "develop",
|
||||
"build_dir" => ".",
|
||||
"bugtracker" => "https://codeberg.org/mkljczk/pl-fe/issues"
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -869,7 +904,11 @@ config :pleroma, ConcurrentLimiter, [
|
|||
{Pleroma.Search, [max_running: 30, max_waiting: 50]}
|
||||
]
|
||||
|
||||
config :pleroma, Pleroma.Web.WebFinger, domain: nil, update_nickname_on_user_fetch: true
|
||||
config :pleroma, Pleroma.Web.WebFinger,
|
||||
domain: nil,
|
||||
# this _forces_ a nickname rediscovery and validation, otherwise only updates when detecting a change
|
||||
# TODO: default this to false after the fallout from recent WebFinger bugs is healed
|
||||
update_nickname_on_user_fetch: true
|
||||
|
||||
config :pleroma, Pleroma.Search, module: Pleroma.Search.DatabaseSearch
|
||||
|
||||
|
|
|
|||
|
|
@ -61,6 +61,18 @@ frontend_options = [
|
|||
type: :string,
|
||||
description: "The directory inside the zip file "
|
||||
},
|
||||
%{
|
||||
key: "blind_trust",
|
||||
label: "Blindly trust frontend devs?",
|
||||
type: :boolean,
|
||||
description: "Do NOT change this unless you’re really sure"
|
||||
},
|
||||
%{
|
||||
key: "bugtracker",
|
||||
label: "Bug tracker",
|
||||
type: :string,
|
||||
description: "Where to report bugs (for third-party FEs)"
|
||||
},
|
||||
%{
|
||||
key: "custom-http-headers",
|
||||
label: "Custom HTTP headers",
|
||||
|
|
@ -3483,7 +3495,7 @@ config :pleroma, :config_description, [
|
|||
key: :module,
|
||||
type: :module,
|
||||
description: "Translation module.",
|
||||
suggestions: {:list_behaviour_implementations, Pleroma.Akkoma.Translator}
|
||||
suggestions: {:list_behaviour_implementations, Pleroma.Akkoma.Translator.Provider}
|
||||
}
|
||||
]
|
||||
},
|
||||
|
|
|
|||
|
|
@ -19,3 +19,26 @@
|
|||
- `--delete` - delete local uploads after migrating them to the target uploader
|
||||
|
||||
A list of available uploaders can be seen in [Configuration Cheat Sheet](../../configuration/cheatsheet.md#pleromaupload)
|
||||
|
||||
## Rewriting old media URLs
|
||||
|
||||
After a migration has taken place, old URLs in your database will not have been changed. You
|
||||
will want to run this task to update these URLs.
|
||||
|
||||
Use the full URL here. So if you moved from `media.example.com/media` to `media.another.com/data`, you'd run with arguments
|
||||
`old_url = https://media.example.com/media` and `new_url = https://media.another.com/data`.
|
||||
|
||||
=== "OTP"
|
||||
|
||||
```sh
|
||||
./bin/pleroma_ctl uploads rewrite_media_domain <old_url> <new_url>
|
||||
```
|
||||
|
||||
=== "From Source"
|
||||
|
||||
```sh
|
||||
mix pleroma.uploads rewrite_media_domain <old_url> <new_url>
|
||||
```
|
||||
|
||||
### Options
|
||||
- `--dry-run` - Do not action any update and simply print what _would_ happen
|
||||
|
|
|
|||
|
|
@ -18,7 +18,7 @@
|
|||
3. Go to the working directory of Akkoma (default is `/opt/akkoma`)
|
||||
4. Copy the above-mentioned files back to their original position.
|
||||
5. Drop the existing database and user[¹]. `sudo -Hu postgres psql -c 'DROP DATABASE akkoma;';` `sudo -Hu postgres psql -c 'DROP USER akkoma;'`
|
||||
6. Restore the database schema and akkoma role[¹] (replace the password with the one you find in the configuration file), `sudo -Hu postgres psql -c "CREATE USER akkoma WITH ENCRYPTED PASSWORD '<database-password-wich-you-can-find-in-your-configuration-file>';"` `sudo -Hu postgres psql -c "CREATE DATABASE akkoma OWNER akkoma;"`.
|
||||
6. Restore the database schema and akkoma role[¹] (replace the password with the one you find in the configuration file), `sudo -Hu postgres psql -c "CREATE USER akkoma WITH ENCRYPTED PASSWORD '<database-password-wich-you-can-find-in-your-configuration-file>';";` `sudo -Hu postgres psql -c "CREATE DATABASE akkoma OWNER akkoma;"`.
|
||||
7. Now restore the Akkoma instance's data into the empty database schema[¹]: `sudo -Hu postgres pg_restore -d akkoma -v -1 </path/to/backup_location/akkoma.pgdump>`
|
||||
8. If you installed a newer Akkoma version, you should run the database migrations `./bin/pleroma_ctl migrate`[²].
|
||||
9. Restart the Akkoma service.
|
||||
|
|
|
|||
|
|
@ -53,6 +53,10 @@ curl -i -H 'Authorization: Bearer $ACCESS_TOKEN' https://myinstance.example/api/
|
|||
You may use the eponymous [Prometheus](https://prometheus.io/)
|
||||
or anything compatible with it like e.g. [VictoriaMetrics](https://victoriametrics.com/).
|
||||
The latter claims better performance and storage efficiency.
|
||||
However, at the moment our reference dashboard only works with VictoriaMetrics,
|
||||
thus if you wish to use use the reference as an easy dropin template you must
|
||||
use VictoriaMetrics.
|
||||
Patches to allow the dashboard to work with plain Prometheus are welcome though.
|
||||
|
||||
Both of them can usually be easily installed via distro-packages or docker.
|
||||
Depending on your distro or installation method the preferred way to change the CLI arguments and the location of config files may differ; consult the documentation of your chosen method to find out.
|
||||
|
|
@ -254,6 +258,44 @@ as well as database diagnostics.
|
|||
BEAM VM stats include detailed memory consumption breakdowns
|
||||
and a full list of running processes for example.
|
||||
|
||||
## Postgres Statements Statistics
|
||||
|
||||
The built-in dashboard can list the queries your instances spends the
|
||||
most accumulative time on giving insight into potential bottlenecks
|
||||
and what might be worth optimising.
|
||||
This is the “Outliers” tab in “Ecto Stats”.
|
||||
However for this to work you first need to enable a PostgreSQL extension
|
||||
as follows:
|
||||
|
||||
Add the following two lines two your `postgresql.conf` (typically placed in your data dir):
|
||||
|
||||
```
|
||||
shared_preload_libraries = 'pg_stat_statements'
|
||||
pg_stat_statements.track = all
|
||||
```
|
||||
|
||||
Now restart PostgreSQL. Then connect to your akkoma database using `psql` and run:
|
||||
|
||||
```sql
|
||||
CREATE EXTENSION IF NOT EXISTS pg_stat_statements;
|
||||
```
|
||||
|
||||
Execution time statistics will now start to be gathered.
|
||||
To get a representative sample of your instances workload you should wait a week or at least a day.
|
||||
|
||||
These statistics are never reset automatically, but with new Akkoma releases and
|
||||
changes in the servers your instance federates with the workload will evolve.
|
||||
Thus it’s a good idea to reset this occasionally using:
|
||||
|
||||
```sql
|
||||
-- get user oid: SELECT oid FROM pg_roles WHERE rolname = 'akkoma';
|
||||
-- get db oid: SELECT oid FROM pg_database WHERE datname = 'akkoma';
|
||||
SELECT pg_stat_statements_reset('<akkoma user oid>'::regclass::oid, '<akkoma database oid>'::regclass::oid);
|
||||
|
||||
-- or alternatively, to just reset stats for all users and databases:
|
||||
-- SELECT pg_stat_statements_reset();
|
||||
```
|
||||
|
||||
## Oban Web
|
||||
|
||||
This too requires administrator rights to access and can be found under `/akkoma/oban` if enabled.
|
||||
|
|
|
|||
|
|
@ -54,7 +54,8 @@ To add configuration to your config file, you can copy it from the base config.
|
|||
* `remote_post_retention_days`: The default number of days to retain remote posts when pruning the database.
|
||||
* `user_bio_length`: A user bio maximum length (default: `5000`).
|
||||
* `user_name_length`: A user name maximum length (default: `100`).
|
||||
* `skip_thread_containment`: Skip filter out broken threads. The default is `false`.
|
||||
* `skip_thread_containment`: **DEPRECATED**, DO NOT CHANGE THE DEFAULT!
|
||||
Skip filter out broken threads. The default is `false`.
|
||||
* `limit_to_local_content`: Limit unauthenticated users to search for local statutes and users only. Possible values: `:unauthenticated`, `:all` and `false`. The default is `:unauthenticated`.
|
||||
* `max_account_fields`: The maximum number of custom fields in the user profile (default: `10`).
|
||||
* `max_remote_account_fields`: The maximum number of custom fields in the remote user profile (default: `20`).
|
||||
|
|
|
|||
|
|
@ -7,72 +7,72 @@ The configuration of Akkoma (and Pleroma) has traditionally been managed with a
|
|||
|
||||
1. Run the mix task to migrate to the database.
|
||||
|
||||
**Source:**
|
||||
**Source:**
|
||||
|
||||
```
|
||||
$ mix pleroma.config migrate_to_db
|
||||
```
|
||||
```
|
||||
$ mix pleroma.config migrate_to_db
|
||||
```
|
||||
|
||||
or
|
||||
or
|
||||
|
||||
**OTP:**
|
||||
**OTP:**
|
||||
|
||||
*Note: OTP users need Akkoma to be running for `pleroma_ctl` commands to work*
|
||||
*Note: OTP users need Akkoma to be running for `pleroma_ctl` commands to work*
|
||||
|
||||
```
|
||||
$ ./bin/pleroma_ctl config migrate_to_db
|
||||
```
|
||||
```
|
||||
$ ./bin/pleroma_ctl config migrate_to_db
|
||||
```
|
||||
|
||||
```
|
||||
Migrating settings from file: /home/pleroma/config/dev.secret.exs
|
||||
```
|
||||
Migrating settings from file: /home/pleroma/config/dev.secret.exs
|
||||
|
||||
Settings for key instance migrated.
|
||||
Settings for group :pleroma migrated.
|
||||
```
|
||||
Settings for key instance migrated.
|
||||
Settings for group :pleroma migrated.
|
||||
```
|
||||
|
||||
2. It is recommended to backup your config file now.
|
||||
|
||||
```
|
||||
cp config/dev.secret.exs config/dev.secret.exs.orig
|
||||
```
|
||||
```
|
||||
cp config/dev.secret.exs config/dev.secret.exs.orig
|
||||
```
|
||||
|
||||
3. Edit your Akkoma config to enable database configuration:
|
||||
|
||||
```
|
||||
config :pleroma, configurable_from_database: true
|
||||
```
|
||||
```
|
||||
config :pleroma, configurable_from_database: true
|
||||
```
|
||||
|
||||
4. ⚠️ **THIS IS NOT REQUIRED** ⚠️
|
||||
|
||||
Now you can edit your config file and strip it down to the only settings which are not possible to control in the database. e.g., the Postgres (Repo) and webserver (Endpoint) settings cannot be controlled in the database because the application needs the settings to start up and access the database.
|
||||
Now you can edit your config file and strip it down to the only settings which are not possible to control in the database. e.g., the Postgres (Repo) and webserver (Endpoint) settings cannot be controlled in the database because the application needs the settings to start up and access the database.
|
||||
|
||||
Any settings in the database will override those in the config file, but you may find it less confusing if the setting is only declared in one place.
|
||||
Any settings in the database will override those in the config file, but you may find it less confusing if the setting is only declared in one place.
|
||||
|
||||
A non-exhaustive list of settings that are only possible in the config file include the following:
|
||||
A non-exhaustive list of settings that are only possible in the config file include the following:
|
||||
|
||||
* config :pleroma, Pleroma.Web.Endpoint
|
||||
* config :pleroma, Pleroma.Repo
|
||||
* config :pleroma, configurable\_from\_database
|
||||
* config :pleroma, :database, rum_enabled
|
||||
* config :pleroma, :connections_pool
|
||||
* config :pleroma, Pleroma.Web.Endpoint
|
||||
* config :pleroma, Pleroma.Repo
|
||||
* config :pleroma, configurable\_from\_database
|
||||
* config :pleroma, :database, rum_enabled
|
||||
* config :pleroma, :connections_pool
|
||||
|
||||
Here is an example of a server config stripped down after migration:
|
||||
Here is an example of a server config stripped down after migration:
|
||||
|
||||
```
|
||||
use Mix.Config
|
||||
```
|
||||
use Mix.Config
|
||||
|
||||
config :pleroma, Pleroma.Web.Endpoint,
|
||||
url: [host: "cool.pleroma.site", scheme: "https", port: 443]
|
||||
config :pleroma, Pleroma.Web.Endpoint,
|
||||
url: [host: "cool.pleroma.site", scheme: "https", port: 443]
|
||||
|
||||
config :pleroma, Pleroma.Repo,
|
||||
adapter: Ecto.Adapters.Postgres,
|
||||
username: "akkoma",
|
||||
password: "MySecretPassword",
|
||||
database: "akkoma_prod",
|
||||
hostname: "localhost"
|
||||
config :pleroma, Pleroma.Repo,
|
||||
adapter: Ecto.Adapters.Postgres,
|
||||
username: "akkoma",
|
||||
password: "MySecretPassword",
|
||||
database: "akkoma_prod",
|
||||
hostname: "localhost"
|
||||
|
||||
config :pleroma, configurable_from_database: true
|
||||
```
|
||||
config :pleroma, configurable_from_database: true
|
||||
```
|
||||
|
||||
5. Restart your instance and you can now access the Settings tab in admin-fe.
|
||||
|
||||
|
|
@ -81,28 +81,28 @@ The configuration of Akkoma (and Pleroma) has traditionally been managed with a
|
|||
|
||||
1. Run the mix task to migrate back from the database. You'll receive some debugging output and a few messages informing you of what happened.
|
||||
|
||||
**Source:**
|
||||
**Source:**
|
||||
|
||||
```
|
||||
$ mix pleroma.config migrate_from_db
|
||||
```
|
||||
```
|
||||
$ mix pleroma.config migrate_from_db
|
||||
```
|
||||
|
||||
or
|
||||
or
|
||||
|
||||
**OTP:**
|
||||
**OTP:**
|
||||
|
||||
```
|
||||
$ ./bin/pleroma_ctl config migrate_from_db
|
||||
```
|
||||
```
|
||||
$ ./bin/pleroma_ctl config migrate_from_db
|
||||
```
|
||||
|
||||
```
|
||||
10:26:30.593 [debug] QUERY OK source="config" db=9.8ms decode=1.2ms queue=26.0ms idle=0.0ms
|
||||
SELECT c0."id", c0."key", c0."group", c0."value", c0."inserted_at", c0."updated_at" FROM "config" AS c0 []
|
||||
```
|
||||
10:26:30.593 [debug] QUERY OK source="config" db=9.8ms decode=1.2ms queue=26.0ms idle=0.0ms
|
||||
SELECT c0."id", c0."key", c0."group", c0."value", c0."inserted_at", c0."updated_at" FROM "config" AS c0 []
|
||||
|
||||
10:26:30.659 [debug] QUERY OK source="config" db=1.1ms idle=80.7ms
|
||||
SELECT c0."id", c0."key", c0."group", c0."value", c0."inserted_at", c0."updated_at" FROM "config" AS c0 []
|
||||
Database configuration settings have been saved to config/dev.exported_from_db.secret.exs
|
||||
```
|
||||
10:26:30.659 [debug] QUERY OK source="config" db=1.1ms idle=80.7ms
|
||||
SELECT c0."id", c0."key", c0."group", c0."value", c0."inserted_at", c0."updated_at" FROM "config" AS c0 []
|
||||
Database configuration settings have been saved to config/dev.exported_from_db.secret.exs
|
||||
```
|
||||
|
||||
2. Remove `config :pleroma, configurable_from_database: true` from your config. The in-database configuration still exists, but it will not be used. Future migrations will erase the database config before importing your config file again.
|
||||
|
||||
|
|
|
|||
|
|
@ -1218,24 +1218,10 @@ Loads JSON generated from `config/descriptions.exs`.
|
|||
|
||||
## `GET /api/v1/pleroma/admin/stats`
|
||||
|
||||
### Stats
|
||||
**DEPRECATED; DO NOT USE**!!
|
||||
|
||||
- Query Params:
|
||||
- *optional* `instance`: **string** instance hostname (without protocol) to get stats for
|
||||
- Example: `https://mypleroma.org/api/v1/pleroma/admin/stats?instance=lain.com`
|
||||
|
||||
- Response:
|
||||
|
||||
```json
|
||||
{
|
||||
"status_visibility": {
|
||||
"direct": 739,
|
||||
"private": 9,
|
||||
"public": 17,
|
||||
"unlisted": 14
|
||||
}
|
||||
}
|
||||
```
|
||||
Returned information is only stubbed out.
|
||||
The endpoint will be removed entirely in an upcoming release.
|
||||
|
||||
## `GET /api/v1/pleroma/admin/oauth_app`
|
||||
|
||||
|
|
|
|||
|
|
@ -14,8 +14,6 @@ by the administrator. It is available under `/api/v1/timelines/bubble`.
|
|||
|
||||
Adding the parameter `with_muted=true` to the timeline queries will also return activities by muted (not by blocked!) users.
|
||||
|
||||
Adding the parameter `exclude_visibilities` to the timeline queries will exclude the statuses with the given visibilities. The parameter accepts an array of visibility types (`public`, `unlisted`, `private`, `direct`), e.g., `exclude_visibilities[]=direct&exclude_visibilities[]=private`.
|
||||
|
||||
Adding the parameter `reply_visibility` to the public, bubble or home timelines queries will filter replies. Possible values: without parameter (default) shows all replies, `following` - replies directed to you or users you follow, `self` - replies directed to you.
|
||||
|
||||
Adding the parameter `instance=lain.com` to the public timeline will show only statuses originating from `lain.com` (or any remote instance).
|
||||
|
|
@ -32,7 +30,7 @@ Home, public, hashtag & list timelines further accept:
|
|||
|
||||
## Statuses
|
||||
|
||||
- `visibility`: has additional possible values `list` and `local` (for local-only statuses)
|
||||
- `visibility`: has additional possible value `local` (for local-only statuses)
|
||||
- `emoji_reactions`: additional field since Akkoma 3.2.0; identical to `pleroma/emoji_reactions`
|
||||
|
||||
Has these additional fields under the `pleroma` object:
|
||||
|
|
@ -60,6 +58,7 @@ The `GET /api/v1/statuses/:id/source` endpoint additionally has the following at
|
|||
Has these additional fields in `params`:
|
||||
|
||||
- `expires_in`: the number of seconds the posted activity should expire in.
|
||||
**Deprecated**; replaced by Mastodon-compatible `duration`
|
||||
|
||||
## Media Attachments
|
||||
|
||||
|
|
@ -90,7 +89,6 @@ The `id` parameter can also be the `nickname` of the user. This only works in th
|
|||
- `with_muted`: include statuses/reactions from muted accounts
|
||||
- `exclude_reblogs`: exclude reblogs
|
||||
- `exclude_replies`: exclude replies
|
||||
- `exclude_visibilities`: exclude visibilities
|
||||
|
||||
Endpoints which accept `with_relationships` parameter:
|
||||
|
||||
|
|
@ -191,8 +189,8 @@ The `type` value is `pleroma:report`
|
|||
|
||||
Accepts additional parameters:
|
||||
|
||||
- `exclude_visibilities`: will exclude the notifications for activities with the given visibilities. The parameter accepts an array of visibility types (`public`, `unlisted`, `private`, `direct`). Usage example: `GET /api/v1/notifications?exclude_visibilities[]=direct&exclude_visibilities[]=private`.
|
||||
- `include_types`: will include the notifications for activities with the given types. The parameter accepts an array of types (`mention`, `follow`, `reblog`, `favourite`, `move`, `pleroma:emoji_reaction`, `pleroma:report`). Usage example: `GET /api/v1/notifications?include_types[]=mention&include_types[]=reblog`.
|
||||
**Deprecated:** replaced by `types` which is equivalent but (by now) also supported by vanilla Mastodon.
|
||||
|
||||
## DELETE `/api/v1/notifications/destroy_multiple`
|
||||
|
||||
|
|
@ -214,8 +212,8 @@ Additional parameters can be added to the JSON body/Form data:
|
|||
- `content_type`: string, contain the MIME type of the status, it is transformed into HTML by the backend. You can get the list of the supported MIME types with the nodeinfo endpoint.
|
||||
- `to`: A list of nicknames (like `admin@otp.akkoma.dev` or `admin` on the local server) that will be used to determine who is going to be addressed by this post. Using this will disable the implicit addressing by mentioned names in the `status` body, only the people in the `to` list will be addressed. The normal rules for post visibility are not affected by this and will still apply.
|
||||
- `visibility`: string, besides standard MastoAPI values (`direct`, `private`, `unlisted`, `local` or `public`) it can be used to address a List by setting it to `list:LIST_ID`.
|
||||
- `expires_in`: The number of seconds the posted activity should expire in. When a posted activity expires it will be deleted from the server, and a delete request for it will be federated. This needs to be longer than an hour.
|
||||
- `in_reply_to_conversation_id`: Will reply to a given conversation, addressing only the people who are part of the recipient set of that conversation. Sets the visibility to `direct`.
|
||||
- `expires_in`: **Deprecated**; replaced by `duration`.
|
||||
The number of seconds the posted activity should expire in. When a posted activity expires it will be deleted from the server, and a delete request for it will be federated. This needs to be longer than an hour.
|
||||
|
||||
## GET `/api/v1/statuses`
|
||||
|
||||
|
|
@ -361,10 +359,6 @@ The message payload consists of:
|
|||
- `follower_count`: follower count
|
||||
- `following_count`: following count
|
||||
|
||||
## User muting and thread muting
|
||||
|
||||
Both user muting and thread muting can be done for only a certain time by adding an `expires_in` parameter to the API calls and giving the expiration time in seconds.
|
||||
|
||||
## Not implemented
|
||||
|
||||
Akkoma is generally compatible with the Mastodon 2.7.2 API, but some newer features and non-essential features are omitted. These features usually return an HTTP 200 status code, but with an empty response. While they may be added in the future, they are considered low priority.
|
||||
|
|
|
|||
|
|
@ -376,13 +376,8 @@ See [Admin-API](admin_api.md)
|
|||
|
||||
Pleroma Conversations have the same general structure that Mastodon Conversations have. The behavior differs in the following ways when using these endpoints:
|
||||
|
||||
1. Pleroma Conversations never add or remove recipients, unless explicitly changed by the user.
|
||||
1. Pleroma Conversations never add or remove recipients (`accounts` key), unless explicitly changed by the user.
|
||||
2. Pleroma Conversations statuses can be requested by Conversation id.
|
||||
3. Pleroma Conversations can be replied to.
|
||||
|
||||
Conversations have the additional field `recipients` under the `pleroma` key. This holds a list of all the accounts that will receive a message in this conversation.
|
||||
|
||||
The status posting endpoint takes an additional parameter, `in_reply_to_conversation_id`, which, when set, will set the visiblity to direct and address only the people who are the recipients of that Conversation.
|
||||
|
||||
⚠ Conversation IDs can be found in direct messages with the `pleroma.direct_conversation_id` key, do not confuse it with `pleroma.conversation_id`.
|
||||
|
||||
|
|
|
|||
|
|
@ -267,17 +267,33 @@ special meaning to the potential local-scope identifier.
|
|||
however those are also shown publicly on the local web interface
|
||||
and are thus visible to non-members.
|
||||
|
||||
## List post scope
|
||||
|
||||
Messages originally addressed to a custom list will contain
|
||||
a `listMessage` field with an unresolvable pseudo ActivityPub id.
|
||||
|
||||
# Deprecated and Removed Extensions
|
||||
|
||||
The following extensions were used in the past but have been dropped.
|
||||
Documentation is retained here as a reference and since old objects might
|
||||
still contains related fields.
|
||||
|
||||
## List post scope
|
||||
|
||||
Messages originally addressed to a custom list will contain
|
||||
a `listMessage` field with an unresolvable pseudo ActivityPub id.
|
||||
|
||||
!!! note
|
||||
The concept did not work out too well in practice with even remote servers
|
||||
recognising the `listMessage` extension being unaware of the state of the
|
||||
list and resulting weird desyncs in thread display and handling between
|
||||
servers.
|
||||
As it was it also never found its way in any known clients or frontends.
|
||||
|
||||
A more consistent superset of what this was able to actually do
|
||||
can be achieved without ActivityPub extensions by explicitly addressing
|
||||
all intended participants without inline mentions.
|
||||
While true federated and moderated "lists" or "groups"
|
||||
will need more work and a different approach.
|
||||
|
||||
Thus suport for it was removed and it is recommended
|
||||
to not create any new implementation of it.
|
||||
|
||||
## Actor endpoints
|
||||
|
||||
The following endpoints used to be present:
|
||||
|
|
|
|||
|
|
@ -1,8 +1,8 @@
|
|||
## Required dependencies
|
||||
|
||||
* PostgreSQL 12+
|
||||
* Elixir 1.14.1+ (currently tested up to 1.18)
|
||||
* Erlang OTP 25+ (currently tested up to OTP27)
|
||||
* Elixir 1.15+ (currently tested up to 1.19)
|
||||
* Erlang OTP 25+ (currently tested up to OTP28)
|
||||
* git
|
||||
* file / libmagic
|
||||
* gcc (clang might also work)
|
||||
|
|
|
|||
|
|
@ -723,6 +723,8 @@
|
|||
},
|
||||
"displayName": "Run Queue",
|
||||
"mappings": [],
|
||||
"max": 1.5,
|
||||
"min": 0,
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
|
|
@ -732,11 +734,11 @@
|
|||
},
|
||||
{
|
||||
"color": "yellow",
|
||||
"value": 15
|
||||
"value": 0.2
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 25
|
||||
"value": 1
|
||||
}
|
||||
]
|
||||
},
|
||||
|
|
@ -784,6 +786,12 @@
|
|||
{
|
||||
"id": "displayName",
|
||||
"value": "Memory"
|
||||
},
|
||||
{
|
||||
"id": "min"
|
||||
},
|
||||
{
|
||||
"id": "max"
|
||||
}
|
||||
]
|
||||
},
|
||||
|
|
@ -836,7 +844,7 @@
|
|||
"disableTextWrap": false,
|
||||
"editorMode": "builder",
|
||||
"exemplar": false,
|
||||
"expr": "rate(vm_memory_total_psum{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__interval])",
|
||||
"expr": "increase(vm_memory_total_psum{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__interval])",
|
||||
"fullMetaSearch": false,
|
||||
"hide": true,
|
||||
"includeNullMetadata": true,
|
||||
|
|
@ -854,7 +862,7 @@
|
|||
"disableTextWrap": false,
|
||||
"editorMode": "builder",
|
||||
"exemplar": false,
|
||||
"expr": "rate(vm_memory_total_pcount{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__interval])",
|
||||
"expr": "increase(vm_memory_total_pcount{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__interval])",
|
||||
"fullMetaSearch": false,
|
||||
"hide": true,
|
||||
"includeNullMetadata": true,
|
||||
|
|
@ -882,7 +890,7 @@
|
|||
"disableTextWrap": false,
|
||||
"editorMode": "builder",
|
||||
"exemplar": false,
|
||||
"expr": "rate(vm_total_run_queue_lengths_cpu_psum{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__rate_interval])",
|
||||
"expr": "increase(vm_total_run_queue_lengths_cpu_psum{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__interval])",
|
||||
"fullMetaSearch": false,
|
||||
"hide": true,
|
||||
"includeNullMetadata": true,
|
||||
|
|
@ -900,7 +908,7 @@
|
|||
"disableTextWrap": false,
|
||||
"editorMode": "builder",
|
||||
"exemplar": false,
|
||||
"expr": "rate(vm_total_run_queue_lengths_cpu_pcount{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__rate_interval])",
|
||||
"expr": "increase(vm_total_run_queue_lengths_cpu_pcount{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__interval])",
|
||||
"fullMetaSearch": false,
|
||||
"hide": true,
|
||||
"includeNullMetadata": true,
|
||||
|
|
@ -928,7 +936,7 @@
|
|||
"disableTextWrap": false,
|
||||
"editorMode": "builder",
|
||||
"exemplar": false,
|
||||
"expr": "rate(vm_total_run_queue_lengths_io_fsum_psum{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__rate_interval])",
|
||||
"expr": "increase(vm_total_run_queue_lengths_io_fsum_psum{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__interval])",
|
||||
"fullMetaSearch": false,
|
||||
"hide": true,
|
||||
"includeNullMetadata": true,
|
||||
|
|
@ -946,7 +954,7 @@
|
|||
"disableTextWrap": false,
|
||||
"editorMode": "builder",
|
||||
"exemplar": false,
|
||||
"expr": "rate(vm_total_run_queue_lengths_io_fsum_pcount{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__rate_interval])",
|
||||
"expr": "increase(vm_total_run_queue_lengths_io_fsum_pcount{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__interval])",
|
||||
"fullMetaSearch": false,
|
||||
"hide": true,
|
||||
"includeNullMetadata": true,
|
||||
|
|
@ -1974,6 +1982,7 @@
|
|||
"type": "prometheus",
|
||||
"uid": "${DATASOURCE}"
|
||||
},
|
||||
"description": "Times are counted upon job completion/failure and may contain IO or network wait times.",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
|
|
@ -2356,7 +2365,7 @@
|
|||
"type": "prometheus",
|
||||
"uid": "${DATASOURCE}"
|
||||
},
|
||||
"description": "Jobs intentionally held back until a later start data",
|
||||
"description": "Jobs intentionally held back until a later start date. This also (but not only) includes retries of previously failed jobs since there’s a cooldown between re-attempts.",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
|
|
@ -2681,7 +2690,7 @@
|
|||
},
|
||||
"disableTextWrap": false,
|
||||
"editorMode": "builder",
|
||||
"expr": "rate(vm_memory_total_psum{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__interval])",
|
||||
"expr": "increase(vm_memory_total_psum{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__interval])",
|
||||
"fullMetaSearch": false,
|
||||
"hide": true,
|
||||
"includeNullMetadata": true,
|
||||
|
|
@ -2698,7 +2707,7 @@
|
|||
},
|
||||
"disableTextWrap": false,
|
||||
"editorMode": "builder",
|
||||
"expr": "rate(vm_memory_total_pcount{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__interval])",
|
||||
"expr": "increase(vm_memory_total_pcount{instance=\"${INSTANCE}\", job=\"${SCRAPE_JOB}\"}[$__interval])",
|
||||
"fullMetaSearch": false,
|
||||
"hide": true,
|
||||
"includeNullMetadata": true,
|
||||
|
|
@ -3598,6 +3607,6 @@
|
|||
"timezone": "utc",
|
||||
"title": "Akkoma Dashboard",
|
||||
"uid": "edzowz85niznkc",
|
||||
"version": 29,
|
||||
"version": 54,
|
||||
"weekStart": ""
|
||||
}
|
||||
|
|
|
|||
|
|
@ -33,7 +33,7 @@ defmodule Mix.Tasks.Pleroma.Email do
|
|||
|
||||
Pleroma.User.Query.build(%{
|
||||
local: true,
|
||||
is_active: true,
|
||||
deactivated: false,
|
||||
is_confirmed: false,
|
||||
invisible: false
|
||||
})
|
||||
|
|
|
|||
|
|
@ -43,6 +43,7 @@ defmodule Mix.Tasks.Pleroma.NotificationSettings do
|
|||
defp build_query(hide_notification_contents, options) do
|
||||
query =
|
||||
from(u in Pleroma.User,
|
||||
where: u.local,
|
||||
update: [
|
||||
set: [
|
||||
notification_settings:
|
||||
|
|
|
|||
|
|
@ -1,68 +0,0 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Mix.Tasks.Pleroma.RefreshCounterCache do
|
||||
@shortdoc "Refreshes counter cache"
|
||||
|
||||
use Mix.Task
|
||||
|
||||
alias Pleroma.Activity
|
||||
alias Pleroma.CounterCache
|
||||
alias Pleroma.Repo
|
||||
|
||||
import Ecto.Query
|
||||
|
||||
def run([]) do
|
||||
Mix.Pleroma.start_pleroma()
|
||||
|
||||
instances =
|
||||
Activity
|
||||
|> distinct([a], true)
|
||||
|> select([a], fragment("split_part(?, '/', 3)", a.actor))
|
||||
|> Repo.all()
|
||||
|
||||
instances
|
||||
|> Enum.with_index(1)
|
||||
|> Enum.each(fn {instance, i} ->
|
||||
counters = instance_counters(instance)
|
||||
CounterCache.set(instance, counters)
|
||||
|
||||
Mix.Pleroma.shell_info(
|
||||
"[#{i}/#{length(instances)}] Setting #{instance} counters: #{inspect(counters)}"
|
||||
)
|
||||
end)
|
||||
|
||||
Mix.Pleroma.shell_info("Done")
|
||||
end
|
||||
|
||||
defp instance_counters(instance) do
|
||||
counters = %{"public" => 0, "unlisted" => 0, "private" => 0, "direct" => 0}
|
||||
|
||||
Activity
|
||||
|> where([a], fragment("(? ->> 'type'::text) = 'Create'", a.data))
|
||||
|> where([a], fragment("split_part(?, '/', 3) = ?", a.actor, ^instance))
|
||||
|> select(
|
||||
[a],
|
||||
{fragment(
|
||||
"activity_visibility(?, ?, ?)",
|
||||
a.actor,
|
||||
a.recipients,
|
||||
a.data
|
||||
), count(a.id)}
|
||||
)
|
||||
|> group_by(
|
||||
[a],
|
||||
fragment(
|
||||
"activity_visibility(?, ?, ?)",
|
||||
a.actor,
|
||||
a.recipients,
|
||||
a.data
|
||||
)
|
||||
)
|
||||
|> Repo.all(timeout: :timer.minutes(30))
|
||||
|> Enum.reduce(counters, fn {visibility, count}, acc ->
|
||||
Map.put(acc, visibility, count)
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
|
@ -5,6 +5,7 @@
|
|||
defmodule Mix.Tasks.Pleroma.Uploads do
|
||||
use Mix.Task
|
||||
import Mix.Pleroma
|
||||
import Ecto.Query
|
||||
alias Pleroma.Upload
|
||||
alias Pleroma.Uploaders.Local
|
||||
require Logger
|
||||
|
|
@ -97,4 +98,106 @@ defmodule Mix.Tasks.Pleroma.Uploads do
|
|||
|
||||
shell_info("Done!")
|
||||
end
|
||||
|
||||
@doc """
|
||||
Rewrite media domains to somewhere new
|
||||
"""
|
||||
def run(["rewrite_media_domain", from_url, to_url | args]) do
|
||||
dry_run = Enum.member?(args, "--dry-run")
|
||||
start_pleroma()
|
||||
shell_info("Rewriting media domain from #{from_url} to #{to_url}")
|
||||
shell_info("Dry run: #{dry_run}")
|
||||
# actually selecting based on the attachment URL is stupidly difficult due to it being
|
||||
# stored as a JSONB array in the `data` field... the easier way to do this is just to iterate though
|
||||
# local posts
|
||||
from(o in Pleroma.Object)
|
||||
|> where(
|
||||
[o],
|
||||
fragment(
|
||||
"?->'url'->0->>'href' LIKE ?
|
||||
OR
|
||||
?->'attachment'->0->'url'->0->>'href' LIKE ?",
|
||||
o.data,
|
||||
^"#{from_url}%",
|
||||
o.data,
|
||||
^"#{from_url}%"
|
||||
)
|
||||
)
|
||||
|> Pleroma.Repo.chunk_stream(100, :batches, timeout: :infinity)
|
||||
|> Stream.each(fn chunk ->
|
||||
# now we just rewrite it and save it back, ezpz
|
||||
chunk
|
||||
|> Enum.each(fn object ->
|
||||
new_data =
|
||||
rewrite_url_object(Map.get(object, :id), Map.get(object, :data), from_url, to_url)
|
||||
|
||||
if dry_run do
|
||||
shell_info(
|
||||
"Dry run: would update object #{object.id} to new media domain (#{inspect(new_data)})"
|
||||
)
|
||||
else
|
||||
Pleroma.Repo.update!(Ecto.Changeset.change(object, data: new_data))
|
||||
shell_info("Updated object #{object.id} to new media domain")
|
||||
end
|
||||
end)
|
||||
end)
|
||||
|> Stream.run()
|
||||
end
|
||||
|
||||
defp rewrite_url(id, url, from_url, to_url) do
|
||||
new_uri = String.replace(url, from_url, to_url)
|
||||
check = URI.parse(new_uri)
|
||||
|
||||
case check do
|
||||
%URI{scheme: nil, host: nil} ->
|
||||
raise("Invalid URL after rewriting: #{new_uri} (object ID: #{id})")
|
||||
|
||||
_ ->
|
||||
new_uri
|
||||
end
|
||||
end
|
||||
|
||||
# The base object - we're looking for this, it has the actual url
|
||||
defp rewrite_url_object(id, %{"type" => "Link", "href" => href} = link, from_url, to_url) do
|
||||
Map.put(link, "href", rewrite_url(id, href, from_url, to_url))
|
||||
end
|
||||
|
||||
defp rewrite_url_object(id, %{"type" => type, "url" => urls} = object, from_url, to_url)
|
||||
when type in ["Document", "Image"] do
|
||||
# Document and Image contain url field, which will always be an array of links
|
||||
Map.put(
|
||||
object,
|
||||
"url",
|
||||
Enum.map(
|
||||
urls,
|
||||
fn url -> rewrite_url_object(id, url, from_url, to_url) end
|
||||
)
|
||||
)
|
||||
end
|
||||
|
||||
defp rewrite_url_object(
|
||||
id,
|
||||
%{"type" => _type, "attachment" => attachments} = object,
|
||||
from_url,
|
||||
to_url
|
||||
) do
|
||||
# Note will contain an attachment field, which is an array of documents
|
||||
Map.put(
|
||||
object,
|
||||
"attachment",
|
||||
Enum.map(attachments, fn attachment ->
|
||||
rewrite_url_object(id, attachment, from_url, to_url)
|
||||
end)
|
||||
)
|
||||
end
|
||||
|
||||
defp rewrite_url_object(
|
||||
_id,
|
||||
object,
|
||||
_,
|
||||
_
|
||||
) do
|
||||
shell_info(inspect(object))
|
||||
raise("Unhandled object format!")
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -262,7 +262,7 @@ defmodule Mix.Tasks.Pleroma.User do
|
|||
|
||||
Pleroma.User.Query.build(%{
|
||||
external: true,
|
||||
is_active: true
|
||||
deactivated: false
|
||||
})
|
||||
|> refetch_public_keys()
|
||||
end
|
||||
|
|
@ -408,7 +408,7 @@ defmodule Mix.Tasks.Pleroma.User do
|
|||
|
||||
Pleroma.User.Query.build(%{
|
||||
local: true,
|
||||
is_active: true,
|
||||
deactivated: false,
|
||||
is_moderator: false,
|
||||
is_admin: false,
|
||||
invisible: false
|
||||
|
|
@ -426,7 +426,7 @@ defmodule Mix.Tasks.Pleroma.User do
|
|||
|
||||
Pleroma.User.Query.build(%{
|
||||
local: true,
|
||||
is_active: true,
|
||||
deactivated: false,
|
||||
is_moderator: false,
|
||||
is_admin: false,
|
||||
invisible: false
|
||||
|
|
|
|||
|
|
@ -59,6 +59,8 @@ defmodule Pleroma.Activity.HTML do
|
|||
object = Object.normalize(activity, fetch: false)
|
||||
|
||||
add_cache_key_for(activity.id, key)
|
||||
|
||||
# callback already produces :commit or :ignore tuples
|
||||
HTML.ensure_scrubbed_html(content, scrubbers, object.data["fake"] || false, callback)
|
||||
end)
|
||||
end
|
||||
|
|
|
|||
|
|
@ -1,8 +1,15 @@
|
|||
defmodule Pleroma.Akkoma.Translator do
|
||||
@callback translate(String.t(), String.t() | nil, String.t()) ::
|
||||
{:ok, String.t(), String.t()} | {:error, any()}
|
||||
@callback languages() ::
|
||||
{:ok, [%{name: String.t(), code: String.t()}],
|
||||
[%{name: String.t(), code: String.t()}]}
|
||||
| {:error, any()}
|
||||
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
|
||||
|
||||
def languages do
|
||||
module = Pleroma.Config.get([:translator, :module])
|
||||
|
||||
@cachex.fetch!(:translations_cache, "languages:#{module}}", fn _ ->
|
||||
with {:ok, source_languages, dest_languages} <- module.languages() do
|
||||
{:commit, {:ok, source_languages, dest_languages}}
|
||||
else
|
||||
{:error, err} -> {:ignore, {:error, err}}
|
||||
end
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
defmodule Pleroma.Akkoma.Translators.ArgosTranslate do
|
||||
@behaviour Pleroma.Akkoma.Translator
|
||||
@behaviour Pleroma.Akkoma.Translator.Provider
|
||||
|
||||
alias Pleroma.Config
|
||||
|
||||
|
|
@ -23,7 +23,7 @@ defmodule Pleroma.Akkoma.Translators.ArgosTranslate do
|
|||
end
|
||||
end
|
||||
|
||||
@impl Pleroma.Akkoma.Translator
|
||||
@impl Pleroma.Akkoma.Translator.Provider
|
||||
def languages do
|
||||
with {response, 0} <- safe_languages() do
|
||||
langs =
|
||||
|
|
@ -83,7 +83,7 @@ defmodule Pleroma.Akkoma.Translators.ArgosTranslate do
|
|||
|
||||
defp htmlify_response(string, _), do: string
|
||||
|
||||
@impl Pleroma.Akkoma.Translator
|
||||
@impl Pleroma.Akkoma.Translator.Provider
|
||||
def translate(string, nil, to_language) do
|
||||
# Akkoma's Pleroma-fe expects us to detect the source language automatically.
|
||||
# Argos-translate doesn't have that option (yet?)
|
||||
|
|
@ -106,4 +106,7 @@ defmodule Pleroma.Akkoma.Translators.ArgosTranslate do
|
|||
{response, _} -> {:error, "ArgosTranslate failed to translate (#{response})"}
|
||||
end
|
||||
end
|
||||
|
||||
@impl Pleroma.Akkoma.Translator.Provider
|
||||
def name, do: "Argos Translate"
|
||||
end
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
defmodule Pleroma.Akkoma.Translators.DeepL do
|
||||
@behaviour Pleroma.Akkoma.Translator
|
||||
@behaviour Pleroma.Akkoma.Translator.Provider
|
||||
|
||||
alias Pleroma.HTTP
|
||||
alias Pleroma.Config
|
||||
|
|
@ -21,7 +21,7 @@ defmodule Pleroma.Akkoma.Translators.DeepL do
|
|||
Config.get([:deepl, :tier])
|
||||
end
|
||||
|
||||
@impl Pleroma.Akkoma.Translator
|
||||
@impl Pleroma.Akkoma.Translator.Provider
|
||||
def languages do
|
||||
with {:ok, %{status: 200} = source_response} <- do_languages("source"),
|
||||
{:ok, %{status: 200} = dest_response} <- do_languages("target"),
|
||||
|
|
@ -48,7 +48,7 @@ defmodule Pleroma.Akkoma.Translators.DeepL do
|
|||
end
|
||||
end
|
||||
|
||||
@impl Pleroma.Akkoma.Translator
|
||||
@impl Pleroma.Akkoma.Translator.Provider
|
||||
def translate(string, from_language, to_language) do
|
||||
with {:ok, %{status: 200} = response} <-
|
||||
do_request(api_key(), tier(), string, from_language, to_language),
|
||||
|
|
@ -97,4 +97,7 @@ defmodule Pleroma.Akkoma.Translators.DeepL do
|
|||
]
|
||||
)
|
||||
end
|
||||
|
||||
@impl Pleroma.Akkoma.Translator.Provider
|
||||
def name, do: "DeepL"
|
||||
end
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
defmodule Pleroma.Akkoma.Translators.LibreTranslate do
|
||||
@behaviour Pleroma.Akkoma.Translator
|
||||
@behaviour Pleroma.Akkoma.Translator.Provider
|
||||
|
||||
alias Pleroma.Config
|
||||
alias Pleroma.HTTP
|
||||
|
|
@ -13,7 +13,7 @@ defmodule Pleroma.Akkoma.Translators.LibreTranslate do
|
|||
Config.get([:libre_translate, :url])
|
||||
end
|
||||
|
||||
@impl Pleroma.Akkoma.Translator
|
||||
@impl Pleroma.Akkoma.Translator.Provider
|
||||
def languages do
|
||||
with {:ok, %{status: 200} = response} <- do_languages(),
|
||||
{:ok, body} <- Jason.decode(response.body) do
|
||||
|
|
@ -30,7 +30,7 @@ defmodule Pleroma.Akkoma.Translators.LibreTranslate do
|
|||
end
|
||||
end
|
||||
|
||||
@impl Pleroma.Akkoma.Translator
|
||||
@impl Pleroma.Akkoma.Translator.Provider
|
||||
def translate(string, from_language, to_language) do
|
||||
with {:ok, %{status: 200} = response} <- do_request(string, from_language, to_language),
|
||||
{:ok, body} <- Jason.decode(response.body) do
|
||||
|
|
@ -79,4 +79,7 @@ defmodule Pleroma.Akkoma.Translators.LibreTranslate do
|
|||
|
||||
HTTP.get(to_string(url))
|
||||
end
|
||||
|
||||
@impl Pleroma.Akkoma.Translator.Provider
|
||||
def name, do: "LibreTranslate"
|
||||
end
|
||||
|
|
|
|||
9
lib/pleroma/akkoma/translators/provider.ex
Normal file
9
lib/pleroma/akkoma/translators/provider.ex
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
defmodule Pleroma.Akkoma.Translator.Provider do
|
||||
@callback translate(String.t(), String.t() | nil, String.t()) ::
|
||||
{:ok, String.t(), String.t()} | {:error, any()}
|
||||
@callback languages() ::
|
||||
{:ok, [%{name: String.t(), code: String.t()}],
|
||||
[%{name: String.t(), code: String.t()}]}
|
||||
| {:error, any()}
|
||||
@callback name() :: String.t()
|
||||
end
|
||||
|
|
@ -74,7 +74,7 @@ defmodule Pleroma.Application do
|
|||
Pleroma.Web.Telemetry
|
||||
] ++
|
||||
elasticsearch_children() ++
|
||||
task_children(@mix_env) ++
|
||||
task_children() ++
|
||||
dont_run_in_test(@mix_env)
|
||||
|
||||
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
|
||||
|
|
@ -144,34 +144,90 @@ defmodule Pleroma.Application do
|
|||
|
||||
defp cachex_children do
|
||||
[
|
||||
build_cachex("used_captcha", ttl_interval: seconds_valid_interval()),
|
||||
build_cachex("user", default_ttl: 25_000, ttl_interval: 1000, limit: 2500),
|
||||
build_cachex("object", default_ttl: 25_000, ttl_interval: 1000, limit: 2500),
|
||||
build_cachex("rich_media", default_ttl: :timer.minutes(120), limit: 5000),
|
||||
build_cachex("scrubber", limit: 2500),
|
||||
build_cachex("scrubber_management", limit: 2500),
|
||||
build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
|
||||
build_cachex("web_resp", limit: 2500),
|
||||
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
|
||||
build_cachex("failed_proxy_url", limit: 2500),
|
||||
build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000),
|
||||
build_cachex("translations", default_ttl: :timer.hours(24 * 30), limit: 2500),
|
||||
build_cachex("instances", default_ttl: :timer.hours(24), ttl_interval: 1000, limit: 2500),
|
||||
build_cachex("rel_me", default_ttl: :timer.hours(24 * 30), limit: 300),
|
||||
build_cachex("host_meta", default_ttl: :timer.minutes(120), limit: 5000),
|
||||
build_cachex("http_backoff", default_ttl: :timer.hours(24 * 30), limit: 10000)
|
||||
build_cachex(
|
||||
"used_captcha",
|
||||
expiration: expiration(interval: seconds_valid_interval())
|
||||
),
|
||||
build_cachex(
|
||||
"user",
|
||||
expiration: expiration(default: 3_000, interval: 1_000),
|
||||
hooks: [cachex_sched_limit(2500)]
|
||||
),
|
||||
build_cachex(
|
||||
"object",
|
||||
expiration: expiration(default: 3_000, interval: 1_000),
|
||||
hooks: [cachex_sched_limit(2500)]
|
||||
),
|
||||
build_cachex(
|
||||
"rich_media",
|
||||
expiration: expiration(default: :timer.hours(2)),
|
||||
hooks: [cachex_sched_limit(5000)]
|
||||
),
|
||||
build_cachex(
|
||||
"scrubber",
|
||||
hooks: [cachex_sched_limit(2500)]
|
||||
),
|
||||
build_cachex(
|
||||
"scrubber_management",
|
||||
hooks: [cachex_sched_limit(2500)]
|
||||
),
|
||||
build_cachex(
|
||||
"idempotency",
|
||||
expiration: expiration(default: :timer.hours(6), interval: :timer.minutes(1)),
|
||||
hooks: [cachex_sched_limit(2500, [], frequency: :timer.minutes(1))]
|
||||
),
|
||||
build_cachex(
|
||||
"web_resp",
|
||||
hooks: [cachex_sched_limit(2500)]
|
||||
),
|
||||
build_cachex(
|
||||
"emoji_packs",
|
||||
expiration: expiration(default: :timer.minutes(5), interval: :timer.minutes(1)),
|
||||
hooks: [cachex_sched_limit(10)]
|
||||
),
|
||||
build_cachex(
|
||||
"failed_proxy_url",
|
||||
hooks: [cachex_sched_limit(2500)]
|
||||
),
|
||||
build_cachex(
|
||||
"banned_urls",
|
||||
expiration: expiration(default: :timer.hours(24 * 30)),
|
||||
hooks: [cachex_sched_limit(5_000, [], frequency: :timer.minutes(5))]
|
||||
),
|
||||
build_cachex(
|
||||
"translations",
|
||||
expiration: expiration(default: :timer.hours(24 * 30)),
|
||||
hooks: [cachex_sched_limit(2500)]
|
||||
),
|
||||
build_cachex(
|
||||
"instances",
|
||||
expiration: expiration(default: :timer.hours(24), interval: 1000),
|
||||
hooks: [cachex_sched_limit(2500)]
|
||||
),
|
||||
build_cachex(
|
||||
"rel_me",
|
||||
expiration: expiration(default: :timer.hours(24 * 30)),
|
||||
hooks: [cachex_sched_limit(300, [], frequency: :timer.minutes(1))]
|
||||
),
|
||||
build_cachex(
|
||||
"host_meta",
|
||||
expiration: expiration(default: :timer.minutes(120)),
|
||||
hooks: [cachex_sched_limit(5000, [], frequency: :timer.minutes(1))]
|
||||
),
|
||||
build_cachex(
|
||||
"http_backoff",
|
||||
expiration: expiration(default: :timer.hours(24 * 30)),
|
||||
hooks: [cachex_sched_limit(10_000, [], frequency: :timer.minutes(5))]
|
||||
)
|
||||
]
|
||||
end
|
||||
|
||||
defp emoji_packs_expiration,
|
||||
do: expiration(default: :timer.seconds(5 * 60), interval: :timer.seconds(60))
|
||||
|
||||
defp idempotency_expiration,
|
||||
do: expiration(default: :timer.seconds(6 * 60 * 60), interval: :timer.seconds(60))
|
||||
|
||||
defp seconds_valid_interval,
|
||||
do: :timer.seconds(Config.get!([Pleroma.Captcha, :seconds_valid]))
|
||||
|
||||
defp cachex_sched_limit(limit, prune_opts \\ [], sched_opts \\ []),
|
||||
do: hook(module: Cachex.Limit.Scheduled, args: {limit, prune_opts, sched_opts})
|
||||
|
||||
@spec build_cachex(String.t(), keyword()) :: map()
|
||||
def build_cachex(type, opts),
|
||||
do: %{
|
||||
|
|
@ -199,31 +255,29 @@ defmodule Pleroma.Application do
|
|||
]
|
||||
end
|
||||
|
||||
@spec task_children(atom()) :: [map()]
|
||||
@spec task_children() :: [map()]
|
||||
defp task_children() do
|
||||
always =
|
||||
[
|
||||
%{
|
||||
id: :web_push_init,
|
||||
start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
|
||||
restart: :temporary
|
||||
}
|
||||
]
|
||||
|
||||
defp task_children(:test) do
|
||||
[
|
||||
%{
|
||||
id: :web_push_init,
|
||||
start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
|
||||
restart: :temporary
|
||||
}
|
||||
]
|
||||
end
|
||||
|
||||
defp task_children(_) do
|
||||
[
|
||||
%{
|
||||
id: :web_push_init,
|
||||
start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
|
||||
restart: :temporary
|
||||
},
|
||||
%{
|
||||
id: :internal_fetch_init,
|
||||
start: {Task, :start_link, [&Pleroma.Web.ActivityPub.InternalFetchActor.init/0]},
|
||||
restart: :temporary
|
||||
}
|
||||
]
|
||||
if @mix_env == :test do
|
||||
always
|
||||
else
|
||||
[
|
||||
%{
|
||||
id: :internal_fetch_init,
|
||||
start: {Task, :start_link, [&Pleroma.Web.ActivityPub.InternalFetchActor.init/0]},
|
||||
restart: :temporary
|
||||
}
|
||||
| always
|
||||
]
|
||||
end
|
||||
end
|
||||
|
||||
@spec elasticsearch_children :: [Pleroma.Search.Elasticsearch.Cluster]
|
||||
|
|
|
|||
|
|
@ -53,13 +53,15 @@ defmodule Pleroma.Bookmark do
|
|||
end
|
||||
|
||||
@spec destroy(FlakeId.Ecto.CompatType.t(), FlakeId.Ecto.CompatType.t()) ::
|
||||
{:ok, Bookmark.t()} | {:error, Changeset.t()}
|
||||
:ok | {:error, any()}
|
||||
def destroy(user_id, activity_id) do
|
||||
from(b in Bookmark,
|
||||
where: b.user_id == ^user_id,
|
||||
where: b.activity_id == ^activity_id
|
||||
)
|
||||
|> Repo.one()
|
||||
|> Repo.delete()
|
||||
{cnt, _} =
|
||||
from(b in Bookmark,
|
||||
where: b.user_id == ^user_id,
|
||||
where: b.activity_id == ^activity_id
|
||||
)
|
||||
|> Repo.delete_all()
|
||||
|
||||
if cnt >= 1, do: :ok, else: {:error, :not_found}
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -97,7 +97,7 @@ defmodule Pleroma.Captcha do
|
|||
|
||||
defp mark_captcha_as_used(token) do
|
||||
ttl = seconds_valid() |> :timer.seconds()
|
||||
@cachex.put(:used_captcha_cache, token, true, ttl: ttl)
|
||||
@cachex.put(:used_captcha_cache, token, true, expire: ttl)
|
||||
end
|
||||
|
||||
defp method, do: Pleroma.Config.get!([__MODULE__, :method])
|
||||
|
|
|
|||
|
|
@ -22,6 +22,43 @@ defmodule Pleroma.Config.DeprecationWarnings do
|
|||
"\n* `config :pleroma, :instance, :quarantined_instances` is now covered by `:pleroma, :mrf_simple, :reject`"}
|
||||
]
|
||||
|
||||
def check_skip_thread_containment do
|
||||
# The default in config/config.exs is "true" since 593b8b1e6a8502cca9bf5559b8bec86f172bbecb
|
||||
# but when the default is retrieved in code the fallback is still "false"
|
||||
uses_thread_visibility_filtering = !Config.get([:instance, :skip_thread_containment], false)
|
||||
|
||||
if uses_thread_visibility_filtering do
|
||||
Logger.warning("""
|
||||
!!!DEPRECATION WARNING!!!
|
||||
Your config is explicitly enabling thread-based visibility containment by setting the below:
|
||||
```
|
||||
config :pleroma, :instance, skip_thread_containment: false
|
||||
```
|
||||
|
||||
This feature comes with a very high performance overhead and is considered for removal.
|
||||
If you actually need or strongly prefer keeping it, speak up NOW(!) by filing a ticket at
|
||||
https://akkoma.dev/AkkomaGang/akkoma/issues
|
||||
Complaints only after the removal happened are much less likely to have any effect.
|
||||
""")
|
||||
end
|
||||
end
|
||||
|
||||
def check_truncated_nodeinfo_in_accounts do
|
||||
if !Config.get!([:instance, :filter_embedded_nodeinfo]) do
|
||||
Logger.warning("""
|
||||
!!!BUG WORKAROUND DETECTED!!!
|
||||
Your config is explicitly disabling filtering of nodeinfo data embedded in other Masto API responses
|
||||
|
||||
config :pleroma, :instance, filter_embedded_nodeinfo: false
|
||||
|
||||
This setting will soon be removed. Any usage of it merely serves as a temporary workaround.
|
||||
Make sure to file a bug telling us which problems you encountered and circumvented by setting this!
|
||||
https://akkoma.dev/AkkomaGang/akkoma/issues
|
||||
We can’t fix bugs we don’t know about.
|
||||
""")
|
||||
end
|
||||
end
|
||||
|
||||
def check_exiftool_filter do
|
||||
filters = Config.get([Pleroma.Upload]) |> Keyword.get(:filters, [])
|
||||
|
||||
|
|
@ -222,7 +259,8 @@ defmodule Pleroma.Config.DeprecationWarnings do
|
|||
check_http_adapter(),
|
||||
check_uploader_base_url_set(),
|
||||
check_uploader_base_url_is_not_base_domain(),
|
||||
check_exiftool_filter()
|
||||
check_exiftool_filter(),
|
||||
check_skip_thread_containment()
|
||||
]
|
||||
|> Enum.reduce(:ok, fn
|
||||
:ok, :ok -> :ok
|
||||
|
|
|
|||
|
|
@ -7,7 +7,9 @@ defmodule Pleroma.ConfigDB do
|
|||
|
||||
import Ecto.Changeset
|
||||
import Ecto.Query, only: [select: 3, from: 2]
|
||||
import Pleroma.Web.Gettext
|
||||
|
||||
use Gettext,
|
||||
backend: Pleroma.Web.Gettext
|
||||
|
||||
alias __MODULE__
|
||||
alias Pleroma.Repo
|
||||
|
|
|
|||
|
|
@ -19,7 +19,8 @@ defmodule Pleroma.Constants do
|
|||
"context_id",
|
||||
"deleted_activity_id",
|
||||
"pleroma_internal",
|
||||
"generator"
|
||||
"generator",
|
||||
"voters"
|
||||
]
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -15,7 +15,6 @@ defmodule Pleroma.Conversation do
|
|||
# This is the context ap id.
|
||||
field(:ap_id, :string)
|
||||
has_many(:participations, Participation)
|
||||
has_many(:users, through: [:participations, :user])
|
||||
|
||||
timestamps()
|
||||
end
|
||||
|
|
@ -45,7 +44,11 @@ defmodule Pleroma.Conversation do
|
|||
participation = Repo.preload(participation, :recipients)
|
||||
|
||||
if Enum.empty?(participation.recipients) do
|
||||
recipients = User.get_all_by_ap_id(activity.recipients)
|
||||
recipients =
|
||||
[activity.actor | activity.recipients]
|
||||
|> Enum.uniq()
|
||||
|> User.get_all_by_ap_id()
|
||||
|
||||
RecipientShip.create(recipients, participation)
|
||||
end
|
||||
end
|
||||
|
|
@ -64,15 +67,16 @@ defmodule Pleroma.Conversation do
|
|||
ap_id when is_binary(ap_id) and byte_size(ap_id) > 0 <- object.data["context"],
|
||||
{:ok, conversation} <- create_for_ap_id(ap_id) do
|
||||
users = User.get_users_from_set(activity.recipients, local_only: false)
|
||||
local_users = Enum.filter(users, & &1.local)
|
||||
|
||||
participations =
|
||||
Enum.map(users, fn user ->
|
||||
Enum.map(local_users, fn user ->
|
||||
invisible_conversation = Enum.any?(users, &User.blocks?(user, &1))
|
||||
|
||||
opts = Keyword.put(opts, :invisible_conversation, invisible_conversation)
|
||||
|
||||
{:ok, participation} =
|
||||
Participation.create_for_user_and_conversation(user, conversation, opts)
|
||||
Participation.create_or_bump(user, conversation, activity.id, opts)
|
||||
|
||||
maybe_create_recipientships(participation, activity)
|
||||
participation
|
||||
|
|
|
|||
|
|
@ -12,9 +12,12 @@ defmodule Pleroma.Conversation.Participation do
|
|||
import Ecto.Changeset
|
||||
import Ecto.Query
|
||||
|
||||
@type t() :: %__MODULE__{}
|
||||
|
||||
schema "conversation_participations" do
|
||||
belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
|
||||
belongs_to(:conversation, Conversation)
|
||||
field(:last_bump, FlakeId.Ecto.CompatType)
|
||||
field(:read, :boolean, default: false)
|
||||
field(:last_activity_id, FlakeId.Ecto.CompatType, virtual: true)
|
||||
|
||||
|
|
@ -24,24 +27,26 @@ defmodule Pleroma.Conversation.Participation do
|
|||
timestamps()
|
||||
end
|
||||
|
||||
def creation_cng(struct, params) do
|
||||
defp creation_cng(struct, params) do
|
||||
struct
|
||||
|> cast(params, [:user_id, :conversation_id, :read])
|
||||
|> validate_required([:user_id, :conversation_id])
|
||||
|> cast(params, [:user_id, :conversation_id, :last_bump, :read])
|
||||
|> validate_required([:user_id, :conversation_id, :last_bump])
|
||||
end
|
||||
|
||||
def create_for_user_and_conversation(user, conversation, opts \\ []) do
|
||||
def create_or_bump(user, conversation, status_id, opts \\ []) do
|
||||
read = !!opts[:read]
|
||||
invisible_conversation = !!opts[:invisible_conversation]
|
||||
|
||||
update_on_conflict =
|
||||
if(invisible_conversation, do: [], else: [read: read])
|
||||
|> Keyword.put(:updated_at, NaiveDateTime.utc_now())
|
||||
|> Keyword.put(:last_bump, status_id)
|
||||
|
||||
%__MODULE__{}
|
||||
|> creation_cng(%{
|
||||
user_id: user.id,
|
||||
conversation_id: conversation.id,
|
||||
last_bump: status_id,
|
||||
read: invisible_conversation || read
|
||||
})
|
||||
|> Repo.insert(
|
||||
|
|
@ -51,7 +56,7 @@ defmodule Pleroma.Conversation.Participation do
|
|||
)
|
||||
end
|
||||
|
||||
def read_cng(struct, params) do
|
||||
defp read_cng(struct, params) do
|
||||
struct
|
||||
|> cast(params, [:read])
|
||||
|> validate_required([:read])
|
||||
|
|
@ -99,43 +104,90 @@ defmodule Pleroma.Conversation.Participation do
|
|||
{:ok, user, participations}
|
||||
end
|
||||
|
||||
# used for tests
|
||||
def mark_as_unread(participation) do
|
||||
participation
|
||||
|> read_cng(%{read: false})
|
||||
|> Repo.update()
|
||||
end
|
||||
|
||||
def for_user(user, params \\ %{}) do
|
||||
def for_user_with_pagination(user, params \\ %{}) do
|
||||
from(p in __MODULE__,
|
||||
where: p.user_id == ^user.id,
|
||||
order_by: [desc: p.updated_at],
|
||||
preload: [conversation: [:users]]
|
||||
preload: [:conversation]
|
||||
)
|
||||
|> restrict_recipients(user, params)
|
||||
|> Pleroma.Pagination.fetch_paginated(params)
|
||||
|> select([p], %{id: p.last_bump, entry: p})
|
||||
|> Pleroma.Pagination.fetch_paginated(Map.put(params, :pagination_field, :last_bump))
|
||||
end
|
||||
|
||||
def restrict_recipients(query, user, %{recipients: user_ids}) do
|
||||
def preload_last_activity_id_and_filter(participations) when is_list(participations) do
|
||||
participations
|
||||
|> Enum.map(fn p -> load_last_activity_id(p) end)
|
||||
|> Enum.filter(fn p -> p.last_activity_id end)
|
||||
end
|
||||
|
||||
defp load_last_activity_id(%__MODULE__{} = participation) do
|
||||
%{
|
||||
participation
|
||||
| last_activity_id: last_activity_id(participation)
|
||||
}
|
||||
end
|
||||
|
||||
@spec last_activity_id(t(), User.t() | nil) :: Flake.t()
|
||||
def last_activity_id(participation, user \\ nil)
|
||||
|
||||
def last_activity_id(
|
||||
%__MODULE__{conversation: %Conversation{}} = participation,
|
||||
user
|
||||
) do
|
||||
user =
|
||||
if user && user.id == participation.user_id do
|
||||
user
|
||||
else
|
||||
case participation.user do
|
||||
%User{} -> participation.user
|
||||
_ -> User.get_cached_by_id(participation.user_id)
|
||||
end
|
||||
end
|
||||
|
||||
ActivityPub.fetch_latest_direct_activity_id_for_context(
|
||||
participation.conversation.ap_id,
|
||||
%{
|
||||
user: user,
|
||||
blocking_user: user
|
||||
}
|
||||
)
|
||||
end
|
||||
|
||||
def last_activity_id(%__MODULE__{} = participation, user) do
|
||||
case Repo.preload(participation, :conversation) do
|
||||
%{conversation: %Conversation{}} = p -> last_activity_id(p, user)
|
||||
_ -> nil
|
||||
end
|
||||
end
|
||||
|
||||
defp restrict_recipients(query, user, %{recipients: user_ids}) do
|
||||
user_binary_ids =
|
||||
[user.id | user_ids]
|
||||
|> Enum.uniq()
|
||||
|> User.binary_id()
|
||||
|
||||
conversation_subquery =
|
||||
__MODULE__
|
||||
|> group_by([p], p.conversation_id)
|
||||
recipient_subquery =
|
||||
RecipientShip
|
||||
|> group_by([r], r.participation_id)
|
||||
|> having(
|
||||
[p],
|
||||
count(p.user_id) == ^length(user_binary_ids) and
|
||||
fragment("array_agg(?) @> ?", p.user_id, ^user_binary_ids)
|
||||
[r],
|
||||
count(r.user_id) == ^length(user_binary_ids) and
|
||||
fragment("array_agg(?) @> ?", r.user_id, ^user_binary_ids)
|
||||
)
|
||||
|> select([p], %{id: p.conversation_id})
|
||||
|> select([r], %{pid: r.participation_id})
|
||||
|
||||
query
|
||||
|> join(:inner, [p], c in subquery(conversation_subquery), on: p.conversation_id == c.id)
|
||||
|> join(:inner, [p], r in subquery(recipient_subquery), on: p.id == r.pid)
|
||||
end
|
||||
|
||||
def restrict_recipients(query, _, _), do: query
|
||||
defp restrict_recipients(query, _, _), do: query
|
||||
|
||||
def for_user_and_conversation(user, conversation) do
|
||||
from(p in __MODULE__,
|
||||
|
|
@ -145,26 +197,6 @@ defmodule Pleroma.Conversation.Participation do
|
|||
|> Repo.one()
|
||||
end
|
||||
|
||||
def for_user_with_last_activity_id(user, params \\ %{}) do
|
||||
for_user(user, params)
|
||||
|> Enum.map(fn participation ->
|
||||
activity_id =
|
||||
ActivityPub.fetch_latest_direct_activity_id_for_context(
|
||||
participation.conversation.ap_id,
|
||||
%{
|
||||
user: user,
|
||||
blocking_user: user
|
||||
}
|
||||
)
|
||||
|
||||
%{
|
||||
participation
|
||||
| last_activity_id: activity_id
|
||||
}
|
||||
end)
|
||||
|> Enum.reject(&is_nil(&1.last_activity_id))
|
||||
end
|
||||
|
||||
def get(_, _ \\ [])
|
||||
def get(nil, _), do: nil
|
||||
|
||||
|
|
@ -213,14 +245,6 @@ defmodule Pleroma.Conversation.Participation do
|
|||
|> Repo.aggregate(:count, :id)
|
||||
end
|
||||
|
||||
def unread_conversation_count_for_user(user) do
|
||||
from(p in __MODULE__,
|
||||
where: p.user_id == ^user.id,
|
||||
where: not p.read,
|
||||
select: %{count: count(p.id)}
|
||||
)
|
||||
end
|
||||
|
||||
def delete(%__MODULE__{} = participation) do
|
||||
Repo.delete(participation)
|
||||
end
|
||||
|
|
|
|||
|
|
@ -1,79 +0,0 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.CounterCache do
|
||||
alias Pleroma.CounterCache
|
||||
alias Pleroma.Repo
|
||||
use Ecto.Schema
|
||||
import Ecto.Changeset
|
||||
import Ecto.Query
|
||||
|
||||
schema "counter_cache" do
|
||||
field(:instance, :string)
|
||||
field(:public, :integer)
|
||||
field(:unlisted, :integer)
|
||||
field(:private, :integer)
|
||||
field(:direct, :integer)
|
||||
end
|
||||
|
||||
def changeset(struct, params) do
|
||||
struct
|
||||
|> cast(params, [:instance, :public, :unlisted, :private, :direct])
|
||||
|> validate_required([:instance])
|
||||
|> unique_constraint(:instance)
|
||||
end
|
||||
|
||||
def get_by_instance(instance) do
|
||||
CounterCache
|
||||
|> select([c], %{
|
||||
"public" => c.public,
|
||||
"unlisted" => c.unlisted,
|
||||
"private" => c.private,
|
||||
"direct" => c.direct
|
||||
})
|
||||
|> where([c], c.instance == ^instance)
|
||||
|> Repo.one()
|
||||
|> case do
|
||||
nil -> %{"public" => 0, "unlisted" => 0, "private" => 0, "direct" => 0}
|
||||
val -> val
|
||||
end
|
||||
end
|
||||
|
||||
def get_sum do
|
||||
CounterCache
|
||||
|> select([c], %{
|
||||
"public" => type(sum(c.public), :integer),
|
||||
"unlisted" => type(sum(c.unlisted), :integer),
|
||||
"private" => type(sum(c.private), :integer),
|
||||
"direct" => type(sum(c.direct), :integer)
|
||||
})
|
||||
|> Repo.one()
|
||||
end
|
||||
|
||||
def set(instance, values) do
|
||||
params =
|
||||
Enum.reduce(
|
||||
["public", "private", "unlisted", "direct"],
|
||||
%{"instance" => instance},
|
||||
fn param, acc ->
|
||||
Map.put_new(acc, param, Map.get(values, param, 0))
|
||||
end
|
||||
)
|
||||
|
||||
%CounterCache{}
|
||||
|> changeset(params)
|
||||
|> Repo.insert(
|
||||
on_conflict: [
|
||||
set: [
|
||||
public: params["public"],
|
||||
private: params["private"],
|
||||
unlisted: params["unlisted"],
|
||||
direct: params["direct"]
|
||||
]
|
||||
],
|
||||
returning: true,
|
||||
conflict_target: :instance
|
||||
)
|
||||
end
|
||||
end
|
||||
|
|
@ -4,7 +4,7 @@
|
|||
|
||||
defmodule Pleroma.Docs.Translator do
|
||||
require Pleroma.Docs.Translator.Compiler
|
||||
require Pleroma.Web.Gettext
|
||||
use Gettext, backend: Pleroma.Web.Gettext
|
||||
|
||||
@before_compile Pleroma.Docs.Translator.Compiler
|
||||
end
|
||||
|
|
|
|||
|
|
@ -7,6 +7,8 @@ defmodule Pleroma.Docs.Translator.Compiler do
|
|||
@raw_config Pleroma.Config.Loader.read("config/description.exs")
|
||||
@raw_descriptions @raw_config[:pleroma][:config_description]
|
||||
|
||||
require Gettext.Macros
|
||||
|
||||
defmacro __before_compile__(_env) do
|
||||
strings =
|
||||
__MODULE__.descriptions()
|
||||
|
|
@ -21,7 +23,8 @@ defmodule Pleroma.Docs.Translator.Compiler do
|
|||
ctxt = msgctxt_for(path, type)
|
||||
|
||||
quote do
|
||||
Pleroma.Web.Gettext.dpgettext_noop(
|
||||
Gettext.Macros.dpgettext_noop_with_backend(
|
||||
Pleroma.Web.Gettext,
|
||||
"config_descriptions",
|
||||
unquote(ctxt),
|
||||
unquote(string)
|
||||
|
|
|
|||
|
|
@ -5,12 +5,13 @@
|
|||
defmodule Pleroma.Emails.UserEmail do
|
||||
@moduledoc "User emails"
|
||||
|
||||
require Pleroma.Web.Gettext
|
||||
require Pleroma.Web.GettextCompanion
|
||||
use Gettext, backend: Pleroma.Web.Gettext
|
||||
use Pleroma.Web, :mailer
|
||||
|
||||
alias Pleroma.Config
|
||||
alias Pleroma.User
|
||||
alias Pleroma.Web.Gettext
|
||||
alias Pleroma.Web.GettextCompanion
|
||||
|
||||
import Swoosh.Email
|
||||
import Phoenix.Swoosh, except: [render_body: 3]
|
||||
|
|
@ -29,7 +30,7 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
|
||||
@spec welcome(User.t(), map()) :: Swoosh.Email.t()
|
||||
def welcome(user, opts \\ %{}) do
|
||||
Gettext.with_locale_or_default user.language do
|
||||
GettextCompanion.with_locale_or_default user.language do
|
||||
new()
|
||||
|> to(recipient(user))
|
||||
|> from(Map.get(opts, :sender, sender()))
|
||||
|
|
@ -37,7 +38,7 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
Map.get(
|
||||
opts,
|
||||
:subject,
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"welcome email subject",
|
||||
"Welcome to %{instance_name}!",
|
||||
|
|
@ -49,7 +50,7 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
Map.get(
|
||||
opts,
|
||||
:html,
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"welcome email html body",
|
||||
"Welcome to %{instance_name}!",
|
||||
|
|
@ -61,7 +62,7 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
Map.get(
|
||||
opts,
|
||||
:text,
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"welcome email text body",
|
||||
"Welcome to %{instance_name}!",
|
||||
|
|
@ -73,11 +74,11 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
end
|
||||
|
||||
def password_reset_email(user, token) when is_binary(token) do
|
||||
Gettext.with_locale_or_default user.language do
|
||||
GettextCompanion.with_locale_or_default user.language do
|
||||
password_reset_url = url(~p[/api/v1/pleroma/password_reset/#{token}])
|
||||
|
||||
html_body =
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"password reset email body",
|
||||
"""
|
||||
|
|
@ -93,9 +94,7 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
new()
|
||||
|> to(recipient(user))
|
||||
|> from(sender())
|
||||
|> subject(
|
||||
Gettext.dpgettext("static_pages", "password reset email subject", "Password reset")
|
||||
)
|
||||
|> subject(dpgettext("static_pages", "password reset email subject", "Password reset"))
|
||||
|> html_body(html_body)
|
||||
end
|
||||
end
|
||||
|
|
@ -106,11 +105,11 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
to_email,
|
||||
to_name \\ nil
|
||||
) do
|
||||
Gettext.with_locale_or_default user.language do
|
||||
GettextCompanion.with_locale_or_default user.language do
|
||||
registration_url = url(~p[/registration/#{user_invite_token.token}])
|
||||
|
||||
html_body =
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"user invitation email body",
|
||||
"""
|
||||
|
|
@ -127,7 +126,7 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
|> to(recipient(to_email, to_name))
|
||||
|> from(sender())
|
||||
|> subject(
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"user invitation email subject",
|
||||
"Invitation to %{instance_name}",
|
||||
|
|
@ -139,11 +138,11 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
end
|
||||
|
||||
def account_confirmation_email(user) do
|
||||
Gettext.with_locale_or_default user.language do
|
||||
GettextCompanion.with_locale_or_default user.language do
|
||||
confirmation_url = url(~p[/api/account/confirm_email/#{user.id}/#{user.confirmation_token}])
|
||||
|
||||
html_body =
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"confirmation email body",
|
||||
"""
|
||||
|
|
@ -159,7 +158,7 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
|> to(recipient(user))
|
||||
|> from(sender())
|
||||
|> subject(
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"confirmation email subject",
|
||||
"%{instance_name} account confirmation",
|
||||
|
|
@ -171,9 +170,9 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
end
|
||||
|
||||
def approval_pending_email(user) do
|
||||
Gettext.with_locale_or_default user.language do
|
||||
GettextCompanion.with_locale_or_default user.language do
|
||||
html_body =
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"approval pending email body",
|
||||
"""
|
||||
|
|
@ -187,7 +186,7 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
|> to(recipient(user))
|
||||
|> from(sender())
|
||||
|> subject(
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"approval pending email subject",
|
||||
"Your account is awaiting approval"
|
||||
|
|
@ -198,9 +197,9 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
end
|
||||
|
||||
def successful_registration_email(user) do
|
||||
Gettext.with_locale_or_default user.language do
|
||||
GettextCompanion.with_locale_or_default user.language do
|
||||
html_body =
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"successful registration email body",
|
||||
"""
|
||||
|
|
@ -216,7 +215,7 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
|> to(recipient(user))
|
||||
|> from(sender())
|
||||
|> subject(
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"successful registration email subject",
|
||||
"Account registered on %{instance_name}",
|
||||
|
|
@ -234,7 +233,7 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
"""
|
||||
@spec digest_email(User.t()) :: Swoosh.Email.t() | nil
|
||||
def digest_email(user) do
|
||||
Gettext.with_locale_or_default user.language do
|
||||
GettextCompanion.with_locale_or_default user.language do
|
||||
notifications = Pleroma.Notification.for_user_since(user, user.last_digest_emailed_at)
|
||||
|
||||
mentions =
|
||||
|
|
@ -295,7 +294,7 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
|> to(recipient(user))
|
||||
|> from(sender())
|
||||
|> subject(
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"digest email subject",
|
||||
"Your digest from %{instance_name}",
|
||||
|
|
@ -336,12 +335,12 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
def backup_is_ready_email(backup, admin_user_id \\ nil) do
|
||||
%{user: user} = Pleroma.Repo.preload(backup, :user)
|
||||
|
||||
Gettext.with_locale_or_default user.language do
|
||||
GettextCompanion.with_locale_or_default user.language do
|
||||
download_url = Pleroma.Web.PleromaAPI.BackupView.download_url(backup)
|
||||
|
||||
html_body =
|
||||
if is_nil(admin_user_id) do
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"account archive email body - self-requested",
|
||||
"""
|
||||
|
|
@ -353,7 +352,7 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
else
|
||||
admin = Pleroma.Repo.get(User, admin_user_id)
|
||||
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"account archive email body - admin requested",
|
||||
"""
|
||||
|
|
@ -369,7 +368,7 @@ defmodule Pleroma.Emails.UserEmail do
|
|||
|> to(recipient(user))
|
||||
|> from(sender())
|
||||
|> subject(
|
||||
Gettext.dpgettext(
|
||||
dpgettext(
|
||||
"static_pages",
|
||||
"account archive email subject",
|
||||
"Your account archive is ready"
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ defmodule Pleroma.Emoji do
|
|||
|
||||
@ets __MODULE__.Ets
|
||||
@ets_options [
|
||||
:ordered_set,
|
||||
:set,
|
||||
:protected,
|
||||
:named_table,
|
||||
{:read_concurrency, true}
|
||||
|
|
@ -25,6 +25,8 @@ defmodule Pleroma.Emoji do
|
|||
|
||||
defstruct [:code, :file, :tags, :safe_code, :safe_file]
|
||||
|
||||
@type t :: %__MODULE__{}
|
||||
|
||||
@doc "Build emoji struct"
|
||||
def build({code, file, tags}) do
|
||||
%__MODULE__{
|
||||
|
|
@ -43,14 +45,14 @@ defmodule Pleroma.Emoji do
|
|||
GenServer.start_link(__MODULE__, [], name: __MODULE__)
|
||||
end
|
||||
|
||||
@doc "Reloads the emojis from disk."
|
||||
@doc "Reloads the emojis from disk (asynchronous)"
|
||||
@spec reload() :: :ok
|
||||
def reload do
|
||||
GenServer.call(__MODULE__, :reload)
|
||||
GenServer.cast(__MODULE__, :reload)
|
||||
end
|
||||
|
||||
@doc "Returns the path of the emoji `name`."
|
||||
@spec get(String.t()) :: String.t() | nil
|
||||
@doc "Returns the emoji struct of the given `name` if it exists."
|
||||
@spec get(String.t()) :: t() | nil
|
||||
def get(name) do
|
||||
name =
|
||||
if String.starts_with?(name, ":") do
|
||||
|
|
@ -62,11 +64,23 @@ defmodule Pleroma.Emoji do
|
|||
end
|
||||
|
||||
case :ets.lookup(@ets, name) do
|
||||
[{_, path}] -> path
|
||||
[{_, emoji}] -> emoji
|
||||
_ -> nil
|
||||
end
|
||||
end
|
||||
|
||||
@doc "Updates or inserts new emoji (asynchronous)"
|
||||
@spec add_or_update(t()) :: :ok
|
||||
def add_or_update(%__MODULE__{} = emoji) do
|
||||
GenServer.cast(__MODULE__, {:add, emoji})
|
||||
end
|
||||
|
||||
@doc "Delete emoji with given shortcode if it exists (asynchronous)"
|
||||
@spec delete(String.t()) :: :ok
|
||||
def delete(code) do
|
||||
GenServer.cast(__MODULE__, {:delete, code})
|
||||
end
|
||||
|
||||
@spec exist?(String.t()) :: boolean()
|
||||
def exist?(name), do: not is_nil(get(name))
|
||||
|
||||
|
|
@ -89,10 +103,14 @@ defmodule Pleroma.Emoji do
|
|||
{:noreply, state}
|
||||
end
|
||||
|
||||
@doc false
|
||||
def handle_call(:reload, _from, state) do
|
||||
update_emojis(Loader.load())
|
||||
{:reply, :ok, state}
|
||||
def handle_cast({:add, %__MODULE__{} = emoji}, state) do
|
||||
:ets.insert(@ets, {emoji.code, emoji})
|
||||
{:noreply, state}
|
||||
end
|
||||
|
||||
def handle_cast({:delete, code}, state) do
|
||||
:ets.delete(@ets, code)
|
||||
{:noreply, state}
|
||||
end
|
||||
|
||||
@doc false
|
||||
|
|
|
|||
|
|
@ -49,12 +49,15 @@ defmodule Pleroma.Emoji.Pack do
|
|||
Path.join(dir, safe_path)
|
||||
end
|
||||
|
||||
defp tags(%__MODULE__{} = pack), do: ["pack:" <> pack.name]
|
||||
|
||||
@spec create(String.t()) :: {:ok, t()} | {:error, File.posix()} | {:error, :empty_values}
|
||||
def create(name) do
|
||||
with :ok <- validate_not_empty([name]),
|
||||
dir <- path_join_name_safe(emoji_path(), name),
|
||||
:ok <- File.mkdir(dir) do
|
||||
save_pack(%__MODULE__{
|
||||
name: name,
|
||||
path: dir,
|
||||
pack_file: Path.join(dir, "pack.json")
|
||||
})
|
||||
|
|
@ -90,9 +93,13 @@ defmodule Pleroma.Emoji.Pack do
|
|||
@spec delete(String.t()) ::
|
||||
{:ok, [binary()]} | {:error, File.posix(), binary()} | {:error, :empty_values}
|
||||
def delete(name) do
|
||||
with :ok <- validate_not_empty([name]),
|
||||
pack_path <- path_join_name_safe(emoji_path(), name) do
|
||||
File.rm_rf(pack_path)
|
||||
with {_, :ok} <- {:empty, validate_not_empty([name])},
|
||||
{:ok, pack} <- load_pack(name) do
|
||||
Enum.each(pack.files, fn {shortcode, _} -> Emoji.delete(shortcode) end)
|
||||
File.rm_rf(pack.path)
|
||||
else
|
||||
{:empty, error} -> error
|
||||
_ -> {:ok, []}
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -142,8 +149,6 @@ defmodule Pleroma.Emoji.Pack do
|
|||
{item, updated_pack}
|
||||
end)
|
||||
|
||||
Emoji.reload()
|
||||
|
||||
{:ok, updated_pack}
|
||||
after
|
||||
File.rm_rf(tmp_dir)
|
||||
|
|
@ -169,16 +174,14 @@ defmodule Pleroma.Emoji.Pack do
|
|||
with :ok <- validate_not_empty([shortcode, filename]),
|
||||
:ok <- validate_emoji_not_exists(shortcode),
|
||||
{:ok, updated_pack} <- do_add_file(pack, shortcode, filename, file) do
|
||||
Emoji.reload()
|
||||
{:ok, updated_pack}
|
||||
end
|
||||
end
|
||||
|
||||
defp do_add_file(pack, shortcode, filename, file) do
|
||||
with :ok <- save_file(file, pack, filename) do
|
||||
pack
|
||||
|> put_emoji(shortcode, filename)
|
||||
|> save_pack()
|
||||
with :ok <- save_file(file, pack, filename),
|
||||
{:ok, pack} <- put_emoji(pack, shortcode, filename) do
|
||||
{:ok, pack}
|
||||
end
|
||||
end
|
||||
|
||||
|
|
@ -188,7 +191,7 @@ defmodule Pleroma.Emoji.Pack do
|
|||
with :ok <- validate_not_empty([shortcode]),
|
||||
:ok <- remove_file(pack, shortcode),
|
||||
{:ok, updated_pack} <- pack |> delete_emoji(shortcode) |> save_pack() do
|
||||
Emoji.reload()
|
||||
Emoji.delete(shortcode)
|
||||
{:ok, updated_pack}
|
||||
end
|
||||
end
|
||||
|
|
@ -203,9 +206,8 @@ defmodule Pleroma.Emoji.Pack do
|
|||
{:ok, updated_pack} <-
|
||||
pack
|
||||
|> delete_emoji(shortcode)
|
||||
|> put_emoji(new_shortcode, new_filename)
|
||||
|> save_pack() do
|
||||
Emoji.reload()
|
||||
|> put_emoji(new_shortcode, new_filename) do
|
||||
if shortcode != new_shortcode, do: Emoji.delete(shortcode)
|
||||
{:ok, updated_pack}
|
||||
end
|
||||
end
|
||||
|
|
@ -455,7 +457,7 @@ defmodule Pleroma.Emoji.Pack do
|
|||
# if pack.json MD5 changes, the cache is not valid anymore
|
||||
%{hash: hash, pack_data: result},
|
||||
# Add a minute to cache time for every file in the pack
|
||||
ttl: overall_ttl
|
||||
expire: overall_ttl
|
||||
)
|
||||
|
||||
result
|
||||
|
|
@ -519,7 +521,17 @@ defmodule Pleroma.Emoji.Pack do
|
|||
|
||||
defp put_emoji(pack, shortcode, filename) do
|
||||
files = Map.put(pack.files, shortcode, filename)
|
||||
%{pack | files: files, files_count: length(Map.keys(files))}
|
||||
pack = %{pack | files: files, files_count: length(Map.keys(files))}
|
||||
|
||||
url_path = path_join_name_safe("/emoji/", pack.name) |> path_join_safe(filename)
|
||||
|
||||
with {:ok, pack} <- save_pack(pack) do
|
||||
{shortcode, url_path, tags(pack)}
|
||||
|> Emoji.build()
|
||||
|> Emoji.add_or_update()
|
||||
|
||||
{:ok, pack}
|
||||
end
|
||||
end
|
||||
|
||||
defp delete_emoji(pack, shortcode) do
|
||||
|
|
|
|||
|
|
@ -193,6 +193,12 @@ defmodule Pleroma.Filter do
|
|||
end
|
||||
end
|
||||
|
||||
defp escape_for_regex(plain_phrase) do
|
||||
# Escape all active characters:
|
||||
# .^$*+?()[{\|
|
||||
Regex.replace(~r/\.\^\$\*\+\?\(\)\[\{\\\|/, plain_phrase, fn m -> "\\" <> m end)
|
||||
end
|
||||
|
||||
@spec compose_regex(User.t() | [t()], format()) :: String.t() | Regex.t() | nil
|
||||
def compose_regex(user_or_filters, format \\ :postgres)
|
||||
|
||||
|
|
@ -207,7 +213,7 @@ defmodule Pleroma.Filter do
|
|||
def compose_regex([_ | _] = filters, format) do
|
||||
phrases =
|
||||
filters
|
||||
|> Enum.map(& &1.phrase)
|
||||
|> Enum.map(&escape_for_regex(&1.phrase))
|
||||
|> Enum.join("|")
|
||||
|
||||
case format do
|
||||
|
|
|
|||
|
|
@ -3,6 +3,7 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Formatter do
|
||||
alias PhoenixHTMLHelpers.Tag
|
||||
alias Pleroma.HTML
|
||||
alias Pleroma.User
|
||||
|
||||
|
|
@ -37,10 +38,10 @@ defmodule Pleroma.Formatter do
|
|||
nickname_text = get_nickname_text(nickname, opts)
|
||||
|
||||
:span
|
||||
|> Phoenix.HTML.Tag.content_tag(
|
||||
Phoenix.HTML.Tag.content_tag(
|
||||
|> Tag.content_tag(
|
||||
Tag.content_tag(
|
||||
:a,
|
||||
["@", Phoenix.HTML.Tag.content_tag(:span, nickname_text)],
|
||||
["@", Tag.content_tag(:span, nickname_text)],
|
||||
"data-user": id,
|
||||
class: "u-url mention",
|
||||
href: user_url,
|
||||
|
|
@ -68,7 +69,7 @@ defmodule Pleroma.Formatter do
|
|||
url = "#{Pleroma.Web.Endpoint.url()}/tag/#{tag}"
|
||||
|
||||
link =
|
||||
Phoenix.HTML.Tag.content_tag(:a, tag_text,
|
||||
Tag.content_tag(:a, tag_text,
|
||||
class: "hashtag",
|
||||
"data-tag": tag,
|
||||
href: url,
|
||||
|
|
|
|||
|
|
@ -14,6 +14,8 @@ defmodule Pleroma.Frontend do
|
|||
"build_dir" => opts[:build_dir]
|
||||
}
|
||||
|
||||
explicit_source = !!(opts[:file] || opts[:build_dir] || opts[:build_url])
|
||||
|
||||
frontend_info =
|
||||
[:frontends, :available, name]
|
||||
|> Config.get(%{})
|
||||
|
|
@ -28,6 +30,25 @@ defmodule Pleroma.Frontend do
|
|||
raise "No ref given or configured"
|
||||
end
|
||||
|
||||
if Map.get(frontend_info, "blind_trust", false) !== true do
|
||||
bugtracker = frontend_info["bugtracker"]
|
||||
|
||||
unless bugtracker || explicit_source do
|
||||
raise "Configured third-party frontend without a bugtracker; refusing install."
|
||||
end
|
||||
|
||||
bugtracker = bugtracker || "the external frontend developers"
|
||||
|
||||
Logger.warning("""
|
||||
!!!!!!!!
|
||||
You are installing a third-party frontend not vetted by the Akkoma team.
|
||||
THERE ARE NO GUARANTTES ABOUT SAFETY AND FUNCTIONALITY!
|
||||
Do NOT report problems to Akkoma, instead
|
||||
all bugs must be reported to #{bugtracker}
|
||||
!!!!!!!!
|
||||
""")
|
||||
end
|
||||
|
||||
dest = Path.join([dir(), name, ref])
|
||||
|
||||
label = "#{name} (#{ref})"
|
||||
|
|
@ -69,7 +90,7 @@ defmodule Pleroma.Frontend do
|
|||
end
|
||||
end
|
||||
|
||||
def unzip(zip, dest) do
|
||||
defp unzip(zip, dest) do
|
||||
File.rm_rf!(dest)
|
||||
File.mkdir_p!(dest)
|
||||
|
||||
|
|
|
|||
|
|
@ -61,12 +61,7 @@ defmodule Pleroma.HTTP do
|
|||
options = options |> Keyword.delete(:params)
|
||||
headers = maybe_add_user_agent(headers)
|
||||
|
||||
client =
|
||||
Tesla.client([
|
||||
Tesla.Middleware.FollowRedirects,
|
||||
Pleroma.HTTP.Middleware.HTTPSignature,
|
||||
Tesla.Middleware.Telemetry
|
||||
])
|
||||
client = build_client(method)
|
||||
|
||||
Logger.debug("Outbound: #{method} #{url}")
|
||||
|
||||
|
|
@ -84,6 +79,37 @@ defmodule Pleroma.HTTP do
|
|||
{:error, :fetch_error}
|
||||
end
|
||||
|
||||
defp build_client(method) do
|
||||
# Orders of middlewares matters!
|
||||
# We start construction with the middlewares _last_ to run
|
||||
# on outgoing requests (and first on incoming responses).
|
||||
# This allows using more efficient list prepending.
|
||||
middlewares = [Tesla.Middleware.Telemetry]
|
||||
|
||||
# XXX: just like the user-agent header below, our current mocks can't handle extra headers
|
||||
# and would break if we used the decompression middleware during tests.
|
||||
# The :test condition can and should be removed once mocks are fixed.
|
||||
#
|
||||
# HEAD responses won't contain a body to compress anyway and we sometimes use
|
||||
# HEAD requests to determine whether a remote resource is within size limits before fetching it.
|
||||
# If the server would send a compressed response however, Content-Length will be the size of
|
||||
# the _compressed_ response body skewing results.
|
||||
middlewares =
|
||||
if method != :head and @mix_env != :test do
|
||||
[Tesla.Middleware.DecompressResponse | middlewares]
|
||||
else
|
||||
middlewares
|
||||
end
|
||||
|
||||
middlewares = [
|
||||
Tesla.Middleware.FollowRedirects,
|
||||
Pleroma.HTTP.Middleware.HTTPSignature | middlewares
|
||||
]
|
||||
|
||||
Tesla.client(middlewares)
|
||||
end
|
||||
|
||||
# XXX: our test mocks are (too) strict about headers and cannot handle user-agent atm
|
||||
if @mix_env == :test do
|
||||
defp maybe_add_user_agent(headers) do
|
||||
with true <- Pleroma.Config.get([:http, :send_user_agent]) do
|
||||
|
|
|
|||
|
|
@ -29,13 +29,11 @@ defmodule Pleroma.HTTP.AdapterHelper do
|
|||
conn_max_idle_time: Config.get!([:http, :receive_timeout]),
|
||||
protocols: Config.get!([:http, :protocols]),
|
||||
conn_opts: [
|
||||
# Do NOT add cacerts here as this will cause issues for plain HTTP connections!
|
||||
# (when we upgrade our deps to Mint >= 1.6.0 we can also explicitly enable "inet4: true")
|
||||
transport_opts: [inet6: true],
|
||||
# up to at least version 0.20.0, Finch leaves server_push enabled by default for HTTP2,
|
||||
# but will actually raise an exception when receiving such a response. Tell servers we don't want it.
|
||||
# see: https://github.com/sneako/finch/issues/325
|
||||
client_settings: [enable_push: false]
|
||||
transport_opts: [
|
||||
inet6: true,
|
||||
inet4: true,
|
||||
cacerts: :public_key.cacerts_get()
|
||||
]
|
||||
]
|
||||
]
|
||||
}
|
||||
|
|
|
|||
|
|
@ -94,7 +94,7 @@ defmodule Pleroma.HTTP.Backoff do
|
|||
log_ratelimit(status, host, timestamp)
|
||||
ttl = Timex.diff(timestamp, DateTime.utc_now(), :seconds)
|
||||
# we will cache the host for 5 minutes
|
||||
@cachex.put(@backoff_cache, host, true, ttl: ttl)
|
||||
@cachex.put(@backoff_cache, host, true, expire: ttl)
|
||||
{:error, :ratelimit}
|
||||
|
||||
_ ->
|
||||
|
|
|
|||
|
|
@ -16,20 +16,6 @@ defmodule Pleroma.HTTP.Middleware.HTTPSignature do
|
|||
|
||||
(Note: the third argument holds static middleware options from client creation)
|
||||
"""
|
||||
|
||||
@doc """
|
||||
If logging raw Tesla.Env use this if you wish to redact signing key details
|
||||
"""
|
||||
def redact_keys(env) do
|
||||
case get_in(env, [:opts, :httpsig, :signing_key]) do
|
||||
nil -> env
|
||||
key -> put_in(env, [:opts, :httpsig, :signing_key], redact_key_details(key))
|
||||
end
|
||||
end
|
||||
|
||||
defp redact_key_details(%SigningKey{key_id: id}), do: id
|
||||
defp redact_key_details(key), do: key
|
||||
|
||||
@impl true
|
||||
def call(env, next, _options) do
|
||||
env = maybe_sign(env)
|
||||
|
|
|
|||
|
|
@ -78,7 +78,7 @@ defmodule Pleroma.Marker do
|
|||
|
||||
defp get_marker(user, timeline) do
|
||||
case Repo.find_resource(get_query(user, timeline)) do
|
||||
{:ok, marker} -> %__MODULE__{marker | user: user}
|
||||
{:ok, %__MODULE__{} = marker} -> %__MODULE__{marker | user: user}
|
||||
_ -> %__MODULE__{timeline: timeline, user_id: user.id}
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -54,6 +54,7 @@ defmodule Pleroma.MFA do
|
|||
end
|
||||
|
||||
@doc false
|
||||
@spec fetch_settings(User.t()) :: Settings.t()
|
||||
def fetch_settings(%User{} = user) do
|
||||
user.multi_factor_authentication_settings || %Settings{}
|
||||
end
|
||||
|
|
|
|||
|
|
@ -8,7 +8,8 @@ defmodule Pleroma.MFA.Changeset do
|
|||
alias Pleroma.User
|
||||
|
||||
def disable(%Ecto.Changeset{} = changeset, force \\ false) do
|
||||
settings =
|
||||
%Settings{} =
|
||||
settings =
|
||||
changeset
|
||||
|> Ecto.Changeset.apply_changes()
|
||||
|> MFA.fetch_settings()
|
||||
|
|
@ -22,18 +23,18 @@ defmodule Pleroma.MFA.Changeset do
|
|||
|
||||
def disable_totp(%User{multi_factor_authentication_settings: settings} = user) do
|
||||
user
|
||||
|> put_change(%Settings{settings | totp: %Settings.TOTP{}})
|
||||
|> put_change(%{settings | totp: %Settings.TOTP{}})
|
||||
end
|
||||
|
||||
def confirm_totp(%User{multi_factor_authentication_settings: settings} = user) do
|
||||
totp_settings = %Settings.TOTP{settings.totp | confirmed: true}
|
||||
totp_settings = %{settings.totp | confirmed: true}
|
||||
|
||||
user
|
||||
|> put_change(%Settings{settings | totp: totp_settings, enabled: true})
|
||||
|> put_change(%{settings | totp: totp_settings, enabled: true})
|
||||
end
|
||||
|
||||
def setup_totp(%User{} = user, attrs) do
|
||||
mfa_settings = MFA.fetch_settings(user)
|
||||
%Settings{} = mfa_settings = MFA.fetch_settings(user)
|
||||
|
||||
totp_settings =
|
||||
%Settings.TOTP{}
|
||||
|
|
@ -45,7 +46,7 @@ defmodule Pleroma.MFA.Changeset do
|
|||
|
||||
def cast_backup_codes(%User{} = user, codes) do
|
||||
user
|
||||
|> put_change(%Settings{
|
||||
|> put_change(%{
|
||||
user.multi_factor_authentication_settings
|
||||
| backup_codes: codes
|
||||
})
|
||||
|
|
|
|||
|
|
@ -15,7 +15,6 @@ defmodule Pleroma.Notification do
|
|||
alias Pleroma.Repo
|
||||
alias Pleroma.ThreadMute
|
||||
alias Pleroma.User
|
||||
alias Pleroma.Web.CommonAPI
|
||||
alias Pleroma.Web.CommonAPI.Utils
|
||||
alias Pleroma.Web.Push
|
||||
alias Pleroma.Web.Streamer
|
||||
|
|
@ -388,40 +387,46 @@ defmodule Pleroma.Notification do
|
|||
end
|
||||
end
|
||||
|
||||
@spec create_notifications(Activity.t(), keyword()) :: {:ok, [Notification.t()] | []}
|
||||
def create_notifications(activity, options \\ [])
|
||||
@doc """
|
||||
Create notifications for given Activity in database, but does NOT send them to streams and webpush.
|
||||
On success returns :ok triple with non-muted notifications in the second position and
|
||||
muted (i.e. likely not supposed to be pro-actively sent) notifications in the third position.
|
||||
"""
|
||||
@spec create_notifications(Activity.t()) ::
|
||||
{:ok, [Notification.t()] | [], [Notification.t()] | []}
|
||||
def create_notifications(activity)
|
||||
|
||||
def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity, options) do
|
||||
def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity) do
|
||||
object = Object.normalize(activity, fetch: false)
|
||||
|
||||
if object && object.data["type"] == "Answer" do
|
||||
{:ok, []}
|
||||
{:ok, [], []}
|
||||
else
|
||||
do_create_notifications(activity, options)
|
||||
do_create_notifications(activity)
|
||||
end
|
||||
end
|
||||
|
||||
def create_notifications(%Activity{data: %{"type" => type}} = activity, options)
|
||||
def create_notifications(%Activity{data: %{"type" => type}} = activity)
|
||||
when type in ["Follow", "Like", "Announce", "Move", "EmojiReact", "Flag", "Update"] do
|
||||
do_create_notifications(activity, options)
|
||||
do_create_notifications(activity)
|
||||
end
|
||||
|
||||
def create_notifications(_, _), do: {:ok, []}
|
||||
|
||||
defp do_create_notifications(%Activity{} = activity, options) do
|
||||
do_send = Keyword.get(options, :do_send, true)
|
||||
def create_notifications(_), do: {:ok, [], []}
|
||||
|
||||
defp do_create_notifications(%Activity{} = activity) do
|
||||
{enabled_receivers, disabled_receivers} = get_notified_from_activity(activity)
|
||||
potential_receivers = enabled_receivers ++ disabled_receivers
|
||||
|
||||
notifications =
|
||||
Enum.map(potential_receivers, fn user ->
|
||||
do_send = do_send && user in enabled_receivers
|
||||
create_notification(activity, user, do_send: do_send)
|
||||
end)
|
||||
notifications_active =
|
||||
enabled_receivers
|
||||
|> Enum.map(&create_notification(activity, &1))
|
||||
|> Enum.reject(&is_nil/1)
|
||||
|
||||
{:ok, notifications}
|
||||
notifications_silent =
|
||||
disabled_receivers
|
||||
|> Enum.map(&create_notification(activity, &1, seen: true))
|
||||
|> Enum.reject(&is_nil/1)
|
||||
|
||||
{:ok, notifications_active, notifications_silent}
|
||||
end
|
||||
|
||||
defp type_from_activity(%{data: %{"type" => type}} = activity) do
|
||||
|
|
@ -467,9 +472,9 @@ defmodule Pleroma.Notification do
|
|||
defp type_from_activity_object(%{data: %{"type" => "Create"}}), do: "mention"
|
||||
|
||||
# TODO move to sql, too.
|
||||
def create_notification(%Activity{} = activity, %User{} = user, opts \\ []) do
|
||||
do_send = Keyword.get(opts, :do_send, true)
|
||||
defp create_notification(%Activity{} = activity, %User{} = user, opts \\ []) do
|
||||
type = Keyword.get(opts, :type, type_from_activity(activity))
|
||||
seen = Keyword.get(opts, :seen, false)
|
||||
|
||||
unless skip?(activity, user, opts) do
|
||||
{:ok, %{notification: notification}} =
|
||||
|
|
@ -477,17 +482,12 @@ defmodule Pleroma.Notification do
|
|||
|> Multi.insert(:notification, %Notification{
|
||||
user_id: user.id,
|
||||
activity: activity,
|
||||
seen: mark_as_read?(activity, user),
|
||||
seen: seen,
|
||||
type: type
|
||||
})
|
||||
|> Marker.multi_set_last_read_id(user, "notifications")
|
||||
|> Repo.transaction()
|
||||
|
||||
if do_send do
|
||||
Streamer.stream(["user", "user:notification"], notification)
|
||||
Push.send(notification)
|
||||
end
|
||||
|
||||
notification
|
||||
end
|
||||
end
|
||||
|
|
@ -678,6 +678,12 @@ defmodule Pleroma.Notification do
|
|||
end
|
||||
end
|
||||
|
||||
def skip?(:internal, %Activity{} = activity, _user, _opts) do
|
||||
actor = activity.data["actor"]
|
||||
user = User.get_cached_by_ap_id(actor)
|
||||
User.is_internal_user?(user)
|
||||
end
|
||||
|
||||
def skip?(:invisible, %Activity{} = activity, _user, _opts) do
|
||||
actor = activity.data["actor"]
|
||||
user = User.get_cached_by_ap_id(actor)
|
||||
|
|
@ -740,11 +746,6 @@ defmodule Pleroma.Notification do
|
|||
|
||||
def skip?(_type, _activity, _user, _opts), do: false
|
||||
|
||||
def mark_as_read?(activity, target_user) do
|
||||
user = Activity.user_actor(activity)
|
||||
User.mutes_user?(target_user, user) || CommonAPI.thread_muted?(target_user, activity)
|
||||
end
|
||||
|
||||
def for_user_and_activity(user, activity) do
|
||||
from(n in __MODULE__,
|
||||
where: n.user_id == ^user.id,
|
||||
|
|
@ -764,4 +765,12 @@ defmodule Pleroma.Notification do
|
|||
)
|
||||
|> Repo.update_all(set: [seen: true])
|
||||
end
|
||||
|
||||
@spec send(list(Notification.t())) :: :ok
|
||||
def send(notifications) do
|
||||
Enum.each(notifications, fn notification ->
|
||||
Streamer.stream(["user", "user:notification"], notification)
|
||||
Push.send(notification)
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -144,7 +144,7 @@ defmodule Pleroma.Object do
|
|||
Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}")
|
||||
end
|
||||
|
||||
def normalize(_, options \\ [fetch: false, id_only: false])
|
||||
def normalize(_, options \\ [fetch: false])
|
||||
|
||||
# If we pass an Activity to Object.normalize(), we can try to use the preloaded object.
|
||||
# Use this whenever possible, especially when walking graphs in an O(N) loop!
|
||||
|
|
@ -173,9 +173,6 @@ defmodule Pleroma.Object do
|
|||
|
||||
def normalize(ap_id, options) when is_binary(ap_id) do
|
||||
cond do
|
||||
Keyword.get(options, :id_only) ->
|
||||
ap_id
|
||||
|
||||
Keyword.get(options, :fetch) ->
|
||||
case Fetcher.fetch_object_from_id(ap_id, options) do
|
||||
{:ok, object} -> object
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ defmodule Pleroma.Object.Fetcher do
|
|||
alias Pleroma.Object.Containment
|
||||
alias Pleroma.Repo
|
||||
alias Pleroma.Web.ActivityPub.InternalFetchActor
|
||||
alias Pleroma.Web.ActivityPub.MRF
|
||||
alias Pleroma.Web.ActivityPub.ObjectValidator
|
||||
alias Pleroma.Web.ActivityPub.Transmogrifier
|
||||
alias Pleroma.Web.Federator
|
||||
|
|
@ -138,10 +139,7 @@ defmodule Pleroma.Object.Fetcher do
|
|||
{:valid_uri_scheme, true} <-
|
||||
{:valid_uri_scheme, uri.scheme == "http" or uri.scheme == "https"},
|
||||
# If we have instance restrictions, apply them here to prevent fetching from unwanted instances
|
||||
{:mrf_reject_check, {:ok, nil}} <-
|
||||
{:mrf_reject_check, Pleroma.Web.ActivityPub.MRF.SimplePolicy.check_reject(uri)},
|
||||
{:mrf_accept_check, {:ok, _}} <-
|
||||
{:mrf_accept_check, Pleroma.Web.ActivityPub.MRF.SimplePolicy.check_accept(uri)},
|
||||
{_, {:ok, _}} <- {:mrf_check, maybe_restrict_uri_mrf(uri)},
|
||||
{_, nil} <- {:fetch_object, Object.get_cached_by_ap_id(id)},
|
||||
{_, true} <- {:allowed_depth, Federator.allowed_thread_distance?(options[:depth])},
|
||||
{_, {:ok, data}} <- {:fetch, fetch_and_contain_remote_object_from_id(id)},
|
||||
|
|
@ -161,11 +159,7 @@ defmodule Pleroma.Object.Fetcher do
|
|||
log_fetch_error(id, e)
|
||||
{:error, :invalid_uri_scheme}
|
||||
|
||||
{:mrf_reject_check, _} = e ->
|
||||
log_fetch_error(id, e)
|
||||
{:reject, :mrf}
|
||||
|
||||
{:mrf_accept_check, _} = e ->
|
||||
{:mrf_check, _} = e ->
|
||||
log_fetch_error(id, e)
|
||||
{:reject, :mrf}
|
||||
|
||||
|
|
@ -213,6 +207,17 @@ defmodule Pleroma.Object.Fetcher do
|
|||
Logger.error("Object rejected while fetching #{id} #{inspect(error)}")
|
||||
end
|
||||
|
||||
defp maybe_restrict_uri_mrf(uri) do
|
||||
with {:enabled, true} <- {:enabled, MRF.SimplePolicy in MRF.get_policies()},
|
||||
{:ok, _} <- MRF.SimplePolicy.check_reject(uri),
|
||||
{:ok, _} <- MRF.SimplePolicy.check_accept(uri) do
|
||||
{:ok, nil}
|
||||
else
|
||||
{:enabled, false} -> {:ok, nil}
|
||||
{:reject, reason} -> {:reject, reason}
|
||||
end
|
||||
end
|
||||
|
||||
defp prepare_activity_params(data) do
|
||||
%{
|
||||
"type" => "Create",
|
||||
|
|
@ -298,10 +303,7 @@ defmodule Pleroma.Object.Fetcher do
|
|||
|
||||
with {:valid_uri_scheme, true} <- {:valid_uri_scheme, String.starts_with?(id, "http")},
|
||||
%URI{} = uri <- URI.parse(id),
|
||||
{:mrf_reject_check, {:ok, nil}} <-
|
||||
{:mrf_reject_check, Pleroma.Web.ActivityPub.MRF.SimplePolicy.check_reject(uri)},
|
||||
{:mrf_accept_check, {:ok, _}} <-
|
||||
{:mrf_accept_check, Pleroma.Web.ActivityPub.MRF.SimplePolicy.check_accept(uri)},
|
||||
{_, {:ok, _}} <- {:mrf_check, maybe_restrict_uri_mrf(uri)},
|
||||
{:local_fetch, :ok} <- {:local_fetch, Containment.contain_local_fetch(id)},
|
||||
{:ok, final_id, body} <- get_object(id),
|
||||
# a canonical ID shouldn't be a redirect
|
||||
|
|
@ -422,7 +424,7 @@ defmodule Pleroma.Object.Fetcher do
|
|||
|
||||
# connection/protocol-related error
|
||||
{:ok, %Tesla.Env{} = env} ->
|
||||
{:error, {:http_error, :connect, Pleroma.HTTP.Middleware.HTTPSignature.redact_keys(env)}}
|
||||
{:error, {:http_error, :connect, env}}
|
||||
|
||||
{:error, e} ->
|
||||
{:error, e}
|
||||
|
|
|
|||
|
|
@ -97,6 +97,9 @@ defmodule Pleroma.Pagination do
|
|||
defp do_unwrap([], acc), do: Enum.reverse(acc)
|
||||
|
||||
defp cast_params(params) do
|
||||
# Ecto doesn’t support atom types
|
||||
pfield = params[:pagination_field] || :id
|
||||
|
||||
param_types = %{
|
||||
min_id: params[:id_type] || :string,
|
||||
since_id: params[:id_type] || :string,
|
||||
|
|
@ -108,54 +111,54 @@ defmodule Pleroma.Pagination do
|
|||
order_asc: :boolean
|
||||
}
|
||||
|
||||
params = Map.delete(params, :id_type)
|
||||
params = Map.drop(params, [:id_type, :pagination_field])
|
||||
changeset = cast({%{}, param_types}, params, Map.keys(param_types))
|
||||
changeset.changes
|
||||
Map.put(changeset.changes, :pagination_field, pfield)
|
||||
end
|
||||
|
||||
defp order_statement(query, table_binding, :asc) do
|
||||
defp order_statement(query, table_binding, :asc, %{pagination_field: fname}) do
|
||||
order_by(
|
||||
query,
|
||||
[{u, table_position(query, table_binding)}],
|
||||
fragment("? asc nulls last", u.id)
|
||||
fragment("? asc nulls last", field(u, ^fname))
|
||||
)
|
||||
end
|
||||
|
||||
defp order_statement(query, table_binding, :desc) do
|
||||
defp order_statement(query, table_binding, :desc, %{pagination_field: fname}) do
|
||||
order_by(
|
||||
query,
|
||||
[{u, table_position(query, table_binding)}],
|
||||
fragment("? desc nulls last", u.id)
|
||||
fragment("? desc nulls last", field(u, ^fname))
|
||||
)
|
||||
end
|
||||
|
||||
defp restrict(query, :min_id, %{min_id: min_id}, table_binding) do
|
||||
where(query, [{q, table_position(query, table_binding)}], q.id > ^min_id)
|
||||
defp restrict(query, :min_id, %{min_id: min_id, pagination_field: fname}, table_binding) do
|
||||
where(query, [{q, table_position(query, table_binding)}], field(q, ^fname) > ^min_id)
|
||||
end
|
||||
|
||||
defp restrict(query, :since_id, %{since_id: since_id}, table_binding) do
|
||||
where(query, [{q, table_position(query, table_binding)}], q.id > ^since_id)
|
||||
defp restrict(query, :since_id, %{since_id: since_id, pagination_field: fname}, table_binding) do
|
||||
where(query, [{q, table_position(query, table_binding)}], field(q, ^fname) > ^since_id)
|
||||
end
|
||||
|
||||
defp restrict(query, :max_id, %{max_id: max_id}, table_binding) do
|
||||
where(query, [{q, table_position(query, table_binding)}], q.id < ^max_id)
|
||||
defp restrict(query, :max_id, %{max_id: max_id, pagination_field: fname}, table_binding) do
|
||||
where(query, [{q, table_position(query, table_binding)}], field(q, ^fname) < ^max_id)
|
||||
end
|
||||
|
||||
defp restrict(query, :order, %{skip_order: true}, _), do: query
|
||||
|
||||
defp restrict(%{order_bys: [_ | _]} = query, :order, %{skip_extra_order: true}, _), do: query
|
||||
|
||||
defp restrict(query, :order, %{min_id: _}, table_binding) do
|
||||
order_statement(query, table_binding, :asc)
|
||||
defp restrict(query, :order, %{min_id: _} = options, table_binding) do
|
||||
order_statement(query, table_binding, :asc, options)
|
||||
end
|
||||
|
||||
defp restrict(query, :order, %{max_id: _}, table_binding) do
|
||||
order_statement(query, table_binding, :desc)
|
||||
defp restrict(query, :order, %{max_id: _} = options, table_binding) do
|
||||
order_statement(query, table_binding, :desc, options)
|
||||
end
|
||||
|
||||
defp restrict(query, :order, options, table_binding) do
|
||||
dir = if options[:order_asc], do: :asc, else: :desc
|
||||
order_statement(query, table_binding, dir)
|
||||
order_statement(query, table_binding, dir, options)
|
||||
end
|
||||
|
||||
defp restrict(query, :offset, %{offset: offset}, _table_binding) do
|
||||
|
|
|
|||
|
|
@ -109,7 +109,9 @@ defmodule Pleroma.ReverseProxy do
|
|||
with {:ok, nil} <- @cachex.get(:failed_proxy_url_cache, url),
|
||||
{:ok, status, headers, body} <- request(method, url, req_headers, client_opts),
|
||||
:ok <-
|
||||
header_length_constraint(
|
||||
check_length_constraint(
|
||||
method,
|
||||
body,
|
||||
headers,
|
||||
Keyword.get(opts, :max_body_length, @max_body_length)
|
||||
) do
|
||||
|
|
@ -342,7 +344,9 @@ defmodule Pleroma.ReverseProxy do
|
|||
List.keystore(headers, "content-security-policy", 0, {"content-security-policy", "sandbox"})
|
||||
end
|
||||
|
||||
defp header_length_constraint(headers, limit) when is_integer(limit) and limit > 0 do
|
||||
defp check_length_constraint(_, _, _, limit) when not is_integer(limit) or limit <= 0, do: :ok
|
||||
|
||||
defp check_length_constraint(:head, _, headers, limit) do
|
||||
with {_, size} <- List.keyfind(headers, "content-length", 0),
|
||||
{size, _} <- Integer.parse(size),
|
||||
true <- size <= limit do
|
||||
|
|
@ -356,7 +360,15 @@ defmodule Pleroma.ReverseProxy do
|
|||
end
|
||||
end
|
||||
|
||||
defp header_length_constraint(_, _), do: :ok
|
||||
defp check_length_constraint(_, body, _, limit) when is_binary(body) do
|
||||
if byte_size(body) <= limit do
|
||||
:ok
|
||||
else
|
||||
{:error, :body_too_large}
|
||||
end
|
||||
end
|
||||
|
||||
defp check_length_constraint(_, _, _, _), do: :ok
|
||||
|
||||
defp track_failed_url(url, error, opts) do
|
||||
ttl =
|
||||
|
|
@ -366,6 +378,6 @@ defmodule Pleroma.ReverseProxy do
|
|||
nil
|
||||
end
|
||||
|
||||
@cachex.put(:failed_proxy_url_cache, url, true, ttl: ttl)
|
||||
@cachex.put(:failed_proxy_url_cache, url, true, expire: ttl)
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -7,7 +7,6 @@ defmodule Pleroma.Stats do
|
|||
|
||||
import Ecto.Query
|
||||
|
||||
alias Pleroma.CounterCache
|
||||
alias Pleroma.Repo
|
||||
alias Pleroma.User
|
||||
alias Pleroma.Instances.Instance
|
||||
|
|
@ -107,15 +106,6 @@ defmodule Pleroma.Stats do
|
|||
}
|
||||
end
|
||||
|
||||
@spec get_status_visibility_count(String.t() | nil) :: map()
|
||||
def get_status_visibility_count(instance \\ nil) do
|
||||
if is_nil(instance) do
|
||||
CounterCache.get_sum()
|
||||
else
|
||||
CounterCache.get_by_instance(instance)
|
||||
end
|
||||
end
|
||||
|
||||
@impl true
|
||||
def handle_continue(:calculate_stats, _) do
|
||||
stats = calculate_stat_data()
|
||||
|
|
|
|||
|
|
@ -82,7 +82,7 @@ defmodule Pleroma.Upload do
|
|||
def store(upload, opts \\ []) do
|
||||
opts = get_opts(opts)
|
||||
|
||||
with {:ok, upload} <- prepare_upload(upload, opts),
|
||||
with {:ok, %__MODULE__{} = upload} <- prepare_upload(upload, opts),
|
||||
upload = %__MODULE__{upload | path: upload.path || "#{upload.id}/#{upload.name}"},
|
||||
{:ok, upload} <- Pleroma.Upload.Filter.filter(opts.filters, upload),
|
||||
description = Map.get(upload, :description) || "",
|
||||
|
|
|
|||
|
|
@ -3,7 +3,8 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Uploaders.Uploader do
|
||||
import Pleroma.Web.Gettext
|
||||
use Gettext,
|
||||
backend: Pleroma.Web.Gettext
|
||||
|
||||
@mix_env Mix.env()
|
||||
|
||||
|
|
|
|||
|
|
@ -31,6 +31,7 @@ defmodule Pleroma.User do
|
|||
alias Pleroma.Registration
|
||||
alias Pleroma.Repo
|
||||
alias Pleroma.User
|
||||
alias Pleroma.User.Fetcher
|
||||
alias Pleroma.UserRelationship
|
||||
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||
alias Pleroma.Web.ActivityPub.Builder
|
||||
|
|
@ -91,6 +92,9 @@ defmodule Pleroma.User do
|
|||
|
||||
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
|
||||
|
||||
# hide sensitive data from logs
|
||||
@derive {Inspect, except: [:password, :password_hash, :email]}
|
||||
|
||||
schema "users" do
|
||||
field(:bio, :string, default: "")
|
||||
field(:raw_bio, :string)
|
||||
|
|
@ -270,13 +274,13 @@ defmodule Pleroma.User do
|
|||
|
||||
def cached_blocked_users_ap_ids(user) do
|
||||
@cachex.fetch!(:user_cache, "blocked_users_ap_ids:#{user.ap_id}", fn _ ->
|
||||
blocked_users_ap_ids(user)
|
||||
{:commit, blocked_users_ap_ids(user)}
|
||||
end)
|
||||
end
|
||||
|
||||
def cached_muted_users_ap_ids(user) do
|
||||
@cachex.fetch!(:user_cache, "muted_users_ap_ids:#{user.ap_id}", fn _ ->
|
||||
muted_users_ap_ids(user)
|
||||
{:commit, muted_users_ap_ids(user)}
|
||||
end)
|
||||
end
|
||||
|
||||
|
|
@ -831,7 +835,7 @@ defmodule Pleroma.User do
|
|||
candidates = Config.get([:instance, :autofollowed_nicknames])
|
||||
|
||||
autofollowed_users =
|
||||
User.Query.build(%{nickname: candidates, local: true, is_active: true})
|
||||
User.Query.build(%{nickname: candidates, local: true, deactivated: false})
|
||||
|> Repo.all()
|
||||
|
||||
follow_all(user, autofollowed_users)
|
||||
|
|
@ -1100,16 +1104,6 @@ defmodule Pleroma.User do
|
|||
|> Repo.all()
|
||||
end
|
||||
|
||||
# This is mostly an SPC migration fix. This guesses the user nickname by taking the last part
|
||||
# of the ap_id and the domain and tries to get that user
|
||||
def get_by_guessed_nickname(ap_id) do
|
||||
domain = URI.parse(ap_id).host
|
||||
name = List.last(String.split(ap_id, "/"))
|
||||
nickname = "#{name}@#{domain}"
|
||||
|
||||
get_cached_by_nickname(nickname)
|
||||
end
|
||||
|
||||
@spec set_cache(
|
||||
{:error, any}
|
||||
| {:ok, User.t()}
|
||||
|
|
@ -1162,7 +1156,7 @@ defmodule Pleroma.User do
|
|||
@spec get_cached_user_friends_ap_ids(User.t()) :: [String.t()]
|
||||
def get_cached_user_friends_ap_ids(user) do
|
||||
@cachex.fetch!(:user_cache, "friends_ap_ids:#{user.ap_id}", fn _ ->
|
||||
get_user_friends_ap_ids(user)
|
||||
{:commit, get_user_friends_ap_ids(user)}
|
||||
end)
|
||||
end
|
||||
|
||||
|
|
@ -1208,14 +1202,18 @@ defmodule Pleroma.User do
|
|||
end
|
||||
|
||||
def get_cached_by_nickname(nickname) do
|
||||
key = "nickname:#{nickname}"
|
||||
if String.valid?(nickname) do
|
||||
key = "nickname:#{nickname}"
|
||||
|
||||
@cachex.fetch!(:user_cache, key, fn _ ->
|
||||
case get_or_fetch_by_nickname(nickname) do
|
||||
{:ok, user} -> {:commit, user}
|
||||
{:error, _error} -> {:ignore, nil}
|
||||
end
|
||||
end)
|
||||
@cachex.fetch!(:user_cache, key, fn _ ->
|
||||
case get_or_fetch_by_nickname(nickname) do
|
||||
{:ok, user} -> {:commit, user}
|
||||
{:error, _error} -> {:ignore, nil}
|
||||
end
|
||||
end)
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
def get_cached_by_nickname_or_id(nickname_or_id, opts \\ []) do
|
||||
|
|
@ -1238,10 +1236,14 @@ defmodule Pleroma.User do
|
|||
|
||||
@spec get_by_nickname(String.t()) :: User.t() | nil
|
||||
def get_by_nickname(nickname) do
|
||||
Repo.get_by(User, nickname: nickname) ||
|
||||
if Regex.match?(~r(@#{Pleroma.Web.Endpoint.host()})i, nickname) do
|
||||
Repo.get_by(User, nickname: local_nickname(nickname))
|
||||
end
|
||||
if String.valid?(nickname) do
|
||||
Repo.get_by(User, nickname: nickname) ||
|
||||
if Regex.match?(~r(@#{Pleroma.Web.Endpoint.host()})i, nickname) do
|
||||
Repo.get_by(User, nickname: local_nickname(nickname))
|
||||
end
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
def get_by_email(email), do: Repo.get_by(User, email: email)
|
||||
|
|
@ -1250,7 +1252,7 @@ defmodule Pleroma.User do
|
|||
get_by_nickname(nickname_or_email) || get_by_email(nickname_or_email)
|
||||
end
|
||||
|
||||
def fetch_by_nickname(nickname), do: ActivityPub.make_user_from_nickname(nickname)
|
||||
def fetch_by_nickname(nickname), do: Fetcher.make_user_from_nickname(nickname)
|
||||
|
||||
def get_or_fetch_by_nickname(nickname) do
|
||||
with %User{} = user <- get_by_nickname(nickname) do
|
||||
|
|
@ -1266,72 +1268,54 @@ defmodule Pleroma.User do
|
|||
end
|
||||
end
|
||||
|
||||
@spec get_followers_query(User.t(), pos_integer() | nil) :: Ecto.Query.t()
|
||||
def get_followers_query(%User{} = user, nil) do
|
||||
User.Query.build(%{followers: user, is_active: true})
|
||||
end
|
||||
|
||||
def get_followers_query(%User{} = user, page) do
|
||||
user
|
||||
|> get_followers_query(nil)
|
||||
|> User.Query.paginate(page, 20)
|
||||
end
|
||||
|
||||
@spec get_followers_query(User.t()) :: Ecto.Query.t()
|
||||
def get_followers_query(%User{} = user), do: get_followers_query(user, nil)
|
||||
def get_followers_query(%User{} = user) do
|
||||
User.Query.build(%{followers: user, deactivated: false})
|
||||
end
|
||||
|
||||
@spec get_followers(User.t(), pos_integer() | nil) :: {:ok, list(User.t())}
|
||||
def get_followers(%User{} = user, page \\ nil) do
|
||||
@spec get_followers(User.t()) :: {:ok, list(User.t())}
|
||||
def get_followers(%User{} = user) do
|
||||
user
|
||||
|> get_followers_query(page)
|
||||
|> get_followers_query()
|
||||
|> Repo.all()
|
||||
end
|
||||
|
||||
@spec get_external_followers(User.t(), pos_integer() | nil) :: {:ok, list(User.t())}
|
||||
def get_external_followers(%User{} = user, page \\ nil) do
|
||||
@spec get_external_followers(User.t()) :: {:ok, list(User.t())}
|
||||
def get_external_followers(%User{} = user) do
|
||||
user
|
||||
|> get_followers_query(page)
|
||||
|> get_followers_query()
|
||||
|> User.Query.build(%{external: true})
|
||||
|> Repo.all()
|
||||
end
|
||||
|
||||
def get_followers_ids(%User{} = user, page \\ nil) do
|
||||
def get_followers_ids(%User{} = user) do
|
||||
user
|
||||
|> get_followers_query(page)
|
||||
|> get_followers_query()
|
||||
|> select([u], u.id)
|
||||
|> Repo.all()
|
||||
end
|
||||
|
||||
@spec get_friends_query(User.t(), pos_integer() | nil) :: Ecto.Query.t()
|
||||
def get_friends_query(%User{} = user, nil) do
|
||||
@spec get_friends_query(User.t()) :: Ecto.Query.t()
|
||||
def get_friends_query(%User{} = user) do
|
||||
User.Query.build(%{friends: user, deactivated: false})
|
||||
end
|
||||
|
||||
def get_friends_query(%User{} = user, page) do
|
||||
def get_friends(%User{} = user) do
|
||||
user
|
||||
|> get_friends_query(nil)
|
||||
|> User.Query.paginate(page, 20)
|
||||
end
|
||||
|
||||
@spec get_friends_query(User.t()) :: Ecto.Query.t()
|
||||
def get_friends_query(%User{} = user), do: get_friends_query(user, nil)
|
||||
|
||||
def get_friends(%User{} = user, page \\ nil) do
|
||||
user
|
||||
|> get_friends_query(page)
|
||||
|> get_friends_query()
|
||||
|> Repo.all()
|
||||
end
|
||||
|
||||
def get_friends_ap_ids(%User{} = user) do
|
||||
user
|
||||
|> get_friends_query(nil)
|
||||
|> get_friends_query()
|
||||
|> select([u], u.ap_id)
|
||||
|> Repo.all()
|
||||
end
|
||||
|
||||
def get_friends_ids(%User{} = user, page \\ nil) do
|
||||
def get_friends_ids(%User{} = user) do
|
||||
user
|
||||
|> get_friends_query(page)
|
||||
|> get_friends_query()
|
||||
|> select([u], u.id)
|
||||
|> Repo.all()
|
||||
end
|
||||
|
|
@ -1399,7 +1383,7 @@ defmodule Pleroma.User do
|
|||
end
|
||||
|
||||
def fetch_follow_information(user) do
|
||||
with {:ok, info} <- ActivityPub.fetch_follow_information_for_user(user) do
|
||||
with {:ok, info} <- Fetcher.fetch_follow_information_for_user(user) do
|
||||
user
|
||||
|> follow_information_changeset(info)
|
||||
|> update_and_set_cache()
|
||||
|
|
@ -1451,7 +1435,7 @@ defmodule Pleroma.User do
|
|||
@spec get_users_from_set([String.t()], keyword()) :: [User.t()]
|
||||
def get_users_from_set(ap_ids, opts \\ []) do
|
||||
local_only = Keyword.get(opts, :local_only, true)
|
||||
criteria = %{ap_id: ap_ids, is_active: true}
|
||||
criteria = %{ap_id: ap_ids, deactivated: false}
|
||||
criteria = if local_only, do: Map.put(criteria, :local, true), else: criteria
|
||||
|
||||
User.Query.build(criteria)
|
||||
|
|
@ -1462,7 +1446,7 @@ defmodule Pleroma.User do
|
|||
def get_recipients_from_activity(%Activity{recipients: to, actor: actor}) do
|
||||
to = [actor | to]
|
||||
|
||||
query = User.Query.build(%{recipients_from_activity: to, local: true, is_active: true})
|
||||
query = User.Query.build(%{recipients_from_activity: to, local: true, deactivated: false})
|
||||
|
||||
query
|
||||
|> Repo.all()
|
||||
|
|
@ -1472,17 +1456,17 @@ defmodule Pleroma.User do
|
|||
{:ok, list(UserRelationship.t())} | {:error, String.t()}
|
||||
def mute(%User{} = muter, %User{} = mutee, params \\ %{}) do
|
||||
notifications? = Map.get(params, :notifications, true)
|
||||
expires_in = Map.get(params, :expires_in, 0)
|
||||
duration = Map.get(params, :duration, 0)
|
||||
|
||||
with {:ok, user_mute} <- UserRelationship.create_mute(muter, mutee),
|
||||
{:ok, user_notification_mute} <-
|
||||
(notifications? && UserRelationship.create_notification_mute(muter, mutee)) ||
|
||||
{:ok, nil} do
|
||||
if expires_in > 0 do
|
||||
if duration > 0 do
|
||||
Pleroma.Workers.MuteExpireWorker.enqueue(
|
||||
"unmute_user",
|
||||
%{"muter_id" => muter.id, "mutee_id" => mutee.id},
|
||||
schedule_in: expires_in
|
||||
schedule_in: duration
|
||||
)
|
||||
end
|
||||
|
||||
|
|
@ -1974,12 +1958,16 @@ defmodule Pleroma.User do
|
|||
|
||||
def html_filter_policy(_), do: Config.get([:markup, :scrub_policy])
|
||||
|
||||
def fetch_by_ap_id(ap_id), do: ActivityPub.make_user_from_ap_id(ap_id)
|
||||
def fetch_by_ap_id(ap_id), do: Fetcher.make_user_from_ap_id(ap_id)
|
||||
|
||||
defp refetch_or_fetch_by_ap_id(%User{} = user, _), do: Fetcher.refetch_user(user)
|
||||
defp refetch_or_fetch_by_ap_id(_, ap_id), do: Fetcher.make_user_from_ap_id(ap_id)
|
||||
|
||||
def get_or_fetch_by_ap_id(ap_id, options \\ []) do
|
||||
cached_user = get_cached_by_ap_id(ap_id)
|
||||
|
||||
maybe_fetched_user = needs_update?(cached_user, options) && fetch_by_ap_id(ap_id)
|
||||
maybe_fetched_user =
|
||||
needs_update?(cached_user, options) && refetch_or_fetch_by_ap_id(cached_user, ap_id)
|
||||
|
||||
case {cached_user, maybe_fetched_user} do
|
||||
{_, {:ok, %User{} = user}} ->
|
||||
|
|
@ -2067,7 +2055,7 @@ defmodule Pleroma.User do
|
|||
|> set_cache()
|
||||
end
|
||||
|
||||
defdelegate public_key(user), to: SigningKey
|
||||
defdelegate public_key(user), to: SigningKey, as: :public_key_pem
|
||||
|
||||
@doc "Gets or fetch a user by uri or nickname."
|
||||
@spec get_or_fetch(String.t()) :: {:ok, User.t()} | {:error, String.t()}
|
||||
|
|
@ -2200,7 +2188,7 @@ defmodule Pleroma.User do
|
|||
|
||||
@spec all_superusers() :: [User.t()]
|
||||
def all_superusers do
|
||||
User.Query.build(%{super_users: true, local: true, is_active: true})
|
||||
User.Query.build(%{super_users: true, local: true, deactivated: false})
|
||||
|> Repo.all()
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -7,7 +7,9 @@ defmodule Pleroma.User.Backup do
|
|||
|
||||
import Ecto.Changeset
|
||||
import Ecto.Query
|
||||
import Pleroma.Web.Gettext
|
||||
|
||||
use Gettext,
|
||||
backend: Pleroma.Web.Gettext
|
||||
|
||||
require Pleroma.Constants
|
||||
|
||||
|
|
|
|||
443
lib/pleroma/user/fetcher.ex
Normal file
443
lib/pleroma/user/fetcher.ex
Normal file
|
|
@ -0,0 +1,443 @@
|
|||
# Pleroma: A lightweight social networking server
|
||||
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
|
||||
# Copyright © 2026 Akkoma Authors <https://akkoma.dev/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.User.Fetcher do
|
||||
alias Akkoma.Collections
|
||||
alias Pleroma.Config
|
||||
alias Pleroma.Object
|
||||
alias Pleroma.Object.Fetcher, as: APFetcher
|
||||
alias Pleroma.Repo
|
||||
alias Pleroma.User
|
||||
alias Pleroma.Web.ActivityPub.MRF
|
||||
alias Pleroma.Web.ActivityPub.ObjectValidators.UserValidator
|
||||
alias Pleroma.Web.ActivityPub.Transmogrifier
|
||||
alias Pleroma.Web.WebFinger
|
||||
|
||||
import Pleroma.Web.ActivityPub.Utils
|
||||
|
||||
require Logger
|
||||
|
||||
@spec get_actor_url(any()) :: binary() | nil
|
||||
defp get_actor_url(url) when is_binary(url), do: url
|
||||
defp get_actor_url(%{"href" => href}) when is_binary(href), do: href
|
||||
|
||||
defp get_actor_url(url) when is_list(url) do
|
||||
url
|
||||
|> List.first()
|
||||
|> get_actor_url()
|
||||
end
|
||||
|
||||
defp get_actor_url(_url), do: nil
|
||||
|
||||
defp normalize_image(%{"url" => url}) do
|
||||
%{
|
||||
"type" => "Image",
|
||||
"url" => [%{"href" => url}]
|
||||
}
|
||||
end
|
||||
|
||||
defp normalize_image(urls) when is_list(urls), do: urls |> List.first() |> normalize_image()
|
||||
defp normalize_image(_), do: nil
|
||||
|
||||
defp normalize_also_known_as(aka) when is_list(aka), do: aka
|
||||
defp normalize_also_known_as(aka) when is_binary(aka), do: [aka]
|
||||
defp normalize_also_known_as(nil), do: []
|
||||
|
||||
defp normalize_attachment(%{} = attachment), do: [attachment]
|
||||
defp normalize_attachment(attachment) when is_list(attachment), do: attachment
|
||||
defp normalize_attachment(_), do: []
|
||||
|
||||
defp maybe_make_public_key_object(data) do
|
||||
if is_map(data["publicKey"]) && is_binary(data["publicKey"]["publicKeyPem"]) do
|
||||
%{
|
||||
public_key: data["publicKey"]["publicKeyPem"],
|
||||
key_id: data["publicKey"]["id"]
|
||||
}
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
defp try_fallback_nick(%{"id" => ap_id, "preferredUsername" => name})
|
||||
when is_binary(name) and is_binary(ap_id) do
|
||||
with true <- name != "",
|
||||
domain when domain != nil and domain != "" <- URI.parse(ap_id).host do
|
||||
"#{name}@#{domain}"
|
||||
else
|
||||
_ -> nil
|
||||
end
|
||||
end
|
||||
|
||||
defp try_fallback_nick(_), do: nil
|
||||
|
||||
defp object_to_user_data(data, verified_nick) do
|
||||
fields =
|
||||
data
|
||||
|> Map.get("attachment", [])
|
||||
|> normalize_attachment()
|
||||
|> Enum.filter(fn
|
||||
%{"type" => t} -> t == "PropertyValue"
|
||||
_ -> false
|
||||
end)
|
||||
|> Enum.map(fn fields -> Map.take(fields, ["name", "value"]) end)
|
||||
|
||||
emojis =
|
||||
data
|
||||
|> Map.get("tag", [])
|
||||
|> Enum.filter(fn
|
||||
%{"type" => "Emoji"} -> true
|
||||
_ -> false
|
||||
end)
|
||||
|> Map.new(fn %{"icon" => %{"url" => url}, "name" => name} ->
|
||||
{String.trim(name, ":"), url}
|
||||
end)
|
||||
|
||||
is_locked = data["manuallyApprovesFollowers"] || false
|
||||
data = Transmogrifier.maybe_fix_user_object(data)
|
||||
is_discoverable = data["discoverable"] || false
|
||||
invisible = data["invisible"] || false
|
||||
actor_type = data["type"] || "Person"
|
||||
|
||||
{featured_address, pinned_objects} =
|
||||
case process_featured_collection(data["featured"]) do
|
||||
{:ok, featured_address, pinned_objects} -> {featured_address, pinned_objects}
|
||||
_ -> {nil, %{}}
|
||||
end
|
||||
|
||||
# first, check that the owner is correct
|
||||
signing_key =
|
||||
if data["id"] !== data["publicKey"]["owner"] do
|
||||
Logger.error(
|
||||
"Owner of the public key is not the same as the actor - not saving the public key."
|
||||
)
|
||||
|
||||
nil
|
||||
else
|
||||
maybe_make_public_key_object(data)
|
||||
end
|
||||
|
||||
shared_inbox =
|
||||
if is_map(data["endpoints"]) && is_binary(data["endpoints"]["sharedInbox"]) do
|
||||
data["endpoints"]["sharedInbox"]
|
||||
end
|
||||
|
||||
# can still be nil if no name was indicated in AP data
|
||||
nickname = verified_nick || try_fallback_nick(data)
|
||||
|
||||
# also_known_as must be a URL
|
||||
also_known_as =
|
||||
data
|
||||
|> Map.get("alsoKnownAs", [])
|
||||
|> normalize_also_known_as()
|
||||
|> Enum.filter(fn url ->
|
||||
case URI.parse(url) do
|
||||
%URI{scheme: "http"} -> true
|
||||
%URI{scheme: "https"} -> true
|
||||
_ -> false
|
||||
end
|
||||
end)
|
||||
|
||||
%{
|
||||
ap_id: data["id"],
|
||||
uri: get_actor_url(data["url"]),
|
||||
banner: normalize_image(data["image"]),
|
||||
background: normalize_image(data["backgroundUrl"]),
|
||||
fields: fields,
|
||||
emoji: emojis,
|
||||
is_locked: is_locked,
|
||||
is_discoverable: is_discoverable,
|
||||
invisible: invisible,
|
||||
avatar: normalize_image(data["icon"]),
|
||||
name: data["name"],
|
||||
follower_address: data["followers"],
|
||||
following_address: data["following"],
|
||||
featured_address: featured_address,
|
||||
bio: data["summary"] || "",
|
||||
actor_type: actor_type,
|
||||
also_known_as: also_known_as,
|
||||
signing_key: signing_key,
|
||||
inbox: data["inbox"],
|
||||
shared_inbox: shared_inbox,
|
||||
pinned_objects: pinned_objects,
|
||||
nickname: nickname
|
||||
}
|
||||
end
|
||||
|
||||
defp collection_private(%{"first" => %{"type" => type}})
|
||||
when type in ["CollectionPage", "OrderedCollectionPage"],
|
||||
do: false
|
||||
|
||||
defp collection_private(%{"first" => first}) do
|
||||
with {:ok, %{"type" => type}} when type in ["CollectionPage", "OrderedCollectionPage"] <-
|
||||
APFetcher.fetch_and_contain_remote_object_from_id(first) do
|
||||
false
|
||||
else
|
||||
_ -> true
|
||||
end
|
||||
end
|
||||
|
||||
defp collection_private(_data), do: true
|
||||
|
||||
defp counter_private(%{"totalItems" => _}), do: false
|
||||
defp counter_private(_), do: true
|
||||
|
||||
defp normalize_counter(counter) when is_integer(counter), do: counter
|
||||
defp normalize_counter(_), do: 0
|
||||
|
||||
defp eval_collection_counter(apid) when is_binary(apid) do
|
||||
case APFetcher.fetch_and_contain_remote_object_from_id(apid) do
|
||||
{:ok, data} ->
|
||||
{collection_private(data), counter_private(data), normalize_counter(data["totalItems"])}
|
||||
|
||||
_ ->
|
||||
Logger.debug("Failed to fetch follower/ing collection #{apid}; assuming private")
|
||||
{true, true, 0}
|
||||
end
|
||||
end
|
||||
|
||||
defp eval_collection_counter(_), do: {true, 0}
|
||||
|
||||
def fetch_follow_information_for_user(user) do
|
||||
{hide_follows, hide_follows_count, following_count} =
|
||||
eval_collection_counter(user.following_address)
|
||||
|
||||
{hide_followers, hide_followers_count, follower_count} =
|
||||
eval_collection_counter(user.follower_address)
|
||||
|
||||
{:ok,
|
||||
%{
|
||||
hide_follows: hide_follows,
|
||||
hide_follows_count: hide_follows_count,
|
||||
following_count: following_count,
|
||||
hide_followers: hide_followers,
|
||||
hide_followers_count: hide_followers_count,
|
||||
follower_count: follower_count
|
||||
}}
|
||||
end
|
||||
|
||||
def maybe_update_follow_information(user_data) do
|
||||
with {:enabled, true} <- {:enabled, Config.get([:instance, :external_user_synchronization])},
|
||||
{_, true} <-
|
||||
{:collections_available,
|
||||
!!(user_data[:following_address] && user_data[:follower_address])},
|
||||
{:ok, follow_info} <-
|
||||
fetch_follow_information_for_user(user_data) do
|
||||
Map.merge(user_data, follow_info)
|
||||
else
|
||||
{:user_type_check, false} ->
|
||||
user_data
|
||||
|
||||
{:collections_available, false} ->
|
||||
user_data
|
||||
|
||||
{:enabled, false} ->
|
||||
user_data
|
||||
|
||||
e ->
|
||||
Logger.error(
|
||||
"Follower/Following counter update for #{user_data.ap_id} failed.\n" <> inspect(e)
|
||||
)
|
||||
|
||||
user_data
|
||||
end
|
||||
end
|
||||
|
||||
def maybe_handle_clashing_nickname(data) do
|
||||
with nickname when is_binary(nickname) <- data[:nickname],
|
||||
%User{} = old_user <- User.get_by_nickname(nickname),
|
||||
{_, false} <- {:ap_id_comparison, data[:ap_id] == old_user.ap_id} do
|
||||
Logger.info(
|
||||
"Found an old user for #{nickname}, the old ap id is #{old_user.ap_id}, new one is #{data[:ap_id]}, renaming.
|
||||
"
|
||||
)
|
||||
|
||||
old_user
|
||||
|> User.remote_user_changeset(%{nickname: "#{old_user.id}.#{old_user.nickname}"})
|
||||
|> User.update_and_set_cache()
|
||||
else
|
||||
{:ap_id_comparison, true} ->
|
||||
Logger.info(
|
||||
"Found an old user for #{data[:nickname]}, but the ap id #{data[:ap_id]} is the same as the new user. Race
|
||||
condition? Not changing anything."
|
||||
)
|
||||
|
||||
_ ->
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
def process_featured_collection(nil), do: {:ok, nil, %{}}
|
||||
def process_featured_collection(""), do: {:ok, nil, %{}}
|
||||
|
||||
def process_featured_collection(featured_collection) do
|
||||
featured_address =
|
||||
case get_ap_id(featured_collection) do
|
||||
id when is_binary(id) -> id
|
||||
_ -> nil
|
||||
end
|
||||
|
||||
# TODO: allow passing item/page limit as function opt and use here
|
||||
case Collections.Fetcher.fetch_collection(featured_collection) do
|
||||
{:ok, items} ->
|
||||
now = NaiveDateTime.utc_now()
|
||||
dated_obj_ids = Map.new(items, fn obj -> {get_ap_id(obj), now} end)
|
||||
{:ok, featured_address, dated_obj_ids}
|
||||
|
||||
error ->
|
||||
Logger.error(
|
||||
"Could not decode featured collection at fetch #{inspect(featured_collection)}: #{inspect(error)}"
|
||||
)
|
||||
|
||||
error =
|
||||
case error do
|
||||
{:error, e} -> e
|
||||
e -> e
|
||||
end
|
||||
|
||||
{:error, error}
|
||||
end
|
||||
end
|
||||
|
||||
def enqueue_pin_fetches(%{pinned_objects: pins}) do
|
||||
# enqueue a task to fetch all pinned objects
|
||||
Enum.each(pins, fn {ap_id, _} ->
|
||||
if is_nil(Object.get_cached_by_ap_id(ap_id)) do
|
||||
Pleroma.Workers.RemoteFetcherWorker.enqueue("fetch_remote", %{
|
||||
"id" => ap_id,
|
||||
"depth" => 1
|
||||
})
|
||||
end
|
||||
end)
|
||||
end
|
||||
|
||||
def enqueue_pin_fetches(_), do: nil
|
||||
|
||||
def validate_and_cast(data, verified_nick) do
|
||||
with {:ok, data} <- MRF.filter(data),
|
||||
{:valid, {:ok, _, _}} <- {:valid, UserValidator.validate(data, [])} do
|
||||
{:ok, object_to_user_data(data, verified_nick)}
|
||||
else
|
||||
{:valid, reason} ->
|
||||
{:error, {:validate, reason}}
|
||||
|
||||
e ->
|
||||
{:error, e}
|
||||
end
|
||||
end
|
||||
|
||||
defp insert_or_update(%User{} = olduser, newdata) do
|
||||
olduser
|
||||
|> User.remote_user_changeset(newdata)
|
||||
|> User.update_and_set_cache()
|
||||
end
|
||||
|
||||
defp insert_or_update(nil, newdata) do
|
||||
newdata
|
||||
|> User.remote_user_changeset()
|
||||
|> Repo.insert()
|
||||
|> User.set_cache()
|
||||
end
|
||||
|
||||
defp make_user_from_apdata_and_nick(ap_data, verified_nick, olduser \\ nil) do
|
||||
with {:ok, data} <- validate_and_cast(ap_data, verified_nick) do
|
||||
olduser = olduser || User.get_cached_by_ap_id(data.ap_id)
|
||||
|
||||
if !olduser || olduser.nickname != data.nickname do
|
||||
maybe_handle_clashing_nickname(data)
|
||||
end
|
||||
|
||||
data = maybe_update_follow_information(data)
|
||||
|
||||
with {:ok, newuser} <- insert_or_update(olduser, data) do
|
||||
enqueue_pin_fetches(data)
|
||||
{:ok, newuser}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
defp discover_nick_from_actor_data(data) do
|
||||
case WebFinger.Finger.finger_actor(data) do
|
||||
{:ok, nil} ->
|
||||
Logger.debug("No WebFinger found for #{data["id"]}; using fallback")
|
||||
nil
|
||||
|
||||
{:ok, nick} ->
|
||||
nick
|
||||
|
||||
{:error, error} ->
|
||||
Logger.error(
|
||||
"Invalid WebFinger for #{data["id"]}; spoof attempt or just misconfiguration? Using safe fallback: #{inspect(error)}"
|
||||
)
|
||||
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
defp needs_nick_update(%{"webfinger" => "acct:" <> nick}, nick), do: false
|
||||
defp needs_nick_update(%{"webfinger" => nick}, nick), do: false
|
||||
|
||||
defp needs_nick_update(%{"preferredUsername" => name}, oldnick) when is_binary(name) do
|
||||
String.starts_with?(oldnick, name <> "@")
|
||||
end
|
||||
|
||||
defp needs_nick_update(ap_data, oldnick) do
|
||||
ap_nick = ap_data["webfinger"] || ap_data["preferredUsername"]
|
||||
(!oldnick && ap_nick) || (oldnick && !ap_nick)
|
||||
end
|
||||
|
||||
defp refreshed_nick(ap_data, olduser) do
|
||||
if Config.get!([Pleroma.Web.WebFinger, :update_nickname_on_user_fetch]) ||
|
||||
!olduser || needs_nick_update(ap_data, olduser.nickname) do
|
||||
discover_nick_from_actor_data(ap_data)
|
||||
else
|
||||
olduser.nickname
|
||||
end
|
||||
end
|
||||
|
||||
defp refresh_or_fetch_from_ap_id(ap_id, olduser) do
|
||||
with {:ok, data} <- APFetcher.fetch_and_contain_remote_object_from_id(ap_id),
|
||||
# if AP id somehow changed on refetch, discard old info
|
||||
verified_olduser <- (olduser && olduser.ap_id == data["id"] && olduser) || nil,
|
||||
verified_nick <- refreshed_nick(data, verified_olduser) do
|
||||
make_user_from_apdata_and_nick(data, verified_nick, verified_olduser)
|
||||
else
|
||||
# If this has been deleted, only log a debug and not an error
|
||||
{:error, {"Object has been deleted", _, _} = e} ->
|
||||
Logger.debug("User was explicitly deleted #{ap_id}, #{inspect(e)}")
|
||||
{:error, :not_found}
|
||||
|
||||
{:reject, _reason} = e ->
|
||||
{:error, e}
|
||||
|
||||
{:error, e} ->
|
||||
{:error, e}
|
||||
end
|
||||
end
|
||||
|
||||
def make_user_from_ap_id(ap_id), do: refresh_or_fetch_from_ap_id(ap_id, nil)
|
||||
|
||||
def refetch_user(%User{ap_id: ap_id} = u), do: refresh_or_fetch_from_ap_id(ap_id, u)
|
||||
|
||||
def make_user_from_nickname(nickname) do
|
||||
case WebFinger.Finger.finger_mention(nickname) do
|
||||
{:ok, handle, actor_data} ->
|
||||
make_user_from_apdata_and_nick(actor_data, handle)
|
||||
|
||||
error ->
|
||||
error
|
||||
end
|
||||
end
|
||||
|
||||
def update_user_with_apdata(%{"id" => ap_id} = new_ap_data) do
|
||||
with %User{} = old_user <- User.get_cached_by_ap_id(ap_id) do
|
||||
new_nick = refreshed_nick(new_ap_data, old_user)
|
||||
make_user_from_apdata_and_nick(new_ap_data, new_nick, old_user)
|
||||
else
|
||||
nil ->
|
||||
Logger.warning("Cannot update unknown user #{ap_id}")
|
||||
{:error, :not_found}
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
@ -144,11 +144,6 @@ defmodule Pleroma.User.Query do
|
|||
|> where([u], u.is_confirmed == true)
|
||||
end
|
||||
|
||||
defp compose_query({:legacy_active, _}, query) do
|
||||
query
|
||||
|> where([u], fragment("not (?->'deactivated' @> 'true')", u.info))
|
||||
end
|
||||
|
||||
defp compose_query({:deactivated, false}, query) do
|
||||
where(query, [u], u.is_active == true)
|
||||
end
|
||||
|
|
|
|||
|
|
@ -8,6 +8,7 @@ defmodule Pleroma.User.SigningKey do
|
|||
|
||||
require Logger
|
||||
|
||||
@derive {Inspect, only: [:user_id, :key_id]}
|
||||
@primary_key false
|
||||
schema "signing_keys" do
|
||||
belongs_to(:user, Pleroma.User, type: FlakeId.Ecto.CompatType)
|
||||
|
|
@ -109,7 +110,7 @@ defmodule Pleroma.User.SigningKey do
|
|||
{:ok, :public_key.pem_encode([public_key])}
|
||||
end
|
||||
|
||||
@spec public_key(__MODULE__) :: {:ok, binary()} | {:error, String.t()}
|
||||
@spec public_key_decoded(__MODULE__) :: {:ok, binary()} | {:error, String.t()}
|
||||
@doc """
|
||||
Return public key data in binary format.
|
||||
"""
|
||||
|
|
@ -123,8 +124,12 @@ defmodule Pleroma.User.SigningKey do
|
|||
{:ok, decoded}
|
||||
end
|
||||
|
||||
def public_key(_), do: {:error, "key not found"}
|
||||
def public_key_decoded(_), do: {:error, "key not found"}
|
||||
|
||||
@spec public_key_pem(__MODULE__) :: {:ok, binary()} | {:error, String.t()}
|
||||
@doc """
|
||||
Return public key data for user in PEM format
|
||||
"""
|
||||
def public_key_pem(%User{} = user) do
|
||||
case Repo.preload(user, :signing_key) do
|
||||
%User{signing_key: %__MODULE__{public_key: public_key_pem}} -> {:ok, public_key_pem}
|
||||
|
|
|
|||
|
|
@ -67,7 +67,7 @@ defmodule Pleroma.UserRelationship do
|
|||
target_id: target.id
|
||||
})
|
||||
|> Repo.insert(
|
||||
on_conflict: {:replace_all_except, [:id, :inserted_at]},
|
||||
on_conflict: {:replace, [:relationship_type, :source_id, :target_id]},
|
||||
conflict_target: [:source_id, :relationship_type, :target_id],
|
||||
returning: true
|
||||
)
|
||||
|
|
|
|||
|
|
@ -16,7 +16,7 @@ defmodule Pleroma.Utils do
|
|||
def compile_dir(dir) when is_binary(dir) do
|
||||
dir
|
||||
|> elixir_files()
|
||||
|> Kernel.ParallelCompiler.compile()
|
||||
|> Kernel.ParallelCompiler.compile(return_diagnostics: true)
|
||||
end
|
||||
|
||||
defp elixir_files(dir) when is_binary(dir) do
|
||||
|
|
|
|||
|
|
@ -31,21 +31,19 @@ defmodule Pleroma.Web do
|
|||
|
||||
def controller do
|
||||
quote do
|
||||
use Phoenix.Controller, namespace: Pleroma.Web
|
||||
use Phoenix.Controller,
|
||||
formats: [html: "View", json: "View"],
|
||||
layouts: [html: Pleroma.Web.LayoutView]
|
||||
|
||||
import Plug.Conn
|
||||
|
||||
import Pleroma.Web.Gettext
|
||||
use Gettext,
|
||||
backend: Pleroma.Web.Gettext
|
||||
|
||||
import Pleroma.Web.TranslationHelpers
|
||||
|
||||
unquote(verified_routes())
|
||||
|
||||
plug(:set_put_layout)
|
||||
|
||||
defp set_put_layout(conn, _) do
|
||||
put_layout(conn, Pleroma.Config.get(:app_layout, "app.html"))
|
||||
end
|
||||
|
||||
# Marks plugs intentionally skipped and blocks their execution if present in plugs chain
|
||||
defp skip_plug(conn, plug_modules) do
|
||||
plug_modules
|
||||
|
|
@ -233,14 +231,18 @@ defmodule Pleroma.Web do
|
|||
def channel do
|
||||
quote do
|
||||
use Phoenix.Channel
|
||||
import Pleroma.Web.Gettext
|
||||
|
||||
use Gettext,
|
||||
backend: Pleroma.Web.Gettext
|
||||
end
|
||||
end
|
||||
|
||||
defp view_helpers do
|
||||
quote do
|
||||
# Use all HTML functionality (forms, tags, etc)
|
||||
use Phoenix.HTML
|
||||
import Phoenix.HTML
|
||||
import Phoenix.HTML.Form
|
||||
use PhoenixHTMLHelpers
|
||||
|
||||
# Import LiveView and .heex helpers (live_render, live_patch, <.form>, etc)
|
||||
import Phoenix.LiveView.Helpers
|
||||
|
|
@ -249,7 +251,10 @@ defmodule Pleroma.Web do
|
|||
import Phoenix.View
|
||||
|
||||
import Pleroma.Web.ErrorHelpers
|
||||
import Pleroma.Web.Gettext
|
||||
|
||||
use Gettext,
|
||||
backend: Pleroma.Web.Gettext
|
||||
|
||||
unquote(verified_routes())
|
||||
end
|
||||
end
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@
|
|||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
||||
alias Akkoma.Collections
|
||||
alias Pleroma.Activity
|
||||
alias Pleroma.Activity.Ir.Topics
|
||||
alias Pleroma.Config
|
||||
|
|
@ -16,16 +15,13 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
alias Pleroma.Notification
|
||||
alias Pleroma.Object
|
||||
alias Pleroma.Object.Containment
|
||||
alias Pleroma.Object.Fetcher
|
||||
alias Pleroma.Pagination
|
||||
alias Pleroma.Repo
|
||||
alias Pleroma.Upload
|
||||
alias Pleroma.User
|
||||
alias Pleroma.Web.ActivityPub.MRF
|
||||
alias Pleroma.Web.ActivityPub.ObjectValidators.UserValidator
|
||||
alias Pleroma.Web.ActivityPub.Transmogrifier
|
||||
alias Pleroma.Web.ActivityPub.Visibility
|
||||
alias Pleroma.Web.Streamer
|
||||
alias Pleroma.Web.WebFinger
|
||||
alias Pleroma.Workers.BackgroundWorker
|
||||
alias Pleroma.Workers.PollWorker
|
||||
|
||||
|
|
@ -208,21 +204,19 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
end
|
||||
|
||||
def notify_and_stream(activity) do
|
||||
Notification.create_notifications(activity)
|
||||
|
||||
original_activity =
|
||||
case activity do
|
||||
%{data: %{"type" => "Update"}, object: %{data: %{"id" => id}}} ->
|
||||
Activity.get_create_by_object_ap_id_with_object(id)
|
||||
|
||||
_ ->
|
||||
activity
|
||||
end
|
||||
|
||||
conversation = create_or_bump_conversation(original_activity, original_activity.actor)
|
||||
participations = get_participations(conversation)
|
||||
# XXX: all callers of this should be moved to side_effect handling, such that
|
||||
# notifications can be collected and only be sent out _after_ the transaction succeed
|
||||
{:ok, notifications, _} = Notification.create_notifications(activity)
|
||||
Notification.send(notifications)
|
||||
stream_out(activity)
|
||||
stream_out_participations(participations)
|
||||
end
|
||||
|
||||
defp maybe_bump_conversation(activity) do
|
||||
if Visibility.is_direct?(activity) do
|
||||
conversation = create_or_bump_conversation(activity, activity.actor)
|
||||
participations = get_participations(conversation)
|
||||
stream_out_participations(participations)
|
||||
end
|
||||
end
|
||||
|
||||
defp maybe_create_activity_expiration(
|
||||
|
|
@ -239,7 +233,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
|
||||
defp maybe_create_activity_expiration(activity), do: {:ok, activity}
|
||||
|
||||
defp create_or_bump_conversation(activity, actor) do
|
||||
def create_or_bump_conversation(activity, actor) do
|
||||
with {:ok, conversation} <- Conversation.create_or_bump_for(activity),
|
||||
%User{} = user <- User.get_cached_by_ap_id(actor) do
|
||||
Participation.mark_as_read(user, conversation)
|
||||
|
|
@ -258,7 +252,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
def stream_out_participations(participations) do
|
||||
participations =
|
||||
participations
|
||||
|> Repo.preload(:user)
|
||||
|> Repo.preload([:user, :conversation])
|
||||
|
||||
Streamer.stream("participation", participations)
|
||||
end
|
||||
|
|
@ -323,6 +317,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
{:ok, _actor} <- increase_note_count_if_public(actor, activity),
|
||||
{:ok, _actor} <- update_last_status_at_if_public(actor, activity),
|
||||
_ <- notify_and_stream(activity),
|
||||
_ <- maybe_bump_conversation(activity),
|
||||
:ok <- maybe_schedule_poll_notifications(activity),
|
||||
:ok <- maybe_federate(activity) do
|
||||
{:ok, activity}
|
||||
|
|
@ -482,9 +477,9 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
from(activity in Activity)
|
||||
|> maybe_preload_objects(opts)
|
||||
|> maybe_preload_bookmarks(opts)
|
||||
|> maybe_set_thread_muted_field(opts)
|
||||
|> restrict_blocked(opts)
|
||||
|> restrict_blockers_visibility(opts)
|
||||
|> restrict_muted_users(opts)
|
||||
|> restrict_recipients(recipients, opts[:user])
|
||||
|> restrict_filtered(opts)
|
||||
|> where(
|
||||
|
|
@ -1096,24 +1091,35 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
|
||||
defp restrict_reblogs(query, _), do: query
|
||||
|
||||
defp restrict_muted(query, %{with_muted: true}), do: query
|
||||
defp restrict_muted(query, opts) do
|
||||
query
|
||||
|> restrict_muted_users(opts)
|
||||
|> restrict_muted_threads(opts)
|
||||
end
|
||||
|
||||
defp restrict_muted(query, %{muting_user: %User{} = user} = opts) do
|
||||
defp restrict_muted_users(query, %{with_muted: true}), do: query
|
||||
|
||||
defp restrict_muted_users(query, %{muting_user: %User{} = user} = opts) do
|
||||
mutes = opts[:muted_users_ap_ids] || User.muted_users_ap_ids(user)
|
||||
|
||||
query =
|
||||
from([activity] in query,
|
||||
where: fragment("not (? = ANY(?))", activity.actor, ^mutes),
|
||||
where:
|
||||
fragment(
|
||||
"not (?->'to' \\?| ?) or ? = ?",
|
||||
activity.data,
|
||||
^mutes,
|
||||
activity.actor,
|
||||
^user.ap_id
|
||||
)
|
||||
)
|
||||
from([activity] in query,
|
||||
where: fragment("not (? = ANY(?))", activity.actor, ^mutes),
|
||||
where:
|
||||
fragment(
|
||||
"not (?->'to' \\?| ?) or ? = ?",
|
||||
activity.data,
|
||||
^mutes,
|
||||
activity.actor,
|
||||
^user.ap_id
|
||||
)
|
||||
)
|
||||
end
|
||||
|
||||
defp restrict_muted_users(query, _), do: query
|
||||
|
||||
defp restrict_muted_threads(query, %{with_muted: true}), do: query
|
||||
|
||||
defp restrict_muted_threads(query, %{muting_user: %User{} = _user} = opts) do
|
||||
unless opts[:skip_preload] do
|
||||
from([thread_mute: tm] in query, where: is_nil(tm.user_id))
|
||||
else
|
||||
|
|
@ -1121,7 +1127,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
end
|
||||
end
|
||||
|
||||
defp restrict_muted(query, _), do: query
|
||||
defp restrict_muted_threads(query, _), do: query
|
||||
|
||||
defp restrict_blocked(query, %{blocking_user: %User{} = user} = opts) do
|
||||
blocked_ap_ids = opts[:blocked_users_ap_ids] || User.blocked_users_ap_ids(user)
|
||||
|
|
@ -1447,7 +1453,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
|> restrict_muted_reblogs(restrict_muted_reblogs_opts)
|
||||
|> restrict_instance(opts)
|
||||
|> restrict_announce_object_actor(opts)
|
||||
|> restrict_filtered(opts)
|
||||
|> maybe_restrict_deactivated_users(opts)
|
||||
|> exclude_poll_votes(opts)
|
||||
|> exclude_invisible_actors(opts)
|
||||
|
|
@ -1536,361 +1541,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
|||
|
||||
defp sanitize_upload_file(upload), do: upload
|
||||
|
||||
@spec get_actor_url(any()) :: binary() | nil
|
||||
defp get_actor_url(url) when is_binary(url), do: url
|
||||
defp get_actor_url(%{"href" => href}) when is_binary(href), do: href
|
||||
|
||||
defp get_actor_url(url) when is_list(url) do
|
||||
url
|
||||
|> List.first()
|
||||
|> get_actor_url()
|
||||
end
|
||||
|
||||
defp get_actor_url(_url), do: nil
|
||||
|
||||
defp normalize_image(%{"url" => url}) do
|
||||
%{
|
||||
"type" => "Image",
|
||||
"url" => [%{"href" => url}]
|
||||
}
|
||||
end
|
||||
|
||||
defp normalize_image(urls) when is_list(urls), do: urls |> List.first() |> normalize_image()
|
||||
defp normalize_image(_), do: nil
|
||||
|
||||
defp normalize_also_known_as(aka) when is_list(aka), do: aka
|
||||
defp normalize_also_known_as(aka) when is_binary(aka), do: [aka]
|
||||
defp normalize_also_known_as(nil), do: []
|
||||
|
||||
defp normalize_attachment(%{} = attachment), do: [attachment]
|
||||
defp normalize_attachment(attachment) when is_list(attachment), do: attachment
|
||||
defp normalize_attachment(_), do: []
|
||||
|
||||
defp maybe_make_public_key_object(data) do
|
||||
if is_map(data["publicKey"]) && is_binary(data["publicKey"]["publicKeyPem"]) do
|
||||
%{
|
||||
public_key: data["publicKey"]["publicKeyPem"],
|
||||
key_id: data["publicKey"]["id"]
|
||||
}
|
||||
else
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
defp object_to_user_data(data, additional) do
|
||||
fields =
|
||||
data
|
||||
|> Map.get("attachment", [])
|
||||
|> normalize_attachment()
|
||||
|> Enum.filter(fn
|
||||
%{"type" => t} -> t == "PropertyValue"
|
||||
_ -> false
|
||||
end)
|
||||
|> Enum.map(fn fields -> Map.take(fields, ["name", "value"]) end)
|
||||
|
||||
emojis =
|
||||
data
|
||||
|> Map.get("tag", [])
|
||||
|> Enum.filter(fn
|
||||
%{"type" => "Emoji"} -> true
|
||||
_ -> false
|
||||
end)
|
||||
|> Map.new(fn %{"icon" => %{"url" => url}, "name" => name} ->
|
||||
{String.trim(name, ":"), url}
|
||||
end)
|
||||
|
||||
is_locked = data["manuallyApprovesFollowers"] || false
|
||||
data = Transmogrifier.maybe_fix_user_object(data)
|
||||
is_discoverable = data["discoverable"] || false
|
||||
invisible = data["invisible"] || false
|
||||
actor_type = data["type"] || "Person"
|
||||
|
||||
{featured_address, pinned_objects} =
|
||||
case process_featured_collection(data["featured"]) do
|
||||
{:ok, featured_address, pinned_objects} -> {featured_address, pinned_objects}
|
||||
_ -> {nil, %{}}
|
||||
end
|
||||
|
||||
# first, check that the owner is correct
|
||||
signing_key =
|
||||
if data["id"] !== data["publicKey"]["owner"] do
|
||||
Logger.error(
|
||||
"Owner of the public key is not the same as the actor - not saving the public key."
|
||||
)
|
||||
|
||||
nil
|
||||
else
|
||||
maybe_make_public_key_object(data)
|
||||
end
|
||||
|
||||
shared_inbox =
|
||||
if is_map(data["endpoints"]) && is_binary(data["endpoints"]["sharedInbox"]) do
|
||||
data["endpoints"]["sharedInbox"]
|
||||
end
|
||||
|
||||
# if WebFinger request was already done, we probably have acct, otherwise
|
||||
# we request WebFinger here
|
||||
nickname = additional[:nickname_from_acct] || generate_nickname(data)
|
||||
|
||||
# also_known_as must be a URL
|
||||
also_known_as =
|
||||
data
|
||||
|> Map.get("alsoKnownAs", [])
|
||||
|> normalize_also_known_as()
|
||||
|> Enum.filter(fn url ->
|
||||
case URI.parse(url) do
|
||||
%URI{scheme: "http"} -> true
|
||||
%URI{scheme: "https"} -> true
|
||||
_ -> false
|
||||
end
|
||||
end)
|
||||
|
||||
%{
|
||||
ap_id: data["id"],
|
||||
uri: get_actor_url(data["url"]),
|
||||
banner: normalize_image(data["image"]),
|
||||
background: normalize_image(data["backgroundUrl"]),
|
||||
fields: fields,
|
||||
emoji: emojis,
|
||||
is_locked: is_locked,
|
||||
is_discoverable: is_discoverable,
|
||||
invisible: invisible,
|
||||
avatar: normalize_image(data["icon"]),
|
||||
name: data["name"],
|
||||
follower_address: data["followers"],
|
||||
following_address: data["following"],
|
||||
featured_address: featured_address,
|
||||
bio: data["summary"] || "",
|
||||
actor_type: actor_type,
|
||||
also_known_as: also_known_as,
|
||||
signing_key: signing_key,
|
||||
inbox: data["inbox"],
|
||||
shared_inbox: shared_inbox,
|
||||
pinned_objects: pinned_objects,
|
||||
nickname: nickname
|
||||
}
|
||||
end
|
||||
|
||||
defp generate_nickname(%{"preferredUsername" => username} = data) when is_binary(username) do
|
||||
generated = "#{username}@#{URI.parse(data["id"]).host}"
|
||||
|
||||
if Config.get([WebFinger, :update_nickname_on_user_fetch]) do
|
||||
case WebFinger.finger(generated) do
|
||||
{:ok, %{"subject" => "acct:" <> acct}} -> acct
|
||||
_ -> generated
|
||||
end
|
||||
else
|
||||
generated
|
||||
end
|
||||
end
|
||||
|
||||
# nickname can be nil because of virtual actors
|
||||
defp generate_nickname(_), do: nil
|
||||
|
||||
def fetch_follow_information_for_user(user) do
|
||||
with {:ok, following_data} <-
|
||||
Fetcher.fetch_and_contain_remote_object_from_id(user.following_address),
|
||||
{:ok, hide_follows} <- collection_private(following_data),
|
||||
{:ok, followers_data} <-
|
||||
Fetcher.fetch_and_contain_remote_object_from_id(user.follower_address),
|
||||
{:ok, hide_followers} <- collection_private(followers_data) do
|
||||
{:ok,
|
||||
%{
|
||||
hide_follows: hide_follows,
|
||||
follower_count: normalize_counter(followers_data["totalItems"]),
|
||||
following_count: normalize_counter(following_data["totalItems"]),
|
||||
hide_followers: hide_followers
|
||||
}}
|
||||
else
|
||||
{:error, _} = e -> e
|
||||
e -> {:error, e}
|
||||
end
|
||||
end
|
||||
|
||||
defp normalize_counter(counter) when is_integer(counter), do: counter
|
||||
defp normalize_counter(_), do: 0
|
||||
|
||||
def maybe_update_follow_information(user_data) do
|
||||
with {:enabled, true} <- {:enabled, Config.get([:instance, :external_user_synchronization])},
|
||||
{_, true} <- {:user_type_check, user_data[:type] in ["Person", "Service"]},
|
||||
{_, true} <-
|
||||
{:collections_available,
|
||||
!!(user_data[:following_address] && user_data[:follower_address])},
|
||||
{:ok, info} <-
|
||||
fetch_follow_information_for_user(user_data) do
|
||||
info = Map.merge(user_data[:info] || %{}, info)
|
||||
|
||||
user_data
|
||||
|> Map.put(:info, info)
|
||||
else
|
||||
{:user_type_check, false} ->
|
||||
user_data
|
||||
|
||||
{:collections_available, false} ->
|
||||
user_data
|
||||
|
||||
{:enabled, false} ->
|
||||
user_data
|
||||
|
||||
e ->
|
||||
Logger.error(
|
||||
"Follower/Following counter update for #{user_data.ap_id} failed.\n" <> inspect(e)
|
||||
)
|
||||
|
||||
user_data
|
||||
end
|
||||
end
|
||||
|
||||
defp collection_private(%{"first" => %{"type" => type}})
|
||||
when type in ["CollectionPage", "OrderedCollectionPage"],
|
||||
do: {:ok, false}
|
||||
|
||||
defp collection_private(%{"first" => first}) do
|
||||
with {:ok, %{"type" => type}} when type in ["CollectionPage", "OrderedCollectionPage"] <-
|
||||
Fetcher.fetch_and_contain_remote_object_from_id(first) do
|
||||
{:ok, false}
|
||||
else
|
||||
{:error, _} -> {:ok, true}
|
||||
end
|
||||
end
|
||||
|
||||
defp collection_private(_data), do: {:ok, true}
|
||||
|
||||
def user_data_from_user_object(data, additional \\ []) do
|
||||
with {:ok, data} <- MRF.filter(data) do
|
||||
{:ok, object_to_user_data(data, additional)}
|
||||
else
|
||||
e -> {:error, e}
|
||||
end
|
||||
end
|
||||
|
||||
defp fetch_and_prepare_user_from_ap_id(ap_id, additional) do
|
||||
with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id),
|
||||
{:valid, {:ok, _, _}} <- {:valid, UserValidator.validate(data, [])},
|
||||
{:ok, data} <- user_data_from_user_object(data, additional) do
|
||||
{:ok, maybe_update_follow_information(data)}
|
||||
else
|
||||
# If this has been deleted, only log a debug and not an error
|
||||
{:error, {"Object has been deleted", _, _} = e} ->
|
||||
Logger.debug("User was explicitly deleted #{ap_id}, #{inspect(e)}")
|
||||
{:error, :not_found}
|
||||
|
||||
{:reject, _reason} = e ->
|
||||
{:error, e}
|
||||
|
||||
{:valid, reason} ->
|
||||
{:error, {:validate, reason}}
|
||||
|
||||
{:error, e} ->
|
||||
{:error, e}
|
||||
end
|
||||
end
|
||||
|
||||
def maybe_handle_clashing_nickname(data) do
|
||||
with nickname when is_binary(nickname) <- data[:nickname],
|
||||
%User{} = old_user <- User.get_by_nickname(nickname),
|
||||
{_, false} <- {:ap_id_comparison, data[:ap_id] == old_user.ap_id} do
|
||||
Logger.info(
|
||||
"Found an old user for #{nickname}, the old ap id is #{old_user.ap_id}, new one is #{data[:ap_id]}, renaming."
|
||||
)
|
||||
|
||||
old_user
|
||||
|> User.remote_user_changeset(%{nickname: "#{old_user.id}.#{old_user.nickname}"})
|
||||
|> User.update_and_set_cache()
|
||||
else
|
||||
{:ap_id_comparison, true} ->
|
||||
Logger.info(
|
||||
"Found an old user for #{data[:nickname]}, but the ap id #{data[:ap_id]} is the same as the new user. Race condition? Not changing anything."
|
||||
)
|
||||
|
||||
_ ->
|
||||
nil
|
||||
end
|
||||
end
|
||||
|
||||
def process_featured_collection(nil), do: {:ok, nil, %{}}
|
||||
def process_featured_collection(""), do: {:ok, nil, %{}}
|
||||
|
||||
def process_featured_collection(featured_collection) do
|
||||
featured_address =
|
||||
case get_ap_id(featured_collection) do
|
||||
id when is_binary(id) -> id
|
||||
_ -> nil
|
||||
end
|
||||
|
||||
# TODO: allow passing item/page limit as function opt and use here
|
||||
case Collections.Fetcher.fetch_collection(featured_collection) do
|
||||
{:ok, items} ->
|
||||
now = NaiveDateTime.utc_now()
|
||||
dated_obj_ids = Map.new(items, fn obj -> {get_ap_id(obj), now} end)
|
||||
{:ok, featured_address, dated_obj_ids}
|
||||
|
||||
error ->
|
||||
Logger.error(
|
||||
"Could not decode featured collection at fetch #{inspect(featured_collection)}: #{inspect(error)}"
|
||||
)
|
||||
|
||||
error =
|
||||
case error do
|
||||
{:error, e} -> e
|
||||
e -> e
|
||||
end
|
||||
|
||||
{:error, error}
|
||||
end
|
||||
end
|
||||
|
||||
def enqueue_pin_fetches(%{pinned_objects: pins}) do
|
||||
# enqueue a task to fetch all pinned objects
|
||||
Enum.each(pins, fn {ap_id, _} ->
|
||||
if is_nil(Object.get_cached_by_ap_id(ap_id)) do
|
||||
Pleroma.Workers.RemoteFetcherWorker.enqueue("fetch_remote", %{
|
||||
"id" => ap_id,
|
||||
"depth" => 1
|
||||
})
|
||||
end
|
||||
end)
|
||||
end
|
||||
|
||||
def enqueue_pin_fetches(_), do: nil
|
||||
|
||||
def make_user_from_ap_id(ap_id, additional \\ []) do
|
||||
user = User.get_cached_by_ap_id(ap_id)
|
||||
|
||||
with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, additional) do
|
||||
user =
|
||||
if data.ap_id != ap_id do
|
||||
User.get_cached_by_ap_id(data.ap_id)
|
||||
else
|
||||
user
|
||||
end
|
||||
|
||||
if user do
|
||||
user
|
||||
|> User.remote_user_changeset(data)
|
||||
|> User.update_and_set_cache()
|
||||
|> tap(fn _ -> enqueue_pin_fetches(data) end)
|
||||
else
|
||||
maybe_handle_clashing_nickname(data)
|
||||
|
||||
data
|
||||
|> User.remote_user_changeset()
|
||||
|> Repo.insert()
|
||||
|> User.set_cache()
|
||||
|> tap(fn _ -> enqueue_pin_fetches(data) end)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
def make_user_from_nickname(nickname) do
|
||||
with {:ok, %{"ap_id" => ap_id, "subject" => "acct:" <> acct}} when not is_nil(ap_id) <-
|
||||
WebFinger.finger(nickname) do
|
||||
make_user_from_ap_id(ap_id, nickname_from_acct: acct)
|
||||
else
|
||||
_e -> {:error, "No AP id in WebFinger"}
|
||||
end
|
||||
end
|
||||
|
||||
# filter out broken threads
|
||||
defp contain_broken_threads(%Activity{} = activity, %User{} = user) do
|
||||
entire_thread_visible_for_user?(activity, user)
|
||||
|
|
|
|||
|
|
@ -57,6 +57,17 @@ defmodule Pleroma.Web.ActivityPub.Builder do
|
|||
{:ok, data, []}
|
||||
end
|
||||
|
||||
@spec emoji_object!({String.t(), String.t()}) :: map()
|
||||
def emoji_object!({name, url}) do
|
||||
# TODO: we should probably send mtime instead of unix epoch time for updated
|
||||
%{
|
||||
"icon" => %{"url" => "#{URI.encode(url)}", "type" => "Image"},
|
||||
"name" => Emoji.maybe_quote(name),
|
||||
"type" => "Emoji",
|
||||
"updated" => "1970-01-01T00:00:00Z"
|
||||
}
|
||||
end
|
||||
|
||||
defp unicode_emoji_react(_object, data, emoji) do
|
||||
data
|
||||
|> Map.put("content", emoji)
|
||||
|
|
@ -67,18 +78,7 @@ defmodule Pleroma.Web.ActivityPub.Builder do
|
|||
data
|
||||
|> Map.put("content", Emoji.maybe_quote(emoji))
|
||||
|> Map.put("type", "EmojiReact")
|
||||
|> Map.put("tag", [
|
||||
%{}
|
||||
|> Map.put("id", url)
|
||||
|> Map.put("type", "Emoji")
|
||||
|> Map.put("name", Emoji.maybe_quote(emoji))
|
||||
|> Map.put(
|
||||
"icon",
|
||||
%{}
|
||||
|> Map.put("type", "Image")
|
||||
|> Map.put("url", url)
|
||||
)
|
||||
])
|
||||
|> Map.put("tag", [emoji_object!({emoji, url})])
|
||||
end
|
||||
|
||||
defp remote_custom_emoji_react(
|
||||
|
|
|
|||
|
|
@ -165,7 +165,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy do
|
|||
|
||||
if !Enum.empty?(new_emojis) do
|
||||
Logger.info("Stole new emojis: #{inspect(new_emojis)}")
|
||||
Pleroma.Emoji.reload()
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
|||
|
|
@ -15,6 +15,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do
|
|||
field(:type, :string)
|
||||
field(:mediaType, :string, default: "application/octet-stream")
|
||||
field(:name, :string)
|
||||
field(:summary, :string)
|
||||
field(:blurhash, :string)
|
||||
|
||||
embeds_many :url, UrlObjectValidator, primary_key: false do
|
||||
|
|
@ -44,7 +45,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do
|
|||
|> fix_url()
|
||||
|
||||
struct
|
||||
|> cast(data, [:id, :type, :mediaType, :name, :blurhash])
|
||||
|> cast(data, [:id, :type, :mediaType, :name, :summary, :blurhash])
|
||||
|> cast_embed(:url, with: &url_changeset/2, required: true)
|
||||
|> validate_inclusion(:type, ~w[Link Document Audio Image Video])
|
||||
|> validate_required([:type, :mediaType])
|
||||
|
|
|
|||
|
|
@ -44,9 +44,9 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.TagValidator do
|
|||
|> validate_required([:type, :href])
|
||||
end
|
||||
|
||||
def changeset(struct, %{"type" => "Hashtag", "name" => name} = data) do
|
||||
def changeset(struct, %{"type" => "Hashtag", "name" => full_name} = data) do
|
||||
name =
|
||||
cond do
|
||||
case full_name do
|
||||
"#" <> name -> name
|
||||
name -> name
|
||||
end
|
||||
|
|
|
|||
|
|
@ -25,6 +25,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.UserValidator do
|
|||
when type in Pleroma.Constants.actor_types() do
|
||||
with :ok <- validate_pubkey(data),
|
||||
:ok <- validate_inbox(data),
|
||||
:ok <- validate_nickname(data),
|
||||
:ok <- contain_collection_origin(data) do
|
||||
{:ok, data, meta}
|
||||
else
|
||||
|
|
@ -83,4 +84,18 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.UserValidator do
|
|||
_, error -> error
|
||||
end)
|
||||
end
|
||||
|
||||
defp validate_nickname(%{"preferredUsername" => nick}) when is_binary(nick) do
|
||||
if String.valid?(nick) do
|
||||
:ok
|
||||
else
|
||||
{:error, "Nickname is not valid UTF-8"}
|
||||
end
|
||||
end
|
||||
|
||||
defp validate_nickname(%{"preferredUsername" => _nick}) do
|
||||
{:error, "Nickname is not a valid string"}
|
||||
end
|
||||
|
||||
defp validate_nickname(_), do: :ok
|
||||
end
|
||||
|
|
|
|||
|
|
@ -86,7 +86,7 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
|
|||
do: {:http_error, code, headers}
|
||||
|
||||
defp format_error_response(%Tesla.Env{} = env),
|
||||
do: {:http_error, :connect, Pleroma.HTTP.Middleware.HTTPSignature.redact_keys(env)}
|
||||
do: {:http_error, :connect, env}
|
||||
|
||||
defp format_error_response(response), do: response
|
||||
|
||||
|
|
|
|||
|
|
@ -15,12 +15,12 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
|
|||
alias Pleroma.Object
|
||||
alias Pleroma.Repo
|
||||
alias Pleroma.User
|
||||
alias Pleroma.User.Fetcher, as: UserFetcher
|
||||
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||
alias Pleroma.Web.ActivityPub.Builder
|
||||
alias Pleroma.Web.ActivityPub.Pipeline
|
||||
alias Pleroma.Web.ActivityPub.Utils
|
||||
alias Pleroma.Web.ActivityPub.Visibility
|
||||
alias Pleroma.Web.Push
|
||||
alias Pleroma.Web.Streamer
|
||||
alias Pleroma.Workers.PollWorker
|
||||
|
||||
|
|
@ -121,7 +121,7 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
|
|||
nil
|
||||
end
|
||||
|
||||
{:ok, notifications} = Notification.create_notifications(object, do_send: false)
|
||||
{:ok, notifications, _} = Notification.create_notifications(object)
|
||||
|
||||
meta =
|
||||
meta
|
||||
|
|
@ -180,7 +180,8 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
|
|||
liked_object = Object.get_by_ap_id(object.data["object"])
|
||||
Utils.add_like_to_object(object, liked_object)
|
||||
|
||||
Notification.create_notifications(object)
|
||||
{:ok, notifications, _} = Notification.create_notifications(object)
|
||||
meta = add_notifications(meta, notifications)
|
||||
|
||||
{:ok, object, meta}
|
||||
end
|
||||
|
|
@ -199,7 +200,7 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
|
|||
def handle(%{data: %{"type" => "Create"}} = activity, meta) do
|
||||
with {:ok, object, meta} <- handle_object_creation(meta[:object_data], activity, meta),
|
||||
%User{} = user <- User.get_cached_by_ap_id(activity.data["actor"]) do
|
||||
{:ok, notifications} = Notification.create_notifications(activity, do_send: false)
|
||||
{:ok, notifications, _} = Notification.create_notifications(activity)
|
||||
{:ok, _user} = ActivityPub.increase_note_count_if_public(user, object)
|
||||
{:ok, _user} = ActivityPub.update_last_status_at_if_public(user, object)
|
||||
|
||||
|
|
@ -211,6 +212,18 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
|
|||
|
||||
reply_depth = (meta[:depth] || 0) + 1
|
||||
|
||||
participations =
|
||||
with true <- Visibility.is_direct?(activity),
|
||||
{:ok, conversation} <-
|
||||
ActivityPub.create_or_bump_conversation(activity, activity.actor) do
|
||||
conversation
|
||||
|> Repo.preload(:participations)
|
||||
|> Map.get(:participations)
|
||||
|> Repo.preload(:user)
|
||||
else
|
||||
_ -> []
|
||||
end
|
||||
|
||||
Pleroma.Workers.NodeInfoFetcherWorker.enqueue("process", %{
|
||||
"source_url" => activity.data["actor"]
|
||||
})
|
||||
|
|
@ -233,6 +246,7 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
|
|||
meta =
|
||||
meta
|
||||
|> add_notifications(notifications)
|
||||
|> add_streamables([{"participation", participations}])
|
||||
|
||||
ap_streamer().stream_out(activity)
|
||||
|
||||
|
|
@ -255,9 +269,11 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
|
|||
|
||||
Utils.add_announce_to_object(object, announced_object)
|
||||
|
||||
if !User.is_internal_user?(user) do
|
||||
Notification.create_notifications(object)
|
||||
{:ok, notifications, _} = Notification.create_notifications(object)
|
||||
meta = add_notifications(meta, notifications)
|
||||
|
||||
if !User.is_internal_user?(user) do
|
||||
# XXX: this too should be added to meta and only done after transaction
|
||||
ap_streamer().stream_out(object)
|
||||
end
|
||||
|
||||
|
|
@ -280,7 +296,8 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
|
|||
reacted_object = Object.get_by_ap_id(object.data["object"])
|
||||
Utils.add_emoji_reaction_to_object(object, reacted_object)
|
||||
|
||||
Notification.create_notifications(object)
|
||||
{:ok, notifications, _} = Notification.create_notifications(object)
|
||||
meta = add_notifications(meta, notifications)
|
||||
|
||||
{:ok, object, meta}
|
||||
end
|
||||
|
|
@ -411,11 +428,7 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
|
|||
changeset
|
||||
|> User.update_and_set_cache()
|
||||
else
|
||||
{:ok, new_user_data} = ActivityPub.user_data_from_user_object(updated_object)
|
||||
|
||||
User.get_by_ap_id(updated_object["id"])
|
||||
|> User.remote_user_changeset(new_user_data)
|
||||
|> User.update_and_set_cache()
|
||||
UserFetcher.update_user_with_apdata(updated_object)
|
||||
end
|
||||
|
||||
{:ok, object, meta}
|
||||
|
|
@ -557,10 +570,7 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
|
|||
|
||||
defp send_notifications(meta) do
|
||||
Keyword.get(meta, :notifications, [])
|
||||
|> Enum.each(fn notification ->
|
||||
Streamer.stream(["user", "user:notification"], notification)
|
||||
Push.send(notification)
|
||||
end)
|
||||
|> Notification.send()
|
||||
|
||||
meta
|
||||
end
|
||||
|
|
@ -574,13 +584,17 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
|
|||
meta
|
||||
end
|
||||
|
||||
defp add_notifications(meta, notifications) do
|
||||
existing = Keyword.get(meta, :notifications, [])
|
||||
|
||||
meta
|
||||
|> Keyword.put(:notifications, notifications ++ existing)
|
||||
defp add_to_list(meta, key, entries) do
|
||||
existing = Keyword.get(meta, key, [])
|
||||
Keyword.put(meta, key, entries ++ existing)
|
||||
end
|
||||
|
||||
defp add_notifications(meta, notifications),
|
||||
do: add_to_list(meta, :notifications, notifications)
|
||||
|
||||
defp add_streamables(meta, streamables),
|
||||
do: add_to_list(meta, :streamables, streamables)
|
||||
|
||||
@impl true
|
||||
def handle_after_transaction(meta) do
|
||||
meta
|
||||
|
|
|
|||
|
|
@ -339,6 +339,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
}
|
||||
|> Maps.put_if_present("mediaType", media_type)
|
||||
|> Maps.put_if_present("name", data["name"])
|
||||
|> Maps.put_if_present("summary", data["summary"])
|
||||
|> Maps.put_if_present("blurhash", data["blurhash"])
|
||||
else
|
||||
nil
|
||||
|
|
@ -878,6 +879,29 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
{:ok, data}
|
||||
end
|
||||
|
||||
def prepare_outgoing(%{"type" => "Update", "object" => %{"type" => objtype} = object} = data)
|
||||
when objtype in Pleroma.Constants.actor_types() do
|
||||
object =
|
||||
object
|
||||
|> maybe_fix_user_object()
|
||||
|> strip_internal_fields()
|
||||
|
||||
data =
|
||||
data
|
||||
|> Map.put("object", object)
|
||||
|> strip_internal_fields()
|
||||
|> Map.merge(Utils.make_json_ld_header())
|
||||
|> Map.delete("bcc")
|
||||
|
||||
{:ok, data}
|
||||
end
|
||||
|
||||
def prepare_outgoing(%{"type" => "Update", "object" => %{}} = data) do
|
||||
err_msg = "Requested to serve an Update for non-updateable object type: #{inspect(data)}"
|
||||
Logger.error(err_msg)
|
||||
raise err_msg
|
||||
end
|
||||
|
||||
def prepare_outgoing(%{"type" => "Announce", "actor" => ap_id, "object" => object_id} = data) do
|
||||
object =
|
||||
object_id
|
||||
|
|
@ -1004,29 +1028,19 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
|||
def take_emoji_tags(%User{emoji: emoji}) do
|
||||
emoji
|
||||
|> Map.to_list()
|
||||
|> Enum.map(&build_emoji_tag/1)
|
||||
|> Enum.map(&Builder.emoji_object!/1)
|
||||
end
|
||||
|
||||
# TODO: we should probably send mtime instead of unix epoch time for updated
|
||||
def add_emoji_tags(%{"emoji" => emoji} = object) do
|
||||
tags = object["tag"] || []
|
||||
|
||||
out = Enum.map(emoji, &build_emoji_tag/1)
|
||||
out = Enum.map(emoji, &Builder.emoji_object!/1)
|
||||
|
||||
Map.put(object, "tag", tags ++ out)
|
||||
end
|
||||
|
||||
def add_emoji_tags(object), do: object
|
||||
|
||||
defp build_emoji_tag({name, url}) do
|
||||
%{
|
||||
"icon" => %{"url" => "#{URI.encode(url)}", "type" => "Image"},
|
||||
"name" => ":" <> name <> ":",
|
||||
"type" => "Emoji",
|
||||
"updated" => "1970-01-01T00:00:00Z"
|
||||
}
|
||||
end
|
||||
|
||||
def set_conversation(object) do
|
||||
Map.put(object, "conversation", object["context"])
|
||||
end
|
||||
|
|
|
|||
|
|
@ -101,6 +101,8 @@ defmodule Pleroma.Web.ActivityPub.Utils do
|
|||
"@context" => [
|
||||
"https://www.w3.org/ns/activitystreams",
|
||||
"#{Endpoint.url()}/schemas/litepub-0.1.jsonld",
|
||||
# FEP-2c59
|
||||
"https://purl.archive.org/socialweb/webfinger",
|
||||
%{
|
||||
"@language" => "und",
|
||||
"htmlMfm" => "https://w3id.org/fep/c16b#htmlMfm"
|
||||
|
| ||||