forked from AkkomaGang/akkoma
Compare commits
26 commits: 4c7dc359ed ... 71f4de937b
| Author | SHA1 | Date |
|---|---|---|
| | 71f4de937b | |
| | f36d14818d | |
| | 5231d436d1 | |
| | deba1d25f5 | |
| | 66f913355a | |
| | 60b3c8d17b | |
| | edf7d5089f | |
| | d4bdd3ddb7 | |
| | 03662501c3 | |
| | 856c57208b | |
| | cb9b0d3720 | |
| | 8af50dea36 | |
| | ca9e6ffc55 | |
| | 574f010bc8 | |
| | c6e63aaf6b | |
| | 07295f7c8c | |
| | 47a793f33e | |
| | 7775cefd73 | |
| | 69099d6f44 | |
| | 5827f7781f | |
| | b2aa82cee5 | |
| | 9b2c169cef | |
| | 561e1f2470 | |
| | 0aabe4d0c3 | |
| | 8fe59d495d | |
| | 84f8f32ef9 | |
55 changed files with 1519 additions and 161 deletions
@@ -6,12 +6,12 @@ COPYING
 *file
 elixir_buildpack.config
 test/
-instance/
-_build
-deps
 test
 benchmarks
 docs/site
+docker-db
+uploads
+instance
 
 # Required to get version
 !.git
10  .gitignore  (vendored)

@@ -17,6 +17,13 @@ secret
 /instance
 /priv/ssh_keys
 vm.args
+.cache/
+.hex/
+.mix/
+.psql_history
+docker-resources/Dockerfile
+docker-resources/Caddyfile
+pgdata
 
 # Prevent committing custom emojis
 /priv/static/emoji/custom/*

@@ -65,3 +72,6 @@ pleroma.iml
 
 # Generated documentation
 docs/site
+
+# docker stuff
+docker-db
@@ -14,6 +14,14 @@ variables:
 - stable
 - refs/tags/v*
 - refs/tags/stable-*
+- &on-stable
+when:
+event:
+- push
+- tag
+branch:
+- stable
+- refs/tags/stable-*
 - &on-point-release
 when:
 event:

@@ -87,7 +95,7 @@ pipeline:
 
 # Canonical amd64
 ubuntu22:
-image: hexpm/elixir:1.13.4-erlang-25.0.2-ubuntu-jammy-20220428
+image: hexpm/elixir:1.13.4-erlang-24.3.4.5-ubuntu-jammy-20220428
 <<: *on-release
 environment:
 MIX_ENV: prod

@@ -110,9 +118,11 @@ pipeline:
 - export SOURCE=akkoma-ubuntu-jammy.zip
 - export DEST=scaleway:akkoma-updates/$${CI_COMMIT_TAG:-"$CI_COMMIT_BRANCH"}/akkoma-ubuntu-jammy.zip
 - /bin/sh /entrypoint.sh
+- export DEST=scaleway:akkoma-updates/$${CI_COMMIT_TAG:-"$CI_COMMIT_BRANCH"}/akkoma-amd64-ubuntu-jammy.zip
+- /bin/sh /entrypoint.sh
 
 debian-bullseye:
-image: elixir:1.13.4
+image: hexpm/elixir:1.13.4-erlang-24.3.4.5-debian-bullseye-20220801
 <<: *on-release
 environment:
 MIX_ENV: prod

@@ -141,8 +151,8 @@ pipeline:
 
 # Canonical amd64-musl
 musl:
-image: elixir:1.13.4-alpine
-<<: *on-release
+image: hexpm/elixir:1.13.4-erlang-24.3.4.5-alpine-3.15.6
+<<: *on-stable
 environment:
 MIX_ENV: prod
 commands:

@@ -157,7 +167,7 @@ pipeline:
 
 release-musl:
 image: akkoma/releaser
-<<: *on-release
+<<: *on-stable
 secrets: *scw-secrets
 commands:
 - export SOURCE=akkoma-amd64-musl.zip
28  CHANGELOG.md

@@ -4,6 +4,32 @@ All notable changes to this project will be documented in this file.
 
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 
+## Unreleased
+
+## Added
+- Officially supported docker release
+- Ability to remove followers unilaterally without a block
+
+## Changes
+- Follows no longer override domain blocks, a domain block is final
+- Deletes are now the lowest priority to publish and will be handled after creates
+
+## 2022.10
+
+### Added
+- Ability to sync frontend profiles between clients, with a name attached
+- Status card generation will now use the media summary if it is available
+
+### Changed
+- Emoji updated to latest 15.0 draft
+- **Breaking**: `/api/v1/pleroma/backups` endpoints now requires `read:backups` scope instead of `read:accounts`
+- Verify that the signature on posts is not domain blocked, and belongs to the correct user
+
+### Fixed
+- OAuthPlug no longer joins with the database every call and uses the user cache
+- Undo activities no longer try to look up by ID, and render correctly
+- prevent false-errors from meilisearch
+
 ## 2022.09
 
 ### Added

@@ -18,6 +44,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 ### Changed
 - MFM parsing is now done on the backend by a modified version of ilja's parser -> https://akkoma.dev/AkkomaGang/mfm-parser
 - InlineQuotePolicy is now on by default
+- Enable remote users to interact with posts
 
 ### Fixed
 - Compatibility with latest meilisearch

@@ -44,7 +71,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - amd64 is built for debian stable. Compatible with ubuntu 20.
 - ubuntu-jammy is built for... well, ubuntu 22 (LTS)
 - amd64-musl is built for alpine 3.16
-- Enable remote users to interact with posts
 
 ### Fixed
 - Updated mastoFE path, for the newer version
52  Dockerfile

@@ -1,21 +1,9 @@
-FROM elixir:1.13.4-alpine as build
+FROM hexpm/elixir:1.13.4-erlang-24.3.4.5-alpine-3.15.6
 
-COPY . .
-
 ENV MIX_ENV=prod
 
-RUN apk add git gcc g++ musl-dev make cmake file-dev &&\
-echo "import Config" > config/prod.secret.exs &&\
-mix local.hex --force &&\
-mix local.rebar --force &&\
-mix deps.get --only prod &&\
-mkdir release &&\
-mix release --path release
-
-FROM alpine:3.16
-
-ARG BUILD_DATE
-ARG VCS_REF
+ARG HOME=/opt/akkoma
+ARG DATA=/var/lib/akkoma
 
 LABEL org.opencontainers.image.title="akkoma" \
 org.opencontainers.image.description="Akkoma for Docker" \

@@ -26,25 +14,21 @@ LABEL org.opencontainers.image.title="akkoma" \
 org.opencontainers.image.revision=$VCS_REF \
 org.opencontainers.image.created=$BUILD_DATE
 
-ARG HOME=/opt/akkoma
-ARG DATA=/var/lib/akkoma
-
-RUN apk update &&\
-apk add exiftool ffmpeg imagemagick libmagic ncurses postgresql-client &&\
-adduser --system --shell /bin/false --home ${HOME} akkoma &&\
-mkdir -p ${DATA}/uploads &&\
-mkdir -p ${DATA}/static &&\
-chown -R akkoma ${DATA} &&\
-mkdir -p /etc/akkoma &&\
-chown -R akkoma /etc/akkoma
-
-USER akkoma
-
-COPY --from=build --chown=akkoma:0 /release ${HOME}
-
-COPY ./config/docker.exs /etc/akkoma/config.exs
-COPY ./docker-entrypoint.sh ${HOME}
+RUN apk add git gcc g++ musl-dev make cmake file-dev exiftool ffmpeg imagemagick libmagic ncurses postgresql-client
 
 EXPOSE 4000
 
-ENTRYPOINT ["/opt/akkoma/docker-entrypoint.sh"]
+ARG UID=1000
+ARG GID=1000
+ARG UNAME=akkoma
+
+RUN addgroup -g $GID $UNAME
+RUN adduser -u $UID -G $UNAME -D -h $HOME $UNAME
+
+WORKDIR /opt/akkoma
+
+USER $UNAME
+RUN mix local.hex --force &&\
+mix local.rebar --force
+
+CMD ["/opt/akkoma/docker-entrypoint.sh"]
@@ -261,7 +261,8 @@
 password_reset_token_validity: 60 * 60 * 24,
 profile_directory: true,
 privileged_staff: false,
-local_bubble: []
+local_bubble: [],
+max_frontend_settings_json_chars: 100_000
 
 config :pleroma, :welcome,
 direct_message: [

@@ -568,7 +569,10 @@
 mute_expire: 5,
 search_indexing: 10
 ],
-plugins: [Oban.Plugins.Pruner],
+plugins: [
+Oban.Plugins.Pruner,
+{Oban.Plugins.Reindexer, schedule: "@weekly"}
+],
 crontab: [
 {"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker},
 {"0 0 * * *", Pleroma.Workers.Cron.NewUsersDigestWorker}

@@ -753,9 +757,9 @@
 },
 "soapbox-fe" => %{
 "name" => "soapbox-fe",
-"git" => "https://gitlab.com/soapbox-pub/soapbox-fe",
+"git" => "https://gitlab.com/soapbox-pub/soapbox",
 "build_url" =>
-"https://gitlab.com/soapbox-pub/soapbox-fe/-/jobs/artifacts/${ref}/download?job=build-production",
+"https://gitlab.com/soapbox-pub/soapbox/-/jobs/artifacts/${ref}/download?job=build-production",
 "ref" => "v2.0.0",
 "build_dir" => "static"
 },
@@ -24,11 +24,11 @@
 config :web_push_encryption, :vapid_details, subject: "mailto:#{System.get_env("NOTIFY_EMAIL")}"
 
 config :pleroma, :database, rum_enabled: false
-config :pleroma, :instance, static_dir: "/var/lib/pleroma/static"
+config :pleroma, :instance, static_dir: "/var/lib/akkoma/static"
-config :pleroma, Pleroma.Uploaders.Local, uploads: "/var/lib/pleroma/uploads"
+config :pleroma, Pleroma.Uploaders.Local, uploads: "/var/lib/akkoma/uploads"
 
 # We can't store the secrets in this file, since this is baked into the docker image
-if not File.exists?("/var/lib/pleroma/secret.exs") do
+if not File.exists?("/var/lib/akkoma/secret.exs") do
 secret = :crypto.strong_rand_bytes(64) |> Base.encode64() |> binary_part(0, 64)
 signing_salt = :crypto.strong_rand_bytes(8) |> Base.encode64() |> binary_part(0, 8)
 {web_push_public_key, web_push_private_key} = :crypto.generate_key(:ecdh, :prime256v1)

@@ -52,16 +52,16 @@
 web_push_private_key: Base.url_encode64(web_push_private_key, padding: false)
 )
 
-File.write("/var/lib/pleroma/secret.exs", secret_file)
+File.write("/var/lib/akkoma/secret.exs", secret_file)
 end
 
-import_config("/var/lib/pleroma/secret.exs")
+import_config("/var/lib/akkoma/secret.exs")
 
 # For additional user config
-if File.exists?("/var/lib/pleroma/config.exs"),
+if File.exists?("/var/lib/akkoma/config.exs"),
-do: import_config("/var/lib/pleroma/config.exs"),
+do: import_config("/var/lib/akkoma/config.exs"),
 else:
-File.write("/var/lib/pleroma/config.exs", """
+File.write("/var/lib/akkoma/config.exs", """
 import Config
 
 # For additional configuration outside of environmental variables
61  docker-compose.yml  (Normal file)

@@ -0,0 +1,61 @@
version: "3.7"

services:
  db:
    image: akkoma-db:latest
    build: ./docker-resources/database
    restart: unless-stopped
    user: ${DOCKER_USER}
    environment: {
      # This might seem insecure but is usually not a problem.
      # You should leave this at the "akkoma" default.
      # The DB is only reachable by containers in the same docker network,
      # and is not exposed to the open internet.
      #
      # If you do change this, remember to update "config.exs".
      POSTGRES_DB: akkoma,
      POSTGRES_USER: akkoma,
      POSTGRES_PASSWORD: akkoma,
    }
    env_file:
      - .env
    volumes:
      - type: bind
        source: ./pgdata
        target: /var/lib/postgresql/data

  akkoma:
    image: akkoma:latest
    build: .
    restart: unless-stopped
    env_file:
      - .env
    links:
      - db
    ports: [
      # Uncomment/Change port mappings below as needed.
      # The left side is your host machine, the right one is the akkoma container.
      # You can prefix the left side with an ip.

      # Webserver (for reverse-proxies outside of docker)
      # If you use a dockerized proxy, you can leave this commented
      # and use a container link instead.
      "127.0.0.1:4000:4000",
    ]
    volumes:
      - .:/opt/akkoma

  # Uncomment the following if you want to use a reverse proxy
  #proxy:
  #  image: caddy:2-alpine
  #  restart: unless-stopped
  #  links:
  #    - akkoma
  #  ports: [
  #    "443:443",
  #    "80:80"
  #  ]
  #  volumes:
  #    - ./docker-resources/Caddyfile:/etc/caddy/Caddyfile
  #    - ./caddy-data:/data
  #    - ./caddy-config:/config
@@ -8,7 +8,7 @@ while ! pg_isready -U ${DB_USER:-pleroma} -d postgres://${DB_HOST:-db}:5432/${DB
 done
 
 echo "-- Running migrations..."
-$HOME/bin/pleroma_ctl migrate
+mix ecto.migrate
 
 echo "-- Starting!"
-exec $HOME/bin/pleroma start
+mix phx.server
14  docker-resources/Caddyfile.example  (Normal file)

@@ -0,0 +1,14 @@
# default docker Caddyfile config for Akkoma
#
# Simple installation instructions:
# 1. Replace 'example.tld' with your instance's domain wherever it appears.

example.tld {
  log {
    output file /var/log/caddy/akkoma.log
  }

  encode gzip

  reverse_proxy akkoma:4000
}
docker-resources/build.sh
Executable file
4
docker-resources/build.sh
Executable file
|
@ -0,0 +1,4 @@
|
||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
docker-compose build --build-arg UID=$(id -u) --build-arg GID=$(id -g) akkoma
|
||||||
|
docker-compose build --build-arg UID=$(id -u) --build-arg GID=$(id -g) db
|
10  docker-resources/database/Dockerfile  (Normal file)

@@ -0,0 +1,10 @@
FROM postgres:14-alpine

ARG UID=1000
ARG GID=1000
ARG UNAME=akkoma

RUN addgroup -g $GID $UNAME
RUN adduser -u $UID -G $UNAME -D -h $HOME $UNAME

USER akkoma
4  docker-resources/env.example  (Normal file)

@@ -0,0 +1,4 @@
MIX_ENV=prod
DB_NAME=akkoma
DB_USER=akkoma
DB_PASS=akkoma
3  docker-resources/manage.sh  (Executable file)

@@ -0,0 +1,3 @@
#!/bin/sh

docker-compose run --rm akkoma $@
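The wrapper above simply forwards its arguments to a one-off `docker-compose run` of the `akkoma` service, which is how the installation guide added later in this compare runs mix tasks, for example:

    # run a mix task inside the akkoma container (invocation taken from the docs below)
    ./docker-resources/manage.sh mix pleroma.instance gen

Any other mix task can be substituted in the same way.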
@@ -14,6 +14,10 @@ su akkoma -s $SHELL -lc "./bin/pleroma_ctl update"
 su akkoma -s $SHELL -lc "./bin/pleroma_ctl migrate"
 ```
 
+If you selected an alternate flavour on installation,
+you _may_ need to specify `--flavour`, in the same way as
+[when installing](../../installation/otp_en#detecting-flavour).
+
 ## For from source installations (using git)
 
 1. Go to the working directory of Akkoma (default is `/opt/akkoma`)
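A worked example of the `--flavour` flag mentioned in the added lines (illustrative only; the flavour value is taken from the distribution/flavour mapping table further down in this compare, and the command mirrors the existing update invocation):

    su akkoma -s $SHELL -lc "./bin/pleroma_ctl update --flavour amd64-ubuntu-jammy"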
@@ -14,11 +14,12 @@ apt -yq install tor
 
 **WARNING:** Onion instances not using a Tor version supporting V3 addresses will not be able to federate with you.
 
-Create the hidden service for your Akkoma instance in `/etc/tor/torrc`:
+Create the hidden service for your Akkoma instance in `/etc/tor/torrc`, with an HTTP tunnel:
 ```
 HiddenServiceDir /var/lib/tor/akkoma_hidden_service/
 HiddenServicePort 80 127.0.0.1:8099
 HiddenServiceVersion 3 # Remove if Tor version is below 0.3 ( tor --version )
+HTTPTunnelPort 9080
 ```
 Restart Tor to generate an adress:
 ```

@@ -35,7 +36,7 @@ Next, edit your Akkoma config.
 If running in prod, navigate to your Akkoma directory, edit `config/prod.secret.exs`
 and append this line:
 ```
-config :pleroma, :http, proxy_url: {:socks5, :localhost, 9050}
+config :pleroma, :http, proxy_url: "http://localhost:9080"
 ```
 In your Akkoma directory, assuming you're running prod,
 run the following:
@@ -141,8 +141,7 @@ You then need to set the URL and authentication credentials if relevant.
 
 ### Initial indexing
 
-After setting up the configuration, you'll want to index all of your already existsing posts. Only public posts are indexed. You'll only
-have to do it one time, but it might take a while, depending on the amount of posts your instance has seen.
+After setting up the configuration, you'll want to index all of your already existsing posts. You'll only have to do it one time, but it might take a while, depending on the amount of posts your instance has seen.
 
 The sequence of actions is as follows:
 
@@ -7,6 +7,20 @@ It actually consists of two components: a backend, named simply Akkoma, and a us
 It's part of what we call the fediverse, a federated network of instances which speak common protocols and can communicate with each other.
 One account on an instance is enough to talk to the entire fediverse!
 
+## Community Channels
+
+### IRC
+
+For support or general questions, pop over to #akkoma and #akkoma-dev at [irc.akkoma.dev](https://irc.akkoma.dev) (port 6697, SSL)
+
+### Discourse
+
+For more general meta-discussion, for example discussion of potential future features, head on over to [meta.akkoma.dev](https://meta.akkoma.dev)
+
+### Dev diaries and release notifications
+
+will be posted via [@akkoma@ihba](https://ihatebeinga.live/users/akkoma)
+
 ## How can I use it?
 
 Akkoma instances are already widely deployed, a list can be found at <https://the-federation.info/pleroma> and <https://fediverse.network/pleroma>.

@@ -26,3 +40,4 @@ Just add a "/web" after your instance url (e.g. <https://pleroma.soykaf.com/web>
 The Mastodon interface is from the Glitch-soc fork. For more information on the Mastodon interface you can check the [Mastodon](https://docs.joinmastodon.org/) and [Glitch-soc](https://glitch-soc.github.io/docs/) documentation.
 
 Remember, what you see is only the frontend part of Mastodon, the backend is still Akkoma.
+
161  docs/docs/installation/docker_en.md  (Normal file)

@@ -0,0 +1,161 @@
# Installing in Docker

## Installation

This guide will show you how to get akkoma working in a docker container,
if you want isolation, or if you run a distribution not supported by the OTP
releases.

If you want to migrate from or OTP to docker, check out [the migration guide](./migrating_to_docker_en.md).

### Prepare the system

* Install docker and docker-compose
    * [Docker](https://docs.docker.com/engine/install/)
    * [Docker-compose](https://docs.docker.com/compose/install/)
    * This will usually just be a repository installation and a package manager invocation.
* Clone the akkoma repository
    * `git clone https://akkoma.dev/AkkomaGang/akkoma.git -b stable`
    * `cd akkoma`

### Set up basic configuration

```bash
cp docker-resources/env.example .env
echo "DOCKER_USER=$(id -u):$(id -g)" >> .env
```

This probably won't need to be changed, it's only there to set basic environment
variables for the docker-compose file.

### Building the container

The container provided is a thin wrapper around akkoma's dependencies,
it does not contain the code itself. This is to allow for easy updates
and debugging if required.

```bash
./docker-resources/build.sh
```

This will generate a container called `akkoma` which we can use
in our compose environment.

### Generating your instance

```bash
mkdir pgdata
./docker-resources/manage.sh mix deps.get
./docker-resources/manage.sh mix compile
./docker-resources/manage.sh mix pleroma.instance gen
```

This will ask you a few questions - the defaults are fine for most things,
the database hostname is `db`, and you will want to set the ip to `0.0.0.0`.

Now we'll want to copy over the config it just created

```bash
cp config/generated_config.exs config/prod.secret.exs
```

### Setting up the database

We need to run a few commands on the database container, this isn't too bad

```bash
docker-compose run --rm --user akkoma -d db
# Note down the name it gives here, it will be something like akkoma_db_run
docker-compose run --rm akkoma psql -h db -U akkoma -f config/setup_db.psql
docker stop akkoma_db_run # Replace with the name you noted down
```

Now we can actually run our migrations

```bash
./docker-resources/manage.sh mix ecto.migrate
# this will recompile your files at the same time, since we changed the config
```

### Start the server

We're going to run it in the foreground on the first run, just to make sure
everything start up.

```bash
docker-compose up
```

If everything went well, you should be able to access your instance at http://localhost:4000

You can `ctrl-c` out of the docker-compose now to shutdown the server.

### Running in the background

```bash
docker-compose up -d
```

### Create your first user

If your instance is up and running, you can create your first user with administrative rights with the following task:

```shell
./docker-resources/manage.sh mix pleroma.user new MY_USERNAME MY_EMAIL@SOMEWHERE --admin
```

And follow the prompts

### Reverse proxies

This is a tad more complex in docker than on the host itself. It

You've got two options.

#### Running caddy in a container

This is by far the easiest option. It'll handle HTTPS and all that for you.

```bash
mkdir caddy-data
mkdir caddy-config
cp docker-resources/Caddyfile.example docker-resources/Caddyfile
```

Then edit the TLD in your caddyfile to the domain you're serving on.

Uncomment the `caddy` section in the docker-compose file,
then run `docker-compose up -d` again.

#### Running a reverse proxy on the host

If you want, you can also run the reverse proxy on the host. This is a bit more complex, but it's also more flexible.

Follow the guides for source install for your distribution of choice, or adapt
as needed. Your standard setup can be found in the [Debian Guide](../debian_based_en/#nginx)

### You're done!

All that's left is to set up your frontends.

The standard from-source commands will apply to you, just make sure you
prefix them with `./docker-resources/manage.sh`!

{! installation/frontends.include !}

### Updating Docker Installs

```bash
git pull
./docker-resources/build.sh
./docker-resources/manage.sh mix deps.get
./docker-resources/manage.sh mix compile
./docker-resources/manage.sh mix ecto.migrate
docker-compose restart akkoma db
```

#### Further reading

{! installation/further_reading.include !}

{! support.include !}
@@ -21,5 +21,11 @@ For most installations, the following will suffice:
 mix pleroma.frontend install admin-fe --ref stable
 ```
 
+=== "Docker"
+```sh
+./docker-resources/manage.sh mix pleroma.frontend install pleroma-fe --ref stable
+./docker-resources/manage.sh mix pleroma.frontend install admin-fe --ref stable
+```
+
 For more customised installations, refer to [Frontend Management](../../configuration/frontend_management)
 
158  docs/docs/installation/migrating_to_docker_en.md  (Normal file)

@@ -0,0 +1,158 @@
# Migrating to a Docker Installation

If you for any reason wish to migrate a source or OTP install to a docker one,
this guide is for you.

You have a few options - your major one will be whether you want to keep your
reverse-proxy setup from before.

You probably should, in the first instance.

### Prepare the system

* Install docker and docker-compose
    * [Docker](https://docs.docker.com/engine/install/)
    * [Docker-compose](https://docs.docker.com/compose/install/)
    * This will usually just be a repository installation and a package manager invocation.

=== "Source"
    ```bash
    git pull
    ```

=== "OTP"
    Clone the akkoma repository

    ```bash
    git clone https://akkoma.dev/AkkomaGang/akkoma.git -b stable
    cd akkoma
    ```

### Back up your old database

Change the database name as needed

```bash
pg_dump -d akkoma_prod --format c > akkoma_backup.sql
```

### Getting your static files in the right place

This will vary by every installation. Copy your `instance` directory to `instance/` in
the akkoma source directory - this is where the docker container will look for it.

For *most* from-source installs it'll already be there.

And the same with `uploads`, make sure your uploads (if you have them on disk) are
located at `uploads/` in the akkoma source directory.

If you have them on a different disk, you will need to mount that disk into the docker-compose file,
with an entry that looks like this:

```yaml
akkoma:
  volumes:
    - .:/opt/akkoma # This should already be there
    - type: bind
      source: /path/to/your/uploads
      target: /opt/akkoma/uploads
```

### Set up basic configuration

```bash
cp docker-resources/env.example .env
echo "DOCKER_USER=$(id -u):$(id -g)" >> .env
```

This probably won't need to be changed, it's only there to set basic environment
variables for the docker-compose file.

=== "From source"

    You probably won't need to change your config. Provided your `config/prod.secret.exs` file
    is still there, you're all good.

=== "OTP"
    ```bash
    cp /etc/akkoma/config.exs config/prod.secret.exs
    ```

**BOTH**

Set the following config in `config/prod.secret.exs`:
```elixir
config :pleroma, Pleroma.Web.Endpoint,
   ...,
   http: [ip: {0, 0, 0, 0}, port: 4000]

config :pleroma, Pleroma.Repo,
  ...,
  username: "akkoma",
  password: "akkoma",
  database: "akkoma",
  hostname: "db"
```

### Building the container

The container provided is a thin wrapper around akkoma's dependencies,
it does not contain the code itself. This is to allow for easy updates
and debugging if required.

```bash
./docker-resources/build.sh
```

This will generate a container called `akkoma` which we can use
in our compose environment.

### Setting up the docker resources

```bash
# These won't exist if you're migrating from OTP
rm -rf deps
rm -rf _build
```

```bash
mkdir pgdata
./docker-resources/manage.sh mix deps.get
./docker-resources/manage.sh mix compile
```

### Setting up the database

Now we can import our database to the container.

```bash
docker-compose run --rm --user akkoma -d db
docker-compose run --rm akkoma pg_restore -v -U akkoma -j $(grep -c ^processor /proc/cpuinfo) -d akkoma -h db akkoma_backup.sql
```

### Reverse proxies

If you're just reusing your old proxy, you may have to uncomment the line in
the docker-compose file under `ports`. You'll find it.

Otherwise, you can use the same setup as the [docker installation guide](./docker_en.md#reverse-proxies).

### Let's go

```bash
docker-compose up -d
```

You should now be at the same point as you were before, but with a docker install.

{! installation/frontends.include !}

See the [docker installation guide](./docker_en.md) for more information on how to
update.

#### Further reading

{! installation/further_reading.include !}

{! support.include !}
@@ -19,12 +19,12 @@ This is a little more complex than it used to be (thanks ubuntu)
 
 Use the following mapping to figure out your flavour:
 
-| distribution | flavour |
-| ------------- | ------------ |
-| debian stable | amd64 |
-| ubuntu focal | amd64 |
-| ubuntu jammy | ubuntu-jammy |
-| alpine | amd64-musl |
+| distribution | flavour | available branches |
+| ------------- | ------------------ | ------------------- |
+| debian stable | amd64 | develop, stable |
+| ubuntu focal | amd64 | develop, stable |
+| ubuntu jammy | amd64-ubuntu-jammy | develop, stable |
+| alpine | amd64-musl | stable |
 
 Other similar distributions will _probably_ work, but if it is not listed above, there is no official
 support.
@@ -538,6 +538,12 @@ def run(["fix_follow_state", local_user, remote_user]) do
 end
 end
 
+def run(["convert_id", id]) do
+{:ok, uuid} = FlakeId.Ecto.Type.dump(id)
+{:ok, raw_id} = Ecto.UUID.load(uuid)
+shell_info(raw_id)
+end
+
 defp refetch_public_keys(query) do
 query
 |> Pleroma.Repo.chunk_stream(50, :batches)
@@ -368,9 +368,15 @@ def following_requests_for_actor(%User{ap_id: ap_id}) do
 end
 
 def restrict_deactivated_users(query) do
-deactivated_users_query = from(u in User.Query.build(%{deactivated: true}), select: u.ap_id)
-
-from(activity in query, where: activity.actor not in subquery(deactivated_users_query))
+query
+|> join(
+:inner_lateral,
+[activity],
+active in fragment(
+"SELECT is_active from users WHERE ap_id = ? AND is_active = TRUE",
+activity.actor
+)
+)
 end
 
 defdelegate search(user, query, options \\ []), to: Pleroma.Search.DatabaseSearch
100  lib/pleroma/akkoma/frontend_setting_profile.ex  (Normal file)

@@ -0,0 +1,100 @@
defmodule Pleroma.Akkoma.FrontendSettingsProfile do
  use Ecto.Schema

  import Ecto.Changeset
  import Ecto.Query
  alias Pleroma.Repo
  alias Pleroma.Config
  alias Pleroma.User

  @primary_key false
  schema "user_frontend_setting_profiles" do
    belongs_to(:user, Pleroma.User, primary_key: true, type: FlakeId.Ecto.CompatType)
    field(:frontend_name, :string, primary_key: true)
    field(:profile_name, :string, primary_key: true)
    field(:settings, :map)
    field(:version, :integer)
    timestamps()
  end

  def changeset(%__MODULE__{} = struct, attrs) do
    struct
    |> cast(attrs, [:user_id, :frontend_name, :profile_name, :settings, :version])
    |> validate_required([:user_id, :frontend_name, :profile_name, :settings, :version])
    |> validate_length(:frontend_name, min: 1, max: 255)
    |> validate_length(:profile_name, min: 1, max: 255)
    |> validate_version(struct)
    |> validate_number(:version, greater_than: 0)
    |> validate_settings_length(Config.get([:instance, :max_frontend_settings_json_chars]))
  end

  def create_or_update(%User{} = user, frontend_name, profile_name, settings, version) do
    struct =
      case get_by_user_and_frontend_name_and_profile_name(user, frontend_name, profile_name) do
        nil ->
          %__MODULE__{}

        %__MODULE__{} = profile ->
          profile
      end

    struct
    |> changeset(%{
      user_id: user.id,
      frontend_name: frontend_name,
      profile_name: profile_name,
      settings: settings,
      version: version
    })
    |> Repo.insert_or_update()
  end

  def get_all_by_user_and_frontend_name(%User{id: user_id}, frontend_name) do
    Repo.all(
      from(p in __MODULE__, where: p.user_id == ^user_id and p.frontend_name == ^frontend_name)
    )
  end

  def get_by_user_and_frontend_name_and_profile_name(
        %User{id: user_id},
        frontend_name,
        profile_name
      ) do
    Repo.one(
      from(p in __MODULE__,
        where:
          p.user_id == ^user_id and p.frontend_name == ^frontend_name and
            p.profile_name == ^profile_name
      )
    )
  end

  def delete_profile(profile) do
    Repo.delete(profile)
  end

  defp validate_settings_length(
         %Ecto.Changeset{changes: %{settings: settings}} = changeset,
         max_length
       ) do
    settings_json = Jason.encode!(settings)

    if String.length(settings_json) > max_length do
      add_error(changeset, :settings, "is too long")
    else
      changeset
    end
  end

  defp validate_version(changeset, %{version: nil}), do: changeset

  defp validate_version(%Ecto.Changeset{changes: %{version: version}} = changeset, %{
         version: prev_version
       }) do
    if version != prev_version + 1 do
      add_error(changeset, :version, "must be incremented by 1")
    else
      changeset
    end
  end
end
@@ -240,30 +240,6 @@ def find(following_relationships, follower, following) do
 end)
 end
 
-@doc """
-For a query with joined activity,
-keeps rows where activity's actor is followed by user -or- is NOT domain-blocked by user.
-"""
-def keep_following_or_not_domain_blocked(query, user) do
-where(
-query,
-[_, activity],
-fragment(
-# "(actor's domain NOT in domain_blocks) OR (actor IS in followed AP IDs)"
-"""
-NOT (substring(? from '.*://([^/]*)') = ANY(?)) OR
-? = ANY(SELECT ap_id FROM users AS u INNER JOIN following_relationships AS fr
-ON u.id = fr.following_id WHERE fr.follower_id = ? AND fr.state = ?)
-""",
-activity.actor,
-^user.domain_blocks,
-activity.actor,
-^User.binary_id(user.id),
-^accept_state_code()
-)
-)
-end
-
 defp validate_not_self_relationship(%Changeset{} = changeset) do
 changeset
 |> validate_follower_id_following_id_inequality()
@@ -138,7 +138,24 @@ defp exclude_blocked(query, user, opts) do
 
 query
 |> where([n, a], a.actor not in ^blocked_ap_ids)
-|> FollowingRelationship.keep_following_or_not_domain_blocked(user)
+|> restrict_domain_blocked(user)
+end
+
+defp restrict_domain_blocked(query, user) do
+where(
+query,
+[_, activity],
+fragment(
+# "(actor's domain NOT in domain_blocks)"
+"""
+NOT (
+substring(? from '.*://([^/]*)') = ANY(?)
+)
+""",
+activity.actor,
+^user.domain_blocks
+)
+)
 end
 
 defp exclude_blockers(query, user) do
@@ -153,7 +153,7 @@ def add_to_index(activity) do
 )
 
 with {:ok, res} <- result,
-true <- Map.has_key?(res, "uid") do
+true <- Map.has_key?(res, "taskUid") do
 # Do nothing
 else
 _ ->
@@ -165,6 +165,8 @@ defmodule Pleroma.User do
 has_many(:outgoing_relationships, UserRelationship, foreign_key: :source_id)
 has_many(:incoming_relationships, UserRelationship, foreign_key: :target_id)
 
+has_many(:frontend_profiles, Pleroma.Akkoma.FrontendSettingsProfile)
+
 for {relationship_type,
 [
 {outgoing_relation, outgoing_relation_target},
@@ -108,8 +108,8 @@ defp blocked_instances do
 Config.get([:mrf_simple, :reject], [])
 end
 
-defp should_federate?(inbox) do
-%{host: host} = URI.parse(inbox)
+def should_federate?(url) do
+%{host: host} = URI.parse(url)
 
 quarantined_instances =
 blocked_instances()
@@ -323,8 +323,6 @@ def handle(%{data: %{"type" => "Delete", "object" => deleted_object}} = object,
 end
 
 if result == :ok do
-Notification.create_notifications(object)
-
 # Only remove from index when deleting actual objects, not users or anything else
 with %Pleroma.Object{} <- deleted_object do
 Pleroma.Search.remove_from_index(deleted_object)
@@ -0,0 +1,96 @@
defmodule Pleroma.Web.AkkomaAPI.FrontendSettingsController do
  use Pleroma.Web, :controller

  alias Pleroma.Web.Plugs.OAuthScopesPlug
  alias Pleroma.Akkoma.FrontendSettingsProfile

  @unauthenticated_access %{fallback: :proceed_unauthenticated, scopes: []}
  plug(
    OAuthScopesPlug,
    %{@unauthenticated_access | scopes: ["read:accounts"]}
    when action in [
           :list_profiles,
           :get_profile
         ]
  )

  plug(
    OAuthScopesPlug,
    %{@unauthenticated_access | scopes: ["write:accounts"]}
    when action in [
           :update_profile,
           :delete_profile
         ]
  )

  plug(Pleroma.Web.ApiSpec.CastAndValidate)
  defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.FrontendSettingsOperation

  action_fallback(Pleroma.Web.MastodonAPI.FallbackController)

  @doc "GET /api/v1/akkoma/frontend_settings/:frontend_name/:profile_name"
  def get_profile(conn, %{frontend_name: frontend_name, profile_name: profile_name}) do
    with %FrontendSettingsProfile{} = profile <-
           FrontendSettingsProfile.get_by_user_and_frontend_name_and_profile_name(
             conn.assigns.user,
             frontend_name,
             profile_name
           ) do
      conn
      |> json(%{
        settings: profile.settings,
        version: profile.version
      })
    else
      nil -> {:error, :not_found}
    end
  end

  @doc "GET /api/v1/akkoma/frontend_settings/:frontend_name"
  def list_profiles(conn, %{frontend_name: frontend_name}) do
    with profiles <-
           FrontendSettingsProfile.get_all_by_user_and_frontend_name(
             conn.assigns.user,
             frontend_name
           ),
         data <-
           Enum.map(profiles, fn profile ->
             %{name: profile.profile_name, version: profile.version}
           end) do
      json(conn, data)
    end
  end

  @doc "DELETE /api/v1/akkoma/frontend_settings/:frontend_name/:profile_name"
  def delete_profile(conn, %{frontend_name: frontend_name, profile_name: profile_name}) do
    with %FrontendSettingsProfile{} = profile <-
           FrontendSettingsProfile.get_by_user_and_frontend_name_and_profile_name(
             conn.assigns.user,
             frontend_name,
             profile_name
           ),
         {:ok, _} <- FrontendSettingsProfile.delete_profile(profile) do
      json(conn, %{deleted: "ok"})
    else
      nil -> {:error, :not_found}
    end
  end

  @doc "PUT /api/v1/akkoma/frontend_settings/:frontend_name/:profile_name"
  def update_profile(%{body_params: %{settings: settings, version: version}} = conn, %{
        frontend_name: frontend_name,
        profile_name: profile_name
      }) do
    with {:ok, profile} <-
           FrontendSettingsProfile.create_or_update(
             conn.assigns.user,
             frontend_name,
             profile_name,
             settings,
             version
           ) do
      conn
      |> json(profile.settings)
    end
  end
end
@@ -334,6 +334,22 @@ def unblock_operation do
 }
 end
 
+def remove_from_followers_operation do
+%Operation{
+tags: ["Account actions"],
+summary: "Remove from followers",
+operationId: "AccountController.remove_from_followers",
+security: [%{"oAuth" => ["follow", "write:follows"]}],
+description: "Remove the given account from followers",
+parameters: [%Reference{"$ref": "#/components/parameters/accountIdOrNickname"}],
+responses: %{
+200 => Operation.response("Relationship", "application/json", AccountRelationship),
+400 => Operation.response("Error", "application/json", ApiError),
+404 => Operation.response("Error", "application/json", ApiError)
+}
+}
+end
+
 def note_operation do
 %Operation{
 tags: ["Account actions"],
@@ -0,0 +1,133 @@
defmodule Pleroma.Web.ApiSpec.FrontendSettingsOperation do
  alias OpenApiSpex.Operation
  alias OpenApiSpex.Schema
  import Pleroma.Web.ApiSpec.Helpers

  @spec open_api_operation(atom) :: Operation.t()
  def open_api_operation(action) do
    operation = String.to_existing_atom("#{action}_operation")
    apply(__MODULE__, operation, [])
  end

  @spec list_profiles_operation() :: Operation.t()
  def list_profiles_operation() do
    %Operation{
      tags: ["Retrieve frontend setting profiles"],
      summary: "Frontend Settings Profiles",
      description: "List frontend setting profiles",
      operationId: "AkkomaAPI.FrontendSettingsController.list_profiles",
      parameters: [frontend_name_param()],
      security: [%{"oAuth" => ["read:accounts"]}],
      responses: %{
        200 =>
          Operation.response("Profiles", "application/json", %Schema{
            type: :array,
            items: %Schema{
              type: :object,
              properties: %{
                name: %Schema{type: :string},
                version: %Schema{type: :integer}
              }
            }
          })
      }
    }
  end

  @spec get_profile_operation() :: Operation.t()
  def get_profile_operation() do
    %Operation{
      tags: ["Retrieve frontend setting profile"],
      summary: "Frontend Settings Profile",
      description: "Get frontend setting profile",
      operationId: "AkkomaAPI.FrontendSettingsController.get_profile",
      security: [%{"oAuth" => ["read:accounts"]}],
      parameters: [frontend_name_param(), profile_name_param()],
      responses: %{
        200 =>
          Operation.response("Profile", "application/json", %Schema{
            type: :object,
            properties: %{
              "version" => %Schema{type: :integer},
              "settings" => %Schema{type: :object, additionalProperties: true}
            }
          }),
        404 => Operation.response("Not Found", "application/json", %Schema{type: :object})
      }
    }
  end

  @spec delete_profile_operation() :: Operation.t()
  def delete_profile_operation() do
    %Operation{
      tags: ["Delete frontend setting profile"],
      summary: "Delete frontend Settings Profile",
      description: "Delete frontend setting profile",
      operationId: "AkkomaAPI.FrontendSettingsController.delete_profile",
      security: [%{"oAuth" => ["write:accounts"]}],
      parameters: [frontend_name_param(), profile_name_param()],
      responses: %{
        200 => Operation.response("Empty", "application/json", %Schema{type: :object}),
        404 => Operation.response("Not Found", "application/json", %Schema{type: :object})
      }
    }
  end

  @spec update_profile_operation() :: Operation.t()
  def update_profile_operation() do
    %Operation{
      tags: ["Update frontend setting profile"],
      summary: "Frontend Settings Profile",
      description: "Update frontend setting profile",
      operationId: "AkkomaAPI.FrontendSettingsController.update_profile_operation",
      security: [%{"oAuth" => ["write:accounts"]}],
      parameters: [frontend_name_param(), profile_name_param()],
      requestBody: profile_body_param(),
      responses: %{
        200 => Operation.response("Settings", "application/json", %Schema{type: :object}),
        422 => Operation.response("Invalid", "application/json", %Schema{type: :object})
      }
    }
  end

  def frontend_name_param do
    Operation.parameter(:frontend_name, :path, :string, "Frontend name",
      example: "pleroma-fe",
      required: true
    )
  end

  def profile_name_param do
    Operation.parameter(:profile_name, :path, :string, "Profile name",
      example: "mobile",
      required: true
    )
  end

  def profile_body_param do
    request_body(
      "Settings",
      %Schema{
        title: "Frontend Setting Profile",
        type: :object,
        required: [:version, :settings],
        properties: %{
          version: %Schema{
            type: :integer,
            description: "Version of the profile, must increment by 1 each time",
            example: 1
          },
          settings: %Schema{
            type: :object,
            description: "Settings of the profile",
            example: %{
              theme: "dark",
              locale: "en"
            }
          }
        }
      },
      required: true
    )
  end
end
@@ -16,7 +16,7 @@ def index_operation do
 %Operation{
 tags: ["Backups"],
 summary: "List backups",
-security: [%{"oAuth" => ["read:account"]}],
+security: [%{"oAuth" => ["read:backups"]}],
 operationId: "PleromaAPI.BackupController.index",
 responses: %{
 200 =>

@@ -37,7 +37,7 @@ def create_operation do
 %Operation{
 tags: ["Backups"],
 summary: "Create a backup",
-security: [%{"oAuth" => ["read:account"]}],
+security: [%{"oAuth" => ["read:backups"]}],
 operationId: "PleromaAPI.BackupController.create",
 responses: %{
 200 =>
@@ -53,12 +53,19 @@ def publish(%{data: %{"object" => object}} = activity) when is_binary(object) do
 
 @impl true
 def publish(%{data: %{"object" => object}} = activity) when is_map(object) or is_list(object) do
-PublisherWorker.enqueue("publish", %{
-"activity_id" => activity.id,
-"object_data" => Jason.encode!(object)
-})
+PublisherWorker.enqueue(
+"publish",
+%{
+"activity_id" => activity.id,
+"object_data" => Jason.encode!(object)
+},
+priority: publish_priority(activity)
+)
 end
 
+defp publish_priority(%{type: "Delete"}), do: 3
+defp publish_priority(_), do: 0
+
 # Job Worker Callbacks
 
 @spec perform(atom(), module(), any()) :: {:ok, any()} | {:error, any()}
@@ -76,15 +76,16 @@ defmodule Pleroma.Web.MastodonAPI.AccountController do
 
   plug(
     OAuthScopesPlug,
-    %{scopes: ["follow", "write:follows"]} when action in [:follow_by_uri, :follow, :unfollow]
+    %{scopes: ["follow", "write:follows"]}
+    when action in [:follow_by_uri, :follow, :unfollow, :remove_from_followers]
   )
 
   plug(OAuthScopesPlug, %{scopes: ["follow", "read:mutes"]} when action == :mutes)
 
   plug(OAuthScopesPlug, %{scopes: ["follow", "write:mutes"]} when action in [:mute, :unmute])
 
-  @relationship_actions [:follow, :unfollow]
-  @needs_account ~W(followers following lists follow unfollow mute unmute block unblock note)a
+  @relationship_actions [:follow, :unfollow, :remove_from_followers]
+  @needs_account ~W(followers following lists follow unfollow mute unmute block unblock note remove_from_followers)a
 
   plug(
     RateLimiter,

@@ -447,6 +448,20 @@ def note(
     end
   end
 
+  @doc "POST /api/v1/accounts/:id/remove_from_followers"
+  def remove_from_followers(%{assigns: %{user: %{id: id}, account: %{id: id}}}, _params) do
+    {:error, "Can not unfollow yourself"}
+  end
+
+  def remove_from_followers(%{assigns: %{user: followed, account: follower}} = conn, _params) do
+    with {:ok, follower} <- CommonAPI.reject_follow_request(follower, followed) do
+      render(conn, "relationship.json", user: followed, target: follower)
+    else
+      nil ->
+        render_error(conn, :not_found, "Record not found")
+    end
+  end
+
   @doc "POST /api/v1/follows"
   def follow_by_uri(%{body_params: %{uri: uri}} = conn, _) do
     case User.get_cached_by_nickname(uri) do
@@ -9,7 +9,7 @@ defmodule Pleroma.Web.PleromaAPI.BackupController do
   alias Pleroma.Web.Plugs.OAuthScopesPlug
 
   action_fallback(Pleroma.Web.MastodonAPI.FallbackController)
-  plug(OAuthScopesPlug, %{scopes: ["read:accounts"]} when action in [:index, :create])
+  plug(OAuthScopesPlug, %{scopes: ["read:backups"]} when action in [:index, :create])
   plug(Pleroma.Web.ApiSpec.CastAndValidate)
 
   defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.PleromaBackupOperation
@@ -19,6 +19,7 @@ def call(%{assigns: %{user: %User{}}} = conn, _opts), do: conn
   def call(%{assigns: %{valid_signature: true}, params: %{"actor" => actor}} = conn, _opts) do
     with actor_id <- Utils.get_ap_id(actor),
          {:user, %User{} = user} <- {:user, user_from_key_id(conn)},
+         {:federate, true} <- {:federate, should_federate?(user)},
          {:user_match, true} <- {:user_match, user.ap_id == actor_id} do
       conn
       |> assign(:user, user)

@@ -27,33 +28,70 @@ def call(%{assigns: %{valid_signature: true}, params: %{"actor" => actor}} = con
       {:user_match, false} ->
         Logger.debug("Failed to map identity from signature (payload actor mismatch)")
         Logger.debug("key_id=#{inspect(key_id_from_conn(conn))}, actor=#{inspect(actor)}")
-        assign(conn, :valid_signature, false)
+
+        conn
+        |> assign(:valid_signature, false)
 
       # remove me once testsuite uses mapped capabilities instead of what we do now
       {:user, nil} ->
         Logger.debug("Failed to map identity from signature (lookup failure)")
         Logger.debug("key_id=#{inspect(key_id_from_conn(conn))}, actor=#{actor}")
 
         conn
+        |> assign(:valid_signature, false)
+
+      {:federate, false} ->
+        Logger.debug("Identity from signature is instance blocked")
+        Logger.debug("key_id=#{inspect(key_id_from_conn(conn))}, actor=#{actor}")
+
+        conn
+        |> assign(:valid_signature, false)
     end
   end
 
   # no payload, probably a signed fetch
   def call(%{assigns: %{valid_signature: true}} = conn, _opts) do
-    with %User{} = user <- user_from_key_id(conn) do
+    with %User{} = user <- user_from_key_id(conn),
+         {:federate, true} <- {:federate, should_federate?(user)} do
       conn
       |> assign(:user, user)
       |> AuthHelper.skip_oauth()
     else
+      {:federate, false} ->
+        Logger.debug("Identity from signature is instance blocked")
+        Logger.debug("key_id=#{inspect(key_id_from_conn(conn))}")
+
+        conn
+        |> assign(:valid_signature, false)
+
+      nil ->
+        Logger.debug("Failed to map identity from signature (lookup failure)")
+        Logger.debug("key_id=#{inspect(key_id_from_conn(conn))}")
+
+        only_permit_user_routes(conn)
+
       _ ->
         Logger.debug("Failed to map identity from signature (no payload actor mismatch)")
         Logger.debug("key_id=#{inspect(key_id_from_conn(conn))}")
-        assign(conn, :valid_signature, false)
+
+        conn
+        |> assign(:valid_signature, false)
     end
   end
 
   # no signature at all
   def call(conn, _opts), do: conn
 
+  defp only_permit_user_routes(%{path_info: ["users", _]} = conn) do
+    conn
+    |> assign(:limited_ap, true)
+  end
+
+  defp only_permit_user_routes(conn) do
+    conn
+    |> assign(:valid_signature, false)
+  end
+
   defp key_id_from_conn(conn) do
     with %{"keyId" => key_id} <- HTTPSignatures.signature_for_conn(conn),
          {:ok, ap_id} <- Signature.key_id_to_actor_id(key_id) do

@@ -73,4 +111,14 @@ defp user_from_key_id(conn) do
       nil
     end
   end
+
+  defp should_federate?(%User{ap_id: ap_id}), do: should_federate?(ap_id)
+
+  defp should_federate?(ap_id) do
+    if Pleroma.Config.get([:activitypub, :authorized_fetch_mode], false) do
+      Pleroma.Web.ActivityPub.Publisher.should_federate?(ap_id)
+    else
+      true
+    end
+  end
 end
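Note that should_federate?/1 only has an effect when authorized fetch mode is enabled; a minimal configuration sketch (standard Pleroma-style config, not part of this diff):

config :pleroma, :activitypub, authorized_fetch_mode: true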
@@ -466,6 +466,26 @@ defmodule Pleroma.Web.Router do
     scope "/api/v1/akkoma", Pleroma.Web.AkkomaAPI do
       pipe_through(:authenticated_api)
       get("/translation/languages", TranslationController, :languages)
+
+      get("/frontend_settings/:frontend_name", FrontendSettingsController, :list_profiles)
+
+      get(
+        "/frontend_settings/:frontend_name/:profile_name",
+        FrontendSettingsController,
+        :get_profile
+      )
+
+      put(
+        "/frontend_settings/:frontend_name/:profile_name",
+        FrontendSettingsController,
+        :update_profile
+      )
+
+      delete(
+        "/frontend_settings/:frontend_name/:profile_name",
+        FrontendSettingsController,
+        :delete_profile
+      )
     end
 
     scope "/api/v1", Pleroma.Web.MastodonAPI do

@@ -489,6 +509,7 @@ defmodule Pleroma.Web.Router do
       post("/accounts/:id/mute", AccountController, :mute)
       post("/accounts/:id/unmute", AccountController, :unmute)
       post("/accounts/:id/note", AccountController, :note)
+      post("/accounts/:id/remove_from_followers", AccountController, :remove_from_followers)
 
       get("/conversations", ConversationController, :index)
       post("/conversations/:id/read", ConversationController, :mark_as_read)
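As a rough usage sketch of the new routes (hostless conn-style call as in the tests further down; the token and values are illustrative and assume an OAuth token carrying a matching write scope):

conn
|> put_req_header("authorization", "Bearer <token>")
|> put_req_header("content-type", "application/json")
|> put("/api/v1/akkoma/frontend_settings/pleroma-fe/mobile", %{
  "version" => 1,
  "settings" => %{"theme" => "dark"}
})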
4 mix.exs

@@ -4,7 +4,7 @@ defmodule Pleroma.Mixfile do
   def project do
     [
       app: :pleroma,
-      version: version("3.2.0"),
+      version: version("3.3.1"),
       elixir: "~> 1.12",
       elixirc_paths: elixirc_paths(Mix.env()),
       compilers: [:phoenix, :gettext] ++ Mix.compilers(),

@@ -120,7 +120,7 @@ defp deps do
       {:phoenix_pubsub, "~> 2.1"},
       {:phoenix_ecto, "~> 4.4"},
       {:ecto_enum, "~> 1.4"},
-      {:ecto_sql, "~> 3.8.3"},
+      {:ecto_sql, "~> 3.9.0"},
       {:postgrex, ">= 0.16.3"},
       {:oban, "~> 2.12.1"},
       {:gettext,
8 mix.lock

@@ -26,10 +26,10 @@
   "earmark": {:hex, :earmark, "1.4.26", "f0e3c3d5c278a6d448ad8c27ab0ecdec9c57a7710553138c56af220a6330a4fd", [:mix], [{:earmark_parser, "~> 1.4.26", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "e1231882b56bece0692af33f0959f06c9cd580c2dc2ecb1dc9f16f2750fa78c5"},
   "earmark_parser": {:hex, :earmark_parser, "1.4.26", "f4291134583f373c7d8755566122908eb9662df4c4b63caa66a0eabe06569b0a", [:mix], [], "hexpm", "48d460899f8a0c52c5470676611c01f64f3337bad0b26ddab43648428d94aabc"},
   "eblurhash": {:hex, :eblurhash, "1.2.2", "7da4255aaea984b31bb71155f673257353b0e0554d0d30dcf859547e74602582", [:rebar3], [], "hexpm", "8c20ca00904de023a835a9dcb7b7762fed32264c85a80c3cafa85288e405044c"},
-  "ecto": {:hex, :ecto, "3.8.4", "e06b8b87e62b27fea17fd2ff6041572ddd10339fd16cdf58446e402c6c90a74b", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "f9244288b8d42db40515463a008cf3f4e0e564bb9c249fe87bf28a6d79fe82d4"},
+  "ecto": {:hex, :ecto, "3.9.1", "67173b1687afeb68ce805ee7420b4261649d5e2deed8fe5550df23bab0bc4396", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c80bb3d736648df790f7f92f81b36c922d9dd3203ca65be4ff01d067f54eb304"},
   "ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"},
   "ecto_psql_extras": {:hex, :ecto_psql_extras, "0.7.4", "5d43fd088d39a158c860b17e8d210669587f63ec89ea122a4654861c8c6e2db4", [:mix], [{:ecto_sql, "~> 3.4", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:postgrex, ">= 0.15.7", [hex: :postgrex, repo: "hexpm", optional: false]}, {:table_rex, "~> 3.1.1", [hex: :table_rex, repo: "hexpm", optional: false]}], "hexpm", "311db02f1b772e3d0dc7f56a05044b5e1499d78ed6abf38885e1ca70059449e5"},
-  "ecto_sql": {:hex, :ecto_sql, "3.8.3", "a7d22c624202546a39d615ed7a6b784580391e65723f2d24f65941b4dd73d471", [:mix], [{:db_connection, "~> 2.5 or ~> 2.4.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.8.4", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0 or ~> 0.16.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "348cb17fb9e6daf6f251a87049eafcb57805e2892e5e6a0f5dea0985d367329b"},
+  "ecto_sql": {:hex, :ecto_sql, "3.9.0", "2bb21210a2a13317e098a420a8c1cc58b0c3421ab8e3acfa96417dab7817918c", [:mix], [{:db_connection, "~> 2.5 or ~> 2.4.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.9.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a8f3f720073b8b1ac4c978be25fa7960ed7fd44997420c304a4a2e200b596453"},
   "elasticsearch": {:git, "https://akkoma.dev/AkkomaGang/elasticsearch-elixir.git", "6cd946f75f6ab9042521a009d1d32d29a90113ca", [ref: "main"]},
   "elixir_make": {:hex, :elixir_make, "0.6.3", "bc07d53221216838d79e03a8019d0839786703129599e9619f4ab74c8c096eac", [:mix], [], "hexpm", "f5cbd651c5678bcaabdbb7857658ee106b12509cd976c2c2fca99688e1daf716"},
   "eternal": {:hex, :eternal, "1.2.2", "d1641c86368de99375b98d183042dd6c2b234262b8d08dfd72b9eeaafc2a1abd", [:mix], [], "hexpm", "2c9fe32b9c3726703ba5e1d43a1d255a4f3f2d8f8f9bc19f094c7cb1a7a9e782"},

@@ -56,7 +56,7 @@
   "httpoison": {:hex, :httpoison, "1.8.1", "df030d96de89dad2e9983f92b0c506a642d4b1f4a819c96ff77d12796189c63e", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "35156a6d678d6d516b9229e208942c405cf21232edd632327ecfaf4fd03e79e0"},
   "idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"},
   "inet_cidr": {:hex, :inet_cidr, "1.0.4", "a05744ab7c221ca8e395c926c3919a821eb512e8f36547c062f62c4ca0cf3d6e", [:mix], [], "hexpm", "64a2d30189704ae41ca7dbdd587f5291db5d1dda1414e0774c29ffc81088c1bc"},
-  "jason": {:hex, :jason, "1.3.0", "fa6b82a934feb176263ad2df0dbd91bf633d4a46ebfdffea0c8ae82953714946", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "53fc1f51255390e0ec7e50f9cb41e751c260d065dcba2bf0d08dc51a4002c2ac"},
+  "jason": {:hex, :jason, "1.4.0", "e855647bc964a44e2f67df589ccf49105ae039d4179db7f6271dfd3843dc27e6", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "79a3791085b2a0f743ca04cec0f7be26443738779d09302e01318f97bdb82121"},
   "joken": {:hex, :joken, "2.5.0", "09be497d804b8115eb6f07615cef2e60c2a1008fb89dc0aef0d4c4b4609b99aa", [:mix], [{:jose, "~> 1.11.2", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "22b25c89617c5ed8ca7b31026340a25ea0f9ca7160f9706b79be9ed81fdf74e7"},
   "jose": {:hex, :jose, "1.11.2", "f4c018ccf4fdce22c71e44d471f15f723cb3efab5d909ab2ba202b5bf35557b3", [:mix, :rebar3], [], "hexpm", "98143fbc48d55f3a18daba82d34fe48959d44538e9697c08f34200fa5f0947d2"},
   "jumper": {:hex, :jumper, "1.0.1", "3c00542ef1a83532b72269fab9f0f0c82bf23a35e27d278bfd9ed0865cecabff", [:mix], [], "hexpm", "318c59078ac220e966d27af3646026db9b5a5e6703cb2aa3e26bcfaba65b7433"},

@@ -94,7 +94,7 @@
   "plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "79fd4fcf34d110605c26560cbae8f23c603ec4158c08298bd4360fdea90bb5cf"},
   "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm", "fec8660eb7733ee4117b85f55799fd3833eb769a6df71ccf8903e8dc5447cfce"},
   "poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm", "dad79704ce5440f3d5a3681c8590b9dc25d1a561e8f5a9c995281012860901e3"},
-  "postgrex": {:hex, :postgrex, "0.16.3", "fac79a81a9a234b11c44235a4494d8565303fa4b9147acf57e48978a074971db", [:mix], [{:connection, "~> 1.1", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "aeaae1d2d1322da4e5fe90d241b0a564ce03a3add09d7270fb85362166194590"},
+  "postgrex": {:hex, :postgrex, "0.16.5", "fcc4035cc90e23933c5d69a9cd686e329469446ef7abba2cf70f08e2c4b69810", [:mix], [{:connection, "~> 1.1", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "edead639dc6e882618c01d8fc891214c481ab9a3788dfe38dd5e37fd1d5fb2e8"},
   "pot": {:hex, :pot, "1.0.2", "13abb849139fdc04ab8154986abbcb63bdee5de6ed2ba7e1713527e33df923dd", [:rebar3], [], "hexpm", "78fe127f5a4f5f919d6ea5a2a671827bd53eb9d37e5b4128c0ad3df99856c2e0"},
   "quack": {:hex, :quack, "0.1.1", "cca7b4da1a233757fdb44b3334fce80c94785b3ad5a602053b7a002b5a8967bf", [:mix], [{:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: false]}, {:tesla, "~> 1.2.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm", "d736bfa7444112eb840027bb887832a0e403a4a3437f48028c3b29a2dbbd2543"},
   "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"},
@@ -0,0 +1,29 @@
defmodule Pleroma.Repo.Migrations.AddUserFrontendProfiles do
  use Ecto.Migration

  def up do
    create_if_not_exists table("user_frontend_setting_profiles", primary_key: false) do
      add(:user_id, references(:users, type: :uuid, on_delete: :delete_all), primary_key: true)
      add(:frontend_name, :string, primary_key: true)
      add(:profile_name, :string, primary_key: true)
      add(:version, :integer)
      add(:settings, :map)
      timestamps()
    end

    create_if_not_exists(index(:user_frontend_setting_profiles, [:user_id, :frontend_name]))

    create_if_not_exists(
      unique_index(:user_frontend_setting_profiles, [:user_id, :frontend_name, :profile_name])
    )
  end

  def down do
    drop_if_exists(table("user_frontend_setting_profiles"))
    drop_if_exists(index(:user_frontend_setting_profiles, [:user_id, :frontend_name]))

    drop_if_exists(
      unique_index(:user_frontend_setting_profiles, [:user_id, :frontend_name, :profile_name])
    )
  end
end
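A schema module matching these columns might look roughly like the sketch below; the real Pleroma.Akkoma.FrontendSettingsProfile module may differ, and the composite primary key plus :binary_id user type are assumptions read off the migration, not taken from this diff.

defmodule Pleroma.Akkoma.FrontendSettingsProfile do
  use Ecto.Schema

  @primary_key false
  schema "user_frontend_setting_profiles" do
    # composite key: one row per user/frontend/profile combination
    belongs_to(:user, Pleroma.User, primary_key: true, type: :binary_id)
    field(:frontend_name, :string, primary_key: true)
    field(:profile_name, :string, primary_key: true)
    field(:version, :integer)
    field(:settings, :map)
    timestamps()
  end
end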
@@ -2,28 +2,24 @@
 # XXX: This should be removed when elixir's releases get custom command support
 
 detect_flavour() {
-	arch="$(uname -m)"
-	if [ "$arch" = "x86_64" ]; then
-		arch="amd64"
-	elif [ "$arch" = "aarch64" ]; then
-		arch="arm64"
-	else
-		echo "Unsupported arch: $arch" >&2
-		exit 1
-	fi
-
-	if getconf GNU_LIBC_VERSION >/dev/null; then
-		libc_postfix=""
-	elif [ "$(ldd 2>&1 | head -c 9)" = "musl libc" ]; then
-		libc_postfix="-musl"
-	elif [ "$(find /lib/libc.musl* | wc -l)" ]; then
-		libc_postfix="-musl"
-	else
-		echo "Unsupported libc" >&2
-		exit 1
-	fi
-
-	echo "$arch$libc_postfix"
+	arch="amd64"
+	# Special cases
+	if grep -qe "VERSION_CODENAME=jammy" /etc/os-release; then
+		echo "$arch-ubuntu-jammy"
+	else
+		if getconf GNU_LIBC_VERSION >/dev/null; then
+			libc_postfix=""
+		elif [ "$(ldd 2>&1 | head -c 9)" = "musl libc" ]; then
+			libc_postfix="-musl"
+		elif [ "$(find /lib/libc.musl* | wc -l)" ]; then
+			libc_postfix="-musl"
+		else
+			echo "Unsupported libc" >&2
+			exit 1
+		fi
+
+		echo "$arch$libc_postfix"
+	fi
 }
 
 detect_branch() {
196 test/pleroma/akkoma/frontend_setting_profile_test.exs Normal file

@@ -0,0 +1,196 @@
defmodule Pleroma.Akkoma.FrontendSettingsProfileTest do
  use Pleroma.DataCase, async: true
  use Oban.Testing, repo: Pleroma.Repo
  alias Pleroma.Akkoma.FrontendSettingsProfile

  import Pleroma.Factory

  describe "changeset/2" do
    test "valid" do
      user = insert(:user)
      frontend_name = "test"
      profile_name = "test"
      settings = %{"test" => "test"}
      struct = %FrontendSettingsProfile{}

      attrs = %{
        user_id: user.id,
        frontend_name: frontend_name,
        profile_name: profile_name,
        settings: settings,
        version: 1
      }

      assert %{valid?: true} = FrontendSettingsProfile.changeset(struct, attrs)
    end

    test "when settings is too long" do
      clear_config([:instance, :max_frontend_settings_json_chars], 10)
      user = insert(:user)
      frontend_name = "test"
      profile_name = "test"
      settings = %{"verylong" => "verylongoops"}
      struct = %FrontendSettingsProfile{}

      attrs = %{
        user_id: user.id,
        frontend_name: frontend_name,
        profile_name: profile_name,
        settings: settings,
        version: 1
      }

      assert %{valid?: false, errors: [settings: {"is too long", _}]} =
               FrontendSettingsProfile.changeset(struct, attrs)
    end

    test "when frontend name is too short" do
      user = insert(:user)
      frontend_name = ""
      profile_name = "test"
      settings = %{"test" => "test"}
      struct = %FrontendSettingsProfile{}

      attrs = %{
        user_id: user.id,
        frontend_name: frontend_name,
        profile_name: profile_name,
        settings: settings,
        version: 1
      }

      assert %{valid?: false, errors: [frontend_name: {"can't be blank", _}]} =
               FrontendSettingsProfile.changeset(struct, attrs)
    end

    test "when profile name is too short" do
      user = insert(:user)
      frontend_name = "test"
      profile_name = ""
      settings = %{"test" => "test"}
      struct = %FrontendSettingsProfile{}

      attrs = %{
        user_id: user.id,
        frontend_name: frontend_name,
        profile_name: profile_name,
        settings: settings,
        version: 1
      }

      assert %{valid?: false, errors: [profile_name: {"can't be blank", _}]} =
               FrontendSettingsProfile.changeset(struct, attrs)
    end

    test "when version is negative" do
      user = insert(:user)
      frontend_name = "test"
      profile_name = "test"
      settings = %{"test" => "test"}
      struct = %FrontendSettingsProfile{}

      attrs = %{
        user_id: user.id,
        frontend_name: frontend_name,
        profile_name: profile_name,
        settings: settings,
        version: -1
      }

      assert %{valid?: false, errors: [version: {"must be greater than %{number}", _}]} =
               FrontendSettingsProfile.changeset(struct, attrs)
    end
  end

  describe "create_or_update/2" do
    test "it should create a new record" do
      user = insert(:user)
      frontend_name = "test"
      profile_name = "test"
      settings = %{"test" => "test"}

      assert {:ok, %FrontendSettingsProfile{}} =
               FrontendSettingsProfile.create_or_update(
                 user,
                 frontend_name,
                 profile_name,
                 settings,
                 1
               )
    end

    test "it should update a record" do
      user = insert(:user)
      frontend_name = "test"
      profile_name = "test"

      insert(:frontend_setting_profile,
        user: user,
        frontend_name: frontend_name,
        profile_name: profile_name,
        settings: %{"test" => "test"},
        version: 1
      )

      settings = %{"test" => "test2"}

      assert {:ok, %FrontendSettingsProfile{settings: ^settings}} =
               FrontendSettingsProfile.create_or_update(
                 user,
                 frontend_name,
                 profile_name,
                 settings,
                 2
               )
    end
  end

  describe "get_all_by_user_and_frontend_name/2" do
    test "it should return all records" do
      user = insert(:user)
      frontend_name = "test"

      insert(:frontend_setting_profile,
        user: user,
        frontend_name: frontend_name,
        profile_name: "profileA",
        settings: %{"test" => "test"},
        version: 1
      )

      insert(:frontend_setting_profile,
        user: user,
        frontend_name: frontend_name,
        profile_name: "profileB",
        settings: %{"test" => "test"},
        version: 1
      )

      assert [%FrontendSettingsProfile{profile_name: "profileA"}, %{profile_name: "profileB"}] =
               FrontendSettingsProfile.get_all_by_user_and_frontend_name(user, frontend_name)
    end
  end

  describe "get_by_user_and_frontend_name_and_profile_name/3" do
    test "it should return a record" do
      user = insert(:user)
      frontend_name = "test"
      profile_name = "profileA"

      insert(:frontend_setting_profile,
        user: user,
        frontend_name: frontend_name,
        profile_name: profile_name,
        settings: %{"test" => "test"},
        version: 1
      )

      assert %FrontendSettingsProfile{profile_name: "profileA"} =
               FrontendSettingsProfile.get_by_user_and_frontend_name_and_profile_name(
                 user,
                 frontend_name,
                 profile_name
               )
    end
  end
end
@@ -1149,18 +1149,6 @@ test "it doesn't return notifications for domain-blocked non-followed user", %{u
       assert Notification.for_user(user) == []
     end
 
-    test "it returns notifications for domain-blocked but followed user" do
-      user = insert(:user)
-      blocked = insert(:user, ap_id: "http://some-domain.com")
-
-      {:ok, user} = User.block_domain(user, "some-domain.com")
-      {:ok, _, _} = User.follow(user, blocked)
-
-      {:ok, _activity} = CommonAPI.post(blocked, %{status: "hey @#{user.nickname}"})
-
-      assert length(Notification.for_user(user)) == 1
-    end
-
     test "it doesn't return notifications for muted thread", %{user: user} do
       another_user = insert(:user)
@@ -47,7 +47,7 @@ test "indexes a local post on creation" do
           Jason.decode!(body)
         )
 
-        json(%{updateId: 1})
+        json(%{taskUid: 1})
       end)
 
       {:ok, activity} =

@@ -100,11 +100,11 @@ test "deletes posts from index when deleted locally" do
            Jason.decode!(body)
          )
 
-          json(%{updateId: 1})
+          json(%{taskUid: 1})
 
        %{method: :delete, url: "http://127.0.0.1:7700/indexes/objects/documents/" <> id} ->
          assert String.length(id) > 1
-          json(%{updateId: 2})
+          json(%{taskUid: 2})
      end)
 
      {:ok, activity} =
@@ -311,7 +311,7 @@ test "local users do not automatically follow local locked accounts" do
   describe "unfollow/2" do
     setup do: clear_config([:instance, :external_user_synchronization])
 
-    test "unfollow with syncronizes external user" do
+    test "unfollow with synchronizes external user" do
       clear_config([:instance, :external_user_synchronization], true)
 
       followed =

@@ -2260,7 +2260,7 @@ test "updates the counters normally on following/getting a follow when disabled"
     assert other_user.follower_count == 1
   end
 
-  test "syncronizes the counters with the remote instance for the followed when enabled" do
+  test "synchronizes the counters with the remote instance for the followed when enabled" do
     clear_config([:instance, :external_user_synchronization], false)
 
     user = insert(:user)

@@ -2282,7 +2282,7 @@ test "syncronizes the counters with the remote instance for the followed when en
     assert other_user.follower_count == 437
   end
 
-  test "syncronizes the counters with the remote instance for the follower when enabled" do
+  test "synchronizes the counters with the remote instance for the follower when enabled" do
     clear_config([:instance, :external_user_synchronization], false)
 
     user = insert(:user)
@@ -559,6 +559,10 @@ test "it inserts an incoming activity into the database", %{conn: conn} do
       conn =
         conn
         |> assign(:valid_signature, true)
+        |> put_req_header(
+          "signature",
+          "keyId=\"http://mastodon.example.org/users/admin/main-key\""
+        )
         |> put_req_header("content-type", "application/activity+json")
         |> post("/inbox", data)

@@ -589,6 +593,7 @@ test "it inserts an incoming activity into the database" <>
       conn =
         conn
         |> assign(:valid_signature, true)
+        |> put_req_header("signature", "keyId=\"#{user.ap_id}/main-key\"")
        |> put_req_header("content-type", "application/activity+json")
         |> post("/inbox", data)

@@ -602,12 +607,15 @@ test "it clears `unreachable` federation status of the sender", %{conn: conn} do
       data = File.read!("test/fixtures/mastodon-post-activity.json") |> Jason.decode!()
 
       sender_url = data["actor"]
+      sender = insert(:user, ap_id: data["actor"])
+
       Instances.set_consistently_unreachable(sender_url)
       refute Instances.reachable?(sender_url)
 
       conn =
         conn
         |> assign(:valid_signature, true)
+        |> put_req_header("signature", "keyId=\"#{sender.ap_id}/main-key\"")
         |> put_req_header("content-type", "application/activity+json")
         |> post("/inbox", data)

@@ -632,6 +640,7 @@ test "accept follow activity", %{conn: conn} do
       assert "ok" ==
                conn
                |> assign(:valid_signature, true)
+               |> put_req_header("signature", "keyId=\"#{followed_relay.ap_id}/main-key\"")
                |> put_req_header("content-type", "application/activity+json")
                |> post("/inbox", accept)
                |> json_response(200)

@@ -698,6 +707,11 @@ test "accepts Add/Remove activities", %{conn: conn} do
 
       actor = "https://example.com/users/lain"
 
+      insert(:user,
+        ap_id: actor,
+        featured_address: "https://example.com/users/lain/collections/featured"
+      )
+
       Tesla.Mock.mock(fn
         %{
           method: :get,

@@ -743,6 +757,7 @@ test "accepts Add/Remove activities", %{conn: conn} do
       assert "ok" ==
                conn
                |> assign(:valid_signature, true)
+               |> put_req_header("signature", "keyId=\"#{actor}/main-key\"")
                |> put_req_header("content-type", "application/activity+json")
                |> post("/inbox", data)
                |> json_response(200)

@@ -750,6 +765,7 @@ test "accepts Add/Remove activities", %{conn: conn} do
       ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
       assert Activity.get_by_ap_id(data["id"])
       user = User.get_cached_by_ap_id(data["actor"])
+
       assert user.pinned_objects[data["object"]]
 
       data = %{

@@ -764,6 +780,7 @@ test "accepts Add/Remove activities", %{conn: conn} do
       assert "ok" ==
                conn
                |> assign(:valid_signature, true)
+               |> put_req_header("signature", "keyId=\"#{actor}/main-key\"")
                |> put_req_header("content-type", "application/activity+json")
                |> post("/inbox", data)
                |> json_response(200)

@@ -790,6 +807,12 @@ test "mastodon pin/unpin", %{conn: conn} do
 
       actor = "https://example.com/users/lain"
 
+      sender =
+        insert(:user,
+          ap_id: actor,
+          featured_address: "https://example.com/users/lain/collections/featured"
+        )
+
       Tesla.Mock.mock(fn
         %{
           method: :get,

@@ -844,6 +867,7 @@ test "mastodon pin/unpin", %{conn: conn} do
       assert "ok" ==
                conn
                |> assign(:valid_signature, true)
+               |> put_req_header("signature", "keyId=\"#{sender.ap_id}/main-key\"")
                |> put_req_header("content-type", "application/activity+json")
                |> post("/inbox", data)
                |> json_response(200)

@@ -863,6 +887,7 @@ test "mastodon pin/unpin", %{conn: conn} do
       assert "ok" ==
                conn
                |> assign(:valid_signature, true)
+               |> put_req_header("signature", "keyId=\"#{actor}/main-key\"")
                |> put_req_header("content-type", "application/activity+json")
                |> post("/inbox", data)
                |> json_response(200)

@@ -894,6 +919,7 @@ test "it inserts an incoming activity into the database", %{conn: conn, data: da
       conn =
         conn
         |> assign(:valid_signature, true)
+        |> put_req_header("signature", "keyId=\"#{data["actor"]}/main-key\"")
         |> put_req_header("content-type", "application/activity+json")
         |> post("/users/#{user.nickname}/inbox", data)

@@ -915,6 +941,7 @@ test "it accepts messages with to as string instead of array", %{conn: conn, dat
       conn =
         conn
         |> assign(:valid_signature, true)
+        |> put_req_header("signature", "keyId=\"#{data["actor"]}/main-key\"")
         |> put_req_header("content-type", "application/activity+json")
         |> post("/users/#{user.nickname}/inbox", data)

@@ -936,6 +963,7 @@ test "it accepts messages with cc as string instead of array", %{conn: conn, dat
       conn =
         conn
         |> assign(:valid_signature, true)
+        |> put_req_header("signature", "keyId=\"#{data["actor"]}/main-key\"")
         |> put_req_header("content-type", "application/activity+json")
         |> post("/users/#{user.nickname}/inbox", data)

@@ -960,6 +988,7 @@ test "it accepts messages with bcc as string instead of array", %{conn: conn, da
       conn =
         conn
         |> assign(:valid_signature, true)
+        |> put_req_header("signature", "keyId=\"#{data["actor"]}/main-key\"")
         |> put_req_header("content-type", "application/activity+json")
         |> post("/users/#{user.nickname}/inbox", data)

@@ -987,6 +1016,7 @@ test "it accepts announces with to as string instead of array", %{conn: conn} do
       conn =
         conn
         |> assign(:valid_signature, true)
+        |> put_req_header("signature", "keyId=\"#{announcer.ap_id}/main-key\"")
         |> put_req_header("content-type", "application/activity+json")
         |> post("/users/#{user.nickname}/inbox", data)

@@ -1017,6 +1047,7 @@ test "it accepts messages from actors that are followed by the user", %{
       conn =
         conn
         |> assign(:valid_signature, true)
+        |> put_req_header("signature", "keyId=\"#{actor.ap_id}/main-key\"")
         |> put_req_header("content-type", "application/activity+json")
         |> post("/users/#{recipient.nickname}/inbox", data)

@@ -1063,6 +1094,7 @@ test "it clears `unreachable` federation status of the sender", %{conn: conn, da
       conn =
         conn
         |> assign(:valid_signature, true)
+        |> put_req_header("signature", "keyId=\"#{data["actor"]}/main-key\"")
         |> put_req_header("content-type", "application/activity+json")
         |> post("/users/#{user.nickname}/inbox", data)

@@ -1101,6 +1133,7 @@ test "it removes all follower collections but actor's", %{conn: conn} do
 
       conn
       |> assign(:valid_signature, true)
+      |> put_req_header("signature", "keyId=\"#{actor.ap_id}/main-key\"")
       |> put_req_header("content-type", "application/activity+json")
       |> post("/users/#{recipient.nickname}/inbox", data)
       |> json_response(200)

@@ -1193,6 +1226,7 @@ test "forwarded report", %{conn: conn} do
 
       conn
       |> assign(:valid_signature, true)
+      |> put_req_header("signature", "keyId=\"#{actor.ap_id}/main-key\"")
      |> put_req_header("content-type", "application/activity+json")
       |> post("/users/#{reported_user.nickname}/inbox", data)
       |> json_response(200)

@@ -1248,6 +1282,7 @@ test "forwarded report from mastodon", %{conn: conn} do
 
       conn
       |> assign(:valid_signature, true)
+      |> put_req_header("signature", "keyId=\"#{remote_actor}/main-key\"")
       |> put_req_header("content-type", "application/activity+json")
       |> post("/users/#{reported_user.nickname}/inbox", data)
       |> json_response(200)

@@ -1632,7 +1632,7 @@ test "fetches only public posts for other users" do
   end
 
   describe "fetch_follow_information_for_user" do
-    test "syncronizes following/followers counters" do
+    test "synchronizes following/followers counters" do
       user =
         insert(:user,
           local: false,
@@ -0,0 +1,122 @@
defmodule Pleroma.Web.AkkomaAPI.FrontendSettingsControllerTest do
  use Pleroma.Web.ConnCase, async: true

  import Pleroma.Factory
  alias Pleroma.Akkoma.FrontendSettingsProfile

  describe "GET /api/v1/akkoma/frontend_settings/:frontend_name" do
    test "it returns a list of profiles" do
      %{conn: conn, user: user} = oauth_access(["read"])

      insert(:frontend_setting_profile, user: user, frontend_name: "test", profile_name: "test1")
      insert(:frontend_setting_profile, user: user, frontend_name: "test", profile_name: "test2")

      response =
        conn
        |> get("/api/v1/akkoma/frontend_settings/test")
        |> json_response_and_validate_schema(200)

      assert response == [
               %{"name" => "test1", "version" => 1},
               %{"name" => "test2", "version" => 1}
             ]
    end
  end

  describe "GET /api/v1/akkoma/frontend_settings/:frontend_name/:profile_name" do
    test "it returns 404 if not found" do
      %{conn: conn} = oauth_access(["read"])

      conn
      |> get("/api/v1/akkoma/frontend_settings/unknown_frontend/unknown_profile")
      |> json_response_and_validate_schema(404)
    end

    test "it returns 200 if found" do
      %{conn: conn, user: user} = oauth_access(["read"])

      insert(:frontend_setting_profile,
        user: user,
        frontend_name: "test",
        profile_name: "test1",
        settings: %{"test" => "test"}
      )

      response =
        conn
        |> get("/api/v1/akkoma/frontend_settings/test/test1")
        |> json_response_and_validate_schema(200)

      assert response == %{"settings" => %{"test" => "test"}, "version" => 1}
    end
  end

  describe "PUT /api/v1/akkoma/frontend_settings/:frontend_name/:profile_name" do
    test "puts a config" do
      %{conn: conn, user: user} = oauth_access(["write"])
      settings = %{"test" => "test2"}

      response =
        conn
        |> put_req_header("content-type", "application/json")
        |> put("/api/v1/akkoma/frontend_settings/test/test1", %{
          "settings" => settings,
          "version" => 1
        })
        |> json_response_and_validate_schema(200)

      assert response == settings

      assert %FrontendSettingsProfile{settings: ^settings} =
               FrontendSettingsProfile.get_by_user_and_frontend_name_and_profile_name(
                 user,
                 "test",
                 "test1"
               )
    end

    test "refuses to overwrite a newer config" do
      %{conn: conn, user: user} = oauth_access(["write"])

      insert(:frontend_setting_profile,
        user: user,
        frontend_name: "test",
        profile_name: "test1",
        settings: %{"test" => "test"},
        version: 2
      )

      conn
      |> put_req_header("content-type", "application/json")
      |> put("/api/v1/akkoma/frontend_settings/test/test1", %{
        "settings" => %{"test" => "test2"},
        "version" => 1
      })
      |> json_response_and_validate_schema(422)
    end
  end

  describe "DELETE /api/v1/akkoma/frontend_settings/:frontend_name/:profile_name" do
    test "deletes a config" do
      %{conn: conn, user: user} = oauth_access(["write"])

      insert(:frontend_setting_profile,
        user: user,
        frontend_name: "test",
        profile_name: "test1",
        settings: %{"test" => "test"},
        version: 2
      )

      conn
      |> delete("/api/v1/akkoma/frontend_settings/test/test1")
      |> json_response_and_validate_schema(200)

      assert FrontendSettingsProfile.get_by_user_and_frontend_name_and_profile_name(
               user,
               "test",
               "test1"
             ) == nil
    end
  end
end
@@ -1921,4 +1921,48 @@ test "create a note on a user" do
       |> get("/api/v1/accounts/relationships?id=#{other_user.id}")
       |> json_response_and_validate_schema(200)
     end
+
+  describe "remove from followers" do
+    setup do: oauth_access(["follow"])
+
+    test "removing user from followers", %{conn: conn, user: user} do
+      %{id: other_user_id} = other_user = insert(:user)
+
+      CommonAPI.follow(other_user, user)
+
+      assert %{"id" => ^other_user_id, "followed_by" => false} =
+               conn
+               |> post("/api/v1/accounts/#{other_user_id}/remove_from_followers")
+               |> json_response_and_validate_schema(200)
+
+      refute User.following?(other_user, user)
+    end
+
+    test "removing remote user from followers", %{conn: conn, user: user} do
+      %{id: other_user_id} = other_user = insert(:user, local: false)
+
+      CommonAPI.follow(other_user, user)
+
+      assert User.following?(other_user, user)
+
+      assert %{"id" => ^other_user_id, "followed_by" => false} =
+               conn
+               |> post("/api/v1/accounts/#{other_user_id}/remove_from_followers")
+               |> json_response_and_validate_schema(200)
+
+      refute User.following?(other_user, user)
+    end
+
+    test "removing user from followers errors", %{user: user, conn: conn} do
+      # self remove
+      conn_res = post(conn, "/api/v1/accounts/#{user.id}/remove_from_followers")
+
+      assert %{"error" => "Can not unfollow yourself"} =
+               json_response_and_validate_schema(conn_res, 400)
+
+      # remove non existing user
+      conn_res = post(conn, "/api/v1/accounts/doesntexist/remove_from_followers")
+      assert %{"error" => "Record not found"} = json_response_and_validate_schema(conn_res, 404)
+    end
+  end
 end
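As a usage note, here is a hedged sketch of calling the endpoint these tests cover from an ordinary client; the Req HTTP library, the instance URL, account id, and token handling are illustrative assumptions and not part of the patch:

    # Needs the `req` hex package and an OAuth token carrying the "follow" scope.
    token = System.fetch_env!("AKKOMA_TOKEN")

    response =
      Req.post!(
        "https://akkoma.example/api/v1/accounts/123/remove_from_followers",
        auth: {:bearer, token}
      )

    # On success, response.status is 200 and the returned relationship
    # reports "followed_by" => false for that account.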
@@ -11,7 +11,7 @@ defmodule Pleroma.Web.PleromaAPI.BackupControllerTest do
   setup do
     clear_config([Pleroma.Upload, :uploader])
     clear_config([Backup, :limit_days])
-    oauth_access(["read:accounts"])
+    oauth_access(["read:backups"])
   end
 
   test "GET /api/v1/pleroma/backups", %{user: user, conn: conn} do
@@ -85,7 +85,7 @@ test "POST /api/v1/pleroma/backups", %{user: _user, conn: conn} do
 
   test "Backup without email address" do
     user = Pleroma.Factory.insert(:user, email: nil)
-    %{conn: conn} = oauth_access(["read:accounts"], user: user)
+    %{conn: conn} = oauth_access(["read:backups"], user: user)
 
     assert is_nil(user.email)
@@ -9,6 +9,8 @@ defmodule Pleroma.Web.Plugs.MappedSignatureToIdentityPlugTest do
   import Tesla.Mock
   import Plug.Conn
 
+  import Pleroma.Tests.Helpers, only: [clear_config: 2]
+
   setup do
     mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
     :ok

@@ -47,6 +49,26 @@ test "it considers a mapped identity to be invalid when it mismatches a payload"
     assert %{valid_signature: false} == conn.assigns
   end
 
+  test "it considers a mapped identity to be invalid when the associated instance is blocked" do
+    clear_config([:activitypub, :authorized_fetch_mode], true)
+
+    clear_config([:mrf_simple, :reject], [
+      {"mastodon.example.org", "anime is banned"}
+    ])
+
+    on_exit(fn ->
+      Pleroma.Config.put([:activitypub, :authorized_fetch_mode], false)
+      Pleroma.Config.put([:mrf_simple, :reject], [])
+    end)
+
+    conn =
+      build_conn(:post, "/doesntmattter", %{"actor" => "http://mastodon.example.org/users/admin"})
+      |> set_signature("http://mastodon.example.org/users/admin")
+      |> MappedSignatureToIdentityPlug.call(%{})
+
+    assert %{valid_signature: false} == conn.assigns
+  end
+
   @tag skip: "known breakage; the testsuite presently depends on it"
   test "it considers a mapped identity to be invalid when the identity cannot be found" do
     conn =
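For context on what the blocked-instance test asserts, a minimal sketch of a host check against a reject list shaped like the :mrf_simple, :reject config it sets; the module and function below are illustrative and not the plug's actual code:

    # Illustrative only: treats a signature as untrusted when the signing
    # actor's host appears in the reject list, matching the expected
    # valid_signature: false outcome in the test above.
    defmodule RejectedInstance do
      def rejected?(actor_uri, reject_list) when is_binary(actor_uri) do
        host = URI.parse(actor_uri).host

        Enum.any?(reject_list, fn
          {domain, _reason} -> domain == host
          domain when is_binary(domain) -> domain == host
        end)
      end
    end

    # RejectedInstance.rejected?(
    #   "http://mastodon.example.org/users/admin",
    #   [{"mastodon.example.org", "anime is banned"}]
    # ) #=> true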
@@ -663,4 +663,15 @@ def announcement_factory(params \\ %{}) do
     |> Map.merge(params)
     |> Pleroma.Announcement.add_rendered_properties()
   end
+
+  def frontend_setting_profile_factory(params \\ %{}) do
+    %Pleroma.Akkoma.FrontendSettingsProfile{
+      user: build(:user),
+      frontend_name: "akkoma-fe",
+      profile_name: "default",
+      settings: %{"test" => "test"},
+      version: 1
+    }
+    |> Map.merge(params)
+  end
 end
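This factory backs the controller tests earlier in the changeset; a typical call overrides only the fields that matter for the case (the shape below is taken from those tests):

    # Inside an ExUnit test that imports Pleroma.Factory:
    profile =
      insert(:frontend_setting_profile,
        frontend_name: "test",
        profile_name: "test1",
        settings: %{"test" => "test"},
        version: 2
      )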