Merge branch 'develop' into stable

FloatingGhost 2022-11-12 15:34:19 +00:00
commit f91b896731
75 changed files with 2101 additions and 518 deletions


@ -6,12 +6,12 @@ COPYING
*file
elixir_buildpack.config
test/
instance/
_build
deps
test
benchmarks
docs/site
docker-db
uploads
instance
# Required to get version
!.git

.gitignore

@ -1,5 +1,6 @@
# App artifacts
docs/site
*.zip
*.sw*
secret
/_build
@ -17,6 +18,13 @@ secret
/instance
/priv/ssh_keys
vm.args
.cache/
.hex/
.mix/
.psql_history
docker-resources/Dockerfile
docker-resources/Caddyfile
pgdata
# Prevent committing custom emojis
/priv/static/emoji/custom/*
@ -65,3 +73,6 @@ pleroma.iml
# Generated documentation
docs/site
# docker stuff
docker-db


@ -4,11 +4,26 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## Unreleased
## 2022.11
## Added
- Officially supported docker release
- Ability to remove followers unilaterally without a block (see the example request below)
- Scraping of nodeinfo from remote instances to display instance info
- `requested_by` in relationships when the user has requested to follow you
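For illustration (not part of the upstream notes), the follower-removal endpoint and the new `requested_by` field added later in this diff can be exercised with a plain Mastodon-API-style request; the instance domain and account id below are placeholders, and the token needs the `follow` or `write:follows` scope per the API spec in this commit:
```bash
# Placeholders: replace the domain, account id and token with real values.
curl -X POST \
  -H "Authorization: Bearer $ACCESS_TOKEN" \
  "https://your.instance.example/api/v1/accounts/<account_id>/remove_from_followers"
# The response is the relationship JSON for that account, which now also carries "requested_by".
```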
## Changes
- Follows no longer override domain blocks; a domain block is final
- Deletes are now the lowest priority to publish and will be handled after creates
- Domain blocks are now subdomain-matches by default
## Fixed
- Registrations via ldap are now compatible with the latest OTP24
## Update notes
- If you use LDAP and run from source, please update your elixir/erlang
to the latest. The changes in OTP24.3 are breaking.
- You can now remove the leading `*.` from domain blocks, but you do not have to.
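For illustration (not part of the upstream notes), the `subdomain_regex/1` helper added in this commit builds a case-insensitive regex that treats a bare domain and its subdomains the same, so `*.example.com` and `example.com` now block identically:
```elixir
# Sketch based on the subdomain_regex/1 helper added later in this diff.
regex = ~r/^(.+\.)?example\.com$/i

Regex.match?(regex, "example.com")        # => true
Regex.match?(regex, "media.example.com")  # => true
Regex.match?(regex, "notexample.com")     # => false
```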
## 2022.10


@ -1,21 +1,8 @@
FROM elixir:1.13.4-alpine as build
COPY . .
FROM hexpm/elixir:1.13.4-erlang-24.3.4.5-alpine-3.15.6
ENV MIX_ENV=prod
RUN apk add git gcc g++ musl-dev make cmake file-dev &&\
echo "import Config" > config/prod.secret.exs &&\
mix local.hex --force &&\
mix local.rebar --force &&\
mix deps.get --only prod &&\
mkdir release &&\
mix release --path release
FROM alpine:3.16
ARG BUILD_DATE
ARG VCS_REF
ARG HOME=/opt/akkoma
LABEL org.opencontainers.image.title="akkoma" \
org.opencontainers.image.description="Akkoma for Docker" \
@ -26,25 +13,21 @@ LABEL org.opencontainers.image.title="akkoma" \
org.opencontainers.image.revision=$VCS_REF \
org.opencontainers.image.created=$BUILD_DATE
ARG HOME=/opt/akkoma
ARG DATA=/var/lib/akkoma
RUN apk update &&\
apk add exiftool ffmpeg imagemagick libmagic ncurses postgresql-client &&\
adduser --system --shell /bin/false --home ${HOME} akkoma &&\
mkdir -p ${DATA}/uploads &&\
mkdir -p ${DATA}/static &&\
chown -R akkoma ${DATA} &&\
mkdir -p /etc/akkoma &&\
chown -R akkoma /etc/akkoma
USER akkoma
COPY --from=build --chown=akkoma:0 /release ${HOME}
COPY ./config/docker.exs /etc/akkoma/config.exs
COPY ./docker-entrypoint.sh ${HOME}
RUN apk add git gcc g++ musl-dev make cmake file-dev exiftool ffmpeg imagemagick libmagic ncurses postgresql-client
EXPOSE 4000
ENTRYPOINT ["/opt/akkoma/docker-entrypoint.sh"]
ARG UID=1000
ARG GID=1000
ARG UNAME=akkoma
RUN addgroup -g $GID $UNAME
RUN adduser -u $UID -G $UNAME -D -h $HOME $UNAME
WORKDIR /opt/akkoma
USER $UNAME
RUN mix local.hex --force &&\
mix local.rebar --force
CMD ["/opt/akkoma/docker-entrypoint.sh"]


@ -8,12 +8,33 @@
This is a fork of Pleroma, which is a microblogging server software that can federate (= exchange messages with) other servers that support ActivityPub. What that means is that you can host a server for yourself or your friends and stay in control of your online identity, but still exchange messages with people on larger servers. Akkoma will federate with all servers that implement ActivityPub, like Friendica, GNU Social, Hubzilla, Mastodon, Misskey, Peertube, and Pixelfed.
Akkoma is written in Elixir and uses PostgresSQL for data storage.
Akkoma is written in Elixir and uses PostgreSQL for data storage.
For clients it supports the [Mastodon client API](https://docs.joinmastodon.org/api/guidelines/) with Pleroma extensions (see the API section on <https://docs.akkoma.dev/stable/>).
- [Client Applications for Akkoma](https://docs.akkoma.dev/stable/clients/)
## Differences with Pleroma
Akkoma is a faster-paced fork with a varied and potentially experimental feature set tailored specifically to the corner of the fediverse inhabited by the project
creator and contributors.
This should not be considered a one-for-one match with Pleroma; it is more opinionated in many ways, and has a smaller community (which is good or
bad depending on your view).
For example, Akkoma has:
- Custom Emoji reactions (compatible with misskey)
- Misskey-flavoured markdown support
- Elasticsearch and Meilisearch support for search
- Mastodon frontend (Glitch-Soc and Fedibird flavours) support
- Automatic post translation via DeepL or LibreTranslate
- A multitude of heavy modifications to the Pleroma Frontend (Pleroma-FE)
- The "bubble" concept, in which instance administrators can choose closely-related instances to make a "community of communities", so to say
Akkoma also takes a more opinionated stance on issues like domain blocks, which are enforced far more strictly.
Take a look at the Changelog if you want a full list of recent changes; everything since 3.0 has been Akkoma.
## Installation
### OTP releases (Recommended)
@ -25,15 +46,13 @@ If your platform is not supported, or you just want to be able to edit the sourc
- [Alpine Linux](https://docs.akkoma.dev/stable/installation/alpine_linux_en/)
- [Arch Linux](https://docs.akkoma.dev/stable/installation/arch_linux_en/)
- [Debian-based](https://docs.akkoma.dev/stable/installation/debian_based_en/)
- [Debian-based (jp)](https://docs.akkoma.dev/stable/installation/debian_based_jp/)
- [FreeBSD](https://docs.akkoma.dev/stable/installation/freebsd_en/)
- [Gentoo Linux](https://docs.akkoma.dev/stable/installation/gentoo_en/)
- [NetBSD](https://docs.akkoma.dev/stable/installation/netbsd_en/)
- [OpenBSD](https://docs.akkoma.dev/stable/installation/openbsd_en/)
- [OpenBSD (fi)](https://docs.akkoma.dev/stable/installation/openbsd_fi/)
### Docker
While we dont provide docker files, other people have written very good ones. Take a look at <https://github.com/angristan/docker-pleroma> or <https://glitch.sh/sn0w/pleroma-docker>.
Docker installation is supported via [this setup](https://docs.akkoma.dev/stable/installation/docker_en/)
### Compilation Troubleshooting
If you ever encounter compilation issues during the updating of Akkoma, you can try these commands and see if they fix things:
@ -45,3 +64,4 @@ If you ever encounter compilation issues during the updating of Akkoma, you can
## Documentation
- https://docs.akkoma.dev/stable
- https://docs.akkoma.dev/develop


@ -185,7 +185,7 @@ config :pleroma, :http,
adapter: []
config :pleroma, :instance,
name: "Pleroma",
name: "Akkoma",
email: "example@example.com",
notify_email: "noreply@example.com",
description: "Akkoma: The cooler fediverse server",
@ -567,7 +567,8 @@ config :pleroma, Oban,
attachments_cleanup: 1,
new_users_digest: 1,
mute_expire: 5,
search_indexing: 10
search_indexing: 10,
nodeinfo_fetcher: 1
],
plugins: [
Oban.Plugins.Pruner,
@ -806,7 +807,8 @@ config :ex_aws, http_client: Pleroma.HTTP.ExAws
config :web_push_encryption, http_client: Pleroma.HTTP.WebPush
config :pleroma, :instances_favicons, enabled: false
config :pleroma, :instances_favicons, enabled: true
config :pleroma, :instances_nodeinfo, enabled: true
config :floki, :html_parser, Floki.HTMLParser.FastHtml


@ -1389,6 +1389,12 @@ config :pleroma, :config_description, [
label: "Render misskey markdown",
type: :boolean,
description: "Whether to render Misskey-flavoured markdown"
},
%{
key: :stopGifs,
label: "Stop Gifs",
type: :boolean,
description: "Whether to pause animated images until they're hovered on"
}
]
},
@ -3041,6 +3047,19 @@ config :pleroma, :config_description, [
}
]
},
%{
group: :pleroma,
key: :instances_nodeinfo,
type: :group,
description: "Control favicons for instances",
children: [
%{
key: :enabled,
type: :boolean,
description: "Allow/disallow getting instance nodeinfo"
}
]
},
%{
group: :ex_aws,
key: :s3,


@ -24,11 +24,11 @@ config :pleroma, Pleroma.Repo,
config :web_push_encryption, :vapid_details, subject: "mailto:#{System.get_env("NOTIFY_EMAIL")}"
config :pleroma, :database, rum_enabled: false
config :pleroma, :instance, static_dir: "/var/lib/pleroma/static"
config :pleroma, Pleroma.Uploaders.Local, uploads: "/var/lib/pleroma/uploads"
config :pleroma, :instance, static_dir: "/var/lib/akkoma/static"
config :pleroma, Pleroma.Uploaders.Local, uploads: "/var/lib/akkoma/uploads"
# We can't store the secrets in this file, since this is baked into the docker image
if not File.exists?("/var/lib/pleroma/secret.exs") do
if not File.exists?("/var/lib/akkoma/secret.exs") do
secret = :crypto.strong_rand_bytes(64) |> Base.encode64() |> binary_part(0, 64)
signing_salt = :crypto.strong_rand_bytes(8) |> Base.encode64() |> binary_part(0, 8)
{web_push_public_key, web_push_private_key} = :crypto.generate_key(:ecdh, :prime256v1)
@ -52,16 +52,16 @@ if not File.exists?("/var/lib/pleroma/secret.exs") do
web_push_private_key: Base.url_encode64(web_push_private_key, padding: false)
)
File.write("/var/lib/pleroma/secret.exs", secret_file)
File.write("/var/lib/akkoma/secret.exs", secret_file)
end
import_config("/var/lib/pleroma/secret.exs")
import_config("/var/lib/akkoma/secret.exs")
# For additional user config
if File.exists?("/var/lib/pleroma/config.exs"),
do: import_config("/var/lib/pleroma/config.exs"),
if File.exists?("/var/lib/akkoma/config.exs"),
do: import_config("/var/lib/akkoma/config.exs"),
else:
File.write("/var/lib/pleroma/config.exs", """
File.write("/var/lib/akkoma/config.exs", """
import Config
# For additional configuration outside of environmental variables


@ -139,6 +139,8 @@ config :pleroma, Pleroma.Search.Meilisearch, url: "http://127.0.0.1:7700/", priv
# Reduce recompilation time
# https://dashbit.co/blog/speeding-up-re-compilation-of-elixir-projects
config :phoenix, :plug_init_mode, :runtime
config :pleroma, :instances_favicons, enabled: false
config :pleroma, :instances_nodeinfo, enabled: false
if File.exists?("./config/test.secret.exs") do
import_config "test.secret.exs"

docker-compose.yml (new file)

@ -0,0 +1,61 @@
version: "3.7"
services:
db:
image: akkoma-db:latest
build: ./docker-resources/database
restart: unless-stopped
user: ${DOCKER_USER}
environment: {
# This might seem insecure but is usually not a problem.
# You should leave this at the "akkoma" default.
# The DB is only reachable by containers in the same docker network,
# and is not exposed to the open internet.
#
# If you do change this, remember to update "config.exs".
POSTGRES_DB: akkoma,
POSTGRES_USER: akkoma,
POSTGRES_PASSWORD: akkoma,
}
env_file:
- .env
volumes:
- type: bind
source: ./pgdata
target: /var/lib/postgresql/data
akkoma:
image: akkoma:latest
build: .
restart: unless-stopped
env_file:
- .env
links:
- db
ports: [
# Uncomment/Change port mappings below as needed.
# The left side is your host machine, the right one is the akkoma container.
# You can prefix the left side with an ip.
# Webserver (for reverse-proxies outside of docker)
# If you use a dockerized proxy, you can leave this commented
# and use a container link instead.
"127.0.0.1:4000:4000",
]
volumes:
- .:/opt/akkoma
# Uncomment the following if you want to use a reverse proxy
#proxy:
# image: caddy:2-alpine
# restart: unless-stopped
# links:
# - akkoma
# ports: [
# "443:443",
# "80:80"
# ]
# volumes:
# - ./docker-resources/Caddyfile:/etc/caddy/Caddyfile
# - ./caddy-data:/data
# - ./caddy-config:/config


@ -8,7 +8,7 @@ while ! pg_isready -U ${DB_USER:-pleroma} -d postgres://${DB_HOST:-db}:5432/${DB
done
echo "-- Running migrations..."
$HOME/bin/pleroma_ctl migrate
mix ecto.migrate
echo "-- Starting!"
exec $HOME/bin/pleroma start
mix phx.server


@ -0,0 +1,14 @@
# default docker Caddyfile config for Akkoma
#
# Simple installation instructions:
# 1. Replace 'example.tld' with your instance's domain wherever it appears.
example.tld {
log {
output file /var/log/caddy/akkoma.log
}
encode gzip
reverse_proxy akkoma:4000
}

docker-resources/build.sh (new executable file)

@ -0,0 +1,4 @@
#!/bin/sh
docker-compose build --build-arg UID=$(id -u) --build-arg GID=$(id -g) akkoma
docker-compose build --build-arg UID=$(id -u) --build-arg GID=$(id -g) db


@ -0,0 +1,10 @@
FROM postgres:14-alpine
ARG UID=1000
ARG GID=1000
ARG UNAME=akkoma
RUN addgroup -g $GID $UNAME
RUN adduser -u $UID -G $UNAME -D -h $HOME $UNAME
USER akkoma


@ -0,0 +1,4 @@
MIX_ENV=prod
DB_NAME=akkoma
DB_USER=akkoma
DB_PASS=akkoma

docker-resources/manage.sh (new executable file)

@ -0,0 +1,3 @@
#!/bin/sh
docker-compose run --rm akkoma $@
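The wrapper simply forwards its arguments to `docker-compose run`, so any `mix` task from the from-source documentation can be run through it; for example (the command below is the admin-user task used later in this commit's install guide):
```bash
./docker-resources/manage.sh mix pleroma.user new MY_USERNAME MY_EMAIL@SOMEWHERE --admin
```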


@ -1,9 +1,14 @@
all: install
pipenv run mkdocs build
branch := $(shell git rev-parse --abbrev-ref HEAD)
install:
pipenv install
clean:
rm -rf site
serve:
pipenv run python3 -m http.server -d site
zip:
zip -r docs.zip site/*
deploy:
cd site && rclone copy . scaleway:akkoma-docs/$(branch)

docs/Pipfile.lock (generated)

@ -14,6 +14,22 @@
]
},
"default": {
"certifi": {
"hashes": [
"sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14",
"sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"
],
"markers": "python_version >= '3.6'",
"version": "==2022.9.24"
},
"charset-normalizer": {
"hashes": [
"sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845",
"sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"
],
"markers": "python_version >= '3.6'",
"version": "==2.1.1"
},
"click": {
"hashes": [
"sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e",
@ -29,13 +45,13 @@
],
"version": "==2.1.0"
},
"importlib-metadata": {
"idna": {
"hashes": [
"sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670",
"sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4",
"sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"
],
"markers": "python_version >= '3.7'",
"version": "==4.12.0"
"markers": "python_version >= '3.5'",
"version": "==3.4"
},
"jinja2": {
"hashes": [
@ -55,10 +71,10 @@
},
"markdown-include": {
"hashes": [
"sha256:6f5d680e36f7780c7f0f61dca53ca581bd50d1b56137ddcd6353efafa0c3e4a2"
"sha256:a06183b7c7225e73112737acdc6fe0ac0686c39457234eeb5ede23881fed001d"
],
"index": "pypi",
"version": "==0.6.0"
"version": "==0.7.0"
},
"markupsafe": {
"hashes": [
@ -116,27 +132,27 @@
},
"mkdocs": {
"hashes": [
"sha256:26bd2b03d739ac57a3e6eed0b7bcc86168703b719c27b99ad6ca91dc439aacde",
"sha256:b504405b04da38795fec9b2e5e28f6aa3a73bb0960cb6d5d27ead28952bd35ea"
"sha256:8947af423a6d0facf41ea1195b8e1e8c85ad94ac95ae307fe11232e0424b11c5",
"sha256:c8856a832c1e56702577023cd64cc5f84948280c1c0fcc6af4cd39006ea6aa8c"
],
"markers": "python_version >= '3.6'",
"version": "==1.3.0"
"markers": "python_version >= '3.7'",
"version": "==1.4.2"
},
"mkdocs-material": {
"hashes": [
"sha256:263f2721f3abe533b61f7c8bed435a0462620912742c919821ac2d698b4bfe67",
"sha256:dc82b667d2a83f0de581b46a6d0949732ab77e7638b87ea35b770b33bc02e75a"
"sha256:143ea55843b3747b640e1110824d91e8a4c670352380e166e64959f9abe98862",
"sha256:45eeabb23d2caba8fa3b85c91d9ec8e8b22add716e9bba8faf16d56af8aa5622"
],
"index": "pypi",
"version": "==8.3.9"
"version": "==8.5.9"
},
"mkdocs-material-extensions": {
"hashes": [
"sha256:a82b70e533ce060b2a5d9eb2bc2e1be201cf61f901f93704b4acf6e3d5983a44",
"sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"
"sha256:96ca979dae66d65c2099eefe189b49d5ac62f76afb59c38e069ffc7cf3c131ec",
"sha256:bcc2e5fc70c0ec50e59703ee6e639d87c7e664c0c441c014ea84461a90f1e902"
],
"markers": "python_version >= '3.6'",
"version": "==1.0.3"
"markers": "python_version >= '3.7'",
"version": "==1.1"
},
"packaging": {
"hashes": [
@ -148,19 +164,19 @@
},
"pygments": {
"hashes": [
"sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb",
"sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"
"sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1",
"sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"
],
"markers": "python_version >= '3.6'",
"version": "==2.12.0"
"version": "==2.13.0"
},
"pymdown-extensions": {
"hashes": [
"sha256:3ef2d998c0d5fa7eb09291926d90d69391283561cf6306f85cd588a5eb5befa0",
"sha256:ec141c0f4983755349f0c8710416348d1a13753976c028186ed14f190c8061c4"
"sha256:1bd4a173095ef8c433b831af1f3cb13c10883be0c100ae613560668e594651f7",
"sha256:8e62688a8b1128acd42fa823f3d429d22f4284b5e6dd4d3cd56721559a5a211b"
],
"markers": "python_version >= '3.7'",
"version": "==9.5"
"version": "==9.8"
},
"pyparsing": {
"hashes": [
@ -180,6 +196,7 @@
},
"pyyaml": {
"hashes": [
"sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf",
"sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293",
"sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b",
"sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57",
@ -191,26 +208,32 @@
"sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287",
"sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513",
"sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0",
"sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782",
"sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0",
"sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92",
"sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f",
"sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2",
"sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc",
"sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1",
"sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c",
"sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86",
"sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4",
"sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c",
"sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34",
"sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b",
"sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d",
"sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c",
"sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb",
"sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7",
"sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737",
"sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3",
"sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d",
"sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358",
"sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53",
"sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78",
"sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803",
"sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a",
"sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f",
"sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174",
"sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"
],
@ -225,6 +248,14 @@
"markers": "python_version >= '3.6'",
"version": "==0.1"
},
"requests": {
"hashes": [
"sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983",
"sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"
],
"markers": "python_version >= '3.7' and python_version < '4'",
"version": "==2.28.1"
},
"six": {
"hashes": [
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
@ -233,6 +264,14 @@
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.16.0"
},
"urllib3": {
"hashes": [
"sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e",
"sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4'",
"version": "==1.26.12"
},
"watchdog": {
"hashes": [
"sha256:083171652584e1b8829581f965b9b7723ca5f9a2cd7e20271edf264cfd7c1412",
@ -263,14 +302,6 @@
],
"markers": "python_version >= '3.6'",
"version": "==2.1.9"
},
"zipp": {
"hashes": [
"sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad",
"sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"
],
"markers": "python_version >= '3.7'",
"version": "==3.8.0"
}
},
"develop": {}


@ -59,6 +59,7 @@ To add configuration to your config file, you can copy it from the base config.
* `cleanup_attachments`: Remove attachments along with statuses. Does not affect duplicate files and attachments without status. Enabling this will increase load to database when deleting statuses on larger instances.
* `show_reactions`: Let favourites and emoji reactions be viewed through the API (default: `true`).
* `password_reset_token_validity`: The time after which reset tokens aren't accepted anymore, in seconds (default: one day).
* `local_bubble`: Array of domains representing instances closely related to yours. Used to populate the `bubble` timeline. e.g `['example.com']`, (default: `[]`)
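A minimal sketch of what `local_bubble` might look like in `prod.secret.exs`, assuming it sits under the `:instance` group like the options above (the domains are placeholders):
```elixir
# Sketch only: the domains below are placeholders for instances you actually consider close.
config :pleroma, :instance,
  local_bubble: ["closefriends.example", "neighbours.example"]
```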
## :database
* `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).


@ -0,0 +1,161 @@
# Installing in Docker
## Installation
This guide will show you how to get Akkoma working in a Docker container,
which is useful if you want isolation, or if you run a distribution not supported by the OTP
releases.
If you want to migrate from a source or OTP install to docker, check out [the migration guide](./migrating_to_docker_en.md).
### Prepare the system
* Install docker and docker-compose
* [Docker](https://docs.docker.com/engine/install/)
* [Docker-compose](https://docs.docker.com/compose/install/)
* This will usually just be a repository installation and a package manager invocation.
* Clone the akkoma repository
* `git clone https://akkoma.dev/AkkomaGang/akkoma.git -b stable`
* `cd akkoma`
### Set up basic configuration
```bash
cp docker-resources/env.example .env
echo "DOCKER_USER=$(id -u):$(id -g)" >> .env
```
This probably won't need to be changed; it's only there to set basic environment
variables for the docker-compose file.
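For reference, after those two commands your `.env` should look roughly like the sketch below; the first four values come from `docker-resources/env.example` (shown earlier in this commit) and the last line is whatever `id -u`/`id -g` printed on your machine (`1000:1000` here is just an example):
```bash
MIX_ENV=prod
DB_NAME=akkoma
DB_USER=akkoma
DB_PASS=akkoma
DOCKER_USER=1000:1000
```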
### Building the container
The container provided is a thin wrapper around akkoma's dependencies;
it does not contain the code itself. This is to allow for easy updates
and debugging if required.
```bash
./docker-resources/build.sh
```
This will generate a container called `akkoma` which we can use
in our compose environment.
### Generating your instance
```bash
mkdir pgdata
./docker-resources/manage.sh mix deps.get
./docker-resources/manage.sh mix compile
./docker-resources/manage.sh mix pleroma.instance gen
```
This will ask you a few questions - the defaults are fine for most things;
the database hostname is `db`, and you will want to set the IP to `0.0.0.0`.
Now we'll want to copy over the config it just created
```bash
cp config/generated_config.exs config/prod.secret.exs
```
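If you want to sanity-check the copied config, the parts that matter for the docker setup should look roughly like this sketch (secrets and other generated values omitted; these are the same values the migration guide in this commit uses):
```elixir
# Relevant excerpt of config/prod.secret.exs for the docker setup (sketch).
config :pleroma, Pleroma.Web.Endpoint,
  http: [ip: {0, 0, 0, 0}, port: 4000]

config :pleroma, Pleroma.Repo,
  username: "akkoma",
  password: "akkoma",  # whatever password you gave `mix pleroma.instance gen`
  database: "akkoma",
  hostname: "db"
```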
### Setting up the database
We need to run a few commands on the database container; this isn't too bad.
```bash
docker-compose run --rm --user akkoma -d db
# Note down the name it gives here, it will be something like akkoma_db_run
docker-compose run --rm akkoma psql -h db -U akkoma -f config/setup_db.psql
docker stop akkoma_db_run # Replace with the name you noted down
```
Now we can actually run our migrations
```bash
./docker-resources/manage.sh mix ecto.migrate
# this will recompile your files at the same time, since we changed the config
```
### Start the server
We're going to run it in the foreground on the first run, just to make sure
everything starts up.
```bash
docker-compose up
```
If everything went well, you should be able to access your instance at http://localhost:4000
You can `ctrl-c` out of the docker-compose now to shut down the server.
### Running in the background
```bash
docker-compose up -d
```
### Create your first user
If your instance is up and running, you can create your first user with administrative rights with the following task:
```shell
./docker-resources/manage.sh mix pleroma.user new MY_USERNAME MY_EMAIL@SOMEWHERE --admin
```
And follow the prompts
### Reverse proxies
This is a tad more complex in docker than on the host itself.
You've got two options.
#### Running caddy in a container
This is by far the easiest option. It'll handle HTTPS and all that for you.
```bash
mkdir caddy-data
mkdir caddy-config
cp docker-resources/Caddyfile.example docker-resources/Caddyfile
```
Then edit `example.tld` in your Caddyfile to the domain you're serving on.
Uncomment the `caddy` section in the docker-compose file,
then run `docker-compose up -d` again.
#### Running a reverse proxy on the host
If you want, you can also run the reverse proxy on the host. This is a bit more complex, but it's also more flexible.
Follow the guides for source install for your distribution of choice, or adapt
as needed. Your standard setup can be found in the [Debian Guide](../debian_based_en/#nginx)
### You're done!
All that's left is to set up your frontends.
The standard from-source commands will apply to you; just make sure you
prefix them with `./docker-resources/manage.sh`!
{! installation/frontends.include !}
### Updating Docker Installs
```bash
git pull
./docker-resources/build.sh
./docker-resources/manage.sh mix deps.get
./docker-resources/manage.sh mix compile
./docker-resources/manage.sh mix ecto.migrate
docker-compose restart akkoma db
```
#### Further reading
{! installation/further_reading.include !}
{! support.include !}


@ -21,5 +21,11 @@ For most installations, the following will suffice:
mix pleroma.frontend install admin-fe --ref stable
```
=== "Docker"
```sh
./docker-resources/manage.sh mix pleroma.frontend install pleroma-fe --ref stable
./docker-resources/manage.sh mix pleroma.frontend install admin-fe --ref stable
```
For more customised installations, refer to [Frontend Management](../../configuration/frontend_management)


@ -87,7 +87,7 @@ export FLAVOUR="amd64-musl"
# Clone the release build into a temporary directory and unpack it
# Replace `stable` with `unstable` if you want to run the unstable branch
su akkoma -s $SHELL -lc "
curl 'https://akkoma-updates.s3-website.fr-par.scw.cloud/develop/akkoma-$FLAVOUR.zip' -o /tmp/akkoma.zip
curl 'https://akkoma-updates.s3-website.fr-par.scw.cloud/stable/akkoma-$FLAVOUR.zip' -o /tmp/akkoma.zip
unzip /tmp/akkoma.zip -d /tmp/
"


@ -34,6 +34,15 @@ git pull -r
# to run "git merge stable" instead (or develop if you want)
```
### WARNING - Migrating from Pleroma Develop
If you are on pleroma develop, and have updated since 2022-08, you may have issues with database migrations.
Please roll back the given migrations:
```bash
MIX_ENV=prod mix ecto.rollback --migrations-path priv/repo/optional_migrations/pleroma_develop_rollbacks -n3
```
Then compile, migrate and restart as usual.
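As a sketch of "the usual" for a from-source install (assuming a systemd service named `akkoma`; adjust the service name and manager to however you actually run the server):
```bash
MIX_ENV=prod mix compile
MIX_ENV=prod mix ecto.migrate
sudo systemctl restart akkoma
```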
## From OTP


@ -0,0 +1,158 @@
# Migrating to a Docker Installation
If you for any reason wish to migrate a source or OTP install to a docker one,
this guide is for you.
You have a few options - your major one will be whether you want to keep your
reverse-proxy setup from before.
You probably should, in the first instance.
### Prepare the system
* Install docker and docker-compose
* [Docker](https://docs.docker.com/engine/install/)
* [Docker-compose](https://docs.docker.com/compose/install/)
* This will usually just be a repository installation and a package manager invocation.
=== "Source"
```bash
git pull
```
=== "OTP"
Clone the akkoma repository
```bash
git clone https://akkoma.dev/AkkomaGang/akkoma.git -b stable
cd akkoma
```
### Back up your old database
Change the database name as needed
```bash
pg_dump -d akkoma_prod --format c > akkoma_backup.sql
```
### Getting your static files in the right place
This will vary by every installation. Copy your `instance` directory to `instance/` in
the akkoma source directory - this is where the docker container will look for it.
For *most* from-source installs it'll already be there.
The same goes for `uploads`: make sure your uploads (if you have them on disk) are
located at `uploads/` in the akkoma source directory.
If you have them on a different disk, you will need to mount that disk into the docker-compose file,
with an entry that looks like this:
```yaml
akkoma:
volumes:
- .:/opt/akkoma # This should already be there
- type: bind
source: /path/to/your/uploads
target: /opt/akkoma/uploads
```
### Set up basic configuration
```bash
cp docker-resources/env.example .env
echo "DOCKER_USER=$(id -u):$(id -g)" >> .env
```
This probably won't need to be changed; it's only there to set basic environment
variables for the docker-compose file.
=== "From source"
You probably won't need to change your config. Provided your `config/prod.secret.exs` file
is still there, you're all good.
=== "OTP"
```bash
cp /etc/akkoma/config.exs config/prod.secret.exs
```
**BOTH**
Set the following config in `config/prod.secret.exs`:
```elixir
config :pleroma, Pleroma.Web.Endpoint,
...,
http: [ip: {0, 0, 0, 0}, port: 4000]
config :pleroma, Pleroma.Repo,
...,
username: "akkoma",
password: "akkoma",
database: "akkoma",
hostname: "db"
```
### Building the container
The container provided is a thin wrapper around akkoma's dependencies;
it does not contain the code itself. This is to allow for easy updates
and debugging if required.
```bash
./docker-resources/build.sh
```
This will generate a container called `akkoma` which we can use
in our compose environment.
### Setting up the docker resources
```bash
# These won't exist if you're migrating from OTP
rm -rf deps
rm -rf _build
```
```bash
mkdir pgdata
./docker-resources/manage.sh mix deps.get
./docker-resources/manage.sh mix compile
```
### Setting up the database
Now we can import our database to the container.
```bash
docker-compose run --rm --user akkoma -d db
docker-compose run --rm akkoma pg_restore -v -U akkoma -j $(grep -c ^processor /proc/cpuinfo) -d akkoma -h db akkoma_backup.sql
```
### Reverse proxies
If you're just reusing your old proxy, you may have to uncomment the line in
the docker-compose file under `ports`. You'll find it.
Otherwise, you can use the same setup as the [docker installation guide](./docker_en.md#reverse-proxies).
### Let's go
```bash
docker-compose up -d
```
You should now be at the same point as you were before, but with a docker install.
{! installation/frontends.include !}
See the [docker installation guide](./docker_en.md) for more information on how to
update.
#### Further reading
{! installation/further_reading.include !}
{! support.include !}


@ -123,7 +123,7 @@ export FLAVOUR="amd64-musl"
# Clone the release build into a temporary directory and unpack it
su akkoma -s $SHELL -lc "
curl 'https://akkoma-updates.s3-website.fr-par.scw.cloud/develop/akkoma-$FLAVOUR.zip' -o /tmp/akkoma.zip
curl 'https://akkoma-updates.s3-website.fr-par.scw.cloud/stable/akkoma-$FLAVOUR.zip' -o /tmp/akkoma.zip
unzip /tmp/akkoma.zip -d /tmp/
"


@ -4,7 +4,7 @@ All stable OTP releases are cryptographically signed, to allow
you to verify the integrity if you choose to.
Releases are signed with [Signify](https://man.openbsd.org/signify.1),
with [the public key in the main repository](https://akkoma.dev/AkkomaGang/akkoma/src/branch/develop/SIGNING_KEY.pub)
with [the public key in the main repository](https://akkoma.dev/AkkomaGang/akkoma/src/branch/stable/SIGNING_KEY.pub)
Release URLs will always be of the form


@ -1,22 +1,26 @@
certifi==2022.9.24
charset-normalizer==2.1.1
click==8.1.3
ghp-import==2.1.0
idna==3.4
importlib-metadata==4.12.0
Jinja2==3.1.2
Markdown==3.3.7
markdown-include==0.6.0
markdown-include==0.7.0
MarkupSafe==2.1.1
mergedeep==1.3.4
mkdocs==1.3.0
mkdocs-bootswatch==1.1
mkdocs-material==8.1.8
mkdocs-material-extensions==1.0.3
mkdocs==1.4.2
mkdocs-material==8.5.9
mkdocs-material-extensions==1.1
packaging==21.3
Pygments==2.11.2
pymdown-extensions==9.1
Pygments==2.13.0
pymdown-extensions==9.8
pyparsing==3.0.9
python-dateutil==2.8.2
PyYAML==6.0
pyyaml_env_tag==0.1
requests==2.28.1
six==1.16.0
urllib3==1.26.12
watchdog==2.1.9
zipp==3.8.0


@ -156,7 +156,8 @@ defmodule Pleroma.Application do
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
build_cachex("failed_proxy_url", limit: 2500),
build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000),
build_cachex("translations", default_ttl: :timer.hours(24 * 30), limit: 2500)
build_cachex("translations", default_ttl: :timer.hours(24 * 30), limit: 2500),
build_cachex("instances", default_ttl: :timer.hours(24), ttl_interval: 1000, limit: 2500)
]
end


@ -5,6 +5,8 @@
defmodule Pleroma.Instances.Instance do
@moduledoc "Instance."
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
alias Pleroma.Instances
alias Pleroma.Instances.Instance
alias Pleroma.Repo
@ -22,7 +24,8 @@ defmodule Pleroma.Instances.Instance do
field(:host, :string)
field(:unreachable_since, :naive_datetime_usec)
field(:favicon, :string)
field(:favicon_updated_at, :naive_datetime)
field(:metadata_updated_at, :naive_datetime)
field(:nodeinfo, :map, default: %{})
timestamps()
end
@ -31,7 +34,7 @@ defmodule Pleroma.Instances.Instance do
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:host, :unreachable_since, :favicon, :favicon_updated_at])
|> cast(params, [:host, :unreachable_since, :favicon, :nodeinfo, :metadata_updated_at])
|> validate_required([:host])
|> unique_constraint(:host)
end
@ -138,63 +141,138 @@ defmodule Pleroma.Instances.Instance do
defp parse_datetime(datetime), do: datetime
def get_or_update_favicon(%URI{host: host} = instance_uri) do
existing_record = Repo.get_by(Instance, %{host: host})
def needs_update(nil), do: true
def needs_update(%Instance{metadata_updated_at: nil}), do: true
def needs_update(%Instance{metadata_updated_at: metadata_updated_at}) do
now = NaiveDateTime.utc_now()
NaiveDateTime.diff(now, metadata_updated_at) > 86_400
end
if existing_record && existing_record.favicon_updated_at &&
NaiveDateTime.diff(now, existing_record.favicon_updated_at) < 86_400 do
existing_record.favicon
def local do
%Instance{
host: Pleroma.Web.Endpoint.host(),
favicon: Pleroma.Web.Endpoint.url() <> "/favicon.png",
nodeinfo: Pleroma.Web.Nodeinfo.NodeinfoController.raw_nodeinfo()
}
end
def update_metadata(%URI{host: host} = uri) do
Logger.debug("Checking metadata for #{host}")
existing_record = Repo.get_by(Instance, %{host: host})
if reachable?(host) do
do_update_metadata(uri, existing_record)
else
favicon = scrape_favicon(instance_uri)
{:discard, :unreachable}
end
end
defp do_update_metadata(%URI{host: host} = uri, existing_record) do
if existing_record do
if needs_update(existing_record) do
Logger.info("Updating metadata for #{host}")
favicon = scrape_favicon(uri)
nodeinfo = scrape_nodeinfo(uri)
if existing_record do
existing_record
|> changeset(%{favicon: favicon, favicon_updated_at: now})
|> changeset(%{
host: host,
favicon: favicon,
nodeinfo: nodeinfo,
metadata_updated_at: NaiveDateTime.utc_now()
})
|> Repo.update()
else
%Instance{}
|> changeset(%{host: host, favicon: favicon, favicon_updated_at: now})
|> Repo.insert()
{:discard, "Does not require update"}
end
else
favicon = scrape_favicon(uri)
nodeinfo = scrape_nodeinfo(uri)
favicon
Logger.info("Creating metadata for #{host}")
%Instance{}
|> changeset(%{
host: host,
favicon: favicon,
nodeinfo: nodeinfo,
metadata_updated_at: NaiveDateTime.utc_now()
})
|> Repo.insert()
end
rescue
e ->
Logger.warn("Instance.get_or_update_favicon(\"#{host}\") error: #{inspect(e)}")
end
def get_favicon(%URI{host: host}) do
existing_record = Repo.get_by(Instance, %{host: host})
if existing_record do
existing_record.favicon
else
nil
end
end
defp scrape_nodeinfo(%URI{} = instance_uri) do
with true <- Pleroma.Config.get([:instances_nodeinfo, :enabled]),
{_, true} <- {:reachable, reachable?(instance_uri.host)},
{:ok, %Tesla.Env{status: 200, body: body}} <-
Tesla.get(
"https://#{instance_uri.host}/.well-known/nodeinfo",
headers: [{"Accept", "application/json"}]
),
{:ok, json} <- Jason.decode(body),
{:ok, %{"links" => links}} <- {:ok, json},
{:ok, %{"href" => href}} <-
{:ok,
Enum.find(links, &(&1["rel"] == "http://nodeinfo.diaspora.software/ns/schema/2.0"))},
{:ok, %Tesla.Env{body: data}} <-
Pleroma.HTTP.get(href, [{"accept", "application/json"}], []),
{:length, true} <- {:length, String.length(data) < 50_000},
{:ok, nodeinfo} <- Jason.decode(data) do
nodeinfo
else
{:reachable, false} ->
Logger.debug(
"Instance.scrape_nodeinfo(\"#{to_string(instance_uri)}\") ignored unreachable host"
)
nil
{:length, false} ->
Logger.debug(
"Instance.scrape_nodeinfo(\"#{to_string(instance_uri)}\") ignored too long body"
)
nil
_ ->
nil
end
end
defp scrape_favicon(%URI{} = instance_uri) do
try do
with {_, true} <- {:reachable, reachable?(instance_uri.host)},
{:ok, %Tesla.Env{body: html}} <-
Pleroma.HTTP.get(to_string(instance_uri), [{"accept", "text/html"}], []),
{_, [favicon_rel | _]} when is_binary(favicon_rel) <-
{:parse,
html |> Floki.parse_document!() |> Floki.attribute("link[rel=icon]", "href")},
{_, favicon} when is_binary(favicon) <-
{:merge, URI.merge(instance_uri, favicon_rel) |> to_string()} do
favicon
else
{:reachable, false} ->
Logger.debug(
"Instance.scrape_favicon(\"#{to_string(instance_uri)}\") ignored unreachable host"
)
nil
_ ->
nil
end
rescue
e ->
Logger.warn(
"Instance.scrape_favicon(\"#{to_string(instance_uri)}\") error: #{inspect(e)}"
with true <- Pleroma.Config.get([:instances_favicons, :enabled]),
{_, true} <- {:reachable, reachable?(instance_uri.host)},
{:ok, %Tesla.Env{body: html}} <-
Pleroma.HTTP.get(to_string(instance_uri), [{"accept", "text/html"}], []),
{_, [favicon_rel | _]} when is_binary(favicon_rel) <-
{:parse, html |> Floki.parse_document!() |> Floki.attribute("link[rel=icon]", "href")},
{_, favicon} when is_binary(favicon) <-
{:merge, URI.merge(instance_uri, favicon_rel) |> to_string()},
{:length, true} <- {:length, String.length(favicon) < 255} do
favicon
else
{:reachable, false} ->
Logger.debug(
"Instance.scrape_favicon(\"#{to_string(instance_uri)}\") ignored unreachable host"
)
nil
_ ->
nil
end
end
@ -217,4 +295,25 @@ defmodule Pleroma.Instances.Instance do
end)
|> Stream.run()
end
def get_by_url(url_or_host) do
url = host(url_or_host)
Repo.get_by(Instance, host: url)
end
def get_cached_by_url(url_or_host) do
url = host(url_or_host)
if url == Pleroma.Web.Endpoint.host() do
{:ok, local()}
else
@cachex.fetch!(:instances_cache, "instances:#{url}", fn _ ->
with %Instance{} = instance <- get_by_url(url) do
{:commit, {:ok, instance}}
else
_ -> {:ignore, nil}
end
end)
end
end
end


@ -67,8 +67,9 @@ defmodule Pleroma.UserRelationship do
target_id: target.id
})
|> Repo.insert(
on_conflict: {:replace_all_except, [:id]},
conflict_target: [:source_id, :relationship_type, :target_id]
on_conflict: {:replace_all_except, [:id, :inserted_at]},
conflict_target: [:source_id, :relationship_type, :target_id],
returning: true
)
end


@ -149,9 +149,20 @@ defmodule Pleroma.Web.ActivityPub.MRF do
defp get_policies(policies) when is_list(policies), do: policies
defp get_policies(_), do: []
# Matches the following:
# - https://baddomain.net
# - https://extra.baddomain.net/
# Does NOT match the following:
# - https://maybebaddomain.net/
def subdomain_regex("*." <> domain), do: subdomain_regex(domain)
def subdomain_regex(domain) do
~r/^(.+\.)?#{Regex.escape(domain)}$/i
end
@spec subdomains_regex([String.t()]) :: [Regex.t()]
def subdomains_regex(domains) when is_list(domains) do
for domain <- domains, do: ~r(^#{String.replace(domain, "*.", "(.*\\.)*")}$)i
Enum.map(domains, &subdomain_regex/1)
end
@spec subdomain_match?([Regex.t()], String.t()) :: boolean()


@ -192,6 +192,7 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
# - Increase the user note count
# - Increase the reply count
# - Increase replies count
# - Ask for scraping of nodeinfo
# - Set up ActivityExpiration
# - Set up notifications
# - Index incoming posts for search (if needed)
@ -209,6 +210,10 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
reply_depth = (meta[:depth] || 0) + 1
Pleroma.Workers.NodeInfoFetcherWorker.enqueue("process", %{
"source_url" => activity.data["actor"]
})
# FIXME: Force inReplyTo to replies
if Pleroma.Web.Federator.allowed_thread_distance?(reply_depth) and
object.data["replies"] != nil do
@ -234,7 +239,9 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
{:ok, activity, meta}
else
e -> Repo.rollback(e)
e ->
Logger.error(inspect(e))
Repo.rollback(e)
end
end


@ -21,10 +21,12 @@ defmodule Pleroma.Web.AkkomaAPI.TranslationController do
@doc "GET /api/v1/akkoma/translation/languages"
def languages(conn, _params) do
with {:ok, source_languages, dest_languages} <- get_languages() do
with {:enabled, true} <- {:enabled, Pleroma.Config.get([:translator, :enabled])},
{:ok, source_languages, dest_languages} <- get_languages() do
conn
|> json(%{source: source_languages, target: dest_languages})
else
{:enabled, false} -> json(conn, %{})
e -> IO.inspect(e)
end
end


@ -334,6 +334,22 @@ defmodule Pleroma.Web.ApiSpec.AccountOperation do
}
end
def remove_from_followers_operation do
%Operation{
tags: ["Account actions"],
summary: "Remove from followers",
operationId: "AccountController.remove_from_followers",
security: [%{"oAuth" => ["follow", "write:follows"]}],
description: "Remove the given account from followers",
parameters: [%Reference{"$ref": "#/components/parameters/accountIdOrNickname"}],
responses: %{
200 => Operation.response("Relationship", "application/json", AccountRelationship),
400 => Operation.response("Error", "application/json", ApiError),
404 => Operation.response("Error", "application/json", ApiError)
}
}
end
def note_operation do
%Operation{
tags: ["Account actions"],


@ -102,7 +102,7 @@ defmodule Pleroma.Web.Auth.LDAPAuthenticator do
{:scope, :eldap.wholeSubtree()},
{:timeout, @search_timeout}
]) do
{:ok, {:eldap_search_result, [{:eldap_entry, _, attributes}], _}} ->
{:ok, {:eldap_search_result, [{:eldap_entry, _, attributes}], _, _}} ->
params = %{
name: name,
nickname: name,


@ -76,15 +76,16 @@ defmodule Pleroma.Web.MastodonAPI.AccountController do
plug(
OAuthScopesPlug,
%{scopes: ["follow", "write:follows"]} when action in [:follow_by_uri, :follow, :unfollow]
%{scopes: ["follow", "write:follows"]}
when action in [:follow_by_uri, :follow, :unfollow, :remove_from_followers]
)
plug(OAuthScopesPlug, %{scopes: ["follow", "read:mutes"]} when action == :mutes)
plug(OAuthScopesPlug, %{scopes: ["follow", "write:mutes"]} when action in [:mute, :unmute])
@relationship_actions [:follow, :unfollow]
@needs_account ~W(followers following lists follow unfollow mute unmute block unblock note)a
@relationship_actions [:follow, :unfollow, :remove_from_followers]
@needs_account ~W(followers following lists follow unfollow mute unmute block unblock note remove_from_followers)a
plug(
RateLimiter,
@ -447,6 +448,20 @@ defmodule Pleroma.Web.MastodonAPI.AccountController do
end
end
@doc "POST /api/v1/accounts/:id/remove_from_followers"
def remove_from_followers(%{assigns: %{user: %{id: id}, account: %{id: id}}}, _params) do
{:error, "Can not unfollow yourself"}
end
def remove_from_followers(%{assigns: %{user: followed, account: follower}} = conn, _params) do
with {:ok, follower} <- CommonAPI.reject_follow_request(follower, followed) do
render(conn, "relationship.json", user: followed, target: follower)
else
nil ->
render_error(conn, :not_found, "Record not found")
end
end
@doc "POST /api/v1/follows"
def follow_by_uri(%{body_params: %{uri: uri}} = conn, _) do
case User.get_cached_by_nickname(uri) do


@ -94,12 +94,12 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do
followed_by =
if following_relationships do
case FollowingRelationship.find(following_relationships, target, reading_user) do
%{state: :follow_accept} -> true
_ -> false
end
target_to_user_following_relation =
FollowingRelationship.find(following_relationships, target, reading_user)
User.get_follow_state(target, reading_user, target_to_user_following_relation)
else
User.following?(target, reading_user)
User.get_follow_state(target, reading_user)
end
subscribing =
@ -115,7 +115,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do
%{
id: to_string(target.id),
following: follow_state == :follow_accept,
followed_by: followed_by,
followed_by: followed_by == :follow_accept,
blocking:
UserRelationship.exists?(
user_relationships,
@ -151,6 +151,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do
subscribing: subscribing,
notifying: subscribing,
requested: follow_state == :follow_pending,
requested_by: followed_by == :follow_pending,
domain_blocking: User.blocks_domain?(reading_user, target),
showing_reblogs:
not UserRelationship.exists?(
@ -186,6 +187,16 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do
render_many(targets, AccountView, "relationship.json", render_opts)
end
def render("instance.json", %{instance: %Pleroma.Instances.Instance{} = instance}) do
%{
name: instance.host,
favicon: instance.favicon |> MediaProxy.url(),
nodeinfo: instance.nodeinfo
}
end
def render("instance.json", _), do: nil
defp do_render("show.json", %{user: user} = opts) do
user = User.sanitize_html(user, User.html_filter_policy(opts[:for]))
display_name = user.name || user.nickname
@ -230,16 +241,20 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do
%{}
end
favicon =
if Pleroma.Config.get([:instances_favicons, :enabled]) do
user
|> Map.get(:ap_id, "")
|> URI.parse()
|> URI.merge("/")
|> Pleroma.Instances.Instance.get_or_update_favicon()
|> MediaProxy.url()
instance =
with {:ok, instance} <- Pleroma.Instances.Instance.get_cached_by_url(user.ap_id) do
instance
else
_ ->
nil
end
favicon =
if is_nil(instance) do
nil
else
instance.favicon
|> MediaProxy.url()
end
%{
@ -271,7 +286,9 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do
}
},
last_status_at: user.last_status_at,
akkoma: %{
instance: render("instance.json", %{instance: instance})
},
# Pleroma extensions
# Note: it's insecure to output :email but fully-qualified nickname may serve as safe stub
fqn: User.full_nickname(user),


@ -68,7 +68,7 @@ defmodule Pleroma.Web.MastodonAPI.PollView do
end)
end
defp voters_count(%{data: %{"voters" => [_ | _] = voters}}) do
defp voters_count(%{data: %{"voters" => voters}}) when is_list(voters) do
length(voters)
end


@ -209,212 +209,214 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
end
def render("show.json", %{activity: %{data: %{"object" => _object}} = activity} = opts) do
object = Object.normalize(activity, fetch: false)
with %Object{} = object <- Object.normalize(activity, fetch: false) do
user = CommonAPI.get_user(activity.data["actor"])
user_follower_address = user.follower_address
user = CommonAPI.get_user(activity.data["actor"])
user_follower_address = user.follower_address
like_count = object.data["like_count"] || 0
announcement_count = object.data["announcement_count"] || 0
like_count = object.data["like_count"] || 0
announcement_count = object.data["announcement_count"] || 0
hashtags = Object.hashtags(object)
sensitive = object.data["sensitive"] || Enum.member?(hashtags, "nsfw")
hashtags = Object.hashtags(object)
sensitive = object.data["sensitive"] || Enum.member?(hashtags, "nsfw")
tags = Object.tags(object)
tags = Object.tags(object)
tag_mentions =
tags
|> Enum.filter(fn tag -> is_map(tag) and tag["type"] == "Mention" end)
|> Enum.map(fn tag -> tag["href"] end)
tag_mentions =
tags
|> Enum.filter(fn tag -> is_map(tag) and tag["type"] == "Mention" end)
|> Enum.map(fn tag -> tag["href"] end)
mentions =
(object.data["to"] ++ tag_mentions)
|> Enum.uniq()
|> Enum.map(fn
Pleroma.Constants.as_public() -> nil
^user_follower_address -> nil
ap_id -> User.get_cached_by_ap_id(ap_id)
end)
|> Enum.filter(& &1)
|> Enum.map(fn user -> AccountView.render("mention.json", %{user: user}) end)
mentions =
(object.data["to"] ++ tag_mentions)
|> Enum.uniq()
|> Enum.map(fn
Pleroma.Constants.as_public() -> nil
^user_follower_address -> nil
ap_id -> User.get_cached_by_ap_id(ap_id)
end)
|> Enum.filter(& &1)
|> Enum.map(fn user -> AccountView.render("mention.json", %{user: user}) end)
favorited = opts[:for] && opts[:for].ap_id in (object.data["likes"] || [])
favorited = opts[:for] && opts[:for].ap_id in (object.data["likes"] || [])
bookmarked = Activity.get_bookmark(activity, opts[:for]) != nil
bookmarked = Activity.get_bookmark(activity, opts[:for]) != nil
client_posted_this_activity = opts[:for] && user.id == opts[:for].id
client_posted_this_activity = opts[:for] && user.id == opts[:for].id
expires_at =
with true <- client_posted_this_activity,
%Oban.Job{scheduled_at: scheduled_at} <-
Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity.id) do
scheduled_at
else
_ -> nil
end
expires_at =
with true <- client_posted_this_activity,
%Oban.Job{scheduled_at: scheduled_at} <-
Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity.id) do
scheduled_at
else
_ -> nil
end
thread_muted? =
cond do
is_nil(opts[:for]) -> false
is_boolean(activity.thread_muted?) -> activity.thread_muted?
true -> CommonAPI.thread_muted?(opts[:for], activity)
end
thread_muted? =
cond do
is_nil(opts[:for]) -> false
is_boolean(activity.thread_muted?) -> activity.thread_muted?
true -> CommonAPI.thread_muted?(opts[:for], activity)
end
attachment_data = object.data["attachment"] || []
attachments = render_many(attachment_data, StatusView, "attachment.json", as: :attachment)
attachment_data = object.data["attachment"] || []
attachments = render_many(attachment_data, StatusView, "attachment.json", as: :attachment)
created_at = Utils.to_masto_date(object.data["published"])
created_at = Utils.to_masto_date(object.data["published"])
edited_at =
with %{"updated" => updated} <- object.data,
date <- Utils.to_masto_date(updated),
true <- date != "" do
date
else
_ ->
nil
end
edited_at =
with %{"updated" => updated} <- object.data,
date <- Utils.to_masto_date(updated),
true <- date != "" do
date
else
_ ->
nil
end
reply_to = get_reply_to(activity, opts)
reply_to = get_reply_to(activity, opts)
reply_to_user = reply_to && CommonAPI.get_user(reply_to.data["actor"])
reply_to_user = reply_to && CommonAPI.get_user(reply_to.data["actor"])
history_len =
1 +
(Object.Updater.history_for(object.data)
|> Map.get("orderedItems")
|> length())
history_len =
1 +
(Object.Updater.history_for(object.data)
|> Map.get("orderedItems")
|> length())
# See render("history.json", ...) for more details
# Here the implicit index of the current content is 0
chrono_order = history_len - 1
# See render("history.json", ...) for more details
# Here the implicit index of the current content is 0
chrono_order = history_len - 1
content =
object
|> render_content()
content =
object
|> render_content()
content_html =
content
|> Activity.HTML.get_cached_scrubbed_html_for_activity(
User.html_filter_policy(opts[:for]),
activity,
"mastoapi:content:#{chrono_order}"
)
content_plaintext =
content
|> Activity.HTML.get_cached_stripped_html_for_activity(
activity,
"mastoapi:content:#{chrono_order}"
)
summary = object.data["summary"] || ""
card = render("card.json", Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity))
url =
if user.local do
Pleroma.Web.Router.Helpers.o_status_url(Pleroma.Web.Endpoint, :notice, activity)
else
object.data["url"] || object.data["external_url"] || object.data["id"]
end
direct_conversation_id =
with {_, nil} <- {:direct_conversation_id, opts[:direct_conversation_id]},
{_, true} <- {:include_id, opts[:with_direct_conversation_id]},
{_, %User{} = for_user} <- {:for_user, opts[:for]} do
Activity.direct_conversation_id(activity, for_user)
else
{:direct_conversation_id, participation_id} when is_integer(participation_id) ->
participation_id
_e ->
nil
end
emoji_reactions =
object.data
|> Map.get("reactions", [])
|> EmojiReactionController.filter_allowed_users(
opts[:for],
Map.get(opts, :with_muted, false)
)
|> Stream.map(fn {emoji, users, url} ->
build_emoji_map(emoji, users, url, opts[:for])
end)
|> Enum.to_list()
# Status muted state (would do 1 request per status unless user mutes are preloaded)
muted =
thread_muted? ||
UserRelationship.exists?(
get_in(opts, [:relationships, :user_relationships]),
:mute,
opts[:for],
user,
fn for_user, user -> User.mutes?(for_user, user) end
content_html =
content
|> Activity.HTML.get_cached_scrubbed_html_for_activity(
User.html_filter_policy(opts[:for]),
activity,
"mastoapi:content:#{chrono_order}"
)
{pinned?, pinned_at} = pin_data(object, user)
content_plaintext =
content
|> Activity.HTML.get_cached_stripped_html_for_activity(
activity,
"mastoapi:content:#{chrono_order}"
)
quote = Activity.get_quoted_activity_from_object(object)
summary = object.data["summary"] || ""
%{
id: to_string(activity.id),
uri: object.data["id"],
url: url,
account:
AccountView.render("show.json", %{
user: user,
for: opts[:for]
}),
in_reply_to_id: reply_to && to_string(reply_to.id),
in_reply_to_account_id: reply_to_user && to_string(reply_to_user.id),
reblog: nil,
card: card,
content: content_html,
text: opts[:with_source] && get_source_text(object.data["source"]),
created_at: created_at,
edited_at: edited_at,
reblogs_count: announcement_count,
replies_count: object.data["repliesCount"] || 0,
favourites_count: like_count,
reblogged: reblogged?(activity, opts[:for]),
favourited: present?(favorited),
bookmarked: present?(bookmarked),
muted: muted,
pinned: pinned?,
sensitive: sensitive,
spoiler_text: summary,
visibility: get_visibility(object),
media_attachments: attachments,
poll: render(PollView, "show.json", object: object, for: opts[:for]),
mentions: mentions,
tags: build_tags(tags),
application: build_application(object.data["generator"]),
language: nil,
emojis: build_emojis(object.data["emoji"]),
quote_id: if(quote, do: quote.id, else: nil),
quote: maybe_render_quote(quote, opts),
emoji_reactions: emoji_reactions,
pleroma: %{
local: activity.local,
conversation_id: get_context_id(activity),
context: object.data["context"],
in_reply_to_account_acct: reply_to_user && reply_to_user.nickname,
content: %{"text/plain" => content_plaintext},
spoiler_text: %{"text/plain" => summary},
expires_at: expires_at,
direct_conversation_id: direct_conversation_id,
thread_muted: thread_muted?,
card = render("card.json", Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity))
url =
if user.local do
Pleroma.Web.Router.Helpers.o_status_url(Pleroma.Web.Endpoint, :notice, activity)
else
object.data["url"] || object.data["external_url"] || object.data["id"]
end
direct_conversation_id =
with {_, nil} <- {:direct_conversation_id, opts[:direct_conversation_id]},
{_, true} <- {:include_id, opts[:with_direct_conversation_id]},
{_, %User{} = for_user} <- {:for_user, opts[:for]} do
Activity.direct_conversation_id(activity, for_user)
else
{:direct_conversation_id, participation_id} when is_integer(participation_id) ->
participation_id
_e ->
nil
end
emoji_reactions =
object.data
|> Map.get("reactions", [])
|> EmojiReactionController.filter_allowed_users(
opts[:for],
Map.get(opts, :with_muted, false)
)
|> Stream.map(fn {emoji, users, url} ->
build_emoji_map(emoji, users, url, opts[:for])
end)
|> Enum.to_list()
# Status muted state (would do 1 request per status unless user mutes are preloaded)
muted =
thread_muted? ||
UserRelationship.exists?(
get_in(opts, [:relationships, :user_relationships]),
:mute,
opts[:for],
user,
fn for_user, user -> User.mutes?(for_user, user) end
)
{pinned?, pinned_at} = pin_data(object, user)
quote = Activity.get_quoted_activity_from_object(object)
%{
id: to_string(activity.id),
uri: object.data["id"],
url: url,
account:
AccountView.render("show.json", %{
user: user,
for: opts[:for]
}),
in_reply_to_id: reply_to && to_string(reply_to.id),
in_reply_to_account_id: reply_to_user && to_string(reply_to_user.id),
reblog: nil,
card: card,
content: content_html,
text: opts[:with_source] && get_source_text(object.data["source"]),
created_at: created_at,
edited_at: edited_at,
reblogs_count: announcement_count,
replies_count: object.data["repliesCount"] || 0,
favourites_count: like_count,
reblogged: reblogged?(activity, opts[:for]),
favourited: present?(favorited),
bookmarked: present?(bookmarked),
muted: muted,
pinned: pinned?,
sensitive: sensitive,
spoiler_text: summary,
visibility: get_visibility(object),
media_attachments: attachments,
poll: render(PollView, "show.json", object: object, for: opts[:for]),
mentions: mentions,
tags: build_tags(tags),
application: build_application(object.data["generator"]),
language: nil,
emojis: build_emojis(object.data["emoji"]),
quote_id: if(quote, do: quote.id, else: nil),
quote: maybe_render_quote(quote, opts),
emoji_reactions: emoji_reactions,
parent_visible: visible_for_user?(reply_to, opts[:for]),
pinned_at: pinned_at
},
akkoma: %{
source: object.data["source"]
pleroma: %{
local: activity.local,
conversation_id: get_context_id(activity),
context: object.data["context"],
in_reply_to_account_acct: reply_to_user && reply_to_user.nickname,
content: %{"text/plain" => content_plaintext},
spoiler_text: %{"text/plain" => summary},
expires_at: expires_at,
direct_conversation_id: direct_conversation_id,
thread_muted: thread_muted?,
emoji_reactions: emoji_reactions,
parent_visible: visible_for_user?(reply_to, opts[:for]),
pinned_at: pinned_at
},
akkoma: %{
source: object.data["source"]
}
}
}
else
nil -> nil
end
end
def render("show.json", _) do

View file

@ -68,7 +68,7 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlug do
]
}
[{"reply-to", Jason.encode!(report_group)} | headers]
[{"report-to", Jason.encode!(report_group)} | headers]
else
headers
end
@ -104,13 +104,12 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlug do
{[img_src, " https:"], [media_src, " https:"]}
end
connect_src = ["connect-src 'self' blob: ", static_url, ?\s, websocket_url]
connect_src =
if Config.get(:env) == :dev do
[connect_src, " http://localhost:3035/"]
if Config.get([:media_proxy, :enabled]) do
sources = build_csp_multimedia_source_list()
["connect-src 'self' blob: ", static_url, ?\s, websocket_url, ?\s, sources]
else
connect_src
["connect-src 'self' blob: ", static_url, ?\s, websocket_url]
end
script_src =

View file

@ -35,7 +35,7 @@ defmodule Pleroma.Web.Plugs.UploadedMedia do
conn =
case fetch_query_params(conn) do
%{query_params: %{"name" => name}} = conn ->
name = String.replace(name, "\"", "\\\"")
name = escape_header_value(name)
put_resp_header(conn, "content-disposition", "filename=\"#{name}\"")
@ -98,4 +98,11 @@ defmodule Pleroma.Web.Plugs.UploadedMedia do
|> send_resp(:internal_server_error, dgettext("errors", "Internal Error"))
|> halt()
end
defp escape_header_value(value) do
value
|> String.replace("\"", "\\\"")
|> String.replace("\\r", "")
|> String.replace("\\n", "")
end
end

View file

@ -16,7 +16,7 @@ defmodule Pleroma.Web.Push.Impl do
require Logger
import Ecto.Query
@types ["Create", "Follow", "Announce", "Like", "Move", "EmojiReact"]
@types ["Create", "Follow", "Announce", "Like", "Move", "EmojiReact", "Update"]
@doc "Performs sending notifications for user subscriptions"
@spec perform(Notification.t()) :: list(any) | :error | {:error, :unknown_type}
@ -167,6 +167,15 @@ defmodule Pleroma.Web.Push.Impl do
end
end
def format_body(
%{activity: %{data: %{"type" => "Update"}}},
actor,
_object,
_mastodon_type
) do
"@#{actor.nickname} edited a status"
end
def format_title(activity, mastodon_type \\ nil)
def format_title(%{activity: %{data: %{"directMessage" => true}}}, _mastodon_type) do
@ -180,6 +189,7 @@ defmodule Pleroma.Web.Push.Impl do
"follow_request" -> "New Follow Request"
"reblog" -> "New Repeat"
"favourite" -> "New Favorite"
"update" -> "New Update"
"pleroma:emoji_reaction" -> "New Reaction"
type -> "New #{String.capitalize(type || "event")}"
end

View file

@ -509,6 +509,7 @@ defmodule Pleroma.Web.Router do
post("/accounts/:id/mute", AccountController, :mute)
post("/accounts/:id/unmute", AccountController, :unmute)
post("/accounts/:id/note", AccountController, :note)
post("/accounts/:id/remove_from_followers", AccountController, :remove_from_followers)
get("/conversations", ConversationController, :index)
post("/conversations/:id/read", ConversationController, :mark_as_read)

View file

@ -0,0 +1,18 @@
defmodule Pleroma.Workers.NodeInfoFetcherWorker do
use Pleroma.Workers.WorkerHelper, queue: "nodeinfo_fetcher"
alias Oban.Job
alias Pleroma.Instances.Instance
@impl Oban.Worker
def perform(%Job{
args: %{"op" => "process", "source_url" => domain}
}) do
uri =
domain
|> URI.parse()
|> URI.merge("/")
Instance.update_metadata(uri)
end
end
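The worker normalises whatever URL it is given down to the instance root before fetching metadata. A minimal usage sketch of how such a job might be enqueued, assuming the `enqueue/2` helper that `Pleroma.Workers.WorkerHelper` generates for its workers (the exact call site is an assumption, not shown in this diff):

# Hypothetical enqueue of a nodeinfo fetch for the instance behind an actor URL;
# the args mirror the "op"/"source_url" keys matched in perform/1 above.
Pleroma.Workers.NodeInfoFetcherWorker.enqueue("process", %{
  "source_url" => "https://wowee.example.com/users/1"
})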

View file

@ -4,7 +4,7 @@ defmodule Pleroma.Mixfile do
def project do
[
app: :pleroma,
version: version("3.3.1"),
version: version("3.4.0"),
elixir: "~> 1.12",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
@ -139,7 +139,7 @@ defmodule Pleroma.Mixfile do
{:castore, "~> 0.1"},
{:cowlib, "~> 2.9", override: true},
{:gun, "~> 2.0.0-rc.1", override: true},
{:finch, "~> 0.10.0"},
{:finch, "~> 0.13.0"},
{:jason, "~> 1.2"},
{:mogrify, "~> 0.9.1"},
{:ex_aws, "~> 2.1.6"},

View file

@ -7,7 +7,7 @@
"cachex": {:hex, :cachex, "3.4.0", "868b2959ea4aeb328c6b60ff66c8d5123c083466ad3c33d3d8b5f142e13101fb", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "370123b1ab4fba4d2965fb18f87fd758325709787c8c5fce35b3fe80645ccbe5"},
"calendar": {:hex, :calendar, "1.0.0", "f52073a708528482ec33d0a171954ca610fe2bd28f1e871f247dc7f1565fa807", [:mix], [{:tzdata, "~> 0.5.20 or ~> 0.1.201603 or ~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "990e9581920c82912a5ee50e62ff5ef96da6b15949a2ee4734f935fdef0f0a6f"},
"captcha": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git", "e0f16822d578866e186a0974d65ad58cddc1e2ab", [ref: "e0f16822d578866e186a0974d65ad58cddc1e2ab"]},
"castore": {:hex, :castore, "0.1.17", "ba672681de4e51ed8ec1f74ed624d104c0db72742ea1a5e74edbc770c815182f", [:mix], [], "hexpm", "d9844227ed52d26e7519224525cb6868650c272d4a3d327ce3ca5570c12163f9"},
"castore": {:hex, :castore, "0.1.18", "deb5b9ab02400561b6f5708f3e7660fc35ca2d51bfc6a940d2f513f89c2975fc", [:mix], [], "hexpm", "61bbaf6452b782ef80b33cdb45701afbcf0a918a45ebe7e73f1130d661e66a06"},
"certifi": {:hex, :certifi, "2.9.0", "6f2a475689dd47f19fb74334859d460a2dc4e3252a3324bd2111b8f0429e7e21", [:rebar3], [], "hexpm", "266da46bdb06d6c6d35fde799bcb28d36d985d424ad7c08b5bb48f5b5cdd4641"},
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
"comeonin": {:hex, :comeonin, "5.3.3", "2c564dac95a35650e9b6acfe6d2952083d8a08e4a89b93a481acb552b325892e", [:mix], [], "hexpm", "3e38c9c2cb080828116597ca8807bb482618a315bfafd98c90bc22a821cc84df"},
@ -43,14 +43,14 @@
"fast_html": {:hex, :fast_html, "2.0.5", "c61760340606c1077ff1f196f17834056cb1dd3d5cb92a9f2cabf28bc6221c3c", [:make, :mix], [{:elixir_make, "~> 0.4", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}], "hexpm", "605f4f4829443c14127694ebabb681778712ceecb4470ec32aa31012330e6506"},
"fast_sanitize": {:hex, :fast_sanitize, "0.2.3", "67b93dfb34e302bef49fec3aaab74951e0f0602fd9fa99085987af05bd91c7a5", [:mix], [{:fast_html, "~> 2.0", [hex: :fast_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.8", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "e8ad286d10d0386e15d67d0ee125245ebcfbc7d7290b08712ba9013c8c5e56e2"},
"file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
"finch": {:hex, :finch, "0.10.2", "9ad27d68270d879f73f26604bb2e573d40f29bf0e907064a9a337f90a16a0312", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "dd8b11b282072cec2ef30852283949c248bd5d2820c88d8acc89402b81db7550"},
"finch": {:hex, :finch, "0.13.0", "c881e5460ec563bf02d4f4584079e62201db676ed4c0ef3e59189331c4eddf7b", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "49957dcde10dcdc042a123a507a9c5ec5a803f53646d451db2f7dea696fba6cc"},
"flake_id": {:hex, :flake_id, "0.1.0", "7716b086d2e405d09b647121a166498a0d93d1a623bead243e1f74216079ccb3", [:mix], [{:base62, "~> 1.2", [hex: :base62, repo: "hexpm", optional: false]}, {:ecto, ">= 2.0.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm", "31fc8090fde1acd267c07c36ea7365b8604055f897d3a53dd967658c691bd827"},
"floki": {:hex, :floki, "0.33.1", "f20f1eb471e726342b45ccb68edb9486729e7df94da403936ea94a794f072781", [:mix], [{:html_entities, "~> 0.5.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm", "461035fd125f13fdf30f243c85a0b1e50afbec876cbf1ceefe6fddd2e6d712c6"},
"gen_smtp": {:hex, :gen_smtp, "0.15.0", "9f51960c17769b26833b50df0b96123605a8024738b62db747fece14eb2fbfcc", [:rebar3], [], "hexpm", "29bd14a88030980849c7ed2447b8db6d6c9278a28b11a44cafe41b791205440f"},
"gettext": {:git, "https://github.com/tusooa/gettext.git", "72fb2496b6c5280ed911bdc3756890e7f38a4808", [ref: "72fb2496b6c5280ed911bdc3756890e7f38a4808"]},
"gun": {:hex, :gun, "2.0.0-rc.2", "7c489a32dedccb77b6e82d1f3c5a7dadfbfa004ec14e322cdb5e579c438632d2", [:make, :rebar3], [{:cowlib, "2.11.0", [hex: :cowlib, repo: "hexpm", optional: false]}], "hexpm", "6b9d1eae146410d727140dbf8b404b9631302ecc2066d1d12f22097ad7d254fc"},
"hackney": {:hex, :hackney, "1.18.1", "f48bf88f521f2a229fc7bae88cf4f85adc9cd9bcf23b5dc8eb6a1788c662c4f6", [:rebar3], [{:certifi, "~>2.9.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~>6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~>1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.3.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~>1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "a4ecdaff44297e9b5894ae499e9a070ea1888c84afdd1fd9b7b2bc384950128e"},
"hpax": {:hex, :hpax, "0.1.1", "2396c313683ada39e98c20a75a82911592b47e5c24391363343bde74f82396ca", [:mix], [], "hexpm", "0ae7d5a0b04a8a60caf7a39fcf3ec476f35cc2cc16c05abea730d3ce6ac6c826"},
"hpax": {:hex, :hpax, "0.1.2", "09a75600d9d8bbd064cdd741f21fc06fc1f4cf3d0fcc335e5aa19be1a7235c84", [:mix], [], "hexpm", "2c87843d5a23f5f16748ebe77969880e29809580efdaccd615cd3bed628a8c13"},
"html_entities": {:hex, :html_entities, "0.5.2", "9e47e70598da7de2a9ff6af8758399251db6dbb7eebe2b013f2bbd2515895c3c", [:mix], [], "hexpm", "c53ba390403485615623b9531e97696f076ed415e8d8058b1dbaa28181f4fdcc"},
"http_signatures": {:hex, :http_signatures, "0.1.1", "ca7ebc1b61542b163644c8c3b1f0e0f41037d35f2395940d3c6c7deceab41fd8", [:mix], [], "hexpm", "cc3b8a007322cc7b624c0c15eec49ee58ac977254ff529a3c482f681465942a3"},
"httpoison": {:hex, :httpoison, "1.8.1", "df030d96de89dad2e9983f92b0c506a642d4b1f4a819c96ff77d12796189c63e", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "35156a6d678d6d516b9229e208942c405cf21232edd632327ecfaf4fd03e79e0"},

View file

@ -3,16 +3,16 @@ msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2020-09-09 09:49+0000\n"
"PO-Revision-Date: 2020-09-11 21:26+0000\n"
"Last-Translator: tarteka <info@tarteka.net>\n"
"Language-Team: Spanish <https://translate.pleroma.social/projects/pleroma/"
"pleroma/es/>\n"
"PO-Revision-Date: 2022-08-19 09:25+0000\n"
"Last-Translator: mint <they@mint.lgbt>\n"
"Language-Team: Spanish <http://translate.akkoma.dev/projects/akkoma/"
"akkoma-backend-errors/es/>\n"
"Language: es\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=n != 1;\n"
"X-Generator: Weblate 4.0.4\n"
"X-Generator: Weblate 4.13.1\n"
## This file is a PO Template file.
##
@ -66,8 +66,8 @@ msgstr[1] "debe tener %{count} caracteres"
msgid "should have %{count} item(s)"
msgid_plural "should have %{count} item(s)"
msgstr[0] ""
msgstr[1] ""
msgstr[0] "debería tener %{count} item"
msgstr[1] "debería tener %{count} items"
msgid "should be at least %{count} character(s)"
msgid_plural "should be at least %{count} character(s)"

View file

@ -0,0 +1,163 @@
msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2022-08-16 10:49+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: Automatically generated\n"
"Language-Team: none\n"
"Language: nl\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Generator: Translate Toolkit 3.7.1\n"
## This file is a PO Template file.
##
## `msgid`s here are often extracted from source code.
## Add new translations manually only if they're dynamic
## translations that can't be statically extracted.
##
## Run `mix gettext.extract` to bring this file up to
## date. Leave `msgstr`s empty as changing them here has no
## effect: edit them in PO (`.po`) files instead.
msgid "eperm"
msgstr ""
msgid "eacces"
msgstr ""
msgid "eagain"
msgstr ""
msgid "ebadf"
msgstr ""
msgid "ebadmsg"
msgstr ""
msgid "ebusy"
msgstr ""
msgid "edeadlk"
msgstr ""
msgid "edeadlock"
msgstr ""
msgid "edquot"
msgstr ""
msgid "eexist"
msgstr ""
msgid "efault"
msgstr ""
msgid "efbig"
msgstr ""
msgid "eftype"
msgstr ""
msgid "eintr"
msgstr ""
msgid "einval"
msgstr ""
msgid "eio"
msgstr ""
msgid "eisdir"
msgstr ""
msgid "eloop"
msgstr ""
msgid "emfile"
msgstr ""
msgid "emlink"
msgstr ""
msgid "emultihop"
msgstr ""
msgid "enametoolong"
msgstr ""
msgid "enfile"
msgstr ""
msgid "enobufs"
msgstr ""
msgid "enodev"
msgstr ""
msgid "enolck"
msgstr ""
msgid "enolink"
msgstr ""
msgid "enoent"
msgstr ""
msgid "enomem"
msgstr ""
msgid "enospc"
msgstr ""
msgid "enosr"
msgstr ""
msgid "enostr"
msgstr ""
msgid "enosys"
msgstr ""
msgid "enotblk"
msgstr ""
msgid "enotdir"
msgstr ""
msgid "enotsup"
msgstr ""
msgid "enxio"
msgstr ""
msgid "eopnotsupp"
msgstr ""
msgid "eoverflow"
msgstr ""
msgid "epipe"
msgstr ""
msgid "erange"
msgstr ""
msgid "erofs"
msgstr ""
msgid "espipe"
msgstr ""
msgid "esrch"
msgstr ""
msgid "estale"
msgstr ""
msgid "etxtbsy"
msgstr ""
msgid "exdev"
msgstr ""

View file

@ -0,0 +1,17 @@
defmodule Pleroma.Repo.Migrations.AddNodeinfo do
use Ecto.Migration
def up do
alter table(:instances) do
add_if_not_exists(:nodeinfo, :map, default: %{})
add_if_not_exists(:metadata_updated_at, :naive_datetime)
end
end
def down do
alter table(:instances) do
remove_if_exists(:nodeinfo, :map)
remove_if_exists(:metadata_updated_at, :naive_datetime)
end
end
end
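For orientation, a sketch of the Ecto schema fields these new columns would back; the field list and types are assumptions for illustration and the real `Pleroma.Instances.Instance` schema may differ:

defmodule Pleroma.Instances.Instance do
  use Ecto.Schema

  schema "instances" do
    field(:host, :string)
    field(:favicon, :string)
    field(:unreachable_since, :naive_datetime)
    # columns added by this migration
    field(:nodeinfo, :map, default: %{})
    field(:metadata_updated_at, :naive_datetime)

    timestamps()
  end
end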

View file

@ -0,0 +1,37 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Repo.Migrations.AddAssociatedObjectIdFunction do
use Ecto.Migration
def up do
statement = """
CREATE OR REPLACE FUNCTION associated_object_id(data jsonb) RETURNS varchar AS $$
DECLARE
object_data jsonb;
BEGIN
IF jsonb_typeof(data->'object') = 'array' THEN
object_data := data->'object'->0;
ELSE
object_data := data->'object';
END IF;
IF jsonb_typeof(object_data->'id') = 'string' THEN
RETURN object_data->>'id';
ELSIF jsonb_typeof(object_data) = 'string' THEN
RETURN object_data#>>'{}';
ELSE
RETURN NULL;
END IF;
END;
$$ LANGUAGE plpgsql IMMUTABLE;
"""
execute(statement)
end
def down do
execute("DROP FUNCTION IF EXISTS associated_object_id(data jsonb)")
end
end
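The helper lets queries (and the expression index introduced in the next migration) address an activity's object id uniformly, whether `object` is an embedded map, a bare id string, or an array. A hedged sketch of how it could be called from the Ecto side; the query shape and variable names are assumptions, not the project's actual call site:

import Ecto.Query

# Look up the Create activity for a given object AP id; the fragment matches
# the expression indexed by activities_create_objects_index.
ap_id = "https://example.com/objects/1"

Pleroma.Activity
|> where([a], fragment("associated_object_id(?)", a.data) == ^ap_id)
|> where([a], fragment("(?)->>'type'", a.data) == "Create")
|> Pleroma.Repo.one()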

View file

@ -0,0 +1,37 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Repo.Migrations.SwitchToAssociatedObjectIdIndex do
use Ecto.Migration
@disable_ddl_transaction true
@disable_migration_lock true
def up do
drop_if_exists(
index(:activities, ["(coalesce(data->'object'->>'id', data->>'object'))"],
name: :activities_create_objects_index
)
)
create(
index(:activities, ["associated_object_id(data)"],
name: :activities_create_objects_index,
concurrently: true
)
)
end
def down do
drop_if_exists(
index(:activities, ["associated_object_id(data)"], name: :activities_create_objects_index)
)
create(
index(:activities, ["(coalesce(data->'object'->>'id', data->>'object'))"],
name: :activities_create_objects_index,
concurrently: true
)
)
end
end

View file

@ -0,0 +1,156 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Repo.Migrations.ChangeThreadVisibilityToUseNewObjectIdIndex do
use Ecto.Migration
def up do
execute(update_thread_visibility())
end
def down do
execute(restore_thread_visibility())
end
def update_thread_visibility do
"""
CREATE OR REPLACE FUNCTION thread_visibility(actor varchar, activity_id varchar, local_public varchar default '') RETURNS boolean AS $$
DECLARE
public varchar := 'https://www.w3.org/ns/activitystreams#Public';
child objects%ROWTYPE;
activity activities%ROWTYPE;
author_fa varchar;
valid_recipients varchar[];
actor_user_following varchar[];
BEGIN
--- Fetch actor following
SELECT array_agg(following.follower_address) INTO actor_user_following FROM following_relationships
JOIN users ON users.id = following_relationships.follower_id
JOIN users AS following ON following.id = following_relationships.following_id
WHERE users.ap_id = actor;
--- Fetch our initial activity.
SELECT * INTO activity FROM activities WHERE activities.data->>'id' = activity_id;
LOOP
--- Ensure that we have an activity before continuing.
--- If we don't, the thread is not satisfiable.
IF activity IS NULL THEN
RETURN false;
END IF;
--- We only care about Create activities.
IF activity.data->>'type' != 'Create' THEN
RETURN true;
END IF;
--- Normalize the child object into child.
SELECT * INTO child FROM objects
INNER JOIN activities ON associated_object_id(activities.data) = objects.data->>'id'
WHERE associated_object_id(activity.data) = objects.data->>'id';
--- Fetch the author's AS2 following collection.
SELECT COALESCE(users.follower_address, '') INTO author_fa FROM users WHERE users.ap_id = activity.actor;
--- Prepare valid recipients array.
valid_recipients := ARRAY[actor, public];
--- If we specified local public, add it.
IF local_public <> '' THEN
valid_recipients := valid_recipients || local_public;
END IF;
IF ARRAY[author_fa] && actor_user_following THEN
valid_recipients := valid_recipients || author_fa;
END IF;
--- Check visibility.
IF NOT valid_recipients && activity.recipients THEN
--- activity not visible, break out of the loop
RETURN false;
END IF;
--- If there's a parent, load it and do this all over again.
IF (child.data->'inReplyTo' IS NOT NULL) AND (child.data->'inReplyTo' != 'null'::jsonb) THEN
SELECT * INTO activity FROM activities
INNER JOIN objects ON associated_object_id(activities.data) = objects.data->>'id'
WHERE child.data->>'inReplyTo' = objects.data->>'id';
ELSE
RETURN true;
END IF;
END LOOP;
END;
$$ LANGUAGE plpgsql IMMUTABLE;
"""
end
# priv/repo/migrations/20220509180452_change_thread_visibility_to_be_local_only_aware.exs
def restore_thread_visibility do
"""
CREATE OR REPLACE FUNCTION thread_visibility(actor varchar, activity_id varchar, local_public varchar default '') RETURNS boolean AS $$
DECLARE
public varchar := 'https://www.w3.org/ns/activitystreams#Public';
child objects%ROWTYPE;
activity activities%ROWTYPE;
author_fa varchar;
valid_recipients varchar[];
actor_user_following varchar[];
BEGIN
--- Fetch actor following
SELECT array_agg(following.follower_address) INTO actor_user_following FROM following_relationships
JOIN users ON users.id = following_relationships.follower_id
JOIN users AS following ON following.id = following_relationships.following_id
WHERE users.ap_id = actor;
--- Fetch our initial activity.
SELECT * INTO activity FROM activities WHERE activities.data->>'id' = activity_id;
LOOP
--- Ensure that we have an activity before continuing.
--- If we don't, the thread is not satisfiable.
IF activity IS NULL THEN
RETURN false;
END IF;
--- We only care about Create activities.
IF activity.data->>'type' != 'Create' THEN
RETURN true;
END IF;
--- Normalize the child object into child.
SELECT * INTO child FROM objects
INNER JOIN activities ON COALESCE(activities.data->'object'->>'id', activities.data->>'object') = objects.data->>'id'
WHERE COALESCE(activity.data->'object'->>'id', activity.data->>'object') = objects.data->>'id';
--- Fetch the author's AS2 following collection.
SELECT COALESCE(users.follower_address, '') INTO author_fa FROM users WHERE users.ap_id = activity.actor;
--- Prepare valid recipients array.
valid_recipients := ARRAY[actor, public];
--- If we specified local public, add it.
IF local_public <> '' THEN
valid_recipients := valid_recipients || local_public;
END IF;
IF ARRAY[author_fa] && actor_user_following THEN
valid_recipients := valid_recipients || author_fa;
END IF;
--- Check visibility.
IF NOT valid_recipients && activity.recipients THEN
--- activity not visible, break out of the loop
RETURN false;
END IF;
--- If there's a parent, load it and do this all over again.
IF (child.data->'inReplyTo' IS NOT NULL) AND (child.data->'inReplyTo' != 'null'::jsonb) THEN
SELECT * INTO activity FROM activities
INNER JOIN objects ON COALESCE(activities.data->'object'->>'id', activities.data->>'object') = objects.data->>'id'
WHERE child.data->>'inReplyTo' = objects.data->>'id';
ELSE
RETURN true;
END IF;
END LOOP;
END;
$$ LANGUAGE plpgsql IMMUTABLE;
"""
end
end
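The rewritten function is only ever reached from Ecto through a fragment. A hedged sketch of such a call site, restricting a timeline to threads the viewing actor may see; the argument order and query shape are assumptions:

import Ecto.Query

# Keep only activities whose whole thread is visible to the viewer,
# delegating the reply-chain walk to the SQL function defined above.
viewer_ap_id = "https://example.com/users/viewer"

from(a in Pleroma.Activity,
  where: fragment("thread_visibility(?, (?)->>'id') = true", ^viewer_ap_id, a.data)
)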

BIN
priv/static/favicon.png (new binary file, 7 KiB; binary content not shown)

View file

@ -61,6 +61,12 @@ defmodule Restarter.Pleroma do
{:noreply, @init_state}
end
# Don't actually restart during tests.
# We just check if the correct call has been done.
# If we actually restart, we get errors during the tests like
# (RuntimeError) could not lookup Ecto repo Pleroma.Repo because it was not started or
# it does not exist
# See tests in Pleroma.Config.TransferTaskTest
def handle_cast({:restart, :test, _}, state) do
Logger.debug("pleroma manually restarted")
{:noreply, Map.put(state, :need_reboot, false)}
@ -74,6 +80,12 @@ defmodule Restarter.Pleroma do
def handle_cast({:after_boot, _}, %{after_boot: true} = state), do: {:noreply, state}
# Don't actually restart during tests.
# We just check if the correct call has been done.
# If we actually restart, we get errors during the tests like
# (RuntimeError) could not lookup Ecto repo Pleroma.Repo because it was not started or
# it does not exist
# See tests in Pleroma.Config.TransferTaskTest
def handle_cast({:after_boot, :test}, state) do
Logger.debug("pleroma restarted after boot")
state = %{state | after_boot: true, rebooted: true}

View file

@ -13,7 +13,8 @@ defmodule Restarter.MixProject do
def application do
[
mod: {Restarter, []}
mod: {Restarter, []},
extra_applications: [:logger]
]
end

View file

@ -119,44 +119,87 @@ defmodule Pleroma.Config.TransferTaskTest do
describe "pleroma restart" do
setup do
on_exit(fn -> Restarter.Pleroma.refresh() end)
on_exit(fn ->
Restarter.Pleroma.refresh()
# Restarter.Pleroma.refresh/0 is an asynchronous call.
# A GenServer will first finish the previous call before starting a new one.
# Here we do a synchronous call.
# That way we are sure that the previous call has finished before we continue.
# See https://stackoverflow.com/questions/51361856/how-to-use-task-await-with-genserver
Restarter.Pleroma.rebooted?()
end)
end
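The comment above relies on a general GenServer property: a server processes its mailbox one message at a time, so a synchronous call only returns after every cast queued before it has been handled. A minimal, self-contained sketch of that barrier pattern; the module and function names are illustrative and not part of this codebase:

defmodule BarrierExample do
  use GenServer

  def start_link(_), do: GenServer.start_link(__MODULE__, 0, name: __MODULE__)

  # Asynchronous: returns immediately, the work happens later in the server.
  def bump, do: GenServer.cast(__MODULE__, :bump)

  # Synchronous: only returns once every previously queued cast was handled.
  def sync, do: GenServer.call(__MODULE__, :sync)

  @impl true
  def init(count), do: {:ok, count}

  @impl true
  def handle_cast(:bump, count), do: {:noreply, count + 1}

  @impl true
  def handle_call(:sync, _from, count), do: {:reply, count, count}
end

# {:ok, _pid} = BarrierExample.start_link([])
# BarrierExample.bump()
# BarrierExample.sync() #=> 1, guaranteed to run after the cast above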
@tag :erratic
test "don't restart if no reboot time settings were changed" do
clear_config(:emoji)
insert(:config, key: :emoji, value: [groups: [a: 1, b: 2]])
refute String.contains?(
capture_log(fn -> TransferTask.start_link([]) end),
capture_log(fn ->
TransferTask.start_link([])
# TransferTask.start_link/1 is an asynchronous call.
# A GenServer will first finish the previous call before starting a new one.
# Here we do a synchronous call.
# That way we are sure that the previous call has finished before we continue.
Restarter.Pleroma.rebooted?()
end),
"pleroma restarted"
)
end
@tag :erratic
test "on reboot time key" do
clear_config([:pleroma, :rate_limit])
insert(:config, key: {:pleroma, :rate_limit}, value: [enabled: false])
assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted"
clear_config(:rate_limit)
insert(:config, key: :rate_limit, value: [enabled: false])
# Note that we don't actually restart Pleroma.
# See module Restarter.Pleroma
assert capture_log(fn ->
TransferTask.start_link([])
# TransferTask.start_link/1 is an asynchronous call.
# A GenServer will first finish the previous call before starting a new one.
# Here we do a synchronous call.
# That way we are sure that the previous call has finished before we continue.
Restarter.Pleroma.rebooted?()
end) =~ "pleroma restarted"
end
@tag :erratic
test "on reboot time subkey" do
clear_config(Pleroma.Captcha)
insert(:config, key: Pleroma.Captcha, value: [seconds_valid: 60])
assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted"
# Note that we don't actually restart Pleroma.
# See module Restarter.Pleroma
assert capture_log(fn ->
TransferTask.start_link([])
# TransferTask.start_link/1 is an asynchronous call.
# A GenServer will first finish the previous call before starting a new one.
# Here we do a synchronous call.
# That way we are sure that the previous call has finished before we continue.
Restarter.Pleroma.rebooted?()
end) =~ "pleroma restarted"
end
@tag :erratic
test "don't restart pleroma on reboot time key and subkey if there is false flag" do
clear_config([:pleroma, :rate_limit])
clear_config(:rate_limit)
clear_config(Pleroma.Captcha)
insert(:config, key: {:pleroma, :rate_limit}, value: [enabled: false])
insert(:config, key: :rate_limit, value: [enabled: false])
insert(:config, key: Pleroma.Captcha, value: [seconds_valid: 60])
refute String.contains?(
capture_log(fn -> TransferTask.load_and_update_env([], false) end),
capture_log(fn ->
TransferTask.load_and_update_env([], false)
# TransferTask.start_link/1 is an asynchronous call.
# A GenServer will first finish the previous call before starting a new one.
# Here we do a synchronous call.
# That way we are sure that the previous call has finished before we continue.
Restarter.Pleroma.rebooted?()
end),
"pleroma restarted"
)
end

View file

@ -122,11 +122,11 @@ defmodule Pleroma.Conversation.ParticipationTest do
end
test "it marks a participation as read" do
participation = insert(:participation, %{read: false})
participation = insert(:participation, %{updated_at: ~N[2017-07-17 17:09:58], read: false})
{:ok, updated_participation} = Participation.mark_as_read(participation)
assert updated_participation.read
assert updated_participation.updated_at == participation.updated_at
assert :gt = NaiveDateTime.compare(updated_participation.updated_at, participation.updated_at)
end
test "it marks a participation as unread" do

View file

@ -27,7 +27,7 @@ defmodule Pleroma.Emails.UserEmailTest do
token = %Pleroma.UserInviteToken{token: "test-token"}
email = UserEmail.user_invitation_email(user, token, "test@test.com", "Jonh")
assert email.from == {config[:name], config[:notify_email]}
assert email.subject == "Invitation to Pleroma"
assert email.subject == "Invitation to Akkoma"
assert email.to == [{"Jonh", "test@test.com"}]
assert email.html_body =~

View file

@ -9,12 +9,16 @@ defmodule Pleroma.Instances.InstanceTest do
alias Pleroma.Tests.ObanHelpers
alias Pleroma.Web.CommonAPI
use Pleroma.DataCase
use Pleroma.DataCase, async: true
import ExUnit.CaptureLog
import Pleroma.Factory
setup_all do: clear_config([:instance, :federation_reachability_timeout_days], 1)
setup_all do
clear_config([:instance, :federation_reachability_timeout_days], 1)
clear_config([:instances_nodeinfo, :enabled], true)
clear_config([:instances_favicons, :enabled], true)
end
describe "set_reachable/1" do
test "clears `unreachable_since` of existing matching Instance record having non-nil `unreachable_since`" do
@ -102,62 +106,220 @@ defmodule Pleroma.Instances.InstanceTest do
end
end
describe "get_or_update_favicon/1" do
test "Scrapes favicon URLs" do
Tesla.Mock.mock(fn %{url: "https://favicon.example.org/"} ->
%Tesla.Env{
status: 200,
body: ~s[<html><head><link rel="icon" href="/favicon.png"></head></html>]
}
describe "update_metadata/1" do
test "Scrapes favicon URLs and nodeinfo" do
Tesla.Mock.mock(fn
%{url: "https://favicon.example.org/"} ->
%Tesla.Env{
status: 200,
body: ~s[<html><head><link rel="icon" href="/favicon.png"></head></html>]
}
%{url: "https://favicon.example.org/.well-known/nodeinfo"} ->
%Tesla.Env{
status: 200,
body:
Jason.encode!(%{
links: [
%{
rel: "http://nodeinfo.diaspora.software/ns/schema/2.0",
href: "https://favicon.example.org/nodeinfo/2.0"
}
]
})
}
%{url: "https://favicon.example.org/nodeinfo/2.0"} ->
%Tesla.Env{
status: 200,
body: Jason.encode!(%{version: "2.0", software: %{name: "Akkoma"}})
}
end)
assert "https://favicon.example.org/favicon.png" ==
Instance.get_or_update_favicon(URI.parse("https://favicon.example.org/"))
assert {:ok, %Instance{host: "favicon.example.org"}} =
Instance.update_metadata(URI.parse("https://favicon.example.org/"))
{:ok, instance} = Instance.get_cached_by_url("https://favicon.example.org/")
assert instance.favicon == "https://favicon.example.org/favicon.png"
assert instance.nodeinfo == %{"version" => "2.0", "software" => %{"name" => "Akkoma"}}
end
test "Returns nil on too long favicon URLs" do
test "Does not retain favicons that are too long" do
long_favicon_url =
"https://Lorem.ipsum.dolor.sit.amet/consecteturadipiscingelit/Praesentpharetrapurusutaliquamtempus/Mauriseulaoreetarcu/atfacilisisorci/Nullamporttitor/nequesedfeugiatmollis/dolormagnaefficiturlorem/nonpretiumsapienorcieurisus/Nullamveleratsem/Maecenassedaccumsanexnam/favicon.png"
Tesla.Mock.mock(fn %{url: "https://long-favicon.example.org/"} ->
%Tesla.Env{
status: 200,
body:
~s[<html><head><link rel="icon" href="] <> long_favicon_url <> ~s["></head></html>]
}
Tesla.Mock.mock(fn
%{url: "https://long-favicon.example.org/"} ->
%Tesla.Env{
status: 200,
body:
~s[<html><head><link rel="icon" href="] <> long_favicon_url <> ~s["></head></html>]
}
%{url: "https://long-favicon.example.org/.well-known/nodeinfo"} ->
%Tesla.Env{
status: 200,
body:
Jason.encode!(%{
links: [
%{
rel: "http://nodeinfo.diaspora.software/ns/schema/2.0",
href: "https://long-favicon.example.org/nodeinfo/2.0"
}
]
})
}
%{url: "https://long-favicon.example.org/nodeinfo/2.0"} ->
%Tesla.Env{
status: 200,
body: Jason.encode!(%{version: "2.0", software: %{name: "Akkoma"}})
}
end)
assert capture_log(fn ->
assert nil ==
Instance.get_or_update_favicon(
URI.parse("https://long-favicon.example.org/")
)
end) =~
"Instance.get_or_update_favicon(\"long-favicon.example.org\") error: %Postgrex.Error{"
assert {:ok, %Instance{host: "long-favicon.example.org"}} =
Instance.update_metadata(URI.parse("https://long-favicon.example.org/"))
{:ok, instance} = Instance.get_cached_by_url("https://long-favicon.example.org/")
assert instance.favicon == nil
end
test "Handles not getting a favicon URL properly" do
Tesla.Mock.mock(fn %{url: "https://no-favicon.example.org/"} ->
%Tesla.Env{
status: 200,
body: ~s[<html><head><h1>I wil look down and whisper "GNO.."</h1></head></html>]
}
Tesla.Mock.mock(fn
%{url: "https://no-favicon.example.org/"} ->
%Tesla.Env{
status: 200,
body: ~s[<html><head><h1>I wil look down and whisper "GNO.."</h1></head></html>]
}
%{url: "https://no-favicon.example.org/.well-known/nodeinfo"} ->
%Tesla.Env{
status: 200,
body:
Jason.encode!(%{
links: [
%{
rel: "http://nodeinfo.diaspora.software/ns/schema/2.0",
href: "https://no-favicon.example.org/nodeinfo/2.0"
}
]
})
}
%{url: "https://no-favicon.example.org/nodeinfo/2.0"} ->
%Tesla.Env{
status: 200,
body: Jason.encode!(%{version: "2.0", software: %{name: "Akkoma"}})
}
end)
refute capture_log(fn ->
assert nil ==
Instance.get_or_update_favicon(
URI.parse("https://no-favicon.example.org/")
)
end) =~ "Instance.scrape_favicon(\"https://no-favicon.example.org/\") error: "
assert {:ok, %Instance{host: "no-favicon.example.org"}} =
Instance.update_metadata(URI.parse("https://no-favicon.example.org/"))
end) =~ "Instance.update_metadata(\"https://no-favicon.example.org/\") error: "
end
test "Doesn't scrapes unreachable instances" do
test "Doesn't scrape unreachable instances" do
instance = insert(:instance, unreachable_since: Instances.reachability_datetime_threshold())
url = "https://" <> instance.host
assert capture_log(fn -> assert nil == Instance.get_or_update_favicon(URI.parse(url)) end) =~
"Instance.scrape_favicon(\"#{url}\") ignored unreachable host"
assert {:discard, :unreachable} == Instance.update_metadata(URI.parse(url))
end
test "doesn't continue scraping nodeinfo if we can't find a link" do
Tesla.Mock.mock(fn
%{url: "https://bad-nodeinfo.example.org/"} ->
%Tesla.Env{
status: 200,
body: ~s[<html><head><h1>I wil look down and whisper "GNO.."</h1></head></html>]
}
%{url: "https://bad-nodeinfo.example.org/.well-known/nodeinfo"} ->
%Tesla.Env{
status: 200,
body: "oepsie woepsie de nodeinfo is kapotie uwu"
}
end)
assert {:ok, %Instance{host: "bad-nodeinfo.example.org"}} =
Instance.update_metadata(URI.parse("https://bad-nodeinfo.example.org/"))
{:ok, instance} = Instance.get_cached_by_url("https://bad-nodeinfo.example.org/")
assert instance.nodeinfo == nil
end
test "doesn't store bad json in the nodeinfo" do
Tesla.Mock.mock(fn
%{url: "https://bad-nodeinfo.example.org/"} ->
%Tesla.Env{
status: 200,
body: ~s[<html><head><h1>I wil look down and whisper "GNO.."</h1></head></html>]
}
%{url: "https://bad-nodeinfo.example.org/.well-known/nodeinfo"} ->
%Tesla.Env{
status: 200,
body:
Jason.encode!(%{
links: [
%{
rel: "http://nodeinfo.diaspora.software/ns/schema/2.0",
href: "https://bad-nodeinfo.example.org/nodeinfo/2.0"
}
]
})
}
%{url: "https://bad-nodeinfo.example.org/nodeinfo/2.0"} ->
%Tesla.Env{
status: 200,
body: "oepsie woepsie de json might be bad uwu"
}
end)
assert {:ok, %Instance{host: "bad-nodeinfo.example.org"}} =
Instance.update_metadata(URI.parse("https://bad-nodeinfo.example.org/"))
{:ok, instance} = Instance.get_cached_by_url("https://bad-nodeinfo.example.org/")
assert instance.nodeinfo == nil
end
test "doesn't store incredibly long json nodeinfo" do
too_long = String.duplicate("a", 50_000)
Tesla.Mock.mock(fn
%{url: "https://bad-nodeinfo.example.org/"} ->
%Tesla.Env{
status: 200,
body: ~s[<html><head><h1>I wil look down and whisper "GNO.."</h1></head></html>]
}
%{url: "https://bad-nodeinfo.example.org/.well-known/nodeinfo"} ->
%Tesla.Env{
status: 200,
body:
Jason.encode!(%{
links: [
%{
rel: "http://nodeinfo.diaspora.software/ns/schema/2.0",
href: "https://bad-nodeinfo.example.org/nodeinfo/2.0"
}
]
})
}
%{url: "https://bad-nodeinfo.example.org/nodeinfo/2.0"} ->
%Tesla.Env{
status: 200,
body: Jason.encode!(%{version: "2.0", software: %{name: too_long}})
}
end)
assert {:ok, %Instance{host: "bad-nodeinfo.example.org"}} =
Instance.update_metadata(URI.parse("https://bad-nodeinfo.example.org/"))
{:ok, instance} = Instance.get_cached_by_url("https://bad-nodeinfo.example.org/")
assert instance.nodeinfo == nil
end
end

View file

@ -5,8 +5,9 @@
defmodule Pleroma.UserRelationshipTest do
alias Pleroma.UserRelationship
use Pleroma.DataCase, async: true
use Pleroma.DataCase, async: false
import Mock
import Pleroma.Factory
describe "*_exists?/2" do
@ -79,7 +80,12 @@ defmodule Pleroma.UserRelationshipTest do
end
test "if record already exists, returns it", %{users: [user1, user2]} do
user_block = UserRelationship.create_block(user1, user2)
user_block =
with_mock NaiveDateTime, [:passthrough], utc_now: fn -> ~N[2017-03-17 17:09:58] end do
{:ok, %{inserted_at: ~N[2017-03-17 17:09:58]}} =
UserRelationship.create_block(user1, user2)
end
assert user_block == UserRelationship.create_block(user1, user2)
end
end

View file

@ -311,7 +311,7 @@ defmodule Pleroma.UserTest do
describe "unfollow/2" do
setup do: clear_config([:instance, :external_user_synchronization])
test "unfollow with syncronizes external user" do
test "unfollow with synchronizes external user" do
clear_config([:instance, :external_user_synchronization], true)
followed =
@ -444,17 +444,20 @@ defmodule Pleroma.UserTest do
end
setup do:
clear_config(:mrf_simple,
media_removal: [],
media_nsfw: [],
federated_timeline_removal: [],
report_removal: [],
reject: [],
followers_only: [],
accept: [],
avatar_removal: [],
banner_removal: [],
reject_deletes: []
clear_config(
[:mrf_simple],
%{
media_removal: [],
media_nsfw: [],
federated_timeline_removal: [],
report_removal: [],
reject: [],
followers_only: [],
accept: [],
avatar_removal: [],
banner_removal: [],
reject_deletes: []
}
)
setup do:
@ -1324,7 +1327,7 @@ defmodule Pleroma.UserTest do
collateral_user =
insert(:user, %{ap_id: "https://another-awful-and-rude-instance.com/user/bully"})
{:ok, user} = User.block_domain(user, "*.awful-and-rude-instance.com")
{:ok, user} = User.block_domain(user, "awful-and-rude-instance.com")
refute User.blocks?(user, collateral_user)
end
@ -1342,7 +1345,7 @@ defmodule Pleroma.UserTest do
user_domain = insert(:user, %{ap_id: "https://awful-and-rude-instance.com/user/bully"})
{:ok, user} = User.block_domain(user, "*.awful-and-rude-instance.com")
{:ok, user} = User.block_domain(user, "awful-and-rude-instance.com")
assert User.blocks?(user, user_from_subdomain)
assert User.blocks?(user, user_with_two_subdomains)
@ -2260,7 +2263,7 @@ defmodule Pleroma.UserTest do
assert other_user.follower_count == 1
end
test "syncronizes the counters with the remote instance for the followed when enabled" do
test "synchronizes the counters with the remote instance for the followed when enabled" do
clear_config([:instance, :external_user_synchronization], false)
user = insert(:user)
@ -2282,7 +2285,7 @@ defmodule Pleroma.UserTest do
assert other_user.follower_count == 437
end
test "syncronizes the counters with the remote instance for the follower when enabled" do
test "synchronizes the counters with the remote instance for the follower when enabled" do
clear_config([:instance, :external_user_synchronization], false)
user = insert(:user)

View file

@ -1632,7 +1632,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
end
describe "fetch_follow_information_for_user" do
test "syncronizes following/followers counters" do
test "synchronizes following/followers counters" do
user =
insert(:user,
local: false,

View file

@ -9,8 +9,8 @@ defmodule Pleroma.Web.ActivityPub.MRFTest do
test "subdomains_regex/1" do
assert MRF.subdomains_regex(["unsafe.tld", "*.unsafe.tld"]) == [
~r/^unsafe.tld$/i,
~r/^(.*\.)*unsafe.tld$/i
~r/^(.+\.)?unsafe\.tld$/i,
~r/^(.+\.)?unsafe\.tld$/i
]
end
@ -18,7 +18,7 @@ defmodule Pleroma.Web.ActivityPub.MRFTest do
test "common domains" do
regexes = MRF.subdomains_regex(["unsafe.tld", "unsafe2.tld"])
assert regexes == [~r/^unsafe.tld$/i, ~r/^unsafe2.tld$/i]
assert regexes == [~r/^(.+\.)?unsafe\.tld$/i, ~r/^(.+\.)?unsafe2\.tld$/i]
assert MRF.subdomain_match?(regexes, "unsafe.tld")
assert MRF.subdomain_match?(regexes, "unsafe2.tld")
@ -27,9 +27,9 @@ defmodule Pleroma.Web.ActivityPub.MRFTest do
end
test "wildcard domains with one subdomain" do
regexes = MRF.subdomains_regex(["*.unsafe.tld"])
regexes = MRF.subdomains_regex(["unsafe.tld"])
assert regexes == [~r/^(.*\.)*unsafe.tld$/i]
assert regexes == [~r/^(.+\.)?unsafe\.tld$/i]
assert MRF.subdomain_match?(regexes, "unsafe.tld")
assert MRF.subdomain_match?(regexes, "sub.unsafe.tld")
@ -38,9 +38,9 @@ defmodule Pleroma.Web.ActivityPub.MRFTest do
end
test "wildcard domains with two subdomains" do
regexes = MRF.subdomains_regex(["*.unsafe.tld"])
regexes = MRF.subdomains_regex(["unsafe.tld"])
assert regexes == [~r/^(.*\.)*unsafe.tld$/i]
assert regexes == [~r/^(.+\.)?unsafe\.tld$/i]
assert MRF.subdomain_match?(regexes, "unsafe.tld")
assert MRF.subdomain_match?(regexes, "sub.sub.unsafe.tld")
@ -51,7 +51,7 @@ defmodule Pleroma.Web.ActivityPub.MRFTest do
test "matches are case-insensitive" do
regexes = MRF.subdomains_regex(["UnSafe.TLD", "UnSAFE2.Tld"])
assert regexes == [~r/^UnSafe.TLD$/i, ~r/^UnSAFE2.Tld$/i]
assert regexes == [~r/^(.+\.)?UnSafe\.TLD$/i, ~r/^(.+\.)?UnSAFE2\.Tld$/i]
assert MRF.subdomain_match?(regexes, "UNSAFE.TLD")
assert MRF.subdomain_match?(regexes, "UNSAFE2.TLD")

View file

@ -21,6 +21,35 @@ defmodule Pleroma.Web.ActivityPub.SideEffectsTest do
import Mock
import Pleroma.Factory
describe "handle" do
test "it queues a fetch of instance information" do
author = insert(:user, local: false, ap_id: "https://wowee.example.com/users/1")
recipient = insert(:user, local: true)
{:ok, note_data, _meta} =
Builder.note(%Pleroma.Web.CommonAPI.ActivityDraft{
user: author,
to: [recipient.ap_id],
mentions: [recipient],
content_html: "hey",
extra: %{"id" => "https://wowee.example.com/notes/1"}
})
{:ok, create_activity_data, _meta} =
Builder.create(author, note_data["id"], [recipient.ap_id])
{:ok, create_activity, _meta} = ActivityPub.persist(create_activity_data, local: false)
{:ok, _create_activity, _meta} =
SideEffects.handle(create_activity, local: false, object_data: note_data)
assert_enqueued(
worker: Pleroma.Workers.NodeInfoFetcherWorker,
args: %{"op" => "process", "source_url" => "https://wowee.example.com/users/1"}
)
end
end
describe "handle_after_transaction" do
test "it streams out notifications and streams" do
author = insert(:user, local: true)

View file

@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.AdminAPI.ReportViewTest do
use Pleroma.DataCase, async: true
use Pleroma.DataCase, async: false
import Pleroma.Factory
@ -45,7 +45,7 @@ defmodule Pleroma.Web.AdminAPI.ReportViewTest do
ReportView.render("show.json", Report.extract_report_info(activity))
|> Map.delete(:created_at)
assert result == expected
assert Jason.encode!(result) == Jason.encode!(expected)
end
test "includes reported statuses" do

View file

@ -1921,4 +1921,48 @@ defmodule Pleroma.Web.MastodonAPI.AccountControllerTest do
|> get("/api/v1/accounts/relationships?id=#{other_user.id}")
|> json_response_and_validate_schema(200)
end
describe "remove from followers" do
setup do: oauth_access(["follow"])
test "removing user from followers", %{conn: conn, user: user} do
%{id: other_user_id} = other_user = insert(:user)
CommonAPI.follow(other_user, user)
assert %{"id" => ^other_user_id, "followed_by" => false} =
conn
|> post("/api/v1/accounts/#{other_user_id}/remove_from_followers")
|> json_response_and_validate_schema(200)
refute User.following?(other_user, user)
end
test "removing remote user from followers", %{conn: conn, user: user} do
%{id: other_user_id} = other_user = insert(:user, local: false)
CommonAPI.follow(other_user, user)
assert User.following?(other_user, user)
assert %{"id" => ^other_user_id, "followed_by" => false} =
conn
|> post("/api/v1/accounts/#{other_user_id}/remove_from_followers")
|> json_response_and_validate_schema(200)
refute User.following?(other_user, user)
end
test "removing user from followers errors", %{user: user, conn: conn} do
# self remove
conn_res = post(conn, "/api/v1/accounts/#{user.id}/remove_from_followers")
assert %{"error" => "Can not unfollow yourself"} =
json_response_and_validate_schema(conn_res, 400)
# remove a non-existent user
conn_res = post(conn, "/api/v1/accounts/doesntexist/remove_from_followers")
assert %{"error" => "Record not found"} = json_response_and_validate_schema(conn_res, 404)
end
end
end

View file

@ -3,9 +3,10 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.MastodonAPI.FilterControllerTest do
use Pleroma.Web.ConnCase, async: true
use Pleroma.Web.ConnCase, async: false
use Oban.Testing, repo: Pleroma.Repo
import Mock
import Pleroma.Factory
alias Pleroma.Filter
@ -53,25 +54,20 @@ defmodule Pleroma.Web.MastodonAPI.FilterControllerTest do
in_seconds = 600
response =
conn
|> put_req_header("content-type", "application/json")
|> post("/api/v1/filters", %{
"phrase" => "knights",
context: ["home"],
expires_in: in_seconds
})
|> json_response_and_validate_schema(200)
with_mock NaiveDateTime, [:passthrough], utc_now: fn -> ~N[2017-03-17 17:09:58] end do
conn
|> put_req_header("content-type", "application/json")
|> post("/api/v1/filters", %{
"phrase" => "knights",
context: ["home"],
expires_in: in_seconds
})
|> json_response_and_validate_schema(200)
end
assert response["irreversible"] == false
expires_at =
NaiveDateTime.utc_now()
|> NaiveDateTime.add(in_seconds)
assert NaiveDateTime.diff(
NaiveDateTime.from_iso8601!(response["expires_at"]),
expires_at
) < 5
assert response["expires_at"] == "2017-03-17T17:19:58.000Z"
filter = Filter.get(response["id"], user)
@ -183,26 +179,21 @@ defmodule Pleroma.Web.MastodonAPI.FilterControllerTest do
in_seconds = 600
response =
conn
|> put_req_header("content-type", "application/json")
|> put("/api/v1/filters/#{filter.filter_id}", %{
phrase: "nii",
context: ["public"],
expires_in: in_seconds,
irreversible: true
})
|> json_response_and_validate_schema(200)
with_mock NaiveDateTime, [:passthrough], utc_now: fn -> ~N[2017-03-17 17:09:58] end do
conn
|> put_req_header("content-type", "application/json")
|> put("/api/v1/filters/#{filter.filter_id}", %{
phrase: "nii",
context: ["public"],
expires_in: in_seconds,
irreversible: true
})
|> json_response_and_validate_schema(200)
end
assert response["irreversible"] == true
expected_time =
NaiveDateTime.utc_now()
|> NaiveDateTime.add(in_seconds)
assert NaiveDateTime.diff(
NaiveDateTime.from_iso8601!(response["expires_at"]),
expected_time
) < 5
assert response["expires_at"] == "2017-03-17T17:19:58.000Z"
filter = Filter.get(response["id"], user)

View file

@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
use Pleroma.DataCase
use Pleroma.DataCase, async: false
alias Pleroma.User
alias Pleroma.UserRelationship
@ -12,6 +12,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
import Pleroma.Factory
import Tesla.Mock
import Mock
setup do
mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
@ -25,6 +26,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
user =
insert(:user, %{
ap_id: "https://example.com/users/chikichikibanban",
follower_count: 3,
note_count: 5,
background: background_image,
@ -38,6 +40,8 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
also_known_as: ["https://shitposter.zone/users/shp"]
})
insert(:instance, %{host: "example.com", nodeinfo: %{version: "2.1"}})
expected = %{
id: to_string(user.id),
username: "shp",
@ -50,6 +54,15 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
statuses_count: 5,
note: "<span>valid html</span>. a<br/>b<br/>c<br/>d<br/>f &#39;&amp;&lt;&gt;&quot;",
url: user.ap_id,
akkoma: %{
instance: %{
name: "example.com",
nodeinfo: %{
"version" => "2.1"
},
favicon: nil
}
},
avatar: "http://localhost:4001/images/avi.png",
avatar_static: "http://localhost:4001/images/avi.png",
header: "http://localhost:4001/images/banner.png",
@ -98,9 +111,57 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
assert expected == AccountView.render("show.json", %{user: user, skip_visibility_check: true})
end
describe "nodeinfo" do
setup do
[
user: insert(:user, ap_id: "https://somewhere.example.com/users/chikichikibanban"),
instance:
insert(:instance, %{
host: "somewhere.example.com",
favicon: "https://example.com/favicon.ico"
})
]
end
test "is embedded in the account view", %{user: user} do
assert %{
akkoma: %{
instance: %{
name: "somewhere.example.com",
nodeinfo: %{
"version" => "2.0"
},
favicon: "https://example.com/favicon.ico"
}
}
} = AccountView.render("show.json", %{user: user, skip_visibility_check: true})
end
test "uses local nodeinfo for local users" do
user = insert(:user)
assert %{
akkoma: %{
instance: %{
name: "localhost",
nodeinfo: %{
software: %{
name: "akkoma"
}
}
}
}
} = AccountView.render("show.json", %{user: user, skip_visibility_check: true})
end
end
describe "favicon" do
setup do
[user: insert(:user)]
[
user: insert(:user, ap_id: "https://example.com/users/chikichikibanban"),
instance:
insert(:instance, %{host: "example.com", favicon: "https://example.com/favicon.ico"})
]
end
test "is parsed when :instance_favicons is enabled", %{user: user} do
@ -108,13 +169,14 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
assert %{
pleroma: %{
favicon:
"https://shitposter.club/plugins/Qvitter/img/gnusocial-favicons/favicon-16x16.png"
favicon: "https://example.com/favicon.ico"
}
} = AccountView.render("show.json", %{user: user, skip_visibility_check: true})
end
test "is nil when :instances_favicons is disabled", %{user: user} do
test "is nil when we have no instance", %{user: user} do
user = %{user | ap_id: "https://wowee.example.com/users/2"}
assert %{pleroma: %{favicon: nil}} =
AccountView.render("show.json", %{user: user, skip_visibility_check: true})
end
@ -176,11 +238,18 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
},
fqn: "shp@shitposter.club",
last_status_at: nil,
akkoma: %{
instance: %{
name: "localhost",
favicon: "http://localhost:4001/favicon.png",
nodeinfo: %{version: "2.0"}
}
},
pleroma: %{
ap_id: user.ap_id,
also_known_as: [],
background_image: nil,
favicon: nil,
favicon: "http://localhost:4001/favicon.png",
is_confirmed: true,
tags: [],
is_admin: false,
@ -196,7 +265,13 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
}
}
assert expected == AccountView.render("show.json", %{user: user, skip_visibility_check: true})
with_mock(
Pleroma.Web.Nodeinfo.NodeinfoController,
raw_nodeinfo: fn -> %{version: "2.0"} end
) do
assert expected ==
AccountView.render("show.json", %{user: user, skip_visibility_check: true})
end
end
test "Represent a Funkwhale channel" do
@ -272,6 +347,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
subscribing: false,
notifying: false,
requested: false,
requested_by: false,
domain_blocking: false,
showing_reblogs: true,
endorsed: false,
@ -357,6 +433,24 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
end
end
test "represent a relationship for a user with an inbound pending follow request" do
follower = insert(:user)
followed = insert(:user, is_locked: true)
{:ok, follower, followed, _} = CommonAPI.follow(follower, followed)
follower = User.get_cached_by_id(follower.id)
followed = User.get_cached_by_id(followed.id)
expected =
Map.merge(
@blank_response,
%{requested_by: true, followed_by: false, id: to_string(follower.id)}
)
test_relationship_rendering(followed, follower, expected)
end
test "returns the settings store if the requesting user is the represented user and it's requested specifically" do
user = insert(:user, pleroma_settings_store: %{fe: "test"})
@ -578,6 +672,8 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
emoji: %{"joker_smile" => "https://evil.website/society.png"}
)
insert(:instance, %{host: "localhost", favicon: "https://evil.website/favicon.png"})
with media_preview_enabled <- [false, true] do
clear_config([:media_preview_proxy, :enabled], media_preview_enabled)
@ -586,6 +682,9 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
{key, url} when key in [:avatar, :avatar_static, :header, :header_static] ->
String.starts_with?(url, Pleroma.Web.Endpoint.url())
{:akkoma, %{instance: %{favicon: favicon_url}}} ->
String.starts_with?(favicon_url, Pleroma.Web.Endpoint.url())
{:emojis, emojis} ->
Enum.all?(emojis, fn %{url: url, static_url: static_url} ->
String.starts_with?(url, Pleroma.Web.Endpoint.url()) &&
@ -598,4 +697,10 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
|> assert()
end
end
test "returns nil in the instance field when no instance is held locally" do
user = insert(:user, ap_id: "https://example.com/users/1")
view = AccountView.render("show.json", %{user: user, skip_visibility_check: true})
assert view[:akkoma][:instance] == nil
end
end

View file

@ -71,7 +71,7 @@ defmodule Pleroma.Web.OAuth.LDAPAuthorizationTest do
equalityMatch: fn _type, _value -> :ok end,
wholeSubtree: fn -> :ok end,
search: fn _connection, _options ->
{:ok, {:eldap_search_result, [{:eldap_entry, '', []}], []}}
{:ok, {:eldap_search_result, [{:eldap_entry, '', []}], [], []}}
end,
close: fn _connection ->
send(self(), :close_connection)

View file

@ -59,9 +59,9 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do
assert csp =~ ~r|report-uri https://endpoint.com;report-to csp-endpoint;|
[reply_to] = Conn.get_resp_header(conn, "reply-to")
[report_to] = Conn.get_resp_header(conn, "report-to")
assert reply_to ==
assert report_to ==
"{\"endpoints\":[{\"url\":\"https://endpoint.com\"}],\"group\":\"csp-endpoint\",\"max-age\":10886400}"
end
@ -100,12 +100,14 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do
url = "https://example.com"
clear_config([:media_proxy, :base_url], url)
assert_media_img_src(conn, url)
assert_connect_src(conn, url)
end
test "upload with base url", %{conn: conn} do
url = "https://example2.com"
clear_config([Pleroma.Upload, :base_url], url)
assert_media_img_src(conn, url)
assert_connect_src(conn, url)
end
test "with S3 public endpoint", %{conn: conn} do
@ -138,6 +140,12 @@ defmodule Pleroma.Web.Plugs.HTTPSecurityPlugTest do
assert csp =~ "img-src 'self' data: blob: #{url};"
end
defp assert_connect_src(conn, url) do
conn = get(conn, "/api/v1/instance")
[csp] = Conn.get_resp_header(conn, "content-security-policy")
assert csp =~ ~r/connect-src 'self' blob: [^;]+ #{url}/
end
test "it does not send CSP headers when disabled", %{conn: conn} do
clear_config([:http_security, :enabled], false)

View file

@ -48,38 +48,42 @@ defmodule Pleroma.Web.Plugs.RateLimiterTest do
refute RateLimiter.disabled?(build_conn())
end
@tag :erratic
test "it restricts based on config values" do
limiter_name = :test_plug_opts
scale = 80
limit = 5
clear_config([Pleroma.Web.Endpoint, :http, :ip], {8, 8, 8, 8})
clear_config([Pleroma.Web.Endpoint, :http, :ip], {127, 0, 0, 1})
clear_config([:rate_limit, limiter_name], {scale, limit})
plug_opts = RateLimiter.init(name: limiter_name)
conn = build_conn(:get, "/")
for i <- 1..5 do
conn = RateLimiter.call(conn, plug_opts)
assert {^i, _} = RateLimiter.inspect_bucket(conn, limiter_name, plug_opts)
Process.sleep(10)
for _ <- 1..5 do
conn_limited = RateLimiter.call(conn, plug_opts)
refute conn_limited.status == Conn.Status.code(:too_many_requests)
refute conn_limited.resp_body
refute conn_limited.halted
end
conn = RateLimiter.call(conn, plug_opts)
assert %{"error" => "Throttled"} = ConnTest.json_response(conn, :too_many_requests)
assert conn.halted
conn_limited = RateLimiter.call(conn, plug_opts)
assert %{"error" => "Throttled"} = ConnTest.json_response(conn_limited, :too_many_requests)
assert conn_limited.halted
Process.sleep(50)
expire_ttl(conn, limiter_name)
conn = build_conn(:get, "/")
for _ <- 1..5 do
conn_limited = RateLimiter.call(conn, plug_opts)
conn = RateLimiter.call(conn, plug_opts)
assert {1, 4} = RateLimiter.inspect_bucket(conn, limiter_name, plug_opts)
refute conn_limited.status == Conn.Status.code(:too_many_requests)
refute conn_limited.resp_body
refute conn_limited.halted
end
refute conn.status == Conn.Status.code(:too_many_requests)
refute conn.resp_body
refute conn.halted
conn_limited = RateLimiter.call(conn, plug_opts)
assert %{"error" => "Throttled"} = ConnTest.json_response(conn_limited, :too_many_requests)
assert conn_limited.halted
end
describe "options" do
@ -263,4 +267,12 @@ defmodule Pleroma.Web.Plugs.RateLimiterTest do
refute {:err, :not_found} == RateLimiter.inspect_bucket(conn, limiter_name, opts)
end
def expire_ttl(%{remote_ip: remote_ip} = _conn, bucket_name_root) do
bucket_name = "anon:#{bucket_name_root}" |> String.to_atom()
key_name = "ip::#{remote_ip |> Tuple.to_list() |> Enum.join(".")}"
{:ok, bucket_value} = Cachex.get(bucket_name, key_name)
Cachex.put(bucket_name, key_name, bucket_value, ttl: -1)
end
end

View file

@ -40,4 +40,15 @@ defmodule Pleroma.Web.Plugs.UploadedMediaPlugTest do
&(&1 == {"content-disposition", "filename=\"\\\"cofe\\\".gif\""})
)
end
test "removes control characters from the Content-Disposition header", %{
attachment_url: attachment_url
} do
conn = get(build_conn(), attachment_url <> "?name=\"cofe\".gif\\r\\n")
assert Enum.any?(
conn.resp_headers,
&(&1 == {"content-disposition", "filename=\"\\\"cofe\\\".gif\""})
)
end
end

View file

@ -200,6 +200,21 @@ defmodule Pleroma.Web.Push.ImplTest do
"New Reaction"
end
test "renders title and body for update activity" do
user = insert(:user)
{:ok, activity} = CommonAPI.post(user, %{status: "lorem ipsum"})
{:ok, activity} = CommonAPI.update(user, activity, %{status: "edited status"})
object = Object.normalize(activity, fetch: false)
assert Impl.format_body(%{activity: activity, type: "update"}, user, object) ==
"@#{user.nickname} edited a status"
assert Impl.format_title(%{activity: activity, type: "update"}) ==
"New Update"
end
test "renders title for create activity with direct visibility" do
user = insert(:user, nickname: "Bob")

View file

@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.StreamerTest do
use Pleroma.DataCase
use Pleroma.DataCase, async: false
import Pleroma.Factory

View file

@ -36,6 +36,15 @@ defmodule Pleroma.Factory do
}
end
def instance_factory(attrs \\ %{}) do
%Pleroma.Instances.Instance{
host: attrs[:domain] || "example.com",
nodeinfo: %{version: "2.0", openRegistrations: true},
unreachable_since: nil
}
|> Map.merge(attrs)
end
def user_factory(attrs \\ %{}) do
pem = Enum.random(@rsa_keys)
@ -522,13 +531,6 @@ defmodule Pleroma.Factory do
}
end
def instance_factory do
%Pleroma.Instances.Instance{
host: "domain.com",
unreachable_since: nil
}
end
def oauth_token_factory(attrs \\ %{}) do
scopes = Map.get(attrs, :scopes, ["read"])
oauth_app = Map.get_lazy(attrs, :app, fn -> insert(:oauth_app, scopes: scopes) end)