Compare commits
4 commits
develop
...
translatio
Author | SHA1 | Date | |
---|---|---|---|
e877560fef | |||
bc82a70436 | |||
ed866fe09b | |||
4256e5eb13 |
1
.buildpacks
Normal file
1
.buildpacks
Normal file
|
@ -0,0 +1 @@
|
||||||
|
https://github.com/hashnuke/heroku-buildpack-elixir
|
|
@ -1,4 +1,3 @@
|
||||||
labels:
|
|
||||||
platform: linux/amd64
|
platform: linux/amd64
|
||||||
|
|
||||||
depends_on:
|
depends_on:
|
||||||
|
@ -35,7 +34,7 @@ variables:
|
||||||
- &clean "(rm -rf release || true) && (rm -rf _build || true) && (rm -rf /root/.mix)"
|
- &clean "(rm -rf release || true) && (rm -rf _build || true) && (rm -rf /root/.mix)"
|
||||||
- &mix-clean "mix deps.clean --all && mix clean"
|
- &mix-clean "mix deps.clean --all && mix clean"
|
||||||
|
|
||||||
steps:
|
pipeline:
|
||||||
# Canonical amd64
|
# Canonical amd64
|
||||||
debian-bookworm:
|
debian-bookworm:
|
||||||
image: hexpm/elixir:1.15.4-erlang-26.0.2-debian-bookworm-20230612
|
image: hexpm/elixir:1.15.4-erlang-26.0.2-debian-bookworm-20230612
|
||||||
|
|
|
@ -1,5 +1,4 @@
|
||||||
labels:
|
platform: linux/arm64
|
||||||
platform: linux/aarch64
|
|
||||||
|
|
||||||
depends_on:
|
depends_on:
|
||||||
- test
|
- test
|
||||||
|
@ -35,7 +34,7 @@ variables:
|
||||||
- &clean "(rm -rf release || true) && (rm -rf _build || true) && (rm -rf /root/.mix)"
|
- &clean "(rm -rf release || true) && (rm -rf _build || true) && (rm -rf /root/.mix)"
|
||||||
- &mix-clean "mix deps.clean --all && mix clean"
|
- &mix-clean "mix deps.clean --all && mix clean"
|
||||||
|
|
||||||
steps:
|
pipeline:
|
||||||
# Canonical arm64
|
# Canonical arm64
|
||||||
debian-bookworm:
|
debian-bookworm:
|
||||||
image: hexpm/elixir:1.15.4-erlang-26.0.2-debian-bookworm-20230612
|
image: hexpm/elixir:1.15.4-erlang-26.0.2-debian-bookworm-20230612
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
labels:
|
|
||||||
platform: linux/amd64
|
platform: linux/amd64
|
||||||
|
|
||||||
depends_on:
|
depends_on:
|
||||||
|
@ -46,7 +45,7 @@ variables:
|
||||||
- &clean "(rm -rf release || true) && (rm -rf _build || true) && (rm -rf /root/.mix)"
|
- &clean "(rm -rf release || true) && (rm -rf _build || true) && (rm -rf /root/.mix)"
|
||||||
- &mix-clean "mix deps.clean --all && mix clean"
|
- &mix-clean "mix deps.clean --all && mix clean"
|
||||||
|
|
||||||
steps:
|
pipeline:
|
||||||
docs:
|
docs:
|
||||||
<<: *on-point-release
|
<<: *on-point-release
|
||||||
secrets:
|
secrets:
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
labels:
|
|
||||||
platform: linux/amd64
|
platform: linux/amd64
|
||||||
|
|
||||||
variables:
|
variables:
|
||||||
|
@ -42,9 +41,9 @@ variables:
|
||||||
- &clean "(rm -rf release || true) && (rm -rf _build || true) && (rm -rf /root/.mix)"
|
- &clean "(rm -rf release || true) && (rm -rf _build || true) && (rm -rf /root/.mix)"
|
||||||
- &mix-clean "mix deps.clean --all && mix clean"
|
- &mix-clean "mix deps.clean --all && mix clean"
|
||||||
|
|
||||||
steps:
|
pipeline:
|
||||||
lint:
|
lint:
|
||||||
image: akkoma/ci-base:1.16-otp26
|
image: akkoma/ci-base:1.15-otp26
|
||||||
<<: *on-pr-open
|
<<: *on-pr-open
|
||||||
environment:
|
environment:
|
||||||
MIX_ENV: test
|
MIX_ENV: test
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
labels:
|
|
||||||
platform: linux/amd64
|
platform: linux/amd64
|
||||||
|
|
||||||
depends_on:
|
depends_on:
|
||||||
|
@ -13,6 +12,12 @@ matrix:
|
||||||
- 25
|
- 25
|
||||||
- 26
|
- 26
|
||||||
include:
|
include:
|
||||||
|
- ELIXIR_VERSION: 1.14
|
||||||
|
OTP_VERSION: 25
|
||||||
|
- ELIXIR_VERSION: 1.15
|
||||||
|
OTP_VERSION: 25
|
||||||
|
- ELIXIR_VERSION: 1.15
|
||||||
|
OTP_VERSION: 26
|
||||||
- ELIXIR_VERSION: 1.16
|
- ELIXIR_VERSION: 1.16
|
||||||
OTP_VERSION: 26
|
OTP_VERSION: 26
|
||||||
|
|
||||||
|
@ -68,7 +73,7 @@ services:
|
||||||
POSTGRES_USER: postgres
|
POSTGRES_USER: postgres
|
||||||
POSTGRES_PASSWORD: postgres
|
POSTGRES_PASSWORD: postgres
|
||||||
|
|
||||||
steps:
|
pipeline:
|
||||||
test:
|
test:
|
||||||
image: akkoma/ci-base:${ELIXIR_VERSION}-otp${OTP_VERSION}
|
image: akkoma/ci-base:${ELIXIR_VERSION}-otp${OTP_VERSION}
|
||||||
<<: *on-pr-open
|
<<: *on-pr-open
|
||||||
|
|
17
CHANGELOG.md
17
CHANGELOG.md
|
@ -4,21 +4,7 @@ All notable changes to this project will be documented in this file.
|
||||||
|
|
||||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||||
|
|
||||||
## 2024.04.1 (Security)
|
## Unreleased
|
||||||
|
|
||||||
## Fixed
|
|
||||||
- Issue allowing non-owners to use media objects in posts
|
|
||||||
- Issue allowing use of non-media objects as attachments and crashing timeline rendering
|
|
||||||
- Issue allowing webfinger spoofing in certain situations
|
|
||||||
|
|
||||||
## Added
|
|
||||||
- Implement [FEP-67ff](https://codeberg.org/fediverse/fep/src/branch/main/fep/67ff/fep-67ff.md) (federation documentation)
|
|
||||||
|
|
||||||
## Added
|
|
||||||
- Meilisearch: it is now possible to use separate keys for search and admin actions
|
|
||||||
|
|
||||||
## Fixed
|
|
||||||
- Meilisearch: order of results returned from our REST API now actually matches how Meilisearch ranks results
|
|
||||||
|
|
||||||
## 2024.04
|
## 2024.04
|
||||||
|
|
||||||
|
@ -51,7 +37,6 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||||
- Issue leading to Mastodon bot accounts being rejected
|
- Issue leading to Mastodon bot accounts being rejected
|
||||||
- Scope misdetection of remote posts resulting from not recognising
|
- Scope misdetection of remote posts resulting from not recognising
|
||||||
JSON-LD-compacted forms of public scope; affected e.g. federation with bovine
|
JSON-LD-compacted forms of public scope; affected e.g. federation with bovine
|
||||||
- Ratelimits encountered when fetching objects are now respected; 429 responses will cause a backoff when we get one.
|
|
||||||
|
|
||||||
## Removed
|
## Removed
|
||||||
- ActivityPub Client-To-Server write API endpoints have been disabled;
|
- ActivityPub Client-To-Server write API endpoints have been disabled;
|
||||||
|
|
|
@ -1,42 +0,0 @@
|
||||||
# Federation
|
|
||||||
|
|
||||||
## Supported federation protocols and standards
|
|
||||||
|
|
||||||
- [ActivityPub](https://www.w3.org/TR/activitypub/) (Server-to-Server)
|
|
||||||
- [WebFinger](https://webfinger.net/)
|
|
||||||
- [Http Signatures](https://datatracker.ietf.org/doc/html/draft-cavage-http-signatures)
|
|
||||||
- [NodeInfo](https://nodeinfo.diaspora.software/)
|
|
||||||
|
|
||||||
## Supported FEPs
|
|
||||||
|
|
||||||
- [FEP-67ff: FEDERATION](https://codeberg.org/fediverse/fep/src/branch/main/fep/67ff/fep-67ff.md)
|
|
||||||
- [FEP-f1d5: NodeInfo in Fediverse Software](https://codeberg.org/fediverse/fep/src/branch/main/fep/f1d5/fep-f1d5.md)
|
|
||||||
- [FEP-fffd: Proxy Objects](https://codeberg.org/fediverse/fep/src/branch/main/fep/fffd/fep-fffd.md)
|
|
||||||
|
|
||||||
## ActivityPub
|
|
||||||
|
|
||||||
Akkoma mostly follows the server-to-server parts of the ActivityPub standard,
|
|
||||||
but implements quirks for Mastodon compatibility as well as Mastodon-specific
|
|
||||||
and custom extensions.
|
|
||||||
|
|
||||||
See our documentation and Mastodon’s federation information
|
|
||||||
linked further below for details on these quirks and extensions.
|
|
||||||
|
|
||||||
Akkoma does not perform JSON-LD processing.
|
|
||||||
|
|
||||||
### Required extensions
|
|
||||||
|
|
||||||
#### HTTP Signatures
|
|
||||||
All AP S2S POST requests to Akkoma instances MUST be signed.
|
|
||||||
Depending on instance configuration the same may be true for GET requests.
|
|
||||||
|
|
||||||
## Nodeinfo
|
|
||||||
|
|
||||||
Akkoma provides many additional entries in its nodeinfo response,
|
|
||||||
see the documentation linked below for details.
|
|
||||||
|
|
||||||
## Additional documentation
|
|
||||||
|
|
||||||
- [Akkoma’s ActivityPub extensions](https://docs.akkoma.dev/develop/development/ap_extensions/)
|
|
||||||
- [Akkoma’s nodeinfo extensions](https://docs.akkoma.dev/develop/development/nodeinfo_extensions/)
|
|
||||||
- [Mastodon’s federation requirements](https://github.com/mastodon/mastodon/blob/main/FEDERATION.md)
|
|
25
config/dokku.exs
Normal file
25
config/dokku.exs
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
import Config
|
||||||
|
|
||||||
|
config :pleroma, Pleroma.Web.Endpoint,
|
||||||
|
http: [
|
||||||
|
port: String.to_integer(System.get_env("PORT") || "4000"),
|
||||||
|
protocol_options: [max_request_line_length: 8192, max_header_value_length: 8192]
|
||||||
|
],
|
||||||
|
protocol: "http",
|
||||||
|
secure_cookie_flag: false,
|
||||||
|
url: [host: System.get_env("APP_HOST"), scheme: "https", port: 443],
|
||||||
|
secret_key_base: "+S+ULgf7+N37c/lc9K66SMphnjQIRGklTu0BRr2vLm2ZzvK0Z6OH/PE77wlUNtvP"
|
||||||
|
|
||||||
|
database_url =
|
||||||
|
System.get_env("DATABASE_URL") ||
|
||||||
|
raise """
|
||||||
|
environment variable DATABASE_URL is missing.
|
||||||
|
For example: ecto://USER:PASS@HOST/DATABASE
|
||||||
|
"""
|
||||||
|
|
||||||
|
config :pleroma, Pleroma.Repo,
|
||||||
|
# ssl: true,
|
||||||
|
url: database_url,
|
||||||
|
pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")
|
||||||
|
|
||||||
|
config :pleroma, :instance, name: "#{System.get_env("APP_NAME")} CI Instance"
|
|
@ -1,15 +1,12 @@
|
||||||
# Akkoma Clients
|
# Akkoma Clients
|
||||||
This is a list of clients that are known to work with Akkoma.
|
Note: Additional clients may work, but these are known to work with Akkoma.
|
||||||
|
Apps listed here might not support all of Akkoma's features.
|
||||||
!!! warning
|
|
||||||
**Clients listed here are not officially supported by the Akkoma project.**
|
|
||||||
Some Akkoma features may be unsupported by these clients.
|
|
||||||
|
|
||||||
## Multiplatform
|
## Multiplatform
|
||||||
### Kaiteki
|
### Kaiteki
|
||||||
- Homepage: <https://kaiteki.app/>
|
- Homepage: <https://kaiteki.app/>
|
||||||
- Source Code: <https://github.com/Kaiteki-Fedi/Kaiteki>
|
- Source Code: <https://github.com/Kaiteki-Fedi/Kaiteki>
|
||||||
- Contact: [@kaiteki@social.kaiteki.app](https://social.kaiteki.app/@kaiteki)
|
- Contact: [@kaiteki@fedi.software](https://fedi.software/@Kaiteki)
|
||||||
- Platforms: Web, Windows, Linux, Android
|
- Platforms: Web, Windows, Linux, Android
|
||||||
- Features: MastoAPI, Supports multiple backends
|
- Features: MastoAPI, Supports multiple backends
|
||||||
|
|
||||||
|
@ -41,6 +38,12 @@ This is a list of clients that are known to work with Akkoma.
|
||||||
- Platforms: Android
|
- Platforms: Android
|
||||||
- Features: MastoAPI, No Streaming, Emoji Reactions, Text Formatting, FE Stickers
|
- Features: MastoAPI, No Streaming, Emoji Reactions, Text Formatting, FE Stickers
|
||||||
|
|
||||||
|
### Fedi
|
||||||
|
- Homepage: <https://www.fediapp.com/>
|
||||||
|
- Source Code: Proprietary, but gratis
|
||||||
|
- Platforms: iOS, Android
|
||||||
|
- Features: MastoAPI, Pleroma-specific features like Reactions
|
||||||
|
|
||||||
### Tusky
|
### Tusky
|
||||||
- Homepage: <https://tuskyapp.github.io/>
|
- Homepage: <https://tuskyapp.github.io/>
|
||||||
- Source Code: <https://github.com/tuskyapp/Tusky>
|
- Source Code: <https://github.com/tuskyapp/Tusky>
|
||||||
|
@ -48,18 +51,12 @@ This is a list of clients that are known to work with Akkoma.
|
||||||
- Platforms: Android
|
- Platforms: Android
|
||||||
- Features: MastoAPI, No Streaming
|
- Features: MastoAPI, No Streaming
|
||||||
|
|
||||||
### Subway Tooter
|
|
||||||
- Source Code: <https://github.com/tateisu/SubwayTooter/>
|
|
||||||
- Contact: [@SubwayTooter@mastodon.juggler.jp](https://mastodon.juggler.jp/@SubwayTooter)
|
|
||||||
- Platforms: Android
|
|
||||||
- Features: MastoAPI, Editing, Emoji Reactions (including custom emoji)
|
|
||||||
|
|
||||||
## Alternative Web Interfaces
|
## Alternative Web Interfaces
|
||||||
### Enafore
|
### Pinafore
|
||||||
- An actively developed fork of Pinafore with improved Akkoma support
|
- Note: Pinafore is unmaintained (See [the author's original article](https://nolanlawson.com/2023/01/09/retiring-pinafore/) for details)
|
||||||
- Homepage: <https://enafore.social/>
|
- Homepage: <https://pinafore.social/>
|
||||||
- Source Code: <https://github.com/enafore/enafore>
|
- Source Code: <https://github.com/nolanlawson/pinafore>
|
||||||
- Contact: [@enfore@enafore.social](https://meta.enafore.social/@enafore)
|
- Contact: [@pinafore@mastodon.technology](https://mastodon.technology/users/pinafore)
|
||||||
- Features: MastoAPI, No Streaming
|
- Features: MastoAPI, No Streaming
|
||||||
|
|
||||||
### Sengi
|
### Sengi
|
||||||
|
|
|
@ -63,8 +63,6 @@ To add configuration to your config file, you can copy it from the base config.
|
||||||
* `local_bubble`: Array of domains representing instances closely related to yours. Used to populate the `bubble` timeline. e.g `["example.com"]`, (default: `[]`)
|
* `local_bubble`: Array of domains representing instances closely related to yours. Used to populate the `bubble` timeline. e.g `["example.com"]`, (default: `[]`)
|
||||||
* `languages`: List of Language Codes used by the instance. This is used to try and set a default language from the frontend. It will try and find the first match between the languages set here and the user's browser languages. It will default to the first language in this setting if there is no match.. (default `["en"]`)
|
* `languages`: List of Language Codes used by the instance. This is used to try and set a default language from the frontend. It will try and find the first match between the languages set here and the user's browser languages. It will default to the first language in this setting if there is no match.. (default `["en"]`)
|
||||||
* `export_prometheus_metrics`: Enable prometheus metrics, served at `/api/v1/akkoma/metrics`, requiring the `admin:metrics` oauth scope.
|
* `export_prometheus_metrics`: Enable prometheus metrics, served at `/api/v1/akkoma/metrics`, requiring the `admin:metrics` oauth scope.
|
||||||
* `privileged_staff`: Set to `true` to give moderators access to a few higher responsibility actions.
|
|
||||||
* `federated_timeline_available`: Set to `false` to remove access to the federated timeline for all users.
|
|
||||||
|
|
||||||
## :database
|
## :database
|
||||||
* `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).
|
* `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).
|
||||||
|
|
|
@ -6,17 +6,37 @@ With the `mediaproxy` function you can use nginx to cache this content, so users
|
||||||
|
|
||||||
## Activate it
|
## Activate it
|
||||||
|
|
||||||
|
* Edit your nginx config and add the following location to your main server block:
|
||||||
|
```
|
||||||
|
location /proxy {
|
||||||
|
return 404;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
* Set up a subdomain for the proxy with its nginx config on the same machine
|
* Set up a subdomain for the proxy with its nginx config on the same machine
|
||||||
* Edit the nginx config for the upload/MediaProxy subdomain to point to the subdomain that has been set up
|
*(the latter is not strictly required, but for simplicity we’ll assume so)*
|
||||||
|
* In this subdomain’s server block add
|
||||||
|
```
|
||||||
|
location /proxy {
|
||||||
|
proxy_cache akkoma_media_cache;
|
||||||
|
proxy_cache_lock on;
|
||||||
|
proxy_pass http://localhost:4000;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
Also add the following on top of the configuration, outside of the `server` block:
|
||||||
|
```
|
||||||
|
proxy_cache_path /tmp/akkoma-media-cache levels=1:2 keys_zone=akkoma_media_cache:10m max_size=10g inactive=720m use_temp_path=off;
|
||||||
|
```
|
||||||
|
If you came here from one of the installation guides, take a look at the example configuration `/installation/nginx/akkoma.nginx`, where this part is already included.
|
||||||
|
|
||||||
* Append the following to your `prod.secret.exs` or `dev.secret.exs` (depends on which mode your instance is running):
|
* Append the following to your `prod.secret.exs` or `dev.secret.exs` (depends on which mode your instance is running):
|
||||||
```elixir
|
```
|
||||||
# Replace media.example.td with the subdomain you set up earlier
|
|
||||||
config :pleroma, :media_proxy,
|
config :pleroma, :media_proxy,
|
||||||
enabled: true,
|
enabled: true,
|
||||||
proxy_opts: [
|
proxy_opts: [
|
||||||
redirect_on_failure: true
|
redirect_on_failure: true
|
||||||
],
|
],
|
||||||
base_url: "https://media.example.tld"
|
base_url: "https://cache.akkoma.social"
|
||||||
```
|
```
|
||||||
You **really** should use a subdomain to serve proxied files; while we will fix bugs resulting from this, serving arbitrary remote content on your main domain namespace is a significant attack surface.
|
You **really** should use a subdomain to serve proxied files; while we will fix bugs resulting from this, serving arbitrary remote content on your main domain namespace is a significant attack surface.
|
||||||
|
|
||||||
|
|
|
@ -130,26 +130,59 @@ config :pleroma, :http_security,
|
||||||
enabled: false
|
enabled: false
|
||||||
```
|
```
|
||||||
|
|
||||||
In the Nginx config, add the following into the `location /` block:
|
Use this as the Nginx config:
|
||||||
```nginx
|
```
|
||||||
|
proxy_cache_path /tmp/akkoma-media-cache levels=1:2 keys_zone=akkoma_media_cache:10m max_size=10g inactive=720m use_temp_path=off;
|
||||||
|
# The above already exists in a clearnet instance's config.
|
||||||
|
# If not, add it.
|
||||||
|
|
||||||
|
server {
|
||||||
|
listen 127.0.0.1:14447;
|
||||||
|
server_name youri2paddress;
|
||||||
|
|
||||||
|
# Comment to enable logs
|
||||||
|
access_log /dev/null;
|
||||||
|
error_log /dev/null;
|
||||||
|
|
||||||
|
gzip_vary on;
|
||||||
|
gzip_proxied any;
|
||||||
|
gzip_comp_level 6;
|
||||||
|
gzip_buffers 16 8k;
|
||||||
|
gzip_http_version 1.1;
|
||||||
|
gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript application/activity+json application/atom+xml;
|
||||||
|
|
||||||
|
client_max_body_size 16m;
|
||||||
|
|
||||||
|
location / {
|
||||||
|
|
||||||
add_header X-XSS-Protection "0";
|
add_header X-XSS-Protection "0";
|
||||||
add_header X-Permitted-Cross-Domain-Policies none;
|
add_header X-Permitted-Cross-Domain-Policies none;
|
||||||
add_header X-Frame-Options DENY;
|
add_header X-Frame-Options DENY;
|
||||||
add_header X-Content-Type-Options nosniff;
|
add_header X-Content-Type-Options nosniff;
|
||||||
add_header Referrer-Policy same-origin;
|
add_header Referrer-Policy same-origin;
|
||||||
```
|
|
||||||
|
|
||||||
Change the `listen` directive to the following:
|
proxy_http_version 1.1;
|
||||||
```nginx
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
listen 127.0.0.1:14447;
|
proxy_set_header Connection "upgrade";
|
||||||
```
|
proxy_set_header Host $http_host;
|
||||||
|
|
||||||
Set `server_name` to your i2p address.
|
proxy_pass http://localhost:4000;
|
||||||
|
|
||||||
Reload Nginx:
|
client_max_body_size 16m;
|
||||||
|
}
|
||||||
|
|
||||||
|
location /proxy {
|
||||||
|
proxy_cache akkoma_media_cache;
|
||||||
|
proxy_cache_lock on;
|
||||||
|
proxy_ignore_client_abort on;
|
||||||
|
proxy_pass http://localhost:4000;
|
||||||
|
}
|
||||||
|
}
|
||||||
```
|
```
|
||||||
systemctl restart i2pd.service --no-block
|
reload Nginx:
|
||||||
systemctl reload nginx.service
|
```
|
||||||
|
systemctl stop i2pd.service --no-block
|
||||||
|
systemctl start i2pd.service
|
||||||
```
|
```
|
||||||
*Notice:* The stop command initiates a graceful shutdown process, i2pd stops after finishing to route transit tunnels (maximum 10 minutes).
|
*Notice:* The stop command initiates a graceful shutdown process, i2pd stops after finishing to route transit tunnels (maximum 10 minutes).
|
||||||
|
|
||||||
|
|
|
@ -74,23 +74,56 @@ config :pleroma, :http_security,
|
||||||
enabled: false
|
enabled: false
|
||||||
```
|
```
|
||||||
|
|
||||||
In the Nginx config, add the following into the `location /` block:
|
Use this as the Nginx config:
|
||||||
```nginx
|
```
|
||||||
|
proxy_cache_path /tmp/akkoma-media-cache levels=1:2 keys_zone=akkoma_media_cache:10m max_size=10g inactive=720m use_temp_path=off;
|
||||||
|
# The above already exists in a clearnet instance's config.
|
||||||
|
# If not, add it.
|
||||||
|
|
||||||
|
server {
|
||||||
|
listen 127.0.0.1:8099;
|
||||||
|
server_name youronionaddress;
|
||||||
|
|
||||||
|
# Comment to enable logs
|
||||||
|
access_log /dev/null;
|
||||||
|
error_log /dev/null;
|
||||||
|
|
||||||
|
gzip_vary on;
|
||||||
|
gzip_proxied any;
|
||||||
|
gzip_comp_level 6;
|
||||||
|
gzip_buffers 16 8k;
|
||||||
|
gzip_http_version 1.1;
|
||||||
|
gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript application/activity+json application/atom+xml;
|
||||||
|
|
||||||
|
client_max_body_size 16m;
|
||||||
|
|
||||||
|
location / {
|
||||||
|
|
||||||
add_header X-XSS-Protection "0";
|
add_header X-XSS-Protection "0";
|
||||||
add_header X-Permitted-Cross-Domain-Policies none;
|
add_header X-Permitted-Cross-Domain-Policies none;
|
||||||
add_header X-Frame-Options DENY;
|
add_header X-Frame-Options DENY;
|
||||||
add_header X-Content-Type-Options nosniff;
|
add_header X-Content-Type-Options nosniff;
|
||||||
add_header Referrer-Policy same-origin;
|
add_header Referrer-Policy same-origin;
|
||||||
|
|
||||||
|
proxy_http_version 1.1;
|
||||||
|
proxy_set_header Upgrade $http_upgrade;
|
||||||
|
proxy_set_header Connection "upgrade";
|
||||||
|
proxy_set_header Host $http_host;
|
||||||
|
|
||||||
|
proxy_pass http://localhost:4000;
|
||||||
|
|
||||||
|
client_max_body_size 16m;
|
||||||
|
}
|
||||||
|
|
||||||
|
location /proxy {
|
||||||
|
proxy_cache akkoma_media_cache;
|
||||||
|
proxy_cache_lock on;
|
||||||
|
proxy_ignore_client_abort on;
|
||||||
|
proxy_pass http://localhost:4000;
|
||||||
|
}
|
||||||
|
}
|
||||||
```
|
```
|
||||||
|
reload Nginx:
|
||||||
Change the `listen` directive to the following:
|
|
||||||
```nginx
|
|
||||||
listen 127.0.0.1:8099;
|
|
||||||
```
|
|
||||||
|
|
||||||
Set the `server_name` to your onion address.
|
|
||||||
|
|
||||||
Reload Nginx:
|
|
||||||
```
|
```
|
||||||
systemctl reload nginx
|
systemctl reload nginx
|
||||||
```
|
```
|
||||||
|
|
|
@ -33,7 +33,6 @@ indexes faster when it can process many posts in a single batch.
|
||||||
> config :pleroma, Pleroma.Search.Meilisearch,
|
> config :pleroma, Pleroma.Search.Meilisearch,
|
||||||
> url: "http://127.0.0.1:7700/",
|
> url: "http://127.0.0.1:7700/",
|
||||||
> private_key: "private key",
|
> private_key: "private key",
|
||||||
> search_key: "search key",
|
|
||||||
> initial_indexing_chunk_size: 100_000
|
> initial_indexing_chunk_size: 100_000
|
||||||
|
|
||||||
Information about setting up meilisearch can be found in the
|
Information about setting up meilisearch can be found in the
|
||||||
|
@ -46,7 +45,7 @@ is hardly usable on a somewhat big instance.
|
||||||
### Private key authentication (optional)
|
### Private key authentication (optional)
|
||||||
|
|
||||||
To set the private key, use the `MEILI_MASTER_KEY` environment variable when starting. After setting the _master key_,
|
To set the private key, use the `MEILI_MASTER_KEY` environment variable when starting. After setting the _master key_,
|
||||||
you have to get the _private key_ and possibly _search key_, which are actually used for authentication.
|
you have to get the _private key_, which is actually used for authentication.
|
||||||
|
|
||||||
=== "OTP"
|
=== "OTP"
|
||||||
```sh
|
```sh
|
||||||
|
@ -58,11 +57,7 @@ you have to get the _private key_ and possibly _search key_, which are actually
|
||||||
mix pleroma.search.meilisearch show-keys <your master key here>
|
mix pleroma.search.meilisearch show-keys <your master key here>
|
||||||
```
|
```
|
||||||
|
|
||||||
You will see a "Default Admin API Key", this is the key you actually put into
|
You will see a "Default Admin API Key", this is the key you actually put into your configuration file.
|
||||||
your configuration file as `private_key`. You should also see a
|
|
||||||
"Default Search API key", put this into your config as `search_key`.
|
|
||||||
If your version of Meilisearch only showed the former,
|
|
||||||
just leave `search_key` completely unset in Akkoma's config.
|
|
||||||
|
|
||||||
### Initial indexing
|
### Initial indexing
|
||||||
|
|
||||||
|
|
|
@ -4,6 +4,7 @@
|
||||||
The following endpoints are additionally present into our actors.
|
The following endpoints are additionally present into our actors.
|
||||||
|
|
||||||
- `oauthRegistrationEndpoint` (`http://litepub.social/ns#oauthRegistrationEndpoint`)
|
- `oauthRegistrationEndpoint` (`http://litepub.social/ns#oauthRegistrationEndpoint`)
|
||||||
|
- `uploadMedia` (`https://www.w3.org/ns/activitystreams#uploadMedia`)
|
||||||
|
|
||||||
### oauthRegistrationEndpoint
|
### oauthRegistrationEndpoint
|
||||||
|
|
||||||
|
@ -11,279 +12,6 @@ Points to MastodonAPI `/api/v1/apps` for now.
|
||||||
|
|
||||||
See <https://docs.joinmastodon.org/methods/apps/>
|
See <https://docs.joinmastodon.org/methods/apps/>
|
||||||
|
|
||||||
## Emoji reactions
|
|
||||||
|
|
||||||
Emoji reactions are implemented as a new activity type `EmojiReact`.
|
|
||||||
A single user is allowed to react multiple times with different emoji to the
|
|
||||||
same post. However, they may only react at most once with the same emoji.
|
|
||||||
Repeated reaction from the same user with the same emoji are to be ignored.
|
|
||||||
Emoji reactions are also distinct from `Like` activities and a user may both
|
|
||||||
`Like` and react to a post.
|
|
||||||
|
|
||||||
!!! note
|
|
||||||
Misskey also supports emoji reactions, but the implementations differs.
|
|
||||||
It equates likes and reactions and only allows a single reaction per post.
|
|
||||||
|
|
||||||
The emoji is placed in the `content` field of the activity
|
|
||||||
and the `object` property points to the note reacting to.
|
|
||||||
|
|
||||||
Emoji can either be any Unicode emoji sequence or a custom emoji.
|
|
||||||
The latter must place their shortcode, including enclosing colons,
|
|
||||||
into `content` and put the emoji object inside the `tag` property.
|
|
||||||
The `tag` property MAY be omitted for Unicode emoji.
|
|
||||||
|
|
||||||
An example reaction with a Unicode emoji:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"@context": [
|
|
||||||
"https://www.w3.org/ns/activitystreams",
|
|
||||||
"https://example.org/schemas/litepub-0.1.jsonld",
|
|
||||||
{
|
|
||||||
"@language": "und"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"type": "EmojiReact",
|
|
||||||
"id": "https://example.org/activities/23143872a0346141",
|
|
||||||
"actor": "https://example.org/users/akko",
|
|
||||||
"nickname": "akko",
|
|
||||||
"to": ["https://remote.example/users/diana", "https://example.org/users/akko/followers"],
|
|
||||||
"cc": ["https://www.w3.org/ns/activitystreams#Public"],
|
|
||||||
"content": "🧡",
|
|
||||||
"object": "https://remote.example/objects/9f0e93499d8314a9"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
An example reaction with a custom emoji:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"@context": [
|
|
||||||
"https://www.w3.org/ns/activitystreams",
|
|
||||||
"https://example.org/schemas/litepub-0.1.jsonld",
|
|
||||||
{
|
|
||||||
"@language": "und"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"type": "EmojiReact",
|
|
||||||
"id": "https://example.org/activities/d75586dec0541650",
|
|
||||||
"actor": "https://example.org/users/akko",
|
|
||||||
"nickname": "akko",
|
|
||||||
"to": ["https://remote.example/users/diana", "https://example.org/users/akko/followers"],
|
|
||||||
"cc": ["https://www.w3.org/ns/activitystreams#Public"],
|
|
||||||
"content": ":mouse:",
|
|
||||||
"object": "https://remote.example/objects/9f0e93499d8314a9",
|
|
||||||
"tag": [{
|
|
||||||
"type": "Emoji",
|
|
||||||
"id": null,
|
|
||||||
"name": "mouse",
|
|
||||||
"icon": {
|
|
||||||
"type": "Image",
|
|
||||||
"url": "https://example.org/emoji/mouse/mouse.png"
|
|
||||||
}
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
!!! note
|
|
||||||
Although an emoji reaction can only contain a single emoji,
|
|
||||||
for compatibility with older versions of Pleroma and Akkoma,
|
|
||||||
it is recommended to wrap the emoji object in a single-element array.
|
|
||||||
|
|
||||||
When reacting with a remote custom emoji do not include the remote domain in `content`’s shortcode
|
|
||||||
*(unlike in our REST API which needs the domain)*:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"@context": [
|
|
||||||
"https://www.w3.org/ns/activitystreams",
|
|
||||||
"https://example.org/schemas/litepub-0.1.jsonld",
|
|
||||||
{
|
|
||||||
"@language": "und"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"type": "EmojiReact",
|
|
||||||
"id": "https://example.org/activities/7993dcae98d8d5ec",
|
|
||||||
"actor": "https://example.org/users/akko",
|
|
||||||
"nickname": "akko",
|
|
||||||
"to": ["https://remote.example/users/diana", "https://example.org/users/akko/followers"],
|
|
||||||
"cc": ["https://www.w3.org/ns/activitystreams#Public"],
|
|
||||||
"content": ":hug:",
|
|
||||||
"object": "https://remote.example/objects/9f0e93499d8314a9",
|
|
||||||
"tag": [{
|
|
||||||
"type": "Emoji",
|
|
||||||
"id": "https://other.example/emojis/hug",
|
|
||||||
"name": "hug",
|
|
||||||
"icon": {
|
|
||||||
"type": "Image",
|
|
||||||
"url": "https://other.example/files/b71cea432b3fad67.webp"
|
|
||||||
}
|
|
||||||
}]
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Emoji reactions can be retracted using a standard `Undo` activity:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"@context": [
|
|
||||||
"https://www.w3.org/ns/activitystreams",
|
|
||||||
"http://example.org/schemas/litepub-0.1.jsonld",
|
|
||||||
{
|
|
||||||
"@language": "und"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"type": "Undo",
|
|
||||||
"id": "http://example.org/activities/4685792e-efb6-4309-b508-ae4f355dd695",
|
|
||||||
"actor": "https://example.org/users/akko",
|
|
||||||
"to": ["https://remote.example/users/diana", "https://example.org/users/akko/followers"],
|
|
||||||
"cc": ["https://www.w3.org/ns/activitystreams#Public"],
|
|
||||||
"object": "https://example.org/activities/23143872a0346141"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## User profile backgrounds
|
|
||||||
|
|
||||||
Akkoma federates user profile backgrounds the same way as Sharkey.
|
|
||||||
|
|
||||||
An actors ActivityPub representation contains an additional
|
|
||||||
`backgroundUrl` property containing an `Image` object. This property
|
|
||||||
belongs to the `"sharkey": "https://joinsharkey.org/ns#"` namespace.
|
|
||||||
|
|
||||||
## Quote Posts
|
|
||||||
|
|
||||||
Akkoma allows referencing a single other note as a quote,
|
|
||||||
which will be prominently displayed in the interface.
|
|
||||||
|
|
||||||
The quoted post is referenced by its ActivityPub id in the `quoteUri` property.
|
|
||||||
|
|
||||||
!!! note
|
|
||||||
Old Misskey only understood and modern Misskey still prefers
|
|
||||||
the `_misskey_quote` property for this. Similar some other older
|
|
||||||
software used `quoteUrl` or `quoteURL`.
|
|
||||||
All current implementations with quote support understand `quoteUri`.
|
|
||||||
|
|
||||||
Example:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"@context": [
|
|
||||||
"https://www.w3.org/ns/activitystreams",
|
|
||||||
"https://example.org/schemas/litepub-0.1.jsonld",
|
|
||||||
{
|
|
||||||
"@language": "und"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"type": "Note",
|
|
||||||
"id": "https://example.org/activities/85717e587f95d5c0",
|
|
||||||
"actor": "https://example.org/users/akko",
|
|
||||||
"to": ["https://remote.example/users/diana", "https://example.org/users/akko/followers"],
|
|
||||||
"cc": ["https://www.w3.org/ns/activitystreams#Public"],
|
|
||||||
"context": "https://example.org/contexts/1",
|
|
||||||
"content": "Look at that!",
|
|
||||||
"quoteUri": "http://remote.example/status/85717e587f95d5c0",
|
|
||||||
"contentMap": {
|
|
||||||
"en": "Look at that!"
|
|
||||||
},
|
|
||||||
"source": {
|
|
||||||
"content": "Look at that!",
|
|
||||||
"mediaType": "text/plain"
|
|
||||||
},
|
|
||||||
"published": "2024-04-06T23:40:28Z",
|
|
||||||
"updated": "2024-04-06T23:40:28Z",
|
|
||||||
  "attachment": [],
|
|
||||||
"tag": []
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## Threads
|
|
||||||
|
|
||||||
Akkoma assigns all posts of the same thread the same `context`. This is a
|
|
||||||
standard ActivityPub property but its meaning is left vague. Akkoma will
|
|
||||||
always treat posts with identical `context` as part of the same thread.
|
|
||||||
|
|
||||||
`context` must not be assumed to hold any meaning or be dereferenceable.
|
|
||||||
|
|
||||||
Incoming posts without `context` will be assigned a new context.
|
|
||||||
|
|
||||||
!!! note
|
|
||||||
Mastodon uses the non-standard `conversation` property for the same purpose
|
|
||||||
*(named after an older OStatus property)*. For incoming posts without
|
|
||||||
    `context` but with `conversation` Akkoma will use the value from
|
|
||||||
    `conversation` to fill in `context`.
|
|
||||||
For outgoing posts Akkoma will duplicate the context into `conversation`.
|
|
||||||
|
|
||||||
## Post Source
|
|
||||||
|
|
||||||
Unlike Mastodon, Akkoma supports drafting posts in multiple source formats
|
|
||||||
besides plaintext, like Markdown or MFM. The original input is preserved
|
|
||||||
in the standard ActivityPub `source` property *(not supported by Mastodon)*.
|
|
||||||
Still, `content` will always be present and contain the prerendered HTML form.
|
|
||||||
|
|
||||||
Supported `mediaType` values include:
|
|
||||||
- `text/plain`
|
|
||||||
- `text/markdown`
|
|
||||||
- `text/bbcode`
|
|
||||||
- `text/x.misskeymarkdown`
|
|
||||||
|
|
||||||
## Post Language
|
|
||||||
|
|
||||||
!!! note
|
|
||||||
This is also supported in and compatible with Mastodon, but since
|
|
||||||
    joinmastodon.org doesn’t document it yet, it is included here.
|
|
||||||
[GoToSocial](https://docs.gotosocial.org/en/latest/federation/federating_with_gotosocial/#content-contentmap-and-language)
|
|
||||||
has a more refined version of this which can correctly deal with multiple language entries.
|
|
||||||
|
|
||||||
A post can indicate its language by including a `contentMap` object
|
|
||||||
which contains a sub key named after the language’s ISO 639-1 code
|
|
||||||
and its content identical to the post’s `content` field.
|
|
||||||
|
|
||||||
Currently Akkoma, just like Mastodon, only properly supports a single language entry,
|
|
||||||
in case of multiple entries a random language will be picked.
|
|
||||||
Furthermore, Akkoma currently only reads the `content` field
|
|
||||||
and never the value from `contentMap`.
|
|
||||||
|
|
||||||
## Local post scope
|
|
||||||
|
|
||||||
Posts using this scope will never federate to other servers
|
|
||||||
but for the sake of completeness it is listed here.
|
|
||||||
|
|
||||||
In addition to the usual scopes *(public, unlisted, followers-only, direct)*
|
|
||||||
Akkoma supports a “local” post scope. Such posts will not federate to
|
|
||||||
other instances and only be shown to logged-in users on the same instance.
|
|
||||||
It is included into the local timeline.
|
|
||||||
This may be useful to discuss or announce instance-specific policies and topics.
|
|
||||||
|
|
||||||
A post is addressed to the local scope by including `<base url of instance>/#Public`
|
|
||||||
in its `to` field. E.g. if the instance is on `https://example.org` it would use
|
|
||||||
`https://example.org/#Public`.
|
|
||||||
|
|
||||||
An implementation creating a new post MUST NOT address both the local and
|
|
||||||
general public scope `as:Public` at the same time. A post addressing the local
|
|
||||||
scope MUST NOT be sent to other instances or be possible to fetch by other
|
|
||||||
instances regardless of potential other listed addressees.
|
|
||||||
|
|
||||||
When receiving a remote post addressing both the public scope and what appears
|
|
||||||
to be a local-scope identifier, the post SHOULD be treated without assigning any
|
|
||||||
special meaning to the potential local-scope identifier.
|
|
||||||
|
|
||||||
!!! note
|
|
||||||
Misskey-derivatives have a similar concept of non-federated posts,
|
|
||||||
however those are also shown publicly on the local web interface
|
|
||||||
and are thus visible to non-members.
|
|
||||||
|
|
||||||
## List post scope
|
|
||||||
|
|
||||||
Messages originally addressed to a custom list will contain
|
|
||||||
a `listMessage` field with an unresolvable pseudo ActivityPub id.
|
|
||||||
|
|
||||||
# Deprecated and Removed Extensions
|
|
||||||
|
|
||||||
The following extensions were used in the past but have been dropped.
|
|
||||||
Documentation is retained here as a reference and since old objects might
|
|
||||||
still contain related fields.
|
|
||||||
|
|
||||||
## Actor endpoints
|
|
||||||
|
|
||||||
The following endpoints used to be present:
|
|
||||||
|
|
||||||
- `uploadMedia` (`https://www.w3.org/ns/activitystreams#uploadMedia`)
|
|
||||||
|
|
||||||
### uploadMedia
|
### uploadMedia
|
||||||
|
|
||||||
Inspired by <https://www.w3.org/wiki/SocialCG/ActivityPub/MediaUpload>, it is part of the ActivityStreams namespace because it used to be part of the ActivityPub specification and got removed from it.
|
Inspired by <https://www.w3.org/wiki/SocialCG/ActivityPub/MediaUpload>, it is part of the ActivityStreams namespace because it used to be part of the ActivityPub specification and got removed from it.
|
||||||
|
@ -292,8 +20,9 @@ Content-Type: multipart/form-data
|
||||||
|
|
||||||
Parameters:
|
Parameters:
|
||||||
- (required) `file`: The file being uploaded
|
- (required) `file`: The file being uploaded
|
||||||
- (optional) `description`: A plain-text description of the media, for accessibility purposes.
|
- (optional) `description`: A plain-text description of the media, for accessibility purposes.
|
||||||
|
|
||||||
Response: HTTP 201 Created with the object into the body, no `Location` header provided as it doesn't have an `id`
|
Response: HTTP 201 Created with the object into the body, no `Location` header provided as it doesn't have an `id`
|
||||||
|
|
||||||
The object given in the response should then be inserted into an Object's `attachment` field.
|
The object given in the response should then be inserted into an Object's `attachment` field.
|
||||||
|
|
||||||
|
|
|
@ -1,141 +0,0 @@
|
||||||
# Nodeinfo Extensions
|
|
||||||
|
|
||||||
Akkoma currently implements version 2.0 and 2.1 of nodeinfo spec,
|
|
||||||
but provides the following additional fields.
|
|
||||||
|
|
||||||
## metadata
|
|
||||||
|
|
||||||
The spec leaves the content of `metadata` up to implementations
|
|
||||||
and indeed Akkoma adds many fields here apart from the commonly
|
|
||||||
found `nodeName` and `nodeDescription` fields.
|
|
||||||
|
|
||||||
### accountActivationRequired
|
|
||||||
Whether or not users need to confirm their email before completing registration.
|
|
||||||
*(boolean)*
|
|
||||||
|
|
||||||
!!! note
|
|
||||||
Not to be confused with account approval, where each registration needs to
|
|
||||||
be manually approved by an admin. Account approval has no nodeinfo entry.
|
|
||||||
|
|
||||||
### features
|
|
||||||
|
|
||||||
Array of strings denoting supported server features. E.g. a server supporting
|
|
||||||
quote posts should include a `"quote_posting"` entry here.
|
|
||||||
|
|
||||||
A non-exhaustive list of possible features:
|
|
||||||
- `polls`
|
|
||||||
- `quote_posting`
|
|
||||||
- `editing`
|
|
||||||
- `bubble_timeline`
|
|
||||||
- `pleroma_emoji_reactions` *(Unicode emoji)*
|
|
||||||
- `custom_emoji_reactions`
|
|
||||||
- `akkoma_api`
|
|
||||||
- `akkoma:machine_translation`
|
|
||||||
- `mastodon_api`
|
|
||||||
- `pleroma_api`
|
|
||||||
|
|
||||||
### federatedTimelineAvailable
|
|
||||||
Whether or not the “federated timeline”, i.e. a timeline containing posts from
|
|
||||||
the entire known network, is made available.
|
|
||||||
*(boolean)*
|
|
||||||
|
|
||||||
### federation
|
|
||||||
This section is optional and can contain various custom keys describing federation policies.
|
|
||||||
The following are required to be present:
|
|
||||||
- `enabled` *(boolean)* whether the server federates at all
|
|
||||||
|
|
||||||
A non-exhaustive list of optional keys:
|
|
||||||
- `exclusions` *(boolean)* whether some federation policies are withheld
|
|
||||||
- `mrf_simple` *(object)* describes how the Simple MRF policy is configured
|
|
||||||
|
|
||||||
### fieldsLimits
|
|
||||||
A JSON object documenting restriction for user account info fields.
|
|
||||||
All properties are integers.
|
|
||||||
|
|
||||||
- `maxFields` maximum number of account info fields local users can create
|
|
||||||
- `maxRemoteFields` maximum number of account info fields remote users can have
|
|
||||||
before the user gets rejected or fields truncated
|
|
||||||
- `nameLength` maximum length of a field’s name
|
|
||||||
- `valueLength` maximum length of a field’s value
|
|
||||||
|
|
||||||
### invitesEnabled
|
|
||||||
Whether or not signing up via invite codes is possible.
|
|
||||||
*(boolean)*
|
|
||||||
|
|
||||||
### localBubbleInstances
|
|
||||||
Array of domains (as strings) of other instances chosen
|
|
||||||
by the admin which are shown in the bubble timeline.
|
|
||||||
|
|
||||||
### mailerEnabled
|
|
||||||
Whether or not the instance can send out emails.
|
|
||||||
*(boolean)*
|
|
||||||
|
|
||||||
### nodeDescription
|
|
||||||
Human-friendly description of this instance
|
|
||||||
*(string)*
|
|
||||||
|
|
||||||
### nodeName
|
|
||||||
Human-friendly name of this instance
|
|
||||||
*(string)*
|
|
||||||
|
|
||||||
### pollLimits
|
|
||||||
JSON object containing limits for polls created by local users.
|
|
||||||
All values are integers.
|
|
||||||
- `max_options` maximum number of poll options
|
|
||||||
- `max_option_chars` maximum characters per poll option
|
|
||||||
- `min_expiration` minimum time in seconds a poll must be open for
|
|
||||||
- `max_expiration` maximum time a poll is allowed to be open for
|
|
||||||
|
|
||||||
### postFormats
|
|
||||||
Array of strings containing media types for supported post source formats.
|
|
||||||
A non-exhaustive list of possible values:
|
|
||||||
- `text/plain`
|
|
||||||
- `text/markdown`
|
|
||||||
- `text/bbcode`
|
|
||||||
- `text/x.misskeymarkdown`
|
|
||||||
|
|
||||||
### private
|
|
||||||
Whether or not unauthenticated API access is permitted.
|
|
||||||
*(boolean)*
|
|
||||||
|
|
||||||
### privilegedStaff
|
|
||||||
Whether or not moderators are trusted to perform some
|
|
||||||
additional tasks like e.g. issuing password reset emails.
|
|
||||||
|
|
||||||
### publicTimelineVisibility
|
|
||||||
JSON object containing boolean-valued keys reporting
|
|
||||||
if a given timeline can be viewed without login.
|
|
||||||
- `local`
|
|
||||||
- `federated`
|
|
||||||
- `bubble`
|
|
||||||
|
|
||||||
### restrictedNicknames
|
|
||||||
Array of strings listing nicknames forbidden to be used during signup.
|
|
||||||
|
|
||||||
### skipThreadContainment
|
|
||||||
Whether broken threads are filtered out
|
|
||||||
*(boolean)*
|
|
||||||
|
|
||||||
### staffAccounts
|
|
||||||
Array containing ActivityPub IDs of local accounts
|
|
||||||
with some form of elevated privilege on the instance.
|
|
||||||
|
|
||||||
### suggestions
|
|
||||||
JSON object containing info on whether the interaction-based
|
|
||||||
Mastodon `/api/v1/suggestions` feature is enabled and optionally
|
|
||||||
additional implementation-defined fields with more details
|
|
||||||
on e.g. how suggested users are selected.
|
|
||||||
|
|
||||||
!!! note
|
|
||||||
This has no relation to the newer /api/v2/suggestions API
|
|
||||||
which also (or exclusively) contains staff-curated entries.
|
|
||||||
|
|
||||||
- `enabled` *(boolean)* whether or not user recommendations are enabled
|
|
||||||
|
|
||||||
### uploadLimits
|
|
||||||
JSON object documenting various upload-related size limits.
|
|
||||||
All values are integers and in bytes.
|
|
||||||
- `avatar` maximum size of uploaded user avatars
|
|
||||||
- `banner` maximum size of uploaded user profile banners
|
|
||||||
- `background` maximum size of uploaded user profile backgrounds
|
|
||||||
- `general` maximum size for all other kinds of uploads
|
|
|
@ -60,7 +60,7 @@ ServerTokens Prod
|
||||||
Include /etc/letsencrypt/options-ssl-apache.conf
|
Include /etc/letsencrypt/options-ssl-apache.conf
|
||||||
|
|
||||||
# Uncomment the following to enable MediaProxy caching on disk
|
# Uncomment the following to enable MediaProxy caching on disk
|
||||||
#CacheRoot /var/tmp/akkoma-media-cache/
|
#CacheRoot /tmp/akkoma-media-cache/
|
||||||
#CacheDirLevels 1
|
#CacheDirLevels 1
|
||||||
#CacheDirLength 2
|
#CacheDirLength 2
|
||||||
#CacheEnable disk /proxy
|
#CacheEnable disk /proxy
|
||||||
|
|
|
@ -16,7 +16,7 @@
|
||||||
SCRIPTNAME=${0##*/}
|
SCRIPTNAME=${0##*/}
|
||||||
|
|
||||||
# mod_disk_cache directory
|
# mod_disk_cache directory
|
||||||
CACHE_DIRECTORY="/var/tmp/akkoma-media-cache"
|
CACHE_DIRECTORY="/tmp/akkoma-media-cache"
|
||||||
|
|
||||||
## Removes an item via the htcacheclean utility
|
## Removes an item via the htcacheclean utility
|
||||||
## $1 - the filename, can be a pattern .
|
## $1 - the filename, can be a pattern .
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
# See the documentation at docs.akkoma.dev for your particular distro/OS for
|
# See the documentation at docs.akkoma.dev for your particular distro/OS for
|
||||||
# installation instructions.
|
# installation instructions.
|
||||||
|
|
||||||
proxy_cache_path /var/tmp/akkoma-media-cache levels=1:2 keys_zone=akkoma_media_cache:10m max_size=1g
|
proxy_cache_path /tmp/akkoma-media-cache levels=1:2 keys_zone=akkoma_media_cache:10m max_size=1g
|
||||||
inactive=720m use_temp_path=off;
|
inactive=720m use_temp_path=off;
|
||||||
|
|
||||||
# this is explicitly IPv4 since Pleroma.Web.Endpoint binds on IPv4 only
|
# this is explicitly IPv4 since Pleroma.Web.Endpoint binds on IPv4 only
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
SCRIPTNAME=${0##*/}
|
SCRIPTNAME=${0##*/}
|
||||||
|
|
||||||
# NGINX cache directory
|
# NGINX cache directory
|
||||||
CACHE_DIRECTORY="/var/tmp/akkoma-media-cache"
|
CACHE_DIRECTORY="/tmp/akkoma-media-cache"
|
||||||
|
|
||||||
## Return the files where the items are cached.
|
## Return the files where the items are cached.
|
||||||
## $1 - the filename, can be a pattern .
|
## $1 - the filename, can be a pattern .
|
||||||
|
|
|
@ -16,7 +16,7 @@ defmodule Mix.Pleroma do
|
||||||
:fast_html,
|
:fast_html,
|
||||||
:oban
|
:oban
|
||||||
]
|
]
|
||||||
@cachex_children ["object", "user", "scrubber", "web_resp", "http_backoff"]
|
@cachex_children ["object", "user", "scrubber", "web_resp"]
|
||||||
@doc "Common functions to be reused in mix tasks"
|
@doc "Common functions to be reused in mix tasks"
|
||||||
def start_pleroma do
|
def start_pleroma do
|
||||||
Pleroma.Config.Holder.save_default()
|
Pleroma.Config.Holder.save_default()
|
||||||
|
|
|
@ -17,13 +17,6 @@ defmodule Mix.Tasks.Pleroma.Diagnostics do
|
||||||
|> IO.inspect()
|
|> IO.inspect()
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["fetch_object", url]) do
|
|
||||||
start_pleroma()
|
|
||||||
|
|
||||||
Pleroma.Object.Fetcher.fetch_object_from_id(url)
|
|
||||||
|> IO.inspect()
|
|
||||||
end
|
|
||||||
|
|
||||||
def run(["home_timeline", nickname]) do
|
def run(["home_timeline", nickname]) do
|
||||||
start_pleroma()
|
start_pleroma()
|
||||||
user = Repo.get_by!(User, nickname: nickname)
|
user = Repo.get_by!(User, nickname: nickname)
|
||||||
|
|
|
@ -126,11 +126,7 @@ defmodule Mix.Tasks.Pleroma.Search.Meilisearch do
|
||||||
decoded = Jason.decode!(result.body)
|
decoded = Jason.decode!(result.body)
|
||||||
|
|
||||||
if decoded["results"] do
|
if decoded["results"] do
|
||||||
Enum.each(decoded["results"], fn
|
Enum.each(decoded["results"], fn %{"description" => desc, "key" => key} ->
|
||||||
%{"name" => name, "key" => key} ->
|
|
||||||
IO.puts("#{name}: #{key}")
|
|
||||||
|
|
||||||
%{"description" => desc, "key" => key} ->
|
|
||||||
IO.puts("#{desc}: #{key}")
|
IO.puts("#{desc}: #{key}")
|
||||||
end)
|
end)
|
||||||
else
|
else
|
||||||
|
|
|
@ -258,27 +258,6 @@ defmodule Pleroma.Activity do
|
||||||
|
|
||||||
def get_create_by_object_ap_id(_), do: nil
|
def get_create_by_object_ap_id(_), do: nil
|
||||||
|
|
||||||
@doc """
|
|
||||||
Accepts a list of `ap__id`.
|
|
||||||
Returns a query yielding Create activities for the given objects,
|
|
||||||
in the same order as they were specified in the input list.
|
|
||||||
"""
|
|
||||||
@spec get_presorted_create_by_object_ap_id([String.t()]) :: Ecto.Queryable.t()
|
|
||||||
def get_presorted_create_by_object_ap_id(ap_ids) do
|
|
||||||
from(
|
|
||||||
a in Activity,
|
|
||||||
join:
|
|
||||||
ids in fragment(
|
|
||||||
"SELECT * FROM UNNEST(?::text[]) WITH ORDINALITY AS ids(ap_id, ord)",
|
|
||||||
^ap_ids
|
|
||||||
),
|
|
||||||
on:
|
|
||||||
ids.ap_id == fragment("?->>'object'", a.data) and
|
|
||||||
fragment("?->>'type'", a.data) == "Create",
|
|
||||||
order_by: [asc: ids.ord]
|
|
||||||
)
|
|
||||||
end
|
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Accepts `ap_id` or list of `ap_id`.
|
Accepts `ap_id` or list of `ap_id`.
|
||||||
Returns a query.
|
Returns a query.
|
||||||
|
|
|
@ -179,9 +179,7 @@ defmodule Pleroma.Application do
|
||||||
build_cachex("translations", default_ttl: :timer.hours(24 * 30), limit: 2500),
|
build_cachex("translations", default_ttl: :timer.hours(24 * 30), limit: 2500),
|
||||||
build_cachex("instances", default_ttl: :timer.hours(24), ttl_interval: 1000, limit: 2500),
|
build_cachex("instances", default_ttl: :timer.hours(24), ttl_interval: 1000, limit: 2500),
|
||||||
build_cachex("request_signatures", default_ttl: :timer.hours(24 * 30), limit: 3000),
|
build_cachex("request_signatures", default_ttl: :timer.hours(24 * 30), limit: 3000),
|
||||||
build_cachex("rel_me", default_ttl: :timer.hours(24 * 30), limit: 300),
|
build_cachex("rel_me", default_ttl: :timer.hours(24 * 30), limit: 300)
|
||||||
build_cachex("host_meta", default_ttl: :timer.minutes(120), limit: 5000),
|
|
||||||
build_cachex("http_backoff", default_ttl: :timer.hours(24 * 30), limit: 10000)
|
|
||||||
]
|
]
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -64,7 +64,4 @@ defmodule Pleroma.Constants do
|
||||||
"Service"
|
"Service"
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
# Internally used as top-level types for media attachments and user images
|
|
||||||
const(attachment_types, do: ["Document", "Image"])
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,121 +0,0 @@
|
||||||
defmodule Pleroma.HTTP.Backoff do
|
|
||||||
alias Pleroma.HTTP
|
|
||||||
require Logger
|
|
||||||
|
|
||||||
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
|
|
||||||
@backoff_cache :http_backoff_cache
|
|
||||||
|
|
||||||
# attempt to parse a timestamp from a header
|
|
||||||
# returns nil if it can't parse the timestamp
|
|
||||||
@spec timestamp_or_nil(binary) :: DateTime.t() | nil
|
|
||||||
defp timestamp_or_nil(header) do
|
|
||||||
case DateTime.from_iso8601(header) do
|
|
||||||
{:ok, stamp, _} ->
|
|
||||||
stamp
|
|
||||||
|
|
||||||
_ ->
|
|
||||||
nil
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
# attempt to parse the x-ratelimit-reset header from the headers
|
|
||||||
@spec x_ratelimit_reset(headers :: list) :: DateTime.t() | nil
|
|
||||||
defp x_ratelimit_reset(headers) do
|
|
||||||
with {_header, value} <- List.keyfind(headers, "x-ratelimit-reset", 0),
|
|
||||||
true <- is_binary(value) do
|
|
||||||
timestamp_or_nil(value)
|
|
||||||
else
|
|
||||||
_ ->
|
|
||||||
nil
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
# attempt to parse the Retry-After header from the headers
|
|
||||||
# this can be either a timestamp _or_ a number of seconds to wait!
|
|
||||||
# we'll return a datetime if we can parse it, or nil if we can't
|
|
||||||
@spec retry_after(headers :: list) :: DateTime.t() | nil
|
|
||||||
defp retry_after(headers) do
|
|
||||||
with {_header, value} <- List.keyfind(headers, "retry-after", 0),
|
|
||||||
true <- is_binary(value) do
|
|
||||||
# first, see if it's an integer
|
|
||||||
case Integer.parse(value) do
|
|
||||||
{seconds, ""} ->
|
|
||||||
Logger.debug("Parsed Retry-After header: #{seconds} seconds")
|
|
||||||
DateTime.utc_now() |> Timex.shift(seconds: seconds)
|
|
||||||
|
|
||||||
_ ->
|
|
||||||
# if it's not an integer, try to parse it as a timestamp
|
|
||||||
timestamp_or_nil(value)
|
|
||||||
end
|
|
||||||
else
|
|
||||||
_ ->
|
|
||||||
nil
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
# given a set of headers, will attempt to find the next backoff timestamp
|
|
||||||
# if it can't find one, it will default to 5 minutes from now
|
|
||||||
@spec next_backoff_timestamp(%{headers: list}) :: DateTime.t()
|
|
||||||
defp next_backoff_timestamp(%{headers: headers}) when is_list(headers) do
|
|
||||||
default_5_minute_backoff =
|
|
||||||
DateTime.utc_now()
|
|
||||||
|> Timex.shift(seconds: 5 * 60)
|
|
||||||
|
|
||||||
backoff =
|
|
||||||
[&x_ratelimit_reset/1, &retry_after/1]
|
|
||||||
|> Enum.map(& &1.(headers))
|
|
||||||
|> Enum.find(&(&1 != nil))
|
|
||||||
|
|
||||||
if is_nil(backoff) do
|
|
||||||
Logger.debug("No backoff headers found, defaulting to 5 minutes from now")
|
|
||||||
default_5_minute_backoff
|
|
||||||
else
|
|
||||||
Logger.debug("Found backoff header, will back off until: #{backoff}")
|
|
||||||
backoff
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defp next_backoff_timestamp(_), do: DateTime.utc_now() |> Timex.shift(seconds: 5 * 60)
|
|
||||||
|
|
||||||
# utility function to check the HTTP response for potential backoff headers
|
|
||||||
# will check if we get a 429 or 503 response, and if we do, will back off for a bit
|
|
||||||
@spec check_backoff({:ok | :error, HTTP.Env.t()}, binary()) ::
|
|
||||||
{:ok | :error, HTTP.Env.t()} | {:error, :ratelimit}
|
|
||||||
defp check_backoff({:ok, env}, host) do
|
|
||||||
case env.status do
|
|
||||||
status when status in [429, 503] ->
|
|
||||||
Logger.error("Rate limited on #{host}! Backing off...")
|
|
||||||
timestamp = next_backoff_timestamp(env)
|
|
||||||
ttl = Timex.diff(timestamp, DateTime.utc_now(), :seconds)
|
|
||||||
# we will cache the host for 5 minutes
|
|
||||||
@cachex.put(@backoff_cache, host, true, ttl: ttl)
|
|
||||||
{:error, :ratelimit}
|
|
||||||
|
|
||||||
_ ->
|
|
||||||
{:ok, env}
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defp check_backoff(env, _), do: env
|
|
||||||
|
|
||||||
@doc """
|
|
||||||
this acts as a single throughput for all GET requests
|
|
||||||
we will check if the host is in the cache, and if it is, we will automatically fail the request
|
|
||||||
this ensures that we don't hammer the server with requests, and instead wait for the backoff to expire
|
|
||||||
this is a very simple implementation, and can be improved upon!
|
|
||||||
"""
|
|
||||||
@spec get(binary, list, list) :: {:ok | :error, HTTP.Env.t()} | {:error, :ratelimit}
|
|
||||||
def get(url, headers \\ [], options \\ []) do
|
|
||||||
%{host: host} = URI.parse(url)
|
|
||||||
|
|
||||||
case @cachex.get(@backoff_cache, host) do
|
|
||||||
{:ok, nil} ->
|
|
||||||
url
|
|
||||||
|> HTTP.get(headers, options)
|
|
||||||
|> check_backoff(host)
|
|
||||||
|
|
||||||
_ ->
|
|
||||||
{:error, :ratelimit}
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
|
@ -354,7 +354,7 @@ defmodule Pleroma.Object.Fetcher do
|
||||||
|
|
||||||
with {:ok, %{body: body, status: code, headers: headers, url: final_url}}
|
with {:ok, %{body: body, status: code, headers: headers, url: final_url}}
|
||||||
when code in 200..299 <-
|
when code in 200..299 <-
|
||||||
HTTP.Backoff.get(id, headers),
|
HTTP.get(id, headers),
|
||||||
remote_host <-
|
remote_host <-
|
||||||
URI.parse(final_url).host,
|
URI.parse(final_url).host,
|
||||||
{:cross_domain_redirect, false} <-
|
{:cross_domain_redirect, false} <-
|
||||||
|
|
|
@ -28,7 +28,7 @@ defmodule Pleroma.ScheduledActivity do
|
||||||
timestamps()
|
timestamps()
|
||||||
end
|
end
|
||||||
|
|
||||||
defp changeset(%ScheduledActivity{} = scheduled_activity, attrs) do
|
def changeset(%ScheduledActivity{} = scheduled_activity, attrs) do
|
||||||
scheduled_activity
|
scheduled_activity
|
||||||
|> cast(attrs, [:scheduled_at, :params])
|
|> cast(attrs, [:scheduled_at, :params])
|
||||||
|> validate_required([:scheduled_at, :params])
|
|> validate_required([:scheduled_at, :params])
|
||||||
|
@ -40,36 +40,26 @@ defmodule Pleroma.ScheduledActivity do
|
||||||
%{changes: %{params: %{"media_ids" => media_ids} = params}} = changeset
|
%{changes: %{params: %{"media_ids" => media_ids} = params}} = changeset
|
||||||
)
|
)
|
||||||
when is_list(media_ids) do
|
when is_list(media_ids) do
|
||||||
user = User.get_by_id(changeset.data.user_id)
|
media_attachments = Utils.attachments_from_ids(%{media_ids: media_ids})
|
||||||
|
|
||||||
case Utils.attachments_from_ids(user, %{media_ids: media_ids}) do
|
|
||||||
media_attachments when is_list(media_attachments) ->
|
|
||||||
params =
|
params =
|
||||||
params
|
params
|
||||||
|> Map.put("media_attachments", media_attachments)
|
|> Map.put("media_attachments", media_attachments)
|
||||||
|> Map.put("media_ids", media_ids)
|
|> Map.put("media_ids", media_ids)
|
||||||
|
|
||||||
put_change(changeset, :params, params)
|
put_change(changeset, :params, params)
|
||||||
|
|
||||||
{:error, _} = e ->
|
|
||||||
e
|
|
||||||
|
|
||||||
e ->
|
|
||||||
{:error, e}
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
||||||
defp with_media_attachments(changeset), do: changeset
|
defp with_media_attachments(changeset), do: changeset
|
||||||
|
|
||||||
defp update_changeset(%ScheduledActivity{} = scheduled_activity, attrs) do
|
def update_changeset(%ScheduledActivity{} = scheduled_activity, attrs) do
|
||||||
# note: should this ever allow swapping media attachments, make sure ownership is checked
|
|
||||||
scheduled_activity
|
scheduled_activity
|
||||||
|> cast(attrs, [:scheduled_at])
|
|> cast(attrs, [:scheduled_at])
|
||||||
|> validate_required([:scheduled_at])
|
|> validate_required([:scheduled_at])
|
||||||
|> validate_scheduled_at()
|
|> validate_scheduled_at()
|
||||||
end
|
end
|
||||||
|
|
||||||
defp validate_scheduled_at(changeset) do
|
def validate_scheduled_at(changeset) do
|
||||||
validate_change(changeset, :scheduled_at, fn _, scheduled_at ->
|
validate_change(changeset, :scheduled_at, fn _, scheduled_at ->
|
||||||
cond do
|
cond do
|
||||||
not far_enough?(scheduled_at) ->
|
not far_enough?(scheduled_at) ->
|
||||||
|
@ -87,7 +77,7 @@ defmodule Pleroma.ScheduledActivity do
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp exceeds_daily_user_limit?(user_id, scheduled_at) do
|
def exceeds_daily_user_limit?(user_id, scheduled_at) do
|
||||||
ScheduledActivity
|
ScheduledActivity
|
||||||
|> where(user_id: ^user_id)
|
|> where(user_id: ^user_id)
|
||||||
|> where([sa], type(sa.scheduled_at, :date) == type(^scheduled_at, :date))
|
|> where([sa], type(sa.scheduled_at, :date) == type(^scheduled_at, :date))
|
||||||
|
@ -96,7 +86,7 @@ defmodule Pleroma.ScheduledActivity do
|
||||||
|> Kernel.>=(Config.get([ScheduledActivity, :daily_user_limit]))
|
|> Kernel.>=(Config.get([ScheduledActivity, :daily_user_limit]))
|
||||||
end
|
end
|
||||||
|
|
||||||
defp exceeds_total_user_limit?(user_id) do
|
def exceeds_total_user_limit?(user_id) do
|
||||||
ScheduledActivity
|
ScheduledActivity
|
||||||
|> where(user_id: ^user_id)
|
|> where(user_id: ^user_id)
|
||||||
|> select([sa], count(sa.id))
|
|> select([sa], count(sa.id))
|
||||||
|
@ -118,29 +108,20 @@ defmodule Pleroma.ScheduledActivity do
|
||||||
diff > @min_offset
|
diff > @min_offset
|
||||||
end
|
end
|
||||||
|
|
||||||
defp new(%User{} = user, attrs) do
|
def new(%User{} = user, attrs) do
|
||||||
changeset(%ScheduledActivity{user_id: user.id}, attrs)
|
changeset(%ScheduledActivity{user_id: user.id}, attrs)
|
||||||
end
|
end
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Creates ScheduledActivity and add to queue to perform at scheduled_at date
|
Creates ScheduledActivity and add to queue to perform at scheduled_at date
|
||||||
"""
|
"""
|
||||||
@spec create(User.t(), map()) :: {:ok, ScheduledActivity.t()} | {:error, any()}
|
@spec create(User.t(), map()) :: {:ok, ScheduledActivity.t()} | {:error, Ecto.Changeset.t()}
|
||||||
def create(%User{} = user, attrs) do
|
def create(%User{} = user, attrs) do
|
||||||
case new(user, attrs) do
|
|
||||||
%Ecto.Changeset{} = sched_data ->
|
|
||||||
Multi.new()
|
Multi.new()
|
||||||
|> Multi.insert(:scheduled_activity, sched_data)
|
|> Multi.insert(:scheduled_activity, new(user, attrs))
|
||||||
|> maybe_add_jobs(Config.get([ScheduledActivity, :enabled]))
|
|> maybe_add_jobs(Config.get([ScheduledActivity, :enabled]))
|
||||||
|> Repo.transaction()
|
|> Repo.transaction()
|
||||||
|> transaction_response
|
|> transaction_response
|
||||||
|
|
||||||
{:error, _} = e ->
|
|
||||||
e
|
|
||||||
|
|
||||||
e ->
|
|
||||||
{:error, e}
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
||||||
defp maybe_add_jobs(multi, true) do
|
defp maybe_add_jobs(multi, true) do
|
||||||
|
@ -206,7 +187,17 @@ defmodule Pleroma.ScheduledActivity do
|
||||||
|> where(user_id: ^user.id)
|
|> where(user_id: ^user.id)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp job_query(scheduled_activity_id) do
|
def due_activities(offset \\ 0) do
|
||||||
|
naive_datetime =
|
||||||
|
NaiveDateTime.utc_now()
|
||||||
|
|> NaiveDateTime.add(offset, :millisecond)
|
||||||
|
|
||||||
|
ScheduledActivity
|
||||||
|
|> where([sa], sa.scheduled_at < ^naive_datetime)
|
||||||
|
|> Repo.all()
|
||||||
|
end
|
||||||
|
|
||||||
|
def job_query(scheduled_activity_id) do
|
||||||
from(j in Oban.Job,
|
from(j in Oban.Job,
|
||||||
where: j.queue == "scheduled_activities",
|
where: j.queue == "scheduled_activities",
|
||||||
where: fragment("args ->> 'activity_id' = ?::text", ^to_string(scheduled_activity_id))
|
where: fragment("args ->> 'activity_id' = ?::text", ^to_string(scheduled_activity_id))
|
||||||
|
|
|
@ -5,27 +5,15 @@ defmodule Pleroma.Search.Meilisearch do
|
||||||
alias Pleroma.Activity
|
alias Pleroma.Activity
|
||||||
|
|
||||||
import Pleroma.Search.DatabaseSearch
|
import Pleroma.Search.DatabaseSearch
|
||||||
|
import Ecto.Query
|
||||||
|
|
||||||
@behaviour Pleroma.Search.SearchBackend
|
@behaviour Pleroma.Search.SearchBackend
|
||||||
|
|
||||||
defp meili_headers(key) do
|
defp meili_headers do
|
||||||
key_header =
|
|
||||||
if is_nil(key), do: [], else: [{"Authorization", "Bearer #{key}"}]
|
|
||||||
|
|
||||||
[{"Content-Type", "application/json"} | key_header]
|
|
||||||
end
|
|
||||||
|
|
||||||
defp meili_headers_admin do
|
|
||||||
private_key = Pleroma.Config.get([Pleroma.Search.Meilisearch, :private_key])
|
private_key = Pleroma.Config.get([Pleroma.Search.Meilisearch, :private_key])
|
||||||
meili_headers(private_key)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp meili_headers_search do
|
[{"Content-Type", "application/json"}] ++
|
||||||
search_key =
|
if is_nil(private_key), do: [], else: [{"Authorization", "Bearer #{private_key}"}]
|
||||||
Pleroma.Config.get([Pleroma.Search.Meilisearch, :search_key]) ||
|
|
||||||
Pleroma.Config.get([Pleroma.Search.Meilisearch, :private_key])
|
|
||||||
|
|
||||||
meili_headers(search_key)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
def meili_get(path) do
|
def meili_get(path) do
|
||||||
|
@ -34,7 +22,7 @@ defmodule Pleroma.Search.Meilisearch do
|
||||||
result =
|
result =
|
||||||
Pleroma.HTTP.get(
|
Pleroma.HTTP.get(
|
||||||
Path.join(endpoint, path),
|
Path.join(endpoint, path),
|
||||||
meili_headers_admin()
|
meili_headers()
|
||||||
)
|
)
|
||||||
|
|
||||||
with {:ok, res} <- result do
|
with {:ok, res} <- result do
|
||||||
|
@ -42,14 +30,14 @@ defmodule Pleroma.Search.Meilisearch do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp meili_search(params) do
|
def meili_post(path, params) do
|
||||||
endpoint = Pleroma.Config.get([Pleroma.Search.Meilisearch, :url])
|
endpoint = Pleroma.Config.get([Pleroma.Search.Meilisearch, :url])
|
||||||
|
|
||||||
result =
|
result =
|
||||||
Pleroma.HTTP.post(
|
Pleroma.HTTP.post(
|
||||||
Path.join(endpoint, "/indexes/objects/search"),
|
Path.join(endpoint, path),
|
||||||
Jason.encode!(params),
|
Jason.encode!(params),
|
||||||
meili_headers_search()
|
meili_headers()
|
||||||
)
|
)
|
||||||
|
|
||||||
with {:ok, res} <- result do
|
with {:ok, res} <- result do
|
||||||
|
@ -65,7 +53,7 @@ defmodule Pleroma.Search.Meilisearch do
|
||||||
:put,
|
:put,
|
||||||
Path.join(endpoint, path),
|
Path.join(endpoint, path),
|
||||||
Jason.encode!(params),
|
Jason.encode!(params),
|
||||||
meili_headers_admin(),
|
meili_headers(),
|
||||||
[]
|
[]
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -82,7 +70,7 @@ defmodule Pleroma.Search.Meilisearch do
|
||||||
:delete,
|
:delete,
|
||||||
Path.join(endpoint, path),
|
Path.join(endpoint, path),
|
||||||
"",
|
"",
|
||||||
meili_headers_admin(),
|
meili_headers(),
|
||||||
[]
|
[]
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
@ -93,20 +81,25 @@ defmodule Pleroma.Search.Meilisearch do
|
||||||
author = Keyword.get(options, :author)
|
author = Keyword.get(options, :author)
|
||||||
|
|
||||||
res =
|
res =
|
||||||
meili_search(%{q: query, offset: offset, limit: limit})
|
meili_post(
|
||||||
|
"/indexes/objects/search",
|
||||||
|
%{q: query, offset: offset, limit: limit}
|
||||||
|
)
|
||||||
|
|
||||||
with {:ok, result} <- res do
|
with {:ok, result} <- res do
|
||||||
hits = result["hits"] |> Enum.map(& &1["ap"])
|
hits = result["hits"] |> Enum.map(& &1["ap"])
|
||||||
|
|
||||||
try do
|
try do
|
||||||
hits
|
hits
|
||||||
|> Activity.get_presorted_create_by_object_ap_id()
|
|> Activity.create_by_object_ap_id()
|
||||||
|
|> Activity.with_preloaded_object()
|
||||||
|> Activity.with_preloaded_object()
|
|> Activity.with_preloaded_object()
|
||||||
|> Activity.restrict_deactivated_users()
|
|> Activity.restrict_deactivated_users()
|
||||||
|> maybe_restrict_local(user)
|
|> maybe_restrict_local(user)
|
||||||
|> maybe_restrict_author(author)
|
|> maybe_restrict_author(author)
|
||||||
|> maybe_restrict_blocked(user)
|
|> maybe_restrict_blocked(user)
|
||||||
|> maybe_fetch(user, query)
|
|> maybe_fetch(user, query)
|
||||||
|
|> order_by([object: obj], desc: obj.data["published"])
|
||||||
|> Pleroma.Repo.all()
|
|> Pleroma.Repo.all()
|
||||||
rescue
|
rescue
|
||||||
_ -> maybe_fetch([], user, query)
|
_ -> maybe_fetch([], user, query)
|
||||||
|
|
|
@ -10,7 +10,7 @@ defmodule Pleroma.Signature do
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
alias Pleroma.Web.ActivityPub.ActivityPub
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||||
|
|
||||||
@known_suffixes ["/publickey", "/main-key", "#key"]
|
@known_suffixes ["/publickey", "/main-key"]
|
||||||
|
|
||||||
def key_id_to_actor_id(key_id) do
|
def key_id_to_actor_id(key_id) do
|
||||||
uri =
|
uri =
|
||||||
|
|
|
@ -13,6 +13,7 @@ defmodule Pleroma.Upload do
|
||||||
* `:uploader`: override uploader
|
* `:uploader`: override uploader
|
||||||
* `:filters`: override filters
|
* `:filters`: override filters
|
||||||
* `:size_limit`: override size limit
|
* `:size_limit`: override size limit
|
||||||
|
* `:activity_type`: override activity type
|
||||||
|
|
||||||
The `%Pleroma.Upload{}` struct: all documented fields are meant to be overwritten in filters:
|
The `%Pleroma.Upload{}` struct: all documented fields are meant to be overwritten in filters:
|
||||||
|
|
||||||
|
@ -47,6 +48,7 @@ defmodule Pleroma.Upload do
|
||||||
@type option ::
|
@type option ::
|
||||||
{:type, :avatar | :banner | :background}
|
{:type, :avatar | :banner | :background}
|
||||||
| {:description, String.t()}
|
| {:description, String.t()}
|
||||||
|
| {:activity_type, String.t()}
|
||||||
| {:size_limit, nil | non_neg_integer()}
|
| {:size_limit, nil | non_neg_integer()}
|
||||||
| {:uploader, module()}
|
| {:uploader, module()}
|
||||||
| {:filters, [module()]}
|
| {:filters, [module()]}
|
||||||
|
@ -141,7 +143,7 @@ defmodule Pleroma.Upload do
|
||||||
end
|
end
|
||||||
|
|
||||||
%{
|
%{
|
||||||
activity_type: activity_type,
|
activity_type: Keyword.get(opts, :activity_type, activity_type),
|
||||||
size_limit: Keyword.get(opts, :size_limit, size_limit),
|
size_limit: Keyword.get(opts, :size_limit, size_limit),
|
||||||
uploader: Keyword.get(opts, :uploader, Pleroma.Config.get([__MODULE__, :uploader])),
|
uploader: Keyword.get(opts, :uploader, Pleroma.Config.get([__MODULE__, :uploader])),
|
||||||
filters:
|
filters:
|
||||||
|
|
|
@ -33,7 +33,8 @@ defmodule Pleroma.Upload.Filter.Exiftool.ReadDescription do
|
||||||
defp read_when_empty(_, file, tag) do
|
defp read_when_empty(_, file, tag) do
|
||||||
try do
|
try do
|
||||||
{tag_content, 0} =
|
{tag_content, 0} =
|
||||||
System.cmd("exiftool", ["-b", "-s3", "-ignoreMinorErrors", "-q", "-q", tag, file],
|
System.cmd("exiftool", ["-b", "-s3", tag, file],
|
||||||
|
stderr_to_stdout: true,
|
||||||
parallelism: true
|
parallelism: true
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
|
@ -1545,19 +1545,11 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
||||||
defp normalize_also_known_as(aka) when is_binary(aka), do: [aka]
|
defp normalize_also_known_as(aka) when is_binary(aka), do: [aka]
|
||||||
defp normalize_also_known_as(nil), do: []
|
defp normalize_also_known_as(nil), do: []
|
||||||
|
|
||||||
defp normalize_attachment(%{} = attachment), do: [attachment]
|
|
||||||
defp normalize_attachment(attachment) when is_list(attachment), do: attachment
|
|
||||||
defp normalize_attachment(_), do: []
|
|
||||||
|
|
||||||
defp object_to_user_data(data, additional) do
|
defp object_to_user_data(data, additional) do
|
||||||
fields =
|
fields =
|
||||||
data
|
data
|
||||||
|> Map.get("attachment", [])
|
|> Map.get("attachment", [])
|
||||||
|> normalize_attachment()
|
|> Enum.filter(fn %{"type" => t} -> t == "PropertyValue" end)
|
||||||
|> Enum.filter(fn
|
|
||||||
%{"type" => t} -> t == "PropertyValue"
|
|
||||||
_ -> false
|
|
||||||
end)
|
|
||||||
|> Enum.map(fn fields -> Map.take(fields, ["name", "value"]) end)
|
|> Enum.map(fn fields -> Map.take(fields, ["name", "value"]) end)
|
||||||
|
|
||||||
emojis =
|
emojis =
|
||||||
|
@ -1824,19 +1816,18 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def enqueue_pin_fetches(%{pinned_objects: pins}) do
|
def pinned_fetch_task(nil), do: nil
|
||||||
# enqueue a task to fetch all pinned objects
|
|
||||||
Enum.each(pins, fn {ap_id, _} ->
|
|
||||||
if is_nil(Object.get_cached_by_ap_id(ap_id)) do
|
|
||||||
Pleroma.Workers.RemoteFetcherWorker.enqueue("fetch_remote", %{
|
|
||||||
"id" => ap_id,
|
|
||||||
"depth" => 1
|
|
||||||
})
|
|
||||||
end
|
|
||||||
end)
|
|
||||||
end
|
|
||||||
|
|
||||||
def enqueue_pin_fetches(_), do: nil
|
def pinned_fetch_task(%{pinned_objects: pins}) do
|
||||||
|
if Enum.all?(pins, fn {ap_id, _} ->
|
||||||
|
Object.get_cached_by_ap_id(ap_id) ||
|
||||||
|
match?({:ok, _object}, Fetcher.fetch_object_from_id(ap_id))
|
||||||
|
end) do
|
||||||
|
:ok
|
||||||
|
else
|
||||||
|
:error
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
def make_user_from_ap_id(ap_id, additional \\ []) do
|
def make_user_from_ap_id(ap_id, additional \\ []) do
|
||||||
user = User.get_cached_by_ap_id(ap_id)
|
user = User.get_cached_by_ap_id(ap_id)
|
||||||
|
@ -1845,6 +1836,8 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
||||||
Transmogrifier.upgrade_user_from_ap_id(ap_id)
|
Transmogrifier.upgrade_user_from_ap_id(ap_id)
|
||||||
else
|
else
|
||||||
with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, additional) do
|
with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, additional) do
|
||||||
|
{:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end)
|
||||||
|
|
||||||
user =
|
user =
|
||||||
if data.ap_id != ap_id do
|
if data.ap_id != ap_id do
|
||||||
User.get_cached_by_ap_id(data.ap_id)
|
User.get_cached_by_ap_id(data.ap_id)
|
||||||
|
@ -1856,7 +1849,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
||||||
user
|
user
|
||||||
|> User.remote_user_changeset(data)
|
|> User.remote_user_changeset(data)
|
||||||
|> User.update_and_set_cache()
|
|> User.update_and_set_cache()
|
||||||
|> tap(fn _ -> enqueue_pin_fetches(data) end)
|
|
||||||
else
|
else
|
||||||
maybe_handle_clashing_nickname(data)
|
maybe_handle_clashing_nickname(data)
|
||||||
|
|
||||||
|
@ -1864,7 +1856,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
||||||
|> User.remote_user_changeset()
|
|> User.remote_user_changeset()
|
||||||
|> Repo.insert()
|
|> Repo.insert()
|
||||||
|> User.set_cache()
|
|> User.set_cache()
|
||||||
|> tap(fn _ -> enqueue_pin_fetches(data) end)
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -1034,7 +1034,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
||||||
with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
|
with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
|
||||||
{:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
|
{:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
|
||||||
{:ok, user} <- update_user(user, data) do
|
{:ok, user} <- update_user(user, data) do
|
||||||
ActivityPub.enqueue_pin_fetches(user)
|
{:ok, _pid} = Task.start(fn -> ActivityPub.pinned_fetch_task(user) end)
|
||||||
TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
|
TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
|
||||||
{:ok, user}
|
{:ok, user}
|
||||||
else
|
else
|
||||||
|
|
|
@ -41,7 +41,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
|
||||||
preview?: false,
|
preview?: false,
|
||||||
changes: %{}
|
changes: %{}
|
||||||
|
|
||||||
defp new(user, params) do
|
def new(user, params) do
|
||||||
%__MODULE__{user: user}
|
%__MODULE__{user: user}
|
||||||
|> put_params(params)
|
|> put_params(params)
|
||||||
end
|
end
|
||||||
|
@ -92,14 +92,9 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp attachments(%{params: params, user: user} = draft) do
|
defp attachments(%{params: params} = draft) do
|
||||||
case Utils.attachments_from_ids(user, params) do
|
attachments = Utils.attachments_from_ids(params)
|
||||||
attachments when is_list(attachments) ->
|
|
||||||
%__MODULE__{draft | attachments: attachments}
|
%__MODULE__{draft | attachments: attachments}
|
||||||
|
|
||||||
{:error, reason} ->
|
|
||||||
add_error(draft, reason)
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
||||||
defp in_reply_to(%{params: %{in_reply_to_status_id: ""}} = draft), do: draft
|
defp in_reply_to(%{params: %{in_reply_to_status_id: ""}} = draft), do: draft
|
||||||
|
|
|
@ -22,31 +22,43 @@ defmodule Pleroma.Web.CommonAPI.Utils do
|
||||||
require Logger
|
require Logger
|
||||||
require Pleroma.Constants
|
require Pleroma.Constants
|
||||||
|
|
||||||
def attachments_from_ids(user, %{media_ids: ids}) do
|
def attachments_from_ids(%{media_ids: ids, descriptions: desc}) do
|
||||||
attachments_from_ids(user, ids, [])
|
attachments_from_ids_descs(ids, desc)
|
||||||
end
|
end
|
||||||
|
|
||||||
def attachments_from_ids(_, _), do: []
|
def attachments_from_ids(%{media_ids: ids}) do
|
||||||
|
attachments_from_ids_no_descs(ids)
|
||||||
defp attachments_from_ids(_user, [], acc), do: Enum.reverse(acc)
|
|
||||||
|
|
||||||
defp attachments_from_ids(user, [media_id | ids], acc) do
|
|
||||||
with {_, %Object{} = object} <- {:get, get_attachment(media_id)},
|
|
||||||
:ok <- Object.authorize_access(object, user) do
|
|
||||||
attachments_from_ids(user, ids, [object.data | acc])
|
|
||||||
else
|
|
||||||
{:get, _} -> attachments_from_ids(user, ids, acc)
|
|
||||||
{:error, reason} -> {:error, reason}
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
||||||
def get_attachment(media_id) do
|
def attachments_from_ids(_), do: []
|
||||||
with %Object{} = object <- Repo.get(Object, media_id),
|
|
||||||
true <- object.data["type"] in Pleroma.Constants.attachment_types() do
|
def attachments_from_ids_no_descs([]), do: []
|
||||||
object
|
|
||||||
else
|
def attachments_from_ids_no_descs(ids) do
|
||||||
|
Enum.map(ids, fn media_id ->
|
||||||
|
case get_attachment(media_id) do
|
||||||
|
%Object{data: data} -> data
|
||||||
_ -> nil
|
_ -> nil
|
||||||
end
|
end
|
||||||
|
end)
|
||||||
|
|> Enum.reject(&is_nil/1)
|
||||||
|
end
|
||||||
|
|
||||||
|
def attachments_from_ids_descs([], _), do: []
|
||||||
|
|
||||||
|
def attachments_from_ids_descs(ids, descs_str) do
|
||||||
|
{_, descs} = Jason.decode(descs_str)
|
||||||
|
|
||||||
|
Enum.map(ids, fn media_id ->
|
||||||
|
with %Object{data: data} <- get_attachment(media_id) do
|
||||||
|
Map.put(data, "name", descs[media_id])
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
|> Enum.reject(&is_nil/1)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp get_attachment(media_id) do
|
||||||
|
Repo.get(Object, media_id)
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec get_to_and_cc(ActivityDraft.t()) :: {list(String.t()), list(String.t())}
|
@spec get_to_and_cc(ActivityDraft.t()) :: {list(String.t()), list(String.t())}
|
||||||
|
|
|
@ -8,7 +8,6 @@ defmodule Pleroma.Web.MastodonAPI.MediaController do
|
||||||
alias Pleroma.Object
|
alias Pleroma.Object
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
alias Pleroma.Web.ActivityPub.ActivityPub
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||||
alias Pleroma.Web.CommonAPI.Utils
|
|
||||||
alias Pleroma.Web.Plugs.OAuthScopesPlug
|
alias Pleroma.Web.Plugs.OAuthScopesPlug
|
||||||
|
|
||||||
action_fallback(Pleroma.Web.MastodonAPI.FallbackController)
|
action_fallback(Pleroma.Web.MastodonAPI.FallbackController)
|
||||||
|
@ -56,15 +55,12 @@ defmodule Pleroma.Web.MastodonAPI.MediaController do
|
||||||
|
|
||||||
@doc "PUT /api/v1/media/:id"
|
@doc "PUT /api/v1/media/:id"
|
||||||
def update(%{assigns: %{user: user}, body_params: %{description: description}} = conn, %{id: id}) do
|
def update(%{assigns: %{user: user}, body_params: %{description: description}} = conn, %{id: id}) do
|
||||||
with {_, %Object{} = object} <- {:get, Utils.get_attachment(id)},
|
with %Object{} = object <- Object.get_by_id(id),
|
||||||
:ok <- Object.authorize_access(object, user),
|
:ok <- Object.authorize_access(object, user),
|
||||||
{:ok, %Object{data: data}} <- Object.update_data(object, %{"name" => description}) do
|
{:ok, %Object{data: data}} <- Object.update_data(object, %{"name" => description}) do
|
||||||
attachment_data = Map.put(data, "id", object.id)
|
attachment_data = Map.put(data, "id", object.id)
|
||||||
|
|
||||||
render(conn, "attachment.json", %{attachment: attachment_data})
|
render(conn, "attachment.json", %{attachment: attachment_data})
|
||||||
else
|
|
||||||
{:get, _} -> {:error, :not_found}
|
|
||||||
e -> e
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -72,14 +68,11 @@ defmodule Pleroma.Web.MastodonAPI.MediaController do
|
||||||
|
|
||||||
@doc "GET /api/v1/media/:id"
|
@doc "GET /api/v1/media/:id"
|
||||||
def show(%{assigns: %{user: user}} = conn, %{id: id}) do
|
def show(%{assigns: %{user: user}} = conn, %{id: id}) do
|
||||||
with {_, %Object{data: data, id: object_id} = object} <- {:get, Utils.get_attachment(id)},
|
with %Object{data: data, id: object_id} = object <- Object.get_by_id(id),
|
||||||
:ok <- Object.authorize_access(object, user) do
|
:ok <- Object.authorize_access(object, user) do
|
||||||
attachment_data = Map.put(data, "id", object_id)
|
attachment_data = Map.put(data, "id", object_id)
|
||||||
|
|
||||||
render(conn, "attachment.json", %{attachment: attachment_data})
|
render(conn, "attachment.json", %{attachment: attachment_data})
|
||||||
else
|
|
||||||
{:get, _} -> {:error, :not_found}
|
|
||||||
e -> e
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -87,7 +87,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do
|
||||||
%{scopes: ["write:bookmarks"]} when action in [:bookmark, :unbookmark]
|
%{scopes: ["write:bookmarks"]} when action in [:bookmark, :unbookmark]
|
||||||
)
|
)
|
||||||
|
|
||||||
@rate_limited_status_actions ~w(reblog unreblog favourite unfavourite create delete update)a
|
@rate_limited_status_actions ~w(reblog unreblog favourite unfavourite create delete)a
|
||||||
|
|
||||||
plug(
|
plug(
|
||||||
RateLimiter,
|
RateLimiter,
|
||||||
|
|
|
@ -156,21 +156,11 @@ defmodule Pleroma.Web.WebFinger do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
|
|
||||||
def find_lrdd_template(domain) do
|
def find_lrdd_template(domain) do
|
||||||
@cachex.fetch!(:host_meta_cache, domain, fn _ ->
|
|
||||||
{:commit, fetch_lrdd_template(domain)}
|
|
||||||
end)
|
|
||||||
rescue
|
|
||||||
e -> {:error, "Cachex error: #{inspect(e)}"}
|
|
||||||
end
|
|
||||||
|
|
||||||
defp fetch_lrdd_template(domain) do
|
|
||||||
# WebFinger is restricted to HTTPS - https://tools.ietf.org/html/rfc7033#section-9.1
|
# WebFinger is restricted to HTTPS - https://tools.ietf.org/html/rfc7033#section-9.1
|
||||||
meta_url = "https://#{domain}/.well-known/host-meta"
|
meta_url = "https://#{domain}/.well-known/host-meta"
|
||||||
|
|
||||||
with {:ok, %{status: status, body: body}} when status in 200..299 <-
|
with {:ok, %{status: status, body: body}} when status in 200..299 <- HTTP.get(meta_url) do
|
||||||
HTTP.Backoff.get(meta_url) do
|
|
||||||
get_template_from_xml(body)
|
get_template_from_xml(body)
|
||||||
else
|
else
|
||||||
error ->
|
error ->
|
||||||
|
@ -179,7 +169,7 @@ defmodule Pleroma.Web.WebFinger do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp get_address_from_domain(domain, "acct:" <> _ = encoded_account) when is_binary(domain) do
|
defp get_address_from_domain(domain, encoded_account) when is_binary(domain) do
|
||||||
case find_lrdd_template(domain) do
|
case find_lrdd_template(domain) do
|
||||||
{:ok, template} ->
|
{:ok, template} ->
|
||||||
String.replace(template, "{uri}", encoded_account)
|
String.replace(template, "{uri}", encoded_account)
|
||||||
|
@ -189,11 +179,6 @@ defmodule Pleroma.Web.WebFinger do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp get_address_from_domain(domain, account) when is_binary(domain) do
|
|
||||||
encoded_account = URI.encode("acct:#{account}")
|
|
||||||
get_address_from_domain(domain, encoded_account)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp get_address_from_domain(_, _), do: {:error, :webfinger_no_domain}
|
defp get_address_from_domain(_, _), do: {:error, :webfinger_no_domain}
|
||||||
|
|
||||||
@spec finger(String.t()) :: {:ok, map()} | {:error, any()}
|
@spec finger(String.t()) :: {:ok, map()} | {:error, any()}
|
||||||
|
@ -208,9 +193,11 @@ defmodule Pleroma.Web.WebFinger do
|
||||||
URI.parse(account).host
|
URI.parse(account).host
|
||||||
end
|
end
|
||||||
|
|
||||||
with address when is_binary(address) <- get_address_from_domain(domain, account),
|
encoded_account = URI.encode("acct:#{account}")
|
||||||
|
|
||||||
|
with address when is_binary(address) <- get_address_from_domain(domain, encoded_account),
|
||||||
{:ok, %{status: status, body: body, headers: headers}} when status in 200..299 <-
|
{:ok, %{status: status, body: body, headers: headers}} when status in 200..299 <-
|
||||||
HTTP.Backoff.get(
|
HTTP.get(
|
||||||
address,
|
address,
|
||||||
[{"accept", "application/xrd+xml,application/jrd+json"}]
|
[{"accept", "application/xrd+xml,application/jrd+json"}]
|
||||||
) do
|
) do
|
||||||
|
@ -230,28 +217,10 @@ defmodule Pleroma.Web.WebFinger do
|
||||||
_ ->
|
_ ->
|
||||||
{:error, {:content_type, nil}}
|
{:error, {:content_type, nil}}
|
||||||
end
|
end
|
||||||
|> case do
|
|
||||||
{:ok, data} -> validate_webfinger(address, data)
|
|
||||||
error -> error
|
|
||||||
end
|
|
||||||
else
|
else
|
||||||
error ->
|
error ->
|
||||||
Logger.debug("Couldn't finger #{account}: #{inspect(error)}")
|
Logger.debug("Couldn't finger #{account}: #{inspect(error)}")
|
||||||
error
|
error
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp validate_webfinger(request_url, %{"subject" => "acct:" <> acct = subject} = data) do
|
|
||||||
with [_name, acct_host] <- String.split(acct, "@"),
|
|
||||||
{_, url} <- {:address, get_address_from_domain(acct_host, subject)},
|
|
||||||
%URI{host: request_host} <- URI.parse(request_url),
|
|
||||||
%URI{host: acct_host} <- URI.parse(url),
|
|
||||||
{_, true} <- {:hosts_match, acct_host == request_host} do
|
|
||||||
{:ok, data}
|
|
||||||
else
|
|
||||||
_ -> {:error, {:webfinger_invalid, request_url, data}}
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defp validate_webfinger(url, data), do: {:error, {:webfinger_invalid, url, data}}
|
|
||||||
end
|
end
|
||||||
|
|
2
mix.exs
2
mix.exs
|
@ -4,7 +4,7 @@ defmodule Pleroma.Mixfile do
|
||||||
def project do
|
def project do
|
||||||
[
|
[
|
||||||
app: :pleroma,
|
app: :pleroma,
|
||||||
version: version("3.13.2"),
|
version: version("3.13.0"),
|
||||||
elixir: "~> 1.14",
|
elixir: "~> 1.14",
|
||||||
elixirc_paths: elixirc_paths(Mix.env()),
|
elixirc_paths: elixirc_paths(Mix.env()),
|
||||||
compilers: Mix.compilers(),
|
compilers: Mix.compilers(),
|
||||||
|
|
|
@ -1,10 +1,37 @@
|
||||||
defmodule Pleroma.Repo.Migrations.UploadFilterExiftoolToExiftoolStripMetadata do
|
defmodule Pleroma.Repo.Migrations.UploadFilterExiftoolToExiftoolStripMetadata do
|
||||||
use Ecto.Migration
|
use Ecto.Migration
|
||||||
|
|
||||||
# 20240425120000_upload_filter_exiftool_to_exiftool_strip_location.exs
|
alias Pleroma.ConfigDB
|
||||||
# was originally committed with the id used in this file, but this breaks
|
|
||||||
# rollback order. Thus it was moved to 20240425120000 and this stub just prevents
|
def up,
|
||||||
# errors during large-scale rollbacks for anyone who already applied the old id
|
do:
|
||||||
def up, do: :ok
|
ConfigDB.get_by_params(%{group: :pleroma, key: Pleroma.Upload})
|
||||||
def down, do: :ok
|
|> update_filtername(
|
||||||
|
Pleroma.Upload.Filter.Exiftool,
|
||||||
|
Pleroma.Upload.Filter.Exiftool.StripMetadata
|
||||||
|
)
|
||||||
|
|
||||||
|
def down,
|
||||||
|
do:
|
||||||
|
ConfigDB.get_by_params(%{group: :pleroma, key: Pleroma.Upload})
|
||||||
|
|> update_filtername(
|
||||||
|
Pleroma.Upload.Filter.Exiftool.StripMetadata,
|
||||||
|
Pleroma.Upload.Filter.Exiftool
|
||||||
|
)
|
||||||
|
|
||||||
|
defp update_filtername(%{value: value}, from_filtername, to_filtername) do
|
||||||
|
new_value =
|
||||||
|
value
|
||||||
|
|> Keyword.update(:filters, [], fn filters ->
|
||||||
|
filters
|
||||||
|
|> Enum.map(fn
|
||||||
|
^from_filtername -> to_filtername
|
||||||
|
filter -> filter
|
||||||
|
end)
|
||||||
|
end)
|
||||||
|
|
||||||
|
ConfigDB.update_or_create(%{group: :pleroma, key: Pleroma.Upload, value: new_value})
|
||||||
|
end
|
||||||
|
|
||||||
|
defp update_filtername(_, _, _), do: nil
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,37 +0,0 @@
|
||||||
defmodule Pleroma.Repo.Migrations.UploadFilterExiftoolToExiftoolStripMetadataReal do
|
|
||||||
use Ecto.Migration
|
|
||||||
|
|
||||||
alias Pleroma.ConfigDB
|
|
||||||
|
|
||||||
def up,
|
|
||||||
do:
|
|
||||||
ConfigDB.get_by_params(%{group: :pleroma, key: Pleroma.Upload})
|
|
||||||
|> update_filtername(
|
|
||||||
Pleroma.Upload.Filter.Exiftool,
|
|
||||||
Pleroma.Upload.Filter.Exiftool.StripMetadata
|
|
||||||
)
|
|
||||||
|
|
||||||
def down,
|
|
||||||
do:
|
|
||||||
ConfigDB.get_by_params(%{group: :pleroma, key: Pleroma.Upload})
|
|
||||||
|> update_filtername(
|
|
||||||
Pleroma.Upload.Filter.Exiftool.StripMetadata,
|
|
||||||
Pleroma.Upload.Filter.Exiftool
|
|
||||||
)
|
|
||||||
|
|
||||||
defp update_filtername(%{value: value}, from_filtername, to_filtername) do
|
|
||||||
new_value =
|
|
||||||
value
|
|
||||||
|> Keyword.update(:filters, [], fn filters ->
|
|
||||||
filters
|
|
||||||
|> Enum.map(fn
|
|
||||||
^from_filtername -> to_filtername
|
|
||||||
filter -> filter
|
|
||||||
end)
|
|
||||||
end)
|
|
||||||
|
|
||||||
ConfigDB.update_or_create(%{group: :pleroma, key: Pleroma.Upload, value: new_value})
|
|
||||||
end
|
|
||||||
|
|
||||||
defp update_filtername(_, _, _), do: nil
|
|
||||||
end
|
|
|
@ -1,64 +0,0 @@
|
||||||
defmodule Pleroma.Repo.Migrations.DropUnusedIndexes do
|
|
||||||
use Ecto.Migration
|
|
||||||
|
|
||||||
def up do
|
|
||||||
# Leftovers from a late Pleroma migration (will not be restored on rollback)
|
|
||||||
drop_i(:users, [:show_birthday], :users_show_birthday_index)
|
|
||||||
|
|
||||||
drop_i(
|
|
||||||
:users,
|
|
||||||
["date_part('month', birthday)", "date_part('day', birthday)"],
|
|
||||||
:users_birthday_month_day_index
|
|
||||||
)
|
|
||||||
|
|
||||||
# Unused
|
|
||||||
drop_i(:activities, ["(data->'cc')"], :activities_cc_index)
|
|
||||||
drop_i(:activities, ["(data->'object'->>'inReplyTo')"], :activities_in_reply_to)
|
|
||||||
drop_i(:activities, ["(data #> '{\"object\",\"likes\"}')"], :activities_likes)
|
|
||||||
drop_i(:activities, ["(data->'to')"], :activities_to_index)
|
|
||||||
|
|
||||||
drop_i(:objects, ["(data->'likes')"], :objects_likes)
|
|
||||||
|
|
||||||
drop_i(:users, [:featured_address], :users_featured_address_index)
|
|
||||||
drop_i(:users, [:following_address], :users_following_address_index)
|
|
||||||
drop_i(:users, [:invisible], :users_invisible_index)
|
|
||||||
drop_i(:users, [:last_status_at], :users_last_status_at_index)
|
|
||||||
drop_i(:users, [:tags], :users_tags_index)
|
|
||||||
|
|
||||||
drop_i(:apps, [:client_id, :client_secret], :apps_client_id_client_secret_index)
|
|
||||||
drop_i(:apps, [:user_id], :apps_user_id_index)
|
|
||||||
|
|
||||||
# Duplicate of primary key index (will not be restored on rollback)
|
|
||||||
drop_i(
|
|
||||||
:user_frontend_setting_profiles,
|
|
||||||
[:user_id, :frontend_name, :profile_name],
|
|
||||||
:user_frontend_setting_profiles_user_id_frontend_name_profile_name_index
|
|
||||||
)
|
|
||||||
end
|
|
||||||
|
|
||||||
def down do
|
|
||||||
create_i(:activities, ["(data->'cc')"], :activities_cc_index, :gin)
|
|
||||||
create_i(:activities, ["(data->'object'->>'inReplyTo')"], :activities_in_reply_to)
|
|
||||||
create_i(:activities, ["(data #> '{\"object\",\"likes\"}')"], :activities_likes, :gin)
|
|
||||||
create_i(:activities, ["(data->'to')"], :activities_to_index, :gin)
|
|
||||||
|
|
||||||
create_i(:objects, ["(data->'likes')"], :objects_likes, :gin)
|
|
||||||
|
|
||||||
create_i(:users, [:featured_address], :users_featured_address_index)
|
|
||||||
create_i(:users, [:following_address], :users_following_address_index)
|
|
||||||
create_i(:users, [:invisible], :users_invisible_index)
|
|
||||||
create_i(:users, [:last_status_at], :users_last_status_at_index)
|
|
||||||
create_i(:users, [:tags], :users_tags_index, :gin)
|
|
||||||
|
|
||||||
create_i(:apps, [:client_id, :client_secret], :apps_client_id_client_secret_index)
|
|
||||||
create_i(:apps, [:user_id], :apps_user_id_index)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp drop_i(table, fields, name) do
|
|
||||||
drop_if_exists(index(table, fields, name: name))
|
|
||||||
end
|
|
||||||
|
|
||||||
defp create_i(table, fields, name, type \\ :btree) do
|
|
||||||
create_if_not_exists(index(table, fields, name: name, using: type))
|
|
||||||
end
|
|
||||||
end
|
|
|
@ -0,0 +1,74 @@
|
||||||
|
defmodule Pleroma.Repo.Migrations.DropUnusedIndexes do
|
||||||
|
use Ecto.Migration
|
||||||
|
|
||||||
|
@disable_ddl_transaction true
|
||||||
|
|
||||||
|
@disable_migration_lock true
|
||||||
|
|
||||||
|
def up do
|
||||||
|
drop_if_exists(
|
||||||
|
index(:activities, ["(data->>'actor')", "inserted_at desc"], name: :activities_actor_index)
|
||||||
|
)
|
||||||
|
|
||||||
|
drop_if_exists(index(:activities, ["(data->'to')"], name: :activities_to_index))
|
||||||
|
|
||||||
|
drop_if_exists(index(:activities, ["(data->'cc')"], name: :activities_cc_index))
|
||||||
|
|
||||||
|
drop_if_exists(index(:activities, ["(split_part(actor, '/', 3))"], name: :activities_hosts))
|
||||||
|
|
||||||
|
drop_if_exists(
|
||||||
|
index(:activities, ["(data->'object'->>'inReplyTo')"], name: :activities_in_reply_to)
|
||||||
|
)
|
||||||
|
|
||||||
|
drop_if_exists(
|
||||||
|
index(:activities, ["((data #> '{\"object\",\"likes\"}'))"], name: :activities_likes)
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
def down do
|
||||||
|
create_if_not_exists(
|
||||||
|
index(:activities, ["(data->>'actor')", "inserted_at desc"],
|
||||||
|
name: :activities_actor_index,
|
||||||
|
concurrently: true
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
create_if_not_exists(
|
||||||
|
index(:activities, ["(data->'to')"],
|
||||||
|
name: :activities_to_index,
|
||||||
|
using: :gin,
|
||||||
|
concurrently: true
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
create_if_not_exists(
|
||||||
|
index(:activities, ["(data->'cc')"],
|
||||||
|
name: :activities_cc_index,
|
||||||
|
using: :gin,
|
||||||
|
concurrently: true
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
create_if_not_exists(
|
||||||
|
index(:activities, ["(split_part(actor, '/', 3))"],
|
||||||
|
name: :activities_hosts,
|
||||||
|
concurrently: true
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
create_if_not_exists(
|
||||||
|
index(:activities, ["(data->'object'->>'inReplyTo')"],
|
||||||
|
name: :activities_in_reply_to,
|
||||||
|
concurrently: true
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
create_if_not_exists(
|
||||||
|
index(:activities, ["((data #> '{\"object\",\"likes\"}'))"],
|
||||||
|
name: :activities_likes,
|
||||||
|
using: :gin,
|
||||||
|
concurrently: true
|
||||||
|
)
|
||||||
|
)
|
||||||
|
end
|
||||||
|
end
|
3
test/fixtures/tesla_mock/bad.com_host_meta
vendored
3
test/fixtures/tesla_mock/bad.com_host_meta
vendored
|
@ -1,3 +0,0 @@
|
||||||
<XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0">
|
|
||||||
<Link rel="lrdd" template="https://bad.com/.well-known/webfinger?resource={uri}" type="application/xrd+xml" />
|
|
||||||
</XRD>
|
|
28
test/fixtures/tesla_mock/webfinger_spoof.json
vendored
28
test/fixtures/tesla_mock/webfinger_spoof.json
vendored
|
@ -1,28 +0,0 @@
|
||||||
{
|
|
||||||
"aliases": [
|
|
||||||
"https://bad.com/users/meanie",
|
|
||||||
"https://anotherbad.social/users/meanie"
|
|
||||||
],
|
|
||||||
"links": [
|
|
||||||
{
|
|
||||||
"href": "https://bad.com/users/meanie",
|
|
||||||
"rel": "http://webfinger.net/rel/profile-page",
|
|
||||||
"type": "text/html"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"href": "https://bad.com/users/meanie",
|
|
||||||
"rel": "self",
|
|
||||||
"type": "application/activity+json"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"href": "https://bad.com/users/meanie",
|
|
||||||
"rel": "self",
|
|
||||||
"type": "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"rel": "http://ostatus.org/schema/1.0/subscribe",
|
|
||||||
"template": "https://bad.com/ostatus_subscribe?acct={uri}"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"subject": "acct:oopsie@notwhereitshouldbe.org"
|
|
||||||
}
|
|
|
@ -1,51 +0,0 @@
|
||||||
{
|
|
||||||
"@context": [
|
|
||||||
"https://www.w3.org/ns/activitystreams",
|
|
||||||
{
|
|
||||||
"blurhash": "toot:blurhash",
|
|
||||||
"Emoji": "toot:Emoji",
|
|
||||||
"focalPoint": {
|
|
||||||
"@container": "@list",
|
|
||||||
"@id": "toot:focalPoint"
|
|
||||||
},
|
|
||||||
"Hashtag": "as:Hashtag",
|
|
||||||
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
|
|
||||||
"sensitive": "as:sensitive",
|
|
||||||
"toot": "http://joinmastodon.org/ns#",
|
|
||||||
"votersCount": "toot:votersCount",
|
|
||||||
"featured": {
|
|
||||||
"@id": "toot:featured",
|
|
||||||
"@type": "@id"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"https://w3id.org/security/v1"
|
|
||||||
],
|
|
||||||
"id": "https://fedi.vision/@vote@fedi.vision/",
|
|
||||||
"type": "Person",
|
|
||||||
"toot:discoverable": true,
|
|
||||||
"inbox": "https://fedi.vision/@vote@fedi.vision/inbox/",
|
|
||||||
"publicKey": {
|
|
||||||
"id": "https://fedi.vision/@vote@fedi.vision/#main-key",
|
|
||||||
"owner": "https://fedi.vision/@vote@fedi.vision/",
|
|
||||||
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAj2f+uQtdoBO9X/u2Qso4\nxHYdfy8zB24m9Gg982/ts88DAMLxZUzX0JsBWT7coL0Ipf4NSbVaqS6nKrr2P8Qs\nf97wMhowyuYxK22BMPcbpfZkFj3tVT/JkDx2iujBJJ5ZBO5KRlupjDTqV4rOAY7F\n58ad0jK9PsJNJMsJ/b8+0t3Q/K+RqCGVmtK+iPSigOYoiKoquyRzHLTfP+mpOlDa\n3f+uyAbFya7CpcgBx1zz0PALWA+oh/zhZK4yT6719Esa8SDcoJ0ws70zMxWekq1A\n3ia88/Io6SY2qFNBpzzXGO3JK8OFRFtmPV8ZfAh5Pv6y52iuTJ21kxjAG7ZTP/fY\nBQIDAQAB\n-----END PUBLIC KEY-----\n"
|
|
||||||
},
|
|
||||||
"attachment": {
|
|
||||||
"haha": "you expected a proper object, but it was me, random nonsense"
|
|
||||||
},
|
|
||||||
"endpoints": {
|
|
||||||
"sharedInbox": "https://fedi.vision/inbox/"
|
|
||||||
},
|
|
||||||
"followers": "https://fedi.vision/@vote@fedi.vision/followers/",
|
|
||||||
"following": "https://fedi.vision/@vote@fedi.vision/following/",
|
|
||||||
"icon": {
|
|
||||||
"type": "Image",
|
|
||||||
"mediaType": "image/webp",
|
|
||||||
"url": "https://eu-central-1.linodeobjects.com:443/st4/profile_images/2024/5/9/RwqTbeYx16gauXPXvt-CaysOnGw.webp"
|
|
||||||
},
|
|
||||||
"name": "FediVision Vote Bot",
|
|
||||||
"outbox": "https://fedi.vision/@vote@fedi.vision/outbox/",
|
|
||||||
"preferredUsername": "vote",
|
|
||||||
"published": "2024-05-09T09:04:04Z",
|
|
||||||
"summary": "<p>New in 2024, this is the bot that will count your #Fedivision vote! Accept no substitutes!</p><p>Send this account a toot in the form<br> vote ABCD EFGH IJKL<br>substituting the (up to) three codes for the songs you want to win. Punctuation ignored, case insensitive, order is unimportant. Only your latest toot counts, so change your vote with a new toot.</p>",
|
|
||||||
"url": "https://fedi.vision/@vote/"
|
|
||||||
}
|
|
53
test/fixtures/users_mock/takahe_user.json
vendored
53
test/fixtures/users_mock/takahe_user.json
vendored
|
@ -1,53 +0,0 @@
|
||||||
{
|
|
||||||
"@context": [
|
|
||||||
"https://www.w3.org/ns/activitystreams",
|
|
||||||
{
|
|
||||||
"blurhash": "toot:blurhash",
|
|
||||||
"Emoji": "toot:Emoji",
|
|
||||||
"focalPoint": {
|
|
||||||
"@container": "@list",
|
|
||||||
"@id": "toot:focalPoint"
|
|
||||||
},
|
|
||||||
"Hashtag": "as:Hashtag",
|
|
||||||
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
|
|
||||||
"sensitive": "as:sensitive",
|
|
||||||
"toot": "http://joinmastodon.org/ns#",
|
|
||||||
"votersCount": "toot:votersCount",
|
|
||||||
"featured": {
|
|
||||||
"@id": "toot:featured",
|
|
||||||
"@type": "@id"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"https://w3id.org/security/v1"
|
|
||||||
],
|
|
||||||
"id": "https://fedi.vision/@vote@fedi.vision/",
|
|
||||||
"type": "Person",
|
|
||||||
"toot:discoverable": true,
|
|
||||||
"inbox": "https://fedi.vision/@vote@fedi.vision/inbox/",
|
|
||||||
"publicKey": {
|
|
||||||
"id": "https://fedi.vision/@vote@fedi.vision/#main-key",
|
|
||||||
"owner": "https://fedi.vision/@vote@fedi.vision/",
|
|
||||||
"publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAj2f+uQtdoBO9X/u2Qso4\nxHYdfy8zB24m9Gg982/ts88DAMLxZUzX0JsBWT7coL0Ipf4NSbVaqS6nKrr2P8Qs\nf97wMhowyuYxK22BMPcbpfZkFj3tVT/JkDx2iujBJJ5ZBO5KRlupjDTqV4rOAY7F\n58ad0jK9PsJNJMsJ/b8+0t3Q/K+RqCGVmtK+iPSigOYoiKoquyRzHLTfP+mpOlDa\n3f+uyAbFya7CpcgBx1zz0PALWA+oh/zhZK4yT6719Esa8SDcoJ0ws70zMxWekq1A\n3ia88/Io6SY2qFNBpzzXGO3JK8OFRFtmPV8ZfAh5Pv6y52iuTJ21kxjAG7ZTP/fY\nBQIDAQAB\n-----END PUBLIC KEY-----\n"
|
|
||||||
},
|
|
||||||
"attachment": {
|
|
||||||
"type": "PropertyValue",
|
|
||||||
"value": "<a href=\"https://fedivision.party/vote\" rel=\"nofollow\"><span class=\"invisible\">https://</span>fedivision.party/vote</a>",
|
|
||||||
"name": "More details"
|
|
||||||
},
|
|
||||||
"endpoints": {
|
|
||||||
"sharedInbox": "https://fedi.vision/inbox/"
|
|
||||||
},
|
|
||||||
"followers": "https://fedi.vision/@vote@fedi.vision/followers/",
|
|
||||||
"following": "https://fedi.vision/@vote@fedi.vision/following/",
|
|
||||||
"icon": {
|
|
||||||
"type": "Image",
|
|
||||||
"mediaType": "image/webp",
|
|
||||||
"url": "https://eu-central-1.linodeobjects.com:443/st4/profile_images/2024/5/9/RwqTbeYx16gauXPXvt-CaysOnGw.webp"
|
|
||||||
},
|
|
||||||
"name": "FediVision Vote Bot",
|
|
||||||
"outbox": "https://fedi.vision/@vote@fedi.vision/outbox/",
|
|
||||||
"preferredUsername": "vote",
|
|
||||||
"published": "2024-05-09T09:04:04Z",
|
|
||||||
"summary": "<p>New in 2024, this is the bot that will count your #Fedivision vote! Accept no substitutes!</p><p>Send this account a toot in the form<br> vote ABCD EFGH IJKL<br>substituting the (up to) three codes for the songs you want to win. Punctuation ignored, case insensitive, order is unimportant. Only your latest toot counts, so change your vote with a new toot.</p>",
|
|
||||||
"url": "https://fedi.vision/@vote/"
|
|
||||||
}
|
|
41
test/fixtures/webfinger/imposter-webfinger.json
vendored
41
test/fixtures/webfinger/imposter-webfinger.json
vendored
|
@ -1,41 +0,0 @@
|
||||||
{
|
|
||||||
"subject": "acct:oopsie@notwhereitshouldbe.com",
|
|
||||||
"aliases": [
|
|
||||||
"https://bad.com/webfingertest"
|
|
||||||
],
|
|
||||||
"links": [
|
|
||||||
{
|
|
||||||
"rel": "http://webfinger.net/rel/profile-page",
|
|
||||||
"type": "text/html",
|
|
||||||
"href": "https://bad.com/webfingertest"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"rel": "self",
|
|
||||||
"type": "application/activity+json",
|
|
||||||
"href": "https://bad.com/webfingertest"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"rel": "http://ostatus.org/schema/1.0/subscribe",
|
|
||||||
"template": "https://bad.com/contact/follow?url={uri}"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"rel": "http://schemas.google.com/g/2010#updates-from",
|
|
||||||
"type": "application/atom+xml",
|
|
||||||
"href": ""
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"rel": "salmon",
|
|
||||||
"href": "https://bad.com/salmon/friendica"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"rel": "http://microformats.org/profile/hcard",
|
|
||||||
"type": "text/html",
|
|
||||||
"href": "https://bad.com/hcard/friendica"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"rel": "http://joindiaspora.com/seed_location",
|
|
||||||
"type": "text/html",
|
|
||||||
"href": "https://bad.com"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
|
@ -3,7 +3,7 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Mix.Tasks.Pleroma.AppTest do
|
defmodule Mix.Tasks.Pleroma.AppTest do
|
||||||
use Pleroma.DataCase, async: false
|
use Pleroma.DataCase, async: true
|
||||||
|
|
||||||
setup_all do
|
setup_all do
|
||||||
Mix.shell(Mix.Shell.Process)
|
Mix.shell(Mix.Shell.Process)
|
||||||
|
@ -50,13 +50,13 @@ defmodule Mix.Tasks.Pleroma.AppTest do
|
||||||
defp assert_app(name, redirect, scopes) do
|
defp assert_app(name, redirect, scopes) do
|
||||||
app = Repo.get_by(Pleroma.Web.OAuth.App, client_name: name)
|
app = Repo.get_by(Pleroma.Web.OAuth.App, client_name: name)
|
||||||
|
|
||||||
assert_receive {:mix_shell, :info, [message]}, 5_000
|
assert_receive {:mix_shell, :info, [message]}, 1_000
|
||||||
assert message == "#{name} successfully created:"
|
assert message == "#{name} successfully created:"
|
||||||
|
|
||||||
assert_receive {:mix_shell, :info, [message]}, 5_000
|
assert_receive {:mix_shell, :info, [message]}, 1_000
|
||||||
assert message == "App client_id: #{app.client_id}"
|
assert message == "App client_id: #{app.client_id}"
|
||||||
|
|
||||||
assert_receive {:mix_shell, :info, [message]}, 5_000
|
assert_receive {:mix_shell, :info, [message]}, 1_000
|
||||||
assert message == "App client_secret: #{app.client_secret}"
|
assert message == "App client_secret: #{app.client_secret}"
|
||||||
|
|
||||||
assert app.scopes == scopes
|
assert app.scopes == scopes
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Mix.Tasks.Pleroma.DatabaseTest do
|
defmodule Mix.Tasks.Pleroma.DatabaseTest do
|
||||||
use Pleroma.DataCase, async: false
|
use Pleroma.DataCase, async: true
|
||||||
use Oban.Testing, repo: Pleroma.Repo
|
use Oban.Testing, repo: Pleroma.Repo
|
||||||
|
|
||||||
alias Pleroma.Activity
|
alias Pleroma.Activity
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Mix.Tasks.Pleroma.Ecto.RollbackTest do
|
defmodule Mix.Tasks.Pleroma.Ecto.RollbackTest do
|
||||||
use Pleroma.DataCase, async: false
|
use Pleroma.DataCase, async: true
|
||||||
import ExUnit.CaptureLog
|
import ExUnit.CaptureLog
|
||||||
require Logger
|
require Logger
|
||||||
|
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Mix.Tasks.Pleroma.EctoTest do
|
defmodule Mix.Tasks.Pleroma.EctoTest do
|
||||||
use ExUnit.Case, async: false
|
use ExUnit.Case, async: true
|
||||||
|
|
||||||
test "raise on bad path" do
|
test "raise on bad path" do
|
||||||
assert_raise RuntimeError, ~r/Could not find migrations directory/, fn ->
|
assert_raise RuntimeError, ~r/Could not find migrations directory/, fn ->
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Mix.Tasks.Pleroma.EmojiTest do
|
defmodule Mix.Tasks.Pleroma.EmojiTest do
|
||||||
use ExUnit.Case, async: false
|
use ExUnit.Case, async: true
|
||||||
|
|
||||||
import ExUnit.CaptureIO
|
import ExUnit.CaptureIO
|
||||||
import Tesla.Mock
|
import Tesla.Mock
|
||||||
|
|
|
@ -41,26 +41,6 @@ defmodule Pleroma.ActivityTest do
|
||||||
assert activity == found_activity
|
assert activity == found_activity
|
||||||
end
|
end
|
||||||
|
|
||||||
test "returns activities by object's AP id in requested presorted order" do
|
|
||||||
a1 = insert(:note_activity)
|
|
||||||
o1 = Object.normalize(a1, fetch: false).data["id"]
|
|
||||||
|
|
||||||
a2 = insert(:note_activity)
|
|
||||||
o2 = Object.normalize(a2, fetch: false).data["id"]
|
|
||||||
|
|
||||||
a3 = insert(:note_activity)
|
|
||||||
o3 = Object.normalize(a3, fetch: false).data["id"]
|
|
||||||
|
|
||||||
a4 = insert(:note_activity)
|
|
||||||
o4 = Object.normalize(a4, fetch: false).data["id"]
|
|
||||||
|
|
||||||
found_activities =
|
|
||||||
Activity.get_presorted_create_by_object_ap_id([o3, o2, o4, o1])
|
|
||||||
|> Repo.all()
|
|
||||||
|
|
||||||
assert found_activities == [a3, a2, a4, a1]
|
|
||||||
end
|
|
||||||
|
|
||||||
test "preloading a bookmark" do
|
test "preloading a bookmark" do
|
||||||
user = insert(:user)
|
user = insert(:user)
|
||||||
user2 = insert(:user)
|
user2 = insert(:user)
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
|
|
||||||
defmodule Pleroma.Emoji.FormatterTest do
|
defmodule Pleroma.Emoji.FormatterTest do
|
||||||
alias Pleroma.Emoji.Formatter
|
alias Pleroma.Emoji.Formatter
|
||||||
use Pleroma.DataCase, async: false
|
use Pleroma.DataCase, async: true
|
||||||
|
|
||||||
describe "emojify" do
|
describe "emojify" do
|
||||||
test "it adds cool emoji" do
|
test "it adds cool emoji" do
|
||||||
|
|
|
@ -1,105 +0,0 @@
|
||||||
defmodule Pleroma.HTTP.BackoffTest do
|
|
||||||
@backoff_cache :http_backoff_cache
|
|
||||||
use Pleroma.DataCase, async: false
|
|
||||||
alias Pleroma.HTTP.Backoff
|
|
||||||
|
|
||||||
defp within_tolerance?(ttl, expected) do
|
|
||||||
ttl > expected - 15 and ttl < expected + 15
|
|
||||||
end
|
|
||||||
|
|
||||||
describe "get/3" do
|
|
||||||
test "should return {:ok, env} when not rate limited" do
|
|
||||||
Tesla.Mock.mock_global(fn
|
|
||||||
%Tesla.Env{url: "https://akkoma.dev/api/v1/instance"} ->
|
|
||||||
{:ok, %Tesla.Env{status: 200, body: "ok"}}
|
|
||||||
end)
|
|
||||||
|
|
||||||
assert {:ok, env} = Backoff.get("https://akkoma.dev/api/v1/instance")
|
|
||||||
assert env.status == 200
|
|
||||||
end
|
|
||||||
|
|
||||||
test "should return {:error, env} when rate limited" do
|
|
||||||
# Shove a value into the cache to simulate a rate limit
|
|
||||||
Cachex.put(@backoff_cache, "akkoma.dev", true)
|
|
||||||
assert {:error, :ratelimit} = Backoff.get("https://akkoma.dev/api/v1/instance")
|
|
||||||
end
|
|
||||||
|
|
||||||
test "should insert a value into the cache when rate limited" do
|
|
||||||
Tesla.Mock.mock_global(fn
|
|
||||||
%Tesla.Env{url: "https://ratelimited.dev/api/v1/instance"} ->
|
|
||||||
{:ok, %Tesla.Env{status: 429, body: "Rate limited"}}
|
|
||||||
end)
|
|
||||||
|
|
||||||
assert {:error, :ratelimit} = Backoff.get("https://ratelimited.dev/api/v1/instance")
|
|
||||||
assert {:ok, true} = Cachex.get(@backoff_cache, "ratelimited.dev")
|
|
||||||
end
|
|
||||||
|
|
||||||
test "should insert a value into the cache when rate limited with a 503 response" do
|
|
||||||
Tesla.Mock.mock_global(fn
|
|
||||||
%Tesla.Env{url: "https://ratelimited.dev/api/v1/instance"} ->
|
|
||||||
{:ok, %Tesla.Env{status: 503, body: "Rate limited"}}
|
|
||||||
end)
|
|
||||||
|
|
||||||
assert {:error, :ratelimit} = Backoff.get("https://ratelimited.dev/api/v1/instance")
|
|
||||||
assert {:ok, true} = Cachex.get(@backoff_cache, "ratelimited.dev")
|
|
||||||
end
|
|
||||||
|
|
||||||
test "should parse the value of x-ratelimit-reset, if present" do
|
|
||||||
ten_minutes_from_now =
|
|
||||||
DateTime.utc_now() |> Timex.shift(minutes: 10) |> DateTime.to_iso8601()
|
|
||||||
|
|
||||||
Tesla.Mock.mock_global(fn
|
|
||||||
%Tesla.Env{url: "https://ratelimited.dev/api/v1/instance"} ->
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 429,
|
|
||||||
body: "Rate limited",
|
|
||||||
headers: [{"x-ratelimit-reset", ten_minutes_from_now}]
|
|
||||||
}}
|
|
||||||
end)
|
|
||||||
|
|
||||||
assert {:error, :ratelimit} = Backoff.get("https://ratelimited.dev/api/v1/instance")
|
|
||||||
assert {:ok, true} = Cachex.get(@backoff_cache, "ratelimited.dev")
|
|
||||||
{:ok, ttl} = Cachex.ttl(@backoff_cache, "ratelimited.dev")
|
|
||||||
assert within_tolerance?(ttl, 600)
|
|
||||||
end
|
|
||||||
|
|
||||||
test "should parse the value of retry-after when it's a timestamp" do
|
|
||||||
ten_minutes_from_now =
|
|
||||||
DateTime.utc_now() |> Timex.shift(minutes: 10) |> DateTime.to_iso8601()
|
|
||||||
|
|
||||||
Tesla.Mock.mock_global(fn
|
|
||||||
%Tesla.Env{url: "https://ratelimited.dev/api/v1/instance"} ->
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 429,
|
|
||||||
body: "Rate limited",
|
|
||||||
headers: [{"retry-after", ten_minutes_from_now}]
|
|
||||||
}}
|
|
||||||
end)
|
|
||||||
|
|
||||||
assert {:error, :ratelimit} = Backoff.get("https://ratelimited.dev/api/v1/instance")
|
|
||||||
assert {:ok, true} = Cachex.get(@backoff_cache, "ratelimited.dev")
|
|
||||||
{:ok, ttl} = Cachex.ttl(@backoff_cache, "ratelimited.dev")
|
|
||||||
assert within_tolerance?(ttl, 600)
|
|
||||||
end
|
|
||||||
|
|
||||||
test "should parse the value of retry-after when it's a number of seconds" do
|
|
||||||
Tesla.Mock.mock_global(fn
|
|
||||||
%Tesla.Env{url: "https://ratelimited.dev/api/v1/instance"} ->
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 429,
|
|
||||||
body: "Rate limited",
|
|
||||||
headers: [{"retry-after", "600"}]
|
|
||||||
}}
|
|
||||||
end)
|
|
||||||
|
|
||||||
assert {:error, :ratelimit} = Backoff.get("https://ratelimited.dev/api/v1/instance")
|
|
||||||
assert {:ok, true} = Cachex.get(@backoff_cache, "ratelimited.dev")
|
|
||||||
# assert that the value is 10 minutes from now
|
|
||||||
{:ok, ttl} = Cachex.ttl(@backoff_cache, "ratelimited.dev")
|
|
||||||
assert within_tolerance?(ttl, 600)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
|
@ -153,11 +153,6 @@ defmodule Pleroma.SignatureTest do
|
||||||
{:ok, "https://example.com/users/1234"}
|
{:ok, "https://example.com/users/1234"}
|
||||||
end
|
end
|
||||||
|
|
||||||
test "it deduces the actor ID for bridgy" do
|
|
||||||
assert Signature.key_id_to_actor_id("https://example.com/1234#key") ==
|
|
||||||
{:ok, "https://example.com/1234"}
|
|
||||||
end
|
|
||||||
|
|
||||||
test "it calls webfinger for 'acct:' accounts" do
|
test "it calls webfinger for 'acct:' accounts" do
|
||||||
with_mock(Pleroma.Web.WebFinger,
|
with_mock(Pleroma.Web.WebFinger,
|
||||||
finger: fn _ -> {:ok, %{"ap_id" => "https://gensokyo.2hu/users/raymoo"}} end
|
finger: fn _ -> {:ok, %{"ap_id" => "https://gensokyo.2hu/users/raymoo"}} end
|
||||||
|
|
|
@ -3,7 +3,7 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.Upload.Filter.Exiftool.ReadDescriptionTest do
|
defmodule Pleroma.Upload.Filter.Exiftool.ReadDescriptionTest do
|
||||||
use Pleroma.DataCase, async: false
|
use Pleroma.DataCase, async: true
|
||||||
alias Pleroma.Upload.Filter
|
alias Pleroma.Upload.Filter
|
||||||
|
|
||||||
@uploads %Pleroma.Upload{
|
@uploads %Pleroma.Upload{
|
||||||
|
|
|
@ -765,19 +765,109 @@ defmodule Pleroma.UserTest do
|
||||||
setup do: clear_config([Pleroma.Web.WebFinger, :update_nickname_on_user_fetch], true)
|
setup do: clear_config([Pleroma.Web.WebFinger, :update_nickname_on_user_fetch], true)
|
||||||
|
|
||||||
test "for mastodon" do
|
test "for mastodon" do
|
||||||
ap_id = "a@mastodon.example"
|
Tesla.Mock.mock(fn
|
||||||
|
%{url: "https://example.com/.well-known/host-meta"} ->
|
||||||
|
%Tesla.Env{
|
||||||
|
status: 302,
|
||||||
|
headers: [{"location", "https://sub.example.com/.well-known/host-meta"}]
|
||||||
|
}
|
||||||
|
|
||||||
|
%{url: "https://sub.example.com/.well-known/host-meta"} ->
|
||||||
|
%Tesla.Env{
|
||||||
|
status: 200,
|
||||||
|
body:
|
||||||
|
"test/fixtures/webfinger/masto-host-meta.xml"
|
||||||
|
|> File.read!()
|
||||||
|
|> String.replace("{{domain}}", "sub.example.com")
|
||||||
|
}
|
||||||
|
|
||||||
|
%{url: "https://sub.example.com/.well-known/webfinger?resource=acct:a@example.com"} ->
|
||||||
|
%Tesla.Env{
|
||||||
|
status: 200,
|
||||||
|
body:
|
||||||
|
"test/fixtures/webfinger/masto-webfinger.json"
|
||||||
|
|> File.read!()
|
||||||
|
|> String.replace("{{nickname}}", "a")
|
||||||
|
|> String.replace("{{domain}}", "example.com")
|
||||||
|
|> String.replace("{{subdomain}}", "sub.example.com"),
|
||||||
|
headers: [{"content-type", "application/jrd+json"}]
|
||||||
|
}
|
||||||
|
|
||||||
|
%{url: "https://sub.example.com/users/a"} ->
|
||||||
|
%Tesla.Env{
|
||||||
|
status: 200,
|
||||||
|
body:
|
||||||
|
"test/fixtures/webfinger/masto-user.json"
|
||||||
|
|> File.read!()
|
||||||
|
|> String.replace("{{nickname}}", "a")
|
||||||
|
|> String.replace("{{domain}}", "sub.example.com"),
|
||||||
|
headers: [{"content-type", "application/activity+json"}]
|
||||||
|
}
|
||||||
|
|
||||||
|
%{url: "https://sub.example.com/users/a/collections/featured"} ->
|
||||||
|
%Tesla.Env{
|
||||||
|
status: 200,
|
||||||
|
body:
|
||||||
|
File.read!("test/fixtures/users_mock/masto_featured.json")
|
||||||
|
|> String.replace("{{domain}}", "sub.example.com")
|
||||||
|
|> String.replace("{{nickname}}", "a"),
|
||||||
|
headers: [{"content-type", "application/activity+json"}]
|
||||||
|
}
|
||||||
|
end)
|
||||||
|
|
||||||
|
ap_id = "a@example.com"
|
||||||
{:ok, fetched_user} = User.get_or_fetch(ap_id)
|
{:ok, fetched_user} = User.get_or_fetch(ap_id)
|
||||||
|
|
||||||
assert fetched_user.ap_id == "https://sub.mastodon.example/users/a"
|
assert fetched_user.ap_id == "https://sub.example.com/users/a"
|
||||||
assert fetched_user.nickname == "a@mastodon.example"
|
assert fetched_user.nickname == "a@example.com"
|
||||||
end
|
end
|
||||||
|
|
||||||
test "for pleroma" do
|
test "for pleroma" do
|
||||||
ap_id = "a@pleroma.example"
|
Tesla.Mock.mock(fn
|
||||||
|
%{url: "https://example.com/.well-known/host-meta"} ->
|
||||||
|
%Tesla.Env{
|
||||||
|
status: 302,
|
||||||
|
headers: [{"location", "https://sub.example.com/.well-known/host-meta"}]
|
||||||
|
}
|
||||||
|
|
||||||
|
%{url: "https://sub.example.com/.well-known/host-meta"} ->
|
||||||
|
%Tesla.Env{
|
||||||
|
status: 200,
|
||||||
|
body:
|
||||||
|
"test/fixtures/webfinger/pleroma-host-meta.xml"
|
||||||
|
|> File.read!()
|
||||||
|
|> String.replace("{{domain}}", "sub.example.com")
|
||||||
|
}
|
||||||
|
|
||||||
|
%{url: "https://sub.example.com/.well-known/webfinger?resource=acct:a@example.com"} ->
|
||||||
|
%Tesla.Env{
|
||||||
|
status: 200,
|
||||||
|
body:
|
||||||
|
"test/fixtures/webfinger/pleroma-webfinger.json"
|
||||||
|
|> File.read!()
|
||||||
|
|> String.replace("{{nickname}}", "a")
|
||||||
|
|> String.replace("{{domain}}", "example.com")
|
||||||
|
|> String.replace("{{subdomain}}", "sub.example.com"),
|
||||||
|
headers: [{"content-type", "application/jrd+json"}]
|
||||||
|
}
|
||||||
|
|
||||||
|
%{url: "https://sub.example.com/users/a"} ->
|
||||||
|
%Tesla.Env{
|
||||||
|
status: 200,
|
||||||
|
body:
|
||||||
|
"test/fixtures/webfinger/pleroma-user.json"
|
||||||
|
|> File.read!()
|
||||||
|
|> String.replace("{{nickname}}", "a")
|
||||||
|
|> String.replace("{{domain}}", "sub.example.com"),
|
||||||
|
headers: [{"content-type", "application/activity+json"}]
|
||||||
|
}
|
||||||
|
end)
|
||||||
|
|
||||||
|
ap_id = "a@example.com"
|
||||||
{:ok, fetched_user} = User.get_or_fetch(ap_id)
|
{:ok, fetched_user} = User.get_or_fetch(ap_id)
|
||||||
|
|
||||||
assert fetched_user.ap_id == "https://sub.pleroma.example/users/a"
|
assert fetched_user.ap_id == "https://sub.example.com/users/a"
|
||||||
assert fetched_user.nickname == "a@pleroma.example"
|
assert fetched_user.nickname == "a@example.com"
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -233,48 +233,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
test "works for takahe actors" do
|
|
||||||
user_id = "https://fedi.vision/@vote@fedi.vision/"
|
|
||||||
|
|
||||||
Tesla.Mock.mock(fn
|
|
||||||
%{method: :get, url: ^user_id} ->
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 200,
|
|
||||||
body: File.read!("test/fixtures/users_mock/takahe_user.json"),
|
|
||||||
headers: [{"content-type", "application/activity+json"}]
|
|
||||||
}
|
|
||||||
end)
|
|
||||||
|
|
||||||
{:ok, user} = ActivityPub.make_user_from_ap_id(user_id)
|
|
||||||
|
|
||||||
assert user.actor_type == "Person"
|
|
||||||
|
|
||||||
assert [
|
|
||||||
%{
|
|
||||||
"name" => "More details"
|
|
||||||
}
|
|
||||||
] = user.fields
|
|
||||||
end
|
|
||||||
|
|
||||||
test "works for actors with malformed attachment fields" do
|
|
||||||
user_id = "https://fedi.vision/@vote@fedi.vision/"
|
|
||||||
|
|
||||||
Tesla.Mock.mock(fn
|
|
||||||
%{method: :get, url: ^user_id} ->
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 200,
|
|
||||||
body: File.read!("test/fixtures/users_mock/nonsense_attachment_user.json"),
|
|
||||||
headers: [{"content-type", "application/activity+json"}]
|
|
||||||
}
|
|
||||||
end)
|
|
||||||
|
|
||||||
{:ok, user} = ActivityPub.make_user_from_ap_id(user_id)
|
|
||||||
|
|
||||||
assert user.actor_type == "Person"
|
|
||||||
|
|
||||||
assert [] = user.fields
|
|
||||||
end
|
|
||||||
|
|
||||||
test "fetches user featured collection" do
|
test "fetches user featured collection" do
|
||||||
ap_id = "https://example.com/users/lain"
|
ap_id = "https://example.com/users/lain"
|
||||||
|
|
||||||
|
@ -325,7 +283,9 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
|
||||||
body: featured_data,
|
body: featured_data,
|
||||||
headers: [{"content-type", "application/activity+json"}]
|
headers: [{"content-type", "application/activity+json"}]
|
||||||
}
|
}
|
||||||
|
end)
|
||||||
|
|
||||||
|
Tesla.Mock.mock_global(fn
|
||||||
%{
|
%{
|
||||||
method: :get,
|
method: :get,
|
||||||
url: ^object_url
|
url: ^object_url
|
||||||
|
@ -338,8 +298,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubTest do
|
||||||
end)
|
end)
|
||||||
|
|
||||||
{:ok, user} = ActivityPub.make_user_from_ap_id(ap_id)
|
{:ok, user} = ActivityPub.make_user_from_ap_id(ap_id)
|
||||||
# wait for oban
|
Process.sleep(50)
|
||||||
Pleroma.Tests.ObanHelpers.perform_all()
|
|
||||||
|
|
||||||
assert user.featured_address == featured_url
|
assert user.featured_address == featured_url
|
||||||
assert Map.has_key?(user.pinned_objects, object_url)
|
assert Map.has_key?(user.pinned_objects, object_url)
|
||||||
|
|
|
@ -590,22 +590,41 @@ defmodule Pleroma.Web.CommonAPI.UtilsTest do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
describe "attachments_from_ids/1" do
|
describe "attachments_from_ids_descs/2" do
|
||||||
test "returns attachments without descs" do
|
test "returns [] when attachment ids is empty" do
|
||||||
user = insert(:user)
|
assert Utils.attachments_from_ids_descs([], "{}") == []
|
||||||
object = insert(:attachment, user: user)
|
|
||||||
assert Utils.attachments_from_ids(user, %{media_ids: ["#{object.id}"]}) == [object.data]
|
|
||||||
end
|
end
|
||||||
|
|
||||||
test "returns [] when passed non-media object ids" do
|
test "returns list attachments with desc" do
|
||||||
user = insert(:user)
|
object = insert(:note)
|
||||||
object = insert(:note, user: user)
|
desc = Jason.encode!(%{object.id => "test-desc"})
|
||||||
assert Utils.attachments_from_ids(user, %{media_ids: ["#{object.id}"]}) == []
|
|
||||||
|
assert Utils.attachments_from_ids_descs(["#{object.id}", "34"], desc) == [
|
||||||
|
Map.merge(object.data, %{"name" => "test-desc"})
|
||||||
|
]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
describe "attachments_from_ids/1" do
|
||||||
|
test "returns attachments with descs" do
|
||||||
|
object = insert(:note)
|
||||||
|
desc = Jason.encode!(%{object.id => "test-desc"})
|
||||||
|
|
||||||
|
assert Utils.attachments_from_ids(%{
|
||||||
|
media_ids: ["#{object.id}"],
|
||||||
|
descriptions: desc
|
||||||
|
}) == [
|
||||||
|
Map.merge(object.data, %{"name" => "test-desc"})
|
||||||
|
]
|
||||||
|
end
|
||||||
|
|
||||||
|
test "returns attachments without descs" do
|
||||||
|
object = insert(:note)
|
||||||
|
assert Utils.attachments_from_ids(%{media_ids: ["#{object.id}"]}) == [object.data]
|
||||||
end
|
end
|
||||||
|
|
||||||
test "returns [] when not pass media_ids" do
|
test "returns [] when not pass media_ids" do
|
||||||
user = insert(:user)
|
assert Utils.attachments_from_ids(%{}) == []
|
||||||
assert Utils.attachments_from_ids(user, %{}) == []
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -6,7 +6,6 @@ defmodule Pleroma.Web.MastodonAPI.MediaControllerTest do
|
||||||
use Pleroma.Web.ConnCase, async: false
|
use Pleroma.Web.ConnCase, async: false
|
||||||
|
|
||||||
import ExUnit.CaptureLog
|
import ExUnit.CaptureLog
|
||||||
import Pleroma.Factory
|
|
||||||
|
|
||||||
alias Pleroma.Object
|
alias Pleroma.Object
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
|
@ -175,18 +174,6 @@ defmodule Pleroma.Web.MastodonAPI.MediaControllerTest do
|
||||||
assert media["description"] == "test-media"
|
assert media["description"] == "test-media"
|
||||||
assert refresh_record(object).data["name"] == "test-media"
|
assert refresh_record(object).data["name"] == "test-media"
|
||||||
end
|
end
|
||||||
|
|
||||||
test "won't update non-media", %{conn: conn, user: user} do
|
|
||||||
object = insert(:note, user: user)
|
|
||||||
|
|
||||||
response =
|
|
||||||
conn
|
|
||||||
|> put_req_header("content-type", "multipart/form-data")
|
|
||||||
|> put("/api/v1/media/#{object.id}", %{"description" => "test-media"})
|
|
||||||
|> json_response(404)
|
|
||||||
|
|
||||||
assert response == %{"error" => "Record not found"}
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
||||||
describe "Get media by id (/api/v1/media/:id)" do
|
describe "Get media by id (/api/v1/media/:id)" do
|
||||||
|
@ -220,17 +207,6 @@ defmodule Pleroma.Web.MastodonAPI.MediaControllerTest do
|
||||||
assert media["id"]
|
assert media["id"]
|
||||||
end
|
end
|
||||||
|
|
||||||
test "it returns 404 when requesting non-media object", %{conn: conn, user: user} do
|
|
||||||
object = insert(:note, user: user)
|
|
||||||
|
|
||||||
response =
|
|
||||||
conn
|
|
||||||
|> get("/api/v1/media/#{object.id}")
|
|
||||||
|> json_response(404)
|
|
||||||
|
|
||||||
assert response == %{"error" => "Record not found"}
|
|
||||||
end
|
|
||||||
|
|
||||||
test "it returns 403 if media object requested by non-owner", %{object: object, user: user} do
|
test "it returns 403 if media object requested by non-owner", %{object: object, user: user} do
|
||||||
%{conn: conn, user: other_user} = oauth_access(["read:media"])
|
%{conn: conn, user: other_user} = oauth_access(["read:media"])
|
||||||
|
|
||||||
|
|
|
@ -220,28 +220,6 @@ defmodule Pleroma.Web.MastodonAPI.StatusControllerTest do
|
||||||
assert json_response_and_validate_schema(conn, 200)
|
assert json_response_and_validate_schema(conn, 200)
|
||||||
end
|
end
|
||||||
|
|
||||||
test "refuses to post non-owned media", %{conn: conn} do
|
|
||||||
other_user = insert(:user)
|
|
||||||
|
|
||||||
file = %Plug.Upload{
|
|
||||||
content_type: "image/jpeg",
|
|
||||||
path: Path.absname("test/fixtures/image.jpg"),
|
|
||||||
filename: "an_image.jpg"
|
|
||||||
}
|
|
||||||
|
|
||||||
{:ok, upload} = ActivityPub.upload(file, actor: other_user.ap_id)
|
|
||||||
|
|
||||||
conn =
|
|
||||||
conn
|
|
||||||
|> put_req_header("content-type", "application/json")
|
|
||||||
|> post("/api/v1/statuses", %{
|
|
||||||
"status" => "mew",
|
|
||||||
"media_ids" => [to_string(upload.id)]
|
|
||||||
})
|
|
||||||
|
|
||||||
assert json_response(conn, 422) == %{"error" => "forbidden"}
|
|
||||||
end
|
|
||||||
|
|
||||||
test "posting a status with an invalid language", %{conn: conn} do
|
test "posting a status with an invalid language", %{conn: conn} do
|
||||||
conn =
|
conn =
|
||||||
conn
|
conn
|
||||||
|
@ -591,29 +569,6 @@ defmodule Pleroma.Web.MastodonAPI.StatusControllerTest do
|
||||||
assert %{"type" => "image"} = media_attachment
|
assert %{"type" => "image"} = media_attachment
|
||||||
end
|
end
|
||||||
|
|
||||||
test "refuses to schedule post with non-owned media", %{conn: conn} do
|
|
||||||
other_user = insert(:user)
|
|
||||||
|
|
||||||
file = %Plug.Upload{
|
|
||||||
content_type: "image/jpeg",
|
|
||||||
path: Path.absname("test/fixtures/image.jpg"),
|
|
||||||
filename: "an_image.jpg"
|
|
||||||
}
|
|
||||||
|
|
||||||
{:ok, upload} = ActivityPub.upload(file, actor: other_user.ap_id)
|
|
||||||
|
|
||||||
conn =
|
|
||||||
conn
|
|
||||||
|> put_req_header("content-type", "application/json")
|
|
||||||
|> post("/api/v1/statuses", %{
|
|
||||||
"status" => "mew",
|
|
||||||
"scheduled_at" => DateTime.add(DateTime.utc_now(), 6, :minute),
|
|
||||||
"media_ids" => [to_string(upload.id)]
|
|
||||||
})
|
|
||||||
|
|
||||||
assert json_response(conn, 403) == %{"error" => "Access denied"}
|
|
||||||
end
|
|
||||||
|
|
||||||
test "skips the scheduling and creates the activity if scheduled_at is earlier than 5 minutes from now",
|
test "skips the scheduling and creates the activity if scheduled_at is earlier than 5 minutes from now",
|
||||||
%{conn: conn} do
|
%{conn: conn} do
|
||||||
scheduled_at =
|
scheduled_at =
|
||||||
|
@ -2451,25 +2406,6 @@ defmodule Pleroma.Web.MastodonAPI.StatusControllerTest do
|
||||||
assert [%{"id" => ^attachment_id}] = response["media_attachments"]
|
assert [%{"id" => ^attachment_id}] = response["media_attachments"]
|
||||||
end
|
end
|
||||||
|
|
||||||
test "it does not update to non-owned attachments", %{conn: conn, user: user} do
|
|
||||||
other_user = insert(:user)
|
|
||||||
attachment = insert(:attachment, user: other_user)
|
|
||||||
attachment_id = to_string(attachment.id)
|
|
||||||
|
|
||||||
{:ok, activity} = CommonAPI.post(user, %{status: "mew mew #abc", spoiler_text: "#def"})
|
|
||||||
|
|
||||||
conn =
|
|
||||||
conn
|
|
||||||
|> put_req_header("content-type", "application/json")
|
|
||||||
|> put("/api/v1/statuses/#{activity.id}", %{
|
|
||||||
"status" => "mew mew #abc",
|
|
||||||
"spoiler_text" => "#def",
|
|
||||||
"media_ids" => [attachment_id]
|
|
||||||
})
|
|
||||||
|
|
||||||
assert json_response(conn, 400) == %{"error" => "internal_server_error"}
|
|
||||||
end
|
|
||||||
|
|
||||||
test "it does not update visibility", %{conn: conn, user: user} do
|
test "it does not update visibility", %{conn: conn, user: user} do
|
||||||
{:ok, activity} =
|
{:ok, activity} =
|
||||||
CommonAPI.post(user, %{
|
CommonAPI.post(user, %{
|
||||||
|
|
|
@ -47,7 +47,8 @@ defmodule Pleroma.Web.MastodonAPI.ScheduledActivityViewTest do
|
||||||
expected = %{
|
expected = %{
|
||||||
id: to_string(scheduled_activity.id),
|
id: to_string(scheduled_activity.id),
|
||||||
media_attachments:
|
media_attachments:
|
||||||
Utils.attachments_from_ids(user, %{media_ids: [upload.id]})
|
%{media_ids: [upload.id]}
|
||||||
|
|> Utils.attachments_from_ids()
|
||||||
|> Enum.map(&StatusView.render("attachment.json", %{attachment: &1})),
|
|> Enum.map(&StatusView.render("attachment.json", %{attachment: &1})),
|
||||||
params: %{
|
params: %{
|
||||||
in_reply_to_id: to_string(activity.id),
|
in_reply_to_id: to_string(activity.id),
|
||||||
|
|
|
@ -50,7 +50,12 @@ defmodule Pleroma.Web.WebFinger.WebFingerControllerTest do
|
||||||
]
|
]
|
||||||
end
|
end
|
||||||
|
|
||||||
test "reach user on tld, while pleroma is running on subdomain" do
|
test "reach user on tld, while pleroma is runned on subdomain" do
|
||||||
|
Pleroma.Web.Endpoint.config_change(
|
||||||
|
[{Pleroma.Web.Endpoint, url: [host: "sub.example.com"]}],
|
||||||
|
[]
|
||||||
|
)
|
||||||
|
|
||||||
clear_config([Pleroma.Web.Endpoint, :url, :host], "sub.example.com")
|
clear_config([Pleroma.Web.Endpoint, :url, :host], "sub.example.com")
|
||||||
|
|
||||||
clear_config([Pleroma.Web.WebFinger, :domain], "example.com")
|
clear_config([Pleroma.Web.WebFinger, :domain], "example.com")
|
||||||
|
|
|
@ -9,7 +9,7 @@ defmodule Pleroma.Web.WebFingerTest do
|
||||||
import Tesla.Mock
|
import Tesla.Mock
|
||||||
|
|
||||||
setup do
|
setup do
|
||||||
mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
|
mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
|
||||||
:ok
|
:ok
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -76,6 +76,15 @@ defmodule Pleroma.Web.WebFingerTest do
|
||||||
{:ok, _data} = WebFinger.finger(user)
|
{:ok, _data} = WebFinger.finger(user)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
test "returns the ActivityPub actor URI and subscribe address for an ActivityPub user with the ld+json mimetype" do
|
||||||
|
user = "kaniini@gerzilla.de"
|
||||||
|
|
||||||
|
{:ok, data} = WebFinger.finger(user)
|
||||||
|
|
||||||
|
assert data["ap_id"] == "https://gerzilla.de/channel/kaniini"
|
||||||
|
assert data["subscribe_address"] == "https://gerzilla.de/follow?f=&url={uri}"
|
||||||
|
end
|
||||||
|
|
||||||
test "it work for AP-only user" do
|
test "it work for AP-only user" do
|
||||||
user = "kpherox@mstdn.jp"
|
user = "kpherox@mstdn.jp"
|
||||||
|
|
||||||
|
@ -90,6 +99,12 @@ defmodule Pleroma.Web.WebFingerTest do
|
||||||
assert data["subscribe_address"] == "https://mstdn.jp/authorize_interaction?acct={uri}"
|
assert data["subscribe_address"] == "https://mstdn.jp/authorize_interaction?acct={uri}"
|
||||||
end
|
end
|
||||||
|
|
||||||
|
test "it works for friendica" do
|
||||||
|
user = "lain@squeet.me"
|
||||||
|
|
||||||
|
{:ok, _data} = WebFinger.finger(user)
|
||||||
|
end
|
||||||
|
|
||||||
test "it gets the xrd endpoint" do
|
test "it gets the xrd endpoint" do
|
||||||
{:ok, template} = WebFinger.find_lrdd_template("social.heldscal.la")
|
{:ok, template} = WebFinger.find_lrdd_template("social.heldscal.la")
|
||||||
|
|
||||||
|
@ -165,44 +180,5 @@ defmodule Pleroma.Web.WebFingerTest do
|
||||||
|
|
||||||
{:ok, _data} = WebFinger.finger("pekorino@pawoo.net")
|
{:ok, _data} = WebFinger.finger("pekorino@pawoo.net")
|
||||||
end
|
end
|
||||||
|
|
||||||
test "prevents spoofing" do
|
|
||||||
Tesla.Mock.mock(fn
|
|
||||||
%{
|
|
||||||
url: "https://bad.com/.well-known/webfinger?resource=acct:meanie@bad.com"
|
|
||||||
} ->
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 200,
|
|
||||||
body: File.read!("test/fixtures/tesla_mock/webfinger_spoof.json"),
|
|
||||||
headers: [{"content-type", "application/jrd+json"}]
|
|
||||||
}}
|
|
||||||
|
|
||||||
%{url: "https://bad.com/.well-known/host-meta"} ->
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 200,
|
|
||||||
body: File.read!("test/fixtures/tesla_mock/bad.com_host_meta")
|
|
||||||
}}
|
|
||||||
end)
|
|
||||||
|
|
||||||
{:error, _data} = WebFinger.finger("meanie@bad.com")
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
@tag capture_log: true
|
|
||||||
test "prevents forgeries" do
|
|
||||||
Tesla.Mock.mock(fn
|
|
||||||
%{url: "https://bad.com/.well-known/webfinger?resource=acct:meanie@bad.com"} ->
|
|
||||||
fake_webfinger =
|
|
||||||
File.read!("test/fixtures/webfinger/imposter-webfinger.json") |> Jason.decode!()
|
|
||||||
|
|
||||||
Tesla.Mock.json(fake_webfinger)
|
|
||||||
|
|
||||||
%{url: "https://bad.com/.well-known/host-meta"} ->
|
|
||||||
{:ok, %Tesla.Env{status: 404}}
|
|
||||||
end)
|
|
||||||
|
|
||||||
assert {:error, {:webfinger_invalid, _, _}} = WebFinger.finger("meanie@bad.com")
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -1464,177 +1464,6 @@ defmodule HttpRequestMock do
|
||||||
}}
|
}}
|
||||||
end
|
end
|
||||||
|
|
||||||
def get("https://google.com/", _, _, _) do
|
|
||||||
{:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/google.html")}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://yahoo.com/", _, _, _) do
|
|
||||||
{:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/yahoo.html")}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://example.com/error", _, _, _), do: {:error, :overload}
|
|
||||||
|
|
||||||
def get("https://example.com/ogp-missing-title", _, _, _) do
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 200,
|
|
||||||
body: File.read!("test/fixtures/rich_media/ogp-missing-title.html")
|
|
||||||
}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://example.com/oembed", _, _, _) do
|
|
||||||
{:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/oembed.html")}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://example.com/oembed.json", _, _, _) do
|
|
||||||
{:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/oembed.json")}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://example.com/twitter-card", _, _, _) do
|
|
||||||
{:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/twitter_card.html")}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://example.com/non-ogp", _, _, _) do
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/non_ogp_embed.html")}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://example.com/empty", _, _, _) do
|
|
||||||
{:ok, %Tesla.Env{status: 200, body: "hello"}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://friends.grishka.me/posts/54642", _, _, _) do
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 200,
|
|
||||||
body: File.read!("test/fixtures/tesla_mock/smithereen_non_anonymous_poll.json"),
|
|
||||||
headers: activitypub_object_headers()
|
|
||||||
}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://friends.grishka.me/users/1", _, _, _) do
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 200,
|
|
||||||
body: File.read!("test/fixtures/tesla_mock/smithereen_user.json"),
|
|
||||||
headers: activitypub_object_headers()
|
|
||||||
}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://mastodon.example/.well-known/host-meta", _, _, _) do
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 302,
|
|
||||||
headers: [{"location", "https://sub.mastodon.example/.well-known/host-meta"}]
|
|
||||||
}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://sub.mastodon.example/.well-known/host-meta", _, _, _) do
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 200,
|
|
||||||
body:
|
|
||||||
"test/fixtures/webfinger/masto-host-meta.xml"
|
|
||||||
|> File.read!()
|
|
||||||
|> String.replace("{{domain}}", "sub.mastodon.example")
|
|
||||||
}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get(
|
|
||||||
"https://sub.mastodon.example/.well-known/webfinger?resource=acct:a@mastodon.example",
|
|
||||||
_,
|
|
||||||
_,
|
|
||||||
_
|
|
||||||
) do
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 200,
|
|
||||||
body:
|
|
||||||
"test/fixtures/webfinger/masto-webfinger.json"
|
|
||||||
|> File.read!()
|
|
||||||
|> String.replace("{{nickname}}", "a")
|
|
||||||
|> String.replace("{{domain}}", "mastodon.example")
|
|
||||||
|> String.replace("{{subdomain}}", "sub.mastodon.example"),
|
|
||||||
headers: [{"content-type", "application/jrd+json"}]
|
|
||||||
}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://sub.mastodon.example/users/a", _, _, _) do
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 200,
|
|
||||||
body:
|
|
||||||
"test/fixtures/webfinger/masto-user.json"
|
|
||||||
|> File.read!()
|
|
||||||
|> String.replace("{{nickname}}", "a")
|
|
||||||
|> String.replace("{{domain}}", "sub.mastodon.example"),
|
|
||||||
headers: [{"content-type", "application/activity+json"}]
|
|
||||||
}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://sub.mastodon.example/users/a/collections/featured", _, _, _) do
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 200,
|
|
||||||
body:
|
|
||||||
File.read!("test/fixtures/users_mock/masto_featured.json")
|
|
||||||
|> String.replace("{{domain}}", "sub.mastodon.example")
|
|
||||||
|> String.replace("{{nickname}}", "a"),
|
|
||||||
headers: [{"content-type", "application/activity+json"}]
|
|
||||||
}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://pleroma.example/.well-known/host-meta", _, _, _) do
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 302,
|
|
||||||
headers: [{"location", "https://sub.pleroma.example/.well-known/host-meta"}]
|
|
||||||
}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://sub.pleroma.example/.well-known/host-meta", _, _, _) do
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 200,
|
|
||||||
body:
|
|
||||||
"test/fixtures/webfinger/pleroma-host-meta.xml"
|
|
||||||
|> File.read!()
|
|
||||||
|> String.replace("{{domain}}", "sub.pleroma.example")
|
|
||||||
}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get(
|
|
||||||
"https://sub.pleroma.example/.well-known/webfinger?resource=acct:a@pleroma.example",
|
|
||||||
_,
|
|
||||||
_,
|
|
||||||
_
|
|
||||||
) do
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 200,
|
|
||||||
body:
|
|
||||||
"test/fixtures/webfinger/pleroma-webfinger.json"
|
|
||||||
|> File.read!()
|
|
||||||
|> String.replace("{{nickname}}", "a")
|
|
||||||
|> String.replace("{{domain}}", "pleroma.example")
|
|
||||||
|> String.replace("{{subdomain}}", "sub.pleroma.example"),
|
|
||||||
headers: [{"content-type", "application/jrd+json"}]
|
|
||||||
}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get("https://sub.pleroma.example/users/a", _, _, _) do
|
|
||||||
{:ok,
|
|
||||||
%Tesla.Env{
|
|
||||||
status: 200,
|
|
||||||
body:
|
|
||||||
"test/fixtures/webfinger/pleroma-user.json"
|
|
||||||
|> File.read!()
|
|
||||||
|> String.replace("{{nickname}}", "a")
|
|
||||||
|> String.replace("{{domain}}", "sub.pleroma.example"),
|
|
||||||
headers: [{"content-type", "application/activity+json"}]
|
|
||||||
}}
|
|
||||||
end
|
|
||||||
|
|
||||||
def get(url, query, body, headers) do
|
def get(url, query, body, headers) do
|
||||||
{:error,
|
{:error,
|
||||||
"Mock response not implemented for GET #{inspect(url)}, #{query}, #{inspect(body)}, #{inspect(headers)}"}
|
"Mock response not implemented for GET #{inspect(url)}, #{query}, #{inspect(body)}, #{inspect(headers)}"}
|
||||||
|
|
Loading…
Reference in a new issue