Merge branch 'develop' into stable

FloatingGhost 2023-02-11 10:43:22 +00:00
commit 551f92dd50
220 changed files with 3902 additions and 2034 deletions

.gitattributes

@ -1,11 +1,4 @@
*.ex diff=elixir
*.exs diff=elixir
# Most of js/css files included in the repo are minified bundles,
# and we don't want to search/diff those as text files.
*.js binary
*.js.map binary
*.css binary
priv/static/instance/static.css diff=css
priv/static/static-fe/static-fe.css diff=css
*.css diff=css


@ -1,6 +1,8 @@
name: "Bug report"
about: "Something isn't working as expected"
title: "[bug] "
labels:
- bug
body:
- type: markdown
attributes:


@ -1,6 +1,9 @@
name: "Feature request"
about: "I'd like something to be added to Akkoma"
title: "[feat] "
labels:
- "feature request"
body:
- type: markdown
attributes:

.gitignore

@ -76,3 +76,4 @@ docs/site
# docker stuff
docker-db
*.iml


@ -41,7 +41,7 @@ variables:
services:
postgres:
image: postgres:13
image: postgres:15
when:
event:
- pull_request


@ -4,6 +4,50 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## Unreleased
### Added
- Prometheus metrics exporting from `/api/v1/akkoma/metrics`
- Ability to alter http pool size
- Translation of statuses via ArgosTranslate
- Argon2 password hashing
- Ability to "verify" links in profile fields via rel=me
- Mix tasks to dump/load config to/from json for bulk editing
- Followed hashtag list at /api/v1/followed\_tags, API parity with mastodon
- Ability to set posting language in the post form, API parity with mastodon
- Ability to match domains in MRF by a trailing wildcard
- Currently supported formats:
- `example.com` (implicitly matches `*.example.com`)
- `*.example.com`
- `example.*` (implicitly matches `*.example.*`)
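  A hedged sketch of what this enables with the simple policy (host/reason tuples as used elsewhere in this release; the hosts and reasons are illustrative):
  ```elixir
  config :pleroma, :mrf_simple,
    reject: [
      # implicitly also matches sub.example.com
      {"example.com", "spam"},
      # explicit trailing wildcard: any subdomain of example.org
      {"*.example.org", "spam"}
    ]
  ```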
### Removed
- Non-finch HTTP adapters
- Legacy redirect from /api/pleroma/admin to /api/v1/pleroma/admin
- Legacy redirects from /api/pleroma to /api/v1/pleroma
- :crypt dependency
### Changed
- Return HTTP error 413 when uploading an avatar or banner that's above the configured upload limit instead of a 500.
- Non-admin users now cannot register `admin` scope tokens (not security-critical, they didn't work before, but you _could_ create them)
- Admin scopes will be dropped on create
- Rich media will now backoff for 20 minutes after a failure
- Quote posts are now considered as part of the same thread as the post they are quoting
- Extend the mix task `prune_objects` with options to keep more relevant posts
- Simplified HTTP signature processing
- Rich media will now hard-exit after 5 seconds, to prevent timeline hangs
- HTTP Content Security Policy is now far more strict to prevent any potential XSS/CSS leakages
- Follow requests are now paginated, matching the Mastodon API spec; use the Link header to paginate.
- `internal.fetch` and `relay` actors are now represented with the actor type `Application`
### Fixed
- /api/v1/accounts/lookup will now respect restrict\_unauthenticated
- Unknown atoms in the config DB will no longer crash akkoma on boot
### Upgrade notes
- Ensure `config :tesla, :adapter` is either unset, or set to `{Tesla.Adapter.Finch, name: MyFinch}` in your .exs config
- Pleroma-FE will need to be updated to handle the new /api/v1/pleroma endpoints for custom emoji
## 2022.12
### Added


@ -163,11 +163,6 @@
format: "$metadata[$level] $message",
metadata: [:request_id]
config :quack,
level: :warn,
meta: [:all],
webhook_url: "https://hooks.slack.com/services/YOUR-KEY-HERE"
config :mime, :types, %{
"application/xml" => ["xml"],
"application/xrd+xml" => ["xrd+xml"],
@ -184,6 +179,7 @@
receive_timeout: :timer.seconds(15),
proxy_url: nil,
user_agent: :default,
pool_size: 50,
adapter: []
config :pleroma, :instance,
@ -264,7 +260,8 @@
profile_directory: true,
privileged_staff: false,
local_bubble: [],
max_frontend_settings_json_chars: 100_000
max_frontend_settings_json_chars: 100_000,
export_prometheus_metrics: true
config :pleroma, :welcome,
direct_message: [
@ -357,7 +354,7 @@
config :pleroma, :activitypub,
unfollow_blocked: true,
outgoing_blocks: true,
outgoing_blocks: false,
blockers_visible: true,
follow_handshake_timeout: 500,
note_replies_output_limit: 5,
@ -429,7 +426,7 @@
Pleroma.Web.RichMedia.Parsers.TwitterCard,
Pleroma.Web.RichMedia.Parsers.OEmbed
],
failure_backoff: 60_000,
failure_backoff: :timer.minutes(20),
ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
config :pleroma, :media_proxy,
@ -655,6 +652,10 @@
config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Sendmail, enabled: false
config :swoosh,
api_client: Swoosh.ApiClient.Finch,
finch_name: MyFinch
config :pleroma, Pleroma.Emails.UserEmail,
logo: nil,
styling: %{
@ -782,14 +783,6 @@
"https://akkoma-updates.s3-website.fr-par.scw.cloud/frontend/${ref}/admin-fe.zip",
"ref" => "stable"
},
"soapbox-fe" => %{
"name" => "soapbox-fe",
"git" => "https://gitlab.com/soapbox-pub/soapbox",
"build_url" =>
"https://gitlab.com/soapbox-pub/soapbox/-/jobs/artifacts/${ref}/download?job=build-production",
"ref" => "v2.0.0",
"build_dir" => "static"
},
# For developers - enables a swagger frontend to view the openapi spec
"swagger-ui" => %{
"name" => "swagger-ui",
@ -889,6 +882,11 @@
url: "http://127.0.0.1:5000",
api_key: nil
config :pleroma, :argos_translate,
command_argos_translate: "argos-translate",
command_argospm: "argospm",
strip_html: true
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"


@ -964,6 +964,11 @@
type: {:list, :string},
description:
"List of instances that make up your local bubble (closely-related instances). Used to populate the 'bubble' timeline (domain only)."
},
%{
key: :export_prometheus_metrics,
type: :boolean,
description: "Enable prometheus metrics (at /api/v1/akkoma/metrics)"
}
]
},
@ -1118,45 +1123,6 @@
}
]
},
%{
group: :quack,
type: :group,
label: "Quack Logger",
description: "Quack-related settings",
children: [
%{
key: :level,
type: {:dropdown, :atom},
description: "Log level",
suggestions: [:debug, :info, :warn, :error]
},
%{
key: :meta,
type: {:list, :atom},
description: "Configure which metadata you want to report on",
suggestions: [
:application,
:module,
:file,
:function,
:line,
:pid,
:crash_reason,
:initial_call,
:registered_name,
:all,
:none
]
},
%{
key: :webhook_url,
label: "Webhook URL",
type: :string,
description: "Configure the Slack incoming webhook",
suggestions: ["https://hooks.slack.com/services/YOUR-KEY-HERE"]
}
]
},
%{
group: :pleroma,
key: :frontend_configurations,
@ -2695,6 +2661,12 @@
"What user agent to use. Must be a string or an atom `:default`. Default value is `:default`.",
suggestions: ["Pleroma", :default]
},
%{
key: :pool_size,
type: :integer,
description: "Number of concurrent outbound HTTP requests to allow. Default 50.",
suggestions: [50]
},
%{
key: :adapter,
type: :keyword,
@ -3470,5 +3442,32 @@
suggestion: [nil]
}
]
},
%{
group: :pleroma,
key: :argos_translate,
type: :group,
description: "ArgosTranslate Settings.",
children: [
%{
key: :command_argos_translate,
type: :string,
description:
"command for `argos-translate`. Can be the command if it's in your PATH, or the full path to the file.",
suggestion: ["argos-translate"]
},
%{
key: :command_argospm,
type: :string,
description:
"command for `argospm`. Can be the command if it's in your PATH, or the full path to the file.",
suggestion: ["argospm"]
},
%{
key: :strip_html,
type: :boolean,
description: "Strip html from the post before translating it."
}
]
}
]


@ -155,3 +155,51 @@ This forcibly removes all saved values in the database.
```sh
mix pleroma.config [--force] reset
```
## Dumping specific configuration values to JSON
If you want to bulk-modify configuration values (for example, for MRF modifications),
it may be easier to dump the values to JSON and then modify them in a text editor.
=== "OTP"
```sh
./bin/pleroma_ctl config dump_to_file group key path
# For example, to dump the MRF simple configuration:
./bin/pleroma_ctl config dump_to_file pleroma mrf_simple /tmp/mrf_simple.json
```
=== "From Source"
```sh
mix pleroma.config dump_to_file group key path
# For example, to dump the MRF simple configuration:
mix pleroma.config dump_to_file pleroma mrf_simple /tmp/mrf_simple.json
```
## Loading specific configuration values from JSON
**Note:** This will overwrite any existing value in the database, and can
cause crashes if you do not have exactly the correct formatting.
Once you have modified the JSON file, you can load it back into the database.
=== "OTP"
```sh
./bin/pleroma_ctl config load_from_file path
# For example, to load the MRF simple configuration:
./bin/pleroma_ctl config load_from_file /tmp/mrf_simple.json
```
=== "From Source"
```sh
mix pleroma.config load_from_file path
# For example, to load the MRF simple configuration:
mix pleroma.config load_from_file /tmp/mrf_simple.json
```
**NOTE:** an instance reboot is needed for many changes to take effect;
you may want to visit `/api/v1/pleroma/admin/restart` on your instance
to soft-restart it.
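For example, a sketch with `curl` (assuming an admin-scoped token; the endpoint is as named above):
```sh
curl -H "Authorization: Bearer $ADMIN_TOKEN" \
  "https://example.com/api/v1/pleroma/admin/restart"
```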


@ -27,7 +27,7 @@ Replaces embedded objects with references to them in the `objects` table. Only n
## Prune old remote posts from the database
This will prune remote posts older than 90 days (configurable with [`config :pleroma, :instance, remote_post_retention_days`](../../configuration/cheatsheet.md#instance)) from the database, they will be refetched from source when accessed.
This will prune remote posts older than 90 days (configurable with [`config :pleroma, :instance, remote_post_retention_days`](../../configuration/cheatsheet.md#instance)) from the database. Pruned posts may be refetched in some cases.
!!! danger
The disk space will only be reclaimed after `VACUUM FULL`. You may run out of disk space during the execution of the task or vacuuming if you don't have about 1/3rds of the database size free.
@ -45,6 +45,9 @@ This will prune remote posts older than 90 days (configurable with [`config :ple
```
### Options
- `--keep-threads` - don't prune posts when they are part of a thread where at least one post has seen local interaction (e.g. one of the posts is a local post, or is favourited by a local user, or has been repeated by a local user...)
- `--keep-non-public` - keep non-public posts like DMs and followers-only posts, even if they are remote
- `--vacuum` - run `VACUUM FULL` after the objects are pruned
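For example, a conservative prune that keeps threads with local interaction and non-public posts, then reclaims disk space (flags exactly as listed above):
```sh
mix pleroma.database prune_objects --keep-threads --keep-non-public --vacuum
```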
## Create a conversation for all existing DMs
@ -178,4 +181,4 @@ to the current day.
```sh
mix pleroma.database prune_task
```
```


@ -11,11 +11,11 @@ If you want to generate a restrictive `robots.txt`, you can run the following mi
=== "OTP"
```sh
./bin/pleroma_ctl robots_txt disallow_all
./bin/pleroma_ctl robotstxt disallow_all
```
=== "From Source"
```sh
mix pleroma.robots_txt disallow_all
mix pleroma.robotstxt disallow_all
```


@ -0,0 +1,33 @@
# Monitoring Akkoma
If you run akkoma, you may be inclined to collect metrics to ensure your instance is running smoothly,
and that there's nothing quietly failing in the background.
To facilitate this, akkoma exposes prometheus metrics to be scraped.
## Prometheus
See: [export\_prometheus\_metrics](../../configuration/cheatsheet#instance)
To scrape prometheus metrics, we need an OAuth2 token with the `admin:metrics` scope.
Consider using [constanze](https://akkoma.dev/AkkomaGang/constanze) to make this easier:
```bash
constanze token --client-app --scopes "admin:metrics" --client-name "Prometheus"
```
or see `scripts/create_metrics_app.sh` in the source tree for the process to get this token.
Once you have your token of the form `Bearer $ACCESS_TOKEN`, you can use that in your prometheus config:
```yaml
- job_name: akkoma
scheme: https
authorization:
credentials: $ACCESS_TOKEN # this should have the bearer prefix removed
metrics_path: /api/v1/akkoma/metrics
static_configs:
- targets:
- example.com
```


@ -41,8 +41,10 @@ you _may_ need to specify `--flavour`, in the same way as
Run as the `akkoma` user:
```sh
# Pull in new changes
git pull
# fetch changes
git fetch
# check out the latest tag
git checkout $(git tag -l | grep -v 'rc[0-9]*$' | sort -V | tail -n 1)
# Run with production configuration
export MIX_ENV=prod


@ -62,6 +62,7 @@ To add configuration to your config file, you can copy it from the base config.
* `password_reset_token_validity`: The time after which reset tokens aren't accepted anymore, in seconds (default: one day).
* `local_bubble`: Array of domains representing instances closely related to yours. Used to populate the `bubble` timeline. e.g. `["example.com"]` (default: `[]`)
* `languages`: List of Language Codes used by the instance. This is used to try and set a default language from the frontend. It will try and find the first match between the languages set here and the user's browser languages. It will default to the first language in this setting if there is no match. (default `["en"]`)
* `export_prometheus_metrics`: Enable prometheus metrics, served at `/api/v1/akkoma/metrics`, requiring the `admin:metrics` oauth scope.
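A minimal sketch of fetching the metrics by hand, assuming you already have a token with the `admin:metrics` scope (see the monitoring guide):
```sh
curl -H "Authorization: Bearer $ACCESS_TOKEN" \
  "https://example.com/api/v1/akkoma/metrics"
```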
## :database
* `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).
@ -528,54 +529,6 @@ Available caches:
* `user_agent`: what user agent should we use? (default: `:default`), must be string or `:default`
* `adapter`: array of adapter options
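For reference, a sketch matching the base config shown earlier in this commit (the values are the defaults):
```elixir
config :pleroma, :http,
  pool_size: 50,
  user_agent: :default,
  adapter: []
```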
### :hackney_pools
Advanced. Tweaks Hackney (http client) connections pools.
There's three pools used:
* `:federation` for the federation jobs.
You may want this pool max_connections to be at least equal to the number of federator jobs + retry queue jobs.
* `:media` for rich media, media proxy
* `:upload` for uploaded media (if using a remote uploader and `proxy_remote: true`)
For each pool, the options are:
* `max_connections` - how much connections a pool can hold
* `timeout` - retention duration for connections
### :connections_pool
*For `gun` adapter*
Settings for HTTP connection pool.
* `:connection_acquisition_wait` - Timeout to acquire a connection from pool.The total max time is this value multiplied by the number of retries.
* `connection_acquisition_retries` - Number of attempts to acquire the connection from the pool if it is overloaded. Each attempt is timed `:connection_acquisition_wait` apart.
* `:max_connections` - Maximum number of connections in the pool.
* `:connect_timeout` - Timeout to connect to the host.
* `:reclaim_multiplier` - Multiplied by `:max_connections` this will be the maximum number of idle connections that will be reclaimed in case the pool is overloaded.
### :pools
*For `gun` adapter*
Settings for request pools. These pools are limited on top of `:connections_pool`.
There are four pools used:
* `:federation` for the federation jobs. You may want this pool's max_connections to be at least equal to the number of federator jobs + retry queue jobs.
* `:media` - for rich media, media proxy.
* `:upload` - for proxying media when a remote uploader is used and `proxy_remote: true`.
* `:default` - for other requests.
For each pool, the options are:
* `:size` - limit to how much requests can be concurrently executed.
* `:recv_timeout` - timeout while `gun` will wait for response
* `:max_waiting` - limit to how much requests can be waiting for others to finish, after this is reached, subsequent requests will be dropped.
## Captcha
### Pleroma.Captcha
@ -833,17 +786,8 @@ config :logger, :ex_syslogger,
level: :info,
ident: "pleroma",
format: "$metadata[$level] $message"
config :quack,
level: :warn,
meta: [:all],
webhook_url: "https://hooks.slack.com/services/YOUR-API-KEY-HERE"
```
See the [Quack Github](https://github.com/azohra/quack) for more details
## Database options
### RUM indexing for full text search
@ -1175,7 +1119,7 @@ Each job has these settings:
### Translation Settings
Settings to automatically translate statuses for end users. Currently supported
translation services are DeepL and LibreTranslate.
translation services are DeepL and LibreTranslate. The supported command line tool is [Argos Translate](https://github.com/argosopentech/argos-translate).
Translations are available at `/api/v1/statuses/:id/translations/:language`, where
`language` is the target language code (e.g `en`)
@ -1184,7 +1128,7 @@ Translations are available at `/api/v1/statuses/:id/translations/:language`, whe
- `:enabled` - enables translation
- `:module` - Sets module to be used
- Either `Pleroma.Akkoma.Translators.DeepL` or `Pleroma.Akkoma.Translators.LibreTranslate`
- Either `Pleroma.Akkoma.Translators.DeepL`, `Pleroma.Akkoma.Translators.LibreTranslate`, or `Pleroma.Akkoma.Translators.ArgosTranslate`
### `:deepl`
@ -1196,3 +1140,9 @@ Translations are available at `/api/v1/statuses/:id/translations/:language`, whe
- `:url` - URL of LibreTranslate instance
- `:api_key` - API key for LibreTranslate
### `:argos_translate`
- `:command_argos_translate` - command for `argos-translate`. Can be the command if it's in your PATH, or the full path to the file (default: `argos-translate`).
- `:command_argospm` - command for `argospm`. Can be the command if it's in your PATH, or the full path to the file (default: `argospm`).
- `:strip_html` - Strip html from the post before translating it (default: `true`).
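Putting it together, a minimal sketch of enabling Argos Translate; the `:argos_translate` keys come from this commit's base config, while the `:translator` group name for `:enabled`/`:module` is assumed from the settings described above:
```elixir
config :pleroma, :translator,
  enabled: true,
  module: Pleroma.Akkoma.Translators.ArgosTranslate

config :pleroma, :argos_translate,
  # defaults; use full paths if the binaries are not on $PATH
  command_argos_translate: "argos-translate",
  command_argospm: "argospm",
  strip_html: true
```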


@ -67,3 +67,29 @@ Priority of tags assigns in emoji.txt and custom.txt:
Priority for globs:
`special group setting in config.exs > default setting in config.exs`
## Stealing emoji
Managing your emoji can be hard work, and you just want to have the cool emoji your friends use? As usual, crime comes to the rescue!
You can use the `Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy` [Message Rewrite Facility](../configuration/cheatsheet.md#mrf) policy to automatically add emoji contained in messages from specific servers to your instance. Note that this happens on message processing, so the emoji will be added only after your instance receives some interaction containing emoji _after_ configuring this.
To activate this you have to [configure](../configuration/cheatsheet.md#mrf_steal_emoji) it in your configuration file. For example if you wanted to steal any emoji that is not related to cinnamon and not larger than about 10K from `coolemoji.space` and `spiceenthusiasts.biz`, you would add the following:
```elixir
config :pleroma, :mrf,
policies: [
Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy
]
config :pleroma, :mrf_steal_emoji,
hosts: [
"coolemoji.space",
"spiceenthusiasts.biz"
],
rejected_shortcodes: [
".*cinnamon.*"
],
size_limit: 10000
```
Note that this may not obey emoji licensing restrictions. It's extremely unlikely that anyone will care, but keep this in mind for when Nintendo starts their own instance.


@ -6,6 +6,31 @@ Akkoma performance is largely dependent on performance of the underlying databas
[PgTune](https://pgtune.leopard.in.ua) can be used to get recommended settings. Be sure to set "Number of Connections" to 20, otherwise it might produce settings that hurt database performance. It is also recommended to not use the "Network Storage" option.
If your server runs other services, you may want to take that into account. E.g. if you have 4GB RAM, but 1GB of it is already used for other services, it may be better to tell PGTune you only have 3GB. In the end, PGTune only provides recommended settings; you can always try to fine-tune further.
### Example configurations
Here are some configuration suggestions for PostgreSQL 10+.
#### 1GB RAM, 1 CPU
```
shared_buffers = 256MB
effective_cache_size = 768MB
maintenance_work_mem = 64MB
work_mem = 13107kB
```
#### 2GB RAM, 2 CPU
```
shared_buffers = 512MB
effective_cache_size = 1536MB
maintenance_work_mem = 128MB
work_mem = 26214kB
max_worker_processes = 2
max_parallel_workers_per_gather = 1
max_parallel_workers = 2
```
## Disable generic query plans
When PostgreSQL receives a query, it decides on a strategy for searching the requested data; this is called a query plan. The query planner has two modes: generic and custom. Generic makes a plan for all queries of the same shape, ignoring the parameters, which is then cached and reused. Custom, on the contrary, generates a unique query plan based on query parameters.
@ -23,26 +48,3 @@ config :pleroma, Pleroma.Repo,
```
A more detailed explanation of the issue can be found at <https://blog.soykaf.com/post/postgresql-elixir-troubles/>.
## Example configurations
Here are some configuration suggestions for PostgreSQL 10+.
### 1GB RAM, 1 CPU
```
shared_buffers = 256MB
effective_cache_size = 768MB
maintenance_work_mem = 64MB
work_mem = 13107kB
```
### 2GB RAM, 2 CPU
```
shared_buffers = 512MB
effective_cache_size = 1536MB
maintenance_work_mem = 128MB
work_mem = 26214kB
max_worker_processes = 2
max_parallel_workers_per_gather = 1
max_parallel_workers = 2
```


@ -1056,14 +1056,13 @@ Most of the settings will be applied in `runtime`, this means that you don't nee
Example of setting without keyword in value:
```elixir
config :tesla, :adapter, Tesla.Adapter.Hackney
config :tesla, :adapter, {Tesla.Adapter.Finch, name: MyFinch}
```
List of settings which support only full update by key:
```elixir
@full_key_update [
{:pleroma, :ecto_repos},
{:quack, :meta},
{:mime, :types},
{:cors_plug, [:max_age, :methods, :expose, :headers]},
{:auto_linker, :opts},
@ -1083,22 +1082,6 @@ List of settings which support only full update by subkey:
]
```
*Settings without an explicit key must be sent in separate config object params.*
```elixir
config :quack,
level: :debug,
meta: [:all],
...
```
```json
{
"configs": [
{"group": ":quack", "key": ":level", "value": ":debug"},
{"group": ":quack", "key": ":meta", "value": [":all"]},
...
]
}
```
- Request:
```json


@ -1 +1,48 @@
This section contains notes and guidelines for developers.
# Contributing to Akkoma
You wish to add a new feature in Akkoma, but don't know how to proceed? This guide takes you through the various steps of the development and contribution process.
If you're looking for stuff to implement or fix, check the [bug-tracker](https://akkoma.dev/AkkomaGang/akkoma/issues) or [forum](https://meta.akkoma.dev/c/requests/5).
Come say hi to us in the [#akkoma-dev chat room](./../#irc)!
## Akkoma Clients
Akkoma is the back-end. Clients have their own repositories and are often separate projects. You can check what clients work with Akkoma [on the clients page](../clients/). If you maintain a working client not listed yet, feel free to make a PR [to these docs](./#docs)!
For resources on APIs and such, check the sidebar of this page.
## Docs
The docs are written in Markdown, including certain extensions, and can be found [in the docs folder of the Akkoma repo](https://akkoma.dev/AkkomaGang/akkoma/src/branch/develop/docs/). The content itself is stored in the `docs` subdirectory.
## Technology
Akkoma is written in [Elixir](https://elixir-lang.org/) and uses [PostgreSQL](https://www.postgresql.org/) as its database. We use [Git](https://git-scm.com/) for collaboration and tracking code changes. Furthermore, it can typically run on [Unix and Unix-like OSes](https://en.wikipedia.org/wiki/Unix-like). For development, you should use an OS which [can run Akkoma](../installation/debian_based_en/).
It's good to have at least some basic understanding of at least Git and Elixir. If this is completely new for you, there's some [videos explaining Git](https://git-scm.com/doc) and Codeberg has a nice article explaining the typical [pull requests Git flow](https://docs.codeberg.org/collaborating/pull-requests-and-git-flow/). For Elixir, you can follow Elixir's own [Getting Started guide](https://elixir-lang.org/getting-started/introduction.html).
## Setting up a development environment
The best way to start is getting the software to run from source so you can start poking on it. Check out the [guides for setting up an Akkoma instance for development](setting_up_akkoma_dev/#setting-up-a-akkoma-development-environment).
## General overview
### Modules
Akkoma has several modules. There are modules for [uploading](https://akkoma.dev/AkkomaGang/akkoma/src/branch/develop/lib/pleroma/uploaders), [upload filters](https://akkoma.dev/AkkomaGang/akkoma/src/branch/develop/lib/pleroma/upload/filter), [translators](https://akkoma.dev/AkkomaGang/akkoma/src/branch/develop/lib/pleroma/akkoma/translators)... The most famous ones are without a doubt the [MRF policies](https://akkoma.dev/AkkomaGang/akkoma/src/branch/develop/lib/pleroma/web/activity_pub/mrf). Modules are often self contained and a good way to start with development because you don't have to think about much more than just the module itself. We even have an example on [writing your own MRF policy](/configuration/mrf/#writing-your-own-mrf-policy)!
Another easy entry point is the [mix tasks](https://akkoma.dev/AkkomaGang/akkoma/src/branch/develop/lib/mix/tasks/pleroma). They too are often self contained and don't need you to go through much of the code.
### Activity Streams/Activity Pub
Akkoma uses Activity Streams for both federation and internal representation. It may be interesting to at least go over the specifications of [Activity Pub](https://www.w3.org/TR/activitypub/), [Activity Streams 2.0](https://www.w3.org/TR/activitystreams-core/), and [Activity Streams Vocabulary](https://www.w3.org/TR/activitystreams-vocabulary/). Note that these are not enough to have a full grasp of how everything works, but should at least give you the basics to understand how messages are passed between and inside Akkoma instances.
## Don't forget
When you make changes, you're expected to create [a Pull Request](https://akkoma.dev/AkkomaGang/akkoma/pulls). You don't have to wait until you finish to create the PR, but please do prefix the title of the PR with "WIP: " for as long as you're still working on it. The sooner you create your PR, the sooner people know what you are working on and the sooner you can get feedback and, if needed, help. You can then simply keep working on it until you are finished.
When doing changes, don't forget to add it to the relevant parts of the [CHANGELOG.md](https://akkoma.dev/AkkomaGang/akkoma/src/branch/develop/CHANGELOG.md).
You're expected to write [tests](https://elixirschool.com/en/lessons/testing/basics). While code is generally stored in the `lib` directory, tests are stored in the `test` directory using a similar folder structure. Feel free to peek at other tests to see how they are done. Obviously tests are expected to pass and properly test the functionality you added. If you feel really confident, you could even try to [write a test first and then write the code needed to make it pass](https://en.wikipedia.org/wiki/Test-driven_development)!
Code is formatted using the default formatter that comes with Elixir. You can format a file with e.g. `mix format /path/to/file.ex`. To check if everything is properly formatted, you can run `mix format --check-formatted`.


@ -5,22 +5,37 @@ Akkoma requires some adjustments from the defaults for running the instance loca
## Installing
1. Install Akkoma as explained in [the docs](../installation/debian_based_en.md), with some exceptions:
* You can use your own fork of the repository and add akkoma as a remote `git remote add akkoma 'https://akkoma.dev/AkkomaGang/akkoma.git'`
* You can skip systemd and nginx and all that stuff
* No need to create a dedicated akkoma user, it's easier to just use your own user
* For the DB you can still choose a dedicated user, the mix tasks set it up for you so it's no extra work for you
* You can use your own fork of the repository and add akkoma as a remote `git remote add akkoma 'https://akkoma.dev/AkkomaGang/akkoma.git'`
* For domain you can use `localhost`
* For the DB you can still choose a dedicated user. The mix tasks set it up, so it's no extra work for you
* instead of creating a `prod.secret.exs`, create `dev.secret.exs`
* No need to prefix with `MIX_ENV=prod`. We're using dev and that's the default MIX_ENV
* You can skip nginx and systemd
* For front-end, you'll probably want to install and use the develop branch instead of the stable branch. There's no guarantee that the stable branch of the FE will always work on the develop branch of the BE.
2. Change the dev.secret.exs
* Change the FE settings to use the installed branch (see also [Frontend Management](/configuration/frontend_management/))
* Change the scheme in `config :pleroma, Pleroma.Web.Endpoint` to http (see examples below)
* If you want to change other settings, you can do that too
3. You can now start the server `mix phx.server`. Once it's build and started, you can access the instance on `http://<host>:<port>` (e.g.http://localhost:4000 ) and should be able to do everything locally you normaly can.
3. You can now start the server with `mix phx.server`. Once it's built and started, you can access the instance on `http://<host>:<port>` (e.g. http://localhost:4000) and should be able to do everything locally you normally can.
Example of how to install pleroma-fe and admin-fe using their develop branch
```sh
mix pleroma.frontend install pleroma-fe --ref develop
mix pleroma.frontend install admin-fe --ref develop
```
Example config to use the pleroma-fe and admin-fe installed from the develop branch
```elixir
config :pleroma, :frontends,
primary: %{"name" => "pleroma-fe", "ref" => "develop"},
admin: %{"name" => "admin-fe", "ref" => "develop"}
```
Example config to change the scheme to http. Change the port if you want to run on another port.
```elixir
config :pleroma, Pleroma.Web.Endpoint,
url: [host: "localhost", scheme: "http", port: 4000],
```
Example config to disable captcha. This makes it a bit easier to create test-users.
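A sketch of what that could look like, assuming the standard `Pleroma.Captcha` settings group (the actual example block is elided from this diff, so treat the keys as illustrative):
```elixir
config :pleroma, Pleroma.Captcha,
  enabled: false
```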
@ -94,4 +109,4 @@ Update Akkoma as explained in [the docs](../administration/updating.md). Just ma
## Working on multiple branches
If you develop on a separate branch, it's possible you did migrations that aren't merged into another branch you're working on. If you have multiple things you're working on, it's probably best to set up multiple Akkoma instances each with their own database. If you finished with a branch and want to switch back to develop to start a new branch from there, you can drop the database and recreate the database (e.g. by using `config/setup_db.psql`). The commands to drop and recreate the database can be found in [the docs](../administration/backup.md).
If you develop on a separate branch, it's possible you did migrations that aren't merged into another branch you're working on. In that case, it's probably best to set up multiple Akkoma instances each with their own database. If you finished with a branch and want to switch back to develop to start a new branch from there, you can drop the database and recreate the database (e.g. by using `config/setup_db.psql`). The commands to drop and recreate the database can be found in [the docs](../administration/backup.md).


@ -30,11 +30,10 @@ sudo dnf install git gcc g++ make cmake file-devel postgresql-server postgresql-
* Enable and initialize Postgres:
```shell
sudo systemctl enable postgresql.service
sudo postgresql-setup --initdb --unit postgresql
# Allow password auth for postgres
sudo sed -E -i 's|(host +all +all +127.0.0.1/32 +)ident|\1md5|' /var/lib/pgsql/data/pg_hba.conf
sudo systemctl start postgresql.service
sudo systemctl enable --now postgresql.service
```
### Install Elixir and Erlang
@ -59,7 +58,7 @@ sudo dnf install ffmpeg
* Install ImageMagick and ExifTool for image manipulation:
```shell
sudo dnf install Imagemagick perl-Image-ExifTool
sudo dnf install ImageMagick perl-Image-ExifTool
```


@ -1,6 +1,6 @@
# Installing on OpenBSD
This guide describes the installation and configuration of akkoma (and the required software to run it) on a single OpenBSD 6.6 server.
This guide describes the installation and configuration of akkoma (and the required software to run it) on a single OpenBSD 7.2 server.
For any additional information regarding commands and configuration files mentioned here, check the man pages [online](https://man.openbsd.org/) or directly on your server with the man command.
@ -12,11 +12,10 @@ For any additional information regarding commands and configuration files mentio
To install them, run the following command (with doas or as root):
```
pkg_add elixir gmake git postgresql-server postgresql-contrib cmake ffmpeg ImageMagick erlang-wx-25
pkg_add elixir gmake git postgresql-server postgresql-contrib cmake ffmpeg erlang-wx libmagic
pkg_add erlang-wx # Choose the latest version as package version when prompted
```
(Note that the erlang version may change, it was 25 at the time of writing)
Akkoma requires a reverse proxy, OpenBSD has relayd in base (and is used in this guide) and packages/ports are available for nginx (www/nginx) and apache (www/apache-httpd). Independently of the reverse proxy, [acme-client(1)](https://man.openbsd.org/acme-client) can be used to get a certificate from Let's Encrypt.
#### Optional software
@ -29,32 +28,35 @@ Per [`docs/installation/optional/media_graphics_packages.md`](../installation/op
To install the above:
```
pkg_add ImageMagick ffmpeg p5-Image-ExifTool
pkg_add ffmpeg p5-Image-ExifTool
```
#### Creating the akkoma user
Akkoma will be run by a dedicated user, \_akkoma. Before creating it, insert the following lines in login.conf:
Akkoma will be run by a dedicated user, `_akkoma`. Before creating it, insert the following lines in `/etc/login.conf`:
```
akkoma:\
:datasize-max=1536M:\
:datasize-cur=1536M:\
:openfiles-max=4096
```
This creates a "akkoma" login class and sets higher values than default for datasize and openfiles (see [login.conf(5)](https://man.openbsd.org/login.conf)), this is required to avoid having akkoma crash some time after starting.
This creates an `akkoma` login class and sets higher values than default for datasize and openfiles (see [login.conf(5)](https://man.openbsd.org/login.conf)); this is required to avoid having akkoma crash some time after starting.
Create the \_akkoma user, assign it the akkoma login class and create its home directory (/home/\_akkoma/): `useradd -m -L akkoma _akkoma`
Create the `_akkoma` user, assign it the akkoma login class and create its home directory (`/home/_akkoma/`): `useradd -m -L akkoma _akkoma`
#### Clone akkoma's directory
Enter a shell as the \_akkoma user. As root, run `su _akkoma -;cd`. Then clone the repository with `git clone https://akkoma.dev/AkkomaGang/akkoma.git`. Akkoma is now installed in /home/\_akkoma/akkoma/, it will be configured and started at the end of this guide.
Enter a shell as the `_akkoma` user. As root, run `su _akkoma -;cd`. Then clone the repository with `git clone https://akkoma.dev/AkkomaGang/akkoma.git`. Akkoma is now installed in `/home/_akkoma/akkoma/`, it will be configured and started at the end of this guide.
#### PostgreSQL
Start a shell as the \_postgresql user (as root run `su _postgresql -` then run the `initdb` command to initialize postgresql:
You will need to specify pgdata directory to the default (/var/postgresql/data) with the `-D <path>` and set the user to postgres with the `-U <username>` flag. This can be done as follows:
Create `_postgresql`'s user directory (it hasn't been created yet): `mkdir /var/postgresql/data`. To set it as the home
directory for user `_postgresql`, run `usermod -d /var/postgresql/data _postgresql`.
Start a shell as the `_postgresql` user (as root, run `su _postgresql -`), then run the `initdb` command to initialize postgresql.
You will need to specify pgdata directory to the default (`/var/postgresql/data`) with the `-D <path>` and set the user to postgres with the `-U <username>` flag. This can be done as follows:
```
initdb -D /var/postgresql/data -U postgres
```
If you are not using the default directory, you will have to update the `datadir` variable in the /etc/rc.d/postgresql script.
If you are not using the default directory, you will have to update the `datadir` variable in the `/etc/rc.d/postgresql` script.
When this is done, enable postgresql so that it starts on boot and start it. As root, run:
```
@ -70,7 +72,7 @@ httpd will have three functions:
* serve a robots.txt file
* get Let's Encrypt certificates, with acme-client
Insert the following config in httpd.conf:
Insert the following config in `/etc/httpd.conf`:
```
# $OpenBSD: httpd.conf,v 1.17 2017/04/16 08:50:49 ajacoutot Exp $
@ -93,13 +95,10 @@ server "default" {
location "/robots.txt" { root "/htdocs/local/" }
location "/*" { block return 302 "https://$HTTP_HOST$REQUEST_URI" }
}
types {
}
```
Do not forget to change *<IPv4/6 address\>* to your server's address(es). If httpd should only listen on one protocol family, comment one of the two first *listen* options.
Create the /var/www/htdocs/local/ folder and write the content of your robots.txt in /var/www/htdocs/local/robots.txt.
Create the `/var/www/htdocs/local/` folder and write the content of your robots.txt in `/var/www/htdocs/local/robots.txt`.
Check the configuration with `httpd -n`, if it is OK enable and start httpd (as root):
```
rcctl enable httpd
@ -108,7 +107,7 @@ rcctl start httpd
#### acme-client
acme-client is used to get SSL/TLS certificates from Let's Encrypt.
Insert the following configuration in /etc/acme-client.conf:
Insert the following configuration in `/etc/acme-client.conf`:
```
#
# $OpenBSD: acme-client.conf,v 1.4 2017/03/22 11:14:14 benno Exp $
@ -129,7 +128,7 @@ domain <domain name> {
}
```
Replace *<domain name\>* by the domain name you'll use for your instance. As root, run `acme-client -n` to check the config, then `acme-client -ADv <domain name>` to create account and domain keys, and request a certificate for the first time.
Make acme-client run everyday by adding it in /etc/daily.local. As root, run the following command: `echo "acme-client <domain name>" >> /etc/daily.local`.
Make acme-client run everyday by adding it in `/etc/daily.local`. As root, run the following command: `echo "acme-client <domain name>" >> /etc/daily.local`.
Relayd will look for certificates and keys based on the address it listens on (see next part), the easiest way to make them available to relayd is to create a link, as root run:
```
@ -140,7 +139,7 @@ This will have to be done for each IPv4 and IPv6 address relayd listens on.
#### relayd
relayd will be used as the reverse proxy sitting in front of akkoma.
Insert the following configuration in /etc/relayd.conf:
Insert the following configuration in `/etc/relayd.conf`:
```
# $OpenBSD: relayd.conf,v 1.4 2018/03/23 09:55:06 claudio Exp $
@ -198,7 +197,7 @@ rcctl start relayd
#### pf
Enabling and configuring pf is highly recommended.
In /etc/pf.conf, insert the following configuration:
In `/etc/pf.conf`, insert the following configuration:
```
# Macros
if="<network interface>"
@ -222,31 +221,30 @@ pass in quick on $if inet6 proto icmp6 to ($if) icmp6-type { echoreq unreach par
pass in quick on $if proto tcp to ($if) port { http https } # relayd/httpd
pass in quick on $if proto tcp from $authorized_ssh_clients to ($if) port ssh
```
Replace *<network interface\>* by your server's network interface name (which you can get with ifconfig). Consider replacing the content of the authorized\_ssh\_clients macro by, for exemple, your home IP address, to avoid SSH connection attempts from bots.
Replace *<network interface\>* by your server's network interface name (which you can get with ifconfig). Consider replacing the content of the `authorized_ssh_clients` macro by, for example, your home IP address, to avoid SSH connection attempts from bots.
Check pf's configuration by running `pfctl -nf /etc/pf.conf`, load it with `pfctl -f /etc/pf.conf` and enable pf at boot with `rcctl enable pf`.
#### Configure and start akkoma
Enter a shell as \_akkoma (as root `su _akkoma -`) and enter akkoma's installation directory (`cd ~/akkoma/`).
Enter a shell as `_akkoma` (as root `su _akkoma -`) and enter akkoma's installation directory (`cd ~/akkoma/`).
Then follow the main installation guide:
* run `mix deps.get`
* run `MIX_ENV=prod mix pleroma.instance gen` and enter your instance's information when asked
* copy config/generated\_config.exs to config/prod.secret.exs. The default values should be sufficient but you should edit it and check that everything seems OK.
* copy `config/generated_config.exs` to `config/prod.secret.exs`. The default values should be sufficient but you should edit it and check that everything seems OK.
* exit your current shell back to a root one and run `psql -U postgres -f /home/_akkoma/akkoma/config/setup_db.psql` to setup the database.
* return to a \_akkoma shell into akkoma's installation directory (`su _akkoma -;cd ~/akkoma`) and run `MIX_ENV=prod mix ecto.migrate`
* return to a `_akkoma` shell into akkoma's installation directory (`su _akkoma -;cd ~/akkoma`) and run `MIX_ENV=prod mix ecto.migrate`
As \_akkoma in /home/\_akkoma/akkoma, you can now run `LC_ALL=en_US.UTF-8 MIX_ENV=prod mix phx.server` to start your instance.
As `_akkoma` in `/home/_akkoma/akkoma`, you can now run `LC_ALL=en_US.UTF-8 MIX_ENV=prod mix phx.server` to start your instance.
In another SSH session/tmux window, check that it is working properly by running `ftp -MVo - http://127.0.0.1:4000/api/v1/instance`; you should get JSON output. Double-check that *uri*'s value is your instance's domain name.
##### Starting akkoma at boot
An rc script to automatically start akkoma at boot hasn't been written yet; it can be run in a tmux session (tmux is in base).
#### Create administrative user
If your instance is up and running, you can create your first user with administrative rights with the following command as the \_akkoma user.
If your instance is up and running, you can create your first user with administrative rights with the following command as the `_akkoma` user.
```
LC_ALL=en_US.UTF-8 MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress> --admin
```


@ -5,7 +5,7 @@
This guide covers an installation using an OTP release. To install Akkoma from source, please check out the corresponding guide for your distro.
## Pre-requisites
* A machine running Linux with GNU (e.g. Debian, Ubuntu) or musl (e.g. Alpine) libc and `x86_64`, `aarch64` or `armv7l` CPU, you have root access to. If you are not sure if it's compatible see [Detecting flavour section](#detecting-flavour) below
* A machine running Linux with GNU (e.g. Debian, Ubuntu) or musl (e.g. Alpine) libc and an `x86_64` CPU you have root access to. If you are not sure if it's compatible see [Detecting flavour section](#detecting-flavour) below
* For installing OTP releases on RedHat-based distros like Fedora and Centos Stream, please follow [this guide](./otp_redhat_en.md) instead.
* A (sub)domain pointed to the machine
@ -118,8 +118,8 @@ Restart PostgreSQL to apply configuration changes:
adduser --system --shell /bin/false --home /opt/akkoma akkoma
# Set the flavour environment variable to the string you got in Detecting flavour section.
# For example if the flavour is `amd64-musl` the command will be
export FLAVOUR="amd64-musl"
# For example if the flavour is `amd64` the command will be
export FLAVOUR="amd64"
# Clone the release build into a temporary directory and unpack it
su akkoma -s $SHELL -lc "


@ -54,8 +54,6 @@ server {
ssl_protocols TLSv1.2 TLSv1.3;
ssl_ciphers "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4";
ssl_prefer_server_ciphers off;
# In case of an old server with an OpenSSL version of 1.0.2 or below,
# leave only prime256v1 or comment out the following line.
ssl_ecdh_curve X25519:prime256v1:secp384r1:secp521r1;
ssl_stapling on;
ssl_stapling_verify on;


@ -1,3 +1,4 @@
# credo:disable-for-this-file
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only


@ -79,6 +79,45 @@ def run(["dump", group]) do
end)
end
def run(["dump_to_file", group, key, fname]) do
check_configdb(fn ->
start_pleroma()
group = maybe_atomize(group)
key = maybe_atomize(key)
config = ConfigDB.get_by_group_and_key(group, key)
json =
%{
group: ConfigDB.to_json_types(config.group),
key: ConfigDB.to_json_types(config.key),
value: ConfigDB.to_json_types(config.value)
}
|> Jason.encode!()
|> Jason.Formatter.pretty_print()
File.write(fname, json)
shell_info("Wrote #{group}_#{key}.json")
end)
end
def run(["load_from_file", fname]) do
check_configdb(fn ->
start_pleroma()
json = File.read!(fname)
config = Jason.decode!(json)
group = ConfigDB.to_elixir_types(config["group"])
key = ConfigDB.to_elixir_types(config["key"])
value = ConfigDB.to_elixir_types(config["value"])
params = %{group: group, key: key, value: value}
ConfigDB.update_or_create(params)
shell_info("Loaded #{config["group"]}, #{config["key"]}")
end)
end
def run(["groups"]) do
check_configdb(fn ->
start_pleroma()


@ -67,33 +67,92 @@ def run(["prune_objects" | args]) do
OptionParser.parse(
args,
strict: [
vacuum: :boolean
vacuum: :boolean,
keep_threads: :boolean,
keep_non_public: :boolean
]
)
start_pleroma()
deadline = Pleroma.Config.get([:instance, :remote_post_retention_days])
time_deadline = NaiveDateTime.utc_now() |> NaiveDateTime.add(-(deadline * 86_400))
Logger.info("Pruning objects older than #{deadline} days")
log_message = "Pruning objects older than #{deadline} days"
time_deadline =
NaiveDateTime.utc_now()
|> NaiveDateTime.add(-(deadline * 86_400))
log_message =
if Keyword.get(options, :keep_non_public) do
log_message <> ", keeping non public posts"
else
log_message
end
from(o in Object,
where:
fragment(
"?->'to' \\? ? OR ?->'cc' \\? ?",
o.data,
^Pleroma.Constants.as_public(),
o.data,
^Pleroma.Constants.as_public()
),
where: o.inserted_at < ^time_deadline,
where:
log_message =
if Keyword.get(options, :keep_threads) do
log_message <> ", keeping threads intact"
else
log_message
end
Logger.info(log_message)
if Keyword.get(options, :keep_threads) do
# We want to delete objects from threads where
# 1. the newest post is still old
# 2. none of the activities is local
# 3. none of the activities is bookmarked
# 4. optionally none of the posts is non-public
deletable_context =
if Keyword.get(options, :keep_non_public) do
Pleroma.Activity
|> join(:left, [a], b in Pleroma.Bookmark, on: a.id == b.activity_id)
|> group_by([a], fragment("? ->> 'context'::text", a.data))
|> having(
[a],
not fragment(
# Posts (checked on Create Activity) is non-public
"bool_or((not(?->'to' \\? ? OR ?->'cc' \\? ?)) and ? ->> 'type' = 'Create')",
a.data,
^Pleroma.Constants.as_public(),
a.data,
^Pleroma.Constants.as_public(),
a.data
)
)
else
Pleroma.Activity
|> join(:left, [a], b in Pleroma.Bookmark, on: a.id == b.activity_id)
|> group_by([a], fragment("? ->> 'context'::text", a.data))
end
|> having([a], max(a.updated_at) < ^time_deadline)
|> having([a], not fragment("bool_or(?)", a.local))
|> having([_, b], fragment("max(?::text) is null", b.id))
|> select([a], fragment("? ->> 'context'::text", a.data))
Pleroma.Object
|> where([o], fragment("? ->> 'context'::text", o.data) in subquery(deletable_context))
else
if Keyword.get(options, :keep_non_public) do
Pleroma.Object
|> where(
[o],
fragment(
"?->'to' \\? ? OR ?->'cc' \\? ?",
o.data,
^Pleroma.Constants.as_public(),
o.data,
^Pleroma.Constants.as_public()
)
)
else
Pleroma.Object
end
|> where([o], o.updated_at < ^time_deadline)
|> where(
[o],
fragment("split_part(?->>'actor', '/', 3) != ?", o.data, ^Pleroma.Web.Endpoint.host())
)
)
end
|> Repo.delete_all(timeout: :infinity)
prune_hashtags_query = """
@ -115,7 +174,6 @@ def run(["prune_task"]) do
nil
|> Pleroma.Workers.Cron.PruneDatabaseWorker.perform()
|> IO.inspect()
end
def run(["fix_likes_collections"]) do


@ -1,3 +1,4 @@
# credo:disable-for-this-file
defmodule Mix.Tasks.Pleroma.Diagnostics do
alias Pleroma.Repo
alias Pleroma.User
@ -9,6 +10,13 @@ defmodule Mix.Tasks.Pleroma.Diagnostics do
import Ecto.Query
use Mix.Task
def run(["http", url]) do
start_pleroma()
Pleroma.HTTP.get(url)
|> IO.inspect()
end
def run(["home_timeline", nickname]) do
start_pleroma()
user = Repo.get_by!(User, nickname: nickname)


@ -247,9 +247,13 @@ def run(["gen" | rest]) do
config_dir = Path.dirname(config_path)
psql_dir = Path.dirname(psql_path)
[config_dir, psql_dir, static_dir, uploads_dir]
|> Enum.reject(&File.exists?/1)
|> Enum.map(&File.mkdir_p!/1)
to_create =
[config_dir, psql_dir, static_dir, uploads_dir]
|> Enum.reject(&File.exists?/1)
for dir <- to_create do
File.mkdir_p!(dir)
end
shell_info("Writing config to #{config_path}.")
@ -319,6 +323,4 @@ defp upload_filters(filters) when is_map(filters) do
enabled_filters
end
defp upload_filters(_), do: []
end


@ -10,14 +10,11 @@ defmodule Mix.Tasks.Pleroma.Search do
def run(["import", "activities" | _rest]) do
start_pleroma()
IO.inspect(Pleroma.Config.get([Pleroma.Search.Elasticsearch.Cluster, :indexes, :activities]))
IO.inspect(
Elasticsearch.Index.Bulk.upload(
Pleroma.Search.Elasticsearch.Cluster,
"activities",
Pleroma.Config.get([Pleroma.Search.Elasticsearch.Cluster, :indexes, :activities])
)
Elasticsearch.Index.Bulk.upload(
Pleroma.Search.Elasticsearch.Cluster,
"activities",
Pleroma.Config.get([Pleroma.Search.Elasticsearch.Cluster, :indexes, :activities])
)
end
end


@ -378,9 +378,11 @@ def run(["change_email", nickname, email]) do
def run(["show", nickname]) do
start_pleroma()
nickname
|> User.get_cached_by_nickname()
|> IO.inspect()
user =
nickname
|> User.get_cached_by_nickname()
shell_info("#{inspect(user)}")
end
def run(["send_confirmation", nickname]) do
@ -389,7 +391,6 @@ def run(["send_confirmation", nickname]) do
with %User{} = user <- User.get_cached_by_nickname(nickname) do
user
|> Pleroma.Emails.UserEmail.account_confirmation_email()
|> IO.inspect()
|> Pleroma.Emails.Mailer.deliver!()
shell_info("#{nickname}'s email sent")
@ -465,7 +466,7 @@ def run(["blocking", nickname]) do
with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
blocks = User.following_ap_ids(user)
IO.inspect(blocks, limit: :infinity)
IO.puts("#{inspect(blocks)}")
end
end


@ -38,7 +38,11 @@ defp add_cache_key_for(activity_id, additional_key) do
def invalidate_cache_for(activity_id) do
keys = get_cache_keys_for(activity_id)
Enum.map(keys, &@cachex.del(:scrubber_cache, &1))
for key <- keys do
@cachex.del(:scrubber_cache, key)
end
@cachex.del(:scrubber_management_cache, activity_id)
end


@ -0,0 +1,109 @@
defmodule Pleroma.Akkoma.Translators.ArgosTranslate do
@behaviour Pleroma.Akkoma.Translator
alias Pleroma.Config
defp argos_translate do
Config.get([:argos_translate, :command_argos_translate])
end
defp argospm do
Config.get([:argos_translate, :command_argospm])
end
defp strip_html? do
Config.get([:argos_translate, :strip_html])
end
defp safe_languages() do
try do
System.cmd(argospm(), ["list"], stderr_to_stdout: true, parallelism: true)
rescue
_ -> {"Command #{argospm()} not found", 1}
end
end
@impl Pleroma.Akkoma.Translator
def languages do
with {response, 0} <- safe_languages() do
langs =
response
|> String.split("\n", trim: true)
|> Enum.map(fn
"translate-" <> l -> String.split(l, "_")
end)
source_langs =
langs
|> Enum.map(fn [l, _] -> %{code: l, name: l} end)
|> Enum.uniq()
dest_langs =
langs
|> Enum.map(fn [_, l] -> %{code: l, name: l} end)
|> Enum.uniq()
{:ok, source_langs, dest_langs}
else
{response, _} -> {:error, "ArgosTranslate failed to fetch languages (#{response})"}
end
end
defp safe_translate(string, from_language, to_language) do
try do
System.cmd(
argos_translate(),
["--from-lang", from_language, "--to-lang", to_language, string],
stderr_to_stdout: true,
parallelism: true
)
rescue
_ -> {"Command #{argos_translate()} not found", 1}
end
end
defp clean_string(string, true) do
string
|> String.replace("<p>", "\n")
|> String.replace("</p>", "\n")
|> String.replace("<br>", "\n")
|> String.replace("<br/>", "\n")
|> String.replace("<li>", "\n")
|> Pleroma.HTML.strip_tags()
|> HtmlEntities.decode()
end
defp clean_string(string, _), do: string
defp htmlify_response(string, true) do
string
|> HtmlEntities.encode()
|> String.replace("\n", "<br/>")
end
defp htmlify_response(string, _), do: string
@impl Pleroma.Akkoma.Translator
def translate(string, nil, to_language) do
# Akkoma's Pleroma-fe expects us to detect the source language automatically.
# Argos-translate doesn't have that option (yet?)
# see <https://github.com/argosopentech/argos-translate/issues/9>
# For now we return the text unchanged, supposedly translated from the target language.
# Afterwards people get the option to overwrite the source language from a dropdown.
{:ok, to_language, string}
end
def translate(string, from_language, to_language) do
# Argos Translate doesn't properly translate HTML (yet?)
# For now we give admins the option to strip the html before translating
# Note that we have to add some html back to the response afterwards
string = clean_string(string, strip_html?())
with {translated, 0} <-
safe_translate(string, from_language, to_language) do
{:ok, from_language, translated |> htmlify_response(strip_html?())}
else
{response, _} -> {:error, "ArgosTranslate failed to translate (#{response})"}
end
end
end


@ -24,8 +24,10 @@ defmodule Pleroma.Announcement do
end
def change(struct, params \\ %{}) do
params = validate_params(struct, params)
struct
|> cast(validate_params(struct, params), [:data, :starts_at, :ends_at, :rendered])
|> cast(params, [:data, :starts_at, :ends_at, :rendered])
|> validate_required([:data])
end


@ -73,7 +73,8 @@ def start(_type, _args) do
Pleroma.JobQueueMonitor,
{Majic.Pool, [name: Pleroma.MajicPool, pool_size: Config.get([:majic_pool, :size], 2)]},
{Oban, Config.get(Oban)},
Pleroma.Web.Endpoint
Pleroma.Web.Endpoint,
Pleroma.Web.Telemetry
] ++
elasticsearch_children() ++
task_children(@mix_env) ++
@ -158,7 +159,8 @@ defp cachex_children do
build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000),
build_cachex("translations", default_ttl: :timer.hours(24 * 30), limit: 2500),
build_cachex("instances", default_ttl: :timer.hours(24), ttl_interval: 1000, limit: 2500),
build_cachex("request_signatures", default_ttl: :timer.hours(24 * 30), limit: 3000)
build_cachex("request_signatures", default_ttl: :timer.hours(24 * 30), limit: 3000),
build_cachex("rel_me", default_ttl: :timer.hours(24 * 30), limit: 300)
]
end
@ -198,6 +200,8 @@ defp background_migrators do
]
end
@spec task_children(atom()) :: [map()]
defp task_children(:test) do
[
%{
@ -223,6 +227,7 @@ defp task_children(_) do
]
end
@spec elasticsearch_children :: [Pleroma.Search.Elasticsearch.Cluster]
def elasticsearch_children do
config = Config.get([Pleroma.Search, :module])
@ -255,10 +260,12 @@ def limiters_setup do
defp http_children do
proxy_url = Config.get([:http, :proxy_url])
proxy = Pleroma.HTTP.AdapterHelper.format_proxy(proxy_url)
pool_size = Config.get([:http, :pool_size])
config =
[:http, :adapter]
|> Config.get([])
|> Pleroma.HTTP.AdapterHelper.add_pool_size(pool_size)
|> Pleroma.HTTP.AdapterHelper.maybe_add_proxy_pool(proxy)
|> Keyword.put(:name, MyFinch)


@ -194,8 +194,6 @@ defp check_system_commands!(:ok) do
end
end
defp check_system_commands!(result), do: result
defp check_repo_pool_size!(:ok) do
if Pleroma.Config.get([Pleroma.Repo, :pool_size], 10) != 10 and
not Pleroma.Config.get([:dangerzone, :override_repo_pool_size], false) do


@ -181,7 +181,8 @@ def warn do
check_uploders_s3_public_endpoint(),
check_quarantined_instances_tuples(),
check_transparency_exclusions_tuples(),
check_simple_policy_tuples()
check_simple_policy_tuples(),
check_http_adapter()
]
|> Enum.reduce(:ok, fn
:ok, :ok -> :ok
@ -210,6 +211,32 @@ def check_welcome_message_config do
end
end
def check_http_adapter do
http_adapter = Application.get_env(:tesla, :adapter)
case http_adapter do
{Tesla.Adapter.Finch, _} ->
:ok
Tesla.Mock ->
# tests do be testing
:ok
_anything_else ->
Logger.error("""
!!!CONFIG ERROR!!!
Your config is using a custom tesla adapter, this was standardised
to finch in 2022.06, and alternate adapters were removed in 2023.02.
Please ensure you either:
\n* do not have any custom value for `:tesla, :adapter`, or
\n* have `config :tesla, :adapter, {Tesla.Adapter.Finch, name: MyFinch}`
(your current value is #{inspect(http_adapter)})
""")
:error
end
end
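
The accepted configuration is exactly what the error message suggests; in an .exs config file:

# The only supported adapter since 2023.02:
config :tesla, :adapter, {Tesla.Adapter.Finch, name: MyFinch}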
def check_old_mrf_config do
warning_preface = """
!!!DEPRECATION WARNING!!!

View file

@ -25,7 +25,9 @@ defp reboot_time_subkeys,
do: [
{:pleroma, Pleroma.Captcha, [:seconds_valid]},
{:pleroma, Pleroma.Upload, [:proxy_remote]},
{:pleroma, :instance, [:upload_limit]}
{:pleroma, :instance, [:upload_limit]},
{:pleroma, :http, [:pool_size]},
{:pleroma, :http, [:proxy_url]}
]
def start_link(restart_pleroma? \\ true) do
@ -40,8 +42,9 @@ def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do
# We need to restart applications for loaded settings take effect
{logger, other} =
(Repo.all(ConfigDB) ++ deleted_settings)
|> Enum.reject(&invalid_key_or_group/1)
|> Enum.map(&merge_with_default/1)
|> Enum.split_with(fn {group, _, _, _} -> group in [:logger, :quack] end)
|> Enum.split_with(fn {group, _, _, _} -> group == :logger end)
logger
|> Enum.sort()
@ -83,6 +86,10 @@ defp maybe_set_pleroma_last(apps) do
end
end
defp invalid_key_or_group(%ConfigDB{key: :invalid_atom}), do: true
defp invalid_key_or_group(%ConfigDB{group: :invalid_atom}), do: true
defp invalid_key_or_group(_), do: false
defp merge_with_default(%{group: group, key: key, value: value} = setting) do
default =
if group == :pleroma do
@ -101,12 +108,6 @@ defp merge_with_default(%{group: group, key: key, value: value} = setting) do
{group, key, value, merged}
end
# change logger configuration in runtime, without restart
defp configure({:quack, key, _, merged}) do
Logger.configure_backend(Quack.Logger, [{key, merged}])
:ok = update_env(:quack, key, merged)
end
defp configure({_, :backends, _, merged}) do
# removing current backends
Enum.each(Application.get_env(:logger, :backends), &Logger.remove_backend/1)

View file

@ -163,7 +163,6 @@ defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(
defp only_full_update?(%ConfigDB{group: group, key: key}) do
full_key_update = [
{:pleroma, :ecto_repos},
{:quack, :meta},
{:mime, :types},
{:cors_plug, [:max_age, :methods, :expose, :headers]},
{:swarm, :node_blacklist},
@ -343,7 +342,11 @@ def string_to_elixir_types(":" <> atom), do: String.to_atom(atom)
def string_to_elixir_types(value) do
if module_name?(value) do
String.to_existing_atom("Elixir." <> value)
try do
String.to_existing_atom("Elixir." <> value)
rescue
ArgumentError -> :invalid_atom
end
else
value
end
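
The rescue clause makes unknown module strings degrade to the :invalid_atom sentinel (filtered out by invalid_key_or_group/1 above) instead of raising at boot. A behaviour sketch:

ConfigDB.string_to_elixir_types(":verbose")       #=> :verbose
ConfigDB.string_to_elixir_types("Pleroma.Web")    #=> Pleroma.Web (module exists)
ConfigDB.string_to_elixir_types("No.Such.Module") #=> :invalid_atom (ArgumentError rescued)
ConfigDB.string_to_elixir_types("plain text")     #=> "plain text"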

View file

@ -35,11 +35,6 @@ def perform(:deliver_async, email, config), do: deliver(email, config)
def deliver(email, config \\ [])
def deliver(email, config) do
# temporary hackney fix until hackney max_connections bug is fixed
# https://git.pleroma.social/pleroma/pleroma/-/issues/2101
email =
Swoosh.Email.put_private(email, :hackney_options, ssl_options: [versions: [:"tlsv1.2"]])
case enabled?() do
true -> Swoosh.Mailer.deliver(email, parse_config(config))
false -> {:error, :deliveries_disabled}

View file

@ -121,7 +121,7 @@ def user_invitation_email(
"user invitation email body",
"""
<h3>You are invited to %{instance_name}</h3>
<p>%{inviter_name} invites you to join %{instance_name}, an instance of Pleroma federated social networking platform.</p>
<p>%{inviter_name} invites you to join %{instance_name}, an instance of the Akkoma federated social networking platform.</p>
<p>Click the following link to register: <a href="%{registration_url}">accept invitation</a>.</p>
""",
instance_name: instance_name(),
@ -357,7 +357,7 @@ def backup_is_ready_email(backup, admin_user_id \\ nil) do
"static_pages",
"account archive email body - self-requested",
"""
<p>You requested a full backup of your Pleroma account. It's ready for download:</p>
<p>You requested a full backup of your Akkoma account. It's ready for download:</p>
<p><a href="%{download_url}">%{download_url}</a></p>
""",
download_url: download_url
@ -369,7 +369,7 @@ def backup_is_ready_email(backup, admin_user_id \\ nil) do
"static_pages",
"account archive email body - admin requested",
"""
<p>Admin @%{admin_nickname} requested a full backup of your Pleroma account. It's ready for download:</p>
<p>Admin @%{admin_nickname} requested a full backup of your Akkoma account. It's ready for download:</p>
<p><a href="%{download_url}">%{download_url}</a></p>
""",
admin_nickname: admin.nickname,

View file

@ -209,7 +209,9 @@ def list_remote(opts) do
with :ok <- validate_shareable_packs_available(uri) do
uri
|> URI.merge("/api/pleroma/emoji/packs?page=#{opts[:page]}&page_size=#{opts[:page_size]}")
|> URI.merge(
"/api/v1/pleroma/emoji/packs?page=#{opts[:page]}&page_size=#{opts[:page_size]}"
)
|> http_get()
end
end
@ -250,7 +252,7 @@ def download(name, url, as) do
with :ok <- validate_shareable_packs_available(uri),
{:ok, remote_pack} <-
uri |> URI.merge("/api/pleroma/emoji/pack?name=#{name}") |> http_get(),
uri |> URI.merge("/api/v1/pleroma/emoji/pack?name=#{URI.encode(name)}") |> http_get(),
{:ok, %{sha: sha, url: url} = pack_info} <- fetch_pack_info(remote_pack, uri, name),
{:ok, archive} <- download_archive(url, sha),
pack <- copy_as(remote_pack, as || name),
@ -591,7 +593,9 @@ defp fetch_pack_info(remote_pack, uri, name) do
{:ok,
%{
sha: sha,
url: URI.merge(uri, "/api/pleroma/emoji/packs/archive?name=#{name}") |> to_string()
url:
URI.merge(uri, "/api/v1/pleroma/emoji/packs/archive?name=#{URI.encode(name)}")
|> to_string()
}}
%{"fallback-src" => src, "fallback-src-sha256" => sha} when is_binary(src) ->

View file

@ -14,6 +14,8 @@ defmodule Pleroma.FollowingRelationship do
alias Pleroma.Repo
alias Pleroma.User
@type follow_state :: :follow_pending | :follow_accept | :follow_reject | :unfollow
schema "following_relationships" do
field(:state, State, default: :follow_pending)
@ -72,6 +74,7 @@ def update(%User{} = follower, %User{} = following, state) do
end
end
@spec follow(User.t(), User.t()) :: {:ok, User.t(), User.t()} | {:error, any}
def follow(%User{} = follower, %User{} = following, state \\ :follow_accept) do
with {:ok, _following_relationship} <-
%__MODULE__{}
@ -81,6 +84,7 @@ def follow(%User{} = follower, %User{} = following, state \\ :follow_accept) do
end
end
@spec unfollow(User.t(), User.t()) :: {:ok, User.t(), User.t()} | {:error, any}
def unfollow(%User{} = follower, %User{} = following) do
case get(follower, following) do
%__MODULE__{} = following_relationship ->
@ -89,10 +93,12 @@ def unfollow(%User{} = follower, %User{} = following) do
end
_ ->
{:ok, nil}
{:ok, follower, following}
end
end
@spec after_update(follow_state(), User.t(), User.t()) ::
{:ok, User.t(), User.t()} | {:error, any()}
defp after_update(state, %User{} = follower, %User{} = following) do
with {:ok, following} <- User.update_follower_count(following),
{:ok, follower} <- User.update_following_count(follower) do
@ -103,6 +109,8 @@ defp after_update(state, %User{} = follower, %User{} = following) do
})
{:ok, follower, following}
else
err -> {:error, err}
end
end
@ -147,14 +155,13 @@ def following_count(%User{} = user) do
|> Repo.aggregate(:count, :id)
end
def get_follow_requests(%User{id: id}) do
def get_follow_requests_query(%User{id: id}) do
__MODULE__
|> join(:inner, [r], f in assoc(r, :follower))
|> join(:inner, [r], f in assoc(r, :follower), as: :follower)
|> where([r], r.state == ^:follow_pending)
|> where([r], r.following_id == ^id)
|> where([r, f], f.is_active == true)
|> select([r, f], f)
|> Repo.all()
|> where([r, follower: f], f.is_active == true)
|> select([r, follower: f], f)
end
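
Returning a composable query rather than a materialized list is what lets follow requests be paginated; a sketch of the intended call site, assuming the usual Pagination helper:

user
|> FollowingRelationship.get_follow_requests_query()
|> Pleroma.Pagination.fetch_paginated(params)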
def following?(%User{id: follower_id}, %User{id: followed_id}) do

View file

@ -93,7 +93,7 @@ defp download_build(frontend_info, dest) do
url = String.replace(frontend_info["build_url"], "${ref}", frontend_info["ref"])
with {:ok, %{status: 200, body: zip_body}} <-
Pleroma.HTTP.get(url, [], recv_timeout: 120_000) do
Pleroma.HTTP.get(url, [], receive_timeout: 120_000) do
unzip(zip_body, dest)
else
{:error, e} -> {:error, e}

View file

@ -1,29 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Gun do
@callback open(charlist(), pos_integer(), map()) :: {:ok, pid()}
@callback info(pid()) :: map()
@callback close(pid()) :: :ok
@callback await_up(pid, pos_integer()) :: {:ok, atom()} | {:error, atom()}
@callback connect(pid(), map()) :: reference()
@callback await(pid(), reference()) :: {:response, :fin, 200, []}
@callback set_owner(pid(), pid()) :: :ok
defp api, do: Pleroma.Config.get([Pleroma.Gun], Pleroma.Gun.API)
def open(host, port, opts), do: api().open(host, port, opts)
def info(pid), do: api().info(pid)
def close(pid), do: api().close(pid)
def await_up(pid, timeout \\ 5_000), do: api().await_up(pid, timeout)
def connect(pid, opts), do: api().connect(pid, opts)
def await(pid, ref), do: api().await(pid, ref)
def set_owner(pid, owner), do: api().set_owner(pid, owner)
end

View file

@ -1,46 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Gun.API do
@behaviour Pleroma.Gun
alias Pleroma.Gun
@gun_keys [
:connect_timeout,
:http_opts,
:http2_opts,
:protocols,
:retry,
:retry_timeout,
:trace,
:transport,
:tls_opts,
:tcp_opts,
:socks_opts,
:ws_opts,
:supervise
]
@impl Gun
def open(host, port, opts \\ %{}), do: :gun.open(host, port, Map.take(opts, @gun_keys))
@impl Gun
defdelegate info(pid), to: :gun
@impl Gun
defdelegate close(pid), to: :gun
@impl Gun
defdelegate await_up(pid, timeout \\ 5_000), to: :gun
@impl Gun
defdelegate connect(pid, opts), to: :gun
@impl Gun
defdelegate await(pid, ref), to: :gun
@impl Gun
defdelegate set_owner(pid, owner), to: :gun
end

View file

@ -1,131 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Gun.Conn do
alias Pleroma.Gun
require Logger
def open(%URI{} = uri, opts) do
pool_opts = Pleroma.Config.get([:connections_pool], [])
opts =
opts
|> Enum.into(%{})
|> Map.put_new(:connect_timeout, pool_opts[:connect_timeout] || 5_000)
|> Map.put_new(:supervise, false)
|> maybe_add_tls_opts(uri)
do_open(uri, opts)
end
defp maybe_add_tls_opts(opts, %URI{scheme: "http"}), do: opts
defp maybe_add_tls_opts(opts, %URI{scheme: "https"}) do
tls_opts = [
verify: :verify_peer,
cacertfile: CAStore.file_path(),
depth: 20,
reuse_sessions: false,
log_level: :warning,
customize_hostname_check: [match_fun: :public_key.pkix_verify_hostname_match_fun(:https)]
]
tls_opts =
if Keyword.keyword?(opts[:tls_opts]) do
Keyword.merge(tls_opts, opts[:tls_opts])
else
tls_opts
end
Map.put(opts, :tls_opts, tls_opts)
end
defp do_open(uri, %{proxy: {proxy_host, proxy_port}} = opts) do
connect_opts =
uri
|> destination_opts()
|> add_http2_opts(uri.scheme, Map.get(opts, :tls_opts, []))
with open_opts <- Map.delete(opts, :tls_opts),
{:ok, conn} <- Gun.open(proxy_host, proxy_port, open_opts),
{:ok, protocol} <- Gun.await_up(conn, opts[:connect_timeout]),
stream <- Gun.connect(conn, connect_opts),
{:response, :fin, 200, _} <- Gun.await(conn, stream) do
{:ok, conn, protocol}
else
error ->
Logger.warn(
"Opening proxied connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
)
error
end
end
defp do_open(uri, %{proxy: {proxy_type, proxy_host, proxy_port}} = opts) do
version =
proxy_type
|> to_string()
|> String.last()
|> case do
"4" -> 4
_ -> 5
end
socks_opts =
uri
|> destination_opts()
|> add_http2_opts(uri.scheme, Map.get(opts, :tls_opts, []))
|> Map.put(:version, version)
opts =
opts
|> Map.put(:protocols, [:socks])
|> Map.put(:socks_opts, socks_opts)
with {:ok, conn} <- Gun.open(proxy_host, proxy_port, opts),
{:ok, protocol} <- Gun.await_up(conn, opts[:connect_timeout]) do
{:ok, conn, protocol}
else
error ->
Logger.warn(
"Opening socks proxied connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
)
error
end
end
defp do_open(%URI{host: host, port: port} = uri, opts) do
host = Pleroma.HTTP.AdapterHelper.parse_host(host)
with {:ok, conn} <- Gun.open(host, port, opts),
{:ok, protocol} <- Gun.await_up(conn, opts[:connect_timeout]) do
{:ok, conn, protocol}
else
error ->
Logger.warn(
"Opening connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
)
error
end
end
defp destination_opts(%URI{host: host, port: port}) do
host = Pleroma.HTTP.AdapterHelper.parse_host(host)
%{host: host, port: port}
end
defp add_http2_opts(opts, "https", tls_opts) do
Map.merge(opts, %{protocols: [:http2], transport: :tls, tls_opts: tls_opts})
end
defp add_http2_opts(opts, _, _), do: opts
def compose_uri_log(%URI{scheme: scheme, host: host, path: path}) do
"#{scheme}://#{host}#{path}"
end
end

View file

@ -1,86 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Gun.ConnectionPool do
@registry __MODULE__
alias Pleroma.Gun.ConnectionPool.WorkerSupervisor
def children do
[
{Registry, keys: :unique, name: @registry},
Pleroma.Gun.ConnectionPool.WorkerSupervisor
]
end
@spec get_conn(URI.t(), keyword()) :: {:ok, pid()} | {:error, term()}
def get_conn(uri, opts) do
key = "#{uri.scheme}:#{uri.host}:#{uri.port}"
case Registry.lookup(@registry, key) do
# The key has already been registered, but connection is not up yet
[{worker_pid, nil}] ->
get_gun_pid_from_worker(worker_pid, true)
[{worker_pid, {gun_pid, _used_by, _crf, _last_reference}}] ->
GenServer.call(worker_pid, :add_client)
{:ok, gun_pid}
[] ->
# :gun.set_owner fails in :connected state for whatever reason,
# so we open the connection in the process directly and send its pid back
# We trust gun to handle timeouts by itself
case WorkerSupervisor.start_worker([key, uri, opts, self()]) do
{:ok, worker_pid} ->
get_gun_pid_from_worker(worker_pid, false)
{:error, {:already_started, worker_pid}} ->
get_gun_pid_from_worker(worker_pid, true)
err ->
err
end
end
end
defp get_gun_pid_from_worker(worker_pid, register) do
# GenServer.call will block the process for timeout length if
# the server crashes on startup (which will happen if gun fails to connect)
# so instead we use cast + monitor
ref = Process.monitor(worker_pid)
if register, do: GenServer.cast(worker_pid, {:add_client, self()})
receive do
{:conn_pid, pid} ->
Process.demonitor(ref)
{:ok, pid}
{:DOWN, ^ref, :process, ^worker_pid, reason} ->
case reason do
{:shutdown, {:error, _} = error} -> error
{:shutdown, error} -> {:error, error}
_ -> {:error, reason}
end
end
end
@spec release_conn(pid()) :: :ok
def release_conn(conn_pid) do
# :ets.fun2ms(fn {_, {worker_pid, {gun_pid, _, _, _}}} when gun_pid == conn_pid ->
# worker_pid end)
query_result =
Registry.select(@registry, [
{{:_, :"$1", {:"$2", :_, :_, :_}}, [{:==, :"$2", conn_pid}], [:"$1"]}
])
case query_result do
[worker_pid] ->
GenServer.call(worker_pid, :remove_client)
[] ->
:ok
end
end
end

View file

@ -1,89 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Gun.ConnectionPool.Reclaimer do
use GenServer, restart: :temporary
defp registry, do: Pleroma.Gun.ConnectionPool
def start_monitor do
pid =
case :gen_server.start(__MODULE__, [], name: {:via, Registry, {registry(), "reclaimer"}}) do
{:ok, pid} ->
pid
{:error, {:already_registered, pid}} ->
pid
end
{pid, Process.monitor(pid)}
end
@impl true
def init(_) do
{:ok, nil, {:continue, :reclaim}}
end
@impl true
def handle_continue(:reclaim, _) do
max_connections = Pleroma.Config.get([:connections_pool, :max_connections])
reclaim_max =
[:connections_pool, :reclaim_multiplier]
|> Pleroma.Config.get()
|> Kernel.*(max_connections)
|> round
|> max(1)
:telemetry.execute([:pleroma, :connection_pool, :reclaim, :start], %{}, %{
max_connections: max_connections,
reclaim_max: reclaim_max
})
# :ets.fun2ms(
# fn {_, {worker_pid, {_, used_by, crf, last_reference}}} when used_by == [] ->
# {worker_pid, crf, last_reference} end)
unused_conns =
Registry.select(
registry(),
[
{{:_, :"$1", {:_, :"$2", :"$3", :"$4"}}, [{:==, :"$2", []}], [{{:"$1", :"$3", :"$4"}}]}
]
)
case unused_conns do
[] ->
:telemetry.execute(
[:pleroma, :connection_pool, :reclaim, :stop],
%{reclaimed_count: 0},
%{
max_connections: max_connections
}
)
{:stop, :no_unused_conns, nil}
unused_conns ->
reclaimed =
unused_conns
|> Enum.sort(fn {_pid1, crf1, last_reference1}, {_pid2, crf2, last_reference2} ->
crf1 <= crf2 and last_reference1 <= last_reference2
end)
|> Enum.take(reclaim_max)
reclaimed
|> Enum.each(fn {pid, _, _} ->
DynamicSupervisor.terminate_child(Pleroma.Gun.ConnectionPool.WorkerSupervisor, pid)
end)
:telemetry.execute(
[:pleroma, :connection_pool, :reclaim, :stop],
%{reclaimed_count: Enum.count(reclaimed)},
%{max_connections: max_connections}
)
{:stop, :normal, nil}
end
end
end

View file

@ -1,153 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Gun.ConnectionPool.Worker do
alias Pleroma.Gun
use GenServer, restart: :temporary
defp registry, do: Pleroma.Gun.ConnectionPool
def start_link([key | _] = opts) do
GenServer.start_link(__MODULE__, opts, name: {:via, Registry, {registry(), key}})
end
@impl true
def init([_key, _uri, _opts, _client_pid] = opts) do
{:ok, nil, {:continue, {:connect, opts}}}
end
@impl true
def handle_continue({:connect, [key, uri, opts, client_pid]}, _) do
with {:ok, conn_pid, protocol} <- Gun.Conn.open(uri, opts),
Process.link(conn_pid) do
time = :erlang.monotonic_time(:millisecond)
{_, _} =
Registry.update_value(registry(), key, fn _ ->
{conn_pid, [client_pid], 1, time}
end)
send(client_pid, {:conn_pid, conn_pid})
{:noreply,
%{
key: key,
timer: nil,
client_monitors: %{client_pid => Process.monitor(client_pid)},
protocol: protocol
}, :hibernate}
else
err ->
{:stop, {:shutdown, err}, nil}
end
end
@impl true
def handle_cast({:add_client, client_pid}, state) do
case handle_call(:add_client, {client_pid, nil}, state) do
{:reply, conn_pid, state, :hibernate} ->
send(client_pid, {:conn_pid, conn_pid})
{:noreply, state, :hibernate}
end
end
@impl true
def handle_cast({:remove_client, client_pid}, state) do
case handle_call(:remove_client, {client_pid, nil}, state) do
{:reply, _, state, :hibernate} ->
{:noreply, state, :hibernate}
end
end
@impl true
def handle_call(:add_client, {client_pid, _}, %{key: key, protocol: protocol} = state) do
time = :erlang.monotonic_time(:millisecond)
{{conn_pid, used_by, _, _}, _} =
Registry.update_value(registry(), key, fn {conn_pid, used_by, crf, last_reference} ->
{conn_pid, [client_pid | used_by], crf(time - last_reference, crf), time}
end)
:telemetry.execute(
[:pleroma, :connection_pool, :client, :add],
%{client_pid: client_pid, clients: used_by},
%{key: state.key, protocol: protocol}
)
state =
if state.timer != nil do
Process.cancel_timer(state[:timer])
%{state | timer: nil}
else
state
end
ref = Process.monitor(client_pid)
state = put_in(state.client_monitors[client_pid], ref)
{:reply, conn_pid, state, :hibernate}
end
@impl true
def handle_call(:remove_client, {client_pid, _}, %{key: key} = state) do
{{_conn_pid, used_by, _crf, _last_reference}, _} =
Registry.update_value(registry(), key, fn {conn_pid, used_by, crf, last_reference} ->
{conn_pid, List.delete(used_by, client_pid), crf, last_reference}
end)
{ref, state} = pop_in(state.client_monitors[client_pid])
Process.demonitor(ref, [:flush])
timer =
if used_by == [] do
max_idle = Pleroma.Config.get([:connections_pool, :max_idle_time], 30_000)
Process.send_after(self(), :idle_close, max_idle)
else
nil
end
{:reply, :ok, %{state | timer: timer}, :hibernate}
end
@impl true
def handle_info(:idle_close, state) do
# Gun monitors the owner process, and will close the connection automatically
# when it's terminated
{:stop, :normal, state}
end
@impl true
def handle_info({:gun_up, _pid, _protocol}, state) do
{:noreply, state, :hibernate}
end
# Gracefully shutdown if the connection got closed without any streams left
@impl true
def handle_info({:gun_down, _pid, _protocol, _reason, []}, state) do
{:stop, :normal, state}
end
# Otherwise, wait for retry
@impl true
def handle_info({:gun_down, _pid, _protocol, _reason, _killed_streams}, state) do
{:noreply, state, :hibernate}
end
@impl true
def handle_info({:DOWN, _ref, :process, pid, reason}, state) do
:telemetry.execute(
[:pleroma, :connection_pool, :client, :dead],
%{client_pid: pid, reason: reason},
%{key: state.key}
)
handle_cast({:remove_client, pid}, state)
end
# LRFU policy: https://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.55.1478
defp crf(time_delta, prev_crf) do
1 + :math.pow(0.5, 0.0001 * time_delta) * prev_crf
end
end

View file

@ -1,49 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Gun.ConnectionPool.WorkerSupervisor do
@moduledoc "Supervisor for pool workers. Does not do anything except enforce max connection limit"
use DynamicSupervisor
def start_link(opts) do
DynamicSupervisor.start_link(__MODULE__, opts, name: __MODULE__)
end
def init(_opts) do
DynamicSupervisor.init(
strategy: :one_for_one,
max_children: Pleroma.Config.get([:connections_pool, :max_connections])
)
end
def start_worker(opts, retry \\ false) do
case DynamicSupervisor.start_child(__MODULE__, {Pleroma.Gun.ConnectionPool.Worker, opts}) do
{:error, :max_children} ->
if retry or free_pool() == :error do
:telemetry.execute([:pleroma, :connection_pool, :provision_failure], %{opts: opts})
{:error, :pool_full}
else
start_worker(opts, true)
end
res ->
res
end
end
defp free_pool do
wait_for_reclaimer_finish(Pleroma.Gun.ConnectionPool.Reclaimer.start_monitor())
end
defp wait_for_reclaimer_finish({pid, mon}) do
receive do
{:DOWN, ^mon, :process, ^pid, :no_unused_conns} ->
:error
{:DOWN, ^mon, :process, ^pid, :normal} ->
:ok
end
end
end

View file

@ -104,10 +104,10 @@ defp run_fifo(fifo_path, env, executable, args) do
args: args
])
fifo = Port.open(to_charlist(fifo_path), [:eof, :binary, :stream, :out])
fifo = File.open!(fifo_path, [:append, :binary])
fix = Pleroma.Helpers.QtFastStart.fix(env.body)
true = Port.command(fifo, fix)
:erlang.port_close(fifo)
IO.binwrite(fifo, fix)
File.close(fifo)
loop_recv(pid)
after
File.rm(fifo_path)

View file

@ -65,7 +65,7 @@ def request(method, url, body, headers, options) when is_binary(url) do
options = put_in(options[:adapter], adapter_opts)
params = options[:params] || []
request = build_request(method, headers, options, url, body, params)
client = Tesla.client([Tesla.Middleware.FollowRedirects])
client = Tesla.client([Tesla.Middleware.FollowRedirects, Tesla.Middleware.Telemetry])
request(client, request)
end

View file

@ -14,9 +14,7 @@ defmodule Pleroma.HTTP.AdapterHelper do
alias Pleroma.HTTP.AdapterHelper
require Logger
@type proxy ::
{Connection.host(), pos_integer()}
| {Connection.proxy_type(), Connection.host(), pos_integer()}
@type proxy :: {Connection.proxy_type(), Connection.host(), pos_integer(), list()}
@callback options(keyword(), URI.t()) :: keyword()
@ -25,7 +23,6 @@ def format_proxy(nil), do: nil
def format_proxy(proxy_url) do
case parse_proxy(proxy_url) do
{:ok, host, port} -> {:http, host, port, []}
{:ok, type, host, port} -> {type, host, port, []}
_ -> nil
end
@ -50,6 +47,13 @@ def maybe_add_proxy_pool(opts, proxy) do
|> put_in([:pools, :default, :conn_opts, :proxy], proxy)
end
def add_pool_size(opts, pool_size) do
opts
|> maybe_add_pools()
|> maybe_add_default_pool()
|> put_in([:pools, :default, :size], pool_size)
end
defp maybe_add_pools(opts) do
if Keyword.has_key?(opts, :pools) do
opts
@ -94,8 +98,7 @@ defp proxy_type("https"), do: {:ok, :https}
defp proxy_type(_), do: {:error, :unknown}
@spec parse_proxy(String.t() | tuple() | nil) ::
{:ok, host(), pos_integer()}
| {:ok, proxy_type(), host(), pos_integer()}
{:ok, proxy_type(), host(), pos_integer()}
| {:error, atom()}
| nil
def parse_proxy(nil), do: nil

11
lib/pleroma/iso639.ex Normal file
View file

@ -0,0 +1,11 @@
defmodule Pleroma.ISO639 do
@file "priv/language-codes.json"
@data File.read!(@file)
|> Jason.decode!()
for %{"alpha2" => alpha2} <- @data do
def valid_alpha2?(unquote(alpha2)), do: true
end
def valid_alpha2?(_alpha2), do: false
end
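
One function clause is generated per alpha-2 code at compile time, so lookups are plain pattern matches:

Pleroma.ISO639.valid_alpha2?("en") #=> true, assuming "en" is in priv/language-codes.json
Pleroma.ISO639.valid_alpha2?("q9") #=> false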

View file

@ -14,7 +14,7 @@ defmodule Pleroma.Migrators.Support.BaseMigrator do
@callback fault_rate_allowance() :: integer() | float()
defmacro __using__(_opts) do
quote do
quote generated: true do
use GenServer
require Logger

View file

@ -237,7 +237,8 @@ def insert_log(%{actor: %User{}, action: action, target: target} = attrs)
insert_log_entry_with_message(%ModerationLog{data: data})
end
@spec insert_log_entry_with_message(ModerationLog) :: {:ok, ModerationLog} | {:error, any}
@spec insert_log_entry_with_message(ModerationLog.t()) ::
{:ok, ModerationLog.t()} | {:error, any}
defp insert_log_entry_with_message(entry) do
entry.data["message"]
|> put_in(get_log_entry_message(entry))

View file

@ -240,7 +240,7 @@ def delete(%Object{data: %{"id" => id}} = object) do
{:ok, _} <- invalid_object_cache(object) do
cleanup_attachments(
Config.get([:instance, :cleanup_attachments]),
%{"object" => object}
%{object: object}
)
{:ok, object, deleted_activity}
@ -249,7 +249,7 @@ def delete(%Object{data: %{"id" => id}} = object) do
@spec cleanup_attachments(boolean(), %{required(:object) => map()}) ::
{:ok, Oban.Job.t() | nil}
def cleanup_attachments(true, %{"object" => _} = params) do
def cleanup_attachments(true, %{object: _} = params) do
AttachmentsCleanupWorker.enqueue("cleanup_attachments", params)
end

View file

@ -262,7 +262,7 @@ def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do
def fetch_and_contain_remote_object_from_id(_id),
do: {:error, "id must be a string"}
defp get_object(id) do
def get_object(id) do
date = Pleroma.Signature.signed_date()
headers =
@ -282,6 +282,11 @@ defp get_object(id) do
%{"profile" => "https://www.w3.org/ns/activitystreams"}} ->
{:ok, body}
# pixelfed sometimes (and only sometimes) responds with http instead of https
{:ok, "application", "ld+json",
%{"profile" => "http://www.w3.org/ns/activitystreams"}} ->
{:ok, body}
_ ->
{:error, {:content_type, content_type}}
end

View file

@ -88,9 +88,9 @@ def paginate(query, options, :offset, table_binding) do
defp cast_params(params) do
param_types = %{
min_id: :string,
since_id: :string,
max_id: :string,
min_id: params[:id_type] || :string,
since_id: params[:id_type] || :string,
max_id: params[:id_type] || :string,
offset: :integer,
limit: :integer,
skip_extra_order: :boolean,

55
lib/pleroma/password.ex Normal file
View file

@ -0,0 +1,55 @@
defmodule Pleroma.Password do
@moduledoc """
This module handles password hashing and verification.
It will delegate to the appropriate module based on the password hash.
It also handles upgrading of password hashes.
"""
alias Pleroma.User
alias Pleroma.Password.Pbkdf2
require Logger
@hashing_module Argon2
@spec hash_pwd_salt(String.t()) :: String.t()
defdelegate hash_pwd_salt(pass), to: @hashing_module
@spec checkpw(String.t(), String.t()) :: boolean()
def checkpw(password, "$2" <> _ = password_hash) do
# Handle bcrypt passwords for Mastodon migration
Bcrypt.verify_pass(password, password_hash)
end
def checkpw(password, "$pbkdf2" <> _ = password_hash) do
Pbkdf2.verify_pass(password, password_hash)
end
def checkpw(password, "$argon2" <> _ = password_hash) do
Argon2.verify_pass(password, password_hash)
end
def checkpw(_password, _password_hash) do
Logger.error("Password hash not recognized")
false
end
@spec maybe_update_password(User.t(), String.t()) ::
{:ok, User.t()} | {:error, Ecto.Changeset.t()}
def maybe_update_password(%User{password_hash: "$2" <> _} = user, password) do
do_update_password(user, password)
end
def maybe_update_password(%User{password_hash: "$6" <> _} = user, password) do
do_update_password(user, password)
end
def maybe_update_password(%User{password_hash: "$pbkdf2" <> _} = user, password) do
do_update_password(user, password)
end
def maybe_update_password(user, _), do: {:ok, user}
defp do_update_password(user, password) do
User.reset_password(user, %{password: password, password_confirmation: password})
end
end
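
Verification dispatches on the hash prefix, while maybe_update_password/2 is there to upgrade $2/$6/$pbkdf2 hashes to Argon2 (its call site is not shown in this hunk). A sketch with abbreviated, illustrative hashes:

Pleroma.Password.checkpw(pw, "$2b$12$…")     # bcrypt, e.g. Mastodon imports
Pleroma.Password.checkpw(pw, "$pbkdf2…")     # legacy Pleroma hashes
Pleroma.Password.checkpw(pw, "$argon2id$…")  # current default
Pleroma.Password.hash_pwd_salt(pw)           # always produces an "$argon2id$…" hash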

View file

@ -0,0 +1,49 @@
defmodule Pleroma.PrometheusExporter do
@moduledoc """
Exports metrics in Prometheus format.
Mostly exists because of https://github.com/beam-telemetry/telemetry_metrics_prometheus_core/issues/52
We need to scrape metrics every so often, or the library will let them pile up and eventually crash the VM.
It also acts as a cache for the most recent scrape.
"""
use GenServer
require Logger
def start_link(_opts) do
GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
end
def init(_opts) do
schedule_next()
{:ok, ""}
end
defp schedule_next do
Process.send_after(self(), :gather, 60_000)
end
# Scheduled function, gather metrics and schedule next run
def handle_info(:gather, _state) do
schedule_next()
state = TelemetryMetricsPrometheus.Core.scrape()
{:noreply, state}
end
# Trigger the call dynamically, mostly for testing
def handle_call(:gather, _from, _state) do
state = TelemetryMetricsPrometheus.Core.scrape()
{:reply, state, state}
end
def handle_call(:show, _from, state) do
{:reply, state, state}
end
def show do
GenServer.call(__MODULE__, :show)
end
def gather do
GenServer.call(__MODULE__, :gather)
end
end
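
Both entry points return the rendered Prometheus text format; show/0 reads the cached scrape, gather/0 forces one:

metrics = Pleroma.PrometheusExporter.show()    # cached, refreshed every 60s
metrics = Pleroma.PrometheusExporter.gather()  # immediate scrape, mostly for tests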

View file

@ -61,9 +61,6 @@ def create do
IO.puts("The database for #{inspect(@repo)} has already been created")
{:error, term} when is_binary(term) ->
IO.puts(:stderr, "The database for #{inspect(@repo)} couldn't be created: #{term}")
{:error, term} ->
IO.puts(
:stderr,
"The database for #{inspect(@repo)} couldn't be created: #{inspect(term)}"

View file

@ -1,25 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReverseProxy.Client.Hackney do
@behaviour Pleroma.ReverseProxy.Client
@impl true
def request(method, url, headers, body, opts \\ []) do
opts = Keyword.put(opts, :ssl_options, versions: [:"tlsv1.2", :"tlsv1.1", :tlsv1])
:hackney.request(method, url, headers, body, opts)
end
@impl true
def stream_body(ref) do
case :hackney.stream_body(ref) do
:done -> :done
{:ok, data} -> {:ok, data, ref}
{:error, error} -> {:error, error}
end
end
@impl true
def close(ref), do: :hackney.close(ref)
end

View file

@ -5,8 +5,6 @@
defmodule Pleroma.ReverseProxy.Client.Tesla do
@behaviour Pleroma.ReverseProxy.Client
alias Pleroma.Gun.ConnectionPool
@type headers() :: [{String.t(), String.t()}]
@type status() :: pos_integer()
@ -33,8 +31,6 @@ def request(method, url, headers, body, opts \\ []) do
if is_map(response.body) and method != :head do
{:ok, response.status, response.headers, response.body}
else
conn_pid = response.opts[:adapter][:conn]
ConnectionPool.release_conn(conn_pid)
{:ok, response.status, response.headers}
end
else
@ -45,8 +41,7 @@ def request(method, url, headers, body, opts \\ []) do
@impl true
@spec stream_body(map()) ::
{:ok, binary(), map()} | {:error, atom() | String.t()} | :done | no_return()
def stream_body(%{pid: pid, fin: true}) do
ConnectionPool.release_conn(pid)
def stream_body(%{pid: _pid, fin: true}) do
:done
end
@ -70,17 +65,13 @@ defp read_chunk!(%{pid: pid, stream: stream, opts: opts}) do
@impl true
@spec close(map) :: :ok | no_return()
def close(%{pid: pid}) do
ConnectionPool.release_conn(pid)
def close(%{pid: _pid}) do
:ok
end
defp check_adapter do
adapter = Application.get_env(:tesla, :adapter)
unless adapter == Tesla.Adapter.Gun do
raise "#{adapter} doesn't support reading body in chunks"
end
adapter
end
end

View file

@ -23,8 +23,6 @@ defp client do
|> client()
end
defp client(Tesla.Adapter.Hackney), do: Pleroma.ReverseProxy.Client.Hackney
defp client(Tesla.Adapter.Gun), do: Pleroma.ReverseProxy.Client.Tesla
defp client({Tesla.Adapter.Finch, _}), do: Pleroma.ReverseProxy.Client.Hackney
defp client({Tesla.Adapter.Finch, _}), do: Pleroma.ReverseProxy.Client.Tesla
defp client(_), do: Pleroma.Config.get!(Pleroma.ReverseProxy.Client)
end

View file

@ -13,25 +13,21 @@ defmodule Pleroma.Search.Elasticsearch do
def es_query(:activity, query, offset, limit) do
must = Parsers.Activity.parse(query)
if must == [] do
:skip
else
%{
size: limit,
from: offset,
terminate_after: 50,
timeout: "5s",
sort: [
"_score",
%{"_timestamp" => %{order: "desc", format: "basic_date_time"}}
],
query: %{
bool: %{
must: must
}
%{
size: limit,
from: offset,
terminate_after: 50,
timeout: "5s",
sort: [
"_score",
%{"_timestamp" => %{order: "desc", format: "basic_date_time"}}
],
query: %{
bool: %{
must: must
}
}
end
}
end
defp maybe_fetch(:activity, search_query) do

View file

@ -57,5 +57,5 @@ def encode(activity) do
defimpl Elasticsearch.Document, for: Pleroma.Object do
def id(obj), do: obj.id
def routing(_), do: false
def encode(_), do: nil
def encode(_), do: %{}
end

View file

@ -154,10 +154,11 @@ def add_to_index(activity) do
with {:ok, res} <- result,
true <- Map.has_key?(res, "taskUid") do
# Do nothing
{:ok, res}
else
_ ->
err ->
Logger.error("Failed to add activity #{activity.id} to index: #{inspect(result)}")
{:error, err}
end
end
end

View file

@ -4,7 +4,7 @@ defmodule Pleroma.Search.SearchBackend do
The whole activity is passed, to allow filtering on things such as scope.
"""
@callback add_to_index(activity :: Pleroma.Activity.t()) :: nil
@callback add_to_index(activity :: Pleroma.Activity.t()) :: {:ok, any()} | {:error, any()}
@doc """
Remove the object from the index.
@ -13,5 +13,5 @@ defmodule Pleroma.Search.SearchBackend do
is what contains the actual content and there is no need for filtering when removing
from index.
"""
@callback remove_from_index(object :: Pleroma.Object.t()) :: nil
@callback remove_from_index(object :: Pleroma.Object.t()) :: {:ok, any()} | {:error, any()}
end

View file

@ -27,7 +27,7 @@ def key_id_to_actor_id(key_id) do
_ ->
case Pleroma.Web.WebFinger.finger(maybe_ap_id) do
%{"ap_id" => ap_id} -> {:ok, ap_id}
{:ok, %{"ap_id" => ap_id}} -> {:ok, ap_id}
_ -> {:error, maybe_ap_id}
end
end

View file

@ -11,7 +11,7 @@ defmodule Pleroma.Stats do
alias Pleroma.Repo
alias Pleroma.User
@interval :timer.seconds(60)
@interval :timer.seconds(300)
def start_link(_) do
GenServer.start_link(
@ -85,14 +85,24 @@ def calculate_stat_data do
where: not u.invisible
)
remote_users_query =
from(u in User,
where: u.is_active == true,
where: u.local == false,
where: not is_nil(u.nickname),
where: not u.invisible
)
user_count = Repo.aggregate(users_query, :count, :id)
remote_user_count = Repo.aggregate(remote_users_query, :count, :id)
%{
peers: peers,
stats: %{
domain_count: domain_count,
status_count: status_count || 0,
user_count: user_count
user_count: user_count,
remote_user_count: remote_user_count
}
}
end

View file

@ -1,50 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Tesla.Middleware.ConnectionPool do
@moduledoc """
Middleware to get/release connections from `Pleroma.Gun.ConnectionPool`
"""
@behaviour Tesla.Middleware
alias Pleroma.Gun.ConnectionPool
@impl Tesla.Middleware
def call(%Tesla.Env{url: url, opts: opts} = env, next, _) do
uri = URI.parse(url)
# Avoid leaking connections when the middleware is called twice
# with body_as: :chunks. We assume only the middleware can set
# opts[:adapter][:conn]
if opts[:adapter][:conn] do
ConnectionPool.release_conn(opts[:adapter][:conn])
end
case ConnectionPool.get_conn(uri, opts[:adapter]) do
{:ok, conn_pid} ->
adapter_opts = Keyword.merge(opts[:adapter], conn: conn_pid, close_conn: false)
opts = Keyword.put(opts, :adapter, adapter_opts)
env = %{env | opts: opts}
case Tesla.run(env, next) do
{:ok, env} ->
unless opts[:adapter][:body_as] == :chunks do
ConnectionPool.release_conn(conn_pid)
{_, res} = pop_in(env.opts[:adapter][:conn])
{:ok, res}
else
{:ok, env}
end
err ->
ConnectionPool.release_conn(conn_pid)
err
end
err ->
err
end
end
end

View file

@ -162,7 +162,7 @@ defp prepare_upload(%Plug.Upload{} = file, opts) do
defp prepare_upload(%{img: "data:image/" <> image_data}, opts) do
parsed = Regex.named_captures(~r/(?<filetype>jpeg|png|gif);base64,(?<data>.*)/, image_data)
data = Base.decode64!(parsed["data"], ignore: :whitespace)
hash = Base.encode16(:crypto.hash(:sha256, data), lower: true)
hash = Base.encode16(:crypto.hash(:sha256, data), case: :lower)
with :ok <- check_binary_size(data, opts.size_limit),
tmp_path <- tempfile_for_image(data),

View file

@ -77,7 +77,6 @@ defp media_dimensions(file) do
%{width: width, height: height}
else
nil -> {:error, {:ffprobe, :command_not_found}}
{:error, _} = error -> error
end
end
end

View file

@ -9,11 +9,13 @@ defmodule Pleroma.Upload.Filter.Exiftool do
"""
@behaviour Pleroma.Upload.Filter
@spec filter(Pleroma.Upload.t()) :: {:ok, any()} | {:error, String.t()}
@spec filter(Pleroma.Upload.t()) :: {:ok, :noop} | {:ok, :filtered} | {:error, String.t()}
# Formats not compatible with exiftool at this time
def filter(%Pleroma.Upload{content_type: "image/heic"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{content_type: "image/webp"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{content_type: "image/svg+xml"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{content_type: "image/jxl"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
try do

View file

@ -38,7 +38,7 @@ defmodule Pleroma.Upload.Filter.Mogrifun do
[{"fill", "yellow"}, {"tint", "40"}]
]
@spec filter(Pleroma.Upload.t()) :: {:ok, atom()} | {:error, String.t()}
@spec filter(Pleroma.Upload.t()) :: {:ok, :filtered | :noop} | {:error, String.t()}
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
try do
Filter.Mogrify.do_filter(file, [Enum.random(@filters)])

View file

@ -3,6 +3,10 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.User do
@moduledoc """
A user, local or remote
"""
use Ecto.Schema
import Ecto.Changeset
@ -269,7 +273,13 @@ def cached_muted_users_ap_ids(user) do
defdelegate following(user), to: FollowingRelationship
defdelegate following?(follower, followed), to: FollowingRelationship
defdelegate following_ap_ids(user), to: FollowingRelationship
defdelegate get_follow_requests(user), to: FollowingRelationship
defdelegate get_follow_requests_query(user), to: FollowingRelationship
def get_follow_requests(user) do
get_follow_requests_query(user)
|> Repo.all()
end
defdelegate search(query, opts \\ []), to: User.Search
@doc """
@ -475,7 +485,7 @@ def remote_user_changeset(struct \\ %User{local: false}, params) do
|> validate_format(:nickname, @email_regex)
|> validate_length(:bio, max: bio_limit)
|> validate_length(:name, max: name_limit)
|> validate_fields(true)
|> validate_fields(true, struct)
|> validate_non_local()
end
@ -545,13 +555,21 @@ def update_changeset(struct, params \\ %{}) do
:pleroma_settings_store,
&{:ok, Map.merge(struct.pleroma_settings_store, &1)}
)
|> validate_fields(false)
|> validate_fields(false, struct)
end
defp put_fields(changeset) do
# These fields are inconsistent in tests when it comes to binary/atom keys
if raw_fields = get_change(changeset, :raw_fields) do
raw_fields =
raw_fields
|> Enum.map(fn
%{name: name, value: value} ->
%{"name" => name, "value" => value}
%{"name" => _} = field ->
field
end)
|> Enum.filter(fn %{"name" => n} -> n != "" end)
fields =
@ -599,7 +617,13 @@ defp put_change_if_present(changeset, map_field, value_function) do
{:ok, new_value} <- value_function.(value) do
put_change(changeset, map_field, new_value)
else
_ -> changeset
{:error, :file_too_large} ->
Ecto.Changeset.validate_change(changeset, map_field, fn map_field, _value ->
[{map_field, "file is too large"}]
end)
_ ->
changeset
end
end
@ -699,7 +723,8 @@ def register_changeset_ldap(struct, params = %{password: password})
|> put_private_key()
end
def register_changeset(struct, params \\ %{}, opts \\ []) do
@spec register_changeset(User.t(), map(), keyword()) :: Changeset.t()
def register_changeset(%User{} = struct, params \\ %{}, opts \\ []) do
bio_limit = Config.get([:instance, :user_bio_length], 5000)
name_limit = Config.get([:instance, :user_name_length], 100)
reason_limit = Config.get([:instance, :registration_reason_length], 500)
@ -813,12 +838,14 @@ defp autofollowing_users(user) do
end
@doc "Inserts provided changeset, performs post-registration actions (confirmation email sending etc.)"
@spec register(Changeset.t()) :: {:ok, User.t()} | {:error, any} | nil
def register(%Ecto.Changeset{} = changeset) do
with {:ok, user} <- Repo.insert(changeset) do
post_register_action(user)
end
end
@spec post_register_action(User.t()) :: {:error, any} | {:ok, User.t()}
def post_register_action(%User{is_confirmed: false} = user) do
with {:ok, _} <- maybe_send_confirmation_email(user) do
{:ok, user}
@ -933,7 +960,8 @@ def needs_update?(%User{local: false} = user) do
def needs_update?(_), do: true
@spec maybe_direct_follow(User.t(), User.t()) :: {:ok, User.t()} | {:error, String.t()}
@spec maybe_direct_follow(User.t(), User.t()) ::
{:ok, User.t(), User.t()} | {:error, String.t()}
# "Locked" (self-locked) users demand explicit authorization of follow requests
def maybe_direct_follow(%User{} = follower, %User{local: true, is_locked: true} = followed) do
@ -1066,6 +1094,11 @@ def get_by_guessed_nickname(ap_id) do
get_cached_by_nickname(nickname)
end
@spec set_cache(
{:error, any}
| {:ok, User.t()}
| User.t()
) :: {:ok, User.t()} | {:error, any}
def set_cache({:ok, user}), do: set_cache(user)
def set_cache({:error, err}), do: {:error, err}
@ -1076,12 +1109,14 @@ def set_cache(%User{} = user) do
{:ok, user}
end
@spec update_and_set_cache(User.t(), map()) :: {:ok, User.t()} | {:error, any}
def update_and_set_cache(struct, params) do
struct
|> update_changeset(params)
|> update_and_set_cache()
end
@spec update_and_set_cache(Changeset.t()) :: {:ok, User.t()} | {:error, any}
def update_and_set_cache(%{data: %Pleroma.User{} = user} = changeset) do
was_superuser_before_update = User.superuser?(user)
@ -1136,6 +1171,7 @@ def get_cached_by_ap_id(ap_id) do
end
end
@spec get_cached_by_id(String.t()) :: nil | Pleroma.User.t()
def get_cached_by_id(id) do
key = "id:#{id}"
@ -1964,6 +2000,7 @@ defp create_service_actor(uri, nickname) do
%User{
invisible: true,
local: true,
actor_type: "Application",
ap_id: uri,
nickname: nickname,
follower_address: uri <> "/followers"
@ -2247,7 +2284,7 @@ def get_ap_ids_by_nicknames(nicknames) do
defp put_password_hash(
%Ecto.Changeset{valid?: true, changes: %{password: password}} = changeset
) do
change(changeset, password_hash: Pleroma.Password.Pbkdf2.hash_pwd_salt(password))
change(changeset, password_hash: Pleroma.Password.hash_pwd_salt(password))
end
defp put_password_hash(changeset), do: changeset
@ -2296,6 +2333,7 @@ def add_alias(user, new_alias_user) do
end
end
@spec delete_alias(User.t(), User.t()) :: {:error, :no_such_alias}
def delete_alias(user, alias_user) do
current_aliases = user.also_known_as || []
alias_ap_id = alias_user.ap_id
@ -2328,7 +2366,8 @@ def update_background(user, background) do
|> update_and_set_cache()
end
def validate_fields(changeset, remote? \\ false) do
@spec validate_fields(Ecto.Changeset.t(), Boolean.t(), User.t()) :: Ecto.Changeset.t()
def validate_fields(changeset, remote? \\ false, struct) do
limit_name = if remote?, do: :max_remote_account_fields, else: :max_account_fields
limit = Config.get([:instance, limit_name], 0)
@ -2341,6 +2380,7 @@ def validate_fields(changeset, remote? \\ false) do
[fields: "invalid"]
end
end)
|> maybe_validate_rel_me_field(struct)
end
defp valid_field?(%{"name" => name, "value" => value}) do
@ -2353,6 +2393,75 @@ defp valid_field?(%{"name" => name, "value" => value}) do
defp valid_field?(_), do: false
defp is_url(nil), do: nil
defp is_url(uri) do
case URI.parse(uri) do
%URI{host: nil} -> false
%URI{scheme: nil} -> false
_ -> true
end
end
@spec maybe_validate_rel_me_field(Changeset.t(), User.t()) :: Changeset.t()
defp maybe_validate_rel_me_field(changeset, %User{ap_id: _ap_id} = struct) do
fields = get_change(changeset, :fields)
raw_fields = get_change(changeset, :raw_fields)
if is_nil(fields) do
changeset
else
validate_rel_me_field(changeset, fields, raw_fields, struct)
end
end
defp maybe_validate_rel_me_field(changeset, _), do: changeset
@spec validate_rel_me_field(Changeset.t(), [Map.t()], [Map.t()], User.t()) :: Changeset.t()
defp validate_rel_me_field(changeset, fields, raw_fields, %User{
nickname: nickname,
ap_id: ap_id
}) do
fields =
fields
|> Enum.with_index()
|> Enum.map(fn {%{"name" => name, "value" => value}, index} ->
raw_value =
if is_nil(raw_fields) do
nil
else
Enum.at(raw_fields, index)["value"]
end
if is_url(raw_value) do
frontend_url =
Pleroma.Web.Router.Helpers.redirect_url(
Pleroma.Web.Endpoint,
:redirector_with_meta,
nickname
)
possible_urls = [ap_id, frontend_url]
with "me" <- RelMe.maybe_put_rel_me(raw_value, possible_urls) do
%{
"name" => name,
"value" => value,
"verified_at" => DateTime.to_iso8601(DateTime.utc_now())
}
else
e ->
Logger.error("Could not check for rel=me, #{inspect(e)}")
%{"name" => name, "value" => value}
end
else
%{"name" => name, "value" => value}
end
end)
put_change(changeset, :fields, fields)
end
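
The observable effect: a field whose raw value is a URL, and whose target page links back to the profile with rel="me", gains a timestamp. An illustrative shape of a verified field:

%{
  "name" => "website",
  "value" => "<a href=\"https://example.com\">example.com</a>",
  "verified_at" => "2023-02-11T10:43:22.000000Z"
}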
defp truncate_field(%{"name" => name, "value" => value}) do
{name, _chopped} =
String.split_at(name, Config.get([:instance, :account_field_name_length], 255))
@ -2411,7 +2520,7 @@ def confirmation_changeset(user, set_confirmation: confirmed?) do
cast(user, params, [:is_confirmed, :confirmation_token])
end
@spec approval_changeset(User.t(), keyword()) :: Changeset.t()
@spec approval_changeset(Changeset.t(), keyword()) :: Changeset.t()
def approval_changeset(user, set_approval: approved?) do
cast(user, %{is_approved: approved?}, [:is_approved])
end
@ -2486,15 +2595,19 @@ defp add_to_block(%User{} = user, %User{} = blocked) do
with {:ok, relationship} <- UserRelationship.create_block(user, blocked) do
@cachex.del(:user_cache, "blocked_users_ap_ids:#{user.ap_id}")
{:ok, relationship}
else
err -> err
end
end
@spec add_to_block(User.t(), User.t()) ::
@spec remove_from_block(User.t(), User.t()) ::
{:ok, UserRelationship.t()} | {:ok, nil} | {:error, Ecto.Changeset.t()}
defp remove_from_block(%User{} = user, %User{} = blocked) do
with {:ok, relationship} <- UserRelationship.delete_block(user, blocked) do
@cachex.del(:user_cache, "blocked_users_ap_ids:#{user.ap_id}")
{:ok, relationship}
else
err -> err
end
end
@ -2516,11 +2629,8 @@ def sanitize_html(%User{} = user) do
# - display name
def sanitize_html(%User{} = user, filter) do
fields =
Enum.map(user.fields, fn %{"name" => name, "value" => value} ->
%{
"name" => name,
"value" => HTML.filter_tags(value, Pleroma.HTML.Scrubber.LinksOnly)
}
Enum.map(user.fields, fn %{"value" => value} = field ->
Map.put(field, "value", HTML.filter_tags(value, Pleroma.HTML.Scrubber.LinksOnly))
end)
user

View file

@ -130,7 +130,8 @@ def export(%__MODULE__{} = backup) do
:ok <- statuses(dir, backup.user),
:ok <- likes(dir, backup.user),
:ok <- bookmarks(dir, backup.user),
{:ok, zip_path} <- :zip.create(String.to_charlist(dir <> ".zip"), @files, cwd: dir),
{:ok, zip_path} <-
:zip.create(String.to_charlist(dir <> ".zip"), @files, cwd: String.to_charlist(dir)),
{:ok, _} <- File.rm_rf(dir) do
{:ok, to_string(zip_path)}
end

View file

@ -43,7 +43,13 @@ def get(%User{} = user, %Hashtag{} = hashtag) do
end
def get_by_user(%User{} = user) do
Ecto.assoc(user, :followed_hashtags)
user
|> followed_hashtags_query()
|> Repo.all()
end
def followed_hashtags_query(%User{} = user) do
Ecto.assoc(user, :followed_hashtags)
|> Ecto.Query.order_by([h], desc: h.id)
end
end

View file

@ -56,7 +56,10 @@ defp skip_plug(conn, plug_modules) do
plug_module.skip_plug(conn)
rescue
UndefinedFunctionError ->
raise "`#{plug_module}` is not skippable. Append `use Pleroma.Web, :plug` to its code."
reraise(
"`#{plug_module}` is not skippable. Append `use Pleroma.Web, :plug` to its code.",
__STACKTRACE__
)
end
end
)
@ -129,66 +132,6 @@ defp maybe_halt_on_missing_oauth_scopes_check(conn) do
end
end
def view do
quote do
use Phoenix.View,
root: "lib/pleroma/web/templates",
namespace: Pleroma.Web
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
import Pleroma.Web.ErrorHelpers
import Pleroma.Web.Gettext
alias Pleroma.Web.Router.Helpers, as: Routes
require Logger
@doc "Same as `render/3` but wrapped in a rescue block"
def safe_render(view, template, assigns \\ %{}) do
Phoenix.View.render(view, template, assigns)
rescue
error ->
Logger.error(
"#{__MODULE__} failed to render #{inspect({view, template})}\n" <>
Exception.format(:error, error, __STACKTRACE__)
)
nil
end
@doc """
Same as `render_many/4` but wrapped in rescue block.
"""
def safe_render_many(collection, view, template, assigns \\ %{}) do
Enum.map(collection, fn resource ->
as = Map.get(assigns, :as) || view.__resource__
assigns = Map.put(assigns, as, resource)
safe_render(view, template, assigns)
end)
|> Enum.filter(& &1)
end
end
end
def router do
quote do
use Phoenix.Router
# credo:disable-for-next-line Credo.Check.Consistency.MultiAliasImportRequireUse
import Plug.Conn
import Phoenix.Controller
end
end
def channel do
quote do
# credo:disable-for-next-line Credo.Check.Consistency.MultiAliasImportRequireUse
import Phoenix.Channel
import Pleroma.Web.Gettext
end
end
def plug do
quote do
@behaviour Pleroma.Web.Plug
@ -233,6 +176,80 @@ def call(%Plug.Conn{} = conn, options) do
end
end
def view do
quote do
use Phoenix.View,
root: "lib/pleroma/web/templates",
namespace: Pleroma.Web
# Import convenience functions from controllers
import Phoenix.Controller,
only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
def live_view do
quote do
use Phoenix.LiveView,
layout: {Pleroma.Web.LayoutView, "live.html"}
unquote(view_helpers())
end
end
def live_component do
quote do
use Phoenix.LiveComponent
unquote(view_helpers())
end
end
def component do
quote do
use Phoenix.Component
unquote(view_helpers())
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
import Phoenix.LiveView.Router
end
end
def channel do
quote do
use Phoenix.Channel
import Pleroma.Web.Gettext
end
end
defp view_helpers do
quote do
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
# Import LiveView and .heex helpers (live_render, live_patch, <.form>, etc)
import Phoenix.LiveView.Helpers
# Import basic rendering functionality (render, render_layout, etc)
import Phoenix.View
import Pleroma.Web.ErrorHelpers
import Pleroma.Web.Gettext
alias Pleroma.Web.Router.Helpers, as: Routes
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""

View file

@ -1531,6 +1531,10 @@ defp normalize_image(%{"url" => url}) do
defp normalize_image(urls) when is_list(urls), do: urls |> List.first() |> normalize_image()
defp normalize_image(_), do: nil
defp normalize_also_known_as(aka) when is_list(aka), do: aka
defp normalize_also_known_as(aka) when is_binary(aka), do: [aka]
defp normalize_also_known_as(nil), do: []
defp object_to_user_data(data, additional) do
fields =
data
@ -1576,6 +1580,7 @@ defp object_to_user_data(data, additional) do
also_known_as =
data
|> Map.get("alsoKnownAs", [])
|> normalize_also_known_as()
|> Enum.filter(fn url ->
case URI.parse(url) do
%URI{scheme: "http"} -> true

View file

@ -161,10 +161,17 @@ defp get_policies(_), do: []
# - https://extra.baddomain.net/
# Does NOT match the following:
# - https://maybebaddomain.net/
# *.baddomain.net
def subdomain_regex("*." <> domain), do: subdomain_regex(domain)
# baddomain.net
def subdomain_regex(domain) do
~r/^(.+\.)?#{Regex.escape(domain)}$/i
if String.ends_with?(domain, ".*") do
~r/^(.+\.)?#{Regex.escape(String.replace_suffix(domain, ".*", ""))}\.(.+)$/i
else
~r/^(.+\.)?#{Regex.escape(domain)}$/i
end
end
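
A quick check of the wildcard behaviour described in the comments above, with MRF standing in for the surrounding module:

Regex.match?(MRF.subdomain_regex("example.com"), "sub.example.com")  #=> true
Regex.match?(MRF.subdomain_regex("*.example.com"), "example.com")    #=> true
Regex.match?(MRF.subdomain_regex("example.*"), "media.example.org")  #=> true
Regex.match?(MRF.subdomain_regex("example.com"), "maybeexample.com") #=> false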
@spec subdomains_regex([String.t()]) :: [Regex.t()]

View file

@ -12,7 +12,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
require Logger
@adapter_options [
recv_timeout: 10_000
receive_timeout: 10_000
]
@impl true

View file

@ -30,6 +30,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNotePageValidator do
field(:replies, {:array, ObjectValidators.ObjectID}, default: [])
field(:source, :map)
field(:contentMap, :map)
end
def cast_and_apply(data) do
@ -146,6 +147,21 @@ defp fix_source(%{"source" => source} = object) when is_binary(source) do
defp fix_source(object), do: object
defp fix_content_map_languages(%{"contentMap" => content_map} = object)
when is_map(content_map) do
# Only allow valid languages
content_map =
content_map
|> Enum.reject(fn {lang, _content} ->
!Pleroma.ISO639.valid_alpha2?(lang)
end)
|> Enum.into(%{})
Map.put(object, "contentMap", content_map)
end
defp fix_content_map_languages(object), do: object
defp fix(data) do
data
|> CommonFixes.fix_actor()
@ -158,6 +174,7 @@ defp fix(data) do
|> Transmogrifier.fix_attachments()
|> Transmogrifier.fix_emoji()
|> Transmogrifier.fix_content_map()
|> fix_content_map_languages()
end
def changeset(struct, data) do

View file

@ -346,11 +346,16 @@ def fix_tag(%{"tag" => %{} = tag} = object) do
def fix_tag(object), do: object
# content map usually only has one language so this will do for now.
def fix_content_map(%{"contentMap" => content_map} = object) do
def fix_content_map(%{"contentMap" => content_map} = object) when is_map(content_map) do
content_groups = Map.to_list(content_map)
{_, content} = Enum.at(content_groups, 0)
Map.put(object, "content", content)
if Enum.empty?(content_groups) do
object
else
{_, content} = Enum.at(content_groups, 0)
Map.put(object, "content", content)
end
end
def fix_content_map(object), do: object
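
The is_map guard plus the empty check make malformed or empty contentMap values inert; a behaviour sketch:

fix_content_map(%{"contentMap" => %{}})
#=> %{"contentMap" => %{}}                  # left untouched
fix_content_map(%{"contentMap" => %{"en" => "hello"}})
#=> %{"contentMap" => %{"en" => "hello"}, "content" => "hello"}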

View file

@ -14,11 +14,11 @@ defmodule Pleroma.Web.AdminAPI.StatusView do
defdelegate merge_account_views(user), to: AdminAPI.AccountView
def render("index.json", %{total: total} = opts) do
%{total: total, activities: safe_render_many(opts.activities, __MODULE__, "show.json", opts)}
%{total: total, activities: render_many(opts.activities, __MODULE__, "show.json", opts)}
end
def render("index.json", opts) do
safe_render_many(opts.activities, __MODULE__, "show.json", opts)
render_many(opts.activities, __MODULE__, "show.json", opts)
end
def render("show.json", %{activity: %{data: %{"object" => _object}} = activity} = opts) do

View file

@ -0,0 +1,24 @@
defmodule Pleroma.Web.AkkomaAPI.MetricsController do
use Pleroma.Web, :controller
alias Pleroma.Web.Plugs.OAuthScopesPlug
alias Pleroma.Config
plug(
OAuthScopesPlug,
%{scopes: ["admin:metrics"]}
when action in [
:show
]
)
def show(conn, _params) do
if Config.get([:instance, :export_prometheus_metrics], true) do
conn
|> text(Pleroma.PrometheusExporter.show())
else
conn
|> send_resp(404, "Not Found")
end
end
end

View file

@ -3,6 +3,8 @@ defmodule Pleroma.Web.AkkomaAPI.TranslationController do
alias Pleroma.Web.Plugs.OAuthScopesPlug
require Logger
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
@unauthenticated_access %{fallback: :proceed_unauthenticated, scopes: []}
@ -26,8 +28,12 @@ def languages(conn, _params) do
conn
|> json(%{source: source_languages, target: dest_languages})
else
{:enabled, false} -> json(conn, %{})
e -> IO.inspect(e)
{:enabled, false} ->
json(conn, %{})
e ->
Logger.error("Translation language list error: #{inspect(e)}")
{:error, e}
end
end

View file

@ -23,19 +23,19 @@ def spec(opts \\ []) do
[]
end,
info: %OpenApiSpex.Info{
title: "Pleroma API",
title: "Akkoma API",
description: """
This is documentation for client Pleroma API. Most of the endpoints and entities come
This is documentation for the Akkoma API. Most of the endpoints and entities come
from Mastodon API and have custom extensions on top.
While this document aims to be a complete guide to the client API Pleroma exposes,
the details are still being worked out. Some endpoints may have incomplete or poorly worded documentation.
While this document aims to be a complete guide to the client API Akkoma exposes,
it is not yet exhaustive. Some endpoints may have incomplete or poorly worded documentation.
You might want to check the following resources if something is not clear:
- [Legacy Pleroma-specific endpoint documentation](https://docs-develop.pleroma.social/backend/development/API/pleroma_api/)
- [Mastodon API documentation](https://docs.joinmastodon.org/client/intro/)
- [Differences in Mastodon API responses from vanilla Mastodon](https://docs-develop.pleroma.social/backend/development/API/differences_in_mastoapi_responses/)
- [Differences in Mastodon API responses from vanilla Mastodon](https://docs.akkoma.dev/stable/development/API/differences_in_mastoapi_responses/)
Please report such occurences on our [issue tracker](https://git.pleroma.social/pleroma/pleroma/-/issues). Feel free to submit API questions or proposals there too!
Please report such occurrences on our [issue tracker](https://akkoma.dev/AkkomaGang/akkoma). Feel free to submit API questions or proposals there too!
""",
# Strip environment from the version
version: Application.spec(:pleroma, :vsn) |> to_string() |> String.replace(~r/\+.*$/, ""),

View file

@ -64,7 +64,8 @@ def update_credentials_operation do
requestBody: request_body("Parameters", update_credentials_request(), required: true),
responses: %{
200 => Operation.response("Account", "application/json", Account),
403 => Operation.response("Error", "application/json", ApiError)
403 => Operation.response("Error", "application/json", ApiError),
413 => Operation.response("Error", "application/json", ApiError)
}
}
end
@ -431,6 +432,7 @@ def lookup_operation do
],
responses: %{
200 => Operation.response("Account", "application/json", Account),
401 => Operation.response("Error", "application/json", ApiError),
404 => Operation.response("Error", "application/json", ApiError)
}
}
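The added 401 documents that `/api/v1/accounts/lookup` now honours `restrict_unauthenticated` (see the changelog entry above). A hedged config sketch, following the existing `:restrict_unauthenticated` shape:

```elixir
# With profiles hidden from anonymous clients, an unauthenticated lookup
# now yields the documented 401 instead of exposing the account.
config :pleroma, :restrict_unauthenticated,
  profiles: %{local: true, remote: true}
```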

View file

@ -19,6 +19,7 @@ def index_operation do
summary: "Retrieve follow requests",
security: [%{"oAuth" => ["read:follows", "follow"]}],
operationId: "FollowRequestController.index",
parameters: pagination_params(),
responses: %{
200 =>
Operation.response("Array of Account", "application/json", %Schema{
@ -62,4 +63,22 @@ defp id_param do
required: true
)
end
defp pagination_params do
[
Operation.parameter(:max_id, :query, :string, "Return items older than this ID"),
Operation.parameter(
:since_id,
:query,
:string,
"Return the oldest items newer than this ID"
),
Operation.parameter(
:limit,
:query,
%Schema{type: :integer, default: 20},
"Maximum number of items to return. Will be ignored if it's more than 40"
)
]
end
end
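With follow requests now paginated Mastodon-style (per the changelog), clients should walk the `Link` header rather than assume a single page. A hedged client-side sketch using Tesla, Akkoma's own HTTP client (URL and token are illustrative):

```elixir
# Fetch the first page, then follow rel="next" in the Link header
# for older entries (it carries the max_id cursor defined above).
{:ok, %Tesla.Env{body: _accounts, headers: headers}} =
  Tesla.get("https://example.com/api/v1/follow_requests",
    query: [limit: 20],
    headers: [{"authorization", "Bearer <token>"}]
  )

{_, next} = List.keyfind(headers, "link", 0)
# next => ~s(<https://example.com/api/v1/follow_requests?limit=20&max_id=…>; rel="next")
```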

View file

@ -231,9 +231,18 @@ defp emoji_packs_response do
"application/json",
%Schema{
type: :object,
additionalProperties: emoji_pack(),
properties: %{
count: %Schema{type: :integer},
packs: %Schema{
type: :object,
additionalProperties: emoji_pack()
}
},
example: %{
"emojos" => emoji_pack().example
"count" => 4,
"packs" => %{
"emojos" => emoji_pack().example
}
}
}
)
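Note the wrapped shape is a breaking change for consumers of the pack list endpoint: the pack map moves under `packs` alongside a `count`. A quick sketch of old versus new bodies (pack contents elided):

```elixir
pack = %{"pack" => %{}, "files" => %{}}  # pack body elided, illustrative

old_shape = %{"emojos" => pack}
new_shape = %{"count" => 1, "packs" => %{"emojos" => pack}}
```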

View file

@ -44,7 +44,7 @@ def unfollow_operation do
tags: ["Tags"],
summary: "Unfollow a hashtag",
description: "Unfollow a hashtag",
security: [%{"oAuth" => ["write:follow"]}],
security: [%{"oAuth" => ["write:follows"]}],
parameters: [id_param()],
operationId: "TagController.unfollow",
responses: %{
@ -54,6 +54,26 @@ def unfollow_operation do
}
end
def show_followed_operation do
%Operation{
tags: ["Tags"],
summary: "Followed hashtags",
description: "View a list of hashtags the currently authenticated user is following",
parameters: pagination_params(),
security: [%{"oAuth" => ["read:follows"]}],
operationId: "TagController.show_followed",
responses: %{
200 =>
Operation.response("Hashtags", "application/json", %Schema{
type: :array,
items: Tag
}),
403 => Operation.response("Forbidden", "application/json", ApiError),
404 => Operation.response("Not Found", "application/json", ApiError)
}
}
end
defp id_param do
Operation.parameter(
:id,
@ -62,4 +82,22 @@ defp id_param do
"Name of the hashtag"
)
end
def pagination_params do
[
Operation.parameter(:max_id, :query, :integer, "Return items older than this ID"),
Operation.parameter(
:min_id,
:query,
:integer,
"Return the oldest items newer than this ID"
),
Operation.parameter(
:limit,
:query,
%Schema{type: :integer, default: 20},
"Maximum number of items to return. Will be ignored if it's more than 40"
)
]
end
end
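`show_followed_operation` backs the changelog's new `GET /api/v1/followed_tags` listing and reuses the cursor parameters defined above. An illustrative response body (values invented; the fields follow the `Tag` schema, which gains `history` in the next file):

```elixir
# GET /api/v1/followed_tags – an array of Tag entities, paginated via
# max_id/min_id/limit; the values here are made up.
[
  %{
    "name" => "akkoma",
    "url" => "https://example.com/tags/akkoma",
    "following" => true,
    "history" => []
  }
]
```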

View file

@ -21,6 +21,12 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Tag do
following: %Schema{
type: :boolean,
description: "Whether the authenticated user is following the hashtag"
},
history: %Schema{
type: :array,
items: %Schema{type: :string},
description:
"A list of historical uses of the hashtag (not implemented, for compatibility only)"
}
},
example: %{

View file

@ -6,7 +6,6 @@ defmodule Pleroma.Web.Auth.PleromaAuthenticator do
alias Pleroma.Registration
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.Plugs.AuthenticationPlug
import Pleroma.Web.Auth.Helpers, only: [fetch_credentials: 1, fetch_user: 1]
@ -15,8 +14,8 @@ defmodule Pleroma.Web.Auth.PleromaAuthenticator do
def get_user(%Plug.Conn{} = conn) do
with {:ok, {name, password}} <- fetch_credentials(conn),
{_, %User{} = user} <- {:user, fetch_user(name)},
{_, true} <- {:checkpw, AuthenticationPlug.checkpw(password, user.password_hash)},
{:ok, user} <- AuthenticationPlug.maybe_update_password(user, password) do
{_, true} <- {:checkpw, Pleroma.Password.checkpw(password, user.password_hash)},
{:ok, user} <- Pleroma.Password.maybe_update_password(user, password) do
{:ok, user}
else
{:error, _reason} = error -> error
@ -60,6 +59,8 @@ def get_registration(%Plug.Conn{
def get_registration(%Plug.Conn{} = _conn), do: {:error, :missing_credentials}
@doc "Creates Pleroma.User record basing on params and Pleroma.Registration record."
@spec create_from_registration(Plug.Conn.t(), Registration.t()) ::
{:ok, User.t()} | {:error, any()}
def create_from_registration(
%Plug.Conn{params: %{"authorization" => registration_attrs}},
%Registration{} = registration
@ -89,6 +90,8 @@ def create_from_registration(
{:ok, _} <-
Registration.changeset(registration, %{user_id: new_user.id}) |> Repo.update() do
{:ok, new_user}
else
err -> err
end
end
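Password verification is thus centralised in the new `Pleroma.Password` module rather than living on `AuthenticationPlug`. A minimal sketch of the contract the `with` clause relies on (account and password are hypothetical):

```elixir
user = Pleroma.User.get_by_nickname("alice")  # hypothetical account

# checkpw/2 returns a boolean; the authenticator tags it as {:checkpw, _}.
true = Pleroma.Password.checkpw("correct horse battery staple", user.password_hash)

# maybe_update_password/2 may transparently re-hash the stored password on
# a successful login, e.g. when migrating hashing schemes (this release
# adds Argon2 support per the changelog).
{:ok, _user} =
  Pleroma.Password.maybe_update_password(user, "correct horse battery staple")
```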

View file

@ -6,7 +6,6 @@ defmodule Pleroma.Web.Auth.TOTPAuthenticator do
alias Pleroma.MFA
alias Pleroma.MFA.TOTP
alias Pleroma.User
alias Pleroma.Web.Plugs.AuthenticationPlug
@doc "Verify code or check backup code."
@spec verify(String.t(), User.t()) ::
@ -31,7 +30,7 @@ def verify_recovery_code(
code
)
when is_list(codes) and is_binary(code) do
hash_code = Enum.find(codes, fn hash -> AuthenticationPlug.checkpw(code, hash) end)
hash_code = Enum.find(codes, fn hash -> Pleroma.Password.checkpw(code, hash) end)
if hash_code do
MFA.invalidate_backup_code(user, hash_code)

View file

@ -22,6 +22,8 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
attachments: [],
in_reply_to: nil,
in_reply_to_conversation: nil,
language: nil,
content_map: %{},
quote_id: nil,
quote: nil,
visibility: nil,
@ -58,6 +60,7 @@ def create(user, params) do
|> with_valid(&visibility/1)
|> with_valid(&quote_id/1)
|> content()
|> with_valid(&language/1)
|> with_valid(&to_and_cc/1)
|> with_valid(&context/1)
|> sensitive()
@ -133,6 +136,20 @@ defp quote_id(%{params: %{quote_id: %Activity{} = quote}} = draft) do
defp quote_id(draft), do: draft
defp language(%{params: %{language: language}, content_html: content} = draft)
when is_binary(language) do
if Pleroma.ISO639.valid_alpha2?(language) do
%__MODULE__{draft | content_map: %{language => content}}
else
add_error(draft, dgettext("errors", "Invalid language"))
end
end
defp language(%{content_html: content} = draft) do
# Use a default language if no language is specified
%__MODULE__{draft | content_map: %{"en" => content}}
end
defp visibility(%{params: params} = draft) do
case CommonAPI.get_visibility(params, draft.in_reply_to, draft.in_reply_to_conversation) do
{visibility, "direct"} when visibility != "direct" ->
@ -177,7 +194,7 @@ defp to_and_cc(draft) do
end
defp context(draft) do
context = Utils.make_context(draft.in_reply_to, draft.in_reply_to_conversation)
context = Utils.make_context(draft)
%__MODULE__{draft | context: context}
end
@ -224,6 +241,7 @@ defp object(draft) do
"mediaType" => Utils.get_content_type(draft.params[:content_type])
})
|> Map.put("generator", draft.params[:generator])
|> Map.put("contentMap", draft.content_map)
%__MODULE__{draft | object: object}
end
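End to end: `language/1` validates the ISO 639-1 code, files the rendered HTML under it (defaulting to `"en"`), and `object/1` attaches the map as `"contentMap"`. A sketch of the observable result (nickname and HTML rendering are illustrative):

```elixir
user = Pleroma.User.get_by_nickname("alice")  # hypothetical account

{:ok, activity} =
  Pleroma.Web.CommonAPI.post(user, %{status: "Bonjour", language: "fr"})

Pleroma.Object.normalize(activity, fetch: false).data["contentMap"]
# => %{"fr" => "<p>Bonjour</p>"}

# An unknown code such as "zz" fails Pleroma.ISO639.valid_alpha2?/1 and
# surfaces as an "Invalid language" draft error instead of creating the post.
```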

View file

@ -17,7 +17,6 @@ defmodule Pleroma.Web.CommonAPI.Utils do
alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.CommonAPI.ActivityDraft
alias Pleroma.Web.MediaProxy
alias Pleroma.Web.Plugs.AuthenticationPlug
alias Pleroma.Web.Utils.Params
require Logger
@ -231,12 +230,13 @@ def get_content_type(content_type) do
end
end
def make_context(_, %Participation{} = participation) do
def make_context(%{in_reply_to_conversation: %Participation{} = participation}) do
Repo.preload(participation, :conversation).conversation.ap_id
end
def make_context(%Activity{data: %{"context" => context}}, _), do: context
def make_context(_, _), do: Utils.generate_context_id()
def make_context(%{in_reply_to: %Activity{data: %{"context" => context}}}), do: context
def make_context(%{quote: %Activity{data: %{"context" => context}}}), do: context
def make_context(_), do: Utils.generate_context_id()
def maybe_add_attachments(parsed, _attachments, false = _no_links), do: parsed
@ -328,20 +328,27 @@ def date_to_asctime(date) do
end
def to_masto_date(%NaiveDateTime{} = date) do
date
|> NaiveDateTime.to_iso8601()
|> String.replace(~r/(\.\d+)?$/, ".000Z", global: false)
# NOTE: Elixir's ISO 8601 format is a superset of the real standard
# It supports negative years for example.
# ISO8601 only supports years before 1583 with mutual agreement
if date.year < 1583 do
"1970-01-01T00:00:00Z"
else
date
|> NaiveDateTime.to_iso8601()
|> String.replace(~r/(\.\d+)?$/, ".000Z", global: false)
end
end
def to_masto_date(date) when is_binary(date) do
with {:ok, date} <- NaiveDateTime.from_iso8601(date) do
to_masto_date(date)
else
_ -> ""
_ -> "1970-01-01T00:00:00Z"
end
end
def to_masto_date(_), do: ""
def to_masto_date(_), do: "1970-01-01T00:00:00Z"
defp shortname(name) do
with max_length when max_length > 0 <-
@ -356,7 +363,7 @@ defp shortname(name) do
@spec confirm_current_password(User.t(), String.t()) :: {:ok, User.t()} | {:error, String.t()}
def confirm_current_password(user, password) do
with %User{local: true} = db_user <- User.get_cached_by_id(user.id),
true <- AuthenticationPlug.checkpw(password, db_user.password_hash) do
true <- Pleroma.Password.checkpw(password, db_user.password_hash) do
{:ok, db_user}
else
_ -> {:error, dgettext("errors", "Invalid password.")}
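The clamping keeps emitted timestamps within plain ISO 8601, which only covers years from 1583 onward without prior agreement, and the fallback is now a parsable epoch value instead of an empty string. A quick sketch of the new behaviour:

```elixir
alias Pleroma.Web.CommonAPI.Utils

Utils.to_masto_date(~N[2023-02-11 10:43:22])
# => "2023-02-11T10:43:22.000Z"

# Pre-1583 years collapse to the epoch rather than leaking
# non-conformant year strings to clients.
Utils.to_masto_date(~N[1001-01-01 00:00:00])
# => "1970-01-01T00:00:00Z"

# Unparseable input also returns the epoch now, not "".
Utils.to_masto_date("not a date")
# => "1970-01-01T00:00:00Z"
```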

View file

@ -87,16 +87,18 @@ def get_pagination_fields(conn, entries, extra_params \\ %{}) do
def assign_account_by_id(conn, _) do
case Pleroma.User.get_cached_by_id(conn.params.id) do
%Pleroma.User{} = account -> assign(conn, :account, account)
nil -> Pleroma.Web.MastodonAPI.FallbackController.call(conn, {:error, :not_found}) |> halt()
%Pleroma.User{} = account ->
assign(conn, :account, account)
nil ->
Pleroma.Web.MastodonAPI.FallbackController.call(conn, {:error, :not_found})
|> halt()
end
end
@spec try_render(Plug.Conn.t(), any, any) :: Plug.Conn.t()
def try_render(conn, target, params) when is_binary(target) do
case render(conn, target, params) do
nil -> render_error(conn, :not_implemented, "Can't display this activity")
res -> res
end
render(conn, target, params)
end
def try_render(conn, _, _) do

Some files were not shown because too many files have changed in this diff.