Merge branch 'develop' into feature/addressable-lists

This commit is contained in:
Egor Kislitsyn 2019-07-11 13:26:59 +07:00
commit 182f7bbb11
826 changed files with 13387 additions and 2831 deletions

View file

@ -16,6 +16,7 @@ stages:
- build
- test
- deploy
- release
before_script:
- mix local.hex --force
@ -34,6 +35,7 @@ docs-build:
- develop@pleroma/pleroma
variables:
MIX_ENV: dev
PLEROMA_BUILD_ENV: prod
script:
- mix deps.get
- mix compile
@ -42,6 +44,7 @@ docs-build:
paths:
- priv/static/doc
unit-testing:
stage: test
services:
@ -140,3 +143,104 @@ stop_review_app:
- ssh-keyscan -H "pleroma.online" >> ~/.ssh/known_hosts
- ssh -t dokku@pleroma.online -- --force apps:destroy "$CI_ENVIRONMENT_SLUG"
- ssh -t dokku@pleroma.online -- --force postgres:destroy $(echo $CI_ENVIRONMENT_SLUG | sed -e 's/-/_/g')_db
amd64:
stage: release
# TODO: Replace with upstream image when 1.9.0 comes out
image: rinpatch/elixir:1.9.0-rc.0
only: &release-only
- master@pleroma/pleroma
- develop@pleroma/pleroma
artifacts: &release-artifacts
name: "pleroma-$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA-$CI_JOB_NAME"
paths:
- release/*
# Ideally it would be never for the master branch and with the next commit for develop,
# but GitLab supports neither `only` for artifacts,
# nor setting it to never from .gitlab-ci.yml
# nor expiring with the next commit
expire_in: 42 yrs
cache: &release-cache
key: $CI_COMMIT_REF_NAME-$CI_JOB_NAME
paths:
- deps
variables: &release-variables
MIX_ENV: prod
before_script: &before-release
- echo "import Mix.Config" > config/prod.secret.exs
- mix local.hex --force
- mix local.rebar --force
script: &release
- mix deps.get --only prod
- mkdir release
- export PLEROMA_BUILD_BRANCH=$CI_COMMIT_REF_NAME
- mix release --path release
amd64-musl:
stage: release
artifacts: *release-artifacts
only: *release-only
# TODO: Replace with upstream image when 1.9.0 comes out
image: rinpatch/elixir:1.9.0-rc.0-alpine
cache: *release-cache
variables: *release-variables
before_script: &before-release-musl
- apk add git gcc g++ musl-dev make
- echo "import Mix.Config" > config/prod.secret.exs
- mix local.hex --force
- mix local.rebar --force
script: *release
arm:
stage: release
artifacts: *release-artifacts
only: *release-only
tags:
- arm32
# TODO: Replace with upstream image when 1.9.0 comes out
image: rinpatch/elixir:1.9.0-rc.0-arm
cache: *release-cache
variables: *release-variables
before_script: *before-release
script: *release
arm-musl:
stage: release
artifacts: *release-artifacts
only: *release-only
tags:
- arm32
# TODO: Replace with upstream image when 1.9.0 comes out
image: rinpatch/elixir:1.9.0-rc.0-arm-alpine
cache: *release-cache
variables: *release-variables
before_script: *before-release-musl
script: *release
arm64:
stage: release
artifacts: *release-artifacts
only: *release-only
tags:
- arm
# TODO: Replace with upstream image when 1.9.0 comes out
image: rinpatch/elixir:1.9.0-rc.0-arm64
cache: *release-cache
variables: *release-variables
before_script: *before-release
script: *release
arm64-musl:
stage: release
artifacts: *release-artifacts
only: *release-only
tags:
- arm
# TODO: Replace with upstream image when 1.9.0 comes out
image: rinpatch/elixir:1.9.0-rc.0-arm64-alpine
cache: *release-cache
variables: *release-variables
before_script: *before-release-musl
script: *release

View file

@ -3,7 +3,37 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [unreleased]
## [Unreleased]
### Changed
- **Breaking:** Configuration: A setting to explicitly disable the mailer was added, defaulting to true. If you are using a mailer, add `config :pleroma, Pleroma.Emails.Mailer, enabled: true` to your config
- Configuration: OpenGraph and TwitterCard providers enabled by default
- Configuration: Filter.AnonymizeFilename added ability to retain file extension with custom text
- NodeInfo: Return `skipThreadContainment` in `metadata` for the `skip_thread_containment` option
### Fixed
- Not being able to pin unlisted posts
- Metadata rendering errors resulting in the entire page being inaccessible
- Mastodon API: Handling of search timeouts (`/api/v1/search` and `/api/v2/search`)
- Mastodon API: Embedded relationships not being properly rendered in the Account entity of Status entity
### Added
- MRF: Support for priming the mediaproxy cache (`Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy`)
- Configuration: `federation_incoming_replies_max_depth` option
- Mastodon API: Support for the [`tagged` filter](https://github.com/tootsuite/mastodon/pull/9755) in [`GET /api/v1/accounts/:id/statuses`](https://docs.joinmastodon.org/api/rest/accounts/#get-api-v1-accounts-id-statuses)
- Mastodon API, streaming: Add support for passing the token in the `Sec-WebSocket-Protocol` header
- Mastodon API, extension: Ability to reset avatar, profile banner, and background
- Admin API: Return users' tags when querying reports
- Admin API: Return avatar and display name when querying users
- Admin API: Allow querying user by ID
- Added synchronization of following/followers counters for external users
- Configuration: `enabled` option for `Pleroma.Emails.Mailer`, defaulting to `false`.
- Mastodon API: Add support for categories for custom emojis by reusing the group feature. <https://github.com/tootsuite/mastodon/pull/11196>
## [1.0.0] - 2019-06-29
### Security
- Mastodon API: Fix display names not being sanitized
- Rich media: Do not crawl private IP ranges
### Added
- Add a generic settings store for frontends / clients to use.
- Explicit addressing option for posting.
@ -11,6 +41,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- [MongooseIM](https://github.com/esl/MongooseIM) http authentication support.
- LDAP authentication
- External OAuth provider authentication
- Support for building a release using [`mix release`](https://hexdocs.pm/mix/master/Mix.Tasks.Release.html)
- A [job queue](https://git.pleroma.social/pleroma/pleroma_job_queue) for federation, emails, web push, etc.
- [Prometheus](https://prometheus.io/) metrics
- Support for Mastodon's remote interaction
@ -18,15 +49,19 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Mix Tasks: `mix pleroma.database remove_embedded_objects`
- Mix Tasks: `mix pleroma.database update_users_following_followers_counts`
- Mix Tasks: `mix pleroma.user toggle_confirmed`
- Mix Tasks: `mix pleroma.config migrate_to_db`
- Mix Tasks: `mix pleroma.config migrate_from_db`
- Federation: Support for `Question` and `Answer` objects
- Federation: Support for reports
- Configuration: `poll_limits` option
- Configuration: `pack_extensions` option
- Configuration: `safe_dm_mentions` option
- Configuration: `link_name` option
- Configuration: `fetch_initial_posts` option
- Configuration: `notify_email` option
- Configuration: Media proxy `whitelist` option
- Configuration: `report_uri` option
- Configuration: `limit_to_local_content` option
- Pleroma API: User subscriptions
- Pleroma API: Healthcheck endpoint
- Pleroma API: `/api/v1/pleroma/mascot` per-user frontend mascot configuration endpoints
@ -35,7 +70,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Admin API: added filters (role, tags, email, name) for users endpoint
- Admin API: Endpoints for managing reports
- Admin API: Endpoints for deleting and changing the scope of individual reported statuses
- Admin API: Endpoints to view and change config settings.
- AdminFE: initial release with basic user management accessible at /pleroma/admin/
- Mastodon API: Add chat token to `verify_credentials` response
- Mastodon API: Add background image setting to `update_credentials`
- Mastodon API: [Scheduled statuses](https://docs.joinmastodon.org/api/rest/scheduled-statuses/)
- Mastodon API: `/api/v1/notifications/destroy_multiple` (glitch-soc extension)
- Mastodon API: `/api/v1/pleroma/accounts/:id/favourites` (API extension)
@ -53,9 +91,16 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- MRF: Support for running subchains.
- Addressable lists
- Configuration: `skip_thread_containment` option
- Configuration: `rate_limit` option. See `Pleroma.Plugs.RateLimiter` documentation for details.
- MRF: Support for filtering out likely spam messages by rejecting posts from new users that contain links.
- Configuration: `ignore_hosts` option
- Configuration: `ignore_tld` option
- Configuration: default syslog tag "Pleroma" is now lowercased to "pleroma"
### Changed
- **Breaking:** bind to 127.0.0.1 instead of 0.0.0.0 by default
- **Breaking:** Configuration: move from Pleroma.Mailer to Pleroma.Emails.Mailer
- Thread containment / test for complete visibility will be skipped by default.
- Enforcement of OAuth scopes
- Add multiple use/time expiring invite token
- Restyled OAuth pages to fit with Pleroma's default theme
@ -64,6 +109,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Federation: Expand the audience of delete activities to all recipients of the deleted object
- Federation: Removed `inReplyToStatusId` from objects
- Configuration: Dedupe enabled by default
- Configuration: Default log level in `prod` environment is now set to `warn`
- Configuration: Added `extra_cookie_attrs` for setting non-standard cookie attributes. Defaults to ["SameSite=Lax"] so that remote follows work.
- Timelines: Messages involving people you have blocked will be excluded from the timeline in all cases instead of just repeats.
- Admin API: Move the user related API to `api/pleroma/admin/users`
@ -89,8 +135,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Posts which are marked sensitive or tagged nsfw no longer have link previews.
- HTTP connection timeout is now set to 10 seconds.
- Respond with a 404 Not implemented JSON error message when requested API is not implemented
- Rich Media: crawl only https URLs.
### Fixed
- Follow requests don't get 'stuck' anymore.
- Added an FTS index on objects. Running `vacuum analyze` and setting a larger `work_mem` is recommended.
- Followers counter not being updated when a follower is blocked
- Deactivated users being able to request an access token

View file

@ -15,9 +15,12 @@ For clients it supports both the [GNU Social API with Qvitter extensions](https:
If you want to run your own server, feel free to contact us at @lain@pleroma.soykaf.com or in our dev chat at #pleroma on freenode or via matrix at <https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org>.
## Installation
**Note:** The guide below may be outdated and in most cases shouldn't be used. Instead check out our [wiki](https://docs.pleroma.social) for platform-specific installation instructions, most likely [Installing on Linux using OTP releases](https://docs.pleroma.social/otp_en.html) is the guide you need.
### OS/Distro packages
Currently Pleroma is not packaged by any OS/Distros, but feel free to reach out to us at [#pleroma-dev on freenode](https://webchat.freenode.net/?channels=%23pleroma-dev) or via matrix at <https://matrix.heldscal.la/#/room/#freenode_#pleroma-dev:matrix.org> for assistance. If you want to change default options in your Pleroma package, please **discuss it with us first**.
### Docker
While we don't provide Docker files, other people have written very good ones. Take a look at <https://github.com/angristan/docker-pleroma> or <https://github.com/sn0w/pleroma-docker>.
### Dependencies

View file

@ -99,6 +99,7 @@
config :pleroma, :emoji,
shortcode_globs: ["/emoji/custom/**/*.png"],
pack_extensions: [".png", ".gif"],
groups: [
# Put groups that have higher priority than defaults here. Example in `docs/config/custom_emoji.md`
Custom: ["/emoji/*.png", "/emoji/**/*.png"]
@ -139,6 +140,7 @@
instrumenters: [Pleroma.Web.Endpoint.Instrumenter],
url: [host: "localhost"],
http: [
ip: {127, 0, 0, 1},
dispatch: [
{:_,
[
@ -167,7 +169,7 @@
config :logger, :ex_syslogger,
level: :debug,
ident: "Pleroma",
ident: "pleroma",
format: "$metadata[$level] $message",
metadata: [:request_id]
@ -216,6 +218,7 @@
},
registrations_open: true,
federating: true,
federation_incoming_replies_max_depth: 100,
federation_reachability_timeout_days: 7,
federation_publisher_modules: [
Pleroma.Web.ActivityPub.Publisher,
@ -244,9 +247,16 @@
safe_dm_mentions: false,
healthcheck: false,
remote_post_retention_days: 90,
skip_thread_containment: false
config :pleroma, :app_account_creation, enabled: true, max_requests: 25, interval: 1800
skip_thread_containment: true,
limit_to_local_content: :unauthenticated,
dynamic_configuration: false,
external_user_synchronization: [
enabled: false,
# every 2 hours
interval: 60 * 60 * 2,
max_retries: 3,
limit: 500
]
config :pleroma, :markup,
# XXX - unfortunately, inline images must be enabled by default right now, because
@ -329,7 +339,10 @@
config :pleroma, :mrf_subchain, match_actor: %{}
config :pleroma, :rich_media, enabled: true
config :pleroma, :rich_media,
enabled: true,
ignore_hosts: [],
ignore_tld: ["local", "localdomain", "lan"]
config :pleroma, :media_proxy,
enabled: false,
@ -353,7 +366,11 @@
port: 9999
config :pleroma, Pleroma.Web.Metadata,
providers: [Pleroma.Web.Metadata.Providers.RelMe],
providers: [
Pleroma.Web.Metadata.Providers.OpenGraph,
Pleroma.Web.Metadata.Providers.TwitterCard,
Pleroma.Web.Metadata.Providers.RelMe
],
unfurl_nsfw: false
config :pleroma, :suggestions,
@ -361,8 +378,8 @@
third_party_engine:
"http://vinayaka.distsn.org/cgi-bin/vinayaka-user-match-suggestions-api.cgi?{{host}}+{{user}}",
timeout: 300_000,
limit: 23,
web: "https://vinayaka.distsn.org/?{{host}}+{{user}}"
limit: 40,
web: "https://vinayaka.distsn.org"
config :pleroma, :http_security,
enabled: true,
@ -442,6 +459,8 @@
opts: [
scheme: true,
extra: true,
# TODO: Set to :no_scheme when it works properly
validate_tld: true,
class: false,
strip_prefix: false,
new_window: false,
@ -482,7 +501,7 @@
config :pleroma, :auth, oauth_consumer_strategies: oauth_consumer_strategies
config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Sendmail
config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Sendmail, enabled: false
config :prometheus, Pleroma.Web.Endpoint.MetricsExporter, path: "/api/pleroma/app_metrics"
@ -499,9 +518,15 @@
config :pleroma, :database, rum_enabled: false
config :pleroma, :env, Mix.env()
config :http_signatures,
adapter: Pleroma.Signature
config :pleroma, :rate_limit,
search: [{1000, 10}, {1000, 30}],
app_account_creation: {1_800_000, 25}
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"

View file

@ -59,3 +59,6 @@
"!!! RUNNING IN LOCALHOST DEV MODE! !!!\nFEDERATION WON'T WORK UNTIL YOU CONFIGURE A dev.secret.exs"
)
end
if File.exists?("./config/dev.exported_from_db.secret.exs"),
do: import_config("dev.exported_from_db.secret.exs")

View file

@ -17,8 +17,10 @@
http: [port: 4000],
protocol: "http"
config :phoenix, serve_endpoints: true
# Do not print debug messages in production
config :logger, level: :info
config :logger, level: :warn
# ## SSL Support
#
@ -61,3 +63,6 @@
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
if File.exists?("./config/prod.exported_from_db.secret.exs"),
do: import_config("prod.exported_from_db.secret.exs")

config/releases.exs Normal file (19 lines added)
View file

@ -0,0 +1,19 @@
import Config
config :pleroma, :instance, static_dir: "/var/lib/pleroma/static"
config :pleroma, Pleroma.Uploaders.Local, uploads: "/var/lib/pleroma/uploads"
config_path = System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs"
if File.exists?(config_path) do
import_config config_path
else
warning = [
IO.ANSI.red(),
IO.ANSI.bright(),
"!!! #{config_path} not found! Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file",
IO.ANSI.reset()
]
IO.puts(warning)
end

View file

@ -17,15 +17,19 @@
# Print only warnings and errors during test
config :logger, level: :warn
config :pleroma, :auth, oauth_consumer_strategies: []
config :pleroma, Pleroma.Upload, filters: [], link_name: false
config :pleroma, Pleroma.Uploaders.Local, uploads: "test/uploads"
config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Test
config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Test, enabled: true
config :pleroma, :instance,
email: "admin@example.com",
notify_email: "noreply@example.com"
notify_email: "noreply@example.com",
skip_thread_containment: false,
federating: false
# Configure your database
config :pleroma, Pleroma.Repo,
@ -40,7 +44,11 @@
config :pbkdf2_elixir, rounds: 1
config :tesla, adapter: Tesla.Mock
config :pleroma, :rich_media, enabled: false
config :pleroma, :rich_media,
enabled: false,
ignore_hosts: [],
ignore_tld: ["local", "localdomain", "lan"]
config :web_push_encryption, :vapid_details,
subject: "mailto:administrator@example.com",
@ -57,7 +65,7 @@
total_user_limit: 3,
enabled: false
config :pleroma, :app_account_creation, max_requests: 5
config :pleroma, :rate_limit, app_account_creation: {10_000, 5}
config :pleroma, :http_security, report_uri: "https://endpoint.com"
@ -67,6 +75,8 @@
config :pleroma, :database, rum_enabled: rum_enabled
IO.puts("RUM enabled: #{rum_enabled}")
config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock
try do
import_config "test.secret.exs"
rescue

View file

@ -38,7 +38,9 @@ Authentication is required and the user must be an admin.
"moderator": bool
},
"local": bool,
"tags": array
"tags": array,
"avatar": string,
"display_name": string
},
...
]
@ -174,13 +176,13 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
- `nickname`
- `status` BOOLEAN field, false value means deactivation.
## `/api/pleroma/admin/users/:nickname`
## `/api/pleroma/admin/users/:nickname_or_id`
### Retrieve the details of a user
- Method: `GET`
- Params:
- `nickname`
- `nickname` or `id`
- Response:
- On failure: `Not found`
- On success: JSON of the user
@ -289,7 +291,7 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
- `limit`: optional, the number of records to retrieve
- `since_id`: optional, returns results that are more recent than the specified id
- `max_id`: optional, returns results that are older than the specified id
- Response:
- Response:
- On failure: 403 Forbidden error `{"error": "error_msg"}` when requested by anonymous or non-admin
- On success: JSON, returns a list of reports, where:
- `account`: the user who has been reported
@ -331,6 +333,7 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
"pleroma": {},
"sensitive": false
},
"tags": ["force_unlisted"],
"statuses_count": 3,
"url": "https://pleroma.example.org/users/user",
"username": "user"
@ -366,6 +369,7 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
"pleroma": {},
"sensitive": false
},
"tags": ["force_unlisted"],
"statuses_count": 1,
"url": "https://pleroma.example.org/users/lain",
"username": "lain"
@ -443,7 +447,7 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
- Params:
- `id`
- Response:
- On failure:
- On failure:
- 403 Forbidden `{"error": "error_msg"}`
- 404 Not Found `"Not found"`
- On success: JSON, Report object (see above)
@ -454,8 +458,8 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
- Params:
- `id`
- `state`: required, the new state. Valid values are `open`, `closed` and `resolved`
- Response:
- On failure:
- Response:
- On failure:
- 400 Bad Request `"Unsupported state"`
- 403 Forbidden `{"error": "error_msg"}`
- 404 Not Found `"Not found"`
@ -467,10 +471,10 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
- Params:
- `id`
- `status`: required, the message
- Response:
- On failure:
- 400 Bad Request `"Invalid parameters"` when `status` is missing
- 403 Forbidden `{"error": "error_msg"}`
- Response:
- On failure:
- 400 Bad Request `"Invalid parameters"` when `status` is missing
- 403 Forbidden `{"error": "error_msg"}`
- 404 Not Found `"Not found"`
- On success: JSON, created Mastodon Status entity
@ -540,10 +544,10 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
- `id`
- `sensitive`: optional, valid values are `true` or `false`
- `visibility`: optional, valid values are `public`, `private` and `unlisted`
- Response:
- On failure:
- Response:
- On failure:
- 400 Bad Request `"Unsupported visibility"`
- 403 Forbidden `{"error": "error_msg"}`
- 403 Forbidden `{"error": "error_msg"}`
- 404 Not Found `"Not found"`
- On success: JSON, Mastodon Status entity
@ -552,8 +556,97 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
- Method `DELETE`
- Params:
- `id`
- Response:
- On failure:
- 403 Forbidden `{"error": "error_msg"}`
- Response:
- On failure:
- 403 Forbidden `{"error": "error_msg"}`
- 404 Not Found `"Not found"`
- On success: 200 OK `{}`
## `/api/pleroma/admin/config`
### List config settings
- Method `GET`
- Params: none
- Response:
```json
{
configs: [
{
"group": string,
"key": string,
"value": string or {} or [] or {"tuple": []}
}
]
}
```
## `/api/pleroma/admin/config`
### Update config settings
Module names can be passed as strings starting with `Pleroma`, e.g. `"Pleroma.Upload"`.
Atom or boolean values can be passed with a leading `:`, e.g. `":true"`, `":upload"`. For keys the prefix is not needed.
Integers are passed with an `i:` prefix, e.g. `"i:150"`.
Tuples with more than 2 values are passed as `{"tuple": ["first_val", Pleroma.Module, []]}`.
`{"tuple": ["some_string", "Pleroma.Some.Module", []]}` will be converted to `{"some_string", Pleroma.Some.Module, []}`.
Compile-time settings (require an instance reboot):
- all settings under these keys:
- `:hackney_pools`
- `:chat`
- `Pleroma.Web.Endpoint`
- `Pleroma.Repo`
- partial settings:
- `Pleroma.Captcha` -> `:seconds_valid`
- `Pleroma.Upload` -> `:proxy_remote`
- `:instance` -> `:upload_limit`
- Method `POST`
- Params:
- `configs` => [
- `group` (string)
- `key` (string)
- `value` (string, [], {} or {"tuple": []})
- `delete` = true (optional, if parameter must be deleted)
]
- Request (example):
```json
{
configs: [
{
"group": "pleroma",
"key": "Pleroma.Upload",
"value": {
"uploader": "Pleroma.Uploaders.Local",
"filters": ["Pleroma.Upload.Filter.Dedupe"],
"link_name": ":true",
"proxy_remote": ":false",
"proxy_opts": {
"redirect_on_failure": ":false",
"max_body_length": "i:1048576",
"http": {
"follow_redirect": ":true",
"pool": ":upload"
}
},
"dispatch": {
"tuple": ["/api/v1/streaming", "Pleroma.Web.MastodonAPI.WebsocketHandler", []]
}
}
}
]
}
```
- Response:
```json
{
configs: [
{
"group": string,
"key": string,
"value": string or {} or [] or {"tuple": []}
}
]
}
```
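For illustration, applying the request example above would roughly correspond to the Elixir configuration below (a sketch; whether nested values decode to maps or keyword lists is up to the admin config implementation):
```elixir
config :pleroma, Pleroma.Upload,
  uploader: Pleroma.Uploaders.Local,
  filters: [Pleroma.Upload.Filter.Dedupe],
  link_name: true,
  proxy_remote: false,
  proxy_opts: [
    redirect_on_failure: false,
    max_body_length: 1_048_576,
    http: [
      follow_redirect: true,
      pool: :upload
    ]
  ],
  # the {"tuple": [...]} value decodes to a plain Elixir tuple
  dispatch: {"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []}
```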

View file

@ -44,6 +44,15 @@ Has these additional fields under the `pleroma` object:
- `hide_followers`: boolean, true when the user has follower hiding enabled
- `hide_follows`: boolean, true when the user has follow hiding enabled
- `settings_store`: A generic map of settings for frontends. Opaque to the backend. Only returned in `verify_credentials` and `update_credentials`
- `chat_token`: The token needed for Pleroma chat. Only returned in `verify_credentials`
### Extensions for PleromaFE
These endpoints are added for controlling PleromaFE features over the Mastodon API:
- PATCH `/api/v1/accounts/update_avatar`: Set/clear user avatar image
- PATCH `/api/v1/accounts/update_banner`: Set/clear user banner image
- PATCH `/api/v1/accounts/update_background`: Set/clear user background image
### Source
@ -85,6 +94,7 @@ Additional parameters can be added to the JSON body/Form data:
- `default_scope` - the scope returned under `privacy` key in Source subentity
- `pleroma_settings_store` - Opaque user settings to be saved on the backend.
- `skip_thread_containment` - if true, skip filtering out broken threads
- `pleroma_background_image` - sets the background image of the user.
### Pleroma Settings Store
Pleroma has a mechanism that allows frontends to save blobs of JSON for each user on the backend. This can be used to save frontend-specific settings for a user that the backend does not need to know about.

View file

@ -49,13 +49,6 @@ Feel free to contact us to be added to this list!
- Platforms: iOS, Android
- Features: No Streaming
### Tootdon
- Homepage: <http://tootdon.club/>, <http://blog.mastodon-tootdon.com/>
- Source Code: ???
- Contact: [@tootdon@mstdn.jp](https://mstdn.jp/users/tootdon)
- Platforms: Android, iOS
- Features: No Streaming
### Tusky
- Homepage: <https://tuskyapp.github.io/>
- Source Code: <https://github.com/tuskyapp/Tusky>

View file

@ -16,6 +16,13 @@ Note: `strip_exif` has been replaced by `Pleroma.Upload.Filter.Mogrify`.
## Pleroma.Uploaders.Local
* `uploads`: Which directory to store the user uploads in, relative to Pleroma's working directory
## Pleroma.Uploaders.S3
* `bucket`: S3 bucket name
* `public_endpoint`: S3 endpoint that the user finally accesses (e.g. "https://s3.dualstack.ap-northeast-1.amazonaws.com")
* `truncated_namespace`: If you use an S3-compatible service such as Digital Ocean Spaces or a CDN, set the folder name or "".
For example, when using a CDN in S3 virtual host format, set this to "".
In that case, set `public_endpoint` to the CDN's CNAME.
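A configuration sketch based on the options above (the bucket name is a placeholder; the endpoint is the example value given):
```elixir
config :pleroma, Pleroma.Uploaders.S3,
  bucket: "my-pleroma-bucket",
  public_endpoint: "https://s3.dualstack.ap-northeast-1.amazonaws.com",
  # "" when using a CDN in S3 virtual host format; in that case
  # public_endpoint should point at the CDN CNAME
  truncated_namespace: ""
```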
## Pleroma.Upload.Filter.Mogrify
* `args`: List of actions for the `mogrify` command like `"strip"` or `["strip", "auto-orient", {"implode", "1"}]`.
@ -29,11 +36,12 @@ No specific configuration.
This filter replaces the filename (not the path) of an upload. For complete obfuscation, add
`Pleroma.Upload.Filter.Dedupe` before AnonymizeFilename.
* `text`: Text to replace filenames in links. If empty, `{random}.extension` will be used.
* `text`: Text to replace filenames in links. If empty, `{random}.extension` will be used. You can get the original filename extension by using `{extension}`, for example `custom-file-name.{extension}`.
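A sketch of the corresponding configuration, assuming the usual `config :pleroma, <Module>` convention and that the filter is enabled in the `Pleroma.Upload` `filters` list:
```elixir
config :pleroma, Pleroma.Upload,
  filters: [Pleroma.Upload.Filter.Dedupe, Pleroma.Upload.Filter.AnonymizeFilename]

config :pleroma, Pleroma.Upload.Filter.AnonymizeFilename,
  # keep the original extension, replace the rest of the filename
  text: "custom-file-name.{extension}"
```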
## Pleroma.Emails.Mailer
* `adapter`: one of the mail adapters listed in [Swoosh readme](https://github.com/swoosh/swoosh#adapters), or `Swoosh.Adapters.Local` for in-memory mailbox.
* `api_key` / `password` and / or other adapter-specific settings, per the above documentation.
* `enabled`: Allows enabling/disabling sending emails. Default: `false`.
An example for Sendgrid adapter:
@ -80,15 +88,19 @@ config :pleroma, Pleroma.Emails.Mailer,
* `invites_enabled`: Enable user invitations for admins (depends on `registrations_open: false`).
* `account_activation_required`: Require users to confirm their emails before signing in.
* `federating`: Enable federation with other instances
* `federation_incoming_replies_max_depth`: Maximum depth of reply-to activities to fetch on incoming federation, to prevent out-of-memory situations while fetching very long threads. If set to `nil`, threads of any depth will be fetched. Lower this value if you experience out-of-memory crashes.
* `federation_reachability_timeout_days`: Timeout (in days) of each external federation target being unreachable prior to pausing federating to it.
* `allow_relay`: Enable Pleroma's Relay, which makes it possible to follow a whole instance
* `rewrite_policy`: Message Rewrite Policy, either one or a list. Here are the ones available by default:
* `Pleroma.Web.ActivityPub.MRF.NoOpPolicy`: Doesn't modify activities (default)
* `Pleroma.Web.ActivityPub.MRF.DropPolicy`: Drops all activities. It generally doesn't make sense to use in production
* `Pleroma.Web.ActivityPub.MRF.SimplePolicy`: Restrict the visibility of activities from certain instances (See ``:mrf_simple`` section)
* `Pleroma.Web.ActivityPub.MRF.TagPolicy`: Applies policies to individual users based on tags, which can be set using pleroma-fe/admin-fe/any other app that supports Pleroma Admin API. For example it allows marking posts from individual users nsfw (sensitive)
* `Pleroma.Web.ActivityPub.MRF.SubchainPolicy`: Selectively runs other MRF policies when messages match (see ``:mrf_subchain`` section)
* `Pleroma.Web.ActivityPub.MRF.RejectNonPublic`: Drops posts with non-public visibility settings (See ``:mrf_rejectnonpublic`` section)
* `Pleroma.Web.ActivityPub.MRF.EnsureRePrepended`: Rewrites posts to ensure that replies to posts with subjects do not have an identical subject and instead begin with re:.
* `Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy`: Rejects posts from likely spambots by rejecting posts from new users that contain links.
* `Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy`: Crawls attachments using their MediaProxy URLs so that the MediaProxy cache is primed.
* `public`: Makes the client API available only in authenticated mode, except for user profiles. Useful for disabling the Local Timeline and The Whole Known Network.
* `quarantined_instances`: List of ActivityPub instances where private (DMs, followers-only) activities will not be sent.
* `managed_config`: Whether the config for pleroma-fe is configured in this config or in ``static/config.json``
@ -108,16 +120,19 @@ config :pleroma, Pleroma.Emails.Mailer,
* `welcome_message`: A message that will be sent to newly registered users as a direct message.
* `welcome_user_nickname`: The nickname of the local user that sends the welcome message.
* `max_report_comment_size`: The maximum size of the report comment (Default: `1000`)
* `safe_dm_mentions`: If set to true, only mentions at the beginning of a post will be used to address people in direct messages. This is to prevent accidental mentioning of people when talking about them (e.g. "@friend hey i really don't like @enemy"). (Default: `false`)
* `healthcheck`: if set to true, system data will be shown on ``/api/pleroma/healthcheck``.
* `remote_post_retention_days`: the default amount of days to retain remote posts when pruning the database
* `skip_thread_containment`: Skip filter out broken threads. the default is `false`.
* `safe_dm_mentions`: If set to true, only mentions at the beginning of a post will be used to address people in direct messages. This is to prevent accidental mentioning of people when talking about them (e.g. "@friend hey i really don't like @enemy"). Default: `false`.
* `healthcheck`: If set to true, system data will be shown on ``/api/pleroma/healthcheck``.
* `remote_post_retention_days`: The default amount of days to retain remote posts when pruning the database.
* `skip_thread_containment`: Skip filtering out broken threads. The default is `false`.
* `limit_to_local_content`: Limit unauthenticated users to search for local statuses and users only. Possible values: `:unauthenticated`, `:all` and `false`. The default is `:unauthenticated`.
* `dynamic_configuration`: Allow transferring configuration to the DB with subsequent customization from the Admin API.
* `external_user_synchronization`: Following/followers counters synchronization settings.
* `enabled`: Enables synchronization
* `interval`: Interval between synchronization.
* `max_retries`: Max retries per host. After exceeding the limit, the check will not be carried out for users from this host.
* `limit`: Batch size of users to process at one time.
## :app_account_creation
Settings for the account creation REST API
* `enabled`: Enable/disable registration
* `max_requests`: Number of requests allowed for creating accounts
* `interval`: Interval for restricting requests for one ip (seconds)
## :logger
* `backends`: `:console` is used to send logs to stdout, `{ExSyslogger, :ex_syslogger}` to log to syslog, and `Quack.Logger` to log to Slack
@ -273,7 +288,7 @@ config :pleroma, :mrf_subchain,
## Pleroma.Web.Endpoint
`Phoenix` endpoint configuration, all configuration options can be viewed [here](https://hexdocs.pm/phoenix/Phoenix.Endpoint.html#module-dynamic-configuration), only common options are listed here
* `http` - a list containing http protocol configuration, all configuration options can be viewed [here](https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html#module-options), only common options are listed here
* `http` - a list containing http protocol configuration, all configuration options can be viewed [here](https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html#module-options), only common options are listed here. For deployment using docker, you need to set this to `[ip: {0,0,0,0}, port: 4000]` to make pleroma accessible from other containers (such as your nginx server).
- `ip` - a tuple consisting of 4 integers
- `port`
* `url` - a list containing the configuration for generating urls, accepts
@ -411,6 +426,8 @@ This config contains two queues: `federator_incoming` and `federator_outgoing`.
## :rich_media
* `enabled`: if enabled the instance will parse metadata from attached links to generate link previews
* `ignore_hosts`: list of hosts which will be ignored by the metadata parser. For example `["accounts.google.com", "xss.website"]`, defaults to `[]`.
* `ignore_tld`: list of TLDs (top-level domains) that will be ignored when parsing metadata. Defaults to `["local", "localdomain", "lan"]`.
## :fetch_initial_posts
* `enabled`: if enabled, when a new user is federated with, fetch some of their latest posts
@ -514,7 +531,7 @@ Authentication / authorization settings.
* `auth_template`: authentication form template. By default it's `show.html` which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/show.html.eex`.
* `oauth_consumer_template`: OAuth consumer mode authentication form template. By default it's `consumer.html` which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/consumer.html.eex`.
* `oauth_consumer_strategies`: the list of enabled OAuth consumer strategies; by default it's set by OAUTH_CONSUMER_STRATEGIES environment variable. Each entry in this space-delimited string should be of format `<strategy>` or `<strategy>:<dependency>` (e.g. `twitter` or `keycloak:ueberauth_keycloak_strategy` in case dependency is named differently than `ueberauth_<strategy>`).
* `oauth_consumer_strategies`: the list of enabled OAuth consumer strategies; by default it's set by `OAUTH_CONSUMER_STRATEGIES` environment variable. Each entry in this space-delimited string should be of format `<strategy>` or `<strategy>:<dependency>` (e.g. `twitter` or `keycloak:ueberauth_keycloak_strategy` in case dependency is named differently than `ueberauth_<strategy>`).
## OAuth consumer mode
@ -567,6 +584,24 @@ config :ueberauth, Ueberauth,
providers: [
microsoft: {Ueberauth.Strategy.Microsoft, [callback_params: []]}
]
# Keycloak
# Note: make sure to add `keycloak:ueberauth_keycloak_strategy` entry to `OAUTH_CONSUMER_STRATEGIES` environment variable
keycloak_url = "https://publicly-reachable-keycloak-instance.org:8080"
config :ueberauth, Ueberauth.Strategy.Keycloak.OAuth,
client_id: System.get_env("KEYCLOAK_CLIENT_ID"),
client_secret: System.get_env("KEYCLOAK_CLIENT_SECRET"),
site: keycloak_url,
authorize_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/auth",
token_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/token",
userinfo_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/userinfo",
token_method: :post
config :ueberauth, Ueberauth,
providers: [
keycloak: {Ueberauth.Strategy.Keycloak, [uid_field: :email]}
]
```
## OAuth 2.0 provider - :oauth2
@ -580,6 +615,7 @@ Configure OAuth 2 provider capabilities:
## :emoji
* `shortcode_globs`: Location of custom emoji files. `*` can be used as a wildcard. Example `["/emoji/custom/**/*.png"]`
* `pack_extensions`: A list of file extensions for emojis, when no emoji.txt for a pack is present. Example `[".png", ".gif"]`
* `groups`: Emojis are ordered in groups (tags). This is an array of key-value pairs where the key is the groupname and the value the location or array of locations. `*` can be used as a wildcard. Example `[Custom: ["/emoji/*.png", "/emoji/custom/*.png"]]`
* `default_manifest`: Location of the JSON-manifest. This manifest contains information about the emoji-packs you can download. Currently only one manifest can be added (no arrays).
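As a sketch, this mirrors the `groups` default added to `config/config.exs` in this commit:
```elixir
config :pleroma, :emoji,
  shortcode_globs: ["/emoji/custom/**/*.png"],
  pack_extensions: [".png", ".gif"],
  groups: [
    # groups listed here take priority over the defaults
    Custom: ["/emoji/*.png", "/emoji/**/*.png"]
  ]
```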
@ -597,3 +633,14 @@ To enable them, both the `rum_enabled` flag has to be set and the following spec
`mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`
This will probably take a long time.
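The flag itself is a one-liner (it defaults to `false` in `config/config.exs`):
```elixir
config :pleroma, :database, rum_enabled: true
```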
## :rate_limit
A keyword list of rate limiters, where the key is a limiter name and the value is the limiter configuration. The basic configuration is a tuple where:
* The first element: `scale` (Integer). The time scale in milliseconds.
* The second element: `limit` (Integer). How many requests to limit in the time scale provided.
It is also possible to have different limits for unauthenticated and authenticated users: the keyword value must be a list of two tuples, where the first one is the config for unauthenticated users and the second one is for authenticated users.
See [`Pleroma.Plugs.RateLimiter`](Pleroma.Plugs.RateLimiter.html) documentation for examples.
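As a sketch, mirroring the defaults added to `config/config.exs` in this commit (`search` uses the unauthenticated/authenticated form, `app_account_creation` a single tuple):
```elixir
config :pleroma, :rate_limit,
  # unauthenticated: 10 requests per second; authenticated: 30 per second
  search: [{1_000, 10}, {1_000, 30}],
  # 25 account-creation requests per 30 minutes per IP
  app_account_creation: {1_800_000, 25}
```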

View file

@ -9,8 +9,8 @@ config :pleroma, :suggestions,
third_party_engine:
"http://vinayaka.distsn.org/cgi-bin/vinayaka-user-match-suggestions-api.cgi?{{host}}+{{user}}",
timeout: 300_000,
limit: 23,
web: "https://vinayaka.distsn.org/?{{host}}+{{user}}"
limit: 40,
web: "https://vinayaka.distsn.org"
```
@ -26,6 +26,6 @@ config :pleroma, :suggestions,
third_party_engine:
"http://vinayaka.distsn.org/cgi-bin/vinayaka-user-new-suggestions-api.cgi?{{host}}+{{user}}",
timeout: 60_000,
limit: 23,
limit: 40,
web: "https://vinayaka.distsn.org/user-new.html"
```

View file

@ -203,12 +203,12 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
#### Further reading
* [Admin tasks](Admin tasks)
* [Backup your instance](Backup-your-instance)
* [Configuration tips](General tips for customizing pleroma fe)
* [Hardening your instance](Hardening-your-instance)
* [How to activate mediaproxy](How-to-activate-mediaproxy)
* [Small Pleroma-FE customizations](Small customizations)
* [Updating your instance](Updating-your-instance)
* [Backup your instance](backup.html)
* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
* [Hardening your instance](hardening.html)
* [How to activate mediaproxy](howto_mediaproxy.html)
* [Small Pleroma-FE customizations](small_customizations.html)
* [Updating your instance](updating.html)
## Questions

View file

@ -201,12 +201,12 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
#### Further reading
* [Admin tasks](Admin tasks)
* [Backup your instance](Backup-your-instance)
* [Configuration tips](General tips for customizing pleroma fe)
* [Hardening your instance](Hardening-your-instance)
* [How to activate mediaproxy](How-to-activate-mediaproxy)
* [Small Pleroma-FE customizations](Small customizations)
* [Updating your instance](Updating-your-instance)
* [Backup your instance](backup.html)
* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
* [Hardening your instance](hardening.html)
* [How to activate mediaproxy](howto_mediaproxy.html)
* [Small Pleroma-FE customizations](small_customizations.html)
* [Updating your instance](updating.html)
## Questions

View file

@ -265,12 +265,12 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
#### Further reading
* [Admin tasks](Admin tasks)
* [Backup your instance](Backup-your-instance)
* [Configuration tips](General tips for customizing pleroma fe)
* [Hardening your instance](Hardening-your-instance)
* [How to activate mediaproxy](How-to-activate-mediaproxy)
* [Small Pleroma-FE customizations](Small customizations)
* [Updating your instance](Updating-your-instance)
* [Backup your instance](backup.html)
* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
* [Hardening your instance](hardening.html)
* [How to activate mediaproxy](howto_mediaproxy.html)
* [Small Pleroma-FE customizations](small_customizations.html)
* [Updating your instance](updating.html)
## Questions

View file

@ -191,12 +191,12 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
#### Further reading
* [Admin tasks](Admin tasks)
* [Backup your instance](Backup-your-instance)
* [Configuration tips](General tips for customizing pleroma fe)
* [Hardening your instance](Hardening-your-instance)
* [How to activate mediaproxy](How-to-activate-mediaproxy)
* [Small Pleroma-FE customizations](Small customizations)
* [Updating your instance](Updating-your-instance)
* [Backup your instance](backup.html)
* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
* [Hardening your instance](hardening.html)
* [How to activate mediaproxy](howto_mediaproxy.html)
* [Small Pleroma-FE customizations](small_customizations.html)
* [Updating your instance](updating.html)
## Questions

View file

@ -180,9 +180,13 @@ mix set_moderator username [true|false]
#### コンフィギュレーションとカスタマイズ
* [Configuration tips](General tips for customizing pleroma fe)
* [Small Pleroma-FE customizations](Small customizations)
* [Admin tasks](Admin tasks)
* [Backup your instance](backup.html)
* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
* [Hardening your instance](hardening.html)
* [How to activate mediaproxy](howto_mediaproxy.html)
* [Small Pleroma-FE customizations](small_customizations.html)
* [Updating your instance](updating.html)
## 質問ある?

View file

@ -284,12 +284,12 @@ If you opted to allow sudo for the `pleroma` user but would like to remove the a
#### Further reading
* [Admin tasks](Admin tasks)
* [Backup your instance](Backup-your-instance)
* [Configuration tips](General tips for customizing pleroma fe)
* [Hardening your instance](Hardening-your-instance)
* [How to activate mediaproxy](How-to-activate-mediaproxy)
* [Small Pleroma-FE customizations](Small customizations)
* [Updating your instance](Updating-your-instance)
* [Backup your instance](backup.html)
* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
* [Hardening your instance](hardening.html)
* [How to activate mediaproxy](howto_mediaproxy.html)
* [Small Pleroma-FE customizations](small_customizations.html)
* [Updating your instance](updating.html)
## Questions

View file

@ -0,0 +1,153 @@
# Switching a from-source install to OTP releases
## What are OTP releases?
OTP releases are as close as you can get to binary releases with Erlang/Elixir. The release is self-contained and provides everything needed to boot it. It is easily administered via the provided shell script, which can open a remote console, start/stop/restart the release, start it in the background, send remote commands, and more.
### Can I still run the develop branch if I decide to use them?
Yes, we produce builds for every commit in `develop`. However, `develop` is considered unstable, so please don't use it in production just to get faster access to new features, unless you need them as an app developer.
## Why would one want to switch?
Benefits of OTP releases over from-source installs include:
* **Less space used.** OTP releases come without source code or build tools, have docs and debug symbols stripped from the compiled bytecode, and do not contain tests, docs, or revision history.
* **Minimal system dependencies.** Excluding the database and reverse proxy, only `curl`, `unzip` and `ncurses` are needed to download and run the release. Because the Erlang runtime and Elixir are shipped with Pleroma, one can use the latest BEAM optimizations and Pleroma features without having to worry about outdated system repos or a missing `erlang-*` package.
* **Potentially fewer bugs and better performance.** This extends the previous point: because we have control over exactly what gets shipped, we can tweak the VM arguments and forget about weird bugs due to Erlang/Elixir version mismatches.
* **Faster and less bug-prone mix tasks.** On a from-source install one has to wait until a new Pleroma node is started for each mix task, and tasks execute outside of the instance context (for example, if a user was deleted via a mix task, the instance will have no knowledge of that and continue to display the status count and follows until the cache expires). Mix tasks in OTP releases are executed by calling into a running instance via RPC, which solves both of these problems.
### Sounds great, how do I switch?
Currently we support Linux machines with GNU (e.g. Debian, Ubuntu) or musl (e.g. Alpine) libc and `x86_64`, `aarch64` or `armv7l` CPUs. If you are unsure, check the [Detecting flavour](otp_en.html#detecting-flavour) section in OTP install guide. If your platform is supported, proceed with the guide, if not check the [My platform is not supported](#my-platform-is-not-supported) section.
### I don't think it is worth the effort, can I stay on a from-source install?
Yes, currently there are no plans to deprecate them.
### My platform is not supported
If you think your platform is a popular choice for running Pleroma instances, or has the potential to become one, you can [file an issue on our Gitlab](https://git.pleroma.social/pleroma/pleroma/issues/new). If not, guides on how to build and update releases by yourself will be available soon.
## Pre-requisites
You will be running commands as root. If you aren't root already, please elevate your privileges by executing `sudo su`/`su`.
The system needs to have `curl` and `unzip` installed for downloading and unpacking release builds.
Debian/Ubuntu:
```sh
apt install curl unzip
```
Alpine:
```sh
apk add curl unzip
```
## Moving content out of the application directory
When using OTP releases, the application directory changes with every version, so it would be a bother to keep content there (it is also dangerous unless the `--no-rm` option is used when updating). Fortunately, almost all paths in Pleroma are configurable, so it is possible to move them out of there.
Pleroma should be stopped before proceeding.
### Moving uploads/custom public files directory
```sh
# Create uploads directory and set proper permissions (skip if using a remote uploader)
# Note: It does not have to be `/var/lib/pleroma/uploads`, you can configure it to be something else later
mkdir -p /var/lib/pleroma/uploads
chown -R pleroma /var/lib/pleroma
# Create custom public files directory
# Note: It does not have to be `/var/lib/pleroma/static`, you can configure it to be something else later
mkdir -p /var/lib/pleroma/static
chown -R pleroma /var/lib/pleroma
# If you use the local uploader with default settings your uploads should be located in `~pleroma/uploads`
mv ~pleroma/uploads/* /var/lib/pleroma/uploads
# If you have created the custom public files directory with default settings it should be located in `~pleroma/instance/static`
mv ~pleroma/instance/static /var/lib/pleroma/static
```
### Moving emoji
Assuming you have all emojis in subdirectories of `priv/static/emoji` moving them can be done with
```sh
mkdir /var/lib/pleroma/static/emoji
ls -d ~pleroma/priv/static/emoji/*/ | xargs -i sh -c 'mv "{}" "/var/lib/pleroma/static/emoji/$(basename {})"'
```
But, if for some reason you have custom emojis in the root directory you should copy the whole directory instead.
```sh
mv ~pleroma/priv/static/emoji /var/lib/pleroma/static/emoji
```
and then copy custom emojis to `/var/lib/pleroma/static/emoji/custom`.
This is needed because storing custom emojis in the root directory is deprecated, but if you just move them to `/var/lib/pleroma/static/emoji/custom` it will break emoji urls on old posts.
Note that globs have been replaced with `pack_extensions`, so if your emojis are not in png/gif you should [modify the default value](config.html#emoji).
### Moving the config
```sh
# Create the config directory
# The default path for Pleroma config is /etc/pleroma/config.exs
# but it can be set via PLEROMA_CONFIG_PATH environment variable
mkdir -p /etc/pleroma
# Move the config file
mv ~pleroma/config/prod.secret.exs /etc/pleroma/config.exs
# Change `use Mix.Config` at the top to `import Config`
$EDITOR /etc/pleroma/config.exs
```
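For reference, that last edit amounts to swapping the config header (a sketch of the before/after):
```elixir
# Before (from-source install), first line of prod.secret.exs:
#   use Mix.Config

# After (OTP release), first line of /etc/pleroma/config.exs:
import Config
```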
## Installing the release
Before proceeding, get the flavour from [Detecting flavour](otp_en.html#detecting-flavour) section in OTP installation guide.
```sh
# Delete all files in pleroma user's directory
rm -r ~pleroma/*
# Set the flavour environment variable to the string you got in Detecting flavour section.
# For example if the flavour is `arm64-musl` the command will be
export FLAVOUR="arm64-musl"
# Clone the release build into a temporary directory and unpack it
# Replace `master` with `develop` if you want to run the develop branch
su pleroma -s $SHELL -lc "
curl 'https://git.pleroma.social/api/v4/projects/2/jobs/artifacts/master/download?job=$FLAVOUR' -o /tmp/pleroma.zip
unzip /tmp/pleroma.zip -d /tmp/
"
# Move the release to the home directory and delete temporary files
su pleroma -s $SHELL -lc "
mv /tmp/release/* ~pleroma/
rmdir /tmp/release
rm /tmp/pleroma.zip
"
# Start the instance to verify that everything is working as expected
su pleroma -s $SHELL -lc "./bin/pleroma daemon"
# Wait for about 20 seconds and query the instance endpoint, if it shows your uri, name and email correctly, you are configured correctly
sleep 20 && curl http://localhost:4000/api/v1/instance
# Stop the instance
su pleroma -s $SHELL -lc "./bin/pleroma stop"
```
## Setting up a system service
OTP releases have different service files than from-source installs so they need to be copied over again.
**Warning:** The service files assume pleroma user's home directory is `/opt/pleroma`, please make sure all paths fit your installation.
Debian/Ubuntu:
```sh
# Copy the service into a proper directory
cp ~pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service
# Reload service files
systemctl daemon-reload
# Reenable pleroma to start on boot
systemctl reenable pleroma
# Start pleroma
systemctl start pleroma
```
Alpine:
```sh
# Copy the service into a proper directory
cp -f ~pleroma/installation/init.d/pleroma /etc/init.d/pleroma
# Start pleroma
rc-service pleroma start
```
## Running mix tasks
Refer to [Running mix tasks](otp_en.html#running-mix-tasks) section from OTP release installation guide.
## Updating
Refer to [Updating](otp_en.html#updating) section from OTP release installation guide.

docs/installation/otp_en.md Normal file (261 lines added)
View file

@ -0,0 +1,261 @@
# Installing on Linux using OTP releases
## Pre-requisites
* A machine running Linux with GNU (e.g. Debian, Ubuntu) or musl (e.g. Alpine) libc and `x86_64`, `aarch64` or `armv7l` CPU, you have root access to. If you are not sure if it's compatible see [Detecting flavour section](#detecting-flavour) below
* A (sub)domain pointed to the machine
You will be running commands as root. If you aren't root already, please elevate your privileges by executing `sudo su`/`su`.
While in theory OTP releases can be installed on any compatible machine, for the sake of simplicity this guide focuses only on Debian/Ubuntu/Alpine.
### Detecting flavour
Paste the following into the shell:
```sh
arch="$(uname -m)";if [ "$arch" = "x86_64" ];then arch="amd64";elif [ "$arch" = "armv7l" ];then arch="arm";elif [ "$arch" = "aarch64" ];then arch="arm64";else echo "Unsupported arch: $arch">&2;fi;if getconf GNU_LIBC_VERSION>/dev/null;then libc_postfix="";elif [ "$(ldd 2>&1|head -c 9)" = "musl libc" ];then libc_postfix="-musl";elif [ "$(find /lib/libc.musl*|wc -l)" ];then libc_postfix="-musl";else echo "Unsupported libc">&2;fi;echo "$arch$libc_postfix"
```
If your platform is supported, the output will contain the flavour string; you will need it later. If not, this just means that we don't build releases for your platform; you can still try installing from source.
### Installing the required packages
Other than things bundled in the OTP release Pleroma depends on:
* curl (to download the release build)
* unzip (needed to unpack release builds)
* ncurses (ERTS won't run without it)
* PostgreSQL (also utilizes extensions in postgresql-contrib)
* nginx (could be swapped with another reverse proxy but this guide covers only it)
* certbot (for Let's Encrypt certificates, could be swapped with another ACME client, but this guide covers only it)
Debian/Ubuntu:
```sh
apt install curl unzip libncurses5 postgresql postgresql-contrib nginx certbot
```
Alpine:
```sh
echo "http://nl.alpinelinux.org/alpine/latest-stable/community" >> /etc/apk/repositories
apk update
apk add curl unzip ncurses postgresql postgresql-contrib nginx certbot
```
## Setup
### Configuring PostgreSQL
#### (Optional) Installing RUM indexes
RUM indexes are an alternative indexing scheme that is not included in PostgreSQL by default. You can read more about them on the [Configuration page](config.html#rum-indexing-for-full-text-search). They are completely optional and most of the time are not worth it, especially if you are running a single user instance (unless you absolutely need ordered search results).
Debian/Ubuntu (available only on Buster/19.04):
```sh
apt install postgresql-11-rum
```
Alpine:
```sh
apk add git build-base postgresql-dev
git clone https://github.com/postgrespro/rum /tmp/rum
cd /tmp/rum
make USE_PGXS=1
make USE_PGXS=1 install
cd
rm -r /tmp/rum
```
#### (Optional) Performance configuration
For optimal performance, you may use [PGTune](https://pgtune.leopard.in.ua); don't forget to restart PostgreSQL after editing the configuration
Debian/Ubuntu:
```sh
systemctl restart postgresql
```
Alpine:
```sh
rc-service postgresql restart
```
### Installing Pleroma
```sh
# Create the Pleroma user
adduser --system --shell /bin/false --home /opt/pleroma pleroma
# Set the flavour environment variable to the string you got in Detecting flavour section.
# For example if the flavour is `arm64-musl` the command will be
export FLAVOUR="arm64-musl"
# Clone the release build into a temporary directory and unpack it
su pleroma -s $SHELL -lc "
curl 'https://git.pleroma.social/api/v4/projects/2/jobs/artifacts/master/download?job=$FLAVOUR' -o /tmp/pleroma.zip
unzip /tmp/pleroma.zip -d /tmp/
"
# Move the release to the home directory and delete temporary files
su pleroma -s $SHELL -lc "
mv /tmp/release/* /opt/pleroma
rmdir /tmp/release
rm /tmp/pleroma.zip
"
# Create uploads directory and set proper permissions (skip if planning to use a remote uploader)
# Note: It does not have to be `/var/lib/pleroma/uploads`, the config generator will ask about the upload directory later
mkdir -p /var/lib/pleroma/uploads
chown -R pleroma /var/lib/pleroma
# Create custom public files directory (custom emojis, frontend bundle overrides, robots.txt, etc.)
# Note: It does not have to be `/var/lib/pleroma/static`, the config generator will ask about the custom public files directory later
mkdir -p /var/lib/pleroma/static
chown -R pleroma /var/lib/pleroma
# Create a config directory
mkdir -p /etc/pleroma
chown -R pleroma /etc/pleroma
# Run the config generator
su pleroma -s $SHELL -lc "./bin/pleroma_ctl instance gen --output /etc/pleroma/config.exs --output-psql /tmp/setup_db.psql"
# Create the postgres database
su postgres -s $SHELL -lc "psql -f /tmp/setup_db.psql"
# Create the database schema
su pleroma -s $SHELL -lc "./bin/pleroma_ctl migrate"
# If you have installed RUM indexes, uncomment and run
# su pleroma -s $SHELL -lc "./bin/pleroma_ctl migrate --migrations-path priv/repo/optional_migrations/rum_indexing/"
# Start the instance to verify that everything is working as expected
su pleroma -s $SHELL -lc "./bin/pleroma daemon"
# Wait for about 20 seconds and query the instance endpoint, if it shows your uri, name and email correctly, you are configured correctly
sleep 20 && curl http://localhost:4000/api/v1/instance
# Stop the instance
su pleroma -s $SHELL -lc "./bin/pleroma stop"
```
### Setting up nginx and getting Let's Encrypt SSL certificates
```sh
# Get a Let's Encrypt certificate
certbot certonly --standalone --preferred-challenges http -d yourinstance.tld
# Copy the Pleroma nginx configuration to the nginx folder
# The location of nginx configs is dependent on the distro
# For Debian/Ubuntu:
cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/sites-available/pleroma.nginx
ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/pleroma.nginx
# For Alpine:
cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/conf.d/pleroma.conf
# If your distro does not have either of those you can append
# `include /etc/nginx/pleroma.conf` to the end of the http section in /etc/nginx/nginx.conf and
cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/pleroma.conf
# Edit the nginx config replacing example.tld with your (sub)domain
$EDITOR path-to-nginx-config
# Verify that the config is valid
nginx -t
# Start nginx
# For Debian/Ubuntu:
systemctl start nginx
# For Alpine:
rc-service nginx start
```
At this point if you open your (sub)domain in a browser you should see a 502 error, that's because pleroma is not started yet.
### Setting up a system service
Debian/Ubuntu:
```sh
# Copy the service into a proper directory
cp /opt/pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service
# Start pleroma and enable it on boot
systemctl start pleroma
systemctl enable pleroma
```
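On Debian/Ubuntu you can confirm the service actually started and watch its output with the standard systemd tools:
```sh
# Check that the service is running
systemctl status pleroma
# Follow the logs in real time (press Ctrl+C to stop)
journalctl -u pleroma -f
```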
Alpine:
```sh
# Copy the service into a proper directory
cp /opt/pleroma/installation/init.d/pleroma /etc/init.d/pleroma
# Start pleroma and enable it on boot
rc-service pleroma start
rc-update add pleroma
```
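On Alpine you can verify the service state with OpenRC. Where the log output ends up depends on your logger setup, so the log path below is only an assumption and may need adjusting:
```sh
# Check that the service is running
rc-service pleroma status
# Inspect recent log output (path is an assumption; adjust to your syslog configuration)
tail -f /var/log/pleroma.log
```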
If everything worked, you should see Pleroma-FE when visiting your domain. If that didn't happen, try reviewing the installation steps, starting Pleroma in the foreground and checking whether it prints any errors.
Still doesn't work? Feel free to contact us on [#pleroma on freenode](https://webchat.freenode.net/?channels=%23pleroma) or via Matrix at <https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org>. You can also [file an issue on our Gitlab](https://git.pleroma.social/pleroma/pleroma/issues/new).
## Post installation
### Setting up auto-renew Let's Encrypt certificate
```sh
# Create the directory for webroot challenges
mkdir -p /var/lib/letsencrypt
# Uncomment the webroot method
$EDITOR path-to-nginx-config
# Verify that the config is valid
nginx -t
```
Debian/Ubuntu:
```sh
# Restart nginx
systemctl restart nginx
# Ensure the webroot method and post hook are working
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --dry-run --post-hook 'systemctl reload nginx'
# Add it to the daily cron
echo '#!/bin/sh
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --post-hook "systemctl reload nginx"
' > /etc/cron.daily/renew-pleroma-cert
chmod +x /etc/cron.daily/renew-pleroma-cert
# If everything worked, the output should contain /etc/cron.daily/renew-pleroma-cert
run-parts --test /etc/cron.daily
```
Alpine:
```sh
# Restart nginx
rc-service nginx restart
# Start the cron daemon and make it start on boot
rc-service crond start
rc-update add crond
# Ensure the webroot method and post hook are working
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --dry-run --post-hook 'rc-service nginx reload'
# Add it to the daily cron
echo '#!/bin/sh
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --post-hook "rc-service nginx reload"
' > /etc/periodic/daily/renew-pleroma-cert
chmod +x /etc/periodic/daily/renew-pleroma-cert
# If everything worked, this should output /etc/periodic/daily/renew-pleroma-cert
run-parts --test /etc/periodic/daily
```
### Running mix tasks
Throughout the wiki and guides there are a lot of references to mix tasks. Since `mix` is a build tool, you can't just call `mix pleroma.task`; instead, you should call `pleroma_ctl` with the `pleroma`/`ecto` namespace stripped.
So, for example, if the task is `mix pleroma.user set admin --admin`, you should run it like this:
```sh
su pleroma -s $SHELL -lc "./bin/pleroma_ctl user set admin --admin"
```
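The same rule applies to the database tasks: the `pleroma.ecto.` prefix is dropped as well. Following that rule, `mix pleroma.ecto.rollback` would be run as:
```sh
su pleroma -s $SHELL -lc "./bin/pleroma_ctl rollback"
```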
### Updating
Generally, doing the following is enough:
```sh
# Download the new release
su pleroma -s $SHELL -lc "./bin/pleroma_ctl update"
# Migrate the database (you are advised to stop the instance before doing this)
su pleroma -s $SHELL -lc "./bin/pleroma_ctl migrate"
```
But you should **always check the release notes/changelog** in case there are config deprecations, special update steps, etc.
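If you want to confirm that the new version is actually running, restart the instance (`systemctl restart pleroma` on Debian/Ubuntu, `rc-service pleroma restart` on Alpine) and query the instance endpoint again; the reported version should match the release you just downloaded:
```sh
# The version field should reflect the release you just updated to
curl -s http://localhost:4000/api/v1/instance | grep -o '"version":"[^"]*"'
```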
## Further reading
* [Configuration](config.html)
* [Pleroma's base config.exs](https://git.pleroma.social/pleroma/pleroma/blob/master/config/config.exs)
* [Hardening your instance](hardening.html)
* [Pleroma Clients](clients.html)
* [Emoji pack manager](Mix.Tasks.Pleroma.Emoji.html)

View file

@ -14,17 +14,19 @@ server {
listen 80;
listen [::]:80;
return 301 https://$server_name$request_uri;
# Uncomment this if you need to use the 'webroot' method with certbot. Make sure
# that the directory exists and that it is accessible by the webserver. If you followed
# the guide, you already ran 'sudo mkdir -p /var/lib/letsencrypt' to create the folder.
# the guide, you already ran 'mkdir -p /var/lib/letsencrypt' to create the folder.
# You may need to load this file with the ssl server block commented out, run certbot
# to get the certificate, and then uncomment it.
#
# location ~ /\.well-known/acme-challenge {
# root /var/lib/letsencrypt/.well-known/acme-challenge;
# root /var/lib/letsencrypt/;
# }
location / {
return 301 https://$server_name$request_uri;
}
}
# Enable SSL session caching for improved performance

View file

@ -1,3 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Healthcheck do
@moduledoc """
Module collects metrics about app and assign healthy status.

View file

@ -1,3 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
Postgrex.Types.define(
Pleroma.PostgresTypes,
[] ++ Ecto.Adapters.Postgres.extensions(),

67
lib/mix/pleroma.ex Normal file
View file

@ -0,0 +1,67 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Pleroma do
@doc "Common functions to be reused in mix tasks"
def start_pleroma do
Application.put_env(:phoenix, :serve_endpoints, false, persistent: true)
{:ok, _} = Application.ensure_all_started(:pleroma)
end
def load_pleroma do
Application.load(:pleroma)
end
def get_option(options, opt, prompt, defval \\ nil, defname \\ nil) do
Keyword.get(options, opt) || shell_prompt(prompt, defval, defname)
end
def shell_prompt(prompt, defval \\ nil, defname \\ nil) do
prompt_message = "#{prompt} [#{defname || defval}] "
input =
if mix_shell?(),
do: Mix.shell().prompt(prompt_message),
else: :io.get_line(prompt_message)
case input do
"\n" ->
case defval do
nil ->
shell_prompt(prompt, defval, defname)
defval ->
defval
end
input ->
String.trim(input)
end
end
def shell_yes?(message) do
if mix_shell?(),
do: Mix.shell().yes?("Continue?"),
else: shell_prompt(message, "Continue?") in ~w(Yn Y y)
end
def shell_info(message) do
if mix_shell?(),
do: Mix.shell().info(message),
else: IO.puts(message)
end
def shell_error(message) do
if mix_shell?(),
do: Mix.shell().error(message),
else: IO.puts(:stderr, message)
end
@doc "Performs a safe check whether `Mix.shell/0` is available (does not raise if Mix is not loaded)"
def mix_shell?, do: :erlang.function_exported(Mix, :shell, 0)
def escape_sh_path(path) do
~S(') <> String.replace(path, ~S('), ~S(\')) <> ~S(')
end
end

View file

@ -1,19 +1,23 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Benchmark do
import Mix.Pleroma
use Mix.Task
alias Mix.Tasks.Pleroma.Common
def run(["search"]) do
Common.start_pleroma()
start_pleroma()
Benchee.run(%{
"search" => fn ->
Pleroma.Web.MastodonAPI.MastodonAPIController.status_search(nil, "cofe")
Pleroma.Activity.search(nil, "cofe")
end
})
end
def run(["tag"]) do
Common.start_pleroma()
start_pleroma()
Benchee.run(%{
"tag" => fn ->

View file

@ -1,28 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Common do
@doc "Common functions to be reused in mix tasks"
def start_pleroma do
Mix.Task.run("app.start")
end
def get_option(options, opt, prompt, defval \\ nil, defname \\ nil) do
Keyword.get(options, opt) ||
case Mix.shell().prompt("#{prompt} [#{defname || defval}]") do
"\n" ->
case defval do
nil -> get_option(options, opt, prompt, defval)
defval -> defval
end
opt ->
opt |> String.trim()
end
end
def escape_sh_path(path) do
~S(') <> String.replace(path, ~S('), ~S(\')) <> ~S(')
end
end

View file

@ -0,0 +1,83 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Config do
use Mix.Task
import Mix.Pleroma
alias Pleroma.Repo
alias Pleroma.Web.AdminAPI.Config
@shortdoc "Manages the location of the config"
@moduledoc """
Manages the location of the config.
## Transfers config from file to DB.
mix pleroma.config migrate_to_db
## Transfers config from DB to file.
mix pleroma.config migrate_from_db ENV
"""
def run(["migrate_to_db"]) do
start_pleroma()
if Pleroma.Config.get([:instance, :dynamic_configuration]) do
Application.get_all_env(:pleroma)
|> Enum.reject(fn {k, _v} -> k in [Pleroma.Repo, :env] end)
|> Enum.each(fn {k, v} ->
key = to_string(k) |> String.replace("Elixir.", "")
{:ok, _} = Config.update_or_create(%{group: "pleroma", key: key, value: v})
Mix.shell().info("#{key} is migrated.")
end)
Mix.shell().info("Settings migrated.")
else
Mix.shell().info(
"Migration is not allowed by config. You can change this behavior in instance settings."
)
end
end
def run(["migrate_from_db", env, delete?]) do
start_pleroma()
delete? = if delete? == "true", do: true, else: false
if Pleroma.Config.get([:instance, :dynamic_configuration]) do
config_path = "config/#{env}.exported_from_db.secret.exs"
{:ok, file} = File.open(config_path, [:write])
IO.write(file, "use Mix.Config\r\n")
Repo.all(Config)
|> Enum.each(fn config ->
mark =
if String.starts_with?(config.key, "Pleroma.") or
String.starts_with?(config.key, "Ueberauth"),
do: ",",
else: ":"
IO.write(
file,
"config :#{config.group}, #{config.key}#{mark} #{
inspect(Config.from_binary(config.value))
}\r\n"
)
if delete? do
{:ok, _} = Repo.delete(config)
Mix.shell().info("#{config.key} deleted from DB.")
end
end)
File.close(file)
System.cmd("mix", ["format", config_path])
else
Mix.shell().info(
"Migration is not allowed by config. You can change this behavior in instance settings."
)
end
end
end

View file

@ -3,12 +3,12 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Database do
alias Mix.Tasks.Pleroma.Common
alias Pleroma.Conversation
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.User
require Logger
import Mix.Pleroma
use Mix.Task
@shortdoc "A collection of database related tasks"
@ -45,7 +45,7 @@ def run(["remove_embedded_objects" | args]) do
]
)
Common.start_pleroma()
start_pleroma()
Logger.info("Removing embedded objects")
Repo.query!(
@ -66,12 +66,12 @@ def run(["remove_embedded_objects" | args]) do
end
def run(["bump_all_conversations"]) do
Common.start_pleroma()
start_pleroma()
Conversation.bump_for_all_activities()
end
def run(["update_users_following_followers_counts"]) do
Common.start_pleroma()
start_pleroma()
users = Repo.all(User)
Enum.each(users, &User.remove_duplicated_following/1)
@ -89,7 +89,7 @@ def run(["prune_objects" | args]) do
]
)
Common.start_pleroma()
start_pleroma()
deadline = Pleroma.Config.get([:instance, :remote_post_retention_days])

View file

@ -0,0 +1,50 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Ecto do
@doc """
Ensures the given repository's migrations path exists on the file system.
"""
@spec ensure_migrations_path(Ecto.Repo.t(), Keyword.t()) :: String.t()
def ensure_migrations_path(repo, opts) do
path = opts[:migrations_path] || Path.join(source_repo_priv(repo), "migrations")
path =
case Path.type(path) do
:relative ->
Path.join(Application.app_dir(:pleroma), path)
:absolute ->
path
end
if not File.dir?(path) do
raise_missing_migrations(Path.relative_to_cwd(path), repo)
end
path
end
@doc """
Returns the private repository path relative to the source.
"""
def source_repo_priv(repo) do
config = repo.config()
priv = config[:priv] || "priv/#{repo |> Module.split() |> List.last() |> Macro.underscore()}"
Path.join(Application.app_dir(:pleroma), priv)
end
defp raise_missing_migrations(path, repo) do
raise("""
Could not find migrations directory #{inspect(path)}
for repo #{inspect(repo)}.
This may be because you are in a new project and the
migration directory has not been created yet. Creating an
empty directory at the path above will fix this error.
If you expected existing migrations to be found, please
make sure your repository has been properly configured
and the configured path exists.
""")
end
end

View file

@ -0,0 +1,63 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Ecto.Migrate do
use Mix.Task
import Mix.Pleroma
require Logger
@shortdoc "Wrapper on `ecto.migrate` task."
@aliases [
n: :step,
v: :to
]
@switches [
all: :boolean,
step: :integer,
to: :integer,
quiet: :boolean,
log_sql: :boolean,
strict_version_order: :boolean,
migrations_path: :string
]
@moduledoc """
Changes the `Logger` level to `:info` before starting the migration.
Changes the level back when the migration ends.
## Start migration
mix pleroma.ecto.migrate [OPTIONS]
Options:
- see https://hexdocs.pm/ecto/2.0.0/Mix.Tasks.Ecto.Migrate.html
"""
@impl true
def run(args \\ []) do
load_pleroma()
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
opts =
if opts[:to] || opts[:step] || opts[:all],
do: opts,
else: Keyword.put(opts, :all, true)
opts =
if opts[:quiet],
do: Keyword.merge(opts, log: false, log_sql: false),
else: opts
path = Mix.Tasks.Pleroma.Ecto.ensure_migrations_path(Pleroma.Repo, opts)
level = Logger.level()
Logger.configure(level: :info)
{:ok, _, _} = Ecto.Migrator.with_repo(Pleroma.Repo, &Ecto.Migrator.run(&1, path, :up, opts))
Logger.configure(level: level)
end
end

View file

@ -0,0 +1,67 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Ecto.Rollback do
use Mix.Task
import Mix.Pleroma
require Logger
@shortdoc "Wrapper on `ecto.rollback` task"
@aliases [
n: :step,
v: :to
]
@switches [
all: :boolean,
step: :integer,
to: :integer,
start: :boolean,
quiet: :boolean,
log_sql: :boolean,
migrations_path: :string
]
@moduledoc """
Changes the `Logger` level to `:info` before starting the rollback.
Changes the level back when the rollback ends.
## Start rollback
mix pleroma.ecto.rollback
Options:
- see https://hexdocs.pm/ecto/2.0.0/Mix.Tasks.Ecto.Rollback.html
"""
@impl true
def run(args \\ []) do
load_pleroma()
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
opts =
if opts[:to] || opts[:step] || opts[:all],
do: opts,
else: Keyword.put(opts, :step, 1)
opts =
if opts[:quiet],
do: Keyword.merge(opts, log: false, log_sql: false),
else: opts
path = Mix.Tasks.Pleroma.Ecto.ensure_migrations_path(Pleroma.Repo, opts)
level = Logger.level()
Logger.configure(level: :info)
if Pleroma.Config.get(:env) == :test do
Logger.info("Rollback succesfully")
else
{:ok, _, _} =
Ecto.Migrator.with_repo(Pleroma.Repo, &Ecto.Migrator.run(&1, path, :down, opts))
end
Logger.configure(level: level)
end
end

View file

@ -55,15 +55,13 @@ defmodule Mix.Tasks.Pleroma.Emoji do
are extracted).
"""
@default_manifest Pleroma.Config.get!([:emoji, :default_manifest])
def run(["ls-packs" | args]) do
Application.ensure_all_started(:hackney)
{options, [], []} = parse_global_opts(args)
manifest =
fetch_manifest(if options[:manifest], do: options[:manifest], else: @default_manifest)
fetch_manifest(if options[:manifest], do: options[:manifest], else: default_manifest())
Enum.each(manifest, fn {name, info} ->
to_print = [
@ -88,7 +86,7 @@ def run(["get-packs" | args]) do
{options, pack_names, []} = parse_global_opts(args)
manifest_url = if options[:manifest], do: options[:manifest], else: @default_manifest
manifest_url = if options[:manifest], do: options[:manifest], else: default_manifest()
manifest = fetch_manifest(manifest_url)
@ -298,4 +296,6 @@ defp client do
Tesla.client(middleware)
end
defp default_manifest, do: Pleroma.Config.get!([:emoji, :default_manifest])
end

View file

@ -4,7 +4,7 @@
defmodule Mix.Tasks.Pleroma.Instance do
use Mix.Task
alias Mix.Tasks.Pleroma.Common
import Mix.Pleroma
@shortdoc "Manages Pleroma instance"
@moduledoc """
@ -29,7 +29,13 @@ defmodule Mix.Tasks.Pleroma.Instance do
- `--dbname DBNAME` - the name of the database to use
- `--dbuser DBUSER` - the user (aka role) to use for the database connection
- `--dbpass DBPASS` - the password to use for the database connection
- `--rum Y/N` - Whether to enable RUM indexes
- `--indexable Y/N` - Allow/disallow indexing site by search engines
- `--db-configurable Y/N` - Allow/disallow configuring instance from admin part
- `--uploads-dir` - the directory uploads go in when using a local uploader
- `--static-dir` - the directory custom public files should be read from (custom emojis, frontend bundle overrides, robots.txt, etc.)
- `--listen-ip` - the ip the app should listen to, defaults to 127.0.0.1
- `--listen-port` - the port the app should listen to, defaults to 4000
"""
def run(["gen" | rest]) do
@ -48,7 +54,13 @@ def run(["gen" | rest]) do
dbname: :string,
dbuser: :string,
dbpass: :string,
indexable: :string
rum: :string,
indexable: :string,
db_configurable: :string,
uploads_dir: :string,
static_dir: :string,
listen_ip: :string,
listen_port: :string
],
aliases: [
o: :output,
@ -68,7 +80,7 @@ def run(["gen" | rest]) do
if proceed? do
[domain, port | _] =
String.split(
Common.get_option(
get_option(
options,
:domain,
"What domain will your instance use? (e.g pleroma.soykaf.com)"
@ -77,16 +89,16 @@ def run(["gen" | rest]) do
) ++ [443]
name =
Common.get_option(
get_option(
options,
:instance_name,
"What is the name of your instance? (e.g. Pleroma/Soykaf)"
)
email = Common.get_option(options, :admin_email, "What is your admin email address?")
email = get_option(options, :admin_email, "What is your admin email address?")
notify_email =
Common.get_option(
get_option(
options,
:notify_email,
"What email address do you want to use for sending email notifications?",
@ -94,21 +106,27 @@ def run(["gen" | rest]) do
)
indexable =
Common.get_option(
get_option(
options,
:indexable,
"Do you want search engines to index your site? (y/n)",
"y"
) === "y"
dbhost =
Common.get_option(options, :dbhost, "What is the hostname of your database?", "localhost")
db_configurable? =
get_option(
options,
:db_configurable,
"Do you want to store the configuration in the database (allows controlling it from admin-fe)? (y/n)",
"n"
) === "y"
dbname =
Common.get_option(options, :dbname, "What is the name of your database?", "pleroma_dev")
dbhost = get_option(options, :dbhost, "What is the hostname of your database?", "localhost")
dbname = get_option(options, :dbname, "What is the name of your database?", "pleroma")
dbuser =
Common.get_option(
get_option(
options,
:dbuser,
"What is the user used to connect to your database?",
@ -116,7 +134,7 @@ def run(["gen" | rest]) do
)
dbpass =
Common.get_option(
get_option(
options,
:dbpass,
"What is the password used to connect to your database?",
@ -124,13 +142,54 @@ def run(["gen" | rest]) do
"autogenerated"
)
rum_enabled =
get_option(
options,
:rum,
"Would you like to use RUM indices?",
"n"
) === "y"
listen_port =
get_option(
options,
:listen_port,
"What port will the app listen to (leave it if you are using the default setup with nginx)?",
4000
)
listen_ip =
get_option(
options,
:listen_ip,
"What ip will the app listen to (leave it if you are using the default setup with nginx)?",
"127.0.0.1"
)
uploads_dir =
get_option(
options,
:uploads_dir,
"What directory should media uploads go in (when using the local uploader)?",
Pleroma.Config.get([Pleroma.Uploaders.Local, :uploads])
)
static_dir =
get_option(
options,
:static_dir,
"What directory should custom public files be read from (custom emojis, frontend bundle overrides, robots.txt, etc.)?",
Pleroma.Config.get([:instance, :static_dir])
)
secret = :crypto.strong_rand_bytes(64) |> Base.encode64() |> binary_part(0, 64)
signing_salt = :crypto.strong_rand_bytes(8) |> Base.encode64() |> binary_part(0, 8)
{web_push_public_key, web_push_private_key} = :crypto.generate_key(:ecdh, :prime256v1)
template_dir = Application.app_dir(:pleroma, "priv") <> "/templates"
result_config =
EEx.eval_file(
"sample_config.eex" |> Path.expand(__DIR__),
template_dir <> "/sample_config.eex",
domain: domain,
port: port,
email: email,
@ -140,46 +199,40 @@ def run(["gen" | rest]) do
dbname: dbname,
dbuser: dbuser,
dbpass: dbpass,
version: Pleroma.Mixfile.project() |> Keyword.get(:version),
secret: secret,
signing_salt: signing_salt,
web_push_public_key: Base.url_encode64(web_push_public_key, padding: false),
web_push_private_key: Base.url_encode64(web_push_private_key, padding: false)
web_push_private_key: Base.url_encode64(web_push_private_key, padding: false),
db_configurable?: db_configurable?,
static_dir: static_dir,
uploads_dir: uploads_dir,
rum_enabled: rum_enabled,
listen_ip: listen_ip,
listen_port: listen_port
)
result_psql =
EEx.eval_file(
"sample_psql.eex" |> Path.expand(__DIR__),
template_dir <> "/sample_psql.eex",
dbname: dbname,
dbuser: dbuser,
dbpass: dbpass
dbpass: dbpass,
rum_enabled: rum_enabled
)
Mix.shell().info(
"Writing config to #{config_path}. You should rename it to config/prod.secret.exs or config/dev.secret.exs."
)
shell_info("Writing config to #{config_path}.")
File.write(config_path, result_config)
Mix.shell().info("Writing #{psql_path}.")
shell_info("Writing the postgres script to #{psql_path}.")
File.write(psql_path, result_psql)
write_robots_txt(indexable)
write_robots_txt(indexable, template_dir)
Mix.shell().info(
"\n" <>
"""
To get started:
1. Verify the contents of the generated files.
2. Run `sudo -u postgres psql -f #{Common.escape_sh_path(psql_path)}`.
""" <>
if config_path in ["config/dev.secret.exs", "config/prod.secret.exs"] do
""
else
"3. Run `mv #{Common.escape_sh_path(config_path)} 'config/prod.secret.exs'`."
end
shell_info(
"\n All files successfully written! Refer to the installation instructions for your platform for next steps"
)
else
Mix.shell().error(
shell_error(
"The task would have overwritten the following files:\n" <>
(Enum.map(paths, &"- #{&1}\n") |> Enum.join("")) <>
"Rerun with `--force` to overwrite them."
@ -187,10 +240,10 @@ def run(["gen" | rest]) do
end
end
defp write_robots_txt(indexable) do
defp write_robots_txt(indexable, template_dir) do
robots_txt =
EEx.eval_file(
Path.expand("robots_txt.eex", __DIR__),
template_dir <> "/robots_txt.eex",
indexable: indexable
)
@ -204,10 +257,10 @@ defp write_robots_txt(indexable) do
if File.exists?(robots_txt_path) do
File.cp!(robots_txt_path, "#{robots_txt_path}.bak")
Mix.shell().info("Backing up existing robots.txt to #{robots_txt_path}.bak")
shell_info("Backing up existing robots.txt to #{robots_txt_path}.bak")
end
File.write(robots_txt_path, robots_txt)
Mix.shell().info("Writing #{robots_txt_path}.")
shell_info("Writing #{robots_txt_path}.")
end
end

View file

@ -4,7 +4,7 @@
defmodule Mix.Tasks.Pleroma.Relay do
use Mix.Task
alias Mix.Tasks.Pleroma.Common
import Mix.Pleroma
alias Pleroma.Web.ActivityPub.Relay
@shortdoc "Manages remote relays"
@ -24,24 +24,24 @@ defmodule Mix.Tasks.Pleroma.Relay do
Example: ``mix pleroma.relay unfollow https://example.org/relay``
"""
def run(["follow", target]) do
Common.start_pleroma()
start_pleroma()
with {:ok, _activity} <- Relay.follow(target) do
# put this task to sleep to allow the genserver to push out the messages
:timer.sleep(500)
else
{:error, e} -> Mix.shell().error("Error while following #{target}: #{inspect(e)}")
{:error, e} -> shell_error("Error while following #{target}: #{inspect(e)}")
end
end
def run(["unfollow", target]) do
Common.start_pleroma()
start_pleroma()
with {:ok, _activity} <- Relay.unfollow(target) do
# put this task to sleep to allow the genserver to push out the messages
:timer.sleep(500)
else
{:error, e} -> Mix.shell().error("Error while following #{target}: #{inspect(e)}")
{:error, e} -> shell_error("Error while following #{target}: #{inspect(e)}")
end
end
end

View file

@ -4,7 +4,7 @@
defmodule Mix.Tasks.Pleroma.Uploads do
use Mix.Task
alias Mix.Tasks.Pleroma.Common
import Mix.Pleroma
alias Pleroma.Upload
alias Pleroma.Uploaders.Local
require Logger
@ -24,7 +24,7 @@ defmodule Mix.Tasks.Pleroma.Uploads do
"""
def run(["migrate_local", target_uploader | args]) do
delete? = Enum.member?(args, "--delete")
Common.start_pleroma()
start_pleroma()
local_path = Pleroma.Config.get!([Local, :uploads])
uploader = Module.concat(Pleroma.Uploaders, target_uploader)
@ -38,10 +38,10 @@ def run(["migrate_local", target_uploader | args]) do
Pleroma.Config.put([Upload, :uploader], uploader)
end
Mix.shell().info("Migrating files from local #{local_path} to #{to_string(uploader)}")
shell_info("Migrating files from local #{local_path} to #{to_string(uploader)}")
if delete? do
Mix.shell().info(
shell_info(
"Attention: uploaded files will be deleted, hope you have backups! (--delete ; cancel with ^C)"
)
@ -78,7 +78,7 @@ def run(["migrate_local", target_uploader | args]) do
|> Enum.filter(& &1)
total_count = length(uploads)
Mix.shell().info("Found #{total_count} uploads")
shell_info("Found #{total_count} uploads")
uploads
|> Task.async_stream(
@ -90,7 +90,7 @@ def run(["migrate_local", target_uploader | args]) do
:ok
error ->
Mix.shell().error("failed to upload #{inspect(upload.path)}: #{inspect(error)}")
shell_error("failed to upload #{inspect(upload.path)}: #{inspect(error)}")
end
end,
timeout: 150_000
@ -99,10 +99,10 @@ def run(["migrate_local", target_uploader | args]) do
# credo:disable-for-next-line Credo.Check.Warning.UnusedEnumOperation
|> Enum.reduce(0, fn done, count ->
count = count + length(done)
Mix.shell().info("Uploaded #{count}/#{total_count} files")
shell_info("Uploaded #{count}/#{total_count} files")
count
end)
Mix.shell().info("Done!")
shell_info("Done!")
end
end

View file

@ -5,9 +5,10 @@
defmodule Mix.Tasks.Pleroma.User do
use Mix.Task
import Ecto.Changeset
alias Mix.Tasks.Pleroma.Common
import Mix.Pleroma
alias Pleroma.User
alias Pleroma.UserInviteToken
alias Pleroma.Web.OAuth
@shortdoc "Manages Pleroma users"
@moduledoc """
@ -49,6 +50,10 @@ defmodule Mix.Tasks.Pleroma.User do
mix pleroma.user delete_activities NICKNAME
## Sign user out from all applications (delete user's OAuth tokens and authorizations).
mix pleroma.user sign_out NICKNAME
## Deactivate or activate the user's account.
mix pleroma.user toggle_activated NICKNAME
@ -115,7 +120,7 @@ def run(["new", nickname, email | rest]) do
admin? = Keyword.get(options, :admin, false)
assume_yes? = Keyword.get(options, :assume_yes, false)
Mix.shell().info("""
shell_info("""
A user will be created with the following information:
- nickname: #{nickname}
- email: #{email}
@ -128,10 +133,10 @@ def run(["new", nickname, email | rest]) do
- admin: #{if(admin?, do: "true", else: "false")}
""")
proceed? = assume_yes? or Mix.shell().yes?("Continue?")
proceed? = assume_yes? or shell_yes?("Continue?")
if proceed? do
Common.start_pleroma()
start_pleroma()
params = %{
nickname: nickname,
@ -145,7 +150,7 @@ def run(["new", nickname, email | rest]) do
changeset = User.register_changeset(%User{}, params, need_confirmation: false)
{:ok, _user} = User.register(changeset)
Mix.shell().info("User #{nickname} created")
shell_info("User #{nickname} created")
if moderator? do
run(["set", nickname, "--moderator"])
@ -159,64 +164,64 @@ def run(["new", nickname, email | rest]) do
run(["reset_password", nickname])
end
else
Mix.shell().info("User will not be created.")
shell_info("User will not be created.")
end
end
def run(["rm", nickname]) do
Common.start_pleroma()
start_pleroma()
with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
User.perform(:delete, user)
Mix.shell().info("User #{nickname} deleted.")
shell_info("User #{nickname} deleted.")
else
_ ->
Mix.shell().error("No local user #{nickname}")
shell_error("No local user #{nickname}")
end
end
def run(["toggle_activated", nickname]) do
Common.start_pleroma()
start_pleroma()
with %User{} = user <- User.get_cached_by_nickname(nickname) do
{:ok, user} = User.deactivate(user, !user.info.deactivated)
Mix.shell().info(
shell_info(
"Activation status of #{nickname}: #{if(user.info.deactivated, do: "de", else: "")}activated"
)
else
_ ->
Mix.shell().error("No user #{nickname}")
shell_error("No user #{nickname}")
end
end
def run(["reset_password", nickname]) do
Common.start_pleroma()
start_pleroma()
with %User{local: true} = user <- User.get_cached_by_nickname(nickname),
{:ok, token} <- Pleroma.PasswordResetToken.create_token(user) do
Mix.shell().info("Generated password reset token for #{user.nickname}")
shell_info("Generated password reset token for #{user.nickname}")
IO.puts(
"URL: #{
Pleroma.Web.Router.Helpers.util_url(
Pleroma.Web.Router.Helpers.reset_password_url(
Pleroma.Web.Endpoint,
:show_password_reset,
:reset,
token.token
)
}"
)
else
_ ->
Mix.shell().error("No local user #{nickname}")
shell_error("No local user #{nickname}")
end
end
def run(["unsubscribe", nickname]) do
Common.start_pleroma()
start_pleroma()
with %User{} = user <- User.get_cached_by_nickname(nickname) do
Mix.shell().info("Deactivating #{user.nickname}")
shell_info("Deactivating #{user.nickname}")
User.deactivate(user)
{:ok, friends} = User.get_friends(user)
@ -224,7 +229,7 @@ def run(["unsubscribe", nickname]) do
Enum.each(friends, fn friend ->
user = User.get_cached_by_id(user.id)
Mix.shell().info("Unsubscribing #{friend.nickname} from #{user.nickname}")
shell_info("Unsubscribing #{friend.nickname} from #{user.nickname}")
User.unfollow(user, friend)
end)
@ -233,16 +238,16 @@ def run(["unsubscribe", nickname]) do
user = User.get_cached_by_id(user.id)
if Enum.empty?(user.following) do
Mix.shell().info("Successfully unsubscribed all followers from #{user.nickname}")
shell_info("Successfully unsubscribed all followers from #{user.nickname}")
end
else
_ ->
Mix.shell().error("No user #{nickname}")
shell_error("No user #{nickname}")
end
end
def run(["set", nickname | rest]) do
Common.start_pleroma()
start_pleroma()
{options, [], []} =
OptionParser.parse(
@ -274,33 +279,33 @@ def run(["set", nickname | rest]) do
end
else
_ ->
Mix.shell().error("No local user #{nickname}")
shell_error("No local user #{nickname}")
end
end
def run(["tag", nickname | tags]) do
Common.start_pleroma()
start_pleroma()
with %User{} = user <- User.get_cached_by_nickname(nickname) do
user = user |> User.tag(tags)
Mix.shell().info("Tags of #{user.nickname}: #{inspect(tags)}")
shell_info("Tags of #{user.nickname}: #{inspect(tags)}")
else
_ ->
Mix.shell().error("Could not change user tags for #{nickname}")
shell_error("Could not change user tags for #{nickname}")
end
end
def run(["untag", nickname | tags]) do
Common.start_pleroma()
start_pleroma()
with %User{} = user <- User.get_cached_by_nickname(nickname) do
user = user |> User.untag(tags)
Mix.shell().info("Tags of #{user.nickname}: #{inspect(tags)}")
shell_info("Tags of #{user.nickname}: #{inspect(tags)}")
else
_ ->
Mix.shell().error("Could not change user tags for #{nickname}")
shell_error("Could not change user tags for #{nickname}")
end
end
@ -321,14 +326,12 @@ def run(["invite" | rest]) do
end)
|> Enum.into(%{})
Common.start_pleroma()
start_pleroma()
with {:ok, val} <- options[:expires_at],
options = Map.put(options, :expires_at, val),
{:ok, invite} <- UserInviteToken.create_invite(options) do
Mix.shell().info(
"Generated user invite token " <> String.replace(invite.invite_type, "_", " ")
)
shell_info("Generated user invite token " <> String.replace(invite.invite_type, "_", " "))
url =
Pleroma.Web.Router.Helpers.redirect_url(
@ -340,14 +343,14 @@ def run(["invite" | rest]) do
IO.puts(url)
else
error ->
Mix.shell().error("Could not create invite token: #{inspect(error)}")
shell_error("Could not create invite token: #{inspect(error)}")
end
end
def run(["invites"]) do
Common.start_pleroma()
start_pleroma()
Mix.shell().info("Invites list:")
shell_info("Invites list:")
UserInviteToken.list_invites()
|> Enum.each(fn invite ->
@ -361,7 +364,7 @@ def run(["invites"]) do
" | Max use: #{max_use} Left use: #{max_use - invite.uses}"
end
Mix.shell().info(
shell_info(
"ID: #{invite.id} | Token: #{invite.token} | Token type: #{invite.invite_type} | Used: #{
invite.used
}#{expire_info}#{using_info}"
@ -370,40 +373,54 @@ def run(["invites"]) do
end
def run(["revoke_invite", token]) do
Common.start_pleroma()
start_pleroma()
with {:ok, invite} <- UserInviteToken.find_by_token(token),
{:ok, _} <- UserInviteToken.update_invite(invite, %{used: true}) do
Mix.shell().info("Invite for token #{token} was revoked.")
shell_info("Invite for token #{token} was revoked.")
else
_ -> Mix.shell().error("No invite found with token #{token}")
_ -> shell_error("No invite found with token #{token}")
end
end
def run(["delete_activities", nickname]) do
Common.start_pleroma()
start_pleroma()
with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
{:ok, _} = User.delete_user_activities(user)
Mix.shell().info("User #{nickname} statuses deleted.")
shell_info("User #{nickname} statuses deleted.")
else
_ ->
Mix.shell().error("No local user #{nickname}")
shell_error("No local user #{nickname}")
end
end
def run(["toggle_confirmed", nickname]) do
Common.start_pleroma()
start_pleroma()
with %User{} = user <- User.get_cached_by_nickname(nickname) do
{:ok, user} = User.toggle_confirmation(user)
message = if user.info.confirmation_pending, do: "needs", else: "doesn't need"
Mix.shell().info("#{nickname} #{message} confirmation.")
shell_info("#{nickname} #{message} confirmation.")
else
_ ->
Mix.shell().error("No local user #{nickname}")
shell_error("No local user #{nickname}")
end
end
def run(["sign_out", nickname]) do
start_pleroma()
with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
OAuth.Token.delete_user_tokens(user)
OAuth.Authorization.delete_user_authorizations(user)
shell_info("#{nickname} signed out from all apps.")
else
_ ->
shell_error("No local user #{nickname}")
end
end
@ -416,7 +433,7 @@ defp set_moderator(user, value) do
{:ok, user} = User.update_and_set_cache(user_cng)
Mix.shell().info("Moderator status of #{user.nickname}: #{user.info.is_moderator}")
shell_info("Moderator status of #{user.nickname}: #{user.info.is_moderator}")
user
end
@ -429,7 +446,7 @@ defp set_admin(user, value) do
{:ok, user} = User.update_and_set_cache(user_cng)
Mix.shell().info("Admin status of #{user.nickname}: #{user.info.is_admin}")
shell_info("Admin status of #{user.nickname}: #{user.info.is_admin}")
user
end
@ -442,7 +459,7 @@ defp set_locked(user, value) do
{:ok, user} = User.update_and_set_cache(user_cng)
Mix.shell().info("Locked status of #{user.nickname}: #{user.info.locked}")
shell_info("Locked status of #{user.nickname}: #{user.info.locked}")
user
end
end

View file

@ -343,4 +343,6 @@ def restrict_deactivated_users(query) do
)
)
end
defdelegate search(user, query), to: Pleroma.Activity.Search
end

View file

@ -0,0 +1,81 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Activity.Search do
alias Pleroma.Activity
alias Pleroma.Object.Fetcher
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.ActivityPub.Visibility
import Ecto.Query
def search(user, search_query) do
index_type = if Pleroma.Config.get([:database, :rum_enabled]), do: :rum, else: :gin
Activity
|> Activity.with_preloaded_object()
|> Activity.restrict_deactivated_users()
|> restrict_public()
|> query_with(index_type, search_query)
|> maybe_restrict_local(user)
|> Repo.all()
|> maybe_fetch(user, search_query)
end
defp restrict_public(q) do
from([a, o] in q,
where: fragment("?->>'type' = 'Create'", a.data),
where: "https://www.w3.org/ns/activitystreams#Public" in a.recipients,
limit: 40
)
end
defp query_with(q, :gin, search_query) do
from([a, o] in q,
where:
fragment(
"to_tsvector('english', ?->>'content') @@ plainto_tsquery('english', ?)",
o.data,
^search_query
)
)
end
defp query_with(q, :rum, search_query) do
from([a, o] in q,
where:
fragment(
"? @@ plainto_tsquery('english', ?)",
o.fts_content,
^search_query
),
order_by: [fragment("? <=> now()::date", o.inserted_at)]
)
end
defp maybe_restrict_local(q, user) do
limit = Pleroma.Config.get([:instance, :limit_to_local_content], :unauthenticated)
case {limit, user} do
{:all, _} -> restrict_local(q)
{:unauthenticated, %User{}} -> q
{:unauthenticated, _} -> restrict_local(q)
{false, _} -> q
end
end
defp restrict_local(q), do: where(q, local: true)
defp maybe_fetch(activities, user, search_query) do
with true <- Regex.match?(~r/https?:/, search_query),
{:ok, object} <- Fetcher.fetch_object_from_id(search_query),
%Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
true <- Visibility.visible_for_user?(activity, user) do
activities ++ [activity]
else
_ -> activities
end
end
end

View file

@ -4,7 +4,6 @@
defmodule Pleroma.Application do
use Application
import Supervisor.Spec
@name Mix.Project.config()[:name]
@version Mix.Project.config()[:version]
@ -31,96 +30,132 @@ def start(_type, _args) do
children =
[
# Start the Ecto repository
supervisor(Pleroma.Repo, []),
worker(Pleroma.Emoji, []),
worker(Pleroma.Captcha, []),
worker(
Cachex,
[
:used_captcha_cache,
[
ttl_interval: :timer.seconds(Pleroma.Config.get!([Pleroma.Captcha, :seconds_valid]))
]
],
id: :cachex_used_captcha_cache
),
worker(
Cachex,
[
:user_cache,
[
default_ttl: 25_000,
ttl_interval: 1000,
limit: 2500
]
],
id: :cachex_user
),
worker(
Cachex,
[
:object_cache,
[
default_ttl: 25_000,
ttl_interval: 1000,
limit: 2500
]
],
id: :cachex_object
),
worker(
Cachex,
[
:rich_media_cache,
[
default_ttl: :timer.minutes(120),
limit: 5000
]
],
id: :cachex_rich_media
),
worker(
Cachex,
[
:scrubber_cache,
[
limit: 2500
]
],
id: :cachex_scrubber
),
worker(
Cachex,
[
:idempotency_cache,
[
expiration:
expiration(
default: :timer.seconds(6 * 60 * 60),
interval: :timer.seconds(60)
),
limit: 2500
]
],
id: :cachex_idem
),
worker(Pleroma.FlakeId, []),
worker(Pleroma.ScheduledActivityWorker, [])
%{id: Pleroma.Repo, start: {Pleroma.Repo, :start_link, []}, type: :supervisor},
%{id: Pleroma.Config.TransferTask, start: {Pleroma.Config.TransferTask, :start_link, []}},
%{id: Pleroma.Emoji, start: {Pleroma.Emoji, :start_link, []}},
%{id: Pleroma.Captcha, start: {Pleroma.Captcha, :start_link, []}},
%{
id: :cachex_used_captcha_cache,
start:
{Cachex, :start_link,
[
:used_captcha_cache,
[
ttl_interval:
:timer.seconds(Pleroma.Config.get!([Pleroma.Captcha, :seconds_valid]))
]
]}
},
%{
id: :cachex_user,
start:
{Cachex, :start_link,
[
:user_cache,
[
default_ttl: 25_000,
ttl_interval: 1000,
limit: 2500
]
]}
},
%{
id: :cachex_object,
start:
{Cachex, :start_link,
[
:object_cache,
[
default_ttl: 25_000,
ttl_interval: 1000,
limit: 2500
]
]}
},
%{
id: :cachex_rich_media,
start:
{Cachex, :start_link,
[
:rich_media_cache,
[
default_ttl: :timer.minutes(120),
limit: 5000
]
]}
},
%{
id: :cachex_scrubber,
start:
{Cachex, :start_link,
[
:scrubber_cache,
[
limit: 2500
]
]}
},
%{
id: :cachex_idem,
start:
{Cachex, :start_link,
[
:idempotency_cache,
[
expiration:
expiration(
default: :timer.seconds(6 * 60 * 60),
interval: :timer.seconds(60)
),
limit: 2500
]
]}
},
%{id: Pleroma.FlakeId, start: {Pleroma.FlakeId, :start_link, []}},
%{
id: Pleroma.ScheduledActivityWorker,
start: {Pleroma.ScheduledActivityWorker, :start_link, []}
}
] ++
hackney_pool_children() ++
[
worker(Pleroma.Web.Federator.RetryQueue, []),
worker(Pleroma.Web.OAuth.Token.CleanWorker, []),
worker(Pleroma.Stats, []),
worker(Task, [&Pleroma.Web.Push.init/0], restart: :temporary, id: :web_push_init),
worker(Task, [&Pleroma.Web.Federator.init/0], restart: :temporary, id: :federator_init)
%{
id: Pleroma.Web.Federator.RetryQueue,
start: {Pleroma.Web.Federator.RetryQueue, :start_link, []}
},
%{
id: Pleroma.Web.OAuth.Token.CleanWorker,
start: {Pleroma.Web.OAuth.Token.CleanWorker, :start_link, []}
},
%{
id: Pleroma.Stats,
start: {Pleroma.Stats, :start_link, []}
},
%{
id: :web_push_init,
start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
restart: :temporary
},
%{
id: :federator_init,
start: {Task, :start_link, [&Pleroma.Web.Federator.init/0]},
restart: :temporary
}
] ++
streamer_child() ++
chat_child() ++
[
# Start the endpoint when the application starts
supervisor(Pleroma.Web.Endpoint, []),
worker(Pleroma.Gopher.Server, [])
%{
id: Pleroma.Web.Endpoint,
start: {Pleroma.Web.Endpoint, :start_link, []},
type: :supervisor
},
%{id: Pleroma.Gopher.Server, start: {Pleroma.Gopher.Server, :start_link, []}},
%{
id: Pleroma.User.SynchronizationWorker,
start: {Pleroma.User.SynchronizationWorker, :start_link, []}
}
]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
@ -144,7 +179,6 @@ defp setup_instrumenters do
Pleroma.Repo.Instrumenter.setup()
end
Prometheus.Registry.register_collector(:prometheus_process_collector)
Pleroma.Web.Endpoint.MetricsExporter.setup()
Pleroma.Web.Endpoint.PipelineInstrumenter.setup()
Pleroma.Web.Endpoint.Instrumenter.setup()
@ -157,24 +191,29 @@ def enabled_hackney_pools do
else
[]
end ++
if Pleroma.Config.get([Pleroma.Uploader, :proxy_remote]) do
if Pleroma.Config.get([Pleroma.Upload, :proxy_remote]) do
[:upload]
else
[]
end
end
if Mix.env() == :test do
if Pleroma.Config.get(:env) == :test do
defp streamer_child, do: []
defp chat_child, do: []
else
defp streamer_child do
[worker(Pleroma.Web.Streamer, [])]
[%{id: Pleroma.Web.Streamer, start: {Pleroma.Web.Streamer, :start_link, []}}]
end
defp chat_child do
if Pleroma.Config.get([:chat, :enabled]) do
[worker(Pleroma.Web.ChatChannel.ChatChannelState, [])]
[
%{
id: Pleroma.Web.ChatChannel.ChatChannelState,
start: {Pleroma.Web.ChatChannel.ChatChannelState, :start_link, []}
}
]
else
[]
end

View file

@ -1,3 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.BBS.Authenticator do
use Sshd.PasswordAuthenticator
alias Comeonin.Pbkdf2

View file

@ -1,3 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.BBS.Handler do
use Sshd.ShellHandler
alias Pleroma.Activity

View file

@ -1,3 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Bookmark do
use Ecto.Schema

View file

@ -3,6 +3,8 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Captcha do
import Pleroma.Web.Gettext
alias Calendar.DateTime
alias Plug.Crypto.KeyGenerator
alias Plug.Crypto.MessageEncryptor
@ -83,10 +85,11 @@ def handle_call({:validate, token, captcha, answer_data}, _from, state) do
with {:ok, data} <- MessageEncryptor.decrypt(answer_data, secret, sign_secret),
%{at: at, answer_data: answer_md5} <- :erlang.binary_to_term(data) do
try do
if DateTime.before?(at, valid_if_after), do: throw({:error, "CAPTCHA expired"})
if DateTime.before?(at, valid_if_after),
do: throw({:error, dgettext("errors", "CAPTCHA expired")})
if not is_nil(Cachex.get!(:used_captcha_cache, token)),
do: throw({:error, "CAPTCHA already used"})
do: throw({:error, dgettext("errors", "CAPTCHA already used")})
res = method().validate(token, captcha, answer_md5)
# Throw if an error occurs
@ -101,7 +104,7 @@ def handle_call({:validate, token, captcha, answer_data}, _from, state) do
:throw, e -> e
end
else
_ -> {:error, "Invalid answer data"}
_ -> {:error, dgettext("errors", "Invalid answer data")}
end
{:reply, result, state}

View file

@ -3,6 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Captcha.Kocaptcha do
import Pleroma.Web.Gettext
alias Pleroma.Captcha.Service
@behaviour Service
@ -12,7 +13,7 @@ def new do
case Tesla.get(endpoint <> "/new") do
{:error, _} ->
%{error: "Kocaptcha service unavailable"}
%{error: dgettext("errors", "Kocaptcha service unavailable")}
{:ok, res} ->
json_resp = Jason.decode!(res.body)
@ -32,6 +33,6 @@ def validate(_token, captcha, answer_data) do
if not is_nil(captcha) and
:crypto.hash(:md5, captcha) |> Base.encode16() == String.upcase(answer_data),
do: :ok,
else: {:error, "Invalid CAPTCHA"}
else: {:error, dgettext("errors", "Invalid CAPTCHA")}
end
end

View file

@ -38,7 +38,7 @@ def put([key], value), do: put(key, value)
def put([parent_key | keys], value) do
parent =
Application.get_env(:pleroma, parent_key)
Application.get_env(:pleroma, parent_key, [])
|> put_in(keys, value)
Application.put_env(:pleroma, parent_key, parent)

View file

@ -0,0 +1,59 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Config.TransferTask do
use Task
alias Pleroma.Web.AdminAPI.Config
def start_link do
load_and_update_env()
if Pleroma.Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Pleroma.Repo)
:ignore
end
def load_and_update_env do
if Pleroma.Config.get([:instance, :dynamic_configuration]) and
Ecto.Adapters.SQL.table_exists?(Pleroma.Repo, "config") do
for_restart =
Pleroma.Repo.all(Config)
|> Enum.map(&update_env(&1))
# We need to restart applications for the loaded settings to take effect
for_restart
|> Enum.reject(&(&1 in [:pleroma, :ok]))
|> Enum.each(fn app ->
Application.stop(app)
:ok = Application.start(app)
end)
end
end
defp update_env(setting) do
try do
key =
if String.starts_with?(setting.key, "Pleroma.") do
"Elixir." <> setting.key
else
setting.key
end
group = String.to_existing_atom(setting.group)
Application.put_env(
group,
String.to_existing_atom(key),
Config.from_binary(setting.value)
)
group
rescue
e ->
require Logger
Logger.warn(
"updating env causes error, key: #{inspect(setting.key)}, error: #{inspect(e)}"
)
end
end
end

View file

@ -59,10 +59,10 @@ def mark_as_unread(participation) do
def for_user(user, params \\ %{}) do
from(p in __MODULE__,
where: p.user_id == ^user.id,
order_by: [desc: p.updated_at]
order_by: [desc: p.updated_at],
preload: [conversation: [:users]]
)
|> Pleroma.Pagination.fetch_paginated(params)
|> Repo.preload(conversation: [:users])
end
def for_user_with_last_activity_id(user, params \\ %{}) do

View file

@ -3,11 +3,58 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Emails.Mailer do
use Swoosh.Mailer, otp_app: :pleroma
@moduledoc """
Defines the Pleroma mailer.
The module contains functions to deliver email using Swoosh.Mailer.
"""
alias Swoosh.DeliveryError
@otp_app :pleroma
@mailer_config [otp: :pleroma]
@spec enabled?() :: boolean()
def enabled?, do: Pleroma.Config.get([__MODULE__, :enabled])
@doc "add email to queue"
def deliver_async(email, config \\ []) do
PleromaJobQueue.enqueue(:mailer, __MODULE__, [:deliver_async, email, config])
end
@doc "callback to perform send email from queue"
def perform(:deliver_async, email, config), do: deliver(email, config)
@spec deliver(Swoosh.Email.t(), Keyword.t()) :: {:ok, term} | {:error, term}
def deliver(email, config \\ [])
def deliver(email, config) do
case enabled?() do
true -> Swoosh.Mailer.deliver(email, parse_config(config))
false -> {:error, :deliveries_disabled}
end
end
@spec deliver!(Swoosh.Email.t(), Keyword.t()) :: term | no_return
def deliver!(email, config \\ [])
def deliver!(email, config) do
case deliver(email, config) do
{:ok, result} -> result
{:error, reason} -> raise DeliveryError, reason: reason
end
end
@on_load :validate_dependency
@doc false
def validate_dependency do
parse_config([])
|> Keyword.get(:adapter)
|> Swoosh.Mailer.validate_dependency()
end
defp parse_config(config) do
Swoosh.Mailer.parse_config(@otp_app, __MODULE__, @mailer_config, config)
end
end

View file

@ -23,13 +23,8 @@ defp recipient(email, nil), do: email
defp recipient(email, name), do: {name, email}
defp recipient(%Pleroma.User{} = user), do: recipient(user.email, user.name)
def password_reset_email(user, password_reset_token) when is_binary(password_reset_token) do
password_reset_url =
Router.Helpers.util_url(
Endpoint,
:show_password_reset,
password_reset_token
)
def password_reset_email(user, token) when is_binary(token) do
password_reset_url = Router.Helpers.reset_password_url(Endpoint, :reset, token)
html_body = """
<h3>Reset your password at #{instance_name()}</h3>

View file

@ -22,7 +22,6 @@ defmodule Pleroma.Emoji do
@ets __MODULE__.Ets
@ets_options [:ordered_set, :protected, :named_table, {:read_concurrency, true}]
@groups Pleroma.Config.get([:emoji, :groups])
@doc false
def start_link do
@ -87,6 +86,8 @@ defp load do
"emoji"
)
emoji_groups = Pleroma.Config.get([:emoji, :groups])
case File.ls(emoji_dir_path) do
{:error, :enoent} ->
# The custom emoji directory doesn't exist,
@ -98,7 +99,9 @@ defp load do
Logger.error("Could not access the custom emoji directory #{emoji_dir_path}: #{e}")
{:ok, results} ->
grouped = Enum.group_by(results, &File.dir?/1)
grouped =
Enum.group_by(results, fn file -> File.dir?(Path.join(emoji_dir_path, file)) end)
packs = grouped[true] || []
files = grouped[false] || []
@ -116,7 +119,7 @@ defp load do
emojis =
Enum.flat_map(
packs,
fn pack -> load_pack(Path.join(emoji_dir_path, pack)) end
fn pack -> load_pack(Path.join(emoji_dir_path, pack), emoji_groups) end
)
true = :ets.insert(@ets, emojis)
@ -127,9 +130,9 @@ defp load do
shortcode_globs = Pleroma.Config.get([:emoji, :shortcode_globs], [])
emojis =
(load_from_file("config/emoji.txt") ++
load_from_file("config/custom_emoji.txt") ++
load_from_globs(shortcode_globs))
(load_from_file("config/emoji.txt", emoji_groups) ++
load_from_file("config/custom_emoji.txt", emoji_groups) ++
load_from_globs(shortcode_globs, emoji_groups))
|> Enum.reject(fn value -> value == nil end)
true = :ets.insert(@ets, emojis)
@ -137,23 +140,25 @@ defp load do
:ok
end
defp load_pack(pack_dir) do
defp load_pack(pack_dir, emoji_groups) do
pack_name = Path.basename(pack_dir)
emoji_txt = Path.join(pack_dir, "emoji.txt")
if File.exists?(emoji_txt) do
load_from_file(emoji_txt)
load_from_file(emoji_txt, emoji_groups)
else
extensions = Pleroma.Config.get([:emoji, :pack_extensions])
Logger.info(
"No emoji.txt found for pack \"#{pack_name}\", assuming all .png files are emoji"
"No emoji.txt found for pack \"#{pack_name}\", assuming all #{Enum.join(extensions, ", ")} files are emoji"
)
make_shortcode_to_file_map(pack_dir, [".png"])
make_shortcode_to_file_map(pack_dir, extensions)
|> Enum.map(fn {shortcode, rel_file} ->
filename = Path.join("/emoji/#{pack_name}", rel_file)
{shortcode, filename, [to_string(match_extra(@groups, filename))]}
{shortcode, filename, [to_string(match_extra(emoji_groups, filename))]}
end)
end
end
@ -182,21 +187,21 @@ def find_all_emoji(dir, exts) do
|> Enum.filter(fn f -> Path.extname(f) in exts end)
end
defp load_from_file(file) do
defp load_from_file(file, emoji_groups) do
if File.exists?(file) do
load_from_file_stream(File.stream!(file))
load_from_file_stream(File.stream!(file), emoji_groups)
else
[]
end
end
defp load_from_file_stream(stream) do
defp load_from_file_stream(stream, emoji_groups) do
stream
|> Stream.map(&String.trim/1)
|> Stream.map(fn line ->
case String.split(line, ~r/,\s*/) do
[name, file] ->
{name, file, [to_string(match_extra(@groups, file))]}
{name, file, [to_string(match_extra(emoji_groups, file))]}
[name, file | tags] ->
{name, file, tags}
@ -208,7 +213,7 @@ defp load_from_file_stream(stream) do
|> Enum.to_list()
end
defp load_from_globs(globs) do
defp load_from_globs(globs, emoji_groups) do
static_path = Path.join(:code.priv_dir(:pleroma), "static")
paths =
@ -219,7 +224,7 @@ defp load_from_globs(globs) do
|> Enum.concat()
Enum.map(paths, fn path ->
tag = match_extra(@groups, Path.join("/", Path.relative_to(path, static_path)))
tag = match_extra(emoji_groups, Path.join("/", Path.relative_to(path, static_path)))
shortcode = Path.basename(path, Path.extname(path))
external_path = Path.join("/", Path.relative_to(path, static_path))
{shortcode, external_path, [to_string(tag)]}

View file

@ -0,0 +1,27 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Helpers.UriHelper do
def append_uri_params(uri, appended_params) do
uri = URI.parse(uri)
appended_params = for {k, v} <- appended_params, into: %{}, do: {to_string(k), v}
existing_params = URI.query_decoder(uri.query || "") |> Enum.into(%{})
updated_params_keys = Enum.uniq(Map.keys(existing_params) ++ Map.keys(appended_params))
updated_params =
for k <- updated_params_keys, do: {k, appended_params[k] || existing_params[k]}
uri
|> Map.put(:query, URI.encode_query(updated_params))
|> URI.to_string()
end
def append_param_if_present(%{} = params, param_name, param_value) do
if param_value do
Map.put(params, param_name, param_value)
else
params
end
end
end

View file

@ -89,7 +89,7 @@ def extract_first_external_url(object, content) do
Cachex.fetch!(:scrubber_cache, key, fn _key ->
result =
content
|> Floki.filter_out("a.mention")
|> Floki.filter_out("a.mention,a.hashtag,a[rel~=\"tag\"]")
|> Floki.attribute("a", "href")
|> Enum.at(0)

View file

@ -1,3 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Instances do
@moduledoc "Instances context."
@ -13,7 +17,7 @@ def set_consistently_unreachable(url_or_host),
def reachability_datetime_threshold do
federation_reachability_timeout_days =
Pleroma.Config.get(:instance)[:federation_reachability_timeout_days] || 0
Pleroma.Config.get([:instance, :federation_reachability_timeout_days], 0)
if federation_reachability_timeout_days > 0 do
NaiveDateTime.add(

View file

@ -1,3 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Instances.Instance do
@moduledoc "Instance."

View file

@ -13,6 +13,8 @@ defmodule Pleroma.Notification do
alias Pleroma.User
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.CommonAPI.Utils
alias Pleroma.Web.Push
alias Pleroma.Web.Streamer
import Ecto.Query
import Ecto.Changeset
@ -125,8 +127,7 @@ def dismiss(%{id: user_id} = _user, id) do
end
end
def create_notifications(%Activity{data: %{"to" => _, "type" => type}} = activity)
when type in ["Create", "Like", "Announce", "Follow"] do
def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity) do
object = Object.normalize(activity)
unless object && object.data["type"] == "Answer" do
@ -138,6 +139,13 @@ def create_notifications(%Activity{data: %{"to" => _, "type" => type}} = activit
end
end
def create_notifications(%Activity{data: %{"to" => _, "type" => type}} = activity)
when type in ["Like", "Announce", "Follow"] do
users = get_notified_from_activity(activity)
notifications = Enum.map(users, fn user -> create_notification(activity, user) end)
{:ok, notifications}
end
def create_notifications(_), do: {:ok, []}
# TODO move to sql, too.
@ -145,8 +153,9 @@ def create_notification(%Activity{} = activity, %User{} = user) do
unless skip?(activity, user) do
notification = %Notification{user_id: user.id, activity: activity}
{:ok, notification} = Repo.insert(notification)
Pleroma.Web.Streamer.stream("user", notification)
Pleroma.Web.Push.send(notification)
Streamer.stream("user", notification)
Streamer.stream("user:notification", notification)
Push.send(notification)
notification
end
end

View file

@ -44,44 +44,46 @@ def get_by_ap_id(ap_id) do
Repo.one(from(object in Object, where: fragment("(?)->>'id' = ?", object.data, ^ap_id)))
end
def normalize(_, fetch_remote \\ true)
defp warn_on_no_object_preloaded(ap_id) do
"Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object"
|> Logger.debug()
Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}")
end
def normalize(_, fetch_remote \\ true, options \\ [])
# If we pass an Activity to Object.normalize(), we can try to use the preloaded object.
# Use this whenever possible, especially when walking graphs in an O(N) loop!
def normalize(%Object{} = object, _), do: object
def normalize(%Activity{object: %Object{} = object}, _), do: object
def normalize(%Object{} = object, _, _), do: object
def normalize(%Activity{object: %Object{} = object}, _, _), do: object
# A hack for fake activities
def normalize(%Activity{data: %{"object" => %{"fake" => true} = data}}, _) do
def normalize(%Activity{data: %{"object" => %{"fake" => true} = data}}, _, _) do
%Object{id: "pleroma:fake_object_id", data: data}
end
# Catch and log Object.normalize() calls where the Activity's child object is not
# preloaded.
def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote) do
Logger.debug(
"Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object!"
)
Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}")
# No preloaded object
def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote, _) do
warn_on_no_object_preloaded(ap_id)
normalize(ap_id, fetch_remote)
end
def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote) do
Logger.debug(
"Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object!"
)
Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}")
# No preloaded object
def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote, _) do
warn_on_no_object_preloaded(ap_id)
normalize(ap_id, fetch_remote)
end
# Old way, try fetching the object through cache.
def normalize(%{"id" => ap_id}, fetch_remote), do: normalize(ap_id, fetch_remote)
def normalize(ap_id, false) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id)
def normalize(ap_id, true) when is_binary(ap_id), do: Fetcher.fetch_object_from_id!(ap_id)
def normalize(_, _), do: nil
def normalize(%{"id" => ap_id}, fetch_remote, _), do: normalize(ap_id, fetch_remote)
def normalize(ap_id, false, _) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id)
def normalize(ap_id, true, options) when is_binary(ap_id) do
Fetcher.fetch_object_from_id!(ap_id, options)
end
def normalize(_, _, _), do: nil
# Owned objects can only be mutated by their owner
def authorize_mutation(%Object{data: %{"actor" => actor}}, %User{ap_id: ap_id}),


@ -1,3 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Object.Containment do
@moduledoc """
This module contains some useful functions for containing objects to specific


@ -1,3 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Object.Fetcher do
alias Pleroma.HTTP
alias Pleroma.Object
@ -22,7 +26,7 @@ defp reinject_object(data) do
# TODO:
# This will create a Create activity, which we need internally at the moment.
def fetch_object_from_id(id) do
def fetch_object_from_id(id, options \\ []) do
if object = Object.get_cached_by_ap_id(id) do
{:ok, object}
else
@ -38,7 +42,7 @@ def fetch_object_from_id(id) do
"object" => data
},
:ok <- Containment.contain_origin(id, params),
{:ok, activity} <- Transmogrifier.handle_incoming(params),
{:ok, activity} <- Transmogrifier.handle_incoming(params, options),
{:object, _data, %Object{} = object} <-
{:object, data, Object.normalize(activity, false)} do
{:ok, object}
@ -63,8 +67,8 @@ def fetch_object_from_id(id) do
end
end
def fetch_object_from_id!(id) do
with {:ok, object} <- fetch_object_from_id(id) do
def fetch_object_from_id!(id, options \\ []) do
with {:ok, object} <- fetch_object_from_id(id, options) do
object
else
_e ->
@ -85,6 +89,9 @@ def fetch_and_contain_remote_object_from_id(id) do
:ok <- Containment.contain_origin_from_id(id, data) do
{:ok, data}
else
{:ok, %{status: code}} when code in [404, 410] ->
{:error, "Object has been deleted"}
e ->
{:error, e}
end


@ -1,3 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ObjectTombstone do
@enforce_keys [:id, :formerType, :deleted]
defstruct [:id, :formerType, :deleted, type: "Tombstone"]


@ -1,3 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Pagination do
@moduledoc """
Implements Mastodon-compatible pagination.


@ -37,6 +37,7 @@ def used_changeset(struct) do
|> put_change(:used, true)
end
@spec reset_password(binary(), map()) :: {:ok, User.t()} | {:error, binary()}
def reset_password(token, data) do
with %{used: false} = token <- Repo.get_by(PasswordResetToken, %{token: token}),
%User{} = user <- User.get_cached_by_id(token.user_id),


@ -4,6 +4,7 @@
defmodule Pleroma.Plugs.EnsureAuthenticatedPlug do
import Plug.Conn
import Pleroma.Web.TranslationHelpers
alias Pleroma.User
def init(options) do
@ -16,8 +17,7 @@ def call(%{assigns: %{user: %User{}}} = conn, _) do
def call(conn, _) do
conn
|> put_resp_content_type("application/json")
|> send_resp(403, Jason.encode!(%{error: "Invalid credentials."}))
|> render_error(:forbidden, "Invalid credentials.")
|> halt
end
end


@ -3,6 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Plugs.EnsurePublicOrAuthenticatedPlug do
import Pleroma.Web.TranslationHelpers
import Plug.Conn
alias Pleroma.Config
alias Pleroma.User
@ -23,8 +24,7 @@ def call(conn, _) do
{false, _} ->
conn
|> put_resp_content_type("application/json")
|> send_resp(403, Jason.encode!(%{error: "This resource requires authentication."}))
|> render_error(:forbidden, "This resource requires authentication.")
|> halt
end
end


@ -56,14 +56,14 @@ defp csp_string do
connect_src = "connect-src 'self' #{static_url} #{websocket_url}"
connect_src =
if Mix.env() == :dev do
if Pleroma.Config.get(:env) == :dev do
connect_src <> " http://localhost:3035/"
else
connect_src
end
script_src =
if Mix.env() == :dev do
if Pleroma.Config.get(:env) == :dev do
"script-src 'self' 'unsafe-eval'"
else
"script-src 'self'"


@ -0,0 +1,84 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Plugs.IdempotencyPlug do
import Phoenix.Controller, only: [json: 2]
import Plug.Conn
@behaviour Plug
@impl true
def init(opts), do: opts
# Sending idempotency keys in `GET` and `DELETE` requests has no effect
# and should be avoided, as these requests are idempotent by definition.
@impl true
def call(%{method: method} = conn, _) when method in ["POST", "PUT", "PATCH"] do
case get_req_header(conn, "idempotency-key") do
[key] -> process_request(conn, key)
_ -> conn
end
end
def call(conn, _), do: conn
def process_request(conn, key) do
case Cachex.get(:idempotency_cache, key) do
{:ok, nil} ->
cache_response(conn, key)
{:ok, record} ->
send_cached(conn, key, record)
{atom, message} when atom in [:ignore, :error] ->
render_error(conn, message)
end
end
defp cache_response(conn, key) do
register_before_send(conn, fn conn ->
[request_id] = get_resp_header(conn, "x-request-id")
content_type = get_content_type(conn)
record = {request_id, content_type, conn.status, conn.resp_body}
{:ok, _} = Cachex.put(:idempotency_cache, key, record)
conn
|> put_resp_header("idempotency-key", key)
|> put_resp_header("x-original-request-id", request_id)
end)
end
defp send_cached(conn, key, record) do
{request_id, content_type, status, body} = record
conn
|> put_resp_header("idempotency-key", key)
|> put_resp_header("idempotent-replayed", "true")
|> put_resp_header("x-original-request-id", request_id)
|> put_resp_content_type(content_type)
|> send_resp(status, body)
|> halt()
end
defp render_error(conn, message) do
conn
|> put_status(:unprocessable_entity)
|> json(%{error: message})
|> halt()
end
defp get_content_type(conn) do
[content_type] = get_resp_header(conn, "content-type")
if String.contains?(content_type, ";") do
content_type
|> String.split(";")
|> hd()
else
content_type
end
end
end
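A minimal usage sketch, assuming a Cachex cache named `:idempotency_cache` is started in the application's supervision tree; the pipeline name is illustrative:
pipeline :api do
  # Replays POST/PUT/PATCH responses when a client repeats the same Idempotency-Key header.
  plug(Pleroma.Plugs.IdempotencyPlug)
end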


@ -4,6 +4,7 @@
defmodule Pleroma.Plugs.OAuthScopesPlug do
import Plug.Conn
import Pleroma.Web.Gettext
@behaviour Plug
@ -30,11 +31,14 @@ def call(%Plug.Conn{assigns: assigns} = conn, %{scopes: scopes} = options) do
true ->
missing_scopes = scopes -- token.scopes
error_message = "Insufficient permissions: #{Enum.join(missing_scopes, " #{op} ")}."
permissions = Enum.join(missing_scopes, " #{op} ")
error_message =
dgettext("errors", "Insufficient permissions: %{permissions}.", permissions: permissions)
conn
|> put_resp_content_type("application/json")
|> send_resp(403, Jason.encode!(%{error: error_message}))
|> send_resp(:forbidden, Jason.encode!(%{error: error_message}))
|> halt()
end
end


@ -1,36 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Plugs.RateLimitPlug do
import Phoenix.Controller, only: [json: 2]
import Plug.Conn
def init(opts), do: opts
def call(conn, opts) do
enabled? = Pleroma.Config.get([:app_account_creation, :enabled])
case check_rate(conn, Map.put(opts, :enabled, enabled?)) do
{:ok, _count} -> conn
{:error, _count} -> render_error(conn)
%Plug.Conn{} = conn -> conn
end
end
defp check_rate(conn, %{enabled: true} = opts) do
max_requests = opts[:max_requests]
bucket_name = conn.remote_ip |> Tuple.to_list() |> Enum.join(".")
ExRated.check_rate(bucket_name, opts[:interval] * 1000, max_requests)
end
defp check_rate(conn, _), do: conn
defp render_error(conn) do
conn
|> put_status(:forbidden)
|> json(%{error: "Rate limit exceeded."})
|> halt()
end
end


@ -0,0 +1,92 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Plugs.RateLimiter do
@moduledoc """
## Configuration
A keyword list of rate limiters, where each key is a limiter name and each value is the limiter configuration. The basic configuration is a tuple where:
* The first element: `scale` (Integer). The time scale in milliseconds.
* The second element: `limit` (Integer). How many requests to limit in the time scale provided.
It is also possible to have different limits for unauthenticated and authenticated users: the keyword value must be a list of two tuples, where the first one is the config for unauthenticated users and the second one is for authenticated users.
To disable a limiter, set its value to `nil`.
### Example
config :pleroma, :rate_limit,
one: {1000, 10},
two: [{10_000, 10}, {10_000, 50}],
foobar: nil
Here we have three limiters:
* `one` which allows at most 10 requests per second
* `two` which has two limits: 10req/10s for unauthenticated users and 50req/10s for authenticated users
* `foobar` which is disabled
## Usage
Inside a controller:
plug(Pleroma.Plugs.RateLimiter, :one when action == :one)
plug(Pleroma.Plugs.RateLimiter, :two when action in [:two, :three])
or inside a router pipeline:
pipeline :api do
...
plug(Pleroma.Plugs.RateLimiter, :one)
...
end
"""
import Pleroma.Web.TranslationHelpers
import Plug.Conn
alias Pleroma.User
def init(limiter_name) do
case Pleroma.Config.get([:rate_limit, limiter_name]) do
nil -> nil
config -> {limiter_name, config}
end
end
# do not limit if there is no limiter configuration
def call(conn, nil), do: conn
def call(conn, opts) do
case check_rate(conn, opts) do
{:ok, _count} -> conn
{:error, _count} -> render_throttled_error(conn)
end
end
defp check_rate(%{assigns: %{user: %User{id: user_id}}}, {limiter_name, [_, {scale, limit}]}) do
ExRated.check_rate("#{limiter_name}:#{user_id}", scale, limit)
end
defp check_rate(conn, {limiter_name, [{scale, limit} | _]}) do
ExRated.check_rate("#{limiter_name}:#{ip(conn)}", scale, limit)
end
defp check_rate(conn, {limiter_name, {scale, limit}}) do
check_rate(conn, {limiter_name, [{scale, limit}]})
end
def ip(%{remote_ip: remote_ip}) do
remote_ip
|> Tuple.to_list()
|> Enum.join(".")
end
defp render_throttled_error(conn) do
conn
|> render_error(:too_many_requests, "Throttled")
|> halt()
end
end
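A rough sketch of the ExRated calls the plug ends up making for the `two` limiter from the moduledoc example above; the bucket names are illustrative:
# Unauthenticated request: the bucket is keyed by the remote IP, first tuple applies.
ExRated.check_rate("two:203.0.113.7", 10_000, 10)
# Authenticated request: the bucket is keyed by the user id, second tuple applies.
ExRated.check_rate("two:9n2ciAxLZ3xLbGPrnE", 10_000, 50)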


@ -0,0 +1,63 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
# NOTE: this module is based on https://github.com/smeevil/set_locale
defmodule Pleroma.Plugs.SetLocalePlug do
import Plug.Conn, only: [get_req_header: 2, assign: 3]
def init(_), do: nil
def call(conn, _) do
locale = get_locale_from_header(conn) || Gettext.get_locale()
Gettext.put_locale(locale)
assign(conn, :locale, locale)
end
defp get_locale_from_header(conn) do
conn
|> extract_accept_language()
|> Enum.find(&supported_locale?/1)
end
defp extract_accept_language(conn) do
case get_req_header(conn, "accept-language") do
[value | _] ->
value
|> String.split(",")
|> Enum.map(&parse_language_option/1)
|> Enum.sort(&(&1.quality > &2.quality))
|> Enum.map(& &1.tag)
|> Enum.reject(&is_nil/1)
|> ensure_language_fallbacks()
_ ->
[]
end
end
defp supported_locale?(locale) do
Pleroma.Web.Gettext
|> Gettext.known_locales()
|> Enum.member?(locale)
end
defp parse_language_option(string) do
captures = Regex.named_captures(~r/^\s?(?<tag>[\w\-]+)(?:;q=(?<quality>[\d\.]+))?$/i, string)
quality =
case Float.parse(captures["quality"] || "1.0") do
{val, _} -> val
:error -> 1.0
end
%{tag: captures["tag"], quality: quality}
end
defp ensure_language_fallbacks(tags) do
Enum.flat_map(tags, fn tag ->
[language | _] = String.split(tag, "-")
if Enum.member?(tags, language), do: [tag], else: [tag, language]
end)
end
end
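A minimal wiring sketch; where exactly the plug sits in the endpoint or router is illustrative:
plug(Pleroma.Plugs.SetLocalePlug)
# With `Accept-Language: de-CH;q=0.8, en;q=0.9` and "en" among Gettext's known
# locales, the tags are sorted by quality and expanded with language fallbacks,
# giving ["en", "de-CH", "de"]; "en" is the first supported locale, so it is used.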


@ -7,6 +7,7 @@ defmodule Pleroma.Plugs.UploadedMedia do
"""
import Plug.Conn
import Pleroma.Web.Gettext
require Logger
@behaviour Plug
@ -36,7 +37,7 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
conn
end
config = Pleroma.Config.get([Pleroma.Upload])
config = Pleroma.Config.get(Pleroma.Upload)
with uploader <- Keyword.fetch!(config, :uploader),
proxy_remote = Keyword.get(config, :proxy_remote, false),
@ -45,7 +46,7 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
else
_ ->
conn
|> send_resp(500, "Failed")
|> send_resp(:internal_server_error, dgettext("errors", "Failed"))
|> halt()
end
end
@ -64,7 +65,7 @@ defp get_media(conn, {:static_dir, directory}, _, opts) do
conn
else
conn
|> send_resp(404, "Not found")
|> send_resp(:not_found, dgettext("errors", "Not found"))
|> halt()
end
end
@ -84,7 +85,7 @@ defp get_media(conn, unknown, _, _) do
Logger.error("#{__MODULE__}: Unknown get startegy: #{inspect(unknown)}")
conn
|> send_resp(500, "Internal Error")
|> send_resp(:internal_server_error, dgettext("errors", "Internal Error"))
|> halt()
end
end


@ -3,6 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Plugs.UserIsAdminPlug do
import Pleroma.Web.TranslationHelpers
import Plug.Conn
alias Pleroma.User
@ -16,8 +17,7 @@ def call(%{assigns: %{user: %User{info: %{is_admin: true}}}} = conn, _) do
def call(conn, _) do
conn
|> put_resp_content_type("application/json")
|> send_resp(403, Jason.encode!(%{error: "User is not admin."}))
|> render_error(:forbidden, "User is not admin.")
|> halt
end
end


@ -0,0 +1,66 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReleaseTasks do
@repo Pleroma.Repo
def run(args) do
[task | args] = String.split(args)
case task do
"migrate" -> migrate(args)
"create" -> create()
"rollback" -> rollback(args)
task -> mix_task(task, args)
end
end
defp mix_task(task, args) do
Application.load(:pleroma)
{:ok, modules} = :application.get_key(:pleroma, :modules)
module =
Enum.find(modules, fn module ->
module = Module.split(module)
match?(["Mix", "Tasks", "Pleroma" | _], module) and
String.downcase(List.last(module)) == task
end)
if module do
module.run(args)
else
IO.puts("The task #{task} does not exist")
end
end
def migrate(args) do
Mix.Tasks.Pleroma.Ecto.Migrate.run(args)
end
def rollback(args) do
Mix.Tasks.Pleroma.Ecto.Rollback.run(args)
end
def create do
Application.load(:pleroma)
case @repo.__adapter__.storage_up(@repo.config) do
:ok ->
IO.puts("The database for #{inspect(@repo)} has been created")
{:error, :already_up} ->
IO.puts("The database for #{inspect(@repo)} has already been created")
{:error, term} when is_binary(term) ->
IO.puts(:stderr, "The database for #{inspect(@repo)} couldn't be created: #{term}")
{:error, term} ->
IO.puts(
:stderr,
"The database for #{inspect(@repo)} couldn't be created: #{inspect(term)}"
)
end
end
end
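The entry point takes a single space-separated string, so an OTP release can drive it via an `eval` call (for example `./bin/pleroma eval 'Pleroma.ReleaseTasks.run("migrate")'`; the binary name is an assumption). A sketch of the dispatch:
Pleroma.ReleaseTasks.run("create")   # creates the database if it does not exist
Pleroma.ReleaseTasks.run("migrate")  # runs pending Ecto migrations
# Any other task name falls through to a matching Mix.Tasks.Pleroma.* module, if one exists.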


@ -0,0 +1,34 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.RepoStreamer do
alias Pleroma.Repo
import Ecto.Query
def chunk_stream(query, chunk_size) do
Stream.unfold(0, fn
:halt ->
{[], :halt}
last_id ->
query
|> order_by(asc: :id)
|> where([r], r.id > ^last_id)
|> limit(^chunk_size)
|> Repo.all()
|> case do
[] ->
{[], :halt}
records ->
last_id = List.last(records).id
{records, last_id}
end
end)
|> Stream.take_while(fn
[] -> false
_ -> true
end)
end
end
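A minimal usage sketch, mirroring how the user-deletion code below consumes the stream; the query and chunk size are illustrative:
ap_id
|> Pleroma.Activity.query_by_actor()
|> Pleroma.RepoStreamer.chunk_stream(50)
|> Stream.each(fn chunk -> Enum.each(chunk, &IO.inspect(&1.id)) end)
|> Stream.run()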


@ -0,0 +1,28 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReverseProxy.Client do
@callback request(atom(), String.t(), [tuple()], String.t(), list()) ::
{:ok, pos_integer(), [tuple()], reference() | map()}
| {:ok, pos_integer(), [tuple()]}
| {:ok, reference()}
| {:error, term()}
@callback stream_body(reference() | pid() | map()) ::
{:ok, binary()} | :done | {:error, String.t()}
@callback close(reference() | pid() | map()) :: :ok
def request(method, url, headers, "", opts \\ []) do
client().request(method, url, headers, "", opts)
end
def stream_body(ref), do: client().stream_body(ref)
def close(ref), do: client().close(ref)
defp client do
Pleroma.Config.get([Pleroma.ReverseProxy.Client], :hackney)
end
end
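The proxy client now resolves from config with `:hackney` as the default, which makes it swappable (for example in tests); a hedged config sketch where the module name is illustrative:
config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock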


@ -146,7 +146,7 @@ defp request(method, url, headers, hackney_opts) do
Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
method = method |> String.downcase() |> String.to_existing_atom()
case :hackney.request(method, url, headers, "", hackney_opts) do
case client().request(method, url, headers, "", hackney_opts) do
{:ok, code, headers, client} when code in @valid_resp_codes ->
{:ok, code, downcase_headers(headers), client}
@ -173,7 +173,7 @@ defp response(conn, client, url, status, headers, opts) do
halt(conn)
{:error, :closed, conn} ->
:hackney.close(client)
client().close(client)
halt(conn)
{:error, error, conn} ->
@ -181,7 +181,7 @@ defp response(conn, client, url, status, headers, opts) do
"#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}"
)
:hackney.close(client)
client().close(client)
halt(conn)
end
end
@ -196,7 +196,7 @@ defp chunk_reply(conn, client, opts, sent_so_far, duration) do
duration,
Keyword.get(opts, :max_read_duration, @max_read_duration)
),
{:ok, data} <- :hackney.stream_body(client),
{:ok, data} <- client().stream_body(client),
{:ok, duration} <- increase_read_duration(duration),
sent_so_far = sent_so_far + byte_size(data),
:ok <- body_size_constraint(sent_so_far, Keyword.get(opts, :max_body_size)),
@ -377,4 +377,6 @@ defp increase_read_duration({previous_duration, started})
defp increase_read_duration(_) do
{:ok, :no_duration_limit, :no_duration_limit}
end
defp client, do: Pleroma.ReverseProxy.Client
end


@ -10,10 +10,19 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilename do
"""
@behaviour Pleroma.Upload.Filter
def filter(upload) do
extension = List.last(String.split(upload.name, "."))
name = Pleroma.Config.get([__MODULE__, :text], random(extension))
{:ok, %Pleroma.Upload{upload | name: name}}
alias Pleroma.Config
alias Pleroma.Upload
def filter(%Upload{name: name} = upload) do
extension = List.last(String.split(name, "."))
name = predefined_name(extension) || random(extension)
{:ok, %Upload{upload | name: name}}
end
@spec predefined_name(String.t()) :: String.t() | nil
defp predefined_name(extension) do
with name when not is_nil(name) <- Config.get([__MODULE__, :text]),
do: String.replace(name, "{extension}", extension)
end
defp random(extension) do
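A config sketch for the new `{extension}` placeholder; the filename text is illustrative:
config :pleroma, Pleroma.Upload.Filter.AnonymizeFilename, text: "file.{extension}"
# An upload named "kitten.jpg" is then served as "file.jpg"; with no :text set,
# a random name keeping the original extension is used instead.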


@ -1,51 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Uploaders.Swift.Keystone do
use HTTPoison.Base
def process_url(url) do
Enum.join(
[Pleroma.Config.get!([Pleroma.Uploaders.Swift, :auth_url]), url],
"/"
)
end
def process_response_body(body) do
body
|> Jason.decode!()
end
def get_token do
settings = Pleroma.Config.get(Pleroma.Uploaders.Swift)
username = Keyword.fetch!(settings, :username)
password = Keyword.fetch!(settings, :password)
tenant_id = Keyword.fetch!(settings, :tenant_id)
case post(
"/tokens",
make_auth_body(username, password, tenant_id),
["Content-Type": "application/json"],
hackney: [:insecure]
) do
{:ok, %Tesla.Env{status: 200, body: body}} ->
body["access"]["token"]["id"]
{:ok, %Tesla.Env{status: _}} ->
""
end
end
def make_auth_body(username, password, tenant) do
Jason.encode!(%{
:auth => %{
:passwordCredentials => %{
:username => username,
:password => password
},
:tenantId => tenant
}
})
end
end


@ -1,29 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Uploaders.Swift.Client do
use HTTPoison.Base
def process_url(url) do
Enum.join(
[Pleroma.Config.get!([Pleroma.Uploaders.Swift, :storage_url]), url],
"/"
)
end
def upload_file(filename, body, content_type) do
token = Pleroma.Uploaders.Swift.Keystone.get_token()
case put("#{filename}", body, "X-Auth-Token": token, "Content-Type": content_type) do
{:ok, %Tesla.Env{status: 201}} ->
{:ok, {:file, filename}}
{:ok, %Tesla.Env{status: 401}} ->
{:error, "Unauthorized, Bad Token"}
{:error, _} ->
{:error, "Swift Upload Error"}
end
end
end


@ -1,19 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Uploaders.Swift do
@behaviour Pleroma.Uploaders.Uploader
def get_file(name) do
{:ok, {:url, Path.join([Pleroma.Config.get!([__MODULE__, :object_url]), name])}}
end
def put_file(upload) do
Pleroma.Uploaders.Swift.Client.upload_file(
upload.path,
File.read!(upload.tmpfile),
upload.content_type
)
end
end


@ -3,6 +3,8 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Uploaders.Uploader do
import Pleroma.Web.Gettext
@moduledoc """
Defines the contract to put and get an uploaded file to any backend.
"""
@ -66,7 +68,7 @@ defp handle_callback(uploader, upload) do
{:error, error}
end
after
30_000 -> {:error, "Uploader callback timeout"}
30_000 -> {:error, dgettext("errors", "Uploader callback timeout")}
end
end
end


@ -9,12 +9,14 @@ defmodule Pleroma.User do
import Ecto.Query
alias Comeonin.Pbkdf2
alias Ecto.Multi
alias Pleroma.Activity
alias Pleroma.Keys
alias Pleroma.Notification
alias Pleroma.Object
alias Pleroma.Registration
alias Pleroma.Repo
alias Pleroma.RepoStreamer
alias Pleroma.User
alias Pleroma.Web
alias Pleroma.Web.ActivityPub.ActivityPub
@ -105,15 +107,25 @@ def ap_id(%User{nickname: nickname}) do
def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"
def user_info(%User{} = user) do
def user_info(%User{} = user, args \\ %{}) do
following_count =
if args[:following_count], do: args[:following_count], else: following_count(user)
follower_count =
if args[:follower_count], do: args[:follower_count], else: user.info.follower_count
%{
following_count: following_count(user),
note_count: user.info.note_count,
follower_count: user.info.follower_count,
locked: user.info.locked,
confirmation_pending: user.info.confirmation_pending,
default_scope: user.info.default_scope
}
|> Map.put(:following_count, following_count)
|> Map.put(:follower_count, follower_count)
end
def set_info_cache(user, args) do
Cachex.put(:user_cache, "user_info:#{user.id}", user_info(user, args))
end
def restrict_deactivated(query) do
@ -193,27 +205,24 @@ def upgrade_changeset(struct, params \\ %{}) do
end
def password_update_changeset(struct, params) do
changeset =
struct
|> cast(params, [:password, :password_confirmation])
|> validate_required([:password, :password_confirmation])
|> validate_confirmation(:password)
OAuth.Token.delete_user_tokens(struct)
OAuth.Authorization.delete_user_authorizations(struct)
if changeset.valid? do
hashed = Pbkdf2.hashpwsalt(changeset.changes[:password])
changeset
|> put_change(:password_hash, hashed)
else
changeset
end
struct
|> cast(params, [:password, :password_confirmation])
|> validate_required([:password, :password_confirmation])
|> validate_confirmation(:password)
|> put_password_hash
end
def reset_password(user, data) do
update_and_set_cache(password_update_changeset(user, data))
def reset_password(%User{id: user_id} = user, data) do
multi =
Multi.new()
|> Multi.update(:user, password_update_changeset(user, data))
|> Multi.delete_all(:tokens, OAuth.Token.Query.get_by_user(user_id))
|> Multi.delete_all(:auth, OAuth.Authorization.delete_by_user_query(user))
case Repo.transaction(multi) do
{:ok, %{user: user} = _} -> set_cache(user)
{:error, _, changeset, _} -> {:error, changeset}
end
end
def register_changeset(struct, params \\ %{}, opts \\ []) do
@ -249,12 +258,11 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do
end
if changeset.valid? do
hashed = Pbkdf2.hashpwsalt(changeset.changes[:password])
ap_id = User.ap_id(%User{nickname: changeset.changes[:nickname]})
followers = User.ap_followers(%User{nickname: changeset.changes[:nickname]})
changeset
|> put_change(:password_hash, hashed)
|> put_password_hash
|> put_change(:ap_id, ap_id)
|> unique_constraint(:ap_id)
|> put_change(:following, [followers])
@ -324,14 +332,6 @@ def maybe_direct_follow(%User{} = follower, %User{} = followed) do
end
end
def maybe_follow(%User{} = follower, %User{info: _info} = followed) do
if not following?(follower, followed) do
follow(follower, followed)
else
{:ok, follower}
end
end
@doc "A mass follow for local users. Respects blocks in both directions but does not create activities."
@spec follow_all(User.t(), list(User.t())) :: {atom(), User.t()}
def follow_all(follower, followeds) do
@ -370,8 +370,8 @@ def follow(%User{} = follower, %User{info: info} = followed) do
ap_followers = followed.follower_address
cond do
following?(follower, followed) or info.deactivated ->
{:error, "Could not follow user: #{followed.nickname} is already on your list."}
info.deactivated ->
{:error, "Could not follow user: You are deactivated."}
deny_follow_blocked and blocks?(followed, follower) ->
{:error, "Could not follow user: #{followed.nickname} blocked you."}
@ -735,122 +735,6 @@ def get_recipients_from_activity(%Activity{recipients: to}) do
|> Repo.all()
end
def search(query, resolve \\ false, for_user \\ nil) do
# Strip the beginning @ off if there is a query
query = String.trim_leading(query, "@")
if resolve, do: get_or_fetch(query)
{:ok, results} =
Repo.transaction(fn ->
Ecto.Adapters.SQL.query(Repo, "select set_limit(0.25)", [])
Repo.all(search_query(query, for_user))
end)
results
end
def search_query(query, for_user) do
fts_subquery = fts_search_subquery(query)
trigram_subquery = trigram_search_subquery(query)
union_query = from(s in trigram_subquery, union_all: ^fts_subquery)
distinct_query = from(s in subquery(union_query), order_by: s.search_type, distinct: s.id)
from(s in subquery(boost_search_rank_query(distinct_query, for_user)),
order_by: [desc: s.search_rank],
limit: 40
)
end
defp boost_search_rank_query(query, nil), do: query
defp boost_search_rank_query(query, for_user) do
friends_ids = get_friends_ids(for_user)
followers_ids = get_followers_ids(for_user)
from(u in subquery(query),
select_merge: %{
search_rank:
fragment(
"""
CASE WHEN (?) THEN (?) * 1.3
WHEN (?) THEN (?) * 1.2
WHEN (?) THEN (?) * 1.1
ELSE (?) END
""",
u.id in ^friends_ids and u.id in ^followers_ids,
u.search_rank,
u.id in ^friends_ids,
u.search_rank,
u.id in ^followers_ids,
u.search_rank,
u.search_rank
)
}
)
end
defp fts_search_subquery(term, query \\ User) do
processed_query =
term
|> String.replace(~r/\W+/, " ")
|> String.trim()
|> String.split()
|> Enum.map(&(&1 <> ":*"))
|> Enum.join(" | ")
from(
u in query,
select_merge: %{
search_type: ^0,
search_rank:
fragment(
"""
ts_rank_cd(
setweight(to_tsvector('simple', regexp_replace(?, '\\W', ' ', 'g')), 'A') ||
setweight(to_tsvector('simple', regexp_replace(coalesce(?, ''), '\\W', ' ', 'g')), 'B'),
to_tsquery('simple', ?),
32
)
""",
u.nickname,
u.name,
^processed_query
)
},
where:
fragment(
"""
(setweight(to_tsvector('simple', regexp_replace(?, '\\W', ' ', 'g')), 'A') ||
setweight(to_tsvector('simple', regexp_replace(coalesce(?, ''), '\\W', ' ', 'g')), 'B')) @@ to_tsquery('simple', ?)
""",
u.nickname,
u.name,
^processed_query
)
)
|> restrict_deactivated()
end
defp trigram_search_subquery(term) do
from(
u in User,
select_merge: %{
# ^1 gives 'Postgrex expected a binary, got 1' for some weird reason
search_type: fragment("?", 1),
search_rank:
fragment(
"similarity(?, trim(? || ' ' || coalesce(?, '')))",
^term,
u.nickname,
u.name
)
},
where: fragment("trim(? || ' ' || coalesce(?, '')) % ?", u.nickname, u.name, ^term)
)
|> restrict_deactivated()
end
def mute(muter, %User{ap_id: ap_id}) do
info_cng =
muter.info
@ -962,15 +846,12 @@ def unblock(blocker, %{ap_id: ap_id}) do
def mutes?(nil, _), do: false
def mutes?(user, %{ap_id: ap_id}), do: Enum.member?(user.info.mutes, ap_id)
def blocks?(user, %{ap_id: ap_id}) do
blocks = user.info.blocks
domain_blocks = user.info.domain_blocks
def blocks?(%User{info: info} = _user, %{ap_id: ap_id}) do
blocks = info.blocks
domain_blocks = info.domain_blocks
%{host: host} = URI.parse(ap_id)
Enum.member?(blocks, ap_id) ||
Enum.any?(domain_blocks, fn domain ->
host == domain
end)
Enum.member?(blocks, ap_id) || Enum.any?(domain_blocks, &(&1 == host))
end
def subscribed_to?(user, %{ap_id: ap_id}) do
@ -1056,18 +937,26 @@ def delete(%User{} = user),
@spec perform(atom(), User.t()) :: {:ok, User.t()}
def perform(:delete, %User{} = user) do
{:ok, user} = User.deactivate(user)
{:ok, _user} = ActivityPub.delete(user)
# Remove all relationships
{:ok, followers} = User.get_followers(user)
Enum.each(followers, fn follower -> User.unfollow(follower, user) end)
Enum.each(followers, fn follower ->
ActivityPub.unfollow(follower, user)
User.unfollow(follower, user)
end)
{:ok, friends} = User.get_friends(user)
Enum.each(friends, fn followed -> User.unfollow(user, followed) end)
Enum.each(friends, fn followed ->
ActivityPub.unfollow(user, followed)
User.unfollow(user, followed)
end)
delete_user_activities(user)
invalidate_cache(user)
Repo.delete(user)
end
@spec perform(atom(), User.t()) :: {:ok, User.t()}
@ -1123,6 +1012,56 @@ def perform(:follow_import, %User{} = follower, followed_identifiers)
)
end
@spec sync_follow_counter() :: :ok
def sync_follow_counter,
do: PleromaJobQueue.enqueue(:background, __MODULE__, [:sync_follow_counters])
@spec perform(:sync_follow_counters) :: :ok
def perform(:sync_follow_counters) do
{:ok, _pid} = Agent.start_link(fn -> %{} end, name: :domain_errors)
config = Pleroma.Config.get([:instance, :external_user_synchronization])
:ok = sync_follow_counters(config)
Agent.stop(:domain_errors)
end
@spec sync_follow_counters(keyword()) :: :ok
def sync_follow_counters(opts \\ []) do
users = external_users(opts)
if length(users) > 0 do
errors = Agent.get(:domain_errors, fn state -> state end)
{last, updated_errors} = User.Synchronization.call(users, errors, opts)
Agent.update(:domain_errors, fn _state -> updated_errors end)
sync_follow_counters(max_id: last.id, limit: opts[:limit])
else
:ok
end
end
@spec external_users(keyword()) :: [User.t()]
def external_users(opts \\ []) do
query =
User.Query.build(%{
external: true,
active: true,
order_by: :id,
select: [:id, :ap_id, :info]
})
query =
if opts[:max_id],
do: where(query, [u], u.id > ^opts[:max_id]),
else: query
query =
if opts[:limit],
do: limit(query, ^opts[:limit]),
else: query
Repo.all(query)
end
def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers),
do:
PleromaJobQueue.enqueue(:background, __MODULE__, [
@ -1140,18 +1079,35 @@ def follow_import(%User{} = follower, followed_identifiers) when is_list(followe
])
def delete_user_activities(%User{ap_id: ap_id} = user) do
stream =
ap_id
|> Activity.query_by_actor()
|> Repo.stream()
Repo.transaction(fn -> Enum.each(stream, &delete_activity(&1)) end, timeout: :infinity)
ap_id
|> Activity.query_by_actor()
|> RepoStreamer.chunk_stream(50)
|> Stream.each(fn activities ->
Enum.each(activities, &delete_activity(&1))
end)
|> Stream.run()
{:ok, user}
end
defp delete_activity(%{data: %{"type" => "Create"}} = activity) do
Object.normalize(activity) |> ActivityPub.delete()
activity
|> Object.normalize()
|> ActivityPub.delete()
end
defp delete_activity(%{data: %{"type" => "Like"}} = activity) do
user = get_cached_by_ap_id(activity.actor)
object = Object.normalize(activity)
ActivityPub.unlike(user, object)
end
defp delete_activity(%{data: %{"type" => "Announce"}} = activity) do
user = get_cached_by_ap_id(activity.actor)
object = Object.normalize(activity)
ActivityPub.unannounce(user, object)
end
defp delete_activity(_activity), do: "Doing nothing"
@ -1160,9 +1116,7 @@ def html_filter_policy(%User{info: %{no_rich_text: true}}) do
Pleroma.HTML.Scrubber.TwitterText
end
@default_scrubbers Pleroma.Config.get([:markup, :scrub_policy])
def html_filter_policy(_), do: @default_scrubbers
def html_filter_policy(_), do: Pleroma.Config.get([:markup, :scrub_policy])
def fetch_by_ap_id(ap_id) do
ap_try = ActivityPub.make_user_from_ap_id(ap_id)
@ -1449,4 +1403,14 @@ def get_ap_ids_by_nicknames(nicknames) do
)
|> Repo.all()
end
defdelegate search(query, opts \\ []), to: User.Search
defp put_password_hash(
%Ecto.Changeset{valid?: true, changes: %{password: password}} = changeset
) do
change(changeset, password_hash: Pbkdf2.hashpwsalt(password))
end
defp put_password_hash(changeset), do: changeset
end


@ -7,7 +7,7 @@ defmodule Pleroma.User.Query do
User query builder module. Builds query from new query or another user query.
## Example:
query = Pleroma.User.Query(%{nickname: "nickname"})
query = Pleroma.User.Query.build(%{nickname: "nickname"})
another_query = Pleroma.User.Query.build(query, %{email: "email@example.com"})
Pleroma.Repo.all(query)
Pleroma.Repo.all(another_query)
@ -47,7 +47,10 @@ defmodule Pleroma.User.Query do
friends: User.t(),
recipients_from_activity: [String.t()],
nickname: [String.t()],
ap_id: [String.t()]
ap_id: [String.t()],
order_by: term(),
select: term(),
limit: pos_integer()
}
| %{}
@ -141,6 +144,18 @@ defp compose_query({:recipients_from_activity, to}, query) do
where(query, [u], u.ap_id in ^to or fragment("? && ?", u.following, ^to))
end
defp compose_query({:order_by, key}, query) do
order_by(query, [u], field(u, ^key))
end
defp compose_query({:select, keys}, query) do
select(query, [u], ^keys)
end
defp compose_query({:limit, limit}, query) do
limit(query, ^limit)
end
defp compose_query(_unsupported_param, query), do: query
defp prepare_tag_criteria(tag, query) do

lib/pleroma/user/search.ex (new file, 227 lines)

@ -0,0 +1,227 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.User.Search do
alias Pleroma.Repo
alias Pleroma.User
import Ecto.Query
@similarity_threshold 0.25
@limit 20
def search(query_string, opts \\ []) do
resolve = Keyword.get(opts, :resolve, false)
following = Keyword.get(opts, :following, false)
result_limit = Keyword.get(opts, :limit, @limit)
offset = Keyword.get(opts, :offset, 0)
for_user = Keyword.get(opts, :for_user)
query_string = format_query(query_string)
maybe_resolve(resolve, for_user, query_string)
{:ok, results} =
Repo.transaction(fn ->
Ecto.Adapters.SQL.query(
Repo,
"select set_limit(#{@similarity_threshold})",
[]
)
query_string
|> search_query(for_user, following)
|> paginate(result_limit, offset)
|> Repo.all()
end)
results
end
defp format_query(query_string) do
# Strip the beginning @ off if there is a query
query_string = String.trim_leading(query_string, "@")
with [name, domain] <- String.split(query_string, "@"),
formatted_domain <- String.replace(domain, ~r/[!-\-|@|[-`|{-~|\/|:]+/, "") do
name <> "@" <> to_string(:idna.encode(formatted_domain))
else
_ -> query_string
end
end
defp search_query(query_string, for_user, following) do
for_user
|> base_query(following)
|> filter_blocked_user(for_user)
|> filter_blocked_domains(for_user)
|> search_subqueries(query_string)
|> union_subqueries
|> distinct_query()
|> boost_search_rank_query(for_user)
|> subquery()
|> order_by(desc: :search_rank)
|> maybe_restrict_local(for_user)
end
defp base_query(_user, false), do: User
defp base_query(user, true), do: User.get_followers_query(user)
defp filter_blocked_user(query, %User{info: %{blocks: blocks}})
when length(blocks) > 0 do
from(q in query, where: not (q.ap_id in ^blocks))
end
defp filter_blocked_user(query, _), do: query
defp filter_blocked_domains(query, %User{info: %{domain_blocks: domain_blocks}})
when length(domain_blocks) > 0 do
domains = Enum.join(domain_blocks, ",")
from(
q in query,
where: fragment("substring(ap_id from '.*://([^/]*)') NOT IN (?)", ^domains)
)
end
defp filter_blocked_domains(query, _), do: query
defp paginate(query, limit, offset) do
from(q in query, limit: ^limit, offset: ^offset)
end
defp union_subqueries({fts_subquery, trigram_subquery}) do
from(s in trigram_subquery, union_all: ^fts_subquery)
end
defp search_subqueries(base_query, query_string) do
{
fts_search_subquery(base_query, query_string),
trigram_search_subquery(base_query, query_string)
}
end
defp distinct_query(q) do
from(s in subquery(q), order_by: s.search_type, distinct: s.id)
end
defp maybe_resolve(true, user, query) do
case {limit(), user} do
{:all, _} -> :noop
{:unauthenticated, %User{}} -> User.get_or_fetch(query)
{:unauthenticated, _} -> :noop
{false, _} -> User.get_or_fetch(query)
end
end
defp maybe_resolve(_, _, _), do: :noop
defp maybe_restrict_local(q, user) do
case {limit(), user} do
{:all, _} -> restrict_local(q)
{:unauthenticated, %User{}} -> q
{:unauthenticated, _} -> restrict_local(q)
{false, _} -> q
end
end
defp limit, do: Pleroma.Config.get([:instance, :limit_to_local_content], :unauthenticated)
defp restrict_local(q), do: where(q, [u], u.local == true)
defp boost_search_rank_query(query, nil), do: query
defp boost_search_rank_query(query, for_user) do
friends_ids = User.get_friends_ids(for_user)
followers_ids = User.get_followers_ids(for_user)
from(u in subquery(query),
select_merge: %{
search_rank:
fragment(
"""
CASE WHEN (?) THEN 0.5 + (?) * 1.3
WHEN (?) THEN 0.5 + (?) * 1.2
WHEN (?) THEN (?) * 1.1
ELSE (?) END
""",
u.id in ^friends_ids and u.id in ^followers_ids,
u.search_rank,
u.id in ^friends_ids,
u.search_rank,
u.id in ^followers_ids,
u.search_rank,
u.search_rank
)
}
)
end
@spec fts_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t()
defp fts_search_subquery(query, term) do
processed_query =
String.trim_trailing(term, "@" <> local_domain())
|> String.replace(~r/[!-\/|@|[-`|{-~|:-?]+/, " ")
|> String.trim()
|> String.split()
|> Enum.map(&(&1 <> ":*"))
|> Enum.join(" | ")
from(
u in query,
select_merge: %{
search_type: ^0,
search_rank:
fragment(
"""
ts_rank_cd(
setweight(to_tsvector('simple', regexp_replace(?, '\\W', ' ', 'g')), 'A') ||
setweight(to_tsvector('simple', regexp_replace(coalesce(?, ''), '\\W', ' ', 'g')), 'B'),
to_tsquery('simple', ?),
32
)
""",
u.nickname,
u.name,
^processed_query
)
},
where:
fragment(
"""
(setweight(to_tsvector('simple', regexp_replace(?, '\\W', ' ', 'g')), 'A') ||
setweight(to_tsvector('simple', regexp_replace(coalesce(?, ''), '\\W', ' ', 'g')), 'B')) @@ to_tsquery('simple', ?)
""",
u.nickname,
u.name,
^processed_query
)
)
|> User.restrict_deactivated()
end
@spec trigram_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t()
defp trigram_search_subquery(query, term) do
term = String.trim_trailing(term, "@" <> local_domain())
from(
u in query,
select_merge: %{
# ^1 gives 'Postgrex expected a binary, got 1' for some weird reason
search_type: fragment("?", 1),
search_rank:
fragment(
"similarity(?, trim(? || ' ' || coalesce(?, '')))",
^term,
u.nickname,
u.name
)
},
where: fragment("trim(? || ' ' || coalesce(?, '')) % ?", u.nickname, u.name, ^term)
)
|> User.restrict_deactivated()
end
defp local_domain, do: Pleroma.Config.get([Pleroma.Web.Endpoint, :url, :host])
end
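A minimal usage sketch through the new `Pleroma.User.search/2` delegate; the query and options are illustrative:
Pleroma.User.search("lain@example.com", resolve: true, for_user: current_user, limit: 10)
# resolve: true may fetch the remote account first; results are ranked by the
# combined full-text/trigram rank and boosted for accounts current_user follows.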


@ -0,0 +1,60 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.User.Synchronization do
alias Pleroma.HTTP
alias Pleroma.User
@spec call([User.t()], map(), keyword()) :: {User.t(), map()}
def call(users, errors, opts \\ []) do
do_call(users, errors, opts)
end
defp do_call([user | []], errors, opts) do
updated = fetch_counters(user, errors, opts)
{user, updated}
end
defp do_call([user | others], errors, opts) do
updated = fetch_counters(user, errors, opts)
do_call(others, updated, opts)
end
defp fetch_counters(user, errors, opts) do
%{host: host} = URI.parse(user.ap_id)
info = %{}
{following, errors} = fetch_counter(user.ap_id <> "/following", host, errors, opts)
info = if following, do: Map.put(info, :following_count, following), else: info
{followers, errors} = fetch_counter(user.ap_id <> "/followers", host, errors, opts)
info = if followers, do: Map.put(info, :follower_count, followers), else: info
User.set_info_cache(user, info)
errors
end
defp available_domain?(domain, errors, opts) do
max_retries = Keyword.get(opts, :max_retries, 3)
not (Map.has_key?(errors, domain) && errors[domain] >= max_retries)
end
defp fetch_counter(url, host, errors, opts) do
with true <- available_domain?(host, errors, opts),
{:ok, %{body: body, status: code}} when code in 200..299 <-
HTTP.get(
url,
[{:Accept, "application/activity+json"}]
),
{:ok, data} <- Jason.decode(body) do
{data["totalItems"], errors}
else
false ->
{nil, errors}
_ ->
{nil, Map.update(errors, host, 1, &(&1 + 1))}
end
end
end
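A rough sketch of a single pass, assuming `remote_users` is a list of remote %User{} structs; the error map tracks per-domain failures so unreachable hosts are skipped after :max_retries attempts:
{last_user, errors} = Pleroma.User.Synchronization.call(remote_users, %{}, max_retries: 3)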


@ -0,0 +1,32 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.User.SynchronizationWorker do
use GenServer
def start_link do
config = Pleroma.Config.get([:instance, :external_user_synchronization])
if config[:enabled] do
GenServer.start_link(__MODULE__, interval: config[:interval])
else
:ignore
end
end
def init(opts) do
schedule_next(opts)
{:ok, opts}
end
def handle_info(:sync_follow_counters, opts) do
Pleroma.User.sync_follow_counter()
schedule_next(opts)
{:noreply, opts}
end
defp schedule_next(opts) do
Process.send_after(self(), :sync_follow_counters, opts[:interval])
end
end
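The worker only starts when the feature is enabled; a hedged config sketch where the interval (milliseconds) is illustrative:
config :pleroma, :instance,
  external_user_synchronization: [enabled: true, interval: 60 * 60 * 1000]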


@ -1,3 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.User.WelcomeMessage do
alias Pleroma.User
alias Pleroma.Web.CommonAPI


@ -188,6 +188,22 @@ def stream_out_participations(participations) do
end)
end
def stream_out_participations(%Object{data: %{"context" => context}}, user) do
with %Conversation{} = conversation <- Conversation.get_for_ap_id(context),
conversation = Repo.preload(conversation, :participations),
last_activity_id =
fetch_latest_activity_id_for_context(conversation.ap_id, %{
"user" => user,
"blocking_user" => user
}) do
if last_activity_id do
stream_out_participations(conversation.participations)
end
end
end
def stream_out_participations(_, _), do: :noop
def stream_out(activity) do
public = "https://www.w3.org/ns/activitystreams#Public"
@ -388,6 +404,19 @@ def unfollow(follower, followed, activity_id \\ nil, local \\ true) do
end
end
def delete(%User{ap_id: ap_id, follower_address: follower_address} = user) do
with data <- %{
"to" => [follower_address],
"type" => "Delete",
"actor" => ap_id,
"object" => %{"type" => "Person", "id" => ap_id}
},
{:ok, activity} <- insert(data, true, true),
:ok <- maybe_federate(activity) do
{:ok, user}
end
end
def delete(%Object{data: %{"id" => id, "actor" => actor}} = object, local \\ true) do
user = User.get_cached_by_ap_id(actor)
to = (object.data["to"] || []) ++ (object.data["cc"] || [])
@ -400,7 +429,8 @@ def delete(%Object{data: %{"id" => id, "actor" => actor}} = object, local \\ tru
"to" => to,
"deleted_activity_id" => activity && activity.id
},
{:ok, activity} <- insert(data, local),
{:ok, activity} <- insert(data, local, false),
stream_out_participations(object, user),
_ <- decrease_replies_count_if_reply(object),
# Changing note count prior to enqueuing federation task in order to avoid
# race conditions on updating user.info


@ -31,9 +31,8 @@ def relay_active?(conn, _) do
conn
else
conn
|> put_status(404)
|> json(%{error: "not found"})
|> halt
|> render_error(:not_found, "not found")
|> halt()
end
end
@ -190,7 +189,7 @@ def inbox(conn, params) do
Logger.info(inspect(conn.req_headers))
end
json(conn, "error")
json(conn, dgettext("errors", "error"))
end
def relay(conn, _params) do
@ -218,9 +217,15 @@ def read_inbox(%{assigns: %{user: user}} = conn, %{"nickname" => nickname} = par
|> put_resp_header("content-type", "application/activity+json")
|> json(UserView.render("inbox.json", %{user: user, max_id: params["max_id"]}))
else
err =
dgettext("errors", "can't read inbox of %{nickname} as %{as_nickname}",
nickname: nickname,
as_nickname: user.nickname
)
conn
|> put_status(:forbidden)
|> json("can't read inbox of #{nickname} as #{user.nickname}")
|> json(err)
end
end
@ -246,7 +251,7 @@ def handle_user_activity(user, %{"type" => "Delete"} = params) do
{:ok, delete} <- ActivityPub.delete(object) do
{:ok, delete}
else
_ -> {:error, "Can't delete object"}
_ -> {:error, dgettext("errors", "Can't delete object")}
end
end
@ -255,12 +260,12 @@ def handle_user_activity(user, %{"type" => "Like"} = params) do
{:ok, activity, _object} <- ActivityPub.like(user, object) do
{:ok, activity}
else
_ -> {:error, "Can't like object"}
_ -> {:error, dgettext("errors", "Can't like object")}
end
end
def handle_user_activity(_, _) do
{:error, "Unhandled activity type"}
{:error, dgettext("errors", "Unhandled activity type")}
end
def update_outbox(
@ -288,22 +293,28 @@ def update_outbox(
|> json(message)
end
else
err =
dgettext("errors", "can't update outbox of %{nickname} as %{as_nickname}",
nickname: nickname,
as_nickname: user.nickname
)
conn
|> put_status(:forbidden)
|> json("can't update outbox of #{nickname} as #{user.nickname}")
|> json(err)
end
end
def errors(conn, {:error, :not_found}) do
conn
|> put_status(404)
|> json("Not found")
|> put_status(:not_found)
|> json(dgettext("errors", "Not found"))
end
def errors(conn, _e) do
conn
|> put_status(500)
|> json("error")
|> put_status(:internal_server_error)
|> json(dgettext("errors", "error"))
end
defp set_requester_reachable(%Plug.Conn{} = conn, _) do


@ -0,0 +1,48 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy do
alias Pleroma.User
require Logger
# has the user successfully posted before?
defp old_user?(%User{} = u) do
u.info.note_count > 0 || u.info.follower_count > 0
end
# does the post contain links?
defp contains_links?(%{"content" => content} = _object) do
content
|> Floki.filter_out("a.mention,a.hashtag,a[rel~=\"tag\"],a.zrl")
|> Floki.attribute("a", "href")
|> length() > 0
end
defp contains_links?(_), do: false
def filter(%{"type" => "Create", "actor" => actor, "object" => object} = message) do
with {:ok, %User{} = u} <- User.get_or_fetch_by_ap_id(actor),
{:contains_links, true} <- {:contains_links, contains_links?(object)},
{:old_user, true} <- {:old_user, old_user?(u)} do
{:ok, message}
else
{:contains_links, false} ->
{:ok, message}
{:old_user, false} ->
{:reject, nil}
{:error, _} ->
{:reject, nil}
e ->
Logger.warn("[MRF anti-link-spam] WTF: unhandled error #{inspect(e)}")
{:reject, nil}
end
end
# in all other cases, pass through
def filter(message), do: {:ok, message}
end
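Like any MRF module, this policy only runs once it is listed in the instance rewrite policies; a hedged config sketch, assuming the `:instance, rewrite_policy` key used by Pleroma at the time of this change:
config :pleroma, :instance,
  rewrite_policy: [Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy]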


@ -9,8 +9,9 @@ defmodule Pleroma.Web.ActivityPub.MRF.EnsureRePrepended do
@behaviour Pleroma.Web.ActivityPub.MRF
@reply_prefix Regex.compile!("^re:[[:space:]]*", [:caseless])
def filter_by_summary(
%{"summary" => parent_summary} = _parent,
%{data: %{"summary" => parent_summary}} = _in_reply_to,
%{"summary" => child_summary} = child
)
when not is_nil(child_summary) and byte_size(child_summary) > 0 and
@ -24,17 +25,13 @@ def filter_by_summary(
end
end
def filter_by_summary(_parent, child), do: child
def filter(%{"type" => activity_type} = object) when activity_type == "Create" do
child = object["object"]
in_reply_to = Object.normalize(child["inReplyTo"])
def filter_by_summary(_in_reply_to, child), do: child
def filter(%{"type" => "Create", "object" => child_object} = object) do
child =
if(in_reply_to,
do: filter_by_summary(in_reply_to.data, child),
else: child
)
child_object["inReplyTo"]
|> Object.normalize(child_object["inReplyTo"])
|> filter_by_summary(child_object)
object = Map.put(object, "object", child)


@ -0,0 +1,56 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
@moduledoc "Preloads any attachments in the MediaProxy cache by prefetching them"
@behaviour Pleroma.Web.ActivityPub.MRF
alias Pleroma.HTTP
alias Pleroma.Web.MediaProxy
require Logger
@hackney_options [
pool: :media,
recv_timeout: 10_000
]
def perform(:prefetch, url) do
Logger.info("Prefetching #{inspect(url)}")
url
|> MediaProxy.url()
|> HTTP.get([], adapter: @hackney_options)
end
def perform(:preload, %{"object" => %{"attachment" => attachments}} = _message) do
Enum.each(attachments, fn
%{"url" => url} when is_list(url) ->
url
|> Enum.each(fn
%{"href" => href} ->
PleromaJobQueue.enqueue(:background, __MODULE__, [:prefetch, href])
x ->
Logger.debug("Unhandled attachment URL object #{inspect(x)}")
end)
x ->
Logger.debug("Unhandled attachment #{inspect(x)}")
end)
end
@impl true
def filter(
%{"type" => "Create", "object" => %{"attachment" => attachments} = _object} = message
)
when is_list(attachments) and length(attachments) > 0 do
PleromaJobQueue.enqueue(:background, __MODULE__, [:preload, message])
{:ok, message}
end
@impl true
def filter(message), do: {:ok, message}
end


@ -10,19 +10,11 @@ defmodule Pleroma.Web.ActivityPub.MRF.NoPlaceholderTextPolicy do
def filter(
%{
"type" => "Create",
"object" => %{"content" => content, "attachment" => _attachment} = child_object
"object" => %{"content" => content, "attachment" => _} = _child_object
} = object
)
when content in [".", "<p>.</p>"] do
child_object =
child_object
|> Map.put("content", "")
object =
object
|> Map.put("object", child_object)
{:ok, object}
{:ok, put_in(object, ["object", "content"], "")}
end
@impl true


@ -8,18 +8,14 @@ defmodule Pleroma.Web.ActivityPub.MRF.NormalizeMarkup do
@behaviour Pleroma.Web.ActivityPub.MRF
def filter(%{"type" => activity_type} = object) when activity_type == "Create" do
def filter(%{"type" => "Create", "object" => child_object} = object) do
scrub_policy = Pleroma.Config.get([:mrf_normalize_markup, :scrub_policy])
child = object["object"]
content =
child["content"]
child_object["content"]
|> HTML.filter_tags(scrub_policy)
child = Map.put(child, "content", content)
object = Map.put(object, "object", child)
object = put_in(object, ["object", "content"], content)
{:ok, object}
end


@ -3,46 +3,42 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.RejectNonPublic do
alias Pleroma.User
@moduledoc "Rejects non-public (followers-only, direct) activities"
alias Pleroma.Config
alias Pleroma.User
@behaviour Pleroma.Web.ActivityPub.MRF
@public "https://www.w3.org/ns/activitystreams#Public"
@impl true
def filter(%{"type" => "Create"} = object) do
user = User.get_cached_by_ap_id(object["actor"])
public = "https://www.w3.org/ns/activitystreams#Public"
# Determine visibility
visibility =
cond do
public in object["to"] -> "public"
public in object["cc"] -> "unlisted"
@public in object["to"] -> "public"
@public in object["cc"] -> "unlisted"
user.follower_address in object["to"] -> "followers"
true -> "direct"
end
policy = Pleroma.Config.get(:mrf_rejectnonpublic)
policy = Config.get(:mrf_rejectnonpublic)
case visibility do
"public" ->
cond do
visibility in ["public", "unlisted"] ->
{:ok, object}
"unlisted" ->
visibility == "followers" and Keyword.get(policy, :allow_followersonly) ->
{:ok, object}
"followers" ->
with true <- Keyword.get(policy, :allow_followersonly) do
{:ok, object}
else
_e -> {:reject, nil}
end
visibility == "direct" and Keyword.get(policy, :allow_direct) ->
{:ok, object}
"direct" ->
with true <- Keyword.get(policy, :allow_direct) do
{:ok, object}
else
_e -> {:reject, nil}
end
true ->
{:reject, nil}
end
end


@ -19,12 +19,17 @@ defmodule Pleroma.Web.ActivityPub.MRF.TagPolicy do
- `mrf_tag:disable-any-subscription`: Reject any follow requests
"""
@public "https://www.w3.org/ns/activitystreams#Public"
defp get_tags(%User{tags: tags}) when is_list(tags), do: tags
defp get_tags(_), do: []
defp process_tag(
"mrf_tag:media-force-nsfw",
%{"type" => "Create", "object" => %{"attachment" => child_attachment} = object} = message
%{
"type" => "Create",
"object" => %{"attachment" => child_attachment} = object
} = message
)
when length(child_attachment) > 0 do
tags = (object["tag"] || []) ++ ["nsfw"]
@ -41,7 +46,10 @@ defp process_tag(
defp process_tag(
"mrf_tag:media-strip",
%{"type" => "Create", "object" => %{"attachment" => child_attachment} = object} = message
%{
"type" => "Create",
"object" => %{"attachment" => child_attachment} = object
} = message
)
when length(child_attachment) > 0 do
object = Map.delete(object, "attachment")
@ -52,19 +60,22 @@ defp process_tag(
defp process_tag(
"mrf_tag:force-unlisted",
%{"type" => "Create", "to" => to, "cc" => cc, "actor" => actor} = message
%{
"type" => "Create",
"to" => to,
"cc" => cc,
"actor" => actor,
"object" => object
} = message
) do
user = User.get_cached_by_ap_id(actor)
if Enum.member?(to, "https://www.w3.org/ns/activitystreams#Public") do
to =
List.delete(to, "https://www.w3.org/ns/activitystreams#Public") ++ [user.follower_address]
cc =
List.delete(cc, user.follower_address) ++ ["https://www.w3.org/ns/activitystreams#Public"]
if Enum.member?(to, @public) do
to = List.delete(to, @public) ++ [user.follower_address]
cc = List.delete(cc, user.follower_address) ++ [@public]
object =
message["object"]
object
|> Map.put("to", to)
|> Map.put("cc", cc)
@ -82,19 +93,22 @@ defp process_tag(
defp process_tag(
"mrf_tag:sandbox",
%{"type" => "Create", "to" => to, "cc" => cc, "actor" => actor} = message
%{
"type" => "Create",
"to" => to,
"cc" => cc,
"actor" => actor,
"object" => object
} = message
) do
user = User.get_cached_by_ap_id(actor)
if Enum.member?(to, "https://www.w3.org/ns/activitystreams#Public") or
Enum.member?(cc, "https://www.w3.org/ns/activitystreams#Public") do
to =
List.delete(to, "https://www.w3.org/ns/activitystreams#Public") ++ [user.follower_address]
cc = List.delete(cc, "https://www.w3.org/ns/activitystreams#Public")
if Enum.member?(to, @public) or Enum.member?(cc, @public) do
to = List.delete(to, @public) ++ [user.follower_address]
cc = List.delete(cc, @public)
object =
message["object"]
object
|> Map.put("to", to)
|> Map.put("cc", cc)
@ -123,7 +137,8 @@ defp process_tag(
end
end
defp process_tag("mrf_tag:disable-any-subscription", %{"type" => "Follow"}), do: {:reject, nil}
defp process_tag("mrf_tag:disable-any-subscription", %{"type" => "Follow"}),
do: {:reject, nil}
defp process_tag(_, message), do: {:ok, message}


@ -21,7 +21,12 @@ defp filter_by_list(%{"actor" => actor} = object, allow_list) do
@impl true
def filter(%{"actor" => actor} = object) do
actor_info = URI.parse(actor)
allow_list = Config.get([:mrf_user_allowlist, String.to_atom(actor_info.host)], [])
allow_list =
Config.get(
[:mrf_user_allowlist, String.to_atom(actor_info.host)],
[]
)
filter_by_list(object, allow_list)
end


@ -88,7 +88,7 @@ defp should_federate?(inbox, public) do
true
else
inbox_info = URI.parse(inbox)
!Enum.member?(Pleroma.Config.get([:instance, :quarantined_instances], []), inbox_info.host)
!Enum.member?(Config.get([:instance, :quarantined_instances], []), inbox_info.host)
end
end


@ -14,6 +14,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.Federator
import Ecto.Query
@ -22,20 +23,20 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
@doc """
Modifies an incoming AP object (Mastodon format) to match our internal format.
"""
def fix_object(object) do
def fix_object(object, options \\ []) do
object
|> fix_actor
|> fix_url
|> fix_attachments
|> fix_context
|> fix_in_reply_to
|> fix_in_reply_to(options)
|> fix_emoji
|> fix_tag
|> fix_content_map
|> fix_likes
|> fix_addressing
|> fix_summary
|> fix_type
|> fix_type(options)
end
def fix_summary(%{"summary" => nil} = object) do
@ -164,7 +165,9 @@ def fix_likes(object) do
object
end
def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object)
def fix_in_reply_to(object, options \\ [])
def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options)
when not is_nil(in_reply_to) do
in_reply_to_id =
cond do
@ -182,28 +185,34 @@ def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object)
""
end
case get_obj_helper(in_reply_to_id) do
{:ok, replied_object} ->
with %Activity{} = _activity <-
Activity.get_create_by_object_ap_id(replied_object.data["id"]) do
object
|> Map.put("inReplyTo", replied_object.data["id"])
|> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id)
|> Map.put("conversation", replied_object.data["context"] || object["conversation"])
|> Map.put("context", replied_object.data["context"] || object["conversation"])
else
e ->
Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
object
end
object = Map.put(object, "inReplyToAtomUri", in_reply_to_id)
e ->
Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
object
if Federator.allowed_incoming_reply_depth?(options[:depth]) do
case get_obj_helper(in_reply_to_id, options) do
{:ok, replied_object} ->
with %Activity{} = _activity <-
Activity.get_create_by_object_ap_id(replied_object.data["id"]) do
object
|> Map.put("inReplyTo", replied_object.data["id"])
|> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id)
|> Map.put("conversation", replied_object.data["context"] || object["conversation"])
|> Map.put("context", replied_object.data["context"] || object["conversation"])
else
e ->
Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
object
end
e ->
Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
object
end
else
object
end
end
def fix_in_reply_to(object), do: object
def fix_in_reply_to(object, _options), do: object
def fix_context(object) do
context = object["context"] || object["conversation"] || Utils.generate_context_id()
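The reply chain above is only fetched while `Federator.allowed_incoming_reply_depth?/1` permits it. A plausible sketch of that guard, assuming the limit is an instance setting named `federation_incoming_replies_max_depth` (an assumption, not shown in this diff):
def allowed_incoming_reply_depth?(depth) do
  # assumed config key; nil means "no limit"
  max_replies_depth = Pleroma.Config.get([:instance, :federation_incoming_replies_max_depth])

  if max_replies_depth do
    (depth || 1) <= max_replies_depth
  else
    true
  end
end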
@ -336,17 +345,22 @@ def fix_content_map(%{"contentMap" => content_map} = object) do
def fix_content_map(object), do: object
def fix_type(%{"inReplyTo" => reply_id} = object) when is_binary(reply_id) do
reply = Object.normalize(reply_id)
def fix_type(object, options \\ [])
if reply.data["type"] == "Question" and object["name"] do
def fix_type(%{"inReplyTo" => reply_id} = object, options) when is_binary(reply_id) do
reply =
if Federator.allowed_incoming_reply_depth?(options[:depth]) do
Object.normalize(reply_id, true)
end
if reply && (reply.data["type"] == "Question" and object["name"]) do
Map.put(object, "type", "Answer")
else
object
end
end
def fix_type(object), do: object
def fix_type(object, _), do: object
defp mastodon_follow_hack(%{"id" => id, "actor" => follower_id}, followed) do
with true <- id =~ "follows",
@ -374,9 +388,11 @@ defp get_follow_activity(follow_object, followed) do
end
end
def handle_incoming(data, options \\ [])
# Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them
# with nil ID.
def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data) do
def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data, _options) do
with context <- data["context"] || Utils.generate_context_id(),
content <- data["content"] || "",
%User{} = actor <- User.get_cached_by_ap_id(actor),
@ -409,15 +425,19 @@ def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} =
end
# disallow objects with bogus IDs
def handle_incoming(%{"id" => nil}), do: :error
def handle_incoming(%{"id" => ""}), do: :error
def handle_incoming(%{"id" => nil}, _options), do: :error
def handle_incoming(%{"id" => ""}, _options), do: :error
# length of https:// = 8, should validate better, but good enough for now.
def handle_incoming(%{"id" => id}) when not (is_binary(id) and length(id) > 8), do: :error
def handle_incoming(%{"id" => id}, _options) when not (is_binary(id) and length(id) > 8),
do: :error
# TODO: validate those with an Ecto schema
# - tags
# - emoji
def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = object} = data)
def handle_incoming(
%{"type" => "Create", "object" => %{"type" => objtype} = object} = data,
options
)
when objtype in ["Article", "Note", "Video", "Page", "Question", "Answer"] do
actor = Containment.get_actor(data)
@ -427,7 +447,8 @@ def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = obj
with nil <- Activity.get_create_by_object_ap_id(object["id"]),
{:ok, %User{} = user} <- User.get_or_fetch_by_ap_id(data["actor"]) do
object = fix_object(data["object"])
options = Keyword.put(options, :depth, (options[:depth] || 0) + 1)
object = fix_object(data["object"], options)
params = %{
to: data["to"],
@ -452,16 +473,19 @@ def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = obj
end
def handle_incoming(
%{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data
%{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data,
_options
) do
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
{:ok, activity} <- ActivityPub.follow(follower, followed, id, false) do
with deny_follow_blocked <- Pleroma.Config.get([:user, :deny_follow_blocked]),
{:user_blocked, false} <-
{_, false} <-
{:user_blocked, User.blocks?(followed, follower) && deny_follow_blocked},
{:user_locked, false} <- {:user_locked, User.locked?(followed)},
{:follow, {:ok, follower}} <- {:follow, User.follow(follower, followed)} do
{_, false} <- {:user_locked, User.locked?(followed)},
{_, {:ok, follower}} <- {:follow, User.follow(follower, followed)},
{_, {:ok, _}} <-
{:follow_state_update, Utils.update_follow_state_for_all(activity, "accept")} do
ActivityPub.accept(%{
to: [follower.ap_id],
actor: followed,
@ -470,7 +494,7 @@ def handle_incoming(
})
else
{:user_blocked, true} ->
{:ok, _} = Utils.update_follow_state(activity, "reject")
{:ok, _} = Utils.update_follow_state_for_all(activity, "reject")
ActivityPub.reject(%{
to: [follower.ap_id],
@ -480,7 +504,7 @@ def handle_incoming(
})
{:follow, {:error, _}} ->
{:ok, _} = Utils.update_follow_state(activity, "reject")
{:ok, _} = Utils.update_follow_state_for_all(activity, "reject")
ActivityPub.reject(%{
to: [follower.ap_id],
@ -501,38 +525,35 @@ def handle_incoming(
end
def handle_incoming(
%{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => _id} = data
%{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => _id} = data,
_options
) do
with actor <- Containment.get_actor(data),
{:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
{:ok, follow_activity} <- get_follow_activity(follow_object, followed),
{:ok, follow_activity} <- Utils.update_follow_state(follow_activity, "accept"),
{:ok, follow_activity} <- Utils.update_follow_state_for_all(follow_activity, "accept"),
%User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]),
{:ok, activity} <-
ActivityPub.accept(%{
to: follow_activity.data["to"],
type: "Accept",
actor: followed,
object: follow_activity.data["id"],
local: false
}) do
if not User.following?(follower, followed) do
{:ok, _follower} = User.follow(follower, followed)
end
{:ok, activity}
{:ok, _follower} = User.follow(follower, followed) do
ActivityPub.accept(%{
to: follow_activity.data["to"],
type: "Accept",
actor: followed,
object: follow_activity.data["id"],
local: false
})
else
_e -> :error
end
end
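With this rewrite the follow relationship is created inside the `with` chain, so an Accept is only produced when `User.follow/2` succeeds. A hypothetical incoming Accept, for illustration only (all URLs invented):
%{
  "type" => "Accept",
  "actor" => "https://remote.example/users/bob",
  "object" => "https://local.example/activities/9a1b2c",
  "id" => "https://remote.example/users/bob#accepts/follows/1"
}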
def handle_incoming(
%{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => _id} = data
%{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => _id} = data,
_options
) do
with actor <- Containment.get_actor(data),
{:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
{:ok, follow_activity} <- get_follow_activity(follow_object, followed),
{:ok, follow_activity} <- Utils.update_follow_state(follow_activity, "reject"),
{:ok, follow_activity} <- Utils.update_follow_state_for_all(follow_activity, "reject"),
%User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]),
{:ok, activity} <-
ActivityPub.reject(%{
@ -551,7 +572,8 @@ def handle_incoming(
end
def handle_incoming(
%{"type" => "Like", "object" => object_id, "actor" => _actor, "id" => id} = data
%{"type" => "Like", "object" => object_id, "actor" => _actor, "id" => id} = data,
_options
) do
with actor <- Containment.get_actor(data),
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
@ -564,7 +586,8 @@ def handle_incoming(
end
def handle_incoming(
%{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data
%{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data,
_options
) do
with actor <- Containment.get_actor(data),
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
@ -579,7 +602,8 @@ def handle_incoming(
def handle_incoming(
%{"type" => "Update", "object" => %{"type" => object_type} = object, "actor" => actor_id} =
data
data,
_options
)
when object_type in ["Person", "Application", "Service", "Organization"] do
with %User{ap_id: ^actor_id} = actor <- User.get_cached_by_ap_id(object["id"]) do
@ -617,7 +641,8 @@ def handle_incoming(
# an error or a tombstone. This would allow us to verify that a deletion actually took
# place.
def handle_incoming(
%{"type" => "Delete", "object" => object_id, "actor" => _actor, "id" => _id} = data
%{"type" => "Delete", "object" => object_id, "actor" => actor, "id" => _id} = data,
_options
) do
object_id = Utils.get_ap_id(object_id)
@ -628,7 +653,30 @@ def handle_incoming(
{:ok, activity} <- ActivityPub.delete(object, false) do
{:ok, activity}
else
_e -> :error
nil ->
case User.get_cached_by_ap_id(object_id) do
%User{ap_id: ^actor} = user ->
{:ok, followers} = User.get_followers(user)
Enum.each(followers, fn follower ->
User.unfollow(follower, user)
end)
{:ok, friends} = User.get_friends(user)
Enum.each(friends, fn followed ->
User.unfollow(user, followed)
end)
User.invalidate_cache(user)
Repo.delete(user)
nil ->
:error
end
_e ->
:error
end
end
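The new fallback branch above covers actor self-deletion: when the Delete's object id resolves to a known user whose ap_id matches the actor, follower and following relationships are severed, the cache is invalidated and the record is removed. A hypothetical incoming payload of that shape (illustrative only):
%{
  "type" => "Delete",
  "actor" => "https://remote.example/users/alice",
  "object" => "https://remote.example/users/alice",
  "id" => "https://remote.example/users/alice#delete"
}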
@ -638,7 +686,8 @@ def handle_incoming(
"object" => %{"type" => "Announce", "object" => object_id},
"actor" => _actor,
"id" => id
} = data
} = data,
_options
) do
with actor <- Containment.get_actor(data),
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
@ -656,7 +705,8 @@ def handle_incoming(
"object" => %{"type" => "Follow", "object" => followed},
"actor" => follower,
"id" => id
} = _data
} = _data,
_options
) do
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
@ -674,7 +724,8 @@ def handle_incoming(
"object" => %{"type" => "Block", "object" => blocked},
"actor" => blocker,
"id" => id
} = _data
} = _data,
_options
) do
with true <- Pleroma.Config.get([:activitypub, :accept_blocks]),
%User{local: true} = blocked <- User.get_cached_by_ap_id(blocked),
@ -688,7 +739,8 @@ def handle_incoming(
end
def handle_incoming(
%{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data
%{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data,
_options
) do
with true <- Pleroma.Config.get([:activitypub, :accept_blocks]),
%User{local: true} = blocked = User.get_cached_by_ap_id(blocked),
@ -708,7 +760,8 @@ def handle_incoming(
"object" => %{"type" => "Like", "object" => object_id},
"actor" => _actor,
"id" => id
} = data
} = data,
_options
) do
with actor <- Containment.get_actor(data),
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
@ -720,10 +773,10 @@ def handle_incoming(
end
end
def handle_incoming(_), do: :error
def handle_incoming(_, _), do: :error
def get_obj_helper(id) do
if object = Object.normalize(id), do: {:ok, object}, else: nil
def get_obj_helper(id, options \\ []) do
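# the second argument asks Object.normalize to fetch the object remotely when it
# is not already known locally; `options` forwards the reply-depth limit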
if object = Object.normalize(id, true, options), do: {:ok, object}, else: nil
end
def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) when is_binary(in_reply_to) do

View file

@ -151,16 +151,18 @@ def get_notified_from_object(object) do
def create_context(context) do
context = context || generate_id("contexts")
changeset = Object.context_mapping(context)
case Repo.insert(changeset) do
{:ok, object} ->
# Ecto has problems accessing the constraint inside the jsonb,
# so we explicitly check for an existing object before insert
object = Object.get_cached_by_ap_id(context)
with true <- is_nil(object),
changeset <- Object.context_mapping(context),
{:ok, inserted_object} <- Repo.insert(changeset) do
inserted_object
else
_ ->
object
# This should be solved by an upsert, but it seems Ecto
# has problems accessing the constraint inside the jsonb.
{:error, _} ->
Object.get_cached_by_ap_id(context)
end
end
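The upsert the comments allude to would need the unique constraint on the jsonb `id` field to be addressable from Ecto; a rough, untested sketch of that alternative (the fragment and the existence of a matching unique index are assumptions):
Repo.insert(changeset,
  on_conflict: :nothing,
  conflict_target: {:unsafe_fragment, "((data->>'id'))"}
)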
@ -168,14 +170,17 @@ def create_context(context) do
Enqueues an activity for federation if it's local
"""
def maybe_federate(%Activity{local: true} = activity) do
priority =
case activity.data["type"] do
"Delete" -> 10
"Create" -> 1
_ -> 5
end
if Pleroma.Config.get!([:instance, :federating]) do
priority =
case activity.data["type"] do
"Delete" -> 10
"Create" -> 1
_ -> 5
end
Pleroma.Web.Federator.publish(activity, priority)
end
Pleroma.Web.Federator.publish(activity, priority)
:ok
end
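`maybe_federate/1` now consults the `federating` flag before enqueueing anything; the setting it reads is simply:
config :pleroma, :instance, federating: true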
@ -376,8 +381,8 @@ def remove_like_from_object(%Activity{data: %{"actor" => actor}}, object) do
@doc """
Updates a follow activity's state (for locked accounts).
"""
def update_follow_state(
%Activity{data: %{"actor" => actor, "object" => object, "state" => "pending"}} = activity,
def update_follow_state_for_all(
%Activity{data: %{"actor" => actor, "object" => object}} = activity,
state
) do
try do

Some files were not shown because too many files have changed in this diff.