Merge branch 'develop' into 'remove-avatar-header'

# Conflicts:
#   CHANGELOG.md
Commit a0c65bbd6c by Sachin Joshi, 2019-06-23 03:25:50 +00:00
237 changed files with 8534 additions and 2226 deletions

.buildpacks Normal file
View file

@ -0,0 +1 @@
https://github.com/hashnuke/heroku-buildpack-elixir

View file

@ -16,6 +16,7 @@ stages:
- build
- test
- deploy
- release
before_script:
- mix local.hex --force
@ -42,6 +43,7 @@ docs-build:
paths:
- priv/static/doc
unit-testing:
stage: test
services:
@ -52,8 +54,7 @@ unit-testing:
- mix deps.get
- mix ecto.create
- mix ecto.migrate
- - mix test --trace --preload-modules
- - mix coveralls
+ - mix coveralls --trace --preload-modules
unit-testing-rum:
stage: test
@ -95,3 +96,150 @@ docs-deploy:
- eval $(ssh-agent -s)
- echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
- rsync -hrvz --delete -e "ssh -p ${SSH_PORT}" priv/static/doc/ "${SSH_USER_HOST_LOCATION}/${CI_COMMIT_REF_NAME}"
review_app:
image: alpine:3.9
stage: deploy
before_script:
- apk update && apk add openssh-client git
when: manual
environment:
name: review/$CI_COMMIT_REF_NAME
url: https://$CI_ENVIRONMENT_SLUG.pleroma.online/
on_stop: stop_review_app
only:
- branches
except:
- master
- develop
script:
- echo "$CI_ENVIRONMENT_SLUG"
- mkdir -p ~/.ssh
- eval $(ssh-agent -s)
- echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
- ssh-keyscan -H "pleroma.online" >> ~/.ssh/known_hosts
- (ssh -t dokku@pleroma.online -- apps:create "$CI_ENVIRONMENT_SLUG") || true
- ssh -t dokku@pleroma.online -- config:set "$CI_ENVIRONMENT_SLUG" APP_NAME="$CI_ENVIRONMENT_SLUG" APP_HOST="$CI_ENVIRONMENT_SLUG.pleroma.online" MIX_ENV=dokku
- (ssh -t dokku@pleroma.online -- postgres:create $(echo $CI_ENVIRONMENT_SLUG | sed -e 's/-/_/g')_db) || true
- (ssh -t dokku@pleroma.online -- postgres:link $(echo $CI_ENVIRONMENT_SLUG | sed -e 's/-/_/g')_db "$CI_ENVIRONMENT_SLUG") || true
- (ssh -t dokku@pleroma.online -- certs:add "$CI_ENVIRONMENT_SLUG" /home/dokku/server.crt /home/dokku/server.key) || true
- git push -f dokku@pleroma.online:$CI_ENVIRONMENT_SLUG $CI_COMMIT_SHA:refs/heads/master
stop_review_app:
image: alpine:3.9
stage: deploy
before_script:
- apk update && apk add openssh-client git
when: manual
environment:
name: review/$CI_COMMIT_REF_NAME
action: stop
script:
- echo "$CI_ENVIRONMENT_SLUG"
- mkdir -p ~/.ssh
- eval $(ssh-agent -s)
- echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
- ssh-keyscan -H "pleroma.online" >> ~/.ssh/known_hosts
- ssh -t dokku@pleroma.online -- --force apps:destroy "$CI_ENVIRONMENT_SLUG"
- ssh -t dokku@pleroma.online -- --force postgres:destroy $(echo $CI_ENVIRONMENT_SLUG | sed -e 's/-/_/g')_db
amd64:
stage: release
# TODO: Replace with upstream image when 1.9.0 comes out
image: rinpatch/elixir:1.9.0-rc.0
only: &release-only
- master@pleroma/pleroma
- develop@pleroma/pleroma
artifacts: &release-artifacts
name: "pleroma-$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA-$CI_JOB_NAME"
paths:
- release/*
# Ideally it would be never for the master branch and with the next commit for develop,
# but GitLab supports neither `only` for artifacts,
# nor setting it to never from .gitlab-ci.yml,
# nor expiring with the next commit
expire_in: 42 yrs
cache: &release-cache
key: $CI_COMMIT_REF_NAME-$CI_JOB_NAME
paths:
- deps
variables: &release-variables
MIX_ENV: prod
before_script: &before-release
- echo "import Mix.Config" > config/prod.secret.exs
- mix local.hex --force
- mix local.rebar --force
script: &release
- mix deps.get --only prod
- mkdir release
- export PLEROMA_BUILD_BRANCH=$CI_COMMIT_REF_NAME
- mix release --path release
amd64-musl:
stage: release
artifacts: *release-artifacts
only: *release-only
# TODO: Replace with upstream image when 1.9.0 comes out
image: rinpatch/elixir:1.9.0-rc.0-alpine
cache: *release-cache
variables: *release-variables
before_script: &before-release-musl
- apk add git gcc g++ musl-dev make
- echo "import Mix.Config" > config/prod.secret.exs
- mix local.hex --force
- mix local.rebar --force
script: *release
arm:
stage: release
artifacts: *release-artifacts
only: *release-only
tags:
- arm32
# TODO: Replace with upstream image when 1.9.0 comes out
image: rinpatch/elixir:1.9.0-rc.0-arm
cache: *release-cache
variables: *release-variables
before_script: *before-release
script: *release
arm-musl:
stage: release
artifacts: *release-artifacts
only: *release-only
tags:
- arm32
# TODO: Replace with upstream image when 1.9.0 comes out
image: rinpatch/elixir:1.9.0-rc.0-arm-alpine
cache: *release-cache
variables: *release-variables
before_script: *before-release-musl
script: *release
arm64:
stage: release
artifacts: *release-artifacts
only: *release-only
tags:
- arm
# TODO: Replace with upstream image when 1.9.0 comes out
image: rinpatch/elixir:1.9.0-rc.0-arm64
cache: *release-cache
variables: *release-variables
before_script: *before-release
script: *release
arm64-musl:
stage: release
artifacts: *release-artifacts
only: *release-only
tags:
- arm
# TODO: Replace with upstream image when 1.9.0 comes out
image: rinpatch/elixir:1.9.0-rc.0-arm64-alpine
cache: *release-cache
variables: *release-variables
before_script: *before-release-musl
script: *release

View file

@ -4,11 +4,16 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [unreleased]
### Security
- Mastodon API: Fix display names not being sanitized
### Added
- Add a generic settings store for frontends / clients to use.
- Explicit addressing option for posting.
- Optional SSH access mode. (Needs `erlang-ssh` package on some distributions).
- [MongooseIM](https://github.com/esl/MongooseIM) http authentication support.
- LDAP authentication
- External OAuth provider authentication
- Support for building a release using [`mix release`](https://hexdocs.pm/mix/master/Mix.Tasks.Release.html)
- A [job queue](https://git.pleroma.social/pleroma/pleroma_job_queue) for federation, emails, web push, etc.
- [Prometheus](https://prometheus.io/) metrics
- Support for Mastodon's remote interaction
@ -16,13 +21,18 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Mix Tasks: `mix pleroma.database remove_embedded_objects`
- Mix Tasks: `mix pleroma.database update_users_following_followers_counts`
- Mix Tasks: `mix pleroma.user toggle_confirmed`
- Mix Tasks: `mix pleroma.config migrate_to_db`
- Mix Tasks: `mix pleroma.config migrate_from_db`
- Federation: Support for `Question` and `Answer` objects
- Federation: Support for reports
- Configuration: `poll_limits` option
- Configuration: `safe_dm_mentions` option
- Configuration: `link_name` option
- Configuration: `fetch_initial_posts` option
- Configuration: `notify_email` option
- Configuration: Media proxy `whitelist` option
- Configuration: `report_uri` option
- Configuration: `limit_to_local_content` option
- Pleroma API: User subscriptions
- Pleroma API: Healthcheck endpoint
- Pleroma API: `/api/v1/pleroma/mascot` per-user frontend mascot configuration endpoints
@ -31,12 +41,16 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Admin API: added filters (role, tags, email, name) for users endpoint
- Admin API: Endpoints for managing reports
- Admin API: Endpoints for deleting and changing the scope of individual reported statuses
- Admin API: Endpoints to view and change config settings.
- AdminFE: initial release with basic user management accessible at /pleroma/admin/
- Mastodon API: Add chat token to `verify_credentials` response
- Mastodon API: Add background image setting to `update_credentials`
- Mastodon API: [Scheduled statuses](https://docs.joinmastodon.org/api/rest/scheduled-statuses/)
- Mastodon API: `/api/v1/notifications/destroy_multiple` (glitch-soc extension)
- Mastodon API: `/api/v1/pleroma/accounts/:id/favourites` (API extension)
- Mastodon API: [Reports](https://docs.joinmastodon.org/api/rest/reports/)
- Mastodon API: `POST /api/v1/accounts` (account creation API)
- Mastodon API: [Polls](https://docs.joinmastodon.org/api/rest/polls/)
- ActivityPub C2S: OAuth endpoints
- Metadata: RelMe provider
- OAuth: added support for refresh tokens
@ -46,9 +60,14 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- MRF: Support for rejecting reports from specific instances (`mrf_simple`)
- MRF: Support for stripping avatars and banner images from specific instances (`mrf_simple`)
- Ability to reset avatar, profile banner and background
- MRF: Support for running subchains.
- Configuration: `skip_thread_containment` option
- Configuration: `rate_limit` option. See `Pleroma.Plugs.RateLimiter` documentation for details.
- MRF: Support for filtering out likely spam messages by rejecting posts from new users that contain links.
### Changed
- **Breaking:** Configuration: move from Pleroma.Mailer to Pleroma.Emails.Mailer
- Thread containment / test for complete visibility will be skipped by default.
- Enforcement of OAuth scopes
- Add multiple use/time expiring invite token
- Restyled OAuth pages to fit with Pleroma's default theme
@ -57,6 +76,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Federation: Expand the audience of delete activities to all recipients of the deleted object
- Federation: Removed `inReplyToStatusId` from objects
- Configuration: Dedupe enabled by default
- Configuration: Default log level in `prod` environment is now set to `warn`
- Configuration: Added `extra_cookie_attrs` for setting non-standard cookie attributes. Defaults to ["SameSite=Lax"] so that remote follows work.
- Timelines: Messages involving people you have blocked will be excluded from the timeline in all cases instead of just repeats.
- Admin API: Move the user related API to `api/pleroma/admin/users`
@ -84,6 +104,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Respond with a 404 Not implemented JSON error message when requested API is not implemented
### Fixed
- Follow requests don't get 'stuck' anymore.
- Added an FTS index on objects. Running `vacuum analyze` and setting a larger `work_mem` is recommended.
- Followers counter not being updated when a follower is blocked
- Deactivated users being able to request an access token
@ -113,11 +134,17 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Mastodon API: Correct `reblogged`, `favourited`, and `bookmarked` values in the reblog status JSON
- Mastodon API: Exposing default scope of the user to anyone
- Mastodon API: Make `irreversible` field default to `false` [`POST /api/v1/filters`]
- Mastodon API: Replace missing non-nullable Card attributes with empty strings
- User-Agent is now sent correctly for all HTTP requests.
- MRF: Simple policy now properly delists imported or relayed statuses
## Removed
- Configuration: `config :pleroma, :fe` in favor of the more flexible `config :pleroma, :frontend_configurations`
## [0.9.99999] - 2019-05-31
### Security
- Mastodon API: Fix lists leaking private posts
## [0.9.9999] - 2019-04-05
### Security
- Mastodon API: Fix content warnings skipping HTML sanitization

Procfile Normal file
View file

@ -0,0 +1,2 @@
web: mix phx.server
release: mix ecto.migrate

View file

@ -184,9 +184,6 @@
"application/ld+json" => ["activity+json"] "application/ld+json" => ["activity+json"]
} }
config :pleroma, :websub, Pleroma.Web.Websub
config :pleroma, :ostatus, Pleroma.Web.OStatus
config :pleroma, :httpoison, Pleroma.HTTP
config :tesla, adapter: Tesla.Adapter.Hackney config :tesla, adapter: Tesla.Adapter.Hackney
# Configures http settings, upstream proxy etc. # Configures http settings, upstream proxy etc.
@ -211,6 +208,12 @@
avatar_upload_limit: 2_000_000,
background_upload_limit: 4_000_000,
banner_upload_limit: 4_000_000,
poll_limits: %{
max_options: 20,
max_option_chars: 200,
min_expiration: 0,
max_expiration: 365 * 24 * 60 * 60
},
registrations_open: true,
federating: true,
federation_reachability_timeout_days: 7,
@ -240,9 +243,10 @@
max_report_comment_size: 1000,
safe_dm_mentions: false,
healthcheck: false,
remote_post_retention_days: 90,
skip_thread_containment: true,
limit_to_local_content: :unauthenticated,
dynamic_configuration: false
- config :pleroma, :app_account_creation, enabled: true, max_requests: 25, interval: 1800
config :pleroma, :markup,
# XXX - unfortunately, inline images must be enabled by default right now, because
@ -323,6 +327,8 @@
federated_timeline_removal: [],
replace: []
config :pleroma, :mrf_subchain, match_actor: %{}
config :pleroma, :rich_media, enabled: true
config :pleroma, :media_proxy,
@ -355,8 +361,8 @@
third_party_engine:
"http://vinayaka.distsn.org/cgi-bin/vinayaka-user-match-suggestions-api.cgi?{{host}}+{{user}}",
timeout: 300_000,
- limit: 23,
- web: "https://vinayaka.distsn.org/?{{host}}+{{user}}"
+ limit: 40,
+ web: "https://vinayaka.distsn.org"
config :pleroma, :http_security,
enabled: true,
@ -436,6 +442,8 @@
opts: [
scheme: true,
extra: true,
# TODO: Set to :no_scheme when it works properly
validate_tld: true,
class: false,
strip_prefix: false,
new_window: false,
@ -456,7 +464,11 @@
config :esshd,
enabled: false
- oauth_consumer_strategies = String.split(System.get_env("OAUTH_CONSUMER_STRATEGIES") || "")
+ oauth_consumer_strategies =
+   System.get_env("OAUTH_CONSUMER_STRATEGIES")
+   |> to_string()
+   |> String.split()
+   |> Enum.map(&hd(String.split(&1, ":")))
ueberauth_providers =
for strategy <- oauth_consumer_strategies do
@ -489,9 +501,15 @@
config :pleroma, :database, rum_enabled: false
config :pleroma, :env, Mix.env()
config :http_signatures,
adapter: Pleroma.Signature
config :pleroma, :rate_limit,
search: [{1000, 10}, {1000, 30}],
app_account_creation: {1_800_000, 25}
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"

View file

@ -59,3 +59,6 @@
"!!! RUNNING IN LOCALHOST DEV MODE! !!!\nFEDERATION WON'T WORK UNTIL YOU CONFIGURE A dev.secret.exs" "!!! RUNNING IN LOCALHOST DEV MODE! !!!\nFEDERATION WON'T WORK UNTIL YOU CONFIGURE A dev.secret.exs"
) )
end end
if File.exists?("./config/dev.exported_from_db.secret.exs"),
do: import_config("dev.exported_from_db.secret.exs")

config/dokku.exs Normal file
View file

@ -0,0 +1,25 @@
use Mix.Config
config :pleroma, Pleroma.Web.Endpoint,
http: [
port: String.to_integer(System.get_env("PORT") || "4000"),
protocol_options: [max_request_line_length: 8192, max_header_value_length: 8192]
],
protocol: "http",
secure_cookie_flag: false,
url: [host: System.get_env("APP_HOST"), scheme: "https", port: 443],
secret_key_base: "+S+ULgf7+N37c/lc9K66SMphnjQIRGklTu0BRr2vLm2ZzvK0Z6OH/PE77wlUNtvP"
database_url =
System.get_env("DATABASE_URL") ||
raise """
environment variable DATABASE_URL is missing.
For example: ecto://USER:PASS@HOST/DATABASE
"""
config :pleroma, Pleroma.Repo,
# ssl: true,
url: database_url,
pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")
config :pleroma, :instance, name: "#{System.get_env("APP_NAME")} CI Instance"

View file

@ -17,8 +17,10 @@
http: [port: 4000],
protocol: "http"
config :phoenix, serve_endpoints: true
# Do not print debug messages in production
- config :logger, level: :info
+ config :logger, level: :warn
# ## SSL Support
#
@ -61,3 +63,6 @@
# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"
if File.exists?("./config/prod.exported_from_db.secret.exs"),
do: import_config("prod.exported_from_db.secret.exs")

config/releases.exs Normal file
View file

@ -0,0 +1,19 @@
import Config
config :pleroma, :instance, static_dir: "/var/lib/pleroma/static"
config :pleroma, Pleroma.Uploaders.Local, uploads: "/var/lib/pleroma/uploads"
config_path = System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs"
if File.exists?(config_path) do
import_config config_path
else
warning = [
IO.ANSI.red(),
IO.ANSI.bright(),
"!!! #{config_path} not found! Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file",
IO.ANSI.reset()
]
IO.puts(warning)
end

View file

@ -17,6 +17,8 @@
# Print only warnings and errors during test
config :logger, level: :warn
config :pleroma, :auth, oauth_consumer_strategies: []
config :pleroma, Pleroma.Upload, filters: [], link_name: false
config :pleroma, Pleroma.Uploaders.Local, uploads: "test/uploads"
@ -25,7 +27,8 @@
config :pleroma, :instance,
email: "admin@example.com",
notify_email: "noreply@example.com",
skip_thread_containment: false
# Configure your database
config :pleroma, Pleroma.Repo,
@ -39,8 +42,6 @@
# Reduce hash rounds for testing
config :pbkdf2_elixir, rounds: 1
config :pleroma, :websub, Pleroma.Web.WebsubMock
config :pleroma, :ostatus, Pleroma.Web.OStatusMock
config :tesla, adapter: Tesla.Mock
config :pleroma, :rich_media, enabled: false
@ -59,7 +60,7 @@
total_user_limit: 3,
enabled: false
- config :pleroma, :app_account_creation, max_requests: 5
+ config :pleroma, :rate_limit, app_account_creation: {10_000, 5}
config :pleroma, :http_security, report_uri: "https://endpoint.com"

View file

@ -557,3 +557,83 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
- 403 Forbidden `{"error": "error_msg"}`
- 404 Not Found `"Not found"`
- On success: 200 OK `{}`
## `/api/pleroma/admin/config`
### List config settings
- Method `GET`
- Params: none
- Response:
```json
{
configs: [
{
"key": string,
"value": string or {} or []
}
]
}
```
## `/api/pleroma/admin/config`
### Update config settings
Module names can be passed as strings starting with `Pleroma`, e.g. `"Pleroma.Upload"`.
Atom and boolean values can be passed with a leading `:`, e.g. `":true"`, `":upload"`.
Integers are passed with an `i:` prefix, e.g. `"i:150"`.
Compile-time settings (require an instance reboot):
- all settings under these keys:
- `:hackney_pools`
- `:chat`
- `Pleroma.Web.Endpoint`
- `Pleroma.Repo`
- specific keys within these settings:
- `Pleroma.Captcha` -> `:seconds_valid`
- `Pleroma.Upload` -> `:proxy_remote`
- `:instance` -> `:upload_limit`
- Method `POST`
- Params:
- `configs` => [
- `key` (string)
- `value` (string, [], {})
- `delete` = true (optional, if parameter must be deleted)
]
- Request (example):
```json
{
configs: [
{
"key": "Pleroma.Upload",
"value": {
"uploader": "Pleroma.Uploaders.Local",
"filters": ["Pleroma.Upload.Filter.Dedupe"],
"link_name": ":true",
"proxy_remote": ":false",
"proxy_opts": {
"redirect_on_failure": ":false",
"max_body_length": "i:1048576",
"http": {
"follow_redirect": ":true",
"pool": ":upload"
}
}
}
}
]
}
```
- Response:
```json
{
configs: [
{
"key": string,
"value": string or {} or []
}
]
}
```

View file

@ -43,6 +43,8 @@ Has these additional fields under the `pleroma` object:
- `confirmation_pending`: boolean, true if a new user account is waiting on email confirmation to be activated
- `hide_followers`: boolean, true when the user has follower hiding enabled
- `hide_follows`: boolean, true when the user has follow hiding enabled
- `settings_store`: A generic map of settings for frontends. Opaque to the backend. Only returned in `verify_credentials` and `update_credentials`
- `chat_token`: The token needed for Pleroma chat. Only returned in `verify_credentials`
### Source
@ -69,6 +71,7 @@ Additional parameters can be added to the JSON body/Form data:
- `preview`: boolean, if set to `true` the post won't actually be posted, but the status entity would still be rendered back. This could be useful for previewing rich text/custom emoji, for example.
- `content_type`: string, contains the MIME type of the status; it is transformed into HTML by the backend. You can get the list of the supported MIME types with the nodeinfo endpoint.
- `to`: A list of nicknames (like `lain@soykaf.club` or `lain` on the local server) that will be used to determine who is going to be addressed by this post. Using this will disable the implicit addressing by mentioned names in the `status` body; only the people in the `to` list will be addressed. The normal rules for post visibility are not affected by this and will still apply.
## PATCH `/api/v1/update_credentials`
@ -80,6 +83,16 @@ Additional parameters can be added to the JSON body/Form data:
- `hide_favorites` - if true, user's favorites timeline will be hidden
- `show_role` - if true, user's role (e.g admin, moderator) will be exposed to anyone in the API
- `default_scope` - the scope returned under `privacy` key in Source subentity
- `pleroma_settings_store` - Opaque user settings to be saved on the backend.
- `skip_thread_containment` - if true, skip filtering out broken threads
- `pleroma_background_image` - sets the background image of the user.
### Pleroma Settings Store
Pleroma has a mechanism that allows frontends to save blobs of JSON for each user on the backend. This can be used to save frontend-specific settings for a user that the backend does not need to know about.
The parameter should have the form `{frontend_name: {...}}`, with `frontend_name` identifying your type of client, e.g. `pleroma_fe`. It will overwrite everything under this property, but will not overwrite other frontends' settings.
This information is returned in the `verify_credentials` endpoint.
## Authentication

View file

@ -126,20 +126,6 @@ Request parameters can be passed via [query strings](https://en.wikipedia.org/wi
## `/api/pleroma/admin/`
See [Admin-API](Admin-API.md)
## `/api/v1/pleroma/flavour/:flavour`
* Method `POST`
* Authentication: required
* Response: JSON string. Returns the user flavour or the default one on success, otherwise returns `{"error": "error_msg"}`
* Example response: "glitch"
* Note: This is intended to be used only by mastofe
## `/api/v1/pleroma/flavour`
* Method `GET`
* Authentication: required
* Response: JSON string. Returns the user flavour or the default one.
* Example response: "glitch"
* Note: This is intended to be used only by mastofe
## `/api/pleroma/notifications/read`
### Mark a single notification as read
* Method `POST`

View file

@ -49,13 +49,6 @@ Feel free to contact us to be added to this list!
- Platforms: iOS, Android
- Features: No Streaming
### Tootdon
- Homepage: <http://tootdon.club/>, <http://blog.mastodon-tootdon.com/>
- Source Code: ???
- Contact: [@tootdon@mstdn.jp](https://mstdn.jp/users/tootdon)
- Platforms: Android, iOS
- Features: No Streaming
### Tusky
- Homepage: <https://tuskyapp.github.io/>
- Source Code: <https://github.com/tuskyapp/Tusky>

View file

@ -71,6 +71,11 @@ config :pleroma, Pleroma.Emails.Mailer,
* `avatar_upload_limit`: File size limit of users profile avatars
* `background_upload_limit`: File size limit of users profile backgrounds
* `banner_upload_limit`: File size limit of users profile banners
* `poll_limits`: A map with poll limits for **local** polls
* `max_options`: Maximum number of options
* `max_option_chars`: Maximum number of characters per option
* `min_expiration`: Minimum expiration time (in seconds)
* `max_expiration`: Maximum expiration time (in seconds)
* `registrations_open`: Enable registrations for anyone, invitations can be enabled when false.
* `invites_enabled`: Enable user invitations for admins (depends on `registrations_open: false`).
* `account_activation_required`: Require users to confirm their emails before signing in.
@ -81,8 +86,11 @@ config :pleroma, Pleroma.Emails.Mailer,
* `Pleroma.Web.ActivityPub.MRF.NoOpPolicy`: Doesn't modify activities (default)
* `Pleroma.Web.ActivityPub.MRF.DropPolicy`: Drops all activities. It generally doesn't make sense to use in production
* `Pleroma.Web.ActivityPub.MRF.SimplePolicy`: Restrict the visibility of activities from certain instances (See ``:mrf_simple`` section)
* `Pleroma.Web.ActivityPub.MRF.TagPolicy`: Applies policies to individual users based on tags, which can be set using pleroma-fe/admin-fe/any other app that supports Pleroma Admin API. For example it allows marking posts from individual users nsfw (sensitive)
* `Pleroma.Web.ActivityPub.MRF.SubchainPolicy`: Selectively runs other MRF policies when messages match (see ``:mrf_subchain`` section)
* `Pleroma.Web.ActivityPub.MRF.RejectNonPublic`: Drops posts with non-public visibility settings (See ``:mrf_rejectnonpublic`` section)
* `Pleroma.Web.ActivityPub.MRF.EnsureRePrepended`: Rewrites posts to ensure that replies to posts with subjects do not have an identical subject and instead begin with re:.
* `Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy`: Rejects posts from likely spambots by rejecting posts from new users that contain links.
* `public`: Makes the client API available in authenticated mode only, except for user profiles. Useful for disabling the Local Timeline and The Whole Known Network.
* `quarantined_instances`: List of ActivityPub instances where private (DMs, followers-only) activities will not be sent.
* `managed_config`: Whether the config for pleroma-fe is configured in this config or in ``static/config.json``
@ -102,15 +110,13 @@ config :pleroma, Pleroma.Emails.Mailer,
* `welcome_message`: A message that will be sent to newly registered users as a direct message.
* `welcome_user_nickname`: The nickname of the local user that sends the welcome message.
* `max_report_comment_size`: The maximum size of the report comment (Default: `1000`)
* `safe_dm_mentions`: If set to true, only mentions at the beginning of a post will be used to address people in direct messages. This is to prevent accidental mentioning of people when talking about them (e.g. "@friend hey i really don't like @enemy"). Default: `false`.
* `healthcheck`: If set to true, system data will be shown on ``/api/pleroma/healthcheck``.
* `remote_post_retention_days`: The default amount of days to retain remote posts when pruning the database.
* `skip_thread_containment`: Skip filtering out broken threads. The default is `false`.
* `limit_to_local_content`: Limit unauthenticated users to search for local statuses and users only. Possible values: `:unauthenticated`, `:all` and `false`. The default is `:unauthenticated`.
* `dynamic_configuration`: Allow transferring configuration to DB with the subsequent customization from Admin api.
## :app_account_creation
REST API for creating an account settings
* `enabled`: Enable/disable registration
* `max_requests`: Number of requests allowed for creating accounts
* `interval`: Interval for restricting requests for one ip (seconds)
## :logger
* `backends`: `:console` is used to send logs to stdout, `{ExSyslogger, :ex_syslogger}` to log to syslog, and `Quack.Logger` to log to Slack
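For instance, a minimal sketch that combines two of these backends (assuming the `ex_syslogger` dependency is available; values are illustrative):

```
config :logger,
  # Log both to stdout and to syslog.
  backends: [:console, {ExSyslogger, :ex_syslogger}],
  level: :warn
```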
@ -224,6 +230,21 @@ relates to mascots on the mastodon frontend
* `avatar_removal`: List of instances to strip avatars from
* `banner_removal`: List of instances to strip banners from
## :mrf_subchain
This policy processes messages through an alternate pipeline when a given message matches certain criteria.
All criteria are configured as a map of regular expressions to lists of policy modules.
* `match_actor`: Matches a series of regular expressions against the actor field.
Example:
```
config :pleroma, :mrf_subchain,
match_actor: %{
~r/https:\/\/example.com/s => [Pleroma.Web.ActivityPub.MRF.DropPolicy]
}
```
## :mrf_rejectnonpublic
* `allow_followersonly`: whether to allow followers-only posts
* `allow_direct`: whether to allow direct messages
@ -492,7 +513,7 @@ Authentication / authorization settings.
* `auth_template`: authentication form template. By default it's `show.html` which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/show.html.eex`.
* `oauth_consumer_template`: OAuth consumer mode authentication form template. By default it's `consumer.html` which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/consumer.html.eex`.
* `oauth_consumer_strategies`: the list of enabled OAuth consumer strategies; by default it's set by the `OAUTH_CONSUMER_STRATEGIES` environment variable. Each entry in this space-delimited string should be of format `<strategy>` or `<strategy>:<dependency>` (e.g. `twitter` or `keycloak:ueberauth_keycloak_strategy` in case the dependency is named differently than `ueberauth_<strategy>`).
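As a sketch of how these entries are interpreted (this mirrors the parsing added to `config/config.exs` in this commit; the variable value below is only an example, not a default):

```
# Example value only; normally this comes from the environment, not from code.
env_value = "twitter keycloak:ueberauth_keycloak_strategy"

# Split the space-delimited string and drop the optional ":<dependency>" suffix.
strategies =
  env_value
  |> String.split()
  |> Enum.map(&hd(String.split(&1, ":")))

IO.inspect(strategies)
# => ["twitter", "keycloak"]
```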
## OAuth consumer mode
@ -545,6 +566,24 @@ config :ueberauth, Ueberauth,
providers: [
microsoft: {Ueberauth.Strategy.Microsoft, [callback_params: []]}
]
# Keycloak
# Note: make sure to add `keycloak:ueberauth_keycloak_strategy` entry to `OAUTH_CONSUMER_STRATEGIES` environment variable
keycloak_url = "https://publicly-reachable-keycloak-instance.org:8080"
config :ueberauth, Ueberauth.Strategy.Keycloak.OAuth,
client_id: System.get_env("KEYCLOAK_CLIENT_ID"),
client_secret: System.get_env("KEYCLOAK_CLIENT_SECRET"),
site: keycloak_url,
authorize_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/auth",
token_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/token",
userinfo_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/userinfo",
token_method: :post
config :ueberauth, Ueberauth,
providers: [
keycloak: {Ueberauth.Strategy.Keycloak, [uid_field: :email]}
]
```
## OAuth 2.0 provider - :oauth2
@ -575,3 +614,14 @@ To enable them, both the `rum_enabled` flag has to be set and the following spec
`mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`
This will probably take a long time.
## :rate_limit
A keyword list of rate limiters where a key is a limiter name and value is the limiter configuration. The basic configuration is a tuple where:
* The first element: `scale` (Integer). The time scale in milliseconds.
* The second element: `limit` (Integer). How many requests to limit in the time scale provided.
It is also possible to have different limits for unauthenticated and authenticated users: the keyword value must be a list of two tuples where the first one is a config for unauthenticated users and the second one is for authenticated.
See [`Pleroma.Plugs.RateLimiter`](Pleroma.Plugs.RateLimiter.html) documentation for examples.
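A minimal sketch, mirroring the defaults added to `config/config.exs` in this commit:

```
config :pleroma, :rate_limit,
  # A single tuple applies to every client: 25 requests per 1_800_000 ms (30 minutes).
  app_account_creation: {1_800_000, 25},
  # A two-element list sets separate limits: 10 requests per second for
  # unauthenticated users, 30 per second for authenticated users.
  search: [{1000, 10}, {1000, 30}]
```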

View file

@ -9,8 +9,8 @@ config :pleroma, :suggestions,
third_party_engine:
"http://vinayaka.distsn.org/cgi-bin/vinayaka-user-match-suggestions-api.cgi?{{host}}+{{user}}",
timeout: 300_000,
- limit: 23,
- web: "https://vinayaka.distsn.org/?{{host}}+{{user}}"
+ limit: 40,
+ web: "https://vinayaka.distsn.org"
```
@ -26,6 +26,6 @@ config :pleroma, :suggestions,
third_party_engine:
"http://vinayaka.distsn.org/cgi-bin/vinayaka-user-new-suggestions-api.cgi?{{host}}+{{user}}",
timeout: 60_000,
- limit: 23,
+ limit: 40,
web: "https://vinayaka.distsn.org/user-new.html"
```

View file

@ -87,7 +87,7 @@ sudo adduser -S -s /bin/false -h /opt/pleroma -H pleroma
```shell
sudo mkdir -p /opt/pleroma
sudo chown -R pleroma:pleroma /opt/pleroma
- sudo -Hu pleroma git clone https://git.pleroma.social/pleroma/pleroma /opt/pleroma
+ sudo -Hu pleroma git clone -b master https://git.pleroma.social/pleroma/pleroma /opt/pleroma
```
* Change to the new directory:

View file

@ -66,7 +66,7 @@ sudo useradd -r -s /bin/false -m -d /var/lib/pleroma -U pleroma
```shell
sudo mkdir -p /opt/pleroma
sudo chown -R pleroma:pleroma /opt/pleroma
- sudo -Hu pleroma git clone https://git.pleroma.social/pleroma/pleroma /opt/pleroma
+ sudo -Hu pleroma git clone -b master https://git.pleroma.social/pleroma/pleroma /opt/pleroma
```
* Change to the new directory:

View file

@ -143,7 +143,7 @@ sudo useradd -r -s /bin/false -m -d /var/lib/pleroma -U pleroma
```shell
sudo mkdir -p /opt/pleroma
sudo chown -R pleroma:pleroma /opt/pleroma
- sudo -Hu pleroma git clone https://git.pleroma.social/pleroma/pleroma /opt/pleroma
+ sudo -Hu pleroma git clone -b master https://git.pleroma.social/pleroma/pleroma /opt/pleroma
```
* Change to the new directory:

View file

@ -68,7 +68,7 @@ sudo useradd -r -s /bin/false -m -d /var/lib/pleroma -U pleroma
```shell
sudo mkdir -p /opt/pleroma
sudo chown -R pleroma:pleroma /opt/pleroma
- sudo -Hu pleroma git clone https://git.pleroma.social/pleroma/pleroma /opt/pleroma
+ sudo -Hu pleroma git clone -b master https://git.pleroma.social/pleroma/pleroma /opt/pleroma
```
* Change to the new directory:

View file

@ -69,7 +69,7 @@ cd ~
* Clone the Git repository:
```
- git clone https://git.pleroma.social/pleroma/pleroma
+ git clone -b master https://git.pleroma.social/pleroma/pleroma
```
* Change to the new directory:

View file

@ -106,7 +106,7 @@ It is highly recommended you use your own fork for the `https://path/to/repo` pa
```shell
pleroma$ cd ~
- pleroma$ git clone https://path/to/repo
+ pleroma$ git clone -b master https://path/to/repo
```
* Change to the new directory:

View file

@ -58,7 +58,7 @@ Clone the repository:
```
$ cd /home/pleroma
- $ git clone https://git.pleroma.social/pleroma/pleroma.git
+ $ git clone -b master https://git.pleroma.social/pleroma/pleroma.git
```
Configure Pleroma. Note that you need a domain name at this point:

View file

@ -29,7 +29,7 @@ This creates a "pleroma" login class and sets higher values than default for dat
Create the \_pleroma user, assign it the pleroma login class and create its home directory (/home/\_pleroma/): `useradd -m -L pleroma _pleroma`
#### Clone pleroma's directory
Enter a shell as the \_pleroma user. As root, run `su _pleroma -;cd`. Then clone the repository with `git clone -b master https://git.pleroma.social/pleroma/pleroma.git`. Pleroma is now installed in /home/\_pleroma/pleroma/, it will be configured and started at the end of this guide.
#### Postgresql
Start a shell as the \_postgresql user (as root run `su _postgresql -`), then run the `initdb` command to initialize postgresql:

View file

@ -44,7 +44,7 @@ Vaihda pleroma-käyttäjään ja mene kotihakemistoosi:
Download the Pleroma source code:
`$ git clone -b master https://git.pleroma.social/pleroma/pleroma.git`
`$ cd pleroma`

elixir_buildpack.config Normal file
View file

@ -0,0 +1,2 @@
elixir_version=1.8.2
erlang_version=21.3.7

View file

@ -10,7 +10,9 @@ example.tld {
gzip
# this is explicitly IPv4 since Pleroma.Web.Endpoint binds on IPv4 only
# and `localhost.` resolves to [::0] on some systems: see issue #930
- proxy / localhost:4000 {
+ proxy / 127.0.0.1:4000 {
websocket
transparent
}

View file

@ -58,8 +58,10 @@ CustomLog ${APACHE_LOG_DIR}/access.log combined
RewriteRule /(.*) ws://localhost:4000/$1 [P,L]
ProxyRequests off
# this is explicitly IPv4 since Pleroma.Web.Endpoint binds on IPv4 only
# and `localhost.` resolves to [::0] on some systems: see issue #930
- ProxyPass / http://localhost:4000/
- ProxyPassReverse / http://localhost:4000/
+ ProxyPass / http://127.0.0.1:4000/
+ ProxyPassReverse / http://127.0.0.1:4000/
RequestHeader set Host ${servername}
ProxyPreserveHost On

View file

@ -0,0 +1,932 @@
%%%
%%% ejabberd configuration file
%%%
%%%'
%%% The parameters used in this configuration file are explained in more detail
%%% in the ejabberd Installation and Operation Guide.
%%% Please consult the Guide in case of doubts, it is included with
%%% your copy of ejabberd, and is also available online at
%%% http://www.process-one.net/en/ejabberd/docs/
%%% This configuration file contains Erlang terms.
%%% In case you want to understand the syntax, here are the concepts:
%%%
%%% - The character to comment a line is %
%%%
%%% - Each term ends in a dot, for example:
%%% override_global.
%%%
%%% - A tuple has a fixed definition, its elements are
%%% enclosed in {}, and separated with commas:
%%% {loglevel, 4}.
%%%
%%% - A list can have as many elements as you want,
%%% and is enclosed in [], for example:
%%% [http_poll, web_admin, tls]
%%%
%%% Pay attention that list elements are delimited with commas,
%%% but no comma is allowed after the last list element. This will
%%% give a syntax error unlike in more lenient languages (e.g. Python).
%%%
%%% - A keyword of ejabberd is a word in lowercase.
%%% Strings are enclosed in "" and can contain spaces, dots, ...
%%% {language, "en"}.
%%% {ldap_rootdn, "dc=example,dc=com"}.
%%%
%%% - This term includes a tuple, a keyword, a list, and two strings:
%%% {hosts, ["jabber.example.net", "im.example.com"]}.
%%%
%%% - This config is preprocessed during release generation by a tool which
%%% interprets double curly braces as substitution markers, so avoid this
%%% syntax in this file (though it's valid Erlang).
%%%
%%% So this is OK (though arguably looks quite ugly):
%%% { {s2s_addr, "example-host.net"}, {127,0,0,1} }.
%%%
%%% And I can't give an example of what's not OK exactly because
%%% of this rule.
%%%
%%%. =======================
%%%' OVERRIDE STORED OPTIONS
%%
%% Override the old values stored in the database.
%%
%%
%% Override global options (shared by all ejabberd nodes in a cluster).
%%
%%override_global.
%%
%% Override local options (specific for this particular ejabberd node).
%%
%%override_local.
%%
%% Remove the Access Control Lists before new ones are added.
%%
%%override_acls.
%%%. =========
%%%' DEBUGGING
%%
%% loglevel: Verbosity of log files generated by ejabberd.
%% 0: No ejabberd log at all (not recommended)
%% 1: Critical
%% 2: Error
%% 3: Warning
%% 4: Info
%% 5: Debug
%%
{loglevel, 3}.
%%%. ================
%%%' SERVED HOSTNAMES
%%
%% hosts: Domains served by ejabberd.
%% You can define one or several, for example:
%% {hosts, ["example.net", "example.com", "example.org"]}.
%%
{hosts, ["pleroma.soykaf.com"] }.
%%
%% route_subdomains: Delegate subdomains to other XMPP servers.
%% For example, if this ejabberd serves example.org and you want
%% to allow communication with an XMPP server called im.example.org.
%%
%%{route_subdomains, s2s}.
%%%. ===============
%%%' LISTENING PORTS
%%
%% listen: The ports ejabberd will listen on, which service each is handled
%% by and what options to start it with.
%%
{listen,
[
%% BOSH and WS endpoints over HTTP
{ 5280, ejabberd_cowboy, [
{num_acceptors, 10},
{transport_options, [{max_connections, 1024}]},
{modules, [
{"_", "/http-bind", mod_bosh},
{"_", "/ws-xmpp", mod_websockets, [{ejabberd_service, [
{access, all},
{shaper_rule, fast},
{ip, {127, 0, 0, 1}},
{password, "secret"}]}
%% Uncomment to enable connection dropping or/and server-side pings
%{timeout, 600000}, {ping_rate, 2000}
]}
%% Uncomment to serve static files
%{"_", "/static/[...]", cowboy_static,
% {dir, "/var/www", [{mimetypes, cow_mimetypes, all}]}
%},
%% Example usage of mod_revproxy
%% {"_", "/[...]", mod_revproxy, [{timeout, 5000},
%% % time limit for upstream to respond
%% {body_length, 8000000},
%% % maximum body size (may be infinity)
%% {custom_headers, [{<<"header">>,<<"value">>}]}
%% % list of extra headers that are sent to upstream
%% ]}
%% Example usage of mod_cowboy
%% {"_", "/[...]", mod_cowboy, [{http, mod_revproxy,
%% [{timeout, 5000},
%% % time limit for upstream to respond
%% {body_length, 8000000},
%% % maximum body size (may be infinity)
%% {custom_headers, [{<<"header">>,<<"value">>}]}
%% % list of extra headers that are sent to upstream
%% ]},
%% {ws, xmpp, mod_websockets}
%% ]}
]}
]},
%% BOSH and WS endpoints over HTTPS
{ 5285, ejabberd_cowboy, [
{num_acceptors, 10},
{transport_options, [{max_connections, 1024}]},
{ssl, [{certfile, "priv/ssl/fullchain.pem"}, {keyfile, "priv/ssl/privkey.pem"}, {password, ""}]},
{modules, [
{"_", "/http-bind", mod_bosh},
{"_", "/ws-xmpp", mod_websockets, [
%% Uncomment to enable connection dropping or/and server-side pings
%{timeout, 600000}, {ping_rate, 60000}
]}
%% Uncomment to serve static files
%{"_", "/static/[...]", cowboy_static,
% {dir, "/var/www", [{mimetypes, cow_mimetypes, all}]}
%},
]}
]},
%% MongooseIM HTTP API it's important to start it on localhost
%% or some private interface only (not accessible from the outside)
%% At least start it on different port which will be hidden behind firewall
{ {8088, "127.0.0.1"} , ejabberd_cowboy, [
{num_acceptors, 10},
{transport_options, [{max_connections, 1024}]},
{modules, [
{"localhost", "/api", mongoose_api_admin, []}
]}
]},
{ 8089 , ejabberd_cowboy, [
{num_acceptors, 10},
{transport_options, [{max_connections, 1024}]},
{protocol_options, [{compress, true}]},
{ssl, [{certfile, "priv/ssl/fullchain.pem"}, {keyfile, "priv/ssl/privkey.pem"}, {password, ""}]},
{modules, [
{"_", "/api/sse", lasse_handler, [mongoose_client_api_sse]},
{"_", "/api/messages/[:with]", mongoose_client_api_messages, []},
{"_", "/api/contacts/[:jid]", mongoose_client_api_contacts, []},
{"_", "/api/rooms/[:id]", mongoose_client_api_rooms, []},
{"_", "/api/rooms/[:id]/config", mongoose_client_api_rooms_config, []},
{"_", "/api/rooms/:id/users/[:user]", mongoose_client_api_rooms_users, []},
{"_", "/api/rooms/[:id]/messages", mongoose_client_api_rooms_messages, []}
]}
]},
%% The following HTTP API is deprecated; the new one above should be used instead
{ {5288, "127.0.0.1"} , ejabberd_cowboy, [
{num_acceptors, 10},
{transport_options, [{max_connections, 1024}]},
{modules, [
{"localhost", "/api", mongoose_api, [{handlers, [mongoose_api_metrics,
mongoose_api_users]}]}
]}
]},
{ 5222, ejabberd_c2s, [
%%
%% If TLS is compiled in and you installed a SSL
%% certificate, specify the full path to the
%% file and uncomment this line:
%%
{certfile, "priv/ssl/both.pem"}, starttls,
%%{zlib, 10000},
%% https://www.openssl.org/docs/apps/ciphers.html#CIPHER_STRINGS
%% {ciphers, "DEFAULT:!EXPORT:!LOW:!SSLv2"},
{access, c2s},
{shaper, c2s_shaper},
{max_stanza_size, 65536},
{protocol_options, ["no_sslv3"]}
]},
%%
%% To enable the old SSL connection method on port 5223:
%%
%%{5223, ejabberd_c2s, [
%% {access, c2s},
%% {shaper, c2s_shaper},
%% {certfile, "/path/to/ssl.pem"}, tls,
%% {max_stanza_size, 65536}
%% ]},
{ 5269, ejabberd_s2s_in, [
{shaper, s2s_shaper},
{max_stanza_size, 131072},
{protocol_options, ["no_sslv3"]}
]}
%%
%% ejabberd_service: Interact with external components (transports, ...)
%%
,{8888, ejabberd_service, [
{access, all},
{shaper_rule, fast},
{ip, {127, 0, 0, 1}},
{password, "secret"}
]}
%%
%% ejabberd_stun: Handles STUN Binding requests
%%
%%{ {3478, udp}, ejabberd_stun, []}
]}.
%%
%% s2s_use_starttls: Enable STARTTLS + Dialback for S2S connections.
%% Allowed values are: false optional required required_trusted
%% You must specify a certificate file.
%%
{s2s_use_starttls, optional}.
%%
%% s2s_certfile: Specify a certificate file.
%%
{s2s_certfile, "priv/ssl/both.pem"}.
%% https://www.openssl.org/docs/apps/ciphers.html#CIPHER_STRINGS
%% {s2s_ciphers, "DEFAULT:!EXPORT:!LOW:!SSLv2"}.
%%
%% domain_certfile: Specify a different certificate for each served hostname.
%%
%%{domain_certfile, "example.org", "/path/to/example_org.pem"}.
%%{domain_certfile, "example.com", "/path/to/example_com.pem"}.
%%
%% S2S whitelist or blacklist
%%
%% Default s2s policy for undefined hosts.
%%
{s2s_default_policy, deny }.
%%
%% Allow or deny communication with specific servers.
%%
%%{ {s2s_host, "goodhost.org"}, allow}.
%%{ {s2s_host, "badhost.org"}, deny}.
{outgoing_s2s_port, 5269 }.
%%
%% IP addresses predefined for specific hosts to skip DNS lookups.
%% Ports defined here take precedence over outgoing_s2s_port.
%% Examples:
%%
%% { {s2s_addr, "example-host.net"}, {127,0,0,1} }.
%% { {s2s_addr, "example-host.net"}, { {127,0,0,1}, 5269 } }.
%% { {s2s_addr, "example-host.net"}, { {127,0,0,1}, 5269 } }.
%%
%% Outgoing S2S options
%%
%% Preferred address families (which to try first) and connect timeout
%% in milliseconds.
%%
%%{outgoing_s2s_options, [ipv4, ipv6], 10000}.
%%
%%%. ==============
%%%' SESSION BACKEND
%%{sm_backend, {mnesia, []}}.
%% Requires {redis, global, default, ..., ...} outgoing pool
%%{sm_backend, {redis, []}}.
{sm_backend, {mnesia, []} }.
%%%. ==============
%%%' AUTHENTICATION
%% Advertised SASL mechanisms
{sasl_mechanisms, [cyrsasl_plain]}.
%%
%% auth_method: Method used to authenticate the users.
%% The default method is the internal.
%% If you want to use a different method,
%% comment this line and enable the correct ones.
%%
%% {auth_method, internal }.
{auth_method, http }.
{auth_opts, [
{http, global, auth, [{workers, 50}], [{server, "https://pleroma.soykaf.com"}]},
{password_format, plain} % default
%% {password_format, scram}
%% {scram_iterations, 4096} % default
%%
%% For auth_http:
%% {basic_auth, "user:password"}
%% {path_prefix, "/"} % default
%% auth_http requires {http, Host | global, auth, ..., ...} outgoing pool.
%%
%% For auth_external
%%{extauth_program, "/path/to/authentication/script"}.
%%
%% For auth_jwt
%% {jwt_secret_source, "/path/to/file"},
%% {jwt_algorithm, "RS256"},
%% {jwt_username_key, user}
%% For cyrsasl_external
%% {authenticate_with_cn, false}
{cyrsasl_external, standard}
]}.
%%
%% Authentication using external script
%% Make sure the script is executable by ejabberd.
%%
%%{auth_method, external}.
%%
%% Authentication using RDBMS
%% Remember to setup a database in the next section.
%%
%%{auth_method, rdbms}.
%%
%% Authentication using LDAP
%%
%%{auth_method, ldap}.
%%
%% List of LDAP servers:
%%{ldap_servers, ["localhost"]}.
%%
%% Encryption of connection to LDAP servers:
%%{ldap_encrypt, none}.
%%{ldap_encrypt, tls}.
%%
%% Port to connect to on LDAP servers:
%%{ldap_port, 389}.
%%{ldap_port, 636}.
%%
%% LDAP manager:
%%{ldap_rootdn, "dc=example,dc=com"}.
%%
%% Password of LDAP manager:
%%{ldap_password, "******"}.
%%
%% Search base of LDAP directory:
%%{ldap_base, "dc=example,dc=com"}.
%%
%% LDAP attribute that holds user ID:
%%{ldap_uids, [{"mail", "%u@mail.example.org"}]}.
%%
%% LDAP filter:
%%{ldap_filter, "(objectClass=shadowAccount)"}.
%%
%% Anonymous login support:
%% auth_method: anonymous
%% anonymous_protocol: sasl_anon | login_anon | both
%% allow_multiple_connections: true | false
%%
%%{host_config, "public.example.org", [{auth_method, anonymous},
%% {allow_multiple_connections, false},
%% {anonymous_protocol, sasl_anon}]}.
%%
%% To use both anonymous and internal authentication:
%%
%%{host_config, "public.example.org", [{auth_method, [internal, anonymous]}]}.
%%%. ==============
%%%' OUTGOING CONNECTIONS (e.g. DB)
%% Here you may configure all outgoing connections used by MongooseIM,
%% e.g. to RDBMS (such as MySQL), Riak or external HTTP components.
%% The default MongooseIM configuration uses only Mnesia (non-Mnesia extensions are disabled),
%% so no options here are uncommented out of the box.
%% This section includes configuration examples; for a comprehensive guide,
%% please consult the MongooseIM documentation, page "Outgoing connections":
%% - doc/advanced-configuration/outgoing-connections.md
%% - https://mongooseim.readthedocs.io/en/latest/advanced-configuration/outgoing-connections/
{outgoing_pools, [
% {riak, global, default, [{workers, 5}], [{address, "127.0.0.1"}, {port, 8087}]},
% {elastic, global, default, [], [{host, "elastic.host.com"}, {port, 9042}]},
{http, global, auth, [{workers, 50}], [{server, "https://pleroma.soykaf.com"}]}
% {cassandra, global, default, [{workers, 100}], [{servers, [{"server1", 9042}]}, {keyspace, "big_mongooseim"}]},
% {rdbms, global, default, [{workers, 10}], [{server, {mysql, "server", 3306, "database", "username", "password"}}]}
]}.
%% More examples that may be added to outgoing_pools list:
%%
%% == MySQL ==
%% {rdbms, global, default, [{workers, 10}],
%% [{server, {mysql, "server", 3306, "database", "username", "password"}},
%% {keepalive_interval, 10}]},
%% keepalive_interval is optional
%% == PostgreSQL ==
%% {rdbms, global, default, [{workers, 10}],
%% [{server, {pgsql, "server", 5432, "database", "username", "password"}}]},
%% == ODBC (MSSQL) ==
%% {rdbms, global, default, [{workers, 10}],
%% [{server, "DSN=mongooseim;UID=mongooseim;PWD=mongooseim"}]},
%% == Elastic Search ==
%% {elastic, global, default, [], [{host, "elastic.host.com"}, {port, 9042}]},
%% == Riak ==
%% {riak, global, default, [{workers, 20}], [{address, "127.0.0.1"}, {port, 8087}]},
%% == HTTP ==
%% {http, global, conn1, [{workers, 50}], [{server, "http://server:8080"}]},
%% == Cassandra ==
%% {cassandra, global, default, [{workers, 100}],
%% [
%% {servers, [
%% {"cassandra_server1.example.com", 9042},
%% {"cassandra_server2.example.com", 9042},
%% {"cassandra_server3.example.com", 9042},
%% {"cassandra_server4.example.com", 9042}
%% ]},
%% {keyspace, "big_mongooseim"}
%% ]}
%% == Extra options ==
%%
%% If you use PostgreSQL, have a large database, and need a
%% faster but inexact replacement for "select count(*) from users"
%%
%%{pgsql_users_number_estimate, true}.
%%
%% rdbms_server_type specifies which database is used over the RDBMS layer.
%% It can take the values mssql, pgsql, mysql.
%% In some cases (for example, for MAM with pgsql) setting the proper value is required.
%%
%% {rdbms_server_type, pgsql}.
%%%. ===============
%%%' TRAFFIC SHAPERS
%%
%% The "normal" shaper limits traffic speed to 1000 B/s
%%
{shaper, normal, {maxrate, 1000}}.
%%
%% The "fast" shaper limits traffic speed to 50000 B/s
%%
{shaper, fast, {maxrate, 50000}}.
%%
%% This option specifies the maximum number of elements in the queue
%% of the FSM. Refer to the documentation for details.
%%
{max_fsm_queue, 1000}.
%%%. ====================
%%%' ACCESS CONTROL LISTS
%%
%% The 'admin' ACL grants administrative privileges to XMPP accounts.
%% You can list as many accounts here as you want.
%%
%{acl, admin, {user, "alice", "localhost"}}.
%{acl, admin, {user, "a", "localhost"}}.
%%
%% Blocked users
%%
%%{acl, blocked, {user, "baduser", "example.org"}}.
%%{acl, blocked, {user, "test"}}.
%%
%% Local users: don't modify this line.
%%
{acl, local, {user_regexp, ""}}.
%%
%% More examples of ACLs
%%
%%{acl, jabberorg, {server, "jabber.org"}}.
%%{acl, aleksey, {user, "aleksey", "jabber.ru"}}.
%%{acl, test, {user_regexp, "^test"}}.
%%{acl, test, {user_glob, "test*"}}.
%%
%% Define specific ACLs in a virtual host.
%%
%%{host_config, "localhost",
%% [
%% {acl, admin, {user, "bob-local", "localhost"}}
%% ]
%%}.
%%%. ============
%%%' ACCESS RULES
%% Maximum number of simultaneous sessions allowed for a single user:
{access, max_user_sessions, [{10, all}]}.
%% Maximum number of offline messages that users can have:
{access, max_user_offline_messages, [{5000, admin}, {100, all}]}.
%% This rule allows access only for local users:
{access, local, [{allow, local}]}.
%% Only non-blocked users can use c2s connections:
{access, c2s, [{deny, blocked},
{allow, all}]}.
%% For C2S connections, all users except admins use the "normal" shaper
{access, c2s_shaper, [{none, admin},
{normal, all}]}.
%% All S2S connections use the "fast" shaper
{access, s2s_shaper, [{fast, all}]}.
%% Admins of this server are also admins of the MUC service:
{access, muc_admin, [{allow, admin}]}.
%% Only accounts of the local ejabberd server can create rooms:
{access, muc_create, [{allow, local}]}.
%% All users are allowed to use the MUC service:
{access, muc, [{allow, all}]}.
%% In-band registration allows registration of any possible username.
%% To disable in-band registration, replace 'allow' with 'deny'.
{access, register, [{allow, all}]}.
%% By default, the frequency of account registrations from the same IP address
%% is limited to 1 account every 10 minutes. To disable the limit, specify: infinity
{registration_timeout, infinity}.
%% Default settings for MAM.
%% To set a non-standard value, replace 'default' with 'allow' or 'deny'.
%% Only the user can access their own archive by default.
%% An online user can read a room's archive by default.
%% Only an owner can change settings and purge messages by default.
%% An empty list (i.e. `[]`) means `[{deny, all}]`.
{access, mam_set_prefs, [{default, all}]}.
{access, mam_get_prefs, [{default, all}]}.
{access, mam_lookup_messages, [{default, all}]}.
{access, mam_purge_single_message, [{default, all}]}.
{access, mam_purge_multiple_messages, [{default, all}]}.
%% 1 command of the specified type per second.
{shaper, mam_shaper, {maxrate, 1}}.
%% This shaper is primarily for Mnesia overload protection during stress testing.
%% The limit is 1000 operations of each type per second.
{shaper, mam_global_shaper, {maxrate, 1000}}.
{access, mam_set_prefs_shaper, [{mam_shaper, all}]}.
{access, mam_get_prefs_shaper, [{mam_shaper, all}]}.
{access, mam_lookup_messages_shaper, [{mam_shaper, all}]}.
{access, mam_purge_single_message_shaper, [{mam_shaper, all}]}.
{access, mam_purge_multiple_messages_shaper, [{mam_shaper, all}]}.
{access, mam_set_prefs_global_shaper, [{mam_global_shaper, all}]}.
{access, mam_get_prefs_global_shaper, [{mam_global_shaper, all}]}.
{access, mam_lookup_messages_global_shaper, [{mam_global_shaper, all}]}.
{access, mam_purge_single_message_global_shaper, [{mam_global_shaper, all}]}.
{access, mam_purge_multiple_messages_global_shaper, [{mam_global_shaper, all}]}.
%%
%% Define specific Access Rules in a virtual host.
%%
%%{host_config, "localhost",
%% [
%% {access, c2s, [{allow, admin}, {deny, all}]},
%% {access, register, [{deny, all}]}
%% ]
%%}.
%%%. ================
%%%' DEFAULT LANGUAGE
%%
%% language: Default language used for server messages.
%%
{language, "en"}.
%%
%% Set a different default language in a virtual host.
%%
%%{host_config, "localhost",
%% [{language, "ru"}]
%%}.
%%%. ================
%%%' MISCELLANEOUS
{all_metrics_are_global, false }.
%%%. ========
%%%' SERVICES
%% Unlike modules, services are started per node and provide either features that are not
%% related to any particular host, or backend functionality used by modules.
%% This is handled by the `mongoose_service` module.
{services,
[
{service_admin_extra, [{submods, [node, accounts, sessions, vcard,
roster, last, private, stanza, stats]}]}
]
}.
%%%. =======
%%%' MODULES
%%
%% Modules enabled in all MongooseIM virtual hosts.
%% For the list of possible module options, check the documentation.
%%
{modules,
[
%% The format for a single route is as follows:
%% {Host, Path, Method, Upstream}
%%
%% "_" can be used as wildcard for Host, Path and Method
%% Upstream can be either host (just http(s)://host:port) or uri
%% The difference is that host upstreams append whole path while
%% uri upstreams append only remainder that follows the matched Path
%% (this behaviour is similar to nginx's proxy_pass rules)
%%
%% Bindings can be used to match certain parts of the host or path.
%% They will later be overlaid with parts of the upstream URI.
%%
%% {mod_revproxy,
%% [{routes, [{"www.erlang-solutions.com", "/admin", "_",
%% "https://www.erlang-solutions.com/"},
%% {":var.com", "/:var", "_", "http://localhost:8080/"},
%% {":domain.com", "/", "_", "http://localhost:8080/:domain"}]
%% }]},
% {mod_http_upload, [
%% Set max file size in bytes. Defaults to 10 MB.
%% Disabled if value is `undefined`.
% {max_file_size, 1024},
%% Use S3 storage backend
% {backend, s3},
%% Set options for S3 backend
% {s3, [
% {bucket_url, "http://s3-eu-west-1.amazonaws.com/konbucket2"},
% {region, "eu-west-1"},
% {access_key_id, "AKIAIAOAONIULXQGMOUA"},
% {secret_access_key, "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"}
% ]}
% ]},
{mod_adhoc, []},
{mod_disco, [{users_can_see_hidden_services, false}]},
{mod_commands, []},
{mod_muc_commands, []},
{mod_muc_light_commands, []},
{mod_last, []},
{mod_stream_management, [
% default 100
% size of a buffer of unacked messages
% {buffer_max, 100}
% default 1 - server sends the ack request after each stanza
% {ack_freq, 1}
% default: 600 seconds
% {resume_timeout, 600}
]},
%% {mod_muc_light, [{host, "muclight.@HOST@"}]},
%% {mod_muc, [{host, "muc.@HOST@"},
%% {access, muc},
%% {access_create, muc_create}
%% ]},
%% {mod_muc_log, [
%% {outdir, "/tmp/muclogs"},
%% {access_log, muc}
%% ]},
{mod_offline, [{access_max_user_messages, max_user_offline_messages}]},
{mod_privacy, []},
{mod_blocking, []},
{mod_private, []},
% {mod_private, [{backend, mnesia}]},
% {mod_private, [{backend, rdbms}]},
% {mod_register, [
% %%
% %% Set the minimum informational entropy for passwords.
% %%
% %%{password_strength, 32},
%
% %%
% %% After successful registration, the user receives
% %% a message with this subject and body.
% %%
% {welcome_message, {""}},
%
% %%
% %% When a user registers, send a notification to
% %% these XMPP accounts.
% %%
%
%
% %%
% %% Only clients on the server machine can register accounts
% %%
% {ip_access, [{allow, "127.0.0.0/8"},
% {deny, "0.0.0.0/0"}]},
%
% %%
% %% Local c2s or remote s2s users cannot register accounts
% %%
% %%{access_from, deny},
%
% {access, register}
% ]},
{mod_roster, []},
{mod_sic, []},
{mod_vcard, [%{matches, 1},
%{search, true},
%{ldap_search_operator, 'or'}, %% either 'or' or 'and'
%{ldap_binary_search_fields, [<<"PHOTO">>]},
%% list of binary search fields (as in vcard after mapping)
{host, "vjud.@HOST@"}
]},
{mod_bosh, []},
{mod_carboncopy, []}
%%
%% Message Archive Management (MAM, XEP-0313) for registered users and
%% Multi-User Chats (MUCs).
%%
% {mod_mam_meta, [
%% Use RDBMS backend (default)
% {backend, rdbms},
%% Do not store user preferences (default)
% {user_prefs_store, false},
%% Store user preferences in RDBMS
% {user_prefs_store, rdbms},
%% Store user preferences in Mnesia (recommended).
%% The preferences store is queried each time a message is routed,
%% which is why Mnesia is better suited for this job.
% {user_prefs_store, mnesia},
%% Enables a pool of asynchronous writers. (default)
%% Messages will be grouped together based on archive id.
% {async_writer, true},
%% Cache information about users (default)
% {cache_users, true},
%% Enable archiving for private messages (default)
% {pm, [
%% Top-level options can be overridden here if needed, for example:
% {async_writer, false}
% ]},
%%
%% Message Archive Management (MAM) for multi-user chats (MUC).
%% Enable XEP-0313 for "muc.@HOST@".
%%
% {muc, [
% {host, "muc.@HOST@"}
%% As with pm, top-level options can be overridden for the MUC archive
% ]},
%
%% Do not use a <stanza-id/> element (by default stanzaid is used)
% no_stanzaid_element,
% ]},
%%
%% MAM configuration examples
%%
%% Only MUC, no user-defined preferences, good performance.
% {mod_mam_meta, [
% {backend, rdbms},
% {pm, false},
% {muc, [
% {host, "muc.@HOST@"}
% ]}
% ]},
%% Only archives for c2c messages, good performance.
% {mod_mam_meta, [
% {backend, rdbms},
% {pm, [
% {user_prefs_store, mnesia}
% ]}
% ]},
%% Basic configuration for c2c messages, bad performance, easy to debug.
% {mod_mam_meta, [
% {backend, rdbms},
% {async_writer, false},
% {cache_users, false}
% ]},
%% Cassandra archive for c2c and MUC conversations.
%% No custom settings supported (always archive).
% {mod_mam_meta, [
% {backend, cassandra},
% {user_prefs_store, cassandra},
% {muc, [{host, "muc.@HOST@"}]}
% ]}
% {mod_event_pusher, [
% {backends, [
% %%
% %% Configuration for Amazon SNS notifications.
% %%
% {sns, [
% %% AWS credentials, region and host configuration
% {access_key_id, "AKIAJAZYHOIPY6A2PESA"},
% {secret_access_key, "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"},
% {region, "eu-west-1"},
% {account_id, "251423380551"},
% {region, "eu-west-1"},
% {sns_host, "sns.eu-west-1.amazonaws.com"},
%
% %% Messages from this MUC host will be sent to the SNS topic
% {muc_host, "muc.@HOST@"},
%
% %% Plugin module for defining custom message attributes and user identification
% {plugin_module, mod_event_pusher_sns_defaults},
%
% %% Topic name configurations. Removing a topic will disable this specific SNS notification
% {presence_updates_topic, "user_presence_updated-dev-1"}, %% For presence updates
% {pm_messages_topic, "user_message_sent-dev-1"}, %% For private chat messages
% {muc_messages_topic, "user_messagegroup_sent-dev-1"} %% For group chat messages
%
% %% Pool options
% {pool_size, 100}, %% Worker pool size for publishing notifications
% {publish_retry_count, 2}, %% Retry count in case of publish error
% {publish_retry_time_ms, 50} %% Base exponential backoff time (in ms) for publish errors
% ]}
% ]}
]}.
%%
%% Enable modules with custom options in a specific virtual host
%%
%%{host_config, "localhost",
%% [{ {add, modules},
%% [
%% {mod_some_module, []}
%% ]
%% }
%% ]}.
%%%.
%%%'
%%% $Id$
%%% Local Variables:
%%% mode: erlang
%%% End:
%%% vim: set filetype=erlang tabstop=8 foldmarker=%%%',%%%. foldmethod=marker:
%%%.

View file

@ -69,7 +69,9 @@ server {
proxy_set_header Connection "upgrade"; proxy_set_header Connection "upgrade";
proxy_set_header Host $http_host; proxy_set_header Host $http_host;
proxy_pass http://localhost:4000; # this is explicitly IPv4 since Pleroma.Web.Endpoint binds on IPv4 only
# and `localhost.` resolves to [::0] on some systems: see issue #930
proxy_pass http://127.0.0.1:4000;
client_max_body_size 16m; client_max_body_size 16m;
} }

View file

@ -1,4 +1,4 @@
vcl 4.0; vcl 4.1;
import std; import std;
backend default { backend default {
@ -35,24 +35,6 @@ sub vcl_recv {
} }
return(purge); return(purge);
} }
# Pleroma MediaProxy - strip headers that will affect caching
if (req.url ~ "^/proxy/") {
unset req.http.Cookie;
unset req.http.Authorization;
unset req.http.Accept;
return (hash);
}
# Strip headers that will affect caching from all other static content
# This also permits caching of individual toots and AP Activities
if ((req.url ~ "^/(media|static)/") ||
(req.url ~ "(?i)\.(html|js|css|jpg|jpeg|png|gif|gz|tgz|bz2|tbz|mp3|mp4|ogg|webm|svg|swf|ttf|pdf|woff|woff2)$"))
{
unset req.http.Cookie;
unset req.http.Authorization;
return (hash);
}
} }
sub vcl_backend_response { sub vcl_backend_response {
@ -61,6 +43,12 @@ sub vcl_backend_response {
set beresp.do_gzip = true; set beresp.do_gzip = true;
} }
# Retry broken backend responses.
if (beresp.status == 503) {
set bereq.http.X-Varnish-Backend-503 = "1";
return (retry);
}
# CHUNKED SUPPORT # CHUNKED SUPPORT
if (bereq.http.x-range ~ "bytes=" && beresp.status == 206) { if (bereq.http.x-range ~ "bytes=" && beresp.status == 206) {
set beresp.ttl = 10m; set beresp.ttl = 10m;
@ -73,8 +61,6 @@ sub vcl_backend_response {
return (deliver); return (deliver);
} }
# Default object caching of 86400s;
set beresp.ttl = 86400s;
# Allow serving cached content for 6h in case backend goes down # Allow serving cached content for 6h in case backend goes down
set beresp.grace = 6h; set beresp.grace = 6h;
@ -90,20 +76,6 @@ sub vcl_backend_response {
set beresp.ttl = 30s; set beresp.ttl = 30s;
return (deliver); return (deliver);
} }
# Pleroma MediaProxy internally sets headers properly
if (bereq.url ~ "^/proxy/") {
return (deliver);
}
# Strip cache-restricting headers from Pleroma on static content that we want to cache
if (bereq.url ~ "(?i)\.(js|css|jpg|jpeg|png|gif|gz|tgz|bz2|tbz|mp3|mp4|ogg|webm|svg|swf|ttf|pdf|woff|woff2)$")
{
unset beresp.http.set-cookie;
unset beresp.http.Cache-Control;
unset beresp.http.x-request-id;
set beresp.http.Cache-Control = "public, max-age=86400";
}
} }
# The synthetic response for 301 redirects # The synthetic response for 301 redirects
@ -132,10 +104,32 @@ sub vcl_hash {
} }
sub vcl_backend_fetch { sub vcl_backend_fetch {
# Be more lenient for slow servers on the fediverse
if (bereq.url ~ "^/proxy/") {
set bereq.first_byte_timeout = 300s;
}
# CHUNKED SUPPORT # CHUNKED SUPPORT
if (bereq.http.x-range) { if (bereq.http.x-range) {
set bereq.http.Range = bereq.http.x-range; set bereq.http.Range = bereq.http.x-range;
} }
if (bereq.retries == 0) {
# Clean up the X-Varnish-Backend-503 flag that is used internally
# to mark broken backend responses that should be retried.
unset bereq.http.X-Varnish-Backend-503;
} else {
if (bereq.http.X-Varnish-Backend-503) {
if (bereq.method != "POST" &&
std.healthy(bereq.backend) &&
bereq.retries <= 4) {
# Flush broken backend response flag & try again.
unset bereq.http.X-Varnish-Backend-503;
} else {
return (abandon);
}
}
}
} }
sub vcl_deliver { sub vcl_deliver {
@ -145,3 +139,9 @@ sub vcl_deliver {
unset resp.http.CR; unset resp.http.CR;
} }
} }
sub vcl_backend_error {
# Retry broken backend responses.
set bereq.http.X-Varnish-Backend-503 = "1";
return (retry);
}

View file

@ -29,13 +29,13 @@ def system_info do
end end
defp assign_db_info(healthcheck) do defp assign_db_info(healthcheck) do
database = Application.get_env(:pleroma, Repo)[:database] database = Pleroma.Config.get([Repo, :database])
query = query =
"select state, count(pid) from pg_stat_activity where datname = '#{database}' group by state;" "select state, count(pid) from pg_stat_activity where datname = '#{database}' group by state;"
result = Repo.query!(query) result = Repo.query!(query)
pool_size = Application.get_env(:pleroma, Repo)[:pool_size] pool_size = Pleroma.Config.get([Repo, :pool_size])
db_info = db_info =
Enum.reduce(result.rows, %{active: 0, idle: 0}, fn [state, cnt], states -> Enum.reduce(result.rows, %{active: 0, idle: 0}, fn [state, cnt], states ->

67
lib/mix/pleroma.ex Normal file
View file

@ -0,0 +1,67 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Pleroma do
@doc "Common functions to be reused in mix tasks"
def start_pleroma do
Application.put_env(:phoenix, :serve_endpoints, false, persistent: true)
{:ok, _} = Application.ensure_all_started(:pleroma)
end
def load_pleroma do
Application.load(:pleroma)
end
def get_option(options, opt, prompt, defval \\ nil, defname \\ nil) do
Keyword.get(options, opt) || shell_prompt(prompt, defval, defname)
end
def shell_prompt(prompt, defval \\ nil, defname \\ nil) do
prompt_message = "#{prompt} [#{defname || defval}] "
input =
if mix_shell?(),
do: Mix.shell().prompt(prompt_message),
else: :io.get_line(prompt_message)
case input do
"\n" ->
case defval do
nil ->
shell_prompt(prompt, defval, defname)
defval ->
defval
end
input ->
String.trim(input)
end
end
def shell_yes?(message) do
if mix_shell?(),
do: Mix.shell().yes?("Continue?"),
else: shell_prompt(message, "Continue?") in ~w(Yn Y y)
end
def shell_info(message) do
if mix_shell?(),
do: Mix.shell().info(message),
else: IO.puts(message)
end
def shell_error(message) do
if mix_shell?(),
do: Mix.shell().error(message),
else: IO.puts(:stderr, message)
end
@doc "Performs a safe check whether `Mix.shell/0` is available (does not raise if Mix is not loaded)"
def mix_shell?, do: :erlang.function_exported(Mix, :shell, 0)
def escape_sh_path(path) do
~S(') <> String.replace(path, ~S('), ~S(\')) <> ~S(')
end
end
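A minimal sketch of how a mix task might build on these helpers; the task name, option, prompt text, and default domain below are hypothetical, assuming only the Mix.Pleroma module defined above:
defmodule Mix.Tasks.Pleroma.Example do
  # Illustration-only task showing get_option/4 and shell_info/1 in use.
  use Mix.Task
  import Mix.Pleroma

  @shortdoc "Illustration-only task"
  def run(args) do
    {options, [], []} = OptionParser.parse(args, strict: [domain: :string])

    # Uses --domain when passed, otherwise prompts interactively
    # with "example.com" offered as the default.
    domain = get_option(options, :domain, "What domain will your instance use?", "example.com")

    shell_info("Using domain: #{domain}")
  end
end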

View file

@ -1,19 +1,19 @@
defmodule Mix.Tasks.Pleroma.Benchmark do defmodule Mix.Tasks.Pleroma.Benchmark do
import Mix.Pleroma
use Mix.Task use Mix.Task
alias Mix.Tasks.Pleroma.Common
def run(["search"]) do def run(["search"]) do
Common.start_pleroma() start_pleroma()
Benchee.run(%{ Benchee.run(%{
"search" => fn -> "search" => fn ->
Pleroma.Web.MastodonAPI.MastodonAPIController.status_search(nil, "cofe") Pleroma.Activity.search(nil, "cofe")
end end
}) })
end end
def run(["tag"]) do def run(["tag"]) do
Common.start_pleroma() start_pleroma()
Benchee.run(%{ Benchee.run(%{
"tag" => fn -> "tag" => fn ->

View file

@ -1,28 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Common do
@doc "Common functions to be reused in mix tasks"
def start_pleroma do
Mix.Task.run("app.start")
end
def get_option(options, opt, prompt, defval \\ nil, defname \\ nil) do
Keyword.get(options, opt) ||
case Mix.shell().prompt("#{prompt} [#{defname || defval}]") do
"\n" ->
case defval do
nil -> get_option(options, opt, prompt, defval)
defval -> defval
end
opt ->
opt |> String.trim()
end
end
def escape_sh_path(path) do
~S(') <> String.replace(path, ~S('), ~S(\')) <> ~S(')
end
end

View file

@ -0,0 +1,69 @@
defmodule Mix.Tasks.Pleroma.Config do
use Mix.Task
import Mix.Pleroma
alias Pleroma.Repo
alias Pleroma.Web.AdminAPI.Config
@shortdoc "Manages the location of the config"
@moduledoc """
Manages the location of the config.
## Transfers config from file to DB.
mix pleroma.config migrate_to_db
## Transfers config from DB to file.
mix pleroma.config migrate_from_db ENV
"""
def run(["migrate_to_db"]) do
start_pleroma()
if Pleroma.Config.get([:instance, :dynamic_configuration]) do
Application.get_all_env(:pleroma)
|> Enum.reject(fn {k, _v} -> k in [Pleroma.Repo, :env] end)
|> Enum.each(fn {k, v} ->
key = to_string(k) |> String.replace("Elixir.", "")
{:ok, _} = Config.update_or_create(%{key: key, value: v})
Mix.shell().info("#{key} is migrated.")
end)
Mix.shell().info("Settings migrated.")
else
Mix.shell().info(
"Migration is not allowed by config. You can change this behavior in instance settings."
)
end
end
def run(["migrate_from_db", env]) do
start_pleroma()
if Pleroma.Config.get([:instance, :dynamic_configuration]) do
config_path = "config/#{env}.exported_from_db.secret.exs"
{:ok, file} = File.open(config_path, [:write])
IO.write(file, "use Mix.Config\r\n")
Repo.all(Config)
|> Enum.each(fn config ->
mark = if String.starts_with?(config.key, "Pleroma."), do: ",", else: ":"
IO.write(
file,
"config :pleroma, #{config.key}#{mark} #{inspect(Config.from_binary(config.value))}\r\n"
)
{:ok, _} = Repo.delete(config)
Mix.shell().info("#{config.key} deleted from DB.")
end)
File.close(file)
System.cmd("mix", ["format", config_path])
else
Mix.shell().info(
"Migration is not allowed by config. You can change this behavior in instance settings."
)
end
end
end
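A hedged usage sketch for the task above; "prod" is an example environment name, and the commented `config` line shows the setting both directions check:
# Dynamic configuration must be enabled first, e.g. in the secret config:
#   config :pleroma, :instance, dynamic_configuration: true

# Move file-based settings into the database:
Mix.Tasks.Pleroma.Config.run(["migrate_to_db"])

# Writes config/prod.exported_from_db.secret.exs and removes the DB entries:
Mix.Tasks.Pleroma.Config.run(["migrate_from_db", "prod"])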

View file

@ -3,12 +3,12 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Database do defmodule Mix.Tasks.Pleroma.Database do
alias Mix.Tasks.Pleroma.Common
alias Pleroma.Conversation alias Pleroma.Conversation
alias Pleroma.Object alias Pleroma.Object
alias Pleroma.Repo alias Pleroma.Repo
alias Pleroma.User alias Pleroma.User
require Logger require Logger
import Mix.Pleroma
use Mix.Task use Mix.Task
@shortdoc "A collection of database related tasks" @shortdoc "A collection of database related tasks"
@ -45,7 +45,7 @@ def run(["remove_embedded_objects" | args]) do
] ]
) )
Common.start_pleroma() start_pleroma()
Logger.info("Removing embedded objects") Logger.info("Removing embedded objects")
Repo.query!( Repo.query!(
@ -66,12 +66,12 @@ def run(["remove_embedded_objects" | args]) do
end end
def run(["bump_all_conversations"]) do def run(["bump_all_conversations"]) do
Common.start_pleroma() start_pleroma()
Conversation.bump_for_all_activities() Conversation.bump_for_all_activities()
end end
def run(["update_users_following_followers_counts"]) do def run(["update_users_following_followers_counts"]) do
Common.start_pleroma() start_pleroma()
users = Repo.all(User) users = Repo.all(User)
Enum.each(users, &User.remove_duplicated_following/1) Enum.each(users, &User.remove_duplicated_following/1)
@ -89,7 +89,7 @@ def run(["prune_objects" | args]) do
] ]
) )
Common.start_pleroma() start_pleroma()
deadline = Pleroma.Config.get([:instance, :remote_post_retention_days]) deadline = Pleroma.Config.get([:instance, :remote_post_retention_days])

View file

@ -0,0 +1,49 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Ecto do
@doc """
Ensures the given repository's migrations path exists on the file system.
"""
@spec ensure_migrations_path(Ecto.Repo.t(), Keyword.t()) :: String.t()
def ensure_migrations_path(repo, opts) do
path = opts[:migrations_path] || Path.join(source_repo_priv(repo), "migrations")
path =
case Path.type(path) do
:relative ->
Path.join(Application.app_dir(:pleroma), path)
:absolute ->
path
end
if not File.dir?(path) do
raise_missing_migrations(Path.relative_to_cwd(path), repo)
end
path
end
@doc """
Returns the private repository path relative to the source.
"""
def source_repo_priv(repo) do
config = repo.config()
priv = config[:priv] || "priv/#{repo |> Module.split() |> List.last() |> Macro.underscore()}"
Path.join(Application.app_dir(:pleroma), priv)
end
defp raise_missing_migrations(path, repo) do
raise("""
Could not find migrations directory #{inspect(path)}
for repo #{inspect(repo)}.
This may be because you are in a new project and the
migration directory has not been created yet. Creating an
empty directory at the path above will fix this error.
If you expected existing migrations to be found, please
make sure your repository has been properly configured
and the configured path exists.
""")
end
end

View file

@ -0,0 +1,63 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Ecto.Migrate do
use Mix.Task
import Mix.Pleroma
require Logger
@shortdoc "Wrapper on `ecto.migrate` task."
@aliases [
n: :step,
v: :to
]
@switches [
all: :boolean,
step: :integer,
to: :integer,
quiet: :boolean,
log_sql: :boolean,
strict_version_order: :boolean,
migrations_path: :string
]
@moduledoc """
Changes the `Logger` level to `:info` before starting the migration.
Changes the level back when the migration ends.
## Start migration
mix pleroma.ecto.migrate [OPTIONS]
Options:
- see https://hexdocs.pm/ecto/2.0.0/Mix.Tasks.Ecto.Migrate.html
"""
@impl true
def run(args \\ []) do
load_pleroma()
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
opts =
if opts[:to] || opts[:step] || opts[:all],
do: opts,
else: Keyword.put(opts, :all, true)
opts =
if opts[:quiet],
do: Keyword.merge(opts, log: false, log_sql: false),
else: opts
path = Mix.Tasks.Pleroma.Ecto.ensure_migrations_path(Pleroma.Repo, opts)
level = Logger.level()
Logger.configure(level: :info)
{:ok, _, _} = Ecto.Migrator.with_repo(Pleroma.Repo, &Ecto.Migrator.run(&1, path, :up, opts))
Logger.configure(level: level)
end
end
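A hedged invocation sketch based on the switches and aliases defined above; the step count is arbitrary:
# From the command line:
#   mix pleroma.ecto.migrate                # all pending migrations (--all implied)
#   mix pleroma.ecto.migrate -n 1 --quiet   # one step, with SQL logging silenced
# Or invoked programmatically, which is what `mix` does under the hood:
Mix.Tasks.Pleroma.Ecto.Migrate.run(["--step", "1", "--quiet"])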

View file

@ -0,0 +1,67 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Mix.Tasks.Pleroma.Ecto.Rollback do
use Mix.Task
import Mix.Pleroma
require Logger
@shortdoc "Wrapper on `ecto.rollback` task"
@aliases [
n: :step,
v: :to
]
@switches [
all: :boolean,
step: :integer,
to: :integer,
start: :boolean,
quiet: :boolean,
log_sql: :boolean,
migrations_path: :string
]
@moduledoc """
Changes the `Logger` level to `:info` before starting the rollback.
Changes the level back when the rollback ends.
## Start rollback
mix pleroma.ecto.rollback
Options:
- see https://hexdocs.pm/ecto/2.0.0/Mix.Tasks.Ecto.Rollback.html
"""
@impl true
def run(args \\ []) do
load_pleroma()
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
opts =
if opts[:to] || opts[:step] || opts[:all],
do: opts,
else: Keyword.put(opts, :step, 1)
opts =
if opts[:quiet],
do: Keyword.merge(opts, log: false, log_sql: false),
else: opts
path = Mix.Tasks.Pleroma.Ecto.ensure_migrations_path(Pleroma.Repo, opts)
level = Logger.level()
Logger.configure(level: :info)
if Pleroma.Config.get(:env) == :test do
Logger.info("Rollback succesfully")
else
{:ok, _, _} =
Ecto.Migrator.with_repo(Pleroma.Repo, &Ecto.Migrator.run(&1, path, :down, opts))
end
Logger.configure(level: level)
end
end

View file

@ -55,15 +55,13 @@ defmodule Mix.Tasks.Pleroma.Emoji do
are extracted). are extracted).
""" """
@default_manifest Pleroma.Config.get!([:emoji, :default_manifest])
def run(["ls-packs" | args]) do def run(["ls-packs" | args]) do
Application.ensure_all_started(:hackney) Application.ensure_all_started(:hackney)
{options, [], []} = parse_global_opts(args) {options, [], []} = parse_global_opts(args)
manifest = manifest =
fetch_manifest(if options[:manifest], do: options[:manifest], else: @default_manifest) fetch_manifest(if options[:manifest], do: options[:manifest], else: default_manifest())
Enum.each(manifest, fn {name, info} -> Enum.each(manifest, fn {name, info} ->
to_print = [ to_print = [
@ -88,7 +86,7 @@ def run(["get-packs" | args]) do
{options, pack_names, []} = parse_global_opts(args) {options, pack_names, []} = parse_global_opts(args)
manifest_url = if options[:manifest], do: options[:manifest], else: @default_manifest manifest_url = if options[:manifest], do: options[:manifest], else: default_manifest()
manifest = fetch_manifest(manifest_url) manifest = fetch_manifest(manifest_url)
@ -298,4 +296,6 @@ defp client do
Tesla.client(middleware) Tesla.client(middleware)
end end
defp default_manifest, do: Pleroma.Config.get!([:emoji, :default_manifest])
end end

View file

@ -4,7 +4,7 @@
defmodule Mix.Tasks.Pleroma.Instance do defmodule Mix.Tasks.Pleroma.Instance do
use Mix.Task use Mix.Task
alias Mix.Tasks.Pleroma.Common import Mix.Pleroma
@shortdoc "Manages Pleroma instance" @shortdoc "Manages Pleroma instance"
@moduledoc """ @moduledoc """
@ -29,7 +29,11 @@ defmodule Mix.Tasks.Pleroma.Instance do
- `--dbname DBNAME` - the name of the database to use - `--dbname DBNAME` - the name of the database to use
- `--dbuser DBUSER` - the user (aka role) to use for the database connection - `--dbuser DBUSER` - the user (aka role) to use for the database connection
- `--dbpass DBPASS` - the password to use for the database connection - `--dbpass DBPASS` - the password to use for the database connection
- `--rum Y/N` - Whether to enable RUM indexes
- `--indexable Y/N` - Allow/disallow indexing site by search engines - `--indexable Y/N` - Allow/disallow indexing site by search engines
- `--db-configurable Y/N` - Allow/disallow configuring the instance from the admin interface (admin-fe) - `--indexable Y/N` - Allow/disallow indexing site by search engines
- `--uploads-dir` - the directory uploads go in when using a local uploader
- `--static-dir` - the directory custom public files should be read from (custom emojis, frontend bundle overrides, robots.txt, etc.)
""" """
def run(["gen" | rest]) do def run(["gen" | rest]) do
@ -48,7 +52,11 @@ def run(["gen" | rest]) do
dbname: :string, dbname: :string,
dbuser: :string, dbuser: :string,
dbpass: :string, dbpass: :string,
indexable: :string rum: :string,
indexable: :string,
db_configurable: :string,
uploads_dir: :string,
static_dir: :string
], ],
aliases: [ aliases: [
o: :output, o: :output,
@ -68,7 +76,7 @@ def run(["gen" | rest]) do
if proceed? do if proceed? do
[domain, port | _] = [domain, port | _] =
String.split( String.split(
Common.get_option( get_option(
options, options,
:domain, :domain,
"What domain will your instance use? (e.g pleroma.soykaf.com)" "What domain will your instance use? (e.g pleroma.soykaf.com)"
@ -77,16 +85,16 @@ def run(["gen" | rest]) do
) ++ [443] ) ++ [443]
name = name =
Common.get_option( get_option(
options, options,
:instance_name, :instance_name,
"What is the name of your instance? (e.g. Pleroma/Soykaf)" "What is the name of your instance? (e.g. Pleroma/Soykaf)"
) )
email = Common.get_option(options, :admin_email, "What is your admin email address?") email = get_option(options, :admin_email, "What is your admin email address?")
notify_email = notify_email =
Common.get_option( get_option(
options, options,
:notify_email, :notify_email,
"What email address do you want to use for sending email notifications?", "What email address do you want to use for sending email notifications?",
@ -94,21 +102,27 @@ def run(["gen" | rest]) do
) )
indexable = indexable =
Common.get_option( get_option(
options, options,
:indexable, :indexable,
"Do you want search engines to index your site? (y/n)", "Do you want search engines to index your site? (y/n)",
"y" "y"
) === "y" ) === "y"
dbhost = db_configurable? =
Common.get_option(options, :dbhost, "What is the hostname of your database?", "localhost") get_option(
options,
:db_configurable,
"Do you want to store the configuration in the database (allows controlling it from admin-fe)? (y/n)",
"y"
) === "y"
dbname = dbhost = get_option(options, :dbhost, "What is the hostname of your database?", "localhost")
Common.get_option(options, :dbname, "What is the name of your database?", "pleroma_dev")
dbname = get_option(options, :dbname, "What is the name of your database?", "pleroma_dev")
dbuser = dbuser =
Common.get_option( get_option(
options, options,
:dbuser, :dbuser,
"What is the user used to connect to your database?", "What is the user used to connect to your database?",
@ -116,7 +130,7 @@ def run(["gen" | rest]) do
) )
dbpass = dbpass =
Common.get_option( get_option(
options, options,
:dbpass, :dbpass,
"What is the password used to connect to your database?", "What is the password used to connect to your database?",
@ -124,13 +138,38 @@ def run(["gen" | rest]) do
"autogenerated" "autogenerated"
) )
rum_enabled =
get_option(
options,
:rum,
"Would you like to use RUM indices?",
"n"
) === "y"
uploads_dir =
get_option(
options,
:upload_dir,
"What directory should media uploads go in (when using the local uploader)?",
Pleroma.Config.get([Pleroma.Uploaders.Local, :uploads])
)
static_dir =
get_option(
options,
:static_dir,
"What directory should custom public files be read from (custom emojis, frontend bundle overrides, robots.txt, etc.)?",
Pleroma.Config.get([:instance, :static_dir])
)
secret = :crypto.strong_rand_bytes(64) |> Base.encode64() |> binary_part(0, 64) secret = :crypto.strong_rand_bytes(64) |> Base.encode64() |> binary_part(0, 64)
signing_salt = :crypto.strong_rand_bytes(8) |> Base.encode64() |> binary_part(0, 8) signing_salt = :crypto.strong_rand_bytes(8) |> Base.encode64() |> binary_part(0, 8)
{web_push_public_key, web_push_private_key} = :crypto.generate_key(:ecdh, :prime256v1) {web_push_public_key, web_push_private_key} = :crypto.generate_key(:ecdh, :prime256v1)
template_dir = Application.app_dir(:pleroma, "priv") <> "/templates"
result_config = result_config =
EEx.eval_file( EEx.eval_file(
"sample_config.eex" |> Path.expand(__DIR__), template_dir <> "/sample_config.eex",
domain: domain, domain: domain,
port: port, port: port,
email: email, email: email,
@ -140,46 +179,50 @@ def run(["gen" | rest]) do
dbname: dbname, dbname: dbname,
dbuser: dbuser, dbuser: dbuser,
dbpass: dbpass, dbpass: dbpass,
version: Pleroma.Mixfile.project() |> Keyword.get(:version),
secret: secret, secret: secret,
signing_salt: signing_salt, signing_salt: signing_salt,
web_push_public_key: Base.url_encode64(web_push_public_key, padding: false), web_push_public_key: Base.url_encode64(web_push_public_key, padding: false),
web_push_private_key: Base.url_encode64(web_push_private_key, padding: false) web_push_private_key: Base.url_encode64(web_push_private_key, padding: false),
db_configurable?: db_configurable?,
static_dir: static_dir,
uploads_dir: uploads_dir,
rum_enabled: rum_enabled
) )
result_psql = result_psql =
EEx.eval_file( EEx.eval_file(
"sample_psql.eex" |> Path.expand(__DIR__), template_dir <> "/sample_psql.eex",
dbname: dbname, dbname: dbname,
dbuser: dbuser, dbuser: dbuser,
dbpass: dbpass dbpass: dbpass,
rum_enabled: rum_enabled
) )
Mix.shell().info( shell_info(
"Writing config to #{config_path}. You should rename it to config/prod.secret.exs or config/dev.secret.exs." "Writing config to #{config_path}. You should rename it to config/prod.secret.exs or config/dev.secret.exs."
) )
File.write(config_path, result_config) File.write(config_path, result_config)
Mix.shell().info("Writing #{psql_path}.") shell_info("Writing #{psql_path}.")
File.write(psql_path, result_psql) File.write(psql_path, result_psql)
write_robots_txt(indexable) write_robots_txt(indexable, template_dir)
Mix.shell().info( shell_info(
"\n" <> "\n" <>
""" """
To get started: To get started:
1. Verify the contents of the generated files. 1. Verify the contents of the generated files.
2. Run `sudo -u postgres psql -f #{Common.escape_sh_path(psql_path)}`. 2. Run `sudo -u postgres psql -f #{escape_sh_path(psql_path)}`.
""" <> """ <>
if config_path in ["config/dev.secret.exs", "config/prod.secret.exs"] do if config_path in ["config/dev.secret.exs", "config/prod.secret.exs"] do
"" ""
else else
"3. Run `mv #{Common.escape_sh_path(config_path)} 'config/prod.secret.exs'`." "3. Run `mv #{escape_sh_path(config_path)} 'config/prod.secret.exs'`."
end end
) )
else else
Mix.shell().error( shell_error(
"The task would have overwritten the following files:\n" <> "The task would have overwritten the following files:\n" <>
(Enum.map(paths, &"- #{&1}\n") |> Enum.join("")) <> (Enum.map(paths, &"- #{&1}\n") |> Enum.join("")) <>
"Rerun with `--force` to overwrite them." "Rerun with `--force` to overwrite them."
@ -187,10 +230,10 @@ def run(["gen" | rest]) do
end end
end end
defp write_robots_txt(indexable) do defp write_robots_txt(indexable, template_dir) do
robots_txt = robots_txt =
EEx.eval_file( EEx.eval_file(
Path.expand("robots_txt.eex", __DIR__), template_dir <> "/robots_txt.eex",
indexable: indexable indexable: indexable
) )
@ -204,10 +247,10 @@ defp write_robots_txt(indexable) do
if File.exists?(robots_txt_path) do if File.exists?(robots_txt_path) do
File.cp!(robots_txt_path, "#{robots_txt_path}.bak") File.cp!(robots_txt_path, "#{robots_txt_path}.bak")
Mix.shell().info("Backing up existing robots.txt to #{robots_txt_path}.bak") shell_info("Backing up existing robots.txt to #{robots_txt_path}.bak")
end end
File.write(robots_txt_path, robots_txt) File.write(robots_txt_path, robots_txt)
Mix.shell().info("Writing #{robots_txt_path}.") shell_info("Writing #{robots_txt_path}.")
end end
end end

View file

@ -4,7 +4,7 @@
defmodule Mix.Tasks.Pleroma.Relay do defmodule Mix.Tasks.Pleroma.Relay do
use Mix.Task use Mix.Task
alias Mix.Tasks.Pleroma.Common import Mix.Pleroma
alias Pleroma.Web.ActivityPub.Relay alias Pleroma.Web.ActivityPub.Relay
@shortdoc "Manages remote relays" @shortdoc "Manages remote relays"
@ -24,24 +24,24 @@ defmodule Mix.Tasks.Pleroma.Relay do
Example: ``mix pleroma.relay unfollow https://example.org/relay`` Example: ``mix pleroma.relay unfollow https://example.org/relay``
""" """
def run(["follow", target]) do def run(["follow", target]) do
Common.start_pleroma() start_pleroma()
with {:ok, _activity} <- Relay.follow(target) do with {:ok, _activity} <- Relay.follow(target) do
# put this task to sleep to allow the genserver to push out the messages # put this task to sleep to allow the genserver to push out the messages
:timer.sleep(500) :timer.sleep(500)
else else
{:error, e} -> Mix.shell().error("Error while following #{target}: #{inspect(e)}") {:error, e} -> shell_error("Error while following #{target}: #{inspect(e)}")
end end
end end
def run(["unfollow", target]) do def run(["unfollow", target]) do
Common.start_pleroma() start_pleroma()
with {:ok, _activity} <- Relay.unfollow(target) do with {:ok, _activity} <- Relay.unfollow(target) do
# put this task to sleep to allow the genserver to push out the messages # put this task to sleep to allow the genserver to push out the messages
:timer.sleep(500) :timer.sleep(500)
else else
{:error, e} -> Mix.shell().error("Error while following #{target}: #{inspect(e)}") {:error, e} -> shell_error("Error while following #{target}: #{inspect(e)}")
end end
end end
end end

View file

@ -4,7 +4,7 @@
defmodule Mix.Tasks.Pleroma.Uploads do defmodule Mix.Tasks.Pleroma.Uploads do
use Mix.Task use Mix.Task
alias Mix.Tasks.Pleroma.Common import Mix.Pleroma
alias Pleroma.Upload alias Pleroma.Upload
alias Pleroma.Uploaders.Local alias Pleroma.Uploaders.Local
require Logger require Logger
@ -24,7 +24,7 @@ defmodule Mix.Tasks.Pleroma.Uploads do
""" """
def run(["migrate_local", target_uploader | args]) do def run(["migrate_local", target_uploader | args]) do
delete? = Enum.member?(args, "--delete") delete? = Enum.member?(args, "--delete")
Common.start_pleroma() start_pleroma()
local_path = Pleroma.Config.get!([Local, :uploads]) local_path = Pleroma.Config.get!([Local, :uploads])
uploader = Module.concat(Pleroma.Uploaders, target_uploader) uploader = Module.concat(Pleroma.Uploaders, target_uploader)
@ -38,10 +38,10 @@ def run(["migrate_local", target_uploader | args]) do
Pleroma.Config.put([Upload, :uploader], uploader) Pleroma.Config.put([Upload, :uploader], uploader)
end end
Mix.shell().info("Migrating files from local #{local_path} to #{to_string(uploader)}") shell_info("Migrating files from local #{local_path} to #{to_string(uploader)}")
if delete? do if delete? do
Mix.shell().info( shell_info(
"Attention: uploaded files will be deleted, hope you have backups! (--delete ; cancel with ^C)" "Attention: uploaded files will be deleted, hope you have backups! (--delete ; cancel with ^C)"
) )
@ -78,7 +78,7 @@ def run(["migrate_local", target_uploader | args]) do
|> Enum.filter(& &1) |> Enum.filter(& &1)
total_count = length(uploads) total_count = length(uploads)
Mix.shell().info("Found #{total_count} uploads") shell_info("Found #{total_count} uploads")
uploads uploads
|> Task.async_stream( |> Task.async_stream(
@ -90,7 +90,7 @@ def run(["migrate_local", target_uploader | args]) do
:ok :ok
error -> error ->
Mix.shell().error("failed to upload #{inspect(upload.path)}: #{inspect(error)}") shell_error("failed to upload #{inspect(upload.path)}: #{inspect(error)}")
end end
end, end,
timeout: 150_000 timeout: 150_000
@ -99,10 +99,10 @@ def run(["migrate_local", target_uploader | args]) do
# credo:disable-for-next-line Credo.Check.Warning.UnusedEnumOperation # credo:disable-for-next-line Credo.Check.Warning.UnusedEnumOperation
|> Enum.reduce(0, fn done, count -> |> Enum.reduce(0, fn done, count ->
count = count + length(done) count = count + length(done)
Mix.shell().info("Uploaded #{count}/#{total_count} files") shell_info("Uploaded #{count}/#{total_count} files")
count count
end) end)
Mix.shell().info("Done!") shell_info("Done!")
end end
end end
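A hedged usage sketch for the migration task above; `S3` stands for any uploader module under the `Pleroma.Uploaders` namespace and is an assumption here:
#   mix pleroma.uploads migrate_local S3
# Append --delete to remove the local copies once they have been uploaded:
#   mix pleroma.uploads migrate_local S3 --delete
Mix.Tasks.Pleroma.Uploads.run(["migrate_local", "S3"])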

View file

@ -5,9 +5,10 @@
defmodule Mix.Tasks.Pleroma.User do defmodule Mix.Tasks.Pleroma.User do
use Mix.Task use Mix.Task
import Ecto.Changeset import Ecto.Changeset
alias Mix.Tasks.Pleroma.Common import Mix.Pleroma
alias Pleroma.User alias Pleroma.User
alias Pleroma.UserInviteToken alias Pleroma.UserInviteToken
alias Pleroma.Web.OAuth
@shortdoc "Manages Pleroma users" @shortdoc "Manages Pleroma users"
@moduledoc """ @moduledoc """
@ -49,6 +50,10 @@ defmodule Mix.Tasks.Pleroma.User do
mix pleroma.user delete_activities NICKNAME mix pleroma.user delete_activities NICKNAME
## Sign user out from all applications (delete user's OAuth tokens and authorizations).
mix pleroma.user sign_out NICKNAME
## Deactivate or activate the user's account. ## Deactivate or activate the user's account.
mix pleroma.user toggle_activated NICKNAME mix pleroma.user toggle_activated NICKNAME
@ -115,7 +120,7 @@ def run(["new", nickname, email | rest]) do
admin? = Keyword.get(options, :admin, false) admin? = Keyword.get(options, :admin, false)
assume_yes? = Keyword.get(options, :assume_yes, false) assume_yes? = Keyword.get(options, :assume_yes, false)
Mix.shell().info(""" shell_info("""
A user will be created with the following information: A user will be created with the following information:
- nickname: #{nickname} - nickname: #{nickname}
- email: #{email} - email: #{email}
@ -128,10 +133,10 @@ def run(["new", nickname, email | rest]) do
- admin: #{if(admin?, do: "true", else: "false")} - admin: #{if(admin?, do: "true", else: "false")}
""") """)
proceed? = assume_yes? or Mix.shell().yes?("Continue?") proceed? = assume_yes? or shell_yes?("Continue?")
if proceed? do if proceed? do
Common.start_pleroma() start_pleroma()
params = %{ params = %{
nickname: nickname, nickname: nickname,
@ -145,7 +150,7 @@ def run(["new", nickname, email | rest]) do
changeset = User.register_changeset(%User{}, params, need_confirmation: false) changeset = User.register_changeset(%User{}, params, need_confirmation: false)
{:ok, _user} = User.register(changeset) {:ok, _user} = User.register(changeset)
Mix.shell().info("User #{nickname} created") shell_info("User #{nickname} created")
if moderator? do if moderator? do
run(["set", nickname, "--moderator"]) run(["set", nickname, "--moderator"])
@ -159,43 +164,43 @@ def run(["new", nickname, email | rest]) do
run(["reset_password", nickname]) run(["reset_password", nickname])
end end
else else
Mix.shell().info("User will not be created.") shell_info("User will not be created.")
end end
end end
def run(["rm", nickname]) do def run(["rm", nickname]) do
Common.start_pleroma() start_pleroma()
with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
User.perform(:delete, user) User.perform(:delete, user)
Mix.shell().info("User #{nickname} deleted.") shell_info("User #{nickname} deleted.")
else else
_ -> _ ->
Mix.shell().error("No local user #{nickname}") shell_error("No local user #{nickname}")
end end
end end
def run(["toggle_activated", nickname]) do def run(["toggle_activated", nickname]) do
Common.start_pleroma() start_pleroma()
with %User{} = user <- User.get_cached_by_nickname(nickname) do with %User{} = user <- User.get_cached_by_nickname(nickname) do
{:ok, user} = User.deactivate(user, !user.info.deactivated) {:ok, user} = User.deactivate(user, !user.info.deactivated)
Mix.shell().info( shell_info(
"Activation status of #{nickname}: #{if(user.info.deactivated, do: "de", else: "")}activated" "Activation status of #{nickname}: #{if(user.info.deactivated, do: "de", else: "")}activated"
) )
else else
_ -> _ ->
Mix.shell().error("No user #{nickname}") shell_error("No user #{nickname}")
end end
end end
def run(["reset_password", nickname]) do def run(["reset_password", nickname]) do
Common.start_pleroma() start_pleroma()
with %User{local: true} = user <- User.get_cached_by_nickname(nickname), with %User{local: true} = user <- User.get_cached_by_nickname(nickname),
{:ok, token} <- Pleroma.PasswordResetToken.create_token(user) do {:ok, token} <- Pleroma.PasswordResetToken.create_token(user) do
Mix.shell().info("Generated password reset token for #{user.nickname}") shell_info("Generated password reset token for #{user.nickname}")
IO.puts( IO.puts(
"URL: #{ "URL: #{
@ -208,15 +213,15 @@ def run(["reset_password", nickname]) do
) )
else else
_ -> _ ->
Mix.shell().error("No local user #{nickname}") shell_error("No local user #{nickname}")
end end
end end
def run(["unsubscribe", nickname]) do def run(["unsubscribe", nickname]) do
Common.start_pleroma() start_pleroma()
with %User{} = user <- User.get_cached_by_nickname(nickname) do with %User{} = user <- User.get_cached_by_nickname(nickname) do
Mix.shell().info("Deactivating #{user.nickname}") shell_info("Deactivating #{user.nickname}")
User.deactivate(user) User.deactivate(user)
{:ok, friends} = User.get_friends(user) {:ok, friends} = User.get_friends(user)
@ -224,7 +229,7 @@ def run(["unsubscribe", nickname]) do
Enum.each(friends, fn friend -> Enum.each(friends, fn friend ->
user = User.get_cached_by_id(user.id) user = User.get_cached_by_id(user.id)
Mix.shell().info("Unsubscribing #{friend.nickname} from #{user.nickname}") shell_info("Unsubscribing #{friend.nickname} from #{user.nickname}")
User.unfollow(user, friend) User.unfollow(user, friend)
end) end)
@ -233,16 +238,16 @@ def run(["unsubscribe", nickname]) do
user = User.get_cached_by_id(user.id) user = User.get_cached_by_id(user.id)
if Enum.empty?(user.following) do if Enum.empty?(user.following) do
Mix.shell().info("Successfully unsubscribed all followers from #{user.nickname}") shell_info("Successfully unsubscribed all followers from #{user.nickname}")
end end
else else
_ -> _ ->
Mix.shell().error("No user #{nickname}") shell_error("No user #{nickname}")
end end
end end
def run(["set", nickname | rest]) do def run(["set", nickname | rest]) do
Common.start_pleroma() start_pleroma()
{options, [], []} = {options, [], []} =
OptionParser.parse( OptionParser.parse(
@ -274,33 +279,33 @@ def run(["set", nickname | rest]) do
end end
else else
_ -> _ ->
Mix.shell().error("No local user #{nickname}") shell_error("No local user #{nickname}")
end end
end end
def run(["tag", nickname | tags]) do def run(["tag", nickname | tags]) do
Common.start_pleroma() start_pleroma()
with %User{} = user <- User.get_cached_by_nickname(nickname) do with %User{} = user <- User.get_cached_by_nickname(nickname) do
user = user |> User.tag(tags) user = user |> User.tag(tags)
Mix.shell().info("Tags of #{user.nickname}: #{inspect(tags)}") shell_info("Tags of #{user.nickname}: #{inspect(tags)}")
else else
_ -> _ ->
Mix.shell().error("Could not change user tags for #{nickname}") shell_error("Could not change user tags for #{nickname}")
end end
end end
def run(["untag", nickname | tags]) do def run(["untag", nickname | tags]) do
Common.start_pleroma() start_pleroma()
with %User{} = user <- User.get_cached_by_nickname(nickname) do with %User{} = user <- User.get_cached_by_nickname(nickname) do
user = user |> User.untag(tags) user = user |> User.untag(tags)
Mix.shell().info("Tags of #{user.nickname}: #{inspect(tags)}") shell_info("Tags of #{user.nickname}: #{inspect(tags)}")
else else
_ -> _ ->
Mix.shell().error("Could not change user tags for #{nickname}") shell_error("Could not change user tags for #{nickname}")
end end
end end
@ -321,14 +326,12 @@ def run(["invite" | rest]) do
end) end)
|> Enum.into(%{}) |> Enum.into(%{})
Common.start_pleroma() start_pleroma()
with {:ok, val} <- options[:expires_at], with {:ok, val} <- options[:expires_at],
options = Map.put(options, :expires_at, val), options = Map.put(options, :expires_at, val),
{:ok, invite} <- UserInviteToken.create_invite(options) do {:ok, invite} <- UserInviteToken.create_invite(options) do
Mix.shell().info( shell_info("Generated user invite token " <> String.replace(invite.invite_type, "_", " "))
"Generated user invite token " <> String.replace(invite.invite_type, "_", " ")
)
url = url =
Pleroma.Web.Router.Helpers.redirect_url( Pleroma.Web.Router.Helpers.redirect_url(
@ -340,14 +343,14 @@ def run(["invite" | rest]) do
IO.puts(url) IO.puts(url)
else else
error -> error ->
Mix.shell().error("Could not create invite token: #{inspect(error)}") shell_error("Could not create invite token: #{inspect(error)}")
end end
end end
def run(["invites"]) do def run(["invites"]) do
Common.start_pleroma() start_pleroma()
Mix.shell().info("Invites list:") shell_info("Invites list:")
UserInviteToken.list_invites() UserInviteToken.list_invites()
|> Enum.each(fn invite -> |> Enum.each(fn invite ->
@ -361,7 +364,7 @@ def run(["invites"]) do
" | Max use: #{max_use} Left use: #{max_use - invite.uses}" " | Max use: #{max_use} Left use: #{max_use - invite.uses}"
end end
Mix.shell().info( shell_info(
"ID: #{invite.id} | Token: #{invite.token} | Token type: #{invite.invite_type} | Used: #{ "ID: #{invite.id} | Token: #{invite.token} | Token type: #{invite.invite_type} | Used: #{
invite.used invite.used
}#{expire_info}#{using_info}" }#{expire_info}#{using_info}"
@ -370,40 +373,54 @@ def run(["invites"]) do
end end
def run(["revoke_invite", token]) do def run(["revoke_invite", token]) do
Common.start_pleroma() start_pleroma()
with {:ok, invite} <- UserInviteToken.find_by_token(token), with {:ok, invite} <- UserInviteToken.find_by_token(token),
{:ok, _} <- UserInviteToken.update_invite(invite, %{used: true}) do {:ok, _} <- UserInviteToken.update_invite(invite, %{used: true}) do
Mix.shell().info("Invite for token #{token} was revoked.") shell_info("Invite for token #{token} was revoked.")
else else
_ -> Mix.shell().error("No invite found with token #{token}") _ -> shell_error("No invite found with token #{token}")
end end
end end
def run(["delete_activities", nickname]) do def run(["delete_activities", nickname]) do
Common.start_pleroma() start_pleroma()
with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
{:ok, _} = User.delete_user_activities(user) {:ok, _} = User.delete_user_activities(user)
Mix.shell().info("User #{nickname} statuses deleted.") shell_info("User #{nickname} statuses deleted.")
else else
_ -> _ ->
Mix.shell().error("No local user #{nickname}") shell_error("No local user #{nickname}")
end end
end end
def run(["toggle_confirmed", nickname]) do def run(["toggle_confirmed", nickname]) do
Common.start_pleroma() start_pleroma()
with %User{} = user <- User.get_cached_by_nickname(nickname) do with %User{} = user <- User.get_cached_by_nickname(nickname) do
{:ok, user} = User.toggle_confirmation(user) {:ok, user} = User.toggle_confirmation(user)
message = if user.info.confirmation_pending, do: "needs", else: "doesn't need" message = if user.info.confirmation_pending, do: "needs", else: "doesn't need"
Mix.shell().info("#{nickname} #{message} confirmation.") shell_info("#{nickname} #{message} confirmation.")
else else
_ -> _ ->
Mix.shell().error("No local user #{nickname}") shell_error("No local user #{nickname}")
end
end
def run(["sign_out", nickname]) do
start_pleroma()
with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
OAuth.Token.delete_user_tokens(user)
OAuth.Authorization.delete_user_authorizations(user)
shell_info("#{nickname} signed out from all apps.")
else
_ ->
shell_error("No local user #{nickname}")
end end
end end
@ -416,7 +433,7 @@ defp set_moderator(user, value) do
{:ok, user} = User.update_and_set_cache(user_cng) {:ok, user} = User.update_and_set_cache(user_cng)
Mix.shell().info("Moderator status of #{user.nickname}: #{user.info.is_moderator}") shell_info("Moderator status of #{user.nickname}: #{user.info.is_moderator}")
user user
end end
@ -429,7 +446,7 @@ defp set_admin(user, value) do
{:ok, user} = User.update_and_set_cache(user_cng) {:ok, user} = User.update_and_set_cache(user_cng)
Mix.shell().info("Admin status of #{user.nickname}: #{user.info.is_admin}") shell_info("Admin status of #{user.nickname}: #{user.info.is_admin}")
user user
end end
@ -442,7 +459,7 @@ defp set_locked(user, value) do
{:ok, user} = User.update_and_set_cache(user_cng) {:ok, user} = User.update_and_set_cache(user_cng)
Mix.shell().info("Locked status of #{user.nickname}: #{user.info.locked}") shell_info("Locked status of #{user.nickname}: #{user.info.locked}")
user user
end end
end end
View file
@ -343,4 +343,6 @@ def restrict_deactivated_users(query) do
) )
) )
end end
defdelegate search(user, query), to: Pleroma.Activity.Search
end end
View file
@ -0,0 +1,81 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Activity.Search do
alias Pleroma.Activity
alias Pleroma.Object.Fetcher
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.ActivityPub.Visibility
import Ecto.Query
def search(user, search_query) do
index_type = if Pleroma.Config.get([:database, :rum_enabled]), do: :rum, else: :gin
Activity
|> Activity.with_preloaded_object()
|> Activity.restrict_deactivated_users()
|> restrict_public()
|> query_with(index_type, search_query)
|> maybe_restrict_local(user)
|> Repo.all()
|> maybe_fetch(user, search_query)
end
defp restrict_public(q) do
from([a, o] in q,
where: fragment("?->>'type' = 'Create'", a.data),
where: "https://www.w3.org/ns/activitystreams#Public" in a.recipients,
limit: 40
)
end
defp query_with(q, :gin, search_query) do
from([a, o] in q,
where:
fragment(
"to_tsvector('english', ?->>'content') @@ plainto_tsquery('english', ?)",
o.data,
^search_query
)
)
end
defp query_with(q, :rum, search_query) do
from([a, o] in q,
where:
fragment(
"? @@ plainto_tsquery('english', ?)",
o.fts_content,
^search_query
),
order_by: [fragment("? <=> now()::date", o.inserted_at)]
)
end
defp maybe_restrict_local(q, user) do
limit = Pleroma.Config.get([:instance, :limit_to_local_content], :unauthenticated)
case {limit, user} do
{:all, _} -> restrict_local(q)
{:unauthenticated, %User{}} -> q
{:unauthenticated, _} -> restrict_local(q)
{false, _} -> q
end
end
defp restrict_local(q), do: where(q, local: true)
defp maybe_fetch(activities, user, search_query) do
with true <- Regex.match?(~r/https?:/, search_query),
{:ok, object} <- Fetcher.fetch_object_from_id(search_query),
%Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
true <- Visibility.visible_for_user?(activity, user) do
activities ++ [activity]
else
_ -> activities
end
end
end
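Not part of the changeset itself: a minimal usage sketch for the new Pleroma.Activity.Search module, assuming an existing local user; the nickname and query string are placeholders.

    user = Pleroma.User.get_cached_by_nickname("lain")

    # Returns public Create activities whose object content matches the query.
    # When the query looks like a URL, the referenced object may be fetched
    # and appended to the results (see maybe_fetch/3 above).
    Pleroma.Activity.Search.search(user, "pleroma")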
View file
@ -4,7 +4,6 @@
defmodule Pleroma.Application do defmodule Pleroma.Application do
use Application use Application
import Supervisor.Spec
@name Mix.Project.config()[:name] @name Mix.Project.config()[:name]
@version Mix.Project.config()[:version] @version Mix.Project.config()[:version]
@ -31,21 +30,26 @@ def start(_type, _args) do
children = children =
[ [
# Start the Ecto repository # Start the Ecto repository
supervisor(Pleroma.Repo, []), %{id: Pleroma.Repo, start: {Pleroma.Repo, :start_link, []}, type: :supervisor},
worker(Pleroma.Emoji, []), %{id: Pleroma.Config.TransferTask, start: {Pleroma.Config.TransferTask, :start_link, []}},
worker(Pleroma.Captcha, []), %{id: Pleroma.Emoji, start: {Pleroma.Emoji, :start_link, []}},
worker( %{id: Pleroma.Captcha, start: {Pleroma.Captcha, :start_link, []}},
Cachex, %{
id: :cachex_used_captcha_cache,
start:
{Cachex, :start_link,
[ [
:used_captcha_cache, :used_captcha_cache,
[ [
ttl_interval: :timer.seconds(Pleroma.Config.get!([Pleroma.Captcha, :seconds_valid])) ttl_interval:
:timer.seconds(Pleroma.Config.get!([Pleroma.Captcha, :seconds_valid]))
] ]
], ]}
id: :cachex_used_captcha_cache },
), %{
worker( id: :cachex_user,
Cachex, start:
{Cachex, :start_link,
[ [
:user_cache, :user_cache,
[ [
@ -53,11 +57,12 @@ def start(_type, _args) do
ttl_interval: 1000, ttl_interval: 1000,
limit: 2500 limit: 2500
] ]
], ]}
id: :cachex_user },
), %{
worker( id: :cachex_object,
Cachex, start:
{Cachex, :start_link,
[ [
:object_cache, :object_cache,
[ [
@ -65,32 +70,35 @@ def start(_type, _args) do
ttl_interval: 1000, ttl_interval: 1000,
limit: 2500 limit: 2500
] ]
], ]}
id: :cachex_object },
), %{
worker( id: :cachex_rich_media,
Cachex, start:
{Cachex, :start_link,
[ [
:rich_media_cache, :rich_media_cache,
[ [
default_ttl: :timer.minutes(120), default_ttl: :timer.minutes(120),
limit: 5000 limit: 5000
] ]
], ]}
id: :cachex_rich_media },
), %{
worker( id: :cachex_scrubber,
Cachex, start:
{Cachex, :start_link,
[ [
:scrubber_cache, :scrubber_cache,
[ [
limit: 2500 limit: 2500
] ]
], ]}
id: :cachex_scrubber },
), %{
worker( id: :cachex_idem,
Cachex, start:
{Cachex, :start_link,
[ [
:idempotency_cache, :idempotency_cache,
[ [
@ -101,26 +109,49 @@ def start(_type, _args) do
), ),
limit: 2500 limit: 2500
] ]
], ]}
id: :cachex_idem },
), %{id: Pleroma.FlakeId, start: {Pleroma.FlakeId, :start_link, []}},
worker(Pleroma.FlakeId, []), %{
worker(Pleroma.ScheduledActivityWorker, []) id: Pleroma.ScheduledActivityWorker,
start: {Pleroma.ScheduledActivityWorker, :start_link, []}
}
] ++ ] ++
hackney_pool_children() ++ hackney_pool_children() ++
[ [
worker(Pleroma.Web.Federator.RetryQueue, []), %{
worker(Pleroma.Web.OAuth.Token.CleanWorker, []), id: Pleroma.Web.Federator.RetryQueue,
worker(Pleroma.Stats, []), start: {Pleroma.Web.Federator.RetryQueue, :start_link, []}
worker(Task, [&Pleroma.Web.Push.init/0], restart: :temporary, id: :web_push_init), },
worker(Task, [&Pleroma.Web.Federator.init/0], restart: :temporary, id: :federator_init) %{
id: Pleroma.Web.OAuth.Token.CleanWorker,
start: {Pleroma.Web.OAuth.Token.CleanWorker, :start_link, []}
},
%{
id: Pleroma.Stats,
start: {Pleroma.Stats, :start_link, []}
},
%{
id: :web_push_init,
start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
restart: :temporary
},
%{
id: :federator_init,
start: {Task, :start_link, [&Pleroma.Web.Federator.init/0]},
restart: :temporary
}
] ++ ] ++
streamer_child() ++ streamer_child() ++
chat_child() ++ chat_child() ++
[ [
# Start the endpoint when the application starts # Start the endpoint when the application starts
supervisor(Pleroma.Web.Endpoint, []), %{
worker(Pleroma.Gopher.Server, []) id: Pleroma.Web.Endpoint,
start: {Pleroma.Web.Endpoint, :start_link, []},
type: :supervisor
},
%{id: Pleroma.Gopher.Server, start: {Pleroma.Gopher.Server, :start_link, []}}
] ]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
@ -144,7 +175,6 @@ defp setup_instrumenters do
Pleroma.Repo.Instrumenter.setup() Pleroma.Repo.Instrumenter.setup()
end end
Prometheus.Registry.register_collector(:prometheus_process_collector)
Pleroma.Web.Endpoint.MetricsExporter.setup() Pleroma.Web.Endpoint.MetricsExporter.setup()
Pleroma.Web.Endpoint.PipelineInstrumenter.setup() Pleroma.Web.Endpoint.PipelineInstrumenter.setup()
Pleroma.Web.Endpoint.Instrumenter.setup() Pleroma.Web.Endpoint.Instrumenter.setup()
@ -157,24 +187,29 @@ def enabled_hackney_pools do
else else
[] []
end ++ end ++
if Pleroma.Config.get([Pleroma.Uploader, :proxy_remote]) do if Pleroma.Config.get([Pleroma.Upload, :proxy_remote]) do
[:upload] [:upload]
else else
[] []
end end
end end
if Mix.env() == :test do if Pleroma.Config.get(:env) == :test do
defp streamer_child, do: [] defp streamer_child, do: []
defp chat_child, do: [] defp chat_child, do: []
else else
defp streamer_child do defp streamer_child do
[worker(Pleroma.Web.Streamer, [])] [%{id: Pleroma.Web.Streamer, start: {Pleroma.Web.Streamer, :start_link, []}}]
end end
defp chat_child do defp chat_child do
if Pleroma.Config.get([:chat, :enabled]) do if Pleroma.Config.get([:chat, :enabled]) do
[worker(Pleroma.Web.ChatChannel.ChatChannelState, [])] [
%{
id: Pleroma.Web.ChatChannel.ChatChannelState,
start: {Pleroma.Web.ChatChannel.ChatChannelState, :start_link, []}
}
]
else else
[] []
end end
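The application file above drops `import Supervisor.Spec` and replaces the deprecated `worker/2`/`supervisor/2` helpers with plain child-specification maps. A minimal sketch of the equivalence, outside of this diff:

    # Deprecated helper form (required `import Supervisor.Spec`):
    #   worker(Pleroma.Emoji, [])
    #
    # Equivalent child-spec map, as used in the new code:
    emoji_spec = %{id: Pleroma.Emoji, start: {Pleroma.Emoji, :start_link, []}}

    # Child-spec maps are accepted directly by Supervisor.start_link/2:
    Supervisor.start_link([emoji_spec], strategy: :one_for_one)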
View file
@ -0,0 +1,42 @@
defmodule Pleroma.Config.TransferTask do
use Task
alias Pleroma.Web.AdminAPI.Config
def start_link do
load_and_update_env()
if Pleroma.Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Pleroma.Repo)
:ignore
end
def load_and_update_env do
if Pleroma.Config.get([:instance, :dynamic_configuration]) and
Ecto.Adapters.SQL.table_exists?(Pleroma.Repo, "config") do
Pleroma.Repo.all(Config)
|> Enum.each(&update_env(&1))
end
end
defp update_env(setting) do
try do
key =
if String.starts_with?(setting.key, "Pleroma.") do
"Elixir." <> setting.key
else
setting.key
end
Application.put_env(
:pleroma,
String.to_existing_atom(key),
Config.from_binary(setting.value)
)
rescue
e ->
require Logger
Logger.warn(
"updating env causes error, key: #{inspect(setting.key)}, error: #{inspect(e)}"
)
end
end
end
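Illustrative only, not part of the diff: how a database config row is expected to turn into an application env update by the task above, assuming the named module is already loaded so the atom exists and the stored value decodes to a keyword list.

    key = "Elixir." <> "Pleroma.Captcha"
    String.to_existing_atom(key)
    #=> Pleroma.Captcha

    # ...which update_env/1 then writes back into the application environment:
    Application.put_env(:pleroma, Pleroma.Captcha, enabled: false)
    Application.get_env(:pleroma, Pleroma.Captcha)
    #=> [enabled: false]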
View file
@ -49,7 +49,7 @@ def create_or_bump_for(activity, opts \\ []) do
with true <- Pleroma.Web.ActivityPub.Visibility.is_direct?(activity), with true <- Pleroma.Web.ActivityPub.Visibility.is_direct?(activity),
"Create" <- activity.data["type"], "Create" <- activity.data["type"],
object <- Pleroma.Object.normalize(activity), object <- Pleroma.Object.normalize(activity),
"Note" <- object.data["type"], true <- object.data["type"] in ["Note", "Question"],
ap_id when is_binary(ap_id) and byte_size(ap_id) > 0 <- object.data["context"] do ap_id when is_binary(ap_id) and byte_size(ap_id) > 0 <- object.data["context"] do
{:ok, conversation} = create_for_ap_id(ap_id) {:ok, conversation} = create_for_ap_id(ap_id)
View file
@ -59,10 +59,10 @@ def mark_as_unread(participation) do
def for_user(user, params \\ %{}) do def for_user(user, params \\ %{}) do
from(p in __MODULE__, from(p in __MODULE__,
where: p.user_id == ^user.id, where: p.user_id == ^user.id,
order_by: [desc: p.updated_at] order_by: [desc: p.updated_at],
preload: [conversation: [:users]]
) )
|> Pleroma.Pagination.fetch_paginated(params) |> Pleroma.Pagination.fetch_paginated(params)
|> Repo.preload(conversation: [:users])
end end
def for_user_with_last_activity_id(user, params \\ %{}) do def for_user_with_last_activity_id(user, params \\ %{}) do
@ -79,5 +79,6 @@ def for_user_with_last_activity_id(user, params \\ %{}) do
| last_activity_id: activity_id | last_activity_id: activity_id
} }
end) end)
|> Enum.filter(& &1.last_activity_id)
end end
end end
View file
@ -22,7 +22,6 @@ defmodule Pleroma.Emoji do
@ets __MODULE__.Ets @ets __MODULE__.Ets
@ets_options [:ordered_set, :protected, :named_table, {:read_concurrency, true}] @ets_options [:ordered_set, :protected, :named_table, {:read_concurrency, true}]
@groups Application.get_env(:pleroma, :emoji)[:groups]
@doc false @doc false
def start_link do def start_link do
@ -87,6 +86,8 @@ defp load do
"emoji" "emoji"
) )
emoji_groups = Pleroma.Config.get([:emoji, :groups])
case File.ls(emoji_dir_path) do case File.ls(emoji_dir_path) do
{:error, :enoent} -> {:error, :enoent} ->
# The custom emoji directory doesn't exist, # The custom emoji directory doesn't exist,
@ -97,14 +98,28 @@ defp load do
# There was some other error # There was some other error
Logger.error("Could not access the custom emoji directory #{emoji_dir_path}: #{e}") Logger.error("Could not access the custom emoji directory #{emoji_dir_path}: #{e}")
{:ok, packs} -> {:ok, results} ->
grouped =
Enum.group_by(results, fn file -> File.dir?(Path.join(emoji_dir_path, file)) end)
packs = grouped[true] || []
files = grouped[false] || []
# Print the packs we've found # Print the packs we've found
Logger.info("Found emoji packs: #{Enum.join(packs, ", ")}") Logger.info("Found emoji packs: #{Enum.join(packs, ", ")}")
if not Enum.empty?(files) do
Logger.warn(
"Found files in the emoji folder. These will be ignored, please move them to a subdirectory\nFound files: #{
Enum.join(files, ", ")
}"
)
end
emojis = emojis =
Enum.flat_map( Enum.flat_map(
packs, packs,
fn pack -> load_pack(Path.join(emoji_dir_path, pack)) end fn pack -> load_pack(Path.join(emoji_dir_path, pack), emoji_groups) end
) )
true = :ets.insert(@ets, emojis) true = :ets.insert(@ets, emojis)
@ -112,12 +127,12 @@ defp load do
# Compat thing for old custom emoji handling & default emoji, # Compat thing for old custom emoji handling & default emoji,
# it should run even if there are no emoji packs # it should run even if there are no emoji packs
shortcode_globs = Application.get_env(:pleroma, :emoji)[:shortcode_globs] || [] shortcode_globs = Pleroma.Config.get([:emoji, :shortcode_globs], [])
emojis = emojis =
(load_from_file("config/emoji.txt") ++ (load_from_file("config/emoji.txt", emoji_groups) ++
load_from_file("config/custom_emoji.txt") ++ load_from_file("config/custom_emoji.txt", emoji_groups) ++
load_from_globs(shortcode_globs)) load_from_globs(shortcode_globs, emoji_groups))
|> Enum.reject(fn value -> value == nil end) |> Enum.reject(fn value -> value == nil end)
true = :ets.insert(@ets, emojis) true = :ets.insert(@ets, emojis)
@ -125,13 +140,13 @@ defp load do
:ok :ok
end end
defp load_pack(pack_dir) do defp load_pack(pack_dir, emoji_groups) do
pack_name = Path.basename(pack_dir) pack_name = Path.basename(pack_dir)
emoji_txt = Path.join(pack_dir, "emoji.txt") emoji_txt = Path.join(pack_dir, "emoji.txt")
if File.exists?(emoji_txt) do if File.exists?(emoji_txt) do
load_from_file(emoji_txt) load_from_file(emoji_txt, emoji_groups)
else else
Logger.info( Logger.info(
"No emoji.txt found for pack \"#{pack_name}\", assuming all .png files are emoji" "No emoji.txt found for pack \"#{pack_name}\", assuming all .png files are emoji"
@ -141,7 +156,7 @@ defp load_pack(pack_dir) do
|> Enum.map(fn {shortcode, rel_file} -> |> Enum.map(fn {shortcode, rel_file} ->
filename = Path.join("/emoji/#{pack_name}", rel_file) filename = Path.join("/emoji/#{pack_name}", rel_file)
{shortcode, filename, [to_string(match_extra(@groups, filename))]} {shortcode, filename, [to_string(match_extra(emoji_groups, filename))]}
end) end)
end end
end end
@ -170,21 +185,21 @@ def find_all_emoji(dir, exts) do
|> Enum.filter(fn f -> Path.extname(f) in exts end) |> Enum.filter(fn f -> Path.extname(f) in exts end)
end end
defp load_from_file(file) do defp load_from_file(file, emoji_groups) do
if File.exists?(file) do if File.exists?(file) do
load_from_file_stream(File.stream!(file)) load_from_file_stream(File.stream!(file), emoji_groups)
else else
[] []
end end
end end
defp load_from_file_stream(stream) do defp load_from_file_stream(stream, emoji_groups) do
stream stream
|> Stream.map(&String.trim/1) |> Stream.map(&String.trim/1)
|> Stream.map(fn line -> |> Stream.map(fn line ->
case String.split(line, ~r/,\s*/) do case String.split(line, ~r/,\s*/) do
[name, file] -> [name, file] ->
{name, file, [to_string(match_extra(@groups, file))]} {name, file, [to_string(match_extra(emoji_groups, file))]}
[name, file | tags] -> [name, file | tags] ->
{name, file, tags} {name, file, tags}
@ -196,7 +211,7 @@ defp load_from_file_stream(stream) do
|> Enum.to_list() |> Enum.to_list()
end end
defp load_from_globs(globs) do defp load_from_globs(globs, emoji_groups) do
static_path = Path.join(:code.priv_dir(:pleroma), "static") static_path = Path.join(:code.priv_dir(:pleroma), "static")
paths = paths =
@ -207,7 +222,7 @@ defp load_from_globs(globs) do
|> Enum.concat() |> Enum.concat()
Enum.map(paths, fn path -> Enum.map(paths, fn path ->
tag = match_extra(@groups, Path.join("/", Path.relative_to(path, static_path))) tag = match_extra(emoji_groups, Path.join("/", Path.relative_to(path, static_path)))
shortcode = Path.basename(path, Path.extname(path)) shortcode = Path.basename(path, Path.extname(path))
external_path = Path.join("/", Path.relative_to(path, static_path)) external_path = Path.join("/", Path.relative_to(path, static_path))
{shortcode, external_path, [to_string(tag)]} {shortcode, external_path, [to_string(tag)]}
View file
@ -8,7 +8,7 @@ defmodule Pleroma.Formatter do
alias Pleroma.User alias Pleroma.User
alias Pleroma.Web.MediaProxy alias Pleroma.Web.MediaProxy
@safe_mention_regex ~r/^(\s*(?<mentions>@.+?\s+)+)(?<rest>.*)/s @safe_mention_regex ~r/^(\s*(?<mentions>(@.+?\s+){1,})+)(?<rest>.*)/s
@link_regex ~r"((?:http(s)?:\/\/)?[\w.-]+(?:\.[\w\.-]+)+[\w\-\._~%:/?#[\]@!\$&'\(\)\*\+,;=.]+)|[0-9a-z+\-\.]+:[0-9a-z$-_.+!*'(),]+"ui @link_regex ~r"((?:http(s)?:\/\/)?[\w.-]+(?:\.[\w\.-]+)+[\w\-\._~%:/?#[\]@!\$&'\(\)\*\+,;=.]+)|[0-9a-z+\-\.]+:[0-9a-z$-_.+!*'(),]+"ui
@markdown_characters_regex ~r/(`|\*|_|{|}|[|]|\(|\)|#|\+|-|\.|!)/ @markdown_characters_regex ~r/(`|\*|_|{|}|[|]|\(|\)|#|\+|-|\.|!)/
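For reference only (not part of the change): the tightened @safe_mention_regex still splits a post into the leading run of mentions and the remaining text, roughly as in this made-up example.

    regex = ~r/^(\s*(?<mentions>(@.+?\s+){1,})+)(?<rest>.*)/s

    Regex.named_captures(regex, "@alice @bob hello world")
    #=> %{"mentions" => "@alice @bob ", "rest" => "hello world"}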
View file
@ -0,0 +1,27 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Helpers.UriHelper do
def append_uri_params(uri, appended_params) do
uri = URI.parse(uri)
appended_params = for {k, v} <- appended_params, into: %{}, do: {to_string(k), v}
existing_params = URI.query_decoder(uri.query || "") |> Enum.into(%{})
updated_params_keys = Enum.uniq(Map.keys(existing_params) ++ Map.keys(appended_params))
updated_params =
for k <- updated_params_keys, do: {k, appended_params[k] || existing_params[k]}
uri
|> Map.put(:query, URI.encode_query(updated_params))
|> URI.to_string()
end
def append_param_if_present(%{} = params, param_name, param_value) do
if param_value do
Map.put(params, param_name, param_value)
else
params
end
end
end
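A quick sketch of the new helper in use (not part of the diff); the URL and parameters are illustrative.

    alias Pleroma.Helpers.UriHelper

    UriHelper.append_uri_params("https://example.com/api?page=1", limit: 20)
    #=> "https://example.com/api?page=1&limit=20"

    # An appended key takes precedence over an existing one:
    UriHelper.append_uri_params("https://example.com/api?page=1", page: 2)
    #=> "https://example.com/api?page=2"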
View file
@ -89,7 +89,7 @@ def extract_first_external_url(object, content) do
Cachex.fetch!(:scrubber_cache, key, fn _key -> Cachex.fetch!(:scrubber_cache, key, fn _key ->
result = result =
content content
|> Floki.filter_out("a.mention") |> Floki.filter_out("a.mention,a.hashtag,a[rel~=\"tag\"]")
|> Floki.attribute("a", "href") |> Floki.attribute("a", "href")
|> Enum.at(0) |> Enum.at(0)
@ -104,7 +104,6 @@ defmodule Pleroma.HTML.Scrubber.TwitterText do
paragraphs, breaks and links are allowed through the filter. paragraphs, breaks and links are allowed through the filter.
""" """
@markup Application.get_env(:pleroma, :markup)
@valid_schemes Pleroma.Config.get([:uri_schemes, :valid_schemes], []) @valid_schemes Pleroma.Config.get([:uri_schemes, :valid_schemes], [])
require HtmlSanitizeEx.Scrubber.Meta require HtmlSanitizeEx.Scrubber.Meta
@ -142,9 +141,7 @@ defmodule Pleroma.HTML.Scrubber.TwitterText do
Meta.allow_tag_with_these_attributes("span", []) Meta.allow_tag_with_these_attributes("span", [])
# allow inline images for custom emoji # allow inline images for custom emoji
@allow_inline_images Keyword.get(@markup, :allow_inline_images) if Pleroma.Config.get([:markup, :allow_inline_images]) do
if @allow_inline_images do
# restrict img tags to http/https only, because of MediaProxy. # restrict img tags to http/https only, because of MediaProxy.
Meta.allow_tag_with_uri_attributes("img", ["src"], ["http", "https"]) Meta.allow_tag_with_uri_attributes("img", ["src"], ["http", "https"])
@ -168,7 +165,6 @@ defmodule Pleroma.HTML.Scrubber.Default do
# credo:disable-for-previous-line # credo:disable-for-previous-line
# No idea how to fix this one… # No idea how to fix this one…
@markup Application.get_env(:pleroma, :markup)
@valid_schemes Pleroma.Config.get([:uri_schemes, :valid_schemes], []) @valid_schemes Pleroma.Config.get([:uri_schemes, :valid_schemes], [])
Meta.remove_cdata_sections_before_scrub() Meta.remove_cdata_sections_before_scrub()
@ -213,7 +209,7 @@ defmodule Pleroma.HTML.Scrubber.Default do
Meta.allow_tag_with_this_attribute_values("span", "class", ["h-card"]) Meta.allow_tag_with_this_attribute_values("span", "class", ["h-card"])
Meta.allow_tag_with_these_attributes("span", []) Meta.allow_tag_with_these_attributes("span", [])
@allow_inline_images Keyword.get(@markup, :allow_inline_images) @allow_inline_images Pleroma.Config.get([:markup, :allow_inline_images])
if @allow_inline_images do if @allow_inline_images do
# restrict img tags to http/https only, because of MediaProxy. # restrict img tags to http/https only, because of MediaProxy.
@ -228,9 +224,7 @@ defmodule Pleroma.HTML.Scrubber.Default do
]) ])
end end
@allow_tables Keyword.get(@markup, :allow_tables) if Pleroma.Config.get([:markup, :allow_tables]) do
if @allow_tables do
Meta.allow_tag_with_these_attributes("table", []) Meta.allow_tag_with_these_attributes("table", [])
Meta.allow_tag_with_these_attributes("tbody", []) Meta.allow_tag_with_these_attributes("tbody", [])
Meta.allow_tag_with_these_attributes("td", []) Meta.allow_tag_with_these_attributes("td", [])
@ -239,9 +233,7 @@ defmodule Pleroma.HTML.Scrubber.Default do
Meta.allow_tag_with_these_attributes("tr", []) Meta.allow_tag_with_these_attributes("tr", [])
end end
@allow_headings Keyword.get(@markup, :allow_headings) if Pleroma.Config.get([:markup, :allow_headings]) do
if @allow_headings do
Meta.allow_tag_with_these_attributes("h1", []) Meta.allow_tag_with_these_attributes("h1", [])
Meta.allow_tag_with_these_attributes("h2", []) Meta.allow_tag_with_these_attributes("h2", [])
Meta.allow_tag_with_these_attributes("h3", []) Meta.allow_tag_with_these_attributes("h3", [])
@ -249,9 +241,7 @@ defmodule Pleroma.HTML.Scrubber.Default do
Meta.allow_tag_with_these_attributes("h5", []) Meta.allow_tag_with_these_attributes("h5", [])
end end
@allow_fonts Keyword.get(@markup, :allow_fonts) if Pleroma.Config.get([:markup, :allow_fonts]) do
if @allow_fonts do
Meta.allow_tag_with_these_attributes("font", ["face"]) Meta.allow_tag_with_these_attributes("font", ["face"])
end end
View file
@ -32,9 +32,11 @@ def new(opts \\ []) do
defp hackney_options(opts) do defp hackney_options(opts) do
options = Keyword.get(opts, :adapter, []) options = Keyword.get(opts, :adapter, [])
adapter_options = Pleroma.Config.get([:http, :adapter], []) adapter_options = Pleroma.Config.get([:http, :adapter], [])
proxy_url = Pleroma.Config.get([:http, :proxy_url], nil)
@hackney_options @hackney_options
|> Keyword.merge(adapter_options) |> Keyword.merge(adapter_options)
|> Keyword.merge(options) |> Keyword.merge(options)
|> Keyword.merge(proxy: proxy_url)
end end
end end
View file
@ -65,12 +65,9 @@ defp process_sni_options(options, url) do
end end
def process_request_options(options) do def process_request_options(options) do
config = Application.get_env(:pleroma, :http, []) case Pleroma.Config.get([:http, :proxy_url]) do
proxy = Keyword.get(config, :proxy_url, nil)
case proxy do
nil -> options nil -> options
_ -> options ++ [proxy: proxy] proxy -> options ++ [proxy: proxy]
end end
end end
View file
@ -13,7 +13,7 @@ def set_consistently_unreachable(url_or_host),
def reachability_datetime_threshold do def reachability_datetime_threshold do
federation_reachability_timeout_days = federation_reachability_timeout_days =
Pleroma.Config.get(:instance)[:federation_reachability_timeout_days] || 0 Pleroma.Config.get([:instance, :federation_reachability_timeout_days], 0)
if federation_reachability_timeout_days > 0 do if federation_reachability_timeout_days > 0 do
NaiveDateTime.add( NaiveDateTime.add(
View file
@ -13,6 +13,8 @@ defmodule Pleroma.Notification do
alias Pleroma.User alias Pleroma.User
alias Pleroma.Web.CommonAPI alias Pleroma.Web.CommonAPI
alias Pleroma.Web.CommonAPI.Utils alias Pleroma.Web.CommonAPI.Utils
alias Pleroma.Web.Push
alias Pleroma.Web.Streamer
import Ecto.Query import Ecto.Query
import Ecto.Changeset import Ecto.Changeset
@ -125,10 +127,21 @@ def dismiss(%{id: user_id} = _user, id) do
end end
end end
def create_notifications(%Activity{data: %{"to" => _, "type" => type}} = activity) def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity) do
when type in ["Create", "Like", "Announce", "Follow"] do object = Object.normalize(activity)
users = get_notified_from_activity(activity)
unless object && object.data["type"] == "Answer" do
users = get_notified_from_activity(activity)
notifications = Enum.map(users, fn user -> create_notification(activity, user) end)
{:ok, notifications}
else
{:ok, []}
end
end
def create_notifications(%Activity{data: %{"to" => _, "type" => type}} = activity)
when type in ["Like", "Announce", "Follow"] do
users = get_notified_from_activity(activity)
notifications = Enum.map(users, fn user -> create_notification(activity, user) end) notifications = Enum.map(users, fn user -> create_notification(activity, user) end)
{:ok, notifications} {:ok, notifications}
end end
@ -140,8 +153,9 @@ def create_notification(%Activity{} = activity, %User{} = user) do
unless skip?(activity, user) do unless skip?(activity, user) do
notification = %Notification{user_id: user.id, activity: activity} notification = %Notification{user_id: user.id, activity: activity}
{:ok, notification} = Repo.insert(notification) {:ok, notification} = Repo.insert(notification)
Pleroma.Web.Streamer.stream("user", notification) Streamer.stream("user", notification)
Pleroma.Web.Push.send(notification) Streamer.stream("user:notification", notification)
Push.send(notification)
notification notification
end end
end end
@ -166,7 +180,16 @@ def get_notified_from_activity(
def get_notified_from_activity(_, _local_only), do: [] def get_notified_from_activity(_, _local_only), do: []
def skip?(activity, user) do def skip?(activity, user) do
[:self, :blocked, :local, :muted, :followers, :follows, :recently_followed] [
:self,
:blocked,
:muted,
:followers,
:follows,
:non_followers,
:non_follows,
:recently_followed
]
|> Enum.any?(&skip?(&1, activity, user)) |> Enum.any?(&skip?(&1, activity, user))
end end
@ -179,12 +202,6 @@ def skip?(:blocked, activity, user) do
User.blocks?(user, %{ap_id: actor}) User.blocks?(user, %{ap_id: actor})
end end
def skip?(:local, %{local: true}, %{info: %{notification_settings: %{"local" => false}}}),
do: true
def skip?(:local, %{local: false}, %{info: %{notification_settings: %{"remote" => false}}}),
do: true
def skip?(:muted, activity, user) do def skip?(:muted, activity, user) do
actor = activity.data["actor"] actor = activity.data["actor"]
@ -201,12 +218,32 @@ def skip?(
User.following?(follower, user) User.following?(follower, user)
end end
def skip?(
:non_followers,
activity,
%{info: %{notification_settings: %{"non_followers" => false}}} = user
) do
actor = activity.data["actor"]
follower = User.get_cached_by_ap_id(actor)
!User.following?(follower, user)
end
def skip?(:follows, activity, %{info: %{notification_settings: %{"follows" => false}}} = user) do def skip?(:follows, activity, %{info: %{notification_settings: %{"follows" => false}}} = user) do
actor = activity.data["actor"] actor = activity.data["actor"]
followed = User.get_cached_by_ap_id(actor) followed = User.get_cached_by_ap_id(actor)
User.following?(user, followed) User.following?(user, followed)
end end
def skip?(
:non_follows,
activity,
%{info: %{notification_settings: %{"non_follows" => false}}} = user
) do
actor = activity.data["actor"]
followed = User.get_cached_by_ap_id(actor)
!User.following?(user, followed)
end
def skip?(:recently_followed, %{data: %{"type" => "Follow"}} = activity, user) do def skip?(:recently_followed, %{data: %{"type" => "Follow"}} = activity, user) do
actor = activity.data["actor"] actor = activity.data["actor"]
View file
@ -35,6 +35,9 @@ def change(struct, params \\ %{}) do
|> unique_constraint(:ap_id, name: :objects_unique_apid_index) |> unique_constraint(:ap_id, name: :objects_unique_apid_index)
end end
def get_by_id(nil), do: nil
def get_by_id(id), do: Repo.get(Object, id)
def get_by_ap_id(nil), do: nil def get_by_ap_id(nil), do: nil
def get_by_ap_id(ap_id) do def get_by_ap_id(ap_id) do
@ -195,4 +198,34 @@ def decrease_replies_count(ap_id) do
_ -> {:error, "Not found"} _ -> {:error, "Not found"}
end end
end end
def increase_vote_count(ap_id, name) do
with %Object{} = object <- Object.normalize(ap_id),
"Question" <- object.data["type"] do
multiple = Map.has_key?(object.data, "anyOf")
options =
(object.data["anyOf"] || object.data["oneOf"] || [])
|> Enum.map(fn
%{"name" => ^name} = option ->
Kernel.update_in(option["replies"]["totalItems"], &(&1 + 1))
option ->
option
end)
data =
if multiple do
Map.put(object.data, "anyOf", options)
else
Map.put(object.data, "oneOf", options)
end
object
|> Object.change(%{data: data})
|> update_and_set_cache()
else
_ -> :noop
end
end
end end
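Outside the diff: a small sketch of the Kernel.update_in/2 call used by increase_vote_count/2 above, with a made-up poll option.

    option = %{"name" => "yes", "replies" => %{"totalItems" => 2}}

    Kernel.update_in(option["replies"]["totalItems"], &(&1 + 1))
    #=> %{"name" => "yes", "replies" => %{"totalItems" => 3}}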
View file
@ -1,3 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Object.Containment do defmodule Pleroma.Object.Containment do
@moduledoc """ @moduledoc """
This module contains some useful functions for containing objects to specific This module contains some useful functions for containing objects to specific
View file
@ -1,4 +1,5 @@
defmodule Pleroma.Object.Fetcher do defmodule Pleroma.Object.Fetcher do
alias Pleroma.HTTP
alias Pleroma.Object alias Pleroma.Object
alias Pleroma.Object.Containment alias Pleroma.Object.Containment
alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.ActivityPub.Transmogrifier
@ -6,8 +7,6 @@ defmodule Pleroma.Object.Fetcher do
require Logger require Logger
@httpoison Application.get_env(:pleroma, :httpoison)
defp reinject_object(data) do defp reinject_object(data) do
Logger.debug("Reinjecting object #{data["id"]}") Logger.debug("Reinjecting object #{data["id"]}")
@ -78,7 +77,7 @@ def fetch_and_contain_remote_object_from_id(id) do
with true <- String.starts_with?(id, "http"), with true <- String.starts_with?(id, "http"),
{:ok, %{body: body, status: code}} when code in 200..299 <- {:ok, %{body: body, status: code}} when code in 200..299 <-
@httpoison.get( HTTP.get(
id, id,
[{:Accept, "application/activity+json"}] [{:Accept, "application/activity+json"}]
), ),
@ -86,6 +85,9 @@ def fetch_and_contain_remote_object_from_id(id) do
:ok <- Containment.contain_origin_from_id(id, data) do :ok <- Containment.contain_origin_from_id(id, data) do
{:ok, data} {:ok, data}
else else
{:ok, %{status: code}} when code in [404, 410] ->
{:error, "Object has been deleted"}
e -> e ->
{:error, e} {:error, e}
end end
View file
@ -10,7 +10,7 @@ def init(options) do
end end
def call(conn, _opts) do def call(conn, _opts) do
if Keyword.get(Application.get_env(:pleroma, :instance), :federating) do if Pleroma.Config.get([:instance, :federating]) do
conn conn
else else
conn conn
View file
@ -56,14 +56,14 @@ defp csp_string do
connect_src = "connect-src 'self' #{static_url} #{websocket_url}" connect_src = "connect-src 'self' #{static_url} #{websocket_url}"
connect_src = connect_src =
if Mix.env() == :dev do if Pleroma.Config.get(:env) == :dev do
connect_src <> " http://localhost:3035/" connect_src <> " http://localhost:3035/"
else else
connect_src connect_src
end end
script_src = script_src =
if Mix.env() == :dev do if Pleroma.Config.get(:env) == :dev do
"script-src 'self' 'unsafe-eval'" "script-src 'self' 'unsafe-eval'"
else else
"script-src 'self'" "script-src 'self'"
View file
@ -1,36 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Plugs.RateLimitPlug do
import Phoenix.Controller, only: [json: 2]
import Plug.Conn
def init(opts), do: opts
def call(conn, opts) do
enabled? = Pleroma.Config.get([:app_account_creation, :enabled])
case check_rate(conn, Map.put(opts, :enabled, enabled?)) do
{:ok, _count} -> conn
{:error, _count} -> render_error(conn)
%Plug.Conn{} = conn -> conn
end
end
defp check_rate(conn, %{enabled: true} = opts) do
max_requests = opts[:max_requests]
bucket_name = conn.remote_ip |> Tuple.to_list() |> Enum.join(".")
ExRated.check_rate(bucket_name, opts[:interval] * 1000, max_requests)
end
defp check_rate(conn, _), do: conn
defp render_error(conn) do
conn
|> put_status(:forbidden)
|> json(%{error: "Rate limit exceeded."})
|> halt()
end
end
View file
@ -0,0 +1,94 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Plugs.RateLimiter do
@moduledoc """
## Configuration
A keyword list of rate limiters, where a key is a limiter name and the value is the limiter configuration. The basic configuration is a tuple where:
* The first element: `scale` (Integer). The time scale in milliseconds.
* The second element: `limit` (Integer). How many requests to limit in the time scale provided.
It is also possible to have different limits for unauthenticated and authenticated users: the keyword value must be a list of two tuples where the first one is the config for unauthenticated users and the second one is for authenticated users.
To disable a limiter set its value to `nil`.
### Example
config :pleroma, :rate_limit,
one: {1000, 10},
two: [{10_000, 10}, {10_000, 50}],
foobar: nil
Here we have three limiters:
* `one`, which limits to 10req/1s
* `two`, which has two limits: 10req/10s for unauthenticated users and 50req/10s for authenticated users
* `foobar` which is disabled
## Usage
Inside a controller:
plug(Pleroma.Plugs.RateLimiter, :one when action == :one)
plug(Pleroma.Plugs.RateLimiter, :two when action in [:two, :three])
or inside a router pipeline:
pipeline :api do
...
plug(Pleroma.Plugs.RateLimiter, :one)
...
end
"""
import Phoenix.Controller, only: [json: 2]
import Plug.Conn
alias Pleroma.User
def init(limiter_name) do
case Pleroma.Config.get([:rate_limit, limiter_name]) do
nil -> nil
config -> {limiter_name, config}
end
end
# do not limit if there is no limiter configuration
def call(conn, nil), do: conn
def call(conn, opts) do
case check_rate(conn, opts) do
{:ok, _count} -> conn
{:error, _count} -> render_error(conn)
end
end
defp check_rate(%{assigns: %{user: %User{id: user_id}}}, {limiter_name, [_, {scale, limit}]}) do
ExRated.check_rate("#{limiter_name}:#{user_id}", scale, limit)
end
defp check_rate(conn, {limiter_name, [{scale, limit} | _]}) do
ExRated.check_rate("#{limiter_name}:#{ip(conn)}", scale, limit)
end
defp check_rate(conn, {limiter_name, {scale, limit}}) do
check_rate(conn, {limiter_name, [{scale, limit}]})
end
def ip(%{remote_ip: remote_ip}) do
remote_ip
|> Tuple.to_list()
|> Enum.join(".")
end
defp render_error(conn) do
conn
|> put_status(:too_many_requests)
|> json(%{error: "Throttled"})
|> halt()
end
end
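A hedged configuration sketch for the plug above; the `:search` limiter name and the numbers are assumptions for illustration, not actual Pleroma defaults.

    # config/config.exs
    config :pleroma, :rate_limit,
      search: [{1000, 10}, {1000, 30}]

    # init/1 resolves the limiter from config (nil disables it):
    Pleroma.Plugs.RateLimiter.init(:search)
    #=> {:search, [{1000, 10}, {1000, 30}]}

    # In a controller, unauthenticated clients then get 10 req/s and
    # authenticated users 30 req/s on the :search action:
    plug(Pleroma.Plugs.RateLimiter, :search when action == :search)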
View file
@ -36,7 +36,7 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
conn conn
end end
config = Pleroma.Config.get([Pleroma.Upload]) config = Pleroma.Config.get(Pleroma.Upload)
with uploader <- Keyword.fetch!(config, :uploader), with uploader <- Keyword.fetch!(config, :uploader),
proxy_remote = Keyword.get(config, :proxy_remote, false), proxy_remote = Keyword.get(config, :proxy_remote, false),

View file
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReleaseTasks do
@repo Pleroma.Repo
def run(args) do
[task | args] = String.split(args)
case task do
"migrate" -> migrate(args)
"create" -> create()
"rollback" -> rollback(args)
task -> mix_task(task, args)
end
end
defp mix_task(task, args) do
Application.load(:pleroma)
{:ok, modules} = :application.get_key(:pleroma, :modules)
module =
Enum.find(modules, fn module ->
module = Module.split(module)
match?(["Mix", "Tasks", "Pleroma" | _], module) and
String.downcase(List.last(module)) == task
end)
if module do
module.run(args)
else
IO.puts("The task #{task} does not exist")
end
end
def migrate(args) do
Mix.Tasks.Pleroma.Ecto.Migrate.run(args)
end
def rollback(args) do
Mix.Tasks.Pleroma.Ecto.Rollback.run(args)
end
def create do
Application.load(:pleroma)
case @repo.__adapter__.storage_up(@repo.config) do
:ok ->
IO.puts("The database for #{inspect(@repo)} has been created")
{:error, :already_up} ->
IO.puts("The database for #{inspect(@repo)} has already been created")
{:error, term} when is_binary(term) ->
IO.puts(:stderr, "The database for #{inspect(@repo)} couldn't be created: #{term}")
{:error, term} ->
IO.puts(
:stderr,
"The database for #{inspect(@repo)} couldn't be created: #{inspect(term)}"
)
end
end
end
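Not part of the diff: a sketch of driving the dispatcher above from an OTP release, assuming the release executable is named `pleroma`.

    # From the host shell (assumed release name and path):
    #   ./bin/pleroma eval 'Pleroma.ReleaseTasks.run("migrate")'

    # run/1 splits the string and dispatches, so the call above is equivalent to:
    Pleroma.ReleaseTasks.run("migrate")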
View file
@ -3,6 +3,8 @@
# SPDX-License-Identifier: AGPL-3.0-only # SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.ReverseProxy do defmodule Pleroma.ReverseProxy do
alias Pleroma.HTTP
@keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since) ++ @keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since) ++
~w(if-unmodified-since if-none-match if-range range) ~w(if-unmodified-since if-none-match if-range range)
@resp_cache_headers ~w(etag date last-modified cache-control) @resp_cache_headers ~w(etag date last-modified cache-control)
@ -59,9 +61,6 @@ defmodule Pleroma.ReverseProxy do
* `http`: options for [hackney](https://github.com/benoitc/hackney). * `http`: options for [hackney](https://github.com/benoitc/hackney).
""" """
@hackney Application.get_env(:pleroma, :hackney, :hackney)
@httpoison Application.get_env(:pleroma, :httpoison, HTTPoison)
@default_hackney_options [] @default_hackney_options []
@inline_content_types [ @inline_content_types [
@ -97,7 +96,7 @@ def call(conn = %{method: method}, url, opts) when method in @methods do
hackney_opts = hackney_opts =
@default_hackney_options @default_hackney_options
|> Keyword.merge(Keyword.get(opts, :http, [])) |> Keyword.merge(Keyword.get(opts, :http, []))
|> @httpoison.process_request_options() |> HTTP.process_request_options()
req_headers = build_req_headers(conn.req_headers, opts) req_headers = build_req_headers(conn.req_headers, opts)
@ -147,7 +146,7 @@ defp request(method, url, headers, hackney_opts) do
Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}") Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
method = method |> String.downcase() |> String.to_existing_atom() method = method |> String.downcase() |> String.to_existing_atom()
case @hackney.request(method, url, headers, "", hackney_opts) do case hackney().request(method, url, headers, "", hackney_opts) do
{:ok, code, headers, client} when code in @valid_resp_codes -> {:ok, code, headers, client} when code in @valid_resp_codes ->
{:ok, code, downcase_headers(headers), client} {:ok, code, downcase_headers(headers), client}
@ -197,7 +196,7 @@ defp chunk_reply(conn, client, opts, sent_so_far, duration) do
duration, duration,
Keyword.get(opts, :max_read_duration, @max_read_duration) Keyword.get(opts, :max_read_duration, @max_read_duration)
), ),
{:ok, data} <- @hackney.stream_body(client), {:ok, data} <- hackney().stream_body(client),
{:ok, duration} <- increase_read_duration(duration), {:ok, duration} <- increase_read_duration(duration),
sent_so_far = sent_so_far + byte_size(data), sent_so_far = sent_so_far + byte_size(data),
:ok <- body_size_constraint(sent_so_far, Keyword.get(opts, :max_body_size)), :ok <- body_size_constraint(sent_so_far, Keyword.get(opts, :max_body_size)),
@ -378,4 +377,6 @@ defp increase_read_duration({previous_duration, started})
defp increase_read_duration(_) do defp increase_read_duration(_) do
{:ok, :no_duration_limit, :no_duration_limit} {:ok, :no_duration_limit, :no_duration_limit}
end end
defp hackney, do: Pleroma.Config.get(:hackney, :hackney)
end end
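The hackney client module is now looked up at runtime instead of being baked in via a module attribute, so tests can swap it through config. A sketch; the mock module name is an assumption.

    Pleroma.Config.get(:hackney, :hackney)
    #=> :hackney   # default when nothing is configured

    # config :pleroma, :hackney, MyApp.HackneyMock   (assumed test-only override)
    # would make hackney/0 above return MyApp.HackneyMock instead.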
View file
@ -4,11 +4,10 @@
defmodule Pleroma.Uploaders.MDII do defmodule Pleroma.Uploaders.MDII do
alias Pleroma.Config alias Pleroma.Config
alias Pleroma.HTTP
@behaviour Pleroma.Uploaders.Uploader @behaviour Pleroma.Uploaders.Uploader
@httpoison Application.get_env(:pleroma, :httpoison)
# MDII-hosted images are never passed through the MediaPlug; only local media. # MDII-hosted images are never passed through the MediaPlug; only local media.
# Delegate to Pleroma.Uploaders.Local # Delegate to Pleroma.Uploaders.Local
def get_file(file) do def get_file(file) do
@ -25,7 +24,7 @@ def put_file(upload) do
query = "#{cgi}?#{extension}" query = "#{cgi}?#{extension}"
with {:ok, %{status: 200, body: body}} <- with {:ok, %{status: 200, body: body}} <-
@httpoison.post(query, file_data, [], adapter: [pool: :default]) do HTTP.post(query, file_data, [], adapter: [pool: :default]) do
remote_file_name = String.split(body) |> List.first() remote_file_name = String.split(body) |> List.first()
public_url = "#{files}/#{remote_file_name}.#{extension}" public_url = "#{files}/#{remote_file_name}.#{extension}"
{:ok, {:url, public_url}} {:ok, {:url, public_url}}
View file
@ -324,14 +324,6 @@ def maybe_direct_follow(%User{} = follower, %User{} = followed) do
end end
end end
def maybe_follow(%User{} = follower, %User{info: _info} = followed) do
if not following?(follower, followed) do
follow(follower, followed)
else
{:ok, follower}
end
end
@doc "A mass follow for local users. Respects blocks in both directions but does not create activities." @doc "A mass follow for local users. Respects blocks in both directions but does not create activities."
@spec follow_all(User.t(), list(User.t())) :: {atom(), User.t()} @spec follow_all(User.t(), list(User.t())) :: {atom(), User.t()}
def follow_all(follower, followeds) do def follow_all(follower, followeds) do
@ -366,14 +358,12 @@ def follow_all(follower, followeds) do
end end
def follow(%User{} = follower, %User{info: info} = followed) do def follow(%User{} = follower, %User{info: info} = followed) do
user_config = Application.get_env(:pleroma, :user) deny_follow_blocked = Pleroma.Config.get([:user, :deny_follow_blocked])
deny_follow_blocked = Keyword.get(user_config, :deny_follow_blocked)
ap_followers = followed.follower_address ap_followers = followed.follower_address
cond do cond do
following?(follower, followed) or info.deactivated -> info.deactivated ->
{:error, "Could not follow user: #{followed.nickname} is already on your list."} {:error, "Could not follow user: You are deactivated."}
deny_follow_blocked and blocks?(followed, follower) -> deny_follow_blocked and blocks?(followed, follower) ->
{:error, "Could not follow user: #{followed.nickname} blocked you."} {:error, "Could not follow user: #{followed.nickname} blocked you."}
@ -737,122 +727,6 @@ def get_recipients_from_activity(%Activity{recipients: to}) do
|> Repo.all() |> Repo.all()
end end
def search(query, resolve \\ false, for_user \\ nil) do
# Strip the beginning @ off if there is a query
query = String.trim_leading(query, "@")
if resolve, do: get_or_fetch(query)
{:ok, results} =
Repo.transaction(fn ->
Ecto.Adapters.SQL.query(Repo, "select set_limit(0.25)", [])
Repo.all(search_query(query, for_user))
end)
results
end
def search_query(query, for_user) do
fts_subquery = fts_search_subquery(query)
trigram_subquery = trigram_search_subquery(query)
union_query = from(s in trigram_subquery, union_all: ^fts_subquery)
distinct_query = from(s in subquery(union_query), order_by: s.search_type, distinct: s.id)
from(s in subquery(boost_search_rank_query(distinct_query, for_user)),
order_by: [desc: s.search_rank],
limit: 20
)
end
defp boost_search_rank_query(query, nil), do: query
defp boost_search_rank_query(query, for_user) do
friends_ids = get_friends_ids(for_user)
followers_ids = get_followers_ids(for_user)
from(u in subquery(query),
select_merge: %{
search_rank:
fragment(
"""
CASE WHEN (?) THEN (?) * 1.3
WHEN (?) THEN (?) * 1.2
WHEN (?) THEN (?) * 1.1
ELSE (?) END
""",
u.id in ^friends_ids and u.id in ^followers_ids,
u.search_rank,
u.id in ^friends_ids,
u.search_rank,
u.id in ^followers_ids,
u.search_rank,
u.search_rank
)
}
)
end
defp fts_search_subquery(term, query \\ User) do
processed_query =
term
|> String.replace(~r/\W+/, " ")
|> String.trim()
|> String.split()
|> Enum.map(&(&1 <> ":*"))
|> Enum.join(" | ")
from(
u in query,
select_merge: %{
search_type: ^0,
search_rank:
fragment(
"""
ts_rank_cd(
setweight(to_tsvector('simple', regexp_replace(?, '\\W', ' ', 'g')), 'A') ||
setweight(to_tsvector('simple', regexp_replace(coalesce(?, ''), '\\W', ' ', 'g')), 'B'),
to_tsquery('simple', ?),
32
)
""",
u.nickname,
u.name,
^processed_query
)
},
where:
fragment(
"""
(setweight(to_tsvector('simple', regexp_replace(?, '\\W', ' ', 'g')), 'A') ||
setweight(to_tsvector('simple', regexp_replace(coalesce(?, ''), '\\W', ' ', 'g')), 'B')) @@ to_tsquery('simple', ?)
""",
u.nickname,
u.name,
^processed_query
)
)
|> restrict_deactivated()
end
defp trigram_search_subquery(term) do
from(
u in User,
select_merge: %{
# ^1 gives 'Postgrex expected a binary, got 1' for some weird reason
search_type: fragment("?", 1),
search_rank:
fragment(
"similarity(?, trim(? || ' ' || coalesce(?, '')))",
^term,
u.nickname,
u.name
)
},
where: fragment("trim(? || ' ' || coalesce(?, '')) % ?", u.nickname, u.name, ^term)
)
|> restrict_deactivated()
end
def mute(muter, %User{ap_id: ap_id}) do def mute(muter, %User{ap_id: ap_id}) do
info_cng = info_cng =
muter.info muter.info
@ -1162,9 +1036,7 @@ def html_filter_policy(%User{info: %{no_rich_text: true}}) do
Pleroma.HTML.Scrubber.TwitterText Pleroma.HTML.Scrubber.TwitterText
end end
@default_scrubbers Pleroma.Config.get([:markup, :scrub_policy]) def html_filter_policy(_), do: Pleroma.Config.get([:markup, :scrub_policy])
def html_filter_policy(_), do: @default_scrubbers
def fetch_by_ap_id(ap_id) do def fetch_by_ap_id(ap_id) do
ap_try = ActivityPub.make_user_from_ap_id(ap_id) ap_try = ActivityPub.make_user_from_ap_id(ap_id)
@ -1443,4 +1315,14 @@ def ensure_keys_present(user) do
update_and_set_cache(cng) update_and_set_cache(cng)
end end
end end
def get_ap_ids_by_nicknames(nicknames) do
from(u in User,
where: u.nickname in ^nicknames,
select: u.ap_id
)
|> Repo.all()
end
defdelegate search(query, opts \\ []), to: User.Search
end end
View file
@ -42,14 +42,21 @@ defmodule Pleroma.User.Info do
field(:hide_follows, :boolean, default: false) field(:hide_follows, :boolean, default: false)
field(:hide_favorites, :boolean, default: true) field(:hide_favorites, :boolean, default: true)
field(:pinned_activities, {:array, :string}, default: []) field(:pinned_activities, {:array, :string}, default: [])
field(:flavour, :string, default: nil)
field(:mascot, :map, default: nil) field(:mascot, :map, default: nil)
field(:emoji, {:array, :map}, default: []) field(:emoji, {:array, :map}, default: [])
field(:pleroma_settings_store, :map, default: %{})
field(:notification_settings, :map, field(:notification_settings, :map,
default: %{"remote" => true, "local" => true, "followers" => true, "follows" => true} default: %{
"followers" => true,
"follows" => true,
"non_follows" => true,
"non_followers" => true
}
) )
field(:skip_thread_containment, :boolean, default: false)
# Found in the wild # Found in the wild
# ap_id -> Where is this used? # ap_id -> Where is this used?
# bio -> Where is this used? # bio -> Where is this used?
@ -68,10 +75,15 @@ def set_activation_status(info, deactivated) do
end end
def update_notification_settings(info, settings) do def update_notification_settings(info, settings) do
settings =
settings
|> Enum.map(fn {k, v} -> {k, v in [true, "true", "True", "1"]} end)
|> Map.new()
notification_settings = notification_settings =
info.notification_settings info.notification_settings
|> Map.merge(settings) |> Map.merge(settings)
|> Map.take(["remote", "local", "followers", "follows"]) |> Map.take(["followers", "follows", "non_follows", "non_followers"])
params = %{notification_settings: notification_settings} params = %{notification_settings: notification_settings}
@ -209,7 +221,9 @@ def profile_update(info, params) do
:hide_followers, :hide_followers,
:hide_favorites, :hide_favorites,
:background, :background,
:show_role :show_role,
:skip_thread_containment,
:pleroma_settings_store
]) ])
end end
@ -241,14 +255,6 @@ def mastodon_settings_update(info, settings) do
|> validate_required([:settings]) |> validate_required([:settings])
end end
def mastodon_flavour_update(info, flavour) do
params = %{flavour: flavour}
info
|> cast(params, [:flavour])
|> validate_required([:flavour])
end
def mascot_update(info, url) do def mascot_update(info, url) do
params = %{mascot: url} params = %{mascot: url}
191
lib/pleroma/user/search.ex Normal file
View file
@ -0,0 +1,191 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.User.Search do
alias Pleroma.Repo
alias Pleroma.User
import Ecto.Query
@similarity_threshold 0.25
@limit 20
def search(query_string, opts \\ []) do
resolve = Keyword.get(opts, :resolve, false)
following = Keyword.get(opts, :following, false)
result_limit = Keyword.get(opts, :limit, @limit)
offset = Keyword.get(opts, :offset, 0)
for_user = Keyword.get(opts, :for_user)
# Strip the beginning @ off if there is a query
query_string = String.trim_leading(query_string, "@")
maybe_resolve(resolve, for_user, query_string)
{:ok, results} =
Repo.transaction(fn ->
Ecto.Adapters.SQL.query(
Repo,
"select set_limit(#{@similarity_threshold})",
[]
)
query_string
|> search_query(for_user, following)
|> paginate(result_limit, offset)
|> Repo.all()
end)
results
end
defp search_query(query_string, for_user, following) do
for_user
|> base_query(following)
|> search_subqueries(query_string)
|> union_subqueries
|> distinct_query()
|> boost_search_rank_query(for_user)
|> subquery()
|> order_by(desc: :search_rank)
|> maybe_restrict_local(for_user)
end
defp base_query(_user, false), do: User
defp base_query(user, true), do: User.get_followers_query(user)
defp paginate(query, limit, offset) do
from(q in query, limit: ^limit, offset: ^offset)
end
defp union_subqueries({fts_subquery, trigram_subquery}) do
from(s in trigram_subquery, union_all: ^fts_subquery)
end
defp search_subqueries(base_query, query_string) do
{
fts_search_subquery(base_query, query_string),
trigram_search_subquery(base_query, query_string)
}
end
defp distinct_query(q) do
from(s in subquery(q), order_by: s.search_type, distinct: s.id)
end
defp maybe_resolve(true, user, query) do
case {limit(), user} do
{:all, _} -> :noop
{:unauthenticated, %User{}} -> User.get_or_fetch(query)
{:unauthenticated, _} -> :noop
{false, _} -> User.get_or_fetch(query)
end
end
defp maybe_resolve(_, _, _), do: :noop
defp maybe_restrict_local(q, user) do
case {limit(), user} do
{:all, _} -> restrict_local(q)
{:unauthenticated, %User{}} -> q
{:unauthenticated, _} -> restrict_local(q)
{false, _} -> q
end
end
defp limit, do: Pleroma.Config.get([:instance, :limit_to_local_content], :unauthenticated)
defp restrict_local(q), do: where(q, [u], u.local == true)
defp boost_search_rank_query(query, nil), do: query
defp boost_search_rank_query(query, for_user) do
friends_ids = User.get_friends_ids(for_user)
followers_ids = User.get_followers_ids(for_user)
from(u in subquery(query),
select_merge: %{
search_rank:
fragment(
"""
CASE WHEN (?) THEN 0.5 + (?) * 1.3
WHEN (?) THEN 0.5 + (?) * 1.2
WHEN (?) THEN (?) * 1.1
ELSE (?) END
""",
u.id in ^friends_ids and u.id in ^followers_ids,
u.search_rank,
u.id in ^friends_ids,
u.search_rank,
u.id in ^followers_ids,
u.search_rank,
u.search_rank
)
}
)
end
@spec fts_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t()
defp fts_search_subquery(query, term) do
processed_query =
term
|> String.replace(~r/\W+/, " ")
|> String.trim()
|> String.split()
|> Enum.map(&(&1 <> ":*"))
|> Enum.join(" | ")
from(
u in query,
select_merge: %{
search_type: ^0,
search_rank:
fragment(
"""
ts_rank_cd(
setweight(to_tsvector('simple', regexp_replace(?, '\\W', ' ', 'g')), 'A') ||
setweight(to_tsvector('simple', regexp_replace(coalesce(?, ''), '\\W', ' ', 'g')), 'B'),
to_tsquery('simple', ?),
32
)
""",
u.nickname,
u.name,
^processed_query
)
},
where:
fragment(
"""
(setweight(to_tsvector('simple', regexp_replace(?, '\\W', ' ', 'g')), 'A') ||
setweight(to_tsvector('simple', regexp_replace(coalesce(?, ''), '\\W', ' ', 'g')), 'B')) @@ to_tsquery('simple', ?)
""",
u.nickname,
u.name,
^processed_query
)
)
|> User.restrict_deactivated()
end
@spec trigram_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t()
defp trigram_search_subquery(query, term) do
from(
u in query,
select_merge: %{
# ^1 gives 'Postgrex expected a binary, got 1' for some weird reason
search_type: fragment("?", 1),
search_rank:
fragment(
"similarity(?, trim(? || ' ' || coalesce(?, '')))",
^term,
u.nickname,
u.name
)
},
where: fragment("trim(? || ' ' || coalesce(?, '')) % ?", u.nickname, u.name, ^term)
)
|> User.restrict_deactivated()
end
end
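A brief usage sketch for the extracted user search (not in the diff); nicknames and options are examples only.

    # via the defdelegate added to Pleroma.User:
    Pleroma.User.search("lain", resolve: false, limit: 10, offset: 0)

    # or directly, restricted to accounts the given user follows:
    me = Pleroma.User.get_cached_by_nickname("admin")
    Pleroma.User.Search.search("lain", following: true, for_user: me)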
View file
@ -4,6 +4,7 @@
defmodule Pleroma.Web.ActivityPub.ActivityPub do defmodule Pleroma.Web.ActivityPub.ActivityPub do
alias Pleroma.Activity alias Pleroma.Activity
alias Pleroma.Config
alias Pleroma.Conversation alias Pleroma.Conversation
alias Pleroma.Notification alias Pleroma.Notification
alias Pleroma.Object alias Pleroma.Object
@ -73,7 +74,7 @@ defp check_actor_is_active(actor) do
end end
defp check_remote_limit(%{"object" => %{"content" => content}}) when not is_nil(content) do defp check_remote_limit(%{"object" => %{"content" => content}}) when not is_nil(content) do
limit = Pleroma.Config.get([:instance, :remote_limit]) limit = Config.get([:instance, :remote_limit])
String.length(content) <= limit String.length(content) <= limit
end end
@ -108,6 +109,15 @@ def decrease_replies_count_if_reply(%Object{
def decrease_replies_count_if_reply(_object), do: :noop def decrease_replies_count_if_reply(_object), do: :noop
def increase_poll_votes_if_vote(%{
"object" => %{"inReplyTo" => reply_ap_id, "name" => name},
"type" => "Create"
}) do
Object.increase_vote_count(reply_ap_id, name)
end
def increase_poll_votes_if_vote(_create_data), do: :noop
def insert(map, local \\ true, fake \\ false) when is_map(map) do def insert(map, local \\ true, fake \\ false) when is_map(map) do
with nil <- Activity.normalize(map), with nil <- Activity.normalize(map),
map <- lazy_put_activity_defaults(map, fake), map <- lazy_put_activity_defaults(map, fake),
@ -183,6 +193,9 @@ def stream_out(activity) do
public = "https://www.w3.org/ns/activitystreams#Public" public = "https://www.w3.org/ns/activitystreams#Public"
if activity.data["type"] in ["Create", "Announce", "Delete"] do if activity.data["type"] in ["Create", "Announce", "Delete"] do
object = Object.normalize(activity)
# Do not stream out poll replies
unless object.data["type"] == "Answer" do
Pleroma.Web.Streamer.stream("user", activity) Pleroma.Web.Streamer.stream("user", activity)
Pleroma.Web.Streamer.stream("list", activity) Pleroma.Web.Streamer.stream("list", activity)
@ -194,8 +207,6 @@ def stream_out(activity) do
end end
if activity.data["type"] in ["Create"] do if activity.data["type"] in ["Create"] do
object = Object.normalize(activity)
object.data object.data
|> Map.get("tag", []) |> Map.get("tag", [])
|> Enum.filter(fn tag -> is_bitstring(tag) end) |> Enum.filter(fn tag -> is_bitstring(tag) end)
@ -220,6 +231,7 @@ def stream_out(activity) do
end end
end end
end end
end
def create(%{to: to, actor: actor, context: context, object: object} = params, fake \\ false) do def create(%{to: to, actor: actor, context: context, object: object} = params, fake \\ false) do
additional = params[:additional] || %{} additional = params[:additional] || %{}
@ -235,6 +247,7 @@ def create(%{to: to, actor: actor, context: context, object: object} = params, f
{:ok, activity} <- insert(create_data, local, fake), {:ok, activity} <- insert(create_data, local, fake),
{:fake, false, activity} <- {:fake, fake, activity}, {:fake, false, activity} <- {:fake, fake, activity},
_ <- increase_replies_count_if_reply(create_data), _ <- increase_replies_count_if_reply(create_data),
_ <- increase_poll_votes_if_vote(create_data),
# Changing note count prior to enqueuing federation task in order to avoid # Changing note count prior to enqueuing federation task in order to avoid
# race conditions on updating user.info # race conditions on updating user.info
{:ok, _actor} <- increase_note_count_if_public(actor, activity), {:ok, _actor} <- increase_note_count_if_public(actor, activity),
@ -399,16 +412,12 @@ def delete(%Object{data: %{"id" => id, "actor" => actor}} = object, local \\ tru
end end
def block(blocker, blocked, activity_id \\ nil, local \\ true) do def block(blocker, blocked, activity_id \\ nil, local \\ true) do
ap_config = Application.get_env(:pleroma, :activitypub) outgoing_blocks = Config.get([:activitypub, :outgoing_blocks])
unfollow_blocked = Keyword.get(ap_config, :unfollow_blocked) unfollow_blocked = Config.get([:activitypub, :unfollow_blocked])
outgoing_blocks = Keyword.get(ap_config, :outgoing_blocks)
with true <- unfollow_blocked do if unfollow_blocked do
follow_activity = fetch_latest_follow(blocker, blocked) follow_activity = fetch_latest_follow(blocker, blocked)
if follow_activity, do: unfollow(blocker, blocked, nil, local)
if follow_activity do
unfollow(blocker, blocked, nil, local)
end
end end
with true <- outgoing_blocks, with true <- outgoing_blocks,
@ -480,6 +489,7 @@ defp fetch_activities_for_context_query(context, opts) do
if opts["user"], do: [opts["user"].ap_id | opts["user"].following] ++ public, else: public if opts["user"], do: [opts["user"].ap_id | opts["user"].following] ++ public, else: public
from(activity in Activity) from(activity in Activity)
|> maybe_preload_objects(opts)
|> restrict_blocked(opts) |> restrict_blocked(opts)
|> restrict_recipients(recipients, opts["user"]) |> restrict_recipients(recipients, opts["user"])
|> where( |> where(
@ -492,6 +502,7 @@ defp fetch_activities_for_context_query(context, opts) do
^context ^context
) )
) )
|> exclude_poll_votes(opts)
|> order_by([activity], desc: activity.id) |> order_by([activity], desc: activity.id)
end end
@ -499,7 +510,6 @@ defp fetch_activities_for_context_query(context, opts) do
def fetch_activities_for_context(context, opts \\ %{}) do def fetch_activities_for_context(context, opts \\ %{}) do
context context
|> fetch_activities_for_context_query(opts) |> fetch_activities_for_context_query(opts)
|> Activity.with_preloaded_object()
|> Repo.all() |> Repo.all()
end end
@ -507,7 +517,7 @@ def fetch_activities_for_context(context, opts \\ %{}) do
Pleroma.FlakeId.t() | nil Pleroma.FlakeId.t() | nil
def fetch_latest_activity_id_for_context(context, opts \\ %{}) do def fetch_latest_activity_id_for_context(context, opts \\ %{}) do
context context
|> fetch_activities_for_context_query(opts) |> fetch_activities_for_context_query(Map.merge(%{"skip_preload" => true}, opts))
|> limit(1) |> limit(1)
|> select([a], a.id) |> select([a], a.id)
|> Repo.one() |> Repo.one()
@ -548,14 +558,11 @@ defp restrict_visibility(query, %{visibility: visibility})
defp restrict_visibility(query, %{visibility: visibility}) defp restrict_visibility(query, %{visibility: visibility})
when visibility in @valid_visibilities do when visibility in @valid_visibilities do
query =
from( from(
a in query, a in query,
where: where:
fragment("activity_visibility(?, ?, ?) = ?", a.actor, a.recipients, a.data, ^visibility) fragment("activity_visibility(?, ?, ?) = ?", a.actor, a.recipients, a.data, ^visibility)
) )
query
end end
defp restrict_visibility(_query, %{visibility: visibility}) defp restrict_visibility(_query, %{visibility: visibility})
@ -565,17 +572,24 @@ defp restrict_visibility(_query, %{visibility: visibility})
defp restrict_visibility(query, _visibility), do: query defp restrict_visibility(query, _visibility), do: query
defp restrict_thread_visibility(query, %{"user" => %User{ap_id: ap_id}}) do defp restrict_thread_visibility(query, _, %{skip_thread_containment: true} = _),
query = do: query
defp restrict_thread_visibility(
query,
%{"user" => %User{info: %{skip_thread_containment: true}}},
_
),
do: query
defp restrict_thread_visibility(query, %{"user" => %User{ap_id: ap_id}}, _) do
from( from(
a in query, a in query,
where: fragment("thread_visibility(?, (?)->>'id') = true", ^ap_id, a.data) where: fragment("thread_visibility(?, (?)->>'id') = true", ^ap_id, a.data)
) )
query
end end
defp restrict_thread_visibility(query, _), do: query defp restrict_thread_visibility(query, _, _), do: query
def fetch_user_activities(user, reading_user, params \\ %{}) do def fetch_user_activities(user, reading_user, params \\ %{}) do
params = params =
@ -653,20 +667,6 @@ defp restrict_tag(query, %{"tag" => tag}) when is_binary(tag) do
defp restrict_tag(query, _), do: query defp restrict_tag(query, _), do: query
defp restrict_to_cc(query, recipients_to, recipients_cc) do
from(
activity in query,
where:
fragment(
"(?->'to' \\?| ?) or (?->'cc' \\?| ?)",
activity.data,
^recipients_to,
activity.data,
^recipients_cc
)
)
end
defp restrict_recipients(query, [], _user), do: query defp restrict_recipients(query, [], _user), do: query
defp restrict_recipients(query, recipients, nil) do defp restrict_recipients(query, recipients, nil) do
@ -820,6 +820,18 @@ defp restrict_muted_reblogs(query, %{"muting_user" => %User{info: info}}) do
defp restrict_muted_reblogs(query, _), do: query defp restrict_muted_reblogs(query, _), do: query
defp exclude_poll_votes(query, %{"include_poll_votes" => "true"}), do: query
defp exclude_poll_votes(query, _) do
if has_named_binding?(query, :object) do
from([activity, object: o] in query,
where: fragment("not(?->>'type' = ?)", o.data, "Answer")
)
else
query
end
end
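Poll vote activities (Create activities whose object has "type": "Answer") are now dropped from timeline queries by default; exclude_poll_votes/2 only applies when the :object binding has been preloaded, and callers can opt back in with the "include_poll_votes" option. A minimal usage sketch, with the recipients list left abstract:

# Default: poll votes are filtered out of the results.
activities = ActivityPub.fetch_activities(recipients)

# Opting back in, e.g. for a caller that needs the Answer activities too:
with_votes = ActivityPub.fetch_activities(recipients, %{"include_poll_votes" => "true"})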
defp maybe_preload_objects(query, %{"skip_preload" => true}), do: query defp maybe_preload_objects(query, %{"skip_preload" => true}), do: query
defp maybe_preload_objects(query, _) do defp maybe_preload_objects(query, _) do
@ -856,6 +868,10 @@ defp maybe_order(query, _), do: query
def fetch_activities_query(recipients, opts \\ %{}) do def fetch_activities_query(recipients, opts \\ %{}) do
base_query = from(activity in Activity) base_query = from(activity in Activity)
config = %{
skip_thread_containment: Config.get([:instance, :skip_thread_containment])
}
base_query base_query
|> maybe_preload_objects(opts) |> maybe_preload_objects(opts)
|> maybe_preload_bookmarks(opts) |> maybe_preload_bookmarks(opts)
@ -875,12 +891,13 @@ def fetch_activities_query(recipients, opts \\ %{}) do
|> restrict_muted(opts) |> restrict_muted(opts)
|> restrict_media(opts) |> restrict_media(opts)
|> restrict_visibility(opts) |> restrict_visibility(opts)
|> restrict_thread_visibility(opts) |> restrict_thread_visibility(opts, config)
|> restrict_replies(opts) |> restrict_replies(opts)
|> restrict_reblogs(opts) |> restrict_reblogs(opts)
|> restrict_pinned(opts) |> restrict_pinned(opts)
|> restrict_muted_reblogs(opts) |> restrict_muted_reblogs(opts)
|> Activity.restrict_deactivated_users() |> Activity.restrict_deactivated_users()
|> exclude_poll_votes(opts)
end end
def fetch_activities(recipients, opts \\ %{}) do def fetch_activities(recipients, opts \\ %{}) do
@ -889,9 +906,18 @@ def fetch_activities(recipients, opts \\ %{}) do
|> Enum.reverse() |> Enum.reverse()
end end
def fetch_activities_bounded(recipients_to, recipients_cc, opts \\ %{}) do def fetch_activities_bounded_query(query, recipients, recipients_with_public) do
from(activity in query,
where:
fragment("? && ?", activity.recipients, ^recipients) or
(fragment("? && ?", activity.recipients, ^recipients_with_public) and
"https://www.w3.org/ns/activitystreams#Public" in activity.recipients)
)
end
def fetch_activities_bounded(recipients, recipients_with_public, opts \\ %{}) do
fetch_activities_query([], opts) fetch_activities_query([], opts)
|> restrict_to_cc(recipients_to, recipients_cc) |> fetch_activities_bounded_query(recipients, recipients_with_public)
|> Pagination.fetch_paginated(opts) |> Pagination.fetch_paginated(opts)
|> Enum.reverse() |> Enum.reverse()
end end
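fetch_activities_bounded/3 replaces the removed restrict_to_cc/3: instead of matching the JSON "to"/"cc" fields it compares against the recipients array column, returning activities whose recipients overlap the first list plus public activities whose recipients overlap the second. A hedged sketch; the addresses and the limit option are made up for illustration:

recipients = ["https://example.com/users/alice/followers"]
recipients_with_public = ["https://example.com/users/alice"]

activities =
  ActivityPub.fetch_activities_bounded(recipients, recipients_with_public, %{"limit" => 20})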

View file

@ -27,7 +27,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
plug(:relay_active? when action in [:relay]) plug(:relay_active? when action in [:relay])
def relay_active?(conn, _) do def relay_active?(conn, _) do
if Keyword.get(Application.get_env(:pleroma, :instance), :allow_relay) do if Pleroma.Config.get([:instance, :allow_relay]) do
conn conn
else else
conn conn

View file

@ -5,8 +5,8 @@
defmodule Pleroma.Web.ActivityPub.MRF do defmodule Pleroma.Web.ActivityPub.MRF do
@callback filter(Map.t()) :: {:ok | :reject, Map.t()} @callback filter(Map.t()) :: {:ok | :reject, Map.t()}
def filter(object) do def filter(policies, %{} = object) do
get_policies() policies
|> Enum.reduce({:ok, object}, fn |> Enum.reduce({:ok, object}, fn
policy, {:ok, object} -> policy, {:ok, object} ->
policy.filter(object) policy.filter(object)
@ -16,10 +16,10 @@ def filter(object) do
end) end)
end end
def filter(%{} = object), do: get_policies() |> filter(object)
def get_policies do def get_policies do
Application.get_env(:pleroma, :instance, []) Pleroma.Config.get([:instance, :rewrite_policy], []) |> get_policies()
|> Keyword.get(:rewrite_policy, [])
|> get_policies()
end end
defp get_policies(policy) when is_atom(policy), do: [policy] defp get_policies(policy) when is_atom(policy), do: [policy]
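MRF.filter/2 now takes the policy chain explicitly, with filter/1 kept as a convenience wrapper that loads the configured [:instance, :rewrite_policy]. This makes it possible to push a message through an ad-hoc set of policies, which the SubchainPolicy added below relies on. A minimal sketch; NoOpPolicy (Pleroma's pass-through policy) stands in here for any module implementing the MRF behaviour:

alias Pleroma.Web.ActivityPub.MRF

message = %{"type" => "Create", "actor" => "https://example.com/users/bob", "object" => %{}}

case MRF.filter([Pleroma.Web.ActivityPub.MRF.NoOpPolicy], message) do
  {:ok, filtered} -> filtered
  {:reject, _} -> nil
end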

View file

@ -0,0 +1,48 @@
# Pleroma: A lightweight social networking server
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy do
alias Pleroma.User
require Logger
# has the user successfully posted before?
defp old_user?(%User{} = u) do
u.info.note_count > 0 || u.info.follower_count > 0
end
# does the post contain links?
defp contains_links?(%{"content" => content} = _object) do
content
|> Floki.filter_out("a.mention,a.hashtag,a[rel~=\"tag\"],a.zrl")
|> Floki.attribute("a", "href")
|> length() > 0
end
defp contains_links?(_), do: false
def filter(%{"type" => "Create", "actor" => actor, "object" => object} = message) do
with {:ok, %User{} = u} <- User.get_or_fetch_by_ap_id(actor),
{:contains_links, true} <- {:contains_links, contains_links?(object)},
{:old_user, true} <- {:old_user, old_user?(u)} do
{:ok, message}
else
{:contains_links, false} ->
{:ok, message}
{:old_user, false} ->
{:reject, nil}
{:error, _} ->
{:reject, nil}
e ->
Logger.warn("[MRF anti-link-spam] WTF: unhandled error #{inspect(e)}")
{:reject, nil}
end
end
# in all other cases, pass through
def filter(message), do: {:ok, message}
end
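The policy above rejects Create activities that contain non-mention links whenever the author has no prior posts and no followers, and passes everything else through. Like other MRF policies it has to be listed in the rewrite policy chain; given the [:instance, :rewrite_policy] lookup used by MRF.get_policies/0, enabling it would look roughly like this (an illustrative config sketch, not taken from the docs):

config :pleroma, :instance,
  rewrite_policy: [Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy]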

View file

@ -74,8 +74,7 @@ defp check_ftl_removal(%{host: actor_host} = _actor_info, object) do
actor_host actor_host
), ),
user <- User.get_cached_by_ap_id(object["actor"]), user <- User.get_cached_by_ap_id(object["actor"]),
true <- "https://www.w3.org/ns/activitystreams#Public" in object["to"], true <- "https://www.w3.org/ns/activitystreams#Public" in object["to"] do
true <- user.follower_address in object["cc"] do
to = to =
List.delete(object["to"], "https://www.w3.org/ns/activitystreams#Public") ++ List.delete(object["to"], "https://www.w3.org/ns/activitystreams#Public") ++
[user.follower_address] [user.follower_address]

View file

@ -0,0 +1,40 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.MRF.SubchainPolicy do
alias Pleroma.Config
alias Pleroma.Web.ActivityPub.MRF
require Logger
@behaviour MRF
defp lookup_subchain(actor) do
with matches <- Config.get([:mrf_subchain, :match_actor]),
{match, subchain} <- Enum.find(matches, fn {k, _v} -> String.match?(actor, k) end) do
{:ok, match, subchain}
else
_e -> {:error, :notfound}
end
end
@impl true
def filter(%{"actor" => actor} = message) do
with {:ok, match, subchain} <- lookup_subchain(actor) do
Logger.debug(
"[SubchainPolicy] Matched #{actor} against #{inspect(match)} with subchain #{
inspect(subchain)
}"
)
subchain
|> MRF.filter(message)
else
_e -> {:ok, message}
end
end
@impl true
def filter(message), do: {:ok, message}
end
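SubchainPolicy reads [:mrf_subchain, :match_actor], matching each key against the actor with String.match?/2 (so regex keys are the natural fit) and feeding the message into the associated policy list via MRF.filter/2. A hedged configuration sketch; the host and the DropPolicy target are examples, not part of this changeset:

config :pleroma, :mrf_subchain,
  match_actor: %{
    ~r/^https:\/\/spammy\.example\/users\// => [Pleroma.Web.ActivityPub.MRF.DropPolicy]
  }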

View file

@ -5,6 +5,7 @@
defmodule Pleroma.Web.ActivityPub.Publisher do defmodule Pleroma.Web.ActivityPub.Publisher do
alias Pleroma.Activity alias Pleroma.Activity
alias Pleroma.Config alias Pleroma.Config
alias Pleroma.HTTP
alias Pleroma.Instances alias Pleroma.Instances
alias Pleroma.User alias Pleroma.User
alias Pleroma.Web.ActivityPub.Relay alias Pleroma.Web.ActivityPub.Relay
@ -16,8 +17,6 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
require Logger require Logger
@httpoison Application.get_env(:pleroma, :httpoison)
@moduledoc """ @moduledoc """
ActivityPub outgoing federation module. ActivityPub outgoing federation module.
""" """
@ -63,7 +62,7 @@ def publish_one(%{inbox: inbox, json: json, actor: %User{} = actor, id: id} = pa
with {:ok, %{status: code}} when code in 200..299 <- with {:ok, %{status: code}} when code in 200..299 <-
result = result =
@httpoison.post( HTTP.post(
inbox, inbox,
json, json,
[ [
@ -89,7 +88,7 @@ defp should_federate?(inbox, public) do
true true
else else
inbox_info = URI.parse(inbox) inbox_info = URI.parse(inbox)
!Enum.member?(Pleroma.Config.get([:instance, :quarantined_instances], []), inbox_info.host) !Enum.member?(Config.get([:instance, :quarantined_instances], []), inbox_info.host)
end end
end end

View file

@ -35,6 +35,7 @@ def fix_object(object) do
|> fix_likes |> fix_likes
|> fix_addressing |> fix_addressing
|> fix_summary |> fix_summary
|> fix_type
end end
def fix_summary(%{"summary" => nil} = object) do def fix_summary(%{"summary" => nil} = object) do
@ -65,7 +66,11 @@ def fix_addressing_list(map, field) do
end end
end end
def fix_explicit_addressing(%{"to" => to, "cc" => cc} = object, explicit_mentions) do def fix_explicit_addressing(
%{"to" => to, "cc" => cc} = object,
explicit_mentions,
follower_collection
) do
explicit_to = explicit_to =
to to
|> Enum.filter(fn x -> x in explicit_mentions end) |> Enum.filter(fn x -> x in explicit_mentions end)
@ -76,6 +81,7 @@ def fix_explicit_addressing(%{"to" => to, "cc" => cc} = object, explicit_mention
final_cc = final_cc =
(cc ++ explicit_cc) (cc ++ explicit_cc)
|> Enum.reject(fn x -> String.ends_with?(x, "/followers") and x != follower_collection end)
|> Enum.uniq() |> Enum.uniq()
object object
@ -83,7 +89,7 @@ def fix_explicit_addressing(%{"to" => to, "cc" => cc} = object, explicit_mention
|> Map.put("cc", final_cc) |> Map.put("cc", final_cc)
end end
def fix_explicit_addressing(object, _explicit_mentions), do: object def fix_explicit_addressing(object, _explicit_mentions, _followers_collection), do: object
# if directMessage flag is set to true, leave the addressing alone # if directMessage flag is set to true, leave the addressing alone
def fix_explicit_addressing(%{"directMessage" => true} = object), do: object def fix_explicit_addressing(%{"directMessage" => true} = object), do: object
@ -93,10 +99,12 @@ def fix_explicit_addressing(object) do
object object
|> Utils.determine_explicit_mentions() |> Utils.determine_explicit_mentions()
explicit_mentions = explicit_mentions ++ ["https://www.w3.org/ns/activitystreams#Public"] follower_collection = User.get_cached_by_ap_id(Containment.get_actor(object)).follower_address
object explicit_mentions =
|> fix_explicit_addressing(explicit_mentions) explicit_mentions ++ ["https://www.w3.org/ns/activitystreams#Public", follower_collection]
fix_explicit_addressing(object, explicit_mentions, follower_collection)
end end
# if as:Public is addressed, then make sure the followers collection is also addressed # if as:Public is addressed, then make sure the followers collection is also addressed
@ -133,7 +141,7 @@ def fix_addressing(object) do
|> fix_addressing_list("cc") |> fix_addressing_list("cc")
|> fix_addressing_list("bto") |> fix_addressing_list("bto")
|> fix_addressing_list("bcc") |> fix_addressing_list("bcc")
|> fix_explicit_addressing |> fix_explicit_addressing()
|> fix_implicit_addressing(followers_collection) |> fix_implicit_addressing(followers_collection)
end end
@ -328,6 +336,18 @@ def fix_content_map(%{"contentMap" => content_map} = object) do
def fix_content_map(object), do: object def fix_content_map(object), do: object
def fix_type(%{"inReplyTo" => reply_id} = object) when is_binary(reply_id) do
reply = Object.normalize(reply_id)
if reply && (reply.data["type"] == "Question" and object["name"]) do
Map.put(object, "type", "Answer")
else
object
end
end
def fix_type(object), do: object
defp mastodon_follow_hack(%{"id" => id, "actor" => follower_id}, followed) do defp mastodon_follow_hack(%{"id" => id, "actor" => follower_id}, followed) do
with true <- id =~ "follows", with true <- id =~ "follows",
%User{local: true} = follower <- User.get_cached_by_ap_id(follower_id), %User{local: true} = follower <- User.get_cached_by_ap_id(follower_id),
@ -398,7 +418,7 @@ def handle_incoming(%{"id" => id}) when not (is_binary(id) and length(id) > 8),
# - tags # - tags
# - emoji # - emoji
def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = object} = data) def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = object} = data)
when objtype in ["Article", "Note", "Video", "Page"] do when objtype in ["Article", "Note", "Video", "Page", "Question", "Answer"] do
actor = Containment.get_actor(data) actor = Containment.get_actor(data)
data = data =
@ -438,10 +458,12 @@ def handle_incoming(
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower), {:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
{:ok, activity} <- ActivityPub.follow(follower, followed, id, false) do {:ok, activity} <- ActivityPub.follow(follower, followed, id, false) do
with deny_follow_blocked <- Pleroma.Config.get([:user, :deny_follow_blocked]), with deny_follow_blocked <- Pleroma.Config.get([:user, :deny_follow_blocked]),
{:user_blocked, false} <- {_, false} <-
{:user_blocked, User.blocks?(followed, follower) && deny_follow_blocked}, {:user_blocked, User.blocks?(followed, follower) && deny_follow_blocked},
{:user_locked, false} <- {:user_locked, User.locked?(followed)}, {_, false} <- {:user_locked, User.locked?(followed)},
{:follow, {:ok, follower}} <- {:follow, User.follow(follower, followed)} do {_, {:ok, follower}} <- {:follow, User.follow(follower, followed)},
{_, {:ok, _}} <-
{:follow_state_update, Utils.update_follow_state_for_all(activity, "accept")} do
ActivityPub.accept(%{ ActivityPub.accept(%{
to: [follower.ap_id], to: [follower.ap_id],
actor: followed, actor: followed,
@ -450,7 +472,7 @@ def handle_incoming(
}) })
else else
{:user_blocked, true} -> {:user_blocked, true} ->
{:ok, _} = Utils.update_follow_state(activity, "reject") {:ok, _} = Utils.update_follow_state_for_all(activity, "reject")
ActivityPub.reject(%{ ActivityPub.reject(%{
to: [follower.ap_id], to: [follower.ap_id],
@ -460,7 +482,7 @@ def handle_incoming(
}) })
{:follow, {:error, _}} -> {:follow, {:error, _}} ->
{:ok, _} = Utils.update_follow_state(activity, "reject") {:ok, _} = Utils.update_follow_state_for_all(activity, "reject")
ActivityPub.reject(%{ ActivityPub.reject(%{
to: [follower.ap_id], to: [follower.ap_id],
@ -486,21 +508,16 @@ def handle_incoming(
with actor <- Containment.get_actor(data), with actor <- Containment.get_actor(data),
{:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor), {:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
{:ok, follow_activity} <- get_follow_activity(follow_object, followed), {:ok, follow_activity} <- get_follow_activity(follow_object, followed),
{:ok, follow_activity} <- Utils.update_follow_state(follow_activity, "accept"), {:ok, follow_activity} <- Utils.update_follow_state_for_all(follow_activity, "accept"),
%User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]), %User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]),
{:ok, activity} <- {:ok, _follower} = User.follow(follower, followed) do
ActivityPub.accept(%{ ActivityPub.accept(%{
to: follow_activity.data["to"], to: follow_activity.data["to"],
type: "Accept", type: "Accept",
actor: followed, actor: followed,
object: follow_activity.data["id"], object: follow_activity.data["id"],
local: false local: false
}) do })
if not User.following?(follower, followed) do
{:ok, _follower} = User.follow(follower, followed)
end
{:ok, activity}
else else
_e -> :error _e -> :error
end end
@ -512,7 +529,7 @@ def handle_incoming(
with actor <- Containment.get_actor(data), with actor <- Containment.get_actor(data),
{:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor), {:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
{:ok, follow_activity} <- get_follow_activity(follow_object, followed), {:ok, follow_activity} <- get_follow_activity(follow_object, followed),
{:ok, follow_activity} <- Utils.update_follow_state(follow_activity, "reject"), {:ok, follow_activity} <- Utils.update_follow_state_for_all(follow_activity, "reject"),
%User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]), %User{local: true} = follower <- User.get_cached_by_ap_id(follow_activity.data["actor"]),
{:ok, activity} <- {:ok, activity} <-
ActivityPub.reject(%{ ActivityPub.reject(%{
@ -731,6 +748,7 @@ def prepare_object(object) do
|> set_reply_to_uri |> set_reply_to_uri
|> strip_internal_fields |> strip_internal_fields
|> strip_internal_tags |> strip_internal_tags
|> set_type
end end
# @doc # @doc
@ -895,6 +913,12 @@ def set_sensitive(object) do
Map.put(object, "sensitive", "nsfw" in tags) Map.put(object, "sensitive", "nsfw" in tags)
end end
def set_type(%{"type" => "Answer"} = object) do
Map.put(object, "type", "Note")
end
def set_type(object), do: object
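Polls therefore get a round trip between the internal and the federated representation: on the way in, fix_type/1 retypes a reply to a Question that carries a "name" as an Answer, and on the way out set_type/1 (wired into prepare_object/1 above) flattens Answers back into plain Notes. A rough illustration; the incoming half depends on Object.normalize/1 resolving the parent poll, so it is only described in a comment:

# Incoming: %{"inReplyTo" => poll_id, "name" => "Yes"} gains "type" => "Answer"
# when the object it replies to is a Question.

# Outgoing: Answers are retyped as Notes, all other fields untouched.
set_type(%{"type" => "Answer", "name" => "Yes", "inReplyTo" => "https://example.com/objects/1"})
# => %{"type" => "Note", "name" => "Yes", "inReplyTo" => "https://example.com/objects/1"}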
def add_attributed_to(object) do def add_attributed_to(object) do
attributed_to = object["attributedTo"] || object["actor"] attributed_to = object["attributedTo"] || object["actor"]

View file

@ -19,7 +19,7 @@ defmodule Pleroma.Web.ActivityPub.Utils do
require Logger require Logger
@supported_object_types ["Article", "Note", "Video", "Page"] @supported_object_types ["Article", "Note", "Video", "Page", "Question", "Answer"]
@supported_report_states ~w(open closed resolved) @supported_report_states ~w(open closed resolved)
@valid_visibilities ~w(public unlisted private direct) @valid_visibilities ~w(public unlisted private direct)
@ -376,8 +376,8 @@ def remove_like_from_object(%Activity{data: %{"actor" => actor}}, object) do
@doc """ @doc """
Updates a follow activity's state (for locked accounts). Updates a follow activity's state (for locked accounts).
""" """
def update_follow_state( def update_follow_state_for_all(
%Activity{data: %{"actor" => actor, "object" => object, "state" => "pending"}} = activity, %Activity{data: %{"actor" => actor, "object" => object}} = activity,
state state
) do ) do
try do try do
@ -789,4 +789,22 @@ defp get_updated_targets(
[to, cc, recipients] [to, cc, recipients]
end end
end end
def get_existing_votes(actor, %{data: %{"id" => id}}) do
query =
from(
[activity, object: object] in Activity.with_preloaded_object(Activity),
where: fragment("(?)->>'type' = 'Create'", activity.data),
where: fragment("(?)->>'actor' = ?", activity.data, ^actor),
where:
fragment(
"(?)->>'inReplyTo' = ?",
object.data,
^to_string(id)
),
where: fragment("(?)->>'type' = 'Answer'", object.data)
)
Repo.all(query)
end
end end

View file

@ -66,6 +66,9 @@ def get_visibility(object) do
Enum.any?(to, &String.contains?(&1, "/followers")) -> Enum.any?(to, &String.contains?(&1, "/followers")) ->
"private" "private"
object.data["directMessage"] == true ->
"direct"
length(cc) > 0 -> length(cc) > 0 ->
"private" "private"

View file

@ -10,6 +10,8 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Relay alias Pleroma.Web.ActivityPub.Relay
alias Pleroma.Web.AdminAPI.AccountView alias Pleroma.Web.AdminAPI.AccountView
alias Pleroma.Web.AdminAPI.Config
alias Pleroma.Web.AdminAPI.ConfigView
alias Pleroma.Web.AdminAPI.ReportView alias Pleroma.Web.AdminAPI.ReportView
alias Pleroma.Web.AdminAPI.Search alias Pleroma.Web.AdminAPI.Search
alias Pleroma.Web.CommonAPI alias Pleroma.Web.CommonAPI
@ -362,6 +364,41 @@ def status_delete(%{assigns: %{user: user}} = conn, %{"id" => id}) do
end end
end end
def config_show(conn, _params) do
configs = Pleroma.Repo.all(Config)
conn
|> put_view(ConfigView)
|> render("index.json", %{configs: configs})
end
def config_update(conn, %{"configs" => configs}) do
updated =
if Pleroma.Config.get([:instance, :dynamic_configuration]) do
updated =
Enum.map(configs, fn
%{"key" => key, "value" => value} ->
{:ok, config} = Config.update_or_create(%{key: key, value: value})
config
%{"key" => key, "delete" => "true"} ->
{:ok, _} = Config.delete(key)
nil
end)
|> Enum.reject(&is_nil(&1))
Pleroma.Config.TransferTask.load_and_update_env()
Mix.Tasks.Pleroma.Config.run(["migrate_from_db", Pleroma.Config.get(:env)])
updated
else
[]
end
conn
|> put_view(ConfigView)
|> render("index.json", %{configs: updated})
end
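config_update/2 is a no-op (returning an empty list) unless [:instance, :dynamic_configuration] is enabled; each entry in "configs" either upserts a key through Config.update_or_create/1 or removes it when "delete" => "true" is present, after which the runtime env is reloaded and the result is written back out via the config mix task. A hedged sketch of the expected request body, written as the Elixir terms the controller pattern-matches on (key names and values are illustrative; the value encoding is interpreted by Config.transform/1, added below):

%{
  "configs" => [
    # upsert: stored via Config.update_or_create/1
    %{"key" => "instance", "value" => %{"name" => "My instance", "limit" => "i:5000"}},
    # delete: removed via Config.delete/1
    %{"key" => "obsolete_setting", "delete" => "true"}
  ]
}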
def errors(conn, {:error, :not_found}) do def errors(conn, {:error, :not_found}) do
conn conn
|> put_status(404) |> put_status(404)

View file

@ -0,0 +1,144 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.AdminAPI.Config do
use Ecto.Schema
import Ecto.Changeset
alias __MODULE__
alias Pleroma.Repo
@type t :: %__MODULE__{}
schema "config" do
field(:key, :string)
field(:value, :binary)
timestamps()
end
@spec get_by_key(String.t()) :: Config.t() | nil
def get_by_key(key), do: Repo.get_by(Config, key: key)
@spec changeset(Config.t(), map()) :: Changeset.t()
def changeset(config, params \\ %{}) do
config
|> cast(params, [:key, :value])
|> validate_required([:key, :value])
|> unique_constraint(:key)
end
@spec create(map()) :: {:ok, Config.t()} | {:error, Changeset.t()}
def create(%{key: key, value: value}) do
%Config{}
|> changeset(%{key: key, value: transform(value)})
|> Repo.insert()
end
@spec update(Config.t(), map()) :: {:ok, Config} | {:error, Changeset.t()}
def update(%Config{} = config, %{value: value}) do
config
|> change(value: transform(value))
|> Repo.update()
end
@spec update_or_create(map()) :: {:ok, Config.t()} | {:error, Changeset.t()}
def update_or_create(%{key: key} = params) do
with %Config{} = config <- Config.get_by_key(key) do
Config.update(config, params)
else
nil -> Config.create(params)
end
end
@spec delete(String.t()) :: {:ok, Config.t()} | {:error, Changeset.t()}
def delete(key) do
with %Config{} = config <- Config.get_by_key(key) do
Repo.delete(config)
else
nil -> {:error, "Config with key #{key} not found"}
end
end
@spec from_binary(binary()) :: term()
def from_binary(value), do: :erlang.binary_to_term(value)
@spec from_binary_to_map(binary()) :: any()
def from_binary_to_map(binary) do
from_binary(binary)
|> do_convert()
end
defp do_convert([{k, v}] = value) when is_list(value) and length(value) == 1,
do: %{k => do_convert(v)}
defp do_convert(values) when is_list(values), do: for(val <- values, do: do_convert(val))
defp do_convert({k, v} = value) when is_tuple(value),
do: %{k => do_convert(v)}
defp do_convert(value) when is_binary(value) or is_atom(value) or is_map(value),
do: value
@spec transform(any()) :: binary()
def transform(entity) when is_map(entity) do
tuples =
for {k, v} <- entity,
into: [],
do: {if(is_atom(k), do: k, else: String.to_atom(k)), do_transform(v)}
Enum.reject(tuples, fn {_k, v} -> is_nil(v) end)
|> Enum.sort()
|> :erlang.term_to_binary()
end
def transform(entity) when is_list(entity) do
list = Enum.map(entity, &do_transform(&1))
:erlang.term_to_binary(list)
end
def transform(entity), do: :erlang.term_to_binary(entity)
defp do_transform(%Regex{} = value) when is_map(value), do: value
defp do_transform(value) when is_map(value) do
values =
for {key, val} <- value,
into: [],
do: {String.to_atom(key), do_transform(val)}
Enum.sort(values)
end
defp do_transform(value) when is_list(value) do
Enum.map(value, &do_transform(&1))
end
defp do_transform(entity) when is_list(entity) and length(entity) == 1, do: hd(entity)
defp do_transform(value) when is_binary(value) do
value = String.trim(value)
case String.length(value) do
0 ->
nil
_ ->
cond do
String.starts_with?(value, "Pleroma") ->
String.to_existing_atom("Elixir." <> value)
String.starts_with?(value, ":") ->
String.replace(value, ":", "") |> String.to_existing_atom()
String.starts_with?(value, "i:") ->
String.replace(value, "i:", "") |> String.to_integer()
true ->
value
end
end
end
defp do_transform(value), do: value
end
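Values are persisted as Erlang binary terms: transform/1 converts incoming maps, lists and strings into keyword data (decoding the "i:" integer, ":" atom and "Pleroma..." module prefixes along the way), from_binary/1 reverses the serialization, and from_binary_to_map/1 feeds the JSON view. A quick round trip, with the expected output inferred from the clauses above:

alias Pleroma.Web.AdminAPI.Config

binary = Config.transform(%{"description" => "My instance", "limit" => "i:5000"})
Config.from_binary(binary)
# => [description: "My instance", limit: 5000]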

View file

@ -0,0 +1,16 @@
defmodule Pleroma.Web.AdminAPI.ConfigView do
use Pleroma.Web, :view
def render("index.json", %{configs: configs}) do
%{
configs: render_many(configs, __MODULE__, "show.json", as: :config)
}
end
def render("show.json", %{config: config}) do
%{
key: config.key,
value: Pleroma.Web.AdminAPI.Config.from_binary_to_map(config.value)
}
end
end

View file

@ -5,6 +5,7 @@
defmodule Pleroma.Web.AdminAPI.ReportView do defmodule Pleroma.Web.AdminAPI.ReportView do
use Pleroma.Web, :view use Pleroma.Web, :view
alias Pleroma.Activity alias Pleroma.Activity
alias Pleroma.HTML
alias Pleroma.User alias Pleroma.User
alias Pleroma.Web.CommonAPI.Utils alias Pleroma.Web.CommonAPI.Utils
alias Pleroma.Web.MastodonAPI.AccountView alias Pleroma.Web.MastodonAPI.AccountView
@ -23,6 +24,13 @@ def render("show.json", %{report: report}) do
[account_ap_id | status_ap_ids] = report.data["object"] [account_ap_id | status_ap_ids] = report.data["object"]
account = User.get_cached_by_ap_id(account_ap_id) account = User.get_cached_by_ap_id(account_ap_id)
content =
unless is_nil(report.data["content"]) do
HTML.filter_tags(report.data["content"])
else
nil
end
statuses = statuses =
Enum.map(status_ap_ids, fn ap_id -> Enum.map(status_ap_ids, fn ap_id ->
Activity.get_by_ap_id_with_object(ap_id) Activity.get_by_ap_id_with_object(ap_id)
@ -32,7 +40,7 @@ def render("show.json", %{report: report}) do
id: report.id, id: report.id,
account: AccountView.render("account.json", %{user: account}), account: AccountView.render("account.json", %{user: account}),
actor: AccountView.render("account.json", %{user: user}), actor: AccountView.render("account.json", %{user: user}),
content: report.data["content"], content: content,
created_at: created_at, created_at: created_at,
statuses: StatusView.render("index.json", %{activities: statuses, as: :activity}), statuses: StatusView.render("index.json", %{activities: statuses, as: :activity}),
state: report.data["state"] state: report.data["state"]

View file

@ -24,6 +24,14 @@ def get_user(%Plug.Conn{} = conn) do
end end
end end
@doc """
Gets or creates Pleroma.Registration record from Ueberauth assigns.
Note: some strategies (like `keycloak`) might need extra configuration to fill `uid` from the callback response;
see [`docs/config.md`](docs/config.md).
"""
def get_registration(%Plug.Conn{assigns: %{ueberauth_auth: %{uid: nil}}}),
do: {:error, :missing_uid}
def get_registration(%Plug.Conn{ def get_registration(%Plug.Conn{
assigns: %{ueberauth_auth: %{provider: provider, uid: uid} = auth} assigns: %{ueberauth_auth: %{provider: provider, uid: uid} = auth}
}) do }) do
@ -51,9 +59,10 @@ def get_registration(%Plug.Conn{
def get_registration(%Plug.Conn{} = _conn), do: {:error, :missing_credentials} def get_registration(%Plug.Conn{} = _conn), do: {:error, :missing_credentials}
@doc "Creates Pleroma.User record basing on params and Pleroma.Registration record."
def create_from_registration( def create_from_registration(
%Plug.Conn{params: %{"authorization" => registration_attrs}}, %Plug.Conn{params: %{"authorization" => registration_attrs}},
registration %Registration{} = registration
) do ) do
nickname = value([registration_attrs["nickname"], Registration.nickname(registration)]) nickname = value([registration_attrs["nickname"], Registration.nickname(registration)])
email = value([registration_attrs["email"], Registration.email(registration)]) email = value([registration_attrs["email"], Registration.email(registration)])

View file

@ -35,9 +35,9 @@ def unfollow(follower, unfollowed) do
end end
def accept_follow_request(follower, followed) do def accept_follow_request(follower, followed) do
with {:ok, follower} <- User.maybe_follow(follower, followed), with {:ok, follower} <- User.follow(follower, followed),
%Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed), %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed),
{:ok, follow_activity} <- Utils.update_follow_state(follow_activity, "accept"), {:ok, follow_activity} <- Utils.update_follow_state_for_all(follow_activity, "accept"),
{:ok, _activity} <- {:ok, _activity} <-
ActivityPub.accept(%{ ActivityPub.accept(%{
to: [follower.ap_id], to: [follower.ap_id],
@ -51,7 +51,7 @@ def accept_follow_request(follower, followed) do
def reject_follow_request(follower, followed) do def reject_follow_request(follower, followed) do
with %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed), with %Activity{} = follow_activity <- Utils.fetch_latest_follow(follower, followed),
{:ok, follow_activity} <- Utils.update_follow_state(follow_activity, "reject"), {:ok, follow_activity} <- Utils.update_follow_state_for_all(follow_activity, "reject"),
{:ok, _activity} <- {:ok, _activity} <-
ActivityPub.reject(%{ ActivityPub.reject(%{
to: [follower.ap_id], to: [follower.ap_id],
@ -119,6 +119,56 @@ def unfavorite(id_or_ap_id, user) do
end end
end end
def vote(user, object, choices) do
with "Question" <- object.data["type"],
{:author, false} <- {:author, object.data["actor"] == user.ap_id},
{:existing_votes, []} <- {:existing_votes, Utils.get_existing_votes(user.ap_id, object)},
{options, max_count} <- get_options_and_max_count(object),
option_count <- Enum.count(options),
{:choice_check, {choices, true}} <-
{:choice_check, normalize_and_validate_choice_indices(choices, option_count)},
{:count_check, true} <- {:count_check, Enum.count(choices) <= max_count} do
answer_activities =
Enum.map(choices, fn index ->
answer_data = make_answer_data(user, object, Enum.at(options, index)["name"])
{:ok, activity} =
ActivityPub.create(%{
to: answer_data["to"],
actor: user,
context: object.data["context"],
object: answer_data,
additional: %{"cc" => answer_data["cc"]}
})
activity
end)
object = Object.get_cached_by_ap_id(object.data["id"])
{:ok, answer_activities, object}
else
{:author, _} -> {:error, "Poll's author can't vote"}
{:existing_votes, _} -> {:error, "Already voted"}
{:choice_check, {_, false}} -> {:error, "Invalid indices"}
{:count_check, false} -> {:error, "Too many choices"}
end
end
defp get_options_and_max_count(object) do
if Map.has_key?(object.data, "anyOf") do
{object.data["anyOf"], Enum.count(object.data["anyOf"])}
else
{object.data["oneOf"], 1}
end
end
defp normalize_and_validate_choice_indices(choices, count) do
Enum.map_reduce(choices, true, fn index, valid ->
index = if is_binary(index), do: String.to_integer(index), else: index
{index, if(valid, do: index < count, else: valid)}
end)
end
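vote/3 guards against the poll's author voting, repeat votes (via Utils.get_existing_votes/2), out-of-range indices and over-selection (one choice for "oneOf" polls, up to the option count for "anyOf"), then creates one Answer activity per chosen option and returns the refreshed object. A hedged usage sketch, assuming object is a Question with at least three options and multiple choice enabled:

case CommonAPI.vote(user, object, [0, 2]) do
  {:ok, _answer_activities, refreshed_object} -> refreshed_object
  {:error, message} -> message  # e.g. "Already voted" or "Too many choices"
end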
def get_visibility(%{"visibility" => visibility}, in_reply_to) def get_visibility(%{"visibility" => visibility}, in_reply_to)
when visibility in ~w{public unlisted private direct}, when visibility in ~w{public unlisted private direct},
do: {visibility, get_replied_to_visibility(in_reply_to)} do: {visibility, get_replied_to_visibility(in_reply_to)}
@ -154,12 +204,15 @@ def post(user, %{"status" => status} = data) do
data, data,
visibility visibility
), ),
{to, cc} <- to_for_user_and_mentions(user, mentions, in_reply_to, visibility), mentioned_users <- for({_, mentioned_user} <- mentions, do: mentioned_user.ap_id),
addressed_users <- get_addressed_users(mentioned_users, data["to"]),
{poll, poll_emoji} <- make_poll_data(data),
{to, cc} <- get_to_and_cc(user, addressed_users, in_reply_to, visibility),
context <- make_context(in_reply_to), context <- make_context(in_reply_to),
cw <- data["spoiler_text"] || "", cw <- data["spoiler_text"] || "",
sensitive <- data["sensitive"] || Enum.member?(tags, {"#nsfw", "nsfw"}), sensitive <- data["sensitive"] || Enum.member?(tags, {"#nsfw", "nsfw"}),
full_payload <- String.trim(status <> cw), full_payload <- String.trim(status <> cw),
length when length in 1..limit <- String.length(full_payload), :ok <- validate_character_limit(full_payload, attachments, limit),
object <- object <-
make_note_data( make_note_data(
user.ap_id, user.ap_id,
@ -171,13 +224,14 @@ def post(user, %{"status" => status} = data) do
tags, tags,
cw, cw,
cc, cc,
sensitive sensitive,
poll
), ),
object <- object <-
Map.put( Map.put(
object, object,
"emoji", "emoji",
Formatter.get_emoji_map(full_payload) Map.merge(Formatter.get_emoji_map(full_payload), poll_emoji)
) do ) do
res = res =
ActivityPub.create( ActivityPub.create(
@ -193,6 +247,7 @@ def post(user, %{"status" => status} = data) do
res res
else else
{:error, _} = e -> e
e -> {:error, e} e -> {:error, e}
end end
end end

View file

@ -61,9 +61,9 @@ def attachments_from_ids_descs(ids, descs_str) do
end) end)
end end
def to_for_user_and_mentions(user, mentions, inReplyTo, "public") do @spec get_to_and_cc(User.t(), list(String.t()), Activity.t() | nil, String.t()) ::
mentioned_users = Enum.map(mentions, fn {_, %{ap_id: ap_id}} -> ap_id end) {list(String.t()), list(String.t())}
def get_to_and_cc(user, mentioned_users, inReplyTo, "public") do
to = ["https://www.w3.org/ns/activitystreams#Public" | mentioned_users] to = ["https://www.w3.org/ns/activitystreams#Public" | mentioned_users]
cc = [user.follower_address] cc = [user.follower_address]
@ -74,9 +74,7 @@ def to_for_user_and_mentions(user, mentions, inReplyTo, "public") do
end end
end end
def to_for_user_and_mentions(user, mentions, inReplyTo, "unlisted") do def get_to_and_cc(user, mentioned_users, inReplyTo, "unlisted") do
mentioned_users = Enum.map(mentions, fn {_, %{ap_id: ap_id}} -> ap_id end)
to = [user.follower_address | mentioned_users] to = [user.follower_address | mentioned_users]
cc = ["https://www.w3.org/ns/activitystreams#Public"] cc = ["https://www.w3.org/ns/activitystreams#Public"]
@ -87,14 +85,12 @@ def to_for_user_and_mentions(user, mentions, inReplyTo, "unlisted") do
end end
end end
def to_for_user_and_mentions(user, mentions, inReplyTo, "private") do def get_to_and_cc(user, mentioned_users, inReplyTo, "private") do
{to, cc} = to_for_user_and_mentions(user, mentions, inReplyTo, "direct") {to, cc} = get_to_and_cc(user, mentioned_users, inReplyTo, "direct")
{[user.follower_address | to], cc} {[user.follower_address | to], cc}
end end
def to_for_user_and_mentions(_user, mentions, inReplyTo, "direct") do def get_to_and_cc(_user, mentioned_users, inReplyTo, "direct") do
mentioned_users = Enum.map(mentions, fn {_, %{ap_id: ap_id}} -> ap_id end)
if inReplyTo do if inReplyTo do
{Enum.uniq([inReplyTo.data["actor"] | mentioned_users]), []} {Enum.uniq([inReplyTo.data["actor"] | mentioned_users]), []}
else else
@ -102,6 +98,78 @@ def to_for_user_and_mentions(_user, mentions, inReplyTo, "direct") do
end end
end end
def get_addressed_users(_, to) when is_list(to) do
User.get_ap_ids_by_nicknames(to)
end
def get_addressed_users(mentioned_users, _), do: mentioned_users
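to_for_user_and_mentions/4 becomes get_to_and_cc/4, which now receives a plain list of mentioned AP ids, and get_addressed_users/2 lets an explicit "to" list of nicknames override the mention scan. A rough sketch of the resulting addressing for two visibilities, assuming these helpers live in the usual CommonAPI.Utils module and using illustrative ids:

mentioned = ["https://example.com/users/bob"]

get_to_and_cc(user, mentioned, nil, "public")
# => {["https://www.w3.org/ns/activitystreams#Public", "https://example.com/users/bob"],
#     [user.follower_address]}

get_to_and_cc(user, mentioned, nil, "direct")
# => {["https://example.com/users/bob"], []}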
def make_poll_data(%{"poll" => %{"options" => options, "expires_in" => expires_in}} = data)
when is_list(options) do
%{max_expiration: max_expiration, min_expiration: min_expiration} =
limits = Pleroma.Config.get([:instance, :poll_limits])
# XXX: There is probably a cleaner way of doing this
try do
# In some cases mastofe sends out strings instead of integers
expires_in = if is_binary(expires_in), do: String.to_integer(expires_in), else: expires_in
if Enum.count(options) > limits.max_options do
raise ArgumentError, message: "Poll can't contain more than #{limits.max_options} options"
end
{poll, emoji} =
Enum.map_reduce(options, %{}, fn option, emoji ->
if String.length(option) > limits.max_option_chars do
raise ArgumentError,
message:
"Poll options cannot be longer than #{limits.max_option_chars} characters each"
end
{%{
"name" => option,
"type" => "Note",
"replies" => %{"type" => "Collection", "totalItems" => 0}
}, Map.merge(emoji, Formatter.get_emoji_map(option))}
end)
case expires_in do
expires_in when expires_in > max_expiration ->
raise ArgumentError, message: "Expiration date is too far in the future"
expires_in when expires_in < min_expiration ->
raise ArgumentError, message: "Expiration date is too soon"
_ ->
:noop
end
end_time =
NaiveDateTime.utc_now()
|> NaiveDateTime.add(expires_in)
|> NaiveDateTime.to_iso8601()
poll =
if Pleroma.Web.ControllerHelper.truthy_param?(data["poll"]["multiple"]) do
%{"type" => "Question", "anyOf" => poll, "closed" => end_time}
else
%{"type" => "Question", "oneOf" => poll, "closed" => end_time}
end
{poll, emoji}
rescue
e in ArgumentError -> e.message
end
end
def make_poll_data(%{"poll" => poll}) when is_map(poll) do
"Invalid poll"
end
def make_poll_data(_data) do
{%{}, %{}}
end
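make_poll_data/1 validates the option count, per-option length and expiry against [:instance, :poll_limits] and returns {poll_map, emoji_map}; invalid input falls out of the rescue as a bare error string, which post/2 then wraps into {:error, message}. A hedged example of the expected input shape, assuming the instance limits allow a one-day expiry:

data = %{
  "poll" => %{
    "options" => ["Yes", "No", "Maybe"],
    "expires_in" => 86_400,
    "multiple" => "true"
  }
}

{poll, _emoji} = make_poll_data(data)
# poll => %{"type" => "Question", "anyOf" => [...option Notes...], "closed" => <ISO 8601 end time>}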
def make_content_html( def make_content_html(
status, status,
attachments, attachments,
@ -224,7 +292,8 @@ def make_note_data(
tags, tags,
cw \\ nil, cw \\ nil,
cc \\ [], cc \\ [],
sensitive \\ false sensitive \\ false,
merge \\ %{}
) do ) do
object = %{ object = %{
"type" => "Note", "type" => "Note",
@ -239,12 +308,15 @@ def make_note_data(
"tag" => tags |> Enum.map(fn {_, tag} -> tag end) |> Enum.uniq() "tag" => tags |> Enum.map(fn {_, tag} -> tag end) |> Enum.uniq()
} }
object =
with false <- is_nil(in_reply_to), with false <- is_nil(in_reply_to),
%Object{} = in_reply_to_object <- Object.normalize(in_reply_to) do %Object{} = in_reply_to_object <- Object.normalize(in_reply_to) do
Map.put(object, "inReplyTo", in_reply_to_object.data["id"]) Map.put(object, "inReplyTo", in_reply_to_object.data["id"])
else else
_ -> object _ -> object
end end
Map.merge(object, merge)
end end
def format_naive_asctime(date) do def format_naive_asctime(date) do
@ -421,4 +493,29 @@ def conversation_id_to_context(id) do
{:error, "No such conversation"} {:error, "No such conversation"}
end end
end end
def make_answer_data(%User{ap_id: ap_id}, object, name) do
%{
"type" => "Answer",
"actor" => ap_id,
"cc" => [object.data["actor"]],
"to" => [],
"name" => name,
"inReplyTo" => object.data["id"]
}
end
def validate_character_limit(full_payload, attachments, limit) do
length = String.length(full_payload)
if length < limit do
if length > 0 or Enum.count(attachments) > 0 do
:ok
else
{:error, "Cannot post an empty status without attachments"}
end
else
{:error, "The status is over the character limit"}
end
end
end end

View file

@ -15,4 +15,22 @@ def json_response(conn, status, json) do
|> put_status(status) |> put_status(status)
|> json(json) |> json(json)
end end
@spec fetch_integer_param(map(), String.t(), integer() | nil) :: integer() | nil
def fetch_integer_param(params, name, default \\ nil) do
params
|> Map.get(name, default)
|> param_to_integer(default)
end
defp param_to_integer(val, _) when is_integer(val), do: val
defp param_to_integer(val, default) when is_binary(val) do
case Integer.parse(val) do
{res, _} -> res
_ -> default
end
end
defp param_to_integer(_, default), do: default
end end
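fetch_integer_param/3 tolerates both integer and string parameters and falls back to the default when parsing fails, e.g.:

fetch_integer_param(%{"limit" => "20"}, "limit")        # => 20
fetch_integer_param(%{"limit" => 20}, "limit")          # => 20
fetch_integer_param(%{}, "limit", 40)                   # => 40
fetch_integer_param(%{"limit" => "oops"}, "limit", 40)  # => 40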

View file

@ -16,17 +16,32 @@ defmodule Pleroma.Web.Endpoint do
plug(Pleroma.Plugs.UploadedMedia) plug(Pleroma.Plugs.UploadedMedia)
@static_cache_control "public, no-cache"
# InstanceStatic needs to be before Plug.Static to be able to override shipped-static files # InstanceStatic needs to be before Plug.Static to be able to override shipped-static files
# If you're adding new paths to `only:` you'll need to configure them in InstanceStatic as well # If you're adding new paths to `only:` you'll need to configure them in InstanceStatic as well
plug(Pleroma.Plugs.InstanceStatic, at: "/") # Cache-control headers are duplicated in case we turn off etags in the future
plug(Pleroma.Plugs.InstanceStatic,
at: "/",
gzip: true,
cache_control_for_etags: @static_cache_control,
headers: %{
"cache-control" => @static_cache_control
}
)
plug( plug(
Plug.Static, Plug.Static,
at: "/", at: "/",
from: :pleroma, from: :pleroma,
only: only:
~w(index.html robots.txt static finmoji emoji packs sounds images instance sw.js sw-pleroma.js favicon.png schemas doc) ~w(index.html robots.txt static finmoji emoji packs sounds images instance sw.js sw-pleroma.js favicon.png schemas doc),
# credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength # credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength
gzip: true,
cache_control_for_etags: @static_cache_control,
headers: %{
"cache-control" => @static_cache_control
}
) )
plug(Plug.Static.IndexHtml, at: "/pleroma/admin/") plug(Plug.Static.IndexHtml, at: "/pleroma/admin/")
@ -51,7 +66,7 @@ defmodule Pleroma.Web.Endpoint do
parsers: [:urlencoded, :multipart, :json], parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"], pass: ["*/*"],
json_decoder: Jason, json_decoder: Jason,
length: Application.get_env(:pleroma, :instance) |> Keyword.get(:upload_limit), length: Pleroma.Config.get([:instance, :upload_limit]),
body_reader: {Pleroma.Web.Plugs.DigestPlug, :read_body, []} body_reader: {Pleroma.Web.Plugs.DigestPlug, :read_body, []}
) )
@ -76,7 +91,7 @@ defmodule Pleroma.Web.Endpoint do
Plug.Session, Plug.Session,
store: :cookie, store: :cookie,
key: cookie_name, key: cookie_name,
signing_salt: {Pleroma.Config, :get, [[__MODULE__, :signing_salt], "CqaoopA2"]}, signing_salt: Pleroma.Config.get([__MODULE__, :signing_salt], "CqaoopA2"),
http_only: true, http_only: true,
secure: secure_cookies, secure: secure_cookies,
extra: extra extra: extra

View file

@ -11,13 +11,11 @@ defmodule Pleroma.Web.Federator do
alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.Federator.Publisher alias Pleroma.Web.Federator.Publisher
alias Pleroma.Web.Federator.RetryQueue alias Pleroma.Web.Federator.RetryQueue
alias Pleroma.Web.OStatus
alias Pleroma.Web.Websub alias Pleroma.Web.Websub
require Logger require Logger
@websub Application.get_env(:pleroma, :websub)
@ostatus Application.get_env(:pleroma, :ostatus)
def init do def init do
# 1 minute # 1 minute
Process.sleep(1000 * 60) Process.sleep(1000 * 60)
@ -87,12 +85,12 @@ def perform(:verify_websub, websub) do
"Running WebSub verification for #{websub.id} (#{websub.topic}, #{websub.callback})" "Running WebSub verification for #{websub.id} (#{websub.topic}, #{websub.callback})"
end) end)
@websub.verify(websub) Websub.verify(websub)
end end
def perform(:incoming_doc, doc) do def perform(:incoming_doc, doc) do
Logger.info("Got document, trying to parse") Logger.info("Got document, trying to parse")
@ostatus.handle_incoming(doc) OStatus.handle_incoming(doc)
end end
def perform(:incoming_ap_doc, params) do def perform(:incoming_ap_doc, params) do

View file

@ -15,7 +15,9 @@ def init(args) do
def start_link do def start_link do
enabled = enabled =
if Mix.env() == :test, do: true, else: Pleroma.Config.get([__MODULE__, :enabled], false) if Pleroma.Config.get(:env) == :test,
do: true,
else: Pleroma.Config.get([__MODULE__, :enabled], false)
if enabled do if enabled do
Logger.info("Starting retry queue") Logger.info("Starting retry queue")
@ -219,7 +221,7 @@ def handle_info(unknown, state) do
{:noreply, state} {:noreply, state}
end end
if Mix.env() == :test do if Pleroma.Config.get(:env) == :test do
defp growth_function(_retries) do defp growth_function(_retries) do
_shutit = Pleroma.Config.get([__MODULE__, :initial_timeout]) _shutit = Pleroma.Config.get([__MODULE__, :initial_timeout])
DateTime.to_unix(DateTime.utc_now()) - 1 DateTime.to_unix(DateTime.utc_now()) - 1

View file

@ -11,9 +11,9 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIController do
alias Pleroma.Conversation.Participation alias Pleroma.Conversation.Participation
alias Pleroma.Filter alias Pleroma.Filter
alias Pleroma.Formatter alias Pleroma.Formatter
alias Pleroma.HTTP
alias Pleroma.Notification alias Pleroma.Notification
alias Pleroma.Object alias Pleroma.Object
alias Pleroma.Object.Fetcher
alias Pleroma.Pagination alias Pleroma.Pagination
alias Pleroma.Repo alias Pleroma.Repo
alias Pleroma.ScheduledActivity alias Pleroma.ScheduledActivity
@ -46,16 +46,9 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIController do
require Logger require Logger
plug( plug(Pleroma.Plugs.RateLimiter, :app_account_creation when action == :account_register)
Pleroma.Plugs.RateLimitPlug, plug(Pleroma.Plugs.RateLimiter, :search when action in [:search, :search2, :account_search])
%{
max_requests: Config.get([:app_account_creation, :max_requests]),
interval: Config.get([:app_account_creation, :interval])
}
when action in [:account_register]
)
@httpoison Application.get_env(:pleroma, :httpoison)
@local_mastodon_name "Mastodon-Local" @local_mastodon_name "Mastodon-Local"
action_fallback(:errors) action_fallback(:errors)
@ -117,13 +110,24 @@ def update_credentials(%{assigns: %{user: user}} = conn, params) do
|> Enum.dedup() |> Enum.dedup()
info_params = info_params =
[:no_rich_text, :locked, :hide_followers, :hide_follows, :hide_favorites, :show_role] [
:no_rich_text,
:locked,
:hide_followers,
:hide_follows,
:hide_favorites,
:show_role,
:skip_thread_containment
]
|> Enum.reduce(%{}, fn key, acc -> |> Enum.reduce(%{}, fn key, acc ->
add_if_present(acc, params, to_string(key), key, fn value -> add_if_present(acc, params, to_string(key), key, fn value ->
{:ok, ControllerHelper.truthy_param?(value)} {:ok, ControllerHelper.truthy_param?(value)}
end) end)
end) end)
|> add_if_present(params, "default_scope", :default_scope) |> add_if_present(params, "default_scope", :default_scope)
|> add_if_present(params, "pleroma_settings_store", :pleroma_settings_store, fn value ->
{:ok, Map.merge(user.info.pleroma_settings_store, value)}
end)
|> add_if_present(params, "header", :banner, fn value -> |> add_if_present(params, "header", :banner, fn value ->
with %Plug.Upload{} <- value, with %Plug.Upload{} <- value,
{:ok, object} <- ActivityPub.upload(value, type: :banner) do {:ok, object} <- ActivityPub.upload(value, type: :banner) do
@ -132,6 +136,14 @@ def update_credentials(%{assigns: %{user: user}} = conn, params) do
_ -> :error _ -> :error
end end
end) end)
|> add_if_present(params, "pleroma_background_image", :background, fn value ->
with %Plug.Upload{} <- value,
{:ok, object} <- ActivityPub.upload(value, type: :background) do
{:ok, object.data}
else
_ -> :error
end
end)
|> Map.put(:emoji, user_info_emojis) |> Map.put(:emoji, user_info_emojis)
info_cng = User.Info.profile_update(user.info, info_params) info_cng = User.Info.profile_update(user.info, info_params)
@ -143,7 +155,10 @@ def update_credentials(%{assigns: %{user: user}} = conn, params) do
CommonAPI.update(user) CommonAPI.update(user)
end end
json(conn, AccountView.render("account.json", %{user: user, for: user})) json(
conn,
AccountView.render("account.json", %{user: user, for: user, with_pleroma_settings: true})
)
else else
_e -> _e ->
conn conn
@ -216,7 +231,16 @@ def update_background(%{assigns: %{user: user}} = conn, params) do
end end
def verify_credentials(%{assigns: %{user: user}} = conn, _) do def verify_credentials(%{assigns: %{user: user}} = conn, _) do
account = AccountView.render("account.json", %{user: user, for: user}) chat_token = Phoenix.Token.sign(conn, "user socket", user.id)
account =
AccountView.render("account.json", %{
user: user,
for: user,
with_pleroma_settings: true,
with_chat_token: chat_token
})
json(conn, account) json(conn, account)
end end
@ -260,7 +284,8 @@ def masto_instance(conn, _params) do
languages: ["en"], languages: ["en"],
registrations: Pleroma.Config.get([:instance, :registrations_open]), registrations: Pleroma.Config.get([:instance, :registrations_open]),
# Extra (not present in Mastodon): # Extra (not present in Mastodon):
max_toot_chars: Keyword.get(instance, :limit) max_toot_chars: Keyword.get(instance, :limit),
poll_limits: Keyword.get(instance, :poll_limits)
} }
json(conn, response) json(conn, response)
@ -472,6 +497,67 @@ def get_context(%{assigns: %{user: user}} = conn, %{"id" => id}) do
end end
end end
def get_poll(%{assigns: %{user: user}} = conn, %{"id" => id}) do
with %Object{} = object <- Object.get_by_id(id),
%Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
true <- Visibility.visible_for_user?(activity, user) do
conn
|> put_view(StatusView)
|> try_render("poll.json", %{object: object, for: user})
else
nil ->
conn
|> put_status(404)
|> json(%{error: "Record not found"})
false ->
conn
|> put_status(404)
|> json(%{error: "Record not found"})
end
end
defp get_cached_vote_or_vote(user, object, choices) do
idempotency_key = "polls:#{user.id}:#{object.data["id"]}"
{_, res} =
Cachex.fetch(:idempotency_cache, idempotency_key, fn _ ->
case CommonAPI.vote(user, object, choices) do
{:error, _message} = res -> {:ignore, res}
res -> {:commit, res}
end
end)
res
end
def poll_vote(%{assigns: %{user: user}} = conn, %{"id" => id, "choices" => choices}) do
with %Object{} = object <- Object.get_by_id(id),
true <- object.data["type"] == "Question",
%Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
true <- Visibility.visible_for_user?(activity, user),
{:ok, _activities, object} <- get_cached_vote_or_vote(user, object, choices) do
conn
|> put_view(StatusView)
|> try_render("poll.json", %{object: object, for: user})
else
nil ->
conn
|> put_status(404)
|> json(%{error: "Record not found"})
false ->
conn
|> put_status(404)
|> json(%{error: "Record not found"})
{:error, message} ->
conn
|> put_status(422)
|> json(%{error: message})
end
end
def scheduled_statuses(%{assigns: %{user: user}} = conn, params) do def scheduled_statuses(%{assigns: %{user: user}} = conn, params) do
with scheduled_activities <- MastodonAPI.get_scheduled_activities(user, params) do with scheduled_activities <- MastodonAPI.get_scheduled_activities(user, params) do
conn conn
@ -521,26 +607,11 @@ def delete_scheduled_status(%{assigns: %{user: user}} = conn, %{"id" => schedule
end end
end end
def post_status(conn, %{"status" => "", "media_ids" => media_ids} = params)
when length(media_ids) > 0 do
params =
params
|> Map.put("status", ".")
post_status(conn, params)
end
def post_status(%{assigns: %{user: user}} = conn, %{"status" => _} = params) do def post_status(%{assigns: %{user: user}} = conn, %{"status" => _} = params) do
params = params =
params params
|> Map.put("in_reply_to_status_id", params["in_reply_to_id"]) |> Map.put("in_reply_to_status_id", params["in_reply_to_id"])
idempotency_key =
case get_req_header(conn, "idempotency-key") do
[key] -> key
_ -> Ecto.UUID.generate()
end
scheduled_at = params["scheduled_at"] scheduled_at = params["scheduled_at"]
if scheduled_at && ScheduledActivity.far_enough?(scheduled_at) do if scheduled_at && ScheduledActivity.far_enough?(scheduled_at) do
@ -553,16 +624,39 @@ def post_status(%{assigns: %{user: user}} = conn, %{"status" => _} = params) do
else else
params = Map.drop(params, ["scheduled_at"]) params = Map.drop(params, ["scheduled_at"])
{:ok, activity} = case get_cached_status_or_post(conn, params) do
Cachex.fetch!(:idempotency_cache, idempotency_key, fn _ -> {:ignore, message} ->
CommonAPI.post(user, params) conn
end) |> put_status(422)
|> json(%{error: message})
{:error, message} ->
conn
|> put_status(422)
|> json(%{error: message})
{_, activity} ->
conn conn
|> put_view(StatusView) |> put_view(StatusView)
|> try_render("status.json", %{activity: activity, for: user, as: :activity}) |> try_render("status.json", %{activity: activity, for: user, as: :activity})
end end
end end
end
defp get_cached_status_or_post(%{assigns: %{user: user}} = conn, params) do
idempotency_key =
case get_req_header(conn, "idempotency-key") do
[key] -> key
_ -> Ecto.UUID.generate()
end
Cachex.fetch(:idempotency_cache, idempotency_key, fn _ ->
case CommonAPI.post(user, params) do
{:ok, activity} -> activity
{:error, message} -> {:ignore, message}
end
end)
end
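Status creation is now deduplicated through Cachex.fetch/3 keyed on the Idempotency-Key header: a successful post is committed (so a retried request with the same key gets the same activity back), while errors are returned with {:ignore, ...} and never cached. get_cached_vote_or_vote/3 above uses the same pattern. A minimal illustration of the Cachex commit/ignore contract being relied on (cache name from this diff; key and values are illustrative):

# First call runs the fallback and stores the committed value.
Cachex.fetch(:idempotency_cache, "some-key", fn _ -> {:commit, :created} end)
# => {:commit, :created}

# A repeat call with the same key returns the cached value without re-running the fallback.
Cachex.fetch(:idempotency_cache, "some-key", fn _ -> {:commit, :other} end)
# => {:ok, :created}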
def delete_status(%{assigns: %{user: user}} = conn, %{"id" => id}) do def delete_status(%{assigns: %{user: user}} = conn, %{"id" => id}) do
with {:ok, %Activity{}} <- CommonAPI.delete(id, user) do with {:ok, %Activity{}} <- CommonAPI.delete(id, user) do
@@ -1107,114 +1201,6 @@ def unsubscribe(%{assigns: %{user: user}} = conn, %{"id" => id}) do
     end
   end
 
-  def status_search_query_with_gin(q, query) do
-    from([a, o] in q,
-      where:
-        fragment(
-          "to_tsvector('english', ?->>'content') @@ plainto_tsquery('english', ?)",
-          o.data,
-          ^query
-        ),
-      order_by: [desc: :id]
-    )
-  end
-
-  def status_search_query_with_rum(q, query) do
-    from([a, o] in q,
-      where:
-        fragment(
-          "? @@ plainto_tsquery('english', ?)",
-          o.fts_content,
-          ^query
-        ),
-      order_by: [fragment("? <=> now()::date", o.inserted_at)]
-    )
-  end
-
-  def status_search(user, query) do
-    fetched =
-      if Regex.match?(~r/https?:/, query) do
-        with {:ok, object} <- Fetcher.fetch_object_from_id(query),
-             %Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
-             true <- Visibility.visible_for_user?(activity, user) do
-          [activity]
-        else
-          _e -> []
-        end
-      end || []
-
-    q =
-      from([a, o] in Activity.with_preloaded_object(Activity),
-        where: fragment("?->>'type' = 'Create'", a.data),
-        where: "https://www.w3.org/ns/activitystreams#Public" in a.recipients,
-        limit: 20
-      )
-
-    q =
-      if Pleroma.Config.get([:database, :rum_enabled]) do
-        status_search_query_with_rum(q, query)
-      else
-        status_search_query_with_gin(q, query)
-      end
-
-    Repo.all(q) ++ fetched
-  end
-
-  def search2(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
-    accounts = User.search(query, resolve: params["resolve"] == "true", for_user: user)
-    statuses = status_search(user, query)
-
-    tags_path = Web.base_url() <> "/tag/"
-
-    tags =
-      query
-      |> String.split()
-      |> Enum.uniq()
-      |> Enum.filter(fn tag -> String.starts_with?(tag, "#") end)
-      |> Enum.map(fn tag -> String.slice(tag, 1..-1) end)
-      |> Enum.map(fn tag -> %{name: tag, url: tags_path <> tag} end)
-
-    res = %{
-      "accounts" => AccountView.render("accounts.json", users: accounts, for: user, as: :user),
-      "statuses" =>
-        StatusView.render("index.json", activities: statuses, for: user, as: :activity),
-      "hashtags" => tags
-    }
-
-    json(conn, res)
-  end
-
-  def search(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
-    accounts = User.search(query, resolve: params["resolve"] == "true", for_user: user)
-    statuses = status_search(user, query)
-
-    tags =
-      query
-      |> String.split()
-      |> Enum.uniq()
-      |> Enum.filter(fn tag -> String.starts_with?(tag, "#") end)
-      |> Enum.map(fn tag -> String.slice(tag, 1..-1) end)
-
-    res = %{
-      "accounts" => AccountView.render("accounts.json", users: accounts, for: user, as: :user),
-      "statuses" =>
-        StatusView.render("index.json", activities: statuses, for: user, as: :activity),
-      "hashtags" => tags
-    }
-
-    json(conn, res)
-  end
-
-  def account_search(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
-    accounts = User.search(query, resolve: params["resolve"] == "true", for_user: user)
-    res = AccountView.render("accounts.json", users: accounts, for: user, as: :user)
-
-    json(conn, res)
-  end
-
   def favourites(%{assigns: %{user: user}} = conn, params) do
     params =
       params
@@ -1409,8 +1395,6 @@ def index(%{assigns: %{user: user}} = conn, _params) do
     accounts =
       Map.put(%{}, user.id, AccountView.render("account.json", %{user: user, for: user}))
 
-    flavour = get_user_flavour(user)
-
     initial_state =
       %{
         meta: %{
@@ -1429,6 +1413,7 @@ def index(%{assigns: %{user: user}} = conn, _params) do
           max_toot_chars: limit,
           mascot: User.get_mascot(user)["url"]
         },
+        poll_limits: Config.get([:instance, :poll_limits]),
        rights: %{
          delete_others_notice: present?(user.info.is_moderator),
          admin: present?(user.info.is_admin)
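For reference, the new poll_limits entry simply exposes whatever is configured under the :instance key. A hypothetical config sketch; the key names and numbers are assumptions, not taken from this diff:

config :pleroma, :instance,
  poll_limits: %{
    max_options: 20,
    max_option_chars: 200,
    min_expiration: 0,
    max_expiration: 365 * 24 * 60 * 60
  }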
@@ -1496,7 +1481,7 @@ def index(%{assigns: %{user: user}} = conn, _params) do
       conn
       |> put_layout(false)
       |> put_view(MastodonView)
-      |> render("index.html", %{initial_state: initial_state, flavour: flavour})
+      |> render("index.html", %{initial_state: initial_state})
     else
       conn
       |> put_session(:return_to, conn.request_path)
@@ -1519,43 +1504,6 @@ def put_settings(%{assigns: %{user: user}} = conn, %{"data" => settings} = _para
     end
   end
 
-  @supported_flavours ["glitch", "vanilla"]
-
-  def set_flavour(%{assigns: %{user: user}} = conn, %{"flavour" => flavour} = _params)
-      when flavour in @supported_flavours do
-    flavour_cng = User.Info.mastodon_flavour_update(user.info, flavour)
-
-    with changeset <- Ecto.Changeset.change(user),
-         changeset <- Ecto.Changeset.put_embed(changeset, :info, flavour_cng),
-         {:ok, user} <- User.update_and_set_cache(changeset),
-         flavour <- user.info.flavour do
-      json(conn, flavour)
-    else
-      e ->
-        conn
-        |> put_resp_content_type("application/json")
-        |> send_resp(500, Jason.encode!(%{"error" => inspect(e)}))
-    end
-  end
-
-  def set_flavour(conn, _params) do
-    conn
-    |> put_status(400)
-    |> json(%{error: "Unsupported flavour"})
-  end
-
-  def get_flavour(%{assigns: %{user: user}} = conn, _params) do
-    json(conn, get_user_flavour(user))
-  end
-
-  defp get_user_flavour(%User{info: %{flavour: flavour}}) when flavour in @supported_flavours do
-    flavour
-  end
-
-  defp get_user_flavour(_) do
-    "glitch"
-  end
-
   def login(%{assigns: %{user: %User{}}} = conn, _params) do
     redirect(conn, to: local_mastodon_root_path(conn))
   end
@@ -1754,7 +1702,7 @@ def suggestions(%{assigns: %{user: user}} = conn, _) do
       |> String.replace("{{user}}", user)
 
     with {:ok, %{status: 200, body: body}} <-
-           @httpoison.get(
+           HTTP.get(
              url,
              [],
              adapter: [

@@ -0,0 +1,79 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.MastodonAPI.SearchController do
+  use Pleroma.Web, :controller
+
+  alias Pleroma.Activity
+  alias Pleroma.User
+  alias Pleroma.Web
+  alias Pleroma.Web.MastodonAPI.AccountView
+  alias Pleroma.Web.MastodonAPI.StatusView
+  alias Pleroma.Web.ControllerHelper
+
+  require Logger
+
+  plug(Pleroma.Plugs.RateLimiter, :search when action in [:search, :search2, :account_search])
+
+  def search2(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
+    accounts = User.search(query, search_options(params, user))
+    statuses = Activity.search(user, query)
+    tags_path = Web.base_url() <> "/tag/"
+
+    tags =
+      query
+      |> String.split()
+      |> Enum.uniq()
+      |> Enum.filter(fn tag -> String.starts_with?(tag, "#") end)
+      |> Enum.map(fn tag -> String.slice(tag, 1..-1) end)
+      |> Enum.map(fn tag -> %{name: tag, url: tags_path <> tag} end)
+
+    res = %{
+      "accounts" => AccountView.render("accounts.json", users: accounts, for: user, as: :user),
+      "statuses" =>
+        StatusView.render("index.json", activities: statuses, for: user, as: :activity),
+      "hashtags" => tags
+    }
+
+    json(conn, res)
+  end
+
+  def search(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
+    accounts = User.search(query, search_options(params, user))
+    statuses = Activity.search(user, query)
+
+    tags =
+      query
+      |> String.split()
+      |> Enum.uniq()
+      |> Enum.filter(fn tag -> String.starts_with?(tag, "#") end)
+      |> Enum.map(fn tag -> String.slice(tag, 1..-1) end)
+
+    res = %{
+      "accounts" => AccountView.render("accounts.json", users: accounts, for: user, as: :user),
+      "statuses" =>
+        StatusView.render("index.json", activities: statuses, for: user, as: :activity),
+      "hashtags" => tags
+    }
+
+    json(conn, res)
+  end
+
+  def account_search(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
+    accounts = User.search(query, search_options(params, user))
+    res = AccountView.render("accounts.json", users: accounts, for: user, as: :user)
+
+    json(conn, res)
+  end
+
+  defp search_options(params, user) do
+    [
+      resolve: params["resolve"] == "true",
+      following: params["following"] == "true",
+      limit: ControllerHelper.fetch_integer_param(params, "limit"),
+      offset: ControllerHelper.fetch_integer_param(params, "offset"),
+      for_user: user
+    ]
+  end
+end
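The hashtag handling in search/2 and search2/2 above is plain String/Enum plumbing; a quick iex illustration with a made-up query string:

iex> "#elixir test #Pleroma #elixir"
...> |> String.split()
...> |> Enum.uniq()
...> |> Enum.filter(fn tag -> String.starts_with?(tag, "#") end)
...> |> Enum.map(fn tag -> String.slice(tag, 1..-1) end)
["elixir", "Pleroma"]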

@@ -66,6 +66,8 @@ def render("relationships.json", %{user: user, targets: targets}) do
   end
 
   defp do_render("account.json", %{user: user} = opts) do
+    display_name = HTML.strip_tags(user.name || user.nickname)
+
     image = User.avatar_url(user) |> MediaProxy.url()
     header = User.banner_url(user) |> MediaProxy.url()
     user_info = User.get_cached_user_info(user)
@@ -96,7 +98,7 @@ defp do_render("account.json", %{user: user} = opts) do
       id: to_string(user.id),
       username: username_from_nickname(user.nickname),
       acct: user.nickname,
-      display_name: user.name || user.nickname,
+      display_name: display_name,
       locked: user_info.locked,
       created_at: Utils.to_masto_date(user.inserted_at),
       followers_count: user_info.follower_count,
@@ -124,12 +126,16 @@ defp do_render("account.json", %{user: user} = opts) do
         hide_followers: user.info.hide_followers,
         hide_follows: user.info.hide_follows,
         hide_favorites: user.info.hide_favorites,
-        relationship: relationship
+        relationship: relationship,
+        skip_thread_containment: user.info.skip_thread_containment,
+        background_image: image_url(user.info.background) |> MediaProxy.url()
       }
     }
     |> maybe_put_role(user, opts[:for])
     |> maybe_put_settings(user, opts[:for], user_info)
     |> maybe_put_notification_settings(user, opts[:for])
+    |> maybe_put_settings_store(user, opts[:for], opts)
+    |> maybe_put_chat_token(user, opts[:for], opts)
   end
 
   defp username_from_nickname(string) when is_binary(string) do
@@ -152,6 +158,24 @@ defp maybe_put_settings(
   defp maybe_put_settings(data, _, _, _), do: data
 
+  defp maybe_put_settings_store(data, %User{info: info, id: id}, %User{id: id}, %{
+         with_pleroma_settings: true
+       }) do
+    data
+    |> Kernel.put_in([:pleroma, :settings_store], info.pleroma_settings_store)
+  end
+
+  defp maybe_put_settings_store(data, _, _, _), do: data
+
+  defp maybe_put_chat_token(data, %User{id: id}, %User{id: id}, %{
+         with_chat_token: token
+       }) do
+    data
+    |> Kernel.put_in([:pleroma, :chat_token], token)
+  end
+
+  defp maybe_put_chat_token(data, _, _, _), do: data
+
   defp maybe_put_role(data, %User{info: %{show_role: true}} = user, _) do
     data
     |> Kernel.put_in([:pleroma, :is_admin], user.info.is_admin)
@@ -171,4 +195,7 @@ defp maybe_put_notification_settings(data, %User{id: user_id} = user, %User{id:
   end
 
   defp maybe_put_notification_settings(data, _, _), do: data
+
+  defp image_url(%{"url" => [%{"href" => href} | _]}), do: href
+  defp image_url(_), do: nil
 end
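The new image_url/1 helper expects the stored background to carry an ActivityStreams-style url list. A sketch of the two shapes it handles; the href is invented, and since the function is private this is illustrative only:

image_url(%{"url" => [%{"href" => "https://example.com/bg.png"}]})
# => "https://example.com/bg.png"

image_url(%{})
# => nil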

@@ -22,9 +22,14 @@ def render("participation.json", %{participation: participation, user: user}) do
     last_status = StatusView.render("status.json", %{activity: activity, for: user})
 
+    # Conversations return all users except the current user.
+    users =
+      participation.conversation.users
+      |> Enum.reject(&(&1.id == user.id))
+
     accounts =
       AccountView.render("accounts.json", %{
-        users: participation.conversation.users,
+        users: users,
         as: :user
       })

@@ -240,6 +240,7 @@ def render("status.json", %{activity: %{data: %{"object" => _object}} = activity
       spoiler_text: summary_html,
       visibility: get_visibility(object),
       media_attachments: attachments,
+      poll: render("poll.json", %{object: object, for: opts[:for]}),
       mentions: mentions,
       tags: build_tags(tags),
       application: %{
@@ -290,8 +291,8 @@ def render("card.json", %{rich_media: rich_media, page_url: page_url}) do
       provider_url: page_url_data.scheme <> "://" <> page_url_data.host,
       url: page_url,
       image: image_url |> MediaProxy.url(),
-      title: rich_media[:title],
-      description: rich_media[:description],
+      title: rich_media[:title] || "",
+      description: rich_media[:description] || "",
       pleroma: %{
         opengraph: rich_media
       }
@@ -329,6 +330,64 @@ def render("attachment.json", %{attachment: attachment}) do
     }
   end
 
+  def render("poll.json", %{object: object} = opts) do
+    {multiple, options} =
+      case object.data do
+        %{"anyOf" => options} when is_list(options) -> {true, options}
+        %{"oneOf" => options} when is_list(options) -> {false, options}
+        _ -> {nil, nil}
+      end
+
+    if options do
+      end_time =
+        (object.data["closed"] || object.data["endTime"])
+        |> NaiveDateTime.from_iso8601!()
+
+      expired =
+        end_time
+        |> NaiveDateTime.compare(NaiveDateTime.utc_now())
+        |> case do
+          :lt -> true
+          _ -> false
+        end
+
+      voted =
+        if opts[:for] do
+          existing_votes =
+            Pleroma.Web.ActivityPub.Utils.get_existing_votes(opts[:for].ap_id, object)
+
+          existing_votes != [] or opts[:for].ap_id == object.data["actor"]
+        else
+          false
+        end
+
+      {options, votes_count} =
+        Enum.map_reduce(options, 0, fn %{"name" => name} = option, count ->
+          current_count = option["replies"]["totalItems"] || 0
+
+          {%{
+             title: HTML.strip_tags(name),
+             votes_count: current_count
+           }, current_count + count}
+        end)
+
+      %{
+        # Mastodon uses separate ids for polls, but an object can't have
+        # more than one poll embedded so object id is fine
+        id: object.id,
+        expires_at: Utils.to_masto_date(end_time),
+        expired: expired,
+        multiple: multiple,
+        votes_count: votes_count,
+        options: options,
+        voted: voted,
+        emojis: build_emojis(object.data["emoji"])
+      }
+    else
+      nil
+    end
+  end
+
   def get_reply_to(activity, %{replied_to_activities: replied_to_activities}) do
     object = Object.normalize(activity)
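To make the branches in "poll.json" concrete, here is a hypothetical single-choice Question object (all values invented) and what the renderer would derive from it:

data = %{
  "type" => "Question",
  "oneOf" => [
    %{"name" => "yes", "replies" => %{"totalItems" => 3}},
    %{"name" => "no", "replies" => %{"totalItems" => 1}}
  ],
  "closed" => "2019-07-01T12:00:00Z"
}

# oneOf yields multiple: false (anyOf would yield multiple: true);
# votes_count: 4; options: [%{title: "yes", votes_count: 3}, %{title: "no", votes_count: 1}];
# expired becomes true once NaiveDateTime.utc_now() passes the "closed" timestamp.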

@@ -17,6 +17,7 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
     "public:media",
     "public:local:media",
     "user",
+    "user:notification",
     "direct",
     "list",
     "hashtag"

Some files were not shown because too many files have changed in this diff.