forked from AkkomaGang/akkoma
Merge branch 'develop' into issue/1276
This commit is contained in:
commit
e442ea5722
806 changed files with 10583 additions and 7450 deletions
|
@ -1,3 +1,3 @@
|
||||||
[
|
[
|
||||||
inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}", "priv/repo/migrations/*.exs"]
|
inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}", "priv/repo/migrations/*.exs", "priv/scrubbers/*.ex"]
|
||||||
]
|
]
|
||||||
|
|
|
@ -1,23 +1,25 @@
|
||||||
image: elixir:1.8.1
|
image: elixir:1.8.1
|
||||||
|
|
||||||
variables:
|
variables: &global_variables
|
||||||
POSTGRES_DB: pleroma_test
|
POSTGRES_DB: pleroma_test
|
||||||
POSTGRES_USER: postgres
|
POSTGRES_USER: postgres
|
||||||
POSTGRES_PASSWORD: postgres
|
POSTGRES_PASSWORD: postgres
|
||||||
DB_HOST: postgres
|
DB_HOST: postgres
|
||||||
MIX_ENV: test
|
MIX_ENV: test
|
||||||
|
|
||||||
cache:
|
cache: &global_cache_policy
|
||||||
key: ${CI_COMMIT_REF_SLUG}
|
key: ${CI_COMMIT_REF_SLUG}
|
||||||
paths:
|
paths:
|
||||||
- deps
|
- deps
|
||||||
- _build
|
- _build
|
||||||
|
|
||||||
stages:
|
stages:
|
||||||
- build
|
- build
|
||||||
- test
|
- test
|
||||||
- benchmark
|
- benchmark
|
||||||
- deploy
|
- deploy
|
||||||
- release
|
- release
|
||||||
|
- docker
|
||||||
|
|
||||||
before_script:
|
before_script:
|
||||||
- mix local.hex --force
|
- mix local.hex --force
|
||||||
|
@ -46,6 +48,10 @@ benchmark:
|
||||||
|
|
||||||
unit-testing:
|
unit-testing:
|
||||||
stage: test
|
stage: test
|
||||||
|
cache: &testing_cache_policy
|
||||||
|
<<: *global_cache_policy
|
||||||
|
policy: pull
|
||||||
|
|
||||||
services:
|
services:
|
||||||
- name: postgres:9.6
|
- name: postgres:9.6
|
||||||
alias: postgres
|
alias: postgres
|
||||||
|
@ -58,6 +64,7 @@ unit-testing:
|
||||||
|
|
||||||
federated-testing:
|
federated-testing:
|
||||||
stage: test
|
stage: test
|
||||||
|
cache: *testing_cache_policy
|
||||||
services:
|
services:
|
||||||
- name: minibikini/postgres-with-rum:12
|
- name: minibikini/postgres-with-rum:12
|
||||||
alias: postgres
|
alias: postgres
|
||||||
|
@ -71,11 +78,13 @@ federated-testing:
|
||||||
|
|
||||||
unit-testing-rum:
|
unit-testing-rum:
|
||||||
stage: test
|
stage: test
|
||||||
|
cache: *testing_cache_policy
|
||||||
services:
|
services:
|
||||||
- name: minibikini/postgres-with-rum:12
|
- name: minibikini/postgres-with-rum:12
|
||||||
alias: postgres
|
alias: postgres
|
||||||
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
|
command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
|
||||||
variables:
|
variables:
|
||||||
|
<<: *global_variables
|
||||||
RUM_ENABLED: "true"
|
RUM_ENABLED: "true"
|
||||||
script:
|
script:
|
||||||
- mix deps.get
|
- mix deps.get
|
||||||
|
@ -86,17 +95,20 @@ unit-testing-rum:
|
||||||
|
|
||||||
lint:
|
lint:
|
||||||
stage: test
|
stage: test
|
||||||
|
cache: *testing_cache_policy
|
||||||
script:
|
script:
|
||||||
- mix format --check-formatted
|
- mix format --check-formatted
|
||||||
|
|
||||||
analysis:
|
analysis:
|
||||||
stage: test
|
stage: test
|
||||||
|
cache: *testing_cache_policy
|
||||||
script:
|
script:
|
||||||
- mix deps.get
|
- mix deps.get
|
||||||
- mix credo --strict --only=warnings,todo,fixme,consistency,readability
|
- mix credo --strict --only=warnings,todo,fixme,consistency,readability
|
||||||
|
|
||||||
docs-deploy:
|
docs-deploy:
|
||||||
stage: deploy
|
stage: deploy
|
||||||
|
cache: *testing_cache_policy
|
||||||
image: alpine:latest
|
image: alpine:latest
|
||||||
only:
|
only:
|
||||||
- stable@pleroma/pleroma
|
- stable@pleroma/pleroma
|
||||||
|
@ -254,3 +266,66 @@ arm64-musl:
|
||||||
variables: *release-variables
|
variables: *release-variables
|
||||||
before_script: *before-release-musl
|
before_script: *before-release-musl
|
||||||
script: *release
|
script: *release
|
||||||
|
|
||||||
|
docker:
|
||||||
|
stage: docker
|
||||||
|
image: docker:latest
|
||||||
|
cache: {}
|
||||||
|
dependencies: []
|
||||||
|
variables: &docker-variables
|
||||||
|
DOCKER_DRIVER: overlay2
|
||||||
|
DOCKER_HOST: unix:///var/run/docker.sock
|
||||||
|
IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_SHORT_SHA
|
||||||
|
IMAGE_TAG_SLUG: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
|
||||||
|
IMAGE_TAG_LATEST: $CI_REGISTRY_IMAGE:latest
|
||||||
|
IMAGE_TAG_LATEST_STABLE: $CI_REGISTRY_IMAGE:latest-stable
|
||||||
|
before_script: &before-docker
|
||||||
|
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
|
||||||
|
- docker pull $IMAGE_TAG_SLUG || true
|
||||||
|
- export CI_JOB_TIMESTAMP=$(date --utc -Iseconds)
|
||||||
|
- export CI_VCS_REF=$CI_COMMIT_SHORT_SHA
|
||||||
|
allow_failure: true
|
||||||
|
script:
|
||||||
|
- docker build --cache-from $IMAGE_TAG_SLUG --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP -t $IMAGE_TAG -t $IMAGE_TAG_SLUG -t $IMAGE_TAG_LATEST .
|
||||||
|
- docker push $IMAGE_TAG
|
||||||
|
- docker push $IMAGE_TAG_SLUG
|
||||||
|
- docker push $IMAGE_TAG_LATEST
|
||||||
|
tags:
|
||||||
|
- dind
|
||||||
|
only:
|
||||||
|
- develop@pleroma/pleroma
|
||||||
|
|
||||||
|
docker-stable:
|
||||||
|
stage: docker
|
||||||
|
image: docker:latest
|
||||||
|
cache: {}
|
||||||
|
dependencies: []
|
||||||
|
variables: *docker-variables
|
||||||
|
before_script: *before-docker
|
||||||
|
allow_failure: true
|
||||||
|
script:
|
||||||
|
- docker build --cache-from $IMAGE_TAG_SLUG --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP -t $IMAGE_TAG -t $IMAGE_TAG_SLUG -t $IMAGE_TAG_LATEST_STABLE .
|
||||||
|
- docker push $IMAGE_TAG
|
||||||
|
- docker push $IMAGE_TAG_SLUG
|
||||||
|
- docker push $IMAGE_TAG_LATEST_STABLE
|
||||||
|
tags:
|
||||||
|
- dind
|
||||||
|
only:
|
||||||
|
- stable@pleroma/pleroma
|
||||||
|
|
||||||
|
docker-release:
|
||||||
|
stage: docker
|
||||||
|
image: docker:latest
|
||||||
|
cache: {}
|
||||||
|
dependencies: []
|
||||||
|
variables: *docker-variables
|
||||||
|
before_script: *before-docker
|
||||||
|
allow_failure: true
|
||||||
|
script:
|
||||||
|
- docker build --cache-from $IMAGE_TAG_SLUG --build-arg VCS_REF=$CI_VCS_REF --build-arg BUILD_DATE=$CI_JOB_TIMESTAMP -t $IMAGE_TAG -t $IMAGE_TAG_SLUG .
|
||||||
|
- docker push $IMAGE_TAG
|
||||||
|
- docker push $IMAGE_TAG_SLUG
|
||||||
|
tags:
|
||||||
|
- dind
|
||||||
|
only:
|
||||||
|
- /^release/.*$/@pleroma/pleroma
|
||||||
|
|
28
CHANGELOG.md
28
CHANGELOG.md
|
@ -7,10 +7,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||||
### Removed
|
### Removed
|
||||||
- **Breaking**: Removed 1.0+ deprecated configurations `Pleroma.Upload, :strip_exif` and `:instance, :dedupe_media`
|
- **Breaking**: Removed 1.0+ deprecated configurations `Pleroma.Upload, :strip_exif` and `:instance, :dedupe_media`
|
||||||
- **Breaking**: OStatus protocol support
|
- **Breaking**: OStatus protocol support
|
||||||
|
- **Breaking**: MDII uploader
|
||||||
|
|
||||||
### Changed
|
### Changed
|
||||||
|
- **Breaking:** Pleroma won't start if it detects unapplied migrations
|
||||||
|
- **Breaking:** attachments are removed along with statuses when there are no other references to it
|
||||||
- **Breaking:** Elixir >=1.8 is now required (was >= 1.7)
|
- **Breaking:** Elixir >=1.8 is now required (was >= 1.7)
|
||||||
- **Breaking:** attachment links (`config :pleroma, :instance, no_attachment_links` and `config :pleroma, Pleroma.Upload, link_name`) disabled by default
|
- **Breaking:** attachment links (`config :pleroma, :instance, no_attachment_links` and `config :pleroma, Pleroma.Upload, link_name`) disabled by default
|
||||||
|
- **Breaking:** OAuth: defaulted `[:auth, :enforce_oauth_admin_scope_usage]` setting to `true` which demands `admin` OAuth scope to perform admin actions (in addition to `is_admin` flag on User); make sure to use bundled or newer versions of AdminFE & PleromaFE to access admin / moderator features.
|
||||||
|
- **Breaking:** Dynamic configuration has been rearchitected. The `:pleroma, :instance, dynamic_configuration` setting has been replaced with `config :pleroma, configurable_from_database`. Please backup your configuration to a file and run the migration task to ensure consistency with the new schema.
|
||||||
- Replaced [pleroma_job_queue](https://git.pleroma.social/pleroma/pleroma_job_queue) and `Pleroma.Web.Federator.RetryQueue` with [Oban](https://github.com/sorentwo/oban) (see [`docs/config.md`](docs/config.md) on migrating customized worker / retry settings)
|
- Replaced [pleroma_job_queue](https://git.pleroma.social/pleroma/pleroma_job_queue) and `Pleroma.Web.Federator.RetryQueue` with [Oban](https://github.com/sorentwo/oban) (see [`docs/config.md`](docs/config.md) on migrating customized worker / retry settings)
|
||||||
- Introduced [quantum](https://github.com/quantum-elixir/quantum-core) job scheduler
|
- Introduced [quantum](https://github.com/quantum-elixir/quantum-core) job scheduler
|
||||||
- Enabled `:instance, extended_nickname_format` in the default config
|
- Enabled `:instance, extended_nickname_format` in the default config
|
||||||
|
@ -21,6 +26,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||||
- Deprecated `User.Info` embedded schema (fields moved to `User`)
|
- Deprecated `User.Info` embedded schema (fields moved to `User`)
|
||||||
- Store status data inside Flag activity
|
- Store status data inside Flag activity
|
||||||
- Deprecated (reorganized as `UserRelationship` entity) User fields with user AP IDs (`blocks`, `mutes`, `muted_reblogs`, `muted_notifications`, `subscribers`).
|
- Deprecated (reorganized as `UserRelationship` entity) User fields with user AP IDs (`blocks`, `mutes`, `muted_reblogs`, `muted_notifications`, `subscribers`).
|
||||||
|
- Logger: default log level changed from `warn` to `info`.
|
||||||
|
- Config mix task `migrate_to_db` truncates `config` table before migrating the config file.
|
||||||
<details>
|
<details>
|
||||||
<summary>API Changes</summary>
|
<summary>API Changes</summary>
|
||||||
|
|
||||||
|
@ -28,6 +35,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||||
- **Breaking:** Admin API: Return link alongside with token on password reset
|
- **Breaking:** Admin API: Return link alongside with token on password reset
|
||||||
- **Breaking:** Admin API: `PUT /api/pleroma/admin/reports/:id` is now `PATCH /api/pleroma/admin/reports`, see admin_api.md for details
|
- **Breaking:** Admin API: `PUT /api/pleroma/admin/reports/:id` is now `PATCH /api/pleroma/admin/reports`, see admin_api.md for details
|
||||||
- **Breaking:** `/api/pleroma/admin/users/invite_token` now uses `POST`, changed accepted params and returns full invite in json instead of only token string.
|
- **Breaking:** `/api/pleroma/admin/users/invite_token` now uses `POST`, changed accepted params and returns full invite in json instead of only token string.
|
||||||
|
- **Breaking** replying to reports is now "report notes", enpoint changed from `POST /api/pleroma/admin/reports/:id/respond` to `POST /api/pleroma/admin/reports/:id/notes`
|
||||||
- Admin API: Return `total` when querying for reports
|
- Admin API: Return `total` when querying for reports
|
||||||
- Mastodon API: Return `pleroma.direct_conversation_id` when creating a direct message (`POST /api/v1/statuses`)
|
- Mastodon API: Return `pleroma.direct_conversation_id` when creating a direct message (`POST /api/v1/statuses`)
|
||||||
- Admin API: Return link alongside with token on password reset
|
- Admin API: Return link alongside with token on password reset
|
||||||
|
@ -38,6 +46,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||||
- Mastodon API, streaming: Add `pleroma.direct_conversation_id` to the `conversation` stream event payload.
|
- Mastodon API, streaming: Add `pleroma.direct_conversation_id` to the `conversation` stream event payload.
|
||||||
- Mastodon API: Add `pleroma.unread_count` to the Marker entity
|
- Mastodon API: Add `pleroma.unread_count` to the Marker entity
|
||||||
- Admin API: Render whole status in grouped reports
|
- Admin API: Render whole status in grouped reports
|
||||||
|
- Mastodon API: User timelines will now respect blocks, unless you are getting the user timeline of somebody you blocked (which would be empty otherwise).
|
||||||
|
- Mastodon API: Favoriting / Repeating a post multiple times will now return the identical response every time. Before, executing that action twice would return an error ("already favorited") on the second try.
|
||||||
</details>
|
</details>
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
|
@ -49,6 +59,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||||
- Mix task to list all users (`mix pleroma.user list`)
|
- Mix task to list all users (`mix pleroma.user list`)
|
||||||
- Support for `X-Forwarded-For` and similar HTTP headers which used by reverse proxies to pass a real user IP address to the backend. Must not be enabled unless your instance is behind at least one reverse proxy (such as Nginx, Apache HTTPD or Varnish Cache).
|
- Support for `X-Forwarded-For` and similar HTTP headers which used by reverse proxies to pass a real user IP address to the backend. Must not be enabled unless your instance is behind at least one reverse proxy (such as Nginx, Apache HTTPD or Varnish Cache).
|
||||||
- MRF: New module which handles incoming posts based on their age. By default, all incoming posts that are older than 2 days will be unlisted and not shown to their followers.
|
- MRF: New module which handles incoming posts based on their age. By default, all incoming posts that are older than 2 days will be unlisted and not shown to their followers.
|
||||||
|
- User notification settings: Add `privacy_option` option.
|
||||||
|
- Support for custom Elixir modules (such as MRF policies)
|
||||||
|
- User settings: Add _This account is a_ option.
|
||||||
|
- OAuth: admin scopes support (relevant setting: `[:auth, :enforce_oauth_admin_scope_usage]`).
|
||||||
<details>
|
<details>
|
||||||
<summary>API Changes</summary>
|
<summary>API Changes</summary>
|
||||||
|
|
||||||
|
@ -77,12 +91,25 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||||
- Pleroma API: Add Emoji reactions
|
- Pleroma API: Add Emoji reactions
|
||||||
- Admin API: Add `/api/pleroma/admin/instances/:instance/statuses` - lists all statuses from a given instance
|
- Admin API: Add `/api/pleroma/admin/instances/:instance/statuses` - lists all statuses from a given instance
|
||||||
- Admin API: `PATCH /api/pleroma/users/confirm_email` to confirm email for multiple users, `PATCH /api/pleroma/users/resend_confirmation_email` to resend confirmation email for multiple users
|
- Admin API: `PATCH /api/pleroma/users/confirm_email` to confirm email for multiple users, `PATCH /api/pleroma/users/resend_confirmation_email` to resend confirmation email for multiple users
|
||||||
|
- ActivityPub: Configurable `type` field of the actors.
|
||||||
|
- Mastodon API: `/api/v1/accounts/:id` has `source/pleroma/actor_type` field.
|
||||||
|
- Mastodon API: `/api/v1/update_credentials` accepts `actor_type` field.
|
||||||
|
- Captcha: Support native provider
|
||||||
|
- Captcha: Enable by default
|
||||||
|
- Mastodon API: Add support for `account_id` param to filter notifications by the account
|
||||||
|
- Mastodon API: Add `emoji_reactions` property to Statuses
|
||||||
|
- Mastodon API: Change emoji reaction reply format
|
||||||
|
- Notifications: Added `pleroma:emoji_reaction` notification type
|
||||||
|
- Mastodon API: Change emoji reaction reply format once more
|
||||||
</details>
|
</details>
|
||||||
|
|
||||||
### Fixed
|
### Fixed
|
||||||
- Report emails now include functional links to profiles of remote user accounts
|
- Report emails now include functional links to profiles of remote user accounts
|
||||||
- Not being able to log in to some third-party apps when logged in to MastoFE
|
- Not being able to log in to some third-party apps when logged in to MastoFE
|
||||||
- MRF: `Delete` activities being exempt from MRF policies
|
- MRF: `Delete` activities being exempt from MRF policies
|
||||||
|
- OTP releases: Not being able to configure OAuth expired token cleanup interval
|
||||||
|
- OTP releases: Not being able to configure HTML sanitization policy
|
||||||
|
- Favorites timeline now ordered by favorite date instead of post date
|
||||||
<details>
|
<details>
|
||||||
<summary>API Changes</summary>
|
<summary>API Changes</summary>
|
||||||
|
|
||||||
|
@ -90,6 +117,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
|
||||||
- Mastodon API: Inability to get some local users by nickname in `/api/v1/accounts/:id_or_nickname`
|
- Mastodon API: Inability to get some local users by nickname in `/api/v1/accounts/:id_or_nickname`
|
||||||
- AdminAPI: If some status received reports both in the "new" format and "old" format it was considered reports on two different statuses (in the context of grouped reports)
|
- AdminAPI: If some status received reports both in the "new" format and "old" format it was considered reports on two different statuses (in the context of grouped reports)
|
||||||
- Admin API: Error when trying to update reports in the "old" format
|
- Admin API: Error when trying to update reports in the "old" format
|
||||||
|
- Mastodon API: Marking a conversation as read (`POST /api/v1/conversations/:id/read`) now no longer brings it to the top in the user's direct conversation list
|
||||||
</details>
|
</details>
|
||||||
|
|
||||||
## [1.1.6] - 2019-11-19
|
## [1.1.6] - 2019-11-19
|
||||||
|
|
14
Dockerfile
14
Dockerfile
|
@ -14,6 +14,20 @@ RUN apk add git gcc g++ musl-dev make &&\
|
||||||
|
|
||||||
FROM alpine:3.9
|
FROM alpine:3.9
|
||||||
|
|
||||||
|
ARG BUILD_DATE
|
||||||
|
ARG VCS_REF
|
||||||
|
|
||||||
|
LABEL maintainer="ops@pleroma.social" \
|
||||||
|
org.opencontainers.image.title="pleroma" \
|
||||||
|
org.opencontainers.image.description="Pleroma for Docker" \
|
||||||
|
org.opencontainers.image.authors="ops@pleroma.social" \
|
||||||
|
org.opencontainers.image.vendor="pleroma.social" \
|
||||||
|
org.opencontainers.image.documentation="https://git.pleroma.social/pleroma/pleroma" \
|
||||||
|
org.opencontainers.image.licenses="AGPL-3.0" \
|
||||||
|
org.opencontainers.image.url="https://pleroma.social" \
|
||||||
|
org.opencontainers.image.revision=$VCS_REF \
|
||||||
|
org.opencontainers.image.created=$BUILD_DATE
|
||||||
|
|
||||||
ARG HOME=/opt/pleroma
|
ARG HOME=/opt/pleroma
|
||||||
ARG DATA=/var/lib/pleroma
|
ARG DATA=/var/lib/pleroma
|
||||||
|
|
||||||
|
|
|
@ -142,6 +142,48 @@ defp do_generate_activity(users) do
|
||||||
CommonAPI.post(Enum.random(users), post)
|
CommonAPI.post(Enum.random(users), post)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def generate_power_intervals(opts \\ []) do
|
||||||
|
count = Keyword.get(opts, :count, 20)
|
||||||
|
power = Keyword.get(opts, :power, 2)
|
||||||
|
IO.puts("Generating #{count} intervals for a power #{power} series...")
|
||||||
|
counts = Enum.map(1..count, fn n -> :math.pow(n, power) end)
|
||||||
|
sum = Enum.sum(counts)
|
||||||
|
|
||||||
|
densities =
|
||||||
|
Enum.map(counts, fn c ->
|
||||||
|
c / sum
|
||||||
|
end)
|
||||||
|
|
||||||
|
densities
|
||||||
|
|> Enum.reduce(0, fn density, acc ->
|
||||||
|
if acc == 0 do
|
||||||
|
[{0, density}]
|
||||||
|
else
|
||||||
|
[{_, lower} | _] = acc
|
||||||
|
[{lower, lower + density} | acc]
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
|> Enum.reverse()
|
||||||
|
end
|
||||||
|
|
||||||
|
def generate_tagged_activities(opts \\ []) do
|
||||||
|
tag_count = Keyword.get(opts, :tag_count, 20)
|
||||||
|
users = Keyword.get(opts, :users, Repo.all(User))
|
||||||
|
activity_count = Keyword.get(opts, :count, 200_000)
|
||||||
|
|
||||||
|
intervals = generate_power_intervals(count: tag_count)
|
||||||
|
|
||||||
|
IO.puts(
|
||||||
|
"Generating #{activity_count} activities using #{tag_count} different tags of format `tag_n`, starting at tag_0"
|
||||||
|
)
|
||||||
|
|
||||||
|
Enum.each(1..activity_count, fn _ ->
|
||||||
|
random = :rand.uniform()
|
||||||
|
i = Enum.find_index(intervals, fn {lower, upper} -> lower <= random && upper > random end)
|
||||||
|
CommonAPI.post(Enum.random(users), %{"status" => "a post with the tag #tag_#{i}"})
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
defp do_generate_activity_with_mention(user, users) do
|
defp do_generate_activity_with_mention(user, users) do
|
||||||
mentions_cnt = Enum.random([2, 3, 4, 5])
|
mentions_cnt = Enum.random([2, 3, 4, 5])
|
||||||
with_user = Enum.random([true, false])
|
with_user = Enum.random([true, false])
|
||||||
|
|
87
benchmarks/mix/tasks/pleroma/benchmarks/tags.ex
Normal file
87
benchmarks/mix/tasks/pleroma/benchmarks/tags.ex
Normal file
|
@ -0,0 +1,87 @@
|
||||||
|
defmodule Mix.Tasks.Pleroma.Benchmarks.Tags do
|
||||||
|
use Mix.Task
|
||||||
|
alias Pleroma.Repo
|
||||||
|
alias Pleroma.LoadTesting.Generator
|
||||||
|
import Ecto.Query
|
||||||
|
|
||||||
|
def run(_args) do
|
||||||
|
Mix.Pleroma.start_pleroma()
|
||||||
|
activities_count = Repo.aggregate(from(a in Pleroma.Activity), :count, :id)
|
||||||
|
|
||||||
|
if activities_count == 0 do
|
||||||
|
IO.puts("Did not find any activities, cleaning and generating")
|
||||||
|
clean_tables()
|
||||||
|
Generator.generate_users(users_max: 10)
|
||||||
|
Generator.generate_tagged_activities()
|
||||||
|
else
|
||||||
|
IO.puts("Found #{activities_count} activities, won't generate new ones")
|
||||||
|
end
|
||||||
|
|
||||||
|
tags = Enum.map(0..20, fn i -> {"For #tag_#{i}", "tag_#{i}"} end)
|
||||||
|
|
||||||
|
Enum.each(tags, fn {_, tag} ->
|
||||||
|
query =
|
||||||
|
from(o in Pleroma.Object,
|
||||||
|
where: fragment("(?)->'tag' \\? (?)", o.data, ^tag)
|
||||||
|
)
|
||||||
|
|
||||||
|
count = Repo.aggregate(query, :count, :id)
|
||||||
|
IO.puts("Database contains #{count} posts tagged with #{tag}")
|
||||||
|
end)
|
||||||
|
|
||||||
|
user = Repo.all(Pleroma.User) |> List.first()
|
||||||
|
|
||||||
|
Benchee.run(
|
||||||
|
%{
|
||||||
|
"Hashtag fetching, any" => fn tags ->
|
||||||
|
Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
|
||||||
|
%{
|
||||||
|
"any" => tags
|
||||||
|
},
|
||||||
|
user,
|
||||||
|
false
|
||||||
|
)
|
||||||
|
end,
|
||||||
|
# Will always return zero results because no overlapping hashtags are generated.
|
||||||
|
"Hashtag fetching, all" => fn tags ->
|
||||||
|
Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
|
||||||
|
%{
|
||||||
|
"all" => tags
|
||||||
|
},
|
||||||
|
user,
|
||||||
|
false
|
||||||
|
)
|
||||||
|
end
|
||||||
|
},
|
||||||
|
inputs:
|
||||||
|
tags
|
||||||
|
|> Enum.map(fn {_, v} -> v end)
|
||||||
|
|> Enum.chunk_every(2)
|
||||||
|
|> Enum.map(fn tags -> {"For #{inspect(tags)}", tags} end),
|
||||||
|
time: 5
|
||||||
|
)
|
||||||
|
|
||||||
|
Benchee.run(
|
||||||
|
%{
|
||||||
|
"Hashtag fetching" => fn tag ->
|
||||||
|
Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
|
||||||
|
%{
|
||||||
|
"tag" => tag
|
||||||
|
},
|
||||||
|
user,
|
||||||
|
false
|
||||||
|
)
|
||||||
|
end
|
||||||
|
},
|
||||||
|
inputs: tags,
|
||||||
|
time: 5
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp clean_tables do
|
||||||
|
IO.puts("Deleting old data...\n")
|
||||||
|
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE users CASCADE;")
|
||||||
|
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE activities CASCADE;")
|
||||||
|
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE objects CASCADE;")
|
||||||
|
end
|
||||||
|
end
|
|
@ -82,3 +82,11 @@
|
||||||
IO.puts("RUM enabled: #{rum_enabled}")
|
IO.puts("RUM enabled: #{rum_enabled}")
|
||||||
|
|
||||||
config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock
|
config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock
|
||||||
|
|
||||||
|
if File.exists?("./config/benchmark.secret.exs") do
|
||||||
|
import_config "benchmark.secret.exs"
|
||||||
|
else
|
||||||
|
IO.puts(
|
||||||
|
"You may want to create benchmark.secret.exs to declare custom database connection parameters."
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
|
@ -66,9 +66,11 @@
|
||||||
jobs: scheduled_jobs
|
jobs: scheduled_jobs
|
||||||
|
|
||||||
config :pleroma, Pleroma.Captcha,
|
config :pleroma, Pleroma.Captcha,
|
||||||
enabled: false,
|
enabled: true,
|
||||||
seconds_valid: 60,
|
seconds_valid: 300,
|
||||||
method: Pleroma.Captcha.Kocaptcha
|
method: Pleroma.Captcha.Native
|
||||||
|
|
||||||
|
config :pleroma, Pleroma.Captcha.Kocaptcha, endpoint: "https://captcha.kotobank.ch"
|
||||||
|
|
||||||
config :pleroma, :hackney_pools,
|
config :pleroma, :hackney_pools,
|
||||||
federation: [
|
federation: [
|
||||||
|
@ -84,8 +86,6 @@
|
||||||
timeout: 300_000
|
timeout: 300_000
|
||||||
]
|
]
|
||||||
|
|
||||||
config :pleroma, Pleroma.Captcha.Kocaptcha, endpoint: "https://captcha.kotobank.ch"
|
|
||||||
|
|
||||||
# Upload configuration
|
# Upload configuration
|
||||||
config :pleroma, Pleroma.Upload,
|
config :pleroma, Pleroma.Upload,
|
||||||
uploader: Pleroma.Uploaders.Local,
|
uploader: Pleroma.Uploaders.Local,
|
||||||
|
@ -108,15 +108,10 @@
|
||||||
streaming_enabled: true,
|
streaming_enabled: true,
|
||||||
public_endpoint: "https://s3.amazonaws.com"
|
public_endpoint: "https://s3.amazonaws.com"
|
||||||
|
|
||||||
config :pleroma, Pleroma.Uploaders.MDII,
|
|
||||||
cgi: "https://mdii.sakura.ne.jp/mdii-post.cgi",
|
|
||||||
files: "https://mdii.sakura.ne.jp"
|
|
||||||
|
|
||||||
config :pleroma, :emoji,
|
config :pleroma, :emoji,
|
||||||
shortcode_globs: ["/emoji/custom/**/*.png"],
|
shortcode_globs: ["/emoji/custom/**/*.png"],
|
||||||
pack_extensions: [".png", ".gif"],
|
pack_extensions: [".png", ".gif"],
|
||||||
groups: [
|
groups: [
|
||||||
# Put groups that have higher priority than defaults here. Example in `docs/config/custom_emoji.md`
|
|
||||||
Custom: ["/emoji/*.png", "/emoji/**/*.png"]
|
Custom: ["/emoji/*.png", "/emoji/**/*.png"]
|
||||||
],
|
],
|
||||||
default_manifest: "https://git.pleroma.social/pleroma/emoji-index/raw/master/index.json",
|
default_manifest: "https://git.pleroma.social/pleroma/emoji-index/raw/master/index.json",
|
||||||
|
@ -269,7 +264,6 @@
|
||||||
remote_post_retention_days: 90,
|
remote_post_retention_days: 90,
|
||||||
skip_thread_containment: true,
|
skip_thread_containment: true,
|
||||||
limit_to_local_content: :unauthenticated,
|
limit_to_local_content: :unauthenticated,
|
||||||
dynamic_configuration: false,
|
|
||||||
user_bio_length: 5000,
|
user_bio_length: 5000,
|
||||||
user_name_length: 100,
|
user_name_length: 100,
|
||||||
max_account_fields: 10,
|
max_account_fields: 10,
|
||||||
|
@ -506,7 +500,8 @@
|
||||||
mailer: 10,
|
mailer: 10,
|
||||||
transmogrifier: 20,
|
transmogrifier: 20,
|
||||||
scheduled_activities: 10,
|
scheduled_activities: 10,
|
||||||
background: 5
|
background: 5,
|
||||||
|
attachments_cleanup: 5
|
||||||
]
|
]
|
||||||
|
|
||||||
config :pleroma, :workers,
|
config :pleroma, :workers,
|
||||||
|
@ -563,7 +558,10 @@
|
||||||
base_path: "/oauth",
|
base_path: "/oauth",
|
||||||
providers: ueberauth_providers
|
providers: ueberauth_providers
|
||||||
|
|
||||||
config :pleroma, :auth, oauth_consumer_strategies: oauth_consumer_strategies
|
config :pleroma,
|
||||||
|
:auth,
|
||||||
|
enforce_oauth_admin_scope_usage: true,
|
||||||
|
oauth_consumer_strategies: oauth_consumer_strategies
|
||||||
|
|
||||||
config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Sendmail, enabled: false
|
config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Sendmail, enabled: false
|
||||||
|
|
||||||
|
@ -618,6 +616,10 @@
|
||||||
activity_pub: nil,
|
activity_pub: nil,
|
||||||
activity_pub_question: 30_000
|
activity_pub_question: 30_000
|
||||||
|
|
||||||
|
config :pleroma, :modules, runtime_dir: "instance/modules"
|
||||||
|
|
||||||
|
config :pleroma, configurable_from_database: false
|
||||||
|
|
||||||
config :swarm, node_blacklist: [~r/myhtml_.*$/]
|
config :swarm, node_blacklist: [~r/myhtml_.*$/]
|
||||||
# Import environment specific config. This must remain at the bottom
|
# Import environment specific config. This must remain at the bottom
|
||||||
# of this file so it overrides the configuration defined above.
|
# of this file so it overrides the configuration defined above.
|
||||||
|
|
File diff suppressed because it is too large
Load diff
|
@ -20,8 +20,8 @@
|
||||||
config :phoenix, serve_endpoints: true
|
config :phoenix, serve_endpoints: true
|
||||||
|
|
||||||
# Do not print debug messages in production
|
# Do not print debug messages in production
|
||||||
config :logger, :console, level: :warn
|
config :logger, :console, level: :info
|
||||||
config :logger, :ex_syslogger, level: :warn
|
config :logger, :ex_syslogger, level: :info
|
||||||
|
|
||||||
# ## SSL Support
|
# ## SSL Support
|
||||||
#
|
#
|
||||||
|
|
|
@ -2,9 +2,12 @@
|
||||||
|
|
||||||
config :pleroma, :instance, static_dir: "/var/lib/pleroma/static"
|
config :pleroma, :instance, static_dir: "/var/lib/pleroma/static"
|
||||||
config :pleroma, Pleroma.Uploaders.Local, uploads: "/var/lib/pleroma/uploads"
|
config :pleroma, Pleroma.Uploaders.Local, uploads: "/var/lib/pleroma/uploads"
|
||||||
|
config :pleroma, :modules, runtime_dir: "/var/lib/pleroma/modules"
|
||||||
|
|
||||||
config_path = System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs"
|
config_path = System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs"
|
||||||
|
|
||||||
|
config :pleroma, release: true, config_path: config_path
|
||||||
|
|
||||||
if File.exists?(config_path) do
|
if File.exists?(config_path) do
|
||||||
import_config config_path
|
import_config config_path
|
||||||
else
|
else
|
||||||
|
@ -17,3 +20,12 @@
|
||||||
|
|
||||||
IO.puts(warning)
|
IO.puts(warning)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
exported_config =
|
||||||
|
config_path
|
||||||
|
|> Path.dirname()
|
||||||
|
|> Path.join("prod.exported_from_db.secret.exs")
|
||||||
|
|
||||||
|
if File.exists?(exported_config) do
|
||||||
|
import_config exported_config
|
||||||
|
end
|
||||||
|
|
|
@ -68,7 +68,9 @@
|
||||||
queues: false,
|
queues: false,
|
||||||
prune: :disabled
|
prune: :disabled
|
||||||
|
|
||||||
config :pleroma, Pleroma.Scheduler, jobs: []
|
config :pleroma, Pleroma.Scheduler,
|
||||||
|
jobs: [],
|
||||||
|
global: false
|
||||||
|
|
||||||
config :pleroma, Pleroma.ScheduledActivity,
|
config :pleroma, Pleroma.ScheduledActivity,
|
||||||
daily_user_limit: 2,
|
daily_user_limit: 2,
|
||||||
|
@ -93,6 +95,8 @@
|
||||||
|
|
||||||
config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock
|
config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock
|
||||||
|
|
||||||
|
config :pleroma, :modules, runtime_dir: "test/fixtures/modules"
|
||||||
|
|
||||||
if File.exists?("./config/test.secret.exs") do
|
if File.exists?("./config/test.secret.exs") do
|
||||||
import_config "test.secret.exs"
|
import_config "test.secret.exs"
|
||||||
else
|
else
|
||||||
|
|
|
@ -2,6 +2,13 @@
|
||||||
|
|
||||||
Authentication is required and the user must be an admin.
|
Authentication is required and the user must be an admin.
|
||||||
|
|
||||||
|
Configuration options:
|
||||||
|
|
||||||
|
* `[:auth, :enforce_oauth_admin_scope_usage]` — OAuth admin scope requirement toggle.
|
||||||
|
If `true`, admin actions explicitly demand admin OAuth scope(s) presence in OAuth token (client app must support admin scopes).
|
||||||
|
If `false` and token doesn't have admin scope(s), `is_admin` user flag grants access to admin-specific actions.
|
||||||
|
Note that client app needs to explicitly support admin scopes and request them when obtaining auth token.
|
||||||
|
|
||||||
## `GET /api/pleroma/admin/users`
|
## `GET /api/pleroma/admin/users`
|
||||||
|
|
||||||
### List users
|
### List users
|
||||||
|
@ -607,78 +614,29 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
|
||||||
|
|
||||||
- On success: `204`, empty response
|
- On success: `204`, empty response
|
||||||
|
|
||||||
## `POST /api/pleroma/admin/reports/:id/respond`
|
## `POST /api/pleroma/admin/reports/:id/notes`
|
||||||
|
|
||||||
### Respond to a report
|
### Create report note
|
||||||
|
|
||||||
- Params:
|
- Params:
|
||||||
- `id`
|
- `id`: required, report id
|
||||||
- `status`: required, the message
|
- `content`: required, the message
|
||||||
- Response:
|
- Response:
|
||||||
- On failure:
|
- On failure:
|
||||||
- 400 Bad Request `"Invalid parameters"` when `status` is missing
|
- 400 Bad Request `"Invalid parameters"` when `status` is missing
|
||||||
- 403 Forbidden `{"error": "error_msg"}`
|
- On success: `204`, empty response
|
||||||
- 404 Not Found `"Not found"`
|
|
||||||
- On success: JSON, created Mastodon Status entity
|
|
||||||
|
|
||||||
```json
|
## `POST /api/pleroma/admin/reports/:report_id/notes/:id`
|
||||||
{
|
|
||||||
"account": { ... },
|
### Delete report note
|
||||||
"application": {
|
|
||||||
"name": "Web",
|
- Params:
|
||||||
"website": null
|
- `report_id`: required, report id
|
||||||
},
|
- `id`: required, note id
|
||||||
"bookmarked": false,
|
- Response:
|
||||||
"card": null,
|
- On failure:
|
||||||
"content": "Your claim is going to be closed",
|
- 400 Bad Request `"Invalid parameters"` when `status` is missing
|
||||||
"created_at": "2019-05-11T17:13:03.000Z",
|
- On success: `204`, empty response
|
||||||
"emojis": [],
|
|
||||||
"favourited": false,
|
|
||||||
"favourites_count": 0,
|
|
||||||
"id": "9ihuiSL1405I65TmEq",
|
|
||||||
"in_reply_to_account_id": null,
|
|
||||||
"in_reply_to_id": null,
|
|
||||||
"language": null,
|
|
||||||
"media_attachments": [],
|
|
||||||
"mentions": [
|
|
||||||
{
|
|
||||||
"acct": "user",
|
|
||||||
"id": "9i6dAJqSGSKMzLG2Lo",
|
|
||||||
"url": "https://pleroma.example.org/users/user",
|
|
||||||
"username": "user"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"acct": "admin",
|
|
||||||
"id": "9hEkA5JsvAdlSrocam",
|
|
||||||
"url": "https://pleroma.example.org/users/admin",
|
|
||||||
"username": "admin"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"muted": false,
|
|
||||||
"pinned": false,
|
|
||||||
"pleroma": {
|
|
||||||
"content": {
|
|
||||||
"text/plain": "Your claim is going to be closed"
|
|
||||||
},
|
|
||||||
"conversation_id": 35,
|
|
||||||
"in_reply_to_account_acct": null,
|
|
||||||
"local": true,
|
|
||||||
"spoiler_text": {
|
|
||||||
"text/plain": ""
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"reblog": null,
|
|
||||||
"reblogged": false,
|
|
||||||
"reblogs_count": 0,
|
|
||||||
"replies_count": 0,
|
|
||||||
"sensitive": false,
|
|
||||||
"spoiler_text": "",
|
|
||||||
"tags": [],
|
|
||||||
"uri": "https://pleroma.example.org/objects/cab0836d-9814-46cd-a0ea-529da9db5fcb",
|
|
||||||
"url": "https://pleroma.example.org/notice/9ihuiSL1405I65TmEq",
|
|
||||||
"visibility": "direct"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## `PUT /api/pleroma/admin/statuses/:id`
|
## `PUT /api/pleroma/admin/statuses/:id`
|
||||||
|
|
||||||
|
@ -707,27 +665,16 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
|
||||||
- 404 Not Found `"Not found"`
|
- 404 Not Found `"Not found"`
|
||||||
- On success: 200 OK `{}`
|
- On success: 200 OK `{}`
|
||||||
|
|
||||||
## `GET /api/pleroma/admin/config/migrate_to_db`
|
|
||||||
|
|
||||||
### Run mix task pleroma.config migrate_to_db
|
|
||||||
|
|
||||||
Copy settings on key `:pleroma` to DB.
|
|
||||||
|
|
||||||
- Params: none
|
|
||||||
- Response:
|
|
||||||
|
|
||||||
```json
|
|
||||||
{}
|
|
||||||
```
|
|
||||||
|
|
||||||
## `GET /api/pleroma/admin/config/migrate_from_db`
|
## `GET /api/pleroma/admin/config/migrate_from_db`
|
||||||
|
|
||||||
### Run mix task pleroma.config migrate_from_db
|
### Run mix task pleroma.config migrate_from_db
|
||||||
|
|
||||||
Copy all settings from DB to `config/prod.exported_from_db.secret.exs` with deletion from DB.
|
Copies all settings from database to `config/{env}.exported_from_db.secret.exs` with deletion from the table. Where `{env}` is the environment in which `pleroma` is running.
|
||||||
|
|
||||||
- Params: none
|
- Params: none
|
||||||
- Response:
|
- Response:
|
||||||
|
- On failure:
|
||||||
|
- 400 Bad Request `"To use this endpoint you need to enable configuration from database."`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{}
|
{}
|
||||||
|
@ -735,20 +682,24 @@ Copy all settings from DB to `config/prod.exported_from_db.secret.exs` with dele
|
||||||
|
|
||||||
## `GET /api/pleroma/admin/config`
|
## `GET /api/pleroma/admin/config`
|
||||||
|
|
||||||
### List config settings
|
### Get list of merged default settings with saved in database.
|
||||||
|
|
||||||
List config settings only works with `:pleroma => :instance => :dynamic_configuration` setting to `true`.
|
**Only works when configuration from database is enabled.**
|
||||||
|
|
||||||
- Params: none
|
- Params:
|
||||||
|
- `only_db`: true (*optional*, get only saved in database settings)
|
||||||
- Response:
|
- Response:
|
||||||
|
- On failure:
|
||||||
|
- 400 Bad Request `"To use this endpoint you need to enable configuration from database."`
|
||||||
|
- 400 Bad Request `"To use configuration from database migrate your settings to database."`
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
configs: [
|
configs: [
|
||||||
{
|
{
|
||||||
"group": string,
|
"group": ":pleroma",
|
||||||
"key": string or string with leading `:` for atoms,
|
"key": "Pleroma.Upload",
|
||||||
"value": string or {} or [] or {"tuple": []}
|
"value": []
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
@ -758,44 +709,107 @@ List config settings only works with `:pleroma => :instance => :dynamic_configur
|
||||||
|
|
||||||
### Update config settings
|
### Update config settings
|
||||||
|
|
||||||
Updating config settings only works with `:pleroma => :instance => :dynamic_configuration` setting to `true`.
|
**Only works when configuration from database is enabled.**
|
||||||
Module name can be passed as string, which starts with `Pleroma`, e.g. `"Pleroma.Upload"`.
|
|
||||||
Atom keys and values can be passed with `:` in the beginning, e.g. `":upload"`.
|
|
||||||
Tuples can be passed as `{"tuple": ["first_val", Pleroma.Module, []]}`.
|
|
||||||
`{"tuple": ["some_string", "Pleroma.Some.Module", []]}` will be converted to `{"some_string", Pleroma.Some.Module, []}`.
|
|
||||||
Keywords can be passed as lists with 2 child tuples, e.g.
|
|
||||||
`[{"tuple": ["first_val", Pleroma.Module]}, {"tuple": ["second_val", true]}]`.
|
|
||||||
|
|
||||||
If value contains list of settings `[subkey: val1, subkey2: val2, subkey3: val3]`, it's possible to remove only subkeys instead of all settings passing `subkeys` parameter. E.g.:
|
Some modifications are necessary to save the config settings correctly:
|
||||||
{"group": "pleroma", "key": "some_key", "delete": "true", "subkeys": [":subkey", ":subkey3"]}.
|
|
||||||
|
|
||||||
Compile time settings (need instance reboot):
|
- strings which start with `Pleroma.`, `Phoenix.`, `Tesla.` or strings like `Oban`, `Ueberauth` will be converted to modules;
|
||||||
- all settings by this keys:
|
```
|
||||||
|
"Pleroma.Upload" -> Pleroma.Upload
|
||||||
|
"Oban" -> Oban
|
||||||
|
```
|
||||||
|
- strings starting with `:` will be converted to atoms;
|
||||||
|
```
|
||||||
|
":pleroma" -> :pleroma
|
||||||
|
```
|
||||||
|
- objects with `tuple` key and array value will be converted to tuples;
|
||||||
|
```
|
||||||
|
{"tuple": ["string", "Pleroma.Upload", []]} -> {"string", Pleroma.Upload, []}
|
||||||
|
```
|
||||||
|
- arrays with *tuple objects* will be converted to keywords;
|
||||||
|
```
|
||||||
|
[{"tuple": [":key1", "value"]}, {"tuple": [":key2", "value"]}] -> [key1: "value", key2: "value"]
|
||||||
|
```
|
||||||
|
|
||||||
|
Most of the settings will be applied in `runtime`, this means that you don't need to restart the instance. But some settings are applied in `compile time` and require a reboot of the instance, such as:
|
||||||
|
- all settings inside these keys:
|
||||||
- `:hackney_pools`
|
- `:hackney_pools`
|
||||||
- `:chat`
|
- `:chat`
|
||||||
- `Pleroma.Web.Endpoint`
|
- partially settings inside these keys:
|
||||||
- `Pleroma.Repo`
|
- `:seconds_valid` in `Pleroma.Captcha`
|
||||||
- part settings:
|
- `:proxy_remote` in `Pleroma.Upload`
|
||||||
- `Pleroma.Captcha` -> `:seconds_valid`
|
- `:upload_limit` in `:instance`
|
||||||
- `Pleroma.Upload` -> `:proxy_remote`
|
|
||||||
- `:instance` -> `:upload_limit`
|
|
||||||
|
|
||||||
- Params:
|
- Params:
|
||||||
- `configs` => [
|
- `configs` - array of config objects
|
||||||
- `group` (string)
|
- config object params:
|
||||||
- `key` (string or string with leading `:` for atoms)
|
- `group` - string (**required**)
|
||||||
- `value` (string, [], {} or {"tuple": []})
|
- `key` - string (**required**)
|
||||||
- `delete` = true (optional, if parameter must be deleted)
|
- `value` - string, [], {} or {"tuple": []} (**required**)
|
||||||
- `subkeys` [(string with leading `:` for atoms)] (optional, works only if `delete=true` parameter is passed, otherwise will be ignored)
|
- `delete` - true (*optional*, if setting must be deleted)
|
||||||
]
|
- `subkeys` - array of strings (*optional*, only works when `delete=true` parameter is passed, otherwise will be ignored)
|
||||||
|
|
||||||
- Request (example):
|
*When a value have several nested settings, you can delete only some nested settings by passing a parameter `subkeys`, without deleting all settings by key.*
|
||||||
|
```
|
||||||
|
[subkey: val1, subkey2: val2, subkey3: val3] \\ initial value
|
||||||
|
{"group": ":pleroma", "key": "some_key", "delete": true, "subkeys": [":subkey", ":subkey3"]} \\ passing json for deletion
|
||||||
|
[subkey2: val2] \\ value after deletion
|
||||||
|
```
|
||||||
|
|
||||||
|
*Most of the settings can be partially updated through merge old values with new values, except settings value of which is list or is not keyword.*
|
||||||
|
|
||||||
|
Example of setting without keyword in value:
|
||||||
|
```elixir
|
||||||
|
config :tesla, :adapter, Tesla.Adapter.Hackney
|
||||||
|
```
|
||||||
|
|
||||||
|
List of settings which support only full update by key:
|
||||||
|
```elixir
|
||||||
|
@full_key_update [
|
||||||
|
{:pleroma, :ecto_repos},
|
||||||
|
{:quack, :meta},
|
||||||
|
{:mime, :types},
|
||||||
|
{:cors_plug, [:max_age, :methods, :expose, :headers]},
|
||||||
|
{:auto_linker, :opts},
|
||||||
|
{:swarm, :node_blacklist},
|
||||||
|
{:logger, :backends}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
List of settings which support only full update by subkey:
|
||||||
|
```elixir
|
||||||
|
@full_subkey_update [
|
||||||
|
{:pleroma, :assets, :mascots},
|
||||||
|
{:pleroma, :emoji, :groups},
|
||||||
|
{:pleroma, :workers, :retries},
|
||||||
|
{:pleroma, :mrf_subchain, :match_actor},
|
||||||
|
{:pleroma, :mrf_keyword, :replace}
|
||||||
|
]
|
||||||
|
```
|
||||||
|
|
||||||
|
*Settings without explicit key must be sended in separate config object params.*
|
||||||
|
```elixir
|
||||||
|
config :quack,
|
||||||
|
level: :debug,
|
||||||
|
meta: [:all],
|
||||||
|
...
|
||||||
|
```
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
configs: [
|
||||||
|
{"group": ":quack", "key": ":level", "value": ":debug"},
|
||||||
|
{"group": ":quack", "key": ":meta", "value": [":all"]},
|
||||||
|
...
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
- Request:
|
||||||
|
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
configs: [
|
configs: [
|
||||||
{
|
{
|
||||||
"group": "pleroma",
|
"group": ":pleroma",
|
||||||
"key": "Pleroma.Upload",
|
"key": "Pleroma.Upload",
|
||||||
"value": [
|
"value": [
|
||||||
{"tuple": [":uploader", "Pleroma.Uploaders.Local"]},
|
{"tuple": [":uploader", "Pleroma.Uploaders.Local"]},
|
||||||
|
@ -805,7 +819,7 @@ Compile time settings (need instance reboot):
|
||||||
{"tuple": [":proxy_opts", [
|
{"tuple": [":proxy_opts", [
|
||||||
{"tuple": [":redirect_on_failure", false]},
|
{"tuple": [":redirect_on_failure", false]},
|
||||||
{"tuple": [":max_body_length", 1048576]},
|
{"tuple": [":max_body_length", 1048576]},
|
||||||
{"tuple": [":http": [
|
{"tuple": [":http", [
|
||||||
{"tuple": [":follow_redirect", true]},
|
{"tuple": [":follow_redirect", true]},
|
||||||
{"tuple": [":pool", ":upload"]},
|
{"tuple": [":pool", ":upload"]},
|
||||||
]]}
|
]]}
|
||||||
|
@ -821,19 +835,53 @@ Compile time settings (need instance reboot):
|
||||||
```
|
```
|
||||||
|
|
||||||
- Response:
|
- Response:
|
||||||
|
- On failure:
|
||||||
|
- 400 Bad Request `"To use this endpoint you need to enable configuration from database."`
|
||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
configs: [
|
configs: [
|
||||||
{
|
{
|
||||||
"group": string,
|
"group": ":pleroma",
|
||||||
"key": string or string with leading `:` for atoms,
|
"key": "Pleroma.Upload",
|
||||||
"value": string or {} or [] or {"tuple": []}
|
"value": [...]
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## ` GET /api/pleroma/admin/config/descriptions`
|
||||||
|
|
||||||
|
### Get JSON with config descriptions.
|
||||||
|
Loads json generated from `config/descriptions.exs`.
|
||||||
|
|
||||||
|
- Params: none
|
||||||
|
- Response:
|
||||||
|
|
||||||
|
```json
|
||||||
|
[{
|
||||||
|
"group": ":pleroma", // string
|
||||||
|
"key": "ModuleName", // string
|
||||||
|
"type": "group", // string or list with possible values,
|
||||||
|
"description": "Upload general settings", // string
|
||||||
|
"children": [
|
||||||
|
{
|
||||||
|
"key": ":uploader", // string or module name `Pleroma.Upload`
|
||||||
|
"type": "module",
|
||||||
|
"description": "Module which will be used for uploads",
|
||||||
|
"suggestions": ["module1", "module2"]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"key": ":filters",
|
||||||
|
"type": ["list", "module"],
|
||||||
|
"description": "List of filter modules for uploads",
|
||||||
|
"suggestions": [
|
||||||
|
"module1", "module2", "module3"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}]
|
||||||
|
```
|
||||||
|
|
||||||
## `GET /api/pleroma/admin/moderation_log`
|
## `GET /api/pleroma/admin/moderation_log`
|
||||||
|
|
||||||
### Get moderation log
|
### Get moderation log
|
||||||
|
|
|
@ -29,6 +29,7 @@ Has these additional fields under the `pleroma` object:
|
||||||
- `spoiler_text`: a map consisting of alternate representations of the `spoiler_text` property with the key being it's mimetype. Currently the only alternate representation supported is `text/plain`
|
- `spoiler_text`: a map consisting of alternate representations of the `spoiler_text` property with the key being it's mimetype. Currently the only alternate representation supported is `text/plain`
|
||||||
- `expires_at`: a datetime (iso8601) that states when the post will expire (be deleted automatically), or empty if the post won't expire
|
- `expires_at`: a datetime (iso8601) that states when the post will expire (be deleted automatically), or empty if the post won't expire
|
||||||
- `thread_muted`: true if the thread the post belongs to is muted
|
- `thread_muted`: true if the thread the post belongs to is muted
|
||||||
|
- `emoji_reactions`: A list with emoji / reaction maps. The format is {emoji: "☕", count: 1}. Contains no information about the reacting users, for that use the `emoji_reactions_by` endpoint.
|
||||||
|
|
||||||
## Attachments
|
## Attachments
|
||||||
|
|
||||||
|
@ -46,7 +47,7 @@ The `id` parameter can also be the `nickname` of the user. This only works in th
|
||||||
Has these additional fields under the `pleroma` object:
|
Has these additional fields under the `pleroma` object:
|
||||||
|
|
||||||
- `tags`: Lists an array of tags for the user
|
- `tags`: Lists an array of tags for the user
|
||||||
- `relationship{}`: Includes fields as documented for Mastodon API https://docs.joinmastodon.org/api/entities/#relationship
|
- `relationship{}`: Includes fields as documented for Mastodon API https://docs.joinmastodon.org/entities/relationship/
|
||||||
- `is_moderator`: boolean, nullable, true if user is a moderator
|
- `is_moderator`: boolean, nullable, true if user is a moderator
|
||||||
- `is_admin`: boolean, nullable, true if user is an admin
|
- `is_admin`: boolean, nullable, true if user is an admin
|
||||||
- `confirmation_pending`: boolean, true if a new user account is waiting on email confirmation to be activated
|
- `confirmation_pending`: boolean, true if a new user account is waiting on email confirmation to be activated
|
||||||
|
@ -66,6 +67,8 @@ Has these additional fields under the `pleroma` object:
|
||||||
|
|
||||||
- `show_role`: boolean, nullable, true when the user wants his role (e.g admin, moderator) to be shown
|
- `show_role`: boolean, nullable, true when the user wants his role (e.g admin, moderator) to be shown
|
||||||
- `no_rich_text` - boolean, nullable, true when html tags are stripped from all statuses requested from the API
|
- `no_rich_text` - boolean, nullable, true when html tags are stripped from all statuses requested from the API
|
||||||
|
- `discoverable`: boolean, true when the user allows discovery of the account in search results and other services.
|
||||||
|
- `actor_type`: string, the type of this account.
|
||||||
|
|
||||||
## Conversations
|
## Conversations
|
||||||
|
|
||||||
|
@ -98,11 +101,20 @@ The `type` value is `move`. Has an additional field:
|
||||||
|
|
||||||
- `target`: new account
|
- `target`: new account
|
||||||
|
|
||||||
|
### EmojiReaction Notification
|
||||||
|
|
||||||
|
The `type` value is `pleroma:emoji_reaction`. Has these fields:
|
||||||
|
|
||||||
|
- `emoji`: The used emoji
|
||||||
|
- `account`: The account of the user who reacted
|
||||||
|
- `status`: The status that was reacted on
|
||||||
|
|
||||||
## GET `/api/v1/notifications`
|
## GET `/api/v1/notifications`
|
||||||
|
|
||||||
Accepts additional parameters:
|
Accepts additional parameters:
|
||||||
|
|
||||||
- `exclude_visibilities`: will exclude the notifications for activities with the given visibilities. The parameter accepts an array of visibility types (`public`, `unlisted`, `private`, `direct`). Usage example: `GET /api/v1/notifications?exclude_visibilities[]=direct&exclude_visibilities[]=private`.
|
- `exclude_visibilities`: will exclude the notifications for activities with the given visibilities. The parameter accepts an array of visibility types (`public`, `unlisted`, `private`, `direct`). Usage example: `GET /api/v1/notifications?exclude_visibilities[]=direct&exclude_visibilities[]=private`.
|
||||||
|
- `with_move`: boolean, when set to `true` will include Move notifications. `false` by default.
|
||||||
|
|
||||||
## POST `/api/v1/statuses`
|
## POST `/api/v1/statuses`
|
||||||
|
|
||||||
|
@ -145,6 +157,8 @@ Additional parameters can be added to the JSON body/Form data:
|
||||||
- `skip_thread_containment` - if true, skip filtering out broken threads
|
- `skip_thread_containment` - if true, skip filtering out broken threads
|
||||||
- `allow_following_move` - if true, allows automatically follow moved following accounts
|
- `allow_following_move` - if true, allows automatically follow moved following accounts
|
||||||
- `pleroma_background_image` - sets the background image of the user.
|
- `pleroma_background_image` - sets the background image of the user.
|
||||||
|
- `discoverable` - if true, discovery of this account in search results and other services is allowed.
|
||||||
|
- `actor_type` - the type of this account.
|
||||||
|
|
||||||
### Pleroma Settings Store
|
### Pleroma Settings Store
|
||||||
Pleroma has mechanism that allows frontends to save blobs of json for each user on the backend. This can be used to save frontend-specific settings for a user that the backend does not need to know about.
|
Pleroma has mechanism that allows frontends to save blobs of json for each user on the backend. This can be used to save frontend-specific settings for a user that the backend does not need to know about.
|
||||||
|
|
|
@ -70,59 +70,6 @@ Request parameters can be passed via [query strings](https://en.wikipedia.org/wi
|
||||||
* Response: JSON. Returns `{"status": "success"}` if the account was successfully disabled, `{"error": "[error message]"}` otherwise
|
* Response: JSON. Returns `{"status": "success"}` if the account was successfully disabled, `{"error": "[error message]"}` otherwise
|
||||||
* Example response: `{"error": "Invalid password."}`
|
* Example response: `{"error": "Invalid password."}`
|
||||||
|
|
||||||
## `/api/account/register`
|
|
||||||
### Register a new user
|
|
||||||
* Method `POST`
|
|
||||||
* Authentication: not required
|
|
||||||
* Params:
|
|
||||||
* `nickname`
|
|
||||||
* `fullname`
|
|
||||||
* `bio`
|
|
||||||
* `email`
|
|
||||||
* `password`
|
|
||||||
* `confirm`
|
|
||||||
* `captcha_solution`: optional, contains provider-specific captcha solution,
|
|
||||||
* `captcha_token`: optional, contains provider-specific captcha token
|
|
||||||
* `token`: invite token required when the registrations aren't public.
|
|
||||||
* Response: JSON. Returns a user object on success, otherwise returns `{"error": "error_msg"}`
|
|
||||||
* Example response:
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"background_image": null,
|
|
||||||
"cover_photo": "https://pleroma.soykaf.com/images/banner.png",
|
|
||||||
"created_at": "Tue Dec 18 16:55:56 +0000 2018",
|
|
||||||
"default_scope": "public",
|
|
||||||
"description": "blushy-crushy fediverse idol + pleroma dev\nlet's be friends \nぷれろまの生徒会長。謎の外人。日本語OK. \n公主病.",
|
|
||||||
"description_html": "blushy-crushy fediverse idol + pleroma dev.<br />let's be friends <br />ぷれろまの生徒会長。謎の外人。日本語OK. <br />公主病.",
|
|
||||||
"favourites_count": 0,
|
|
||||||
"fields": [],
|
|
||||||
"followers_count": 0,
|
|
||||||
"following": false,
|
|
||||||
"follows_you": false,
|
|
||||||
"friends_count": 0,
|
|
||||||
"id": 6,
|
|
||||||
"is_local": true,
|
|
||||||
"locked": false,
|
|
||||||
"name": "lain",
|
|
||||||
"name_html": "lain",
|
|
||||||
"no_rich_text": false,
|
|
||||||
"pleroma": {
|
|
||||||
"tags": []
|
|
||||||
},
|
|
||||||
"profile_image_url": "https://pleroma.soykaf.com/images/avi.png",
|
|
||||||
"profile_image_url_https": "https://pleroma.soykaf.com/images/avi.png",
|
|
||||||
"profile_image_url_original": "https://pleroma.soykaf.com/images/avi.png",
|
|
||||||
"profile_image_url_profile_size": "https://pleroma.soykaf.com/images/avi.png",
|
|
||||||
"rights": {
|
|
||||||
"delete_others_notice": false
|
|
||||||
},
|
|
||||||
"screen_name": "lain",
|
|
||||||
"statuses_count": 0,
|
|
||||||
"statusnet_blocking": false,
|
|
||||||
"statusnet_profile_url": "https://pleroma.soykaf.com/users/lain"
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
## `/api/pleroma/admin/`…
|
## `/api/pleroma/admin/`…
|
||||||
See [Admin-API](admin_api.md)
|
See [Admin-API](admin_api.md)
|
||||||
|
|
||||||
|
@ -302,6 +249,7 @@ See [Admin-API](admin_api.md)
|
||||||
* `follows`: BOOLEAN field, receives notifications from people the user follows
|
* `follows`: BOOLEAN field, receives notifications from people the user follows
|
||||||
* `remote`: BOOLEAN field, receives notifications from people on remote instances
|
* `remote`: BOOLEAN field, receives notifications from people on remote instances
|
||||||
* `local`: BOOLEAN field, receives notifications from people on the local instance
|
* `local`: BOOLEAN field, receives notifications from people on the local instance
|
||||||
|
* `privacy_option`: BOOLEAN field. When set to true, it removes the contents of a message from the push notification.
|
||||||
* Response: JSON. Returns `{"status": "success"}` if the update was successful, otherwise returns `{"error": "error_msg"}`
|
* Response: JSON. Returns `{"status": "success"}` if the update was successful, otherwise returns `{"error": "error_msg"}`
|
||||||
|
|
||||||
## `/api/pleroma/healthcheck`
|
## `/api/pleroma/healthcheck`
|
||||||
|
@@ -503,11 +451,11 @@ Emoji reactions work a lot like favourites do. They make it possible to react to

* Method: `GET`
* Authentication: optional
* Params: None
* Response: JSON, a list of emoji/account list tuples, sorted by emoji insertion date in ascending order, i.e. the first emoji in the list is the oldest.
* Example Response:

```json
[
  {"emoji": "😀", "count": 2, "accounts": [{"id": "xyz...", ...}, {"id": "zyx...", ...}]},
  {"emoji": "☕", "count": 1, "accounts": [{"id": "abc...", ...}]}
]
```

docs/admin/config.md (new file)
@@ -0,0 +1,79 @@

# Configuring instance

You can configure your instance from the admin interface. You need an account with admin rights and a small change in the config file, which allows configuring settings from the database:

```elixir
config :pleroma, configurable_from_database: true
```

## How it works

Settings are stored in the database and are applied at runtime after each change. Most settings take effect immediately; a few are needed at compile time and only apply after an instance reboot. That is why the settings are also duplicated to a file.

The file with the duplicated settings is located in `config/{env}.exported_from_db.exs` if Pleroma is run from source. For the prod environment it will be `config/prod.exported_from_db.exs`.

For releases: `/etc/pleroma/prod.exported_from_db.secret.exs` or `PLEROMA_CONFIG_PATH/prod.exported_from_db.exs`.

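For orientation, the exported file is an ordinary Elixir config file. A minimal sketch of what it might contain is shown below; the keys and values here are illustrative assumptions, not output copied from a real instance:

```elixir
# Hypothetical contents of config/prod.exported_from_db.exs after saving a few
# settings in the admin interface; the actual keys mirror whatever you changed.
use Mix.Config

config :pleroma, :instance,
  name: "My Pleroma Instance",
  limit: 5000

config :pleroma, Pleroma.Captcha,
  enabled: true,
  seconds_valid: 60
```
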
## How to set it up

You need to migrate your existing settings to the database. This task migrates only the settings you added yourself: for example, if you added settings to the `prod.secret.exs` file, only these settings will be migrated to the database. For a release it will be `/etc/pleroma/config.exs` or `PLEROMA_CONFIG_PATH`.

You can do this with a mix task (all config files will remain untouched):

```sh tab="OTP"
./bin/pleroma_ctl config migrate_to_db
```

```sh tab="From Source"
mix pleroma.config migrate_to_db
```

Now you can change settings in the admin interface. After each save, the settings from the database are duplicated to the `config/{env}.exported_from_db.exs` file.

<span style="color:red">**ATTENTION**</span>

**<span style="color:red">Be careful while changing the settings. An inaccurate configuration change can break federation or the loading of the instance.</span>**

*Compile time settings, which require an instance reboot and can break instance loading:*

- all settings inside these keys:
  - `:hackney_pools`
  - `:chat`
- partially, settings inside these keys:
  - `:seconds_valid` in `Pleroma.Captcha`
  - `:proxy_remote` in `Pleroma.Upload`
  - `:upload_limit` in `:instance`

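As a concrete illustration of the list above, a value such as `:seconds_valid` in `Pleroma.Captcha` is read at compile time, so it should also stay in the file-based config and a change to it only fully applies after a reboot. The snippet below is an example with an arbitrary value, not a recommended setting:

```elixir
# Example only: `seconds_valid` is one of the partially compile-time values listed
# above, so keep it in the file-based config; changing it requires an instance reboot.
config :pleroma, Pleroma.Captcha,
  seconds_valid: 60
```
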
## How to dump settings from database to file

*Adding the `-d` flag will delete the migrated settings from the database table.*

```sh tab="OTP"
./bin/pleroma_ctl config migrate_from_db [-d]
```

```sh tab="From Source"
mix pleroma.config migrate_from_db [-d]
```

## How to completely remove it

1. Truncate or delete all values from the `config` table

   ```sql
   TRUNCATE TABLE config;
   ```

2. Delete `config/{env}.exported_from_db.exs`.

   For the `prod` env:

   ```bash
   cd /opt/pleroma
   cp config/prod.exported_from_db.exs config/exported_from_db.back
   rm -rf config/prod.exported_from_db.exs
   ```

   *If you don't want to back up the settings, you can skip the step with the `cp` command.*

3. Set `configurable_from_database` to `false`.

   ```elixir
   config :pleroma, configurable_from_database: false
   ```

4. Restart your Pleroma instance

   ```bash
   sudo service pleroma restart
   ```

@@ -3,17 +3,26 @@

!!! danger
    This is a Work In Progress, not usable just yet.

{! backend/administration/CLI_tasks/general_cli_task_info.include !}

## Transfer config from file to DB.

```sh tab="OTP"
./bin/pleroma_ctl config migrate_to_db
```

```sh tab="From Source"
mix pleroma.config migrate_to_db
```

## Transfer config from DB to `config/env.exported_from_db.secret.exs`

To delete the transferred settings from the database, the optional flag `-d` can be used. `<env>` is `prod` by default.

```sh tab="OTP"
./bin/pleroma_ctl config migrate_from_db [--env=<env>] [-d]
```

```sh tab="From Source"
mix pleroma.config migrate_from_db [--env=<env>] [-d]
```

@@ -1,6 +1,6 @@
# Database maintenance tasks

{! backend/administration/CLI_tasks/general_cli_task_info.include !}

!!! danger
    These mix tasks can take a long time to complete. Many of them were written to address specific database issues that happened because of bugs in migrations or other specific scenarios. Do not run these tasks "just in case" if everything is fine with your instance.

@@ -9,8 +9,12 @@ Every command should be ran with a prefix, in case of OTP releases it is `./bin/

Replaces embedded objects with references to them in the `objects` table. This only needs to be run once if the instance was created before Pleroma 1.0.5. The reason this is not a migration is that it can significantly increase the database size while it runs; however, after it finishes, `VACUUM FULL` will be able to reclaim about 20% (this really depends on what is in the database, your mileage may vary) of the database size from before the migration.

```sh tab="OTP"
./bin/pleroma_ctl database remove_embedded_objects [<options>]
```

```sh tab="From Source"
mix pleroma.database remove_embedded_objects [<options>]
```

### Options

@@ -20,11 +24,15 @@ $PREFIX remove_embedded_objects [<options>]

This will prune remote posts older than 90 days (configurable with [`config :pleroma, :instance, remote_post_retention_days`](../../configuration/cheatsheet.md#instance)) from the database; they will be refetched from source when accessed.

!!! danger
    The disk space will only be reclaimed after `VACUUM FULL`. You may run out of disk space during the execution of the task or vacuuming if you don't have about a third of the database size free.

```sh tab="OTP"
./bin/pleroma_ctl database prune_objects [<options>]
```

```sh tab="From Source"
mix pleroma.database prune_objects [<options>]
```

### Options

@@ -34,18 +42,30 @@ $PREFIX pleroma.database prune_objects [<options>]

Can be safely re-run

```sh tab="OTP"
./bin/pleroma_ctl database bump_all_conversations
```

```sh tab="From Source"
mix pleroma.database bump_all_conversations
```

## Remove duplicated items from following and update followers count for all users

```sh tab="OTP"
./bin/pleroma_ctl database update_users_following_followers_counts
```

```sh tab="From Source"
mix pleroma.database update_users_following_followers_counts
```

## Fix the pre-existing "likes" collections for all objects

```sh tab="OTP"
./bin/pleroma_ctl database fix_likes_collections
```

```sh tab="From Source"
mix pleroma.database fix_likes_collections
```

@@ -1,13 +1,25 @@
# Managing digest emails

{! backend/administration/CLI_tasks/general_cli_task_info.include !}

## Send digest email since given date (user registration date by default) ignoring user activity status.

```sh tab="OTP"
./bin/pleroma_ctl digest test <nickname> [<since_date>]
```

```sh tab="From Source"
mix pleroma.digest test <nickname> [<since_date>]
```

Example:

```sh tab="OTP"
./bin/pleroma_ctl digest test donaldtheduck 2019-05-20
```

```sh tab="From Source"
mix pleroma.digest test donaldtheduck 2019-05-20
```

@@ -1,28 +1,44 @@
# Managing emoji packs

{! backend/administration/CLI_tasks/general_cli_task_info.include !}

## Lists emoji packs and metadata specified in the manifest

```sh tab="OTP"
./bin/pleroma_ctl emoji ls-packs [<options>]
```

```sh tab="From Source"
mix pleroma.emoji ls-packs [<options>]
```

### Options
- `-m, --manifest PATH/URL` - path to a custom manifest; it can either be a URL starting with `http`, in which case the manifest will be fetched from that address, or a local path

## Fetch, verify and install the specified packs from the manifest into `STATIC-DIR/emoji/PACK-NAME`

```sh tab="OTP"
./bin/pleroma_ctl emoji get-packs [<options>] <packs>
```

```sh tab="From Source"
mix pleroma.emoji get-packs [<options>] <packs>
```

### Options
- `-m, --manifest PATH/URL` - same as [`ls-packs`](#ls-packs)

## Create a new manifest entry and a file list from the specified remote pack file

```sh tab="OTP"
./bin/pleroma_ctl emoji gen-pack PACK-URL
```

```sh tab="From Source"
mix pleroma.emoji gen-pack PACK-URL
```

Currently, only .zip archives are recognized as remote pack files, and packs are therefore assumed to be zip archives. This command is intended to run interactively and will first ask you some basic questions about the pack, then download the remote file and generate a SHA256 checksum for it, then generate an emoji file list for you.

The manifest entry will either be written to a newly created `index.json` file or appended to the existing one, *replacing* the old pack with the same name if it was in the file previously.

@@ -0,0 +1,5 @@
Every command should be run as the `pleroma` user from its home directory. For example, if you are a superuser, you would have to wrap the command in `su pleroma -s $SHELL -lc "$COMMAND"`.

??? note "From source note about `MIX_ENV`"

    The `mix` command should be prefixed with the name of the environment your Pleroma server is running in, usually `MIX_ENV=prod`.

@@ -1,12 +1,17 @@
# Managing instance configuration

{! backend/administration/CLI_tasks/general_cli_task_info.include !}

## Generate a new configuration file

```sh tab="OTP"
./bin/pleroma_ctl instance gen [<options>]
```

```sh tab="From Source"
mix pleroma.instance gen [<options>]
```

If any of the options are left unspecified, you will be prompted interactively.

### Options

@@ -1,30 +1,33 @@
# Managing relays

{! backend/administration/CLI_tasks/general_cli_task_info.include !}

## Follow a relay

```sh tab="OTP"
./bin/pleroma_ctl relay follow <relay_url>
```

```sh tab="From Source"
mix pleroma.relay follow <relay_url>
```

## Unfollow a remote relay

```sh tab="OTP"
./bin/pleroma_ctl relay unfollow <relay_url>
```

```sh tab="From Source"
mix pleroma.relay unfollow <relay_url>
```

## List relay subscriptions

```sh tab="OTP"
./bin/pleroma_ctl relay list
```

```sh tab="From Source"
mix pleroma.relay list
```

@@ -1,11 +1,16 @@
# Managing uploads

{! backend/administration/CLI_tasks/general_cli_task_info.include !}

## Migrate uploads from local to remote storage

```sh tab="OTP"
./bin/pleroma_ctl uploads migrate_local <target_uploader> [<options>]
```

```sh tab="From Source"
mix pleroma.uploads migrate_local <target_uploader> [<options>]
```

### Options
- `--delete` - delete local uploads after migrating them to the target uploader

@@ -1,12 +1,18 @@
# Managing users

{! backend/administration/CLI_tasks/general_cli_task_info.include !}

## Create a user

```sh tab="OTP"
./bin/pleroma_ctl user new <nickname> <email> [<options>]
```

```sh tab="From Source"
mix pleroma.user new <nickname> <email> [<options>]
```

### Options
- `--name <name>` - the user's display name
- `--bio <bio>` - the user's bio

@@ -16,84 +22,159 @@ $PREFIX new <nickname> <email> [<options>]
- `-y`, `--assume-yes`/`--no-assume-yes` - whether to assume yes to all questions

## List local users

```sh tab="OTP"
./bin/pleroma_ctl user list
```

```sh tab="From Source"
mix pleroma.user list
```

## Generate an invite link

```sh tab="OTP"
./bin/pleroma_ctl user invite [<options>]
```

```sh tab="From Source"
mix pleroma.user invite [<options>]
```

### Options
- `--expires-at DATE` - last day on which token is active (e.g. "2019-04-05")
- `--max-use NUMBER` - maximum number of token uses

## List generated invites

```sh tab="OTP"
./bin/pleroma_ctl user invites
```

```sh tab="From Source"
mix pleroma.user invites
```

## Revoke invite

```sh tab="OTP"
./bin/pleroma_ctl user revoke_invite <token_or_id>
```

```sh tab="From Source"
mix pleroma.user revoke_invite <token_or_id>
```

## Delete a user

```sh tab="OTP"
./bin/pleroma_ctl user rm <nickname>
```

```sh tab="From Source"
mix pleroma.user rm <nickname>
```

## Delete user's posts and interactions

```sh tab="OTP"
./bin/pleroma_ctl user delete_activities <nickname>
```

```sh tab="From Source"
mix pleroma.user delete_activities <nickname>
```

## Sign user out from all applications (delete user's OAuth tokens and authorizations)

```sh tab="OTP"
./bin/pleroma_ctl user sign_out <nickname>
```

```sh tab="From Source"
mix pleroma.user sign_out <nickname>
```

## Deactivate or activate a user

```sh tab="OTP"
./bin/pleroma_ctl user toggle_activated <nickname>
```

```sh tab="From Source"
mix pleroma.user toggle_activated <nickname>
```

## Unsubscribe local users from a user and deactivate the user

```sh tab="OTP"
./bin/pleroma_ctl user unsubscribe NICKNAME
```

```sh tab="From Source"
mix pleroma.user unsubscribe NICKNAME
```

## Unsubscribe local users from an instance and deactivate all accounts on it

```sh tab="OTP"
./bin/pleroma_ctl user unsubscribe_all_from_instance <instance>
```

```sh tab="From Source"
mix pleroma.user unsubscribe_all_from_instance <instance>
```

## Create a password reset link for user

```sh tab="OTP"
./bin/pleroma_ctl user reset_password <nickname>
```

```sh tab="From Source"
mix pleroma.user reset_password <nickname>
```

## Set the value of the given user's settings

```sh tab="OTP"
./bin/pleroma_ctl user set <nickname> [<options>]
```

```sh tab="From Source"
mix pleroma.user set <nickname> [<options>]
```

### Options
- `--locked`/`--no-locked` - whether the user should be locked
- `--moderator`/`--no-moderator` - whether the user should be a moderator
- `--admin`/`--no-admin` - whether the user should be an admin

## Add tags to a user

```sh tab="OTP"
./bin/pleroma_ctl user tag <nickname> <tags>
```

```sh tab="From Source"
mix pleroma.user tag <nickname> <tags>
```

## Delete tags from a user

```sh tab="OTP"
./bin/pleroma_ctl user untag <nickname> <tags>
```

```sh tab="From Source"
mix pleroma.user untag <nickname> <tags>
```

## Toggle confirmation status of the user

```sh tab="OTP"
./bin/pleroma_ctl user toggle_confirmed <nickname>
```

```sh tab="From Source"
mix pleroma.user toggle_confirmed <nickname>
```

@@ -70,11 +70,6 @@ You shouldn't edit the base config directly to avoid breakages and merge conflic
* `account_field_value_length`: An account field value maximum length (default: `2048`).
* `external_user_synchronization`: Enabling following/followers counters synchronization for external users.

## Federation
### MRF policies

@@ -379,13 +374,19 @@ For each pool, the options are:
## Captcha

### Pleroma.Captcha

* `enabled`: Whether the captcha should be shown on registration.
* `method`: The method/service to use for captcha.
* `seconds_valid`: The time in seconds for which the captcha is valid.

### Captcha providers

#### Pleroma.Captcha.Native

A built-in captcha provider. Enabled by default.

#### Pleroma.Captcha.Kocaptcha

Kocaptcha is a very simple captcha service with a single API endpoint; the source code is here: https://github.com/koto-bank/kocaptcha. The default endpoint `https://captcha.kotobank.ch` is hosted by the developer.

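For reference, wiring up one of these providers in the file-based config might look roughly like the sketch below. The keys are the ones listed above and the endpoint is the default mentioned in this section, but treat the exact shape as an assumption and check it against your own configuration:

```elixir
# Sketch: select a captcha provider via the `method` key described above.
config :pleroma, Pleroma.Captcha,
  enabled: true,
  method: Pleroma.Captcha.Kocaptcha,
  seconds_valid: 60

# Assumed endpoint setting for Kocaptcha, using the default public endpoint
# mentioned in this section.
config :pleroma, Pleroma.Captcha.Kocaptcha,
  endpoint: "https://captcha.kotobank.ch"
```
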
@@ -447,6 +448,7 @@ An example for Sendgrid adapter:

```elixir
config :pleroma, Pleroma.Emails.Mailer,
  enabled: true,
  adapter: Swoosh.Adapters.Sendgrid,
  api_key: "YOUR_API_KEY"
```

@@ -455,13 +457,13 @@ An example for SMTP adapter:

```elixir
config :pleroma, Pleroma.Emails.Mailer,
  enabled: true,
  adapter: Swoosh.Adapters.SMTP,
  relay: "smtp.gmail.com",
  username: "YOUR_USERNAME@gmail.com",
  password: "YOUR_SMTP_PASSWORD",
  port: 465,
  ssl: true,
  auth: :always
```

|
||||||
rel: "ugc"
|
rel: "ugc"
|
||||||
]
|
]
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Custom Runtime Modules (`:modules`)
|
||||||
|
|
||||||
|
* `runtime_dir`: A path to custom Elixir modules (such as MRF policies).
|
||||||
|
|
||||||
|
|
||||||
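A minimal sketch of what this could look like, assuming a `runtime_dir` pointing at a directory you manage yourself; the path and key layout here are examples, not documented defaults:

```elixir
# Sketch: compile custom Elixir modules (e.g. MRF policies) from this
# directory at runtime; the directory path is only an example.
config :pleroma, :modules,
  runtime_dir: "instance/modules"
```
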
## :configurable_from_database

Enable/disable configuration from the database.

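The flag itself is the same one shown in `docs/admin/config.md` earlier in this change set:

```elixir
# Allow changing settings from the admin interface / database.
config :pleroma, configurable_from_database: true
```
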
@@ -1,274 +0,0 @@
# Installing on CentOS 7
## Installation

This guide is a step-by-step installation guide for CentOS 7. It also assumes that you have administrative rights, either as root or a user with [sudo permissions](https://www.digitalocean.com/community/tutorials/how-to-create-a-sudo-user-on-centos-quickstart). If you want to run this guide with root, ignore the `sudo` at the beginning of the lines, unless it calls a user like `sudo -Hu pleroma`; in this case, use `su <username> -s $SHELL -c 'command'` instead.

### Required packages

* `postgresql` (9.6+, CentOS 7 comes with 9.2, we will install version 11 in this guide)
* `elixir` (1.5+)
* `erlang`
* `erlang-parsetools`
* `erlang-xmerl`
* `git`
* Development Tools

#### Optional packages used in this guide

* `nginx` (preferred, example configs for other reverse proxies can be found in the repo)
* `certbot` (or any other ACME client for Let’s Encrypt certificates)

### Prepare the system

* First update the system, if not already done:

```shell
sudo yum update
```

* Install some of the above mentioned programs:

```shell
sudo yum install wget git unzip
```

* Install development tools:

```shell
sudo yum group install "Development Tools"
```

### Install Elixir and Erlang

* Add the EPEL repo:

```shell
sudo yum install epel-release
sudo yum -y update
```

* Install the Erlang repository:

```shell
wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions-1.0-1.noarch.rpm
sudo rpm -Uvh erlang-solutions-1.0-1.noarch.rpm
```

* Install Erlang:

```shell
sudo yum install erlang erlang-parsetools erlang-xmerl
```

* Download the [latest Elixir release from GitHub](https://github.com/elixir-lang/elixir/releases/tag/v1.8.1) (example for the newest version at the time this manual was written):

```shell
wget -P /tmp/ https://github.com/elixir-lang/elixir/releases/download/v1.8.1/Precompiled.zip
```

* Create the folder where you want to install Elixir; we’ll use:

```shell
sudo mkdir -p /opt/elixir
```

* Unzip the downloaded file there:

```shell
sudo unzip /tmp/Precompiled.zip -d /opt/elixir
```

* Create symlinks for the pre-compiled binaries:

```shell
for e in elixir elixirc iex mix; do sudo ln -s /opt/elixir/bin/${e} /usr/local/bin/${e}; done
```

### Install PostgreSQL

* Add the PostgreSQL repository:

```shell
sudo yum install https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/pgdg-centos11-11-2.noarch.rpm
```

* Install the PostgreSQL server:

```shell
sudo yum install postgresql11-server postgresql11-contrib
```

* Initialize the database:

```shell
sudo /usr/pgsql-11/bin/postgresql-11-setup initdb
```

* Open the configuration file `/var/lib/pgsql/11/data/pg_hba.conf` and change the following lines from:

```plain
# IPv4 local connections:
host    all    all    127.0.0.1/32    ident
# IPv6 local connections:
host    all    all    ::1/128         ident
```

to

```plain
# IPv4 local connections:
host    all    all    127.0.0.1/32    md5
# IPv6 local connections:
host    all    all    ::1/128         md5
```

* Enable and start the PostgreSQL server:

```shell
sudo systemctl enable --now postgresql-11.service
```

### Install PleromaBE

* Add a new system user for the Pleroma service:

```shell
sudo useradd -r -s /bin/false -m -d /var/lib/pleroma -U pleroma
```

**Note**: To execute a single command as the Pleroma system user, use `sudo -Hu pleroma command`. You can also switch to a shell by using `sudo -Hu pleroma $SHELL`. If you don’t have and want `sudo` on your system, you can use `su` as root user (UID 0) for a single command by using `su -l pleroma -s $SHELL -c 'command'` and `su -l pleroma -s $SHELL` for starting a shell.

* Git clone the PleromaBE repository and make the Pleroma user the owner of the directory:

```shell
sudo mkdir -p /opt/pleroma
sudo chown -R pleroma:pleroma /opt/pleroma
sudo -Hu pleroma git clone -b stable https://git.pleroma.social/pleroma/pleroma /opt/pleroma
```

* Change to the new directory:

```shell
cd /opt/pleroma
```

* Install the dependencies for Pleroma and answer with `yes` if it asks you to install `Hex`:

```shell
sudo -Hu pleroma mix deps.get
```

* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen`
  * Answer with `yes` if it asks you to install `rebar3`.
  * This may take some time, because parts of pleroma get compiled first.
  * After that it will ask you a few questions about your instance and generate a configuration file in `config/generated_config.exs`.

* Check the configuration and, if everything looks right, rename it so Pleroma will load it (`prod.secret.exs` for production instances, `dev.secret.exs` for development instances):

```shell
mv config/{generated_config.exs,prod.secret.exs}
```

* The previous command also creates the file `config/setup_db.psql`, with which you can create the database:

```shell
sudo -Hu postgres psql -f config/setup_db.psql
```

* Now run the database migration:

```shell
sudo -Hu pleroma MIX_ENV=prod mix ecto.migrate
```

* Now you can start Pleroma already:

```shell
sudo -Hu pleroma MIX_ENV=prod mix phx.server
```

### Finalize installation

If you want to open your newly installed instance to the world, you should run nginx or some other webserver/proxy in front of Pleroma, and you should consider creating a systemd service file for Pleroma.

#### Nginx

* Install nginx, if not already done:

```shell
sudo yum install nginx
```

* Set up your SSL cert, using your method of choice or certbot. If using certbot, first install it:

```shell
sudo yum install certbot-nginx
```

and then set it up:

```shell
sudo mkdir -p /var/lib/letsencrypt/
sudo certbot certonly --email <your@emailaddress> -d <yourdomain> --standalone
```

If that doesn’t work, make sure that nginx is not already running. If it still doesn’t work, try setting up nginx first (change ssl "on" to "off" and try again).

---

* Copy the example nginx configuration to the nginx folder:

```shell
sudo cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/conf.d/pleroma.conf
```

* Before starting nginx, edit the configuration and change it to your needs (e.g. change the server name, change the cert paths).
* Enable and start nginx:

```shell
sudo systemctl enable --now nginx
```

If you need to renew the certificate in the future, uncomment the relevant location block in the nginx config and run:

```shell
sudo certbot certonly --email <your@emailaddress> -d <yourdomain> --webroot -w /var/lib/letsencrypt/
```

#### Other webserver/proxies

You can find example configurations for them in `/opt/pleroma/installation/`.

#### Systemd service

* Copy the example service file:

```shell
sudo cp /opt/pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service
```

* Edit the service file and make sure that all paths fit your installation
* Enable and start `pleroma.service`:

```shell
sudo systemctl enable --now pleroma.service
```

#### Create your first user

If your instance is up and running, you can create your first user with administrative rights with the following task:

```shell
sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress> --admin
```

#### Further reading

* [Backup your instance](../administration/backup.md)
* [Hardening your instance](../configuration/hardening.md)
* [How to activate mediaproxy](../configuration/howto_mediaproxy.md)
* [Updating your instance](../administration/updating.md)

## Questions

If you have questions about the installation, or it didn’t work as it should, ask in [#pleroma:matrix.org](https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org) or in the IRC channel **#pleroma** on **Freenode**.

@@ -1,42 +1,28 @@
# Switching a from-source install to OTP releases

## What are OTP releases?
OTP releases are as close as you can get to binary releases with Erlang/Elixir. The release is self-contained and provides everything needed to boot it; it is easily administered via the provided shell script to open up a remote console, start/stop/restart the release, start in the background, send remote commands, and more.

### Can I still run the develop branch if I decide to use them?
Yes, we produce builds for every commit in `develop`. However, `develop` is considered unstable, so please don't use it in production just for faster access to new features, unless you need them as an app developer.

## Why would one want to switch?
Benefits of OTP releases over from-source installs include:
* **Less space used.** OTP releases come without source code or build tools, have docs and debug symbols stripped from the compiled bytecode, and do not contain tests, docs, or revision history.
* **Minimal system dependencies.** Excluding the database and reverse proxy, only `curl`, `unzip` and `ncurses` are needed to download and run the release. Because the Erlang runtime and Elixir are shipped with Pleroma, one can use the latest BEAM optimizations and Pleroma features without having to worry about outdated system repos or a missing `erlang-*` package.
* **Potentially fewer bugs and better performance.** This extends the previous point: because we have control over exactly what gets shipped, we can tweak the VM arguments and forget about weird bugs due to Erlang/Elixir version mismatches.
* **Faster and less bug-prone mix tasks.** On a from-source install one has to wait until a new Pleroma node is started for each mix task, and they execute outside of the instance context (for example, if a user was deleted via a mix task, the instance will have no knowledge of that and continue to display status count and follows before the cache expires). Mix tasks in OTP releases are executed by calling into a running instance via RPC, which solves both of these problems.

### Sounds great, how do I switch?
Currently we support Linux machines with GNU (e.g. Debian, Ubuntu) or musl (e.g. Alpine) libc and `x86_64`, `aarch64` or `armv7l` CPUs. If you are unsure, check the [Detecting flavour](otp_en.md#detecting-flavour) section in the OTP install guide. If your platform is supported, proceed with the guide; if not, check the [My platform is not supported](#my-platform-is-not-supported) section.

### I don't think it is worth the effort, can I stay on a from-source install?
Yes, currently there are no plans to deprecate them.

### My platform is not supported
If you think your platform is a popular choice for running Pleroma instances, or has the potential to become one, you can [file an issue on our Gitlab](https://git.pleroma.social/pleroma/pleroma/issues/new). If not, guides on how to build and update releases by yourself will be available soon.

## Pre-requisites
You will be running commands as root. If you aren't root already, please elevate your privileges by executing `sudo su`/`su`.

The system needs to have `curl` and `unzip` installed for downloading and unpacking release builds.

```sh tab="Alpine"
apk add curl unzip
```

```sh tab="Debian/Ubuntu"
apt install curl unzip
```

## Moving content out of the application directory
When using OTP releases the application directory changes with every version, so it would be a bother to keep content there (and also dangerous unless the `--no-rm` option is used when updating). Fortunately almost all paths in Pleroma are configurable, so it is possible to move them out of there.

Pleroma should be stopped before proceeding.

### Moving uploads/custom public files directory

```sh
# Create uploads directory and set proper permissions (skip if using a remote uploader)
# Note: It does not have to be `/var/lib/pleroma/uploads`, you can configure it to be something else later
```

@@ -92,8 +78,8 @@ Before proceeding, get the flavour from [Detecting flavour](otp_en.md#detecting-

```sh
rm -r ~pleroma/*

# Set the flavour environment variable to the string you got in Detecting flavour section.
# For example if the flavour is `amd64-musl` the command will be
export FLAVOUR="amd64-musl"

# Clone the release build into a temporary directory and unpack it
# Replace `stable` with `unstable` if you want to run the unstable branch
```

@@ -124,8 +110,15 @@ OTP releases have different service files than from-source installs so they need

**Warning:** The service files assume the pleroma user's home directory is `/opt/pleroma`, so please make sure all paths fit your installation.

```sh tab="Alpine"
# Copy the service into a proper directory
cp -f ~pleroma/installation/init.d/pleroma /etc/init.d/pleroma

# Start pleroma
rc-service pleroma start
```

```sh tab="Debian/Ubuntu"
# Copy the service into a proper directory
cp ~pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service
```

@@ -139,14 +132,6 @@ systemctl reenable pleroma

```sh
systemctl start pleroma
```

## Running mix tasks
Refer to the [Running mix tasks](otp_en.md#running-mix-tasks) section of the OTP release installation guide.

## Updating

@@ -1,6 +1,6 @@
# Installing on OpenBSD

This guide describes the installation and configuration of pleroma (and the required software to run it) on a single OpenBSD 6.6 server.

For any additional information regarding commands and configuration files mentioned here, check the man pages [online](https://man.openbsd.org/) or directly on your server with the man command.

@@ -40,7 +40,12 @@ Enter a shell as the \_pleroma user. As root, run `su _pleroma -;cd`. Then clone

#### PostgreSQL
Start a shell as the \_postgresql user (as root run `su _postgresql -`), then run the `initdb` command to initialize postgresql.
You will need to specify the pgdata directory to the default (/var/postgresql/data) with the `-D <path>` flag and set the user to postgres with the `-U <username>` flag. This can be done as follows:

```
initdb -D /var/postgresql/data -U postgres
```
If you are not using the default directory, you will have to update the `datadir` variable in the /etc/rc.d/postgresql script.

When this is done, enable postgresql so that it starts on boot and start it. As root, run:
```

@@ -81,7 +86,6 @@ server "default" {
}

types {
	include "/usr/share/misc/mime.types"
}
```
Do not forget to change *<IPv4/6 address\>* to your server's address(es). If httpd should only listen on one protocol family, comment out one of the first two *listen* options.

@@ -103,7 +107,7 @@ Insert the following configuration in /etc/acme-client.conf:

authority letsencrypt-<domain name> {
	#agreement url "https://letsencrypt.org/documents/LE-SA-v1.2-November-15-2017.pdf"
	api url "https://acme-v02.api.letsencrypt.org/directory"
	account key "/etc/acme/letsencrypt-privkey-<domain name>.pem"
}

@@ -222,7 +226,7 @@ Then follow the main installation guide:

* run `mix deps.get`
* run `mix pleroma.instance gen` and enter your instance's information when asked
* copy config/generated\_config.exs to config/prod.secret.exs. The default values should be sufficient, but you should edit it and check that everything seems OK.
* exit your current shell back to a root one and run `psql -U postgres -f /home/_pleroma/pleroma/config/setup_db.psql` to set up the database.
* return to a \_pleroma shell into pleroma's installation directory (`su _pleroma -;cd ~/pleroma`) and run `MIX_ENV=prod mix ecto.migrate`

As \_pleroma in /home/\_pleroma/pleroma, you can now run `LC_ALL=en_US.UTF-8 MIX_ENV=prod mix phx.server` to start your instance.

@@ -230,3 +234,11 @@ In another SSH session/tmux window, check that it is working properly by running

##### Starting pleroma at boot
An rc script to automatically start pleroma at boot hasn't been written yet; it can be run in a tmux session (tmux is in base).

#### Create administrative user

If your instance is up and running, you can create your first user with administrative rights with the following command as the \_pleroma user.
```
LC_ALL=en_US.UTF-8 MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress> --admin
```

|
||||||
|
|
||||||
You will be running commands as root. If you aren't root already, please elevate your priviledges by executing `sudo su`/`su`.
|
You will be running commands as root. If you aren't root already, please elevate your priviledges by executing `sudo su`/`su`.
|
||||||
|
|
||||||
While in theory OTP releases are possbile to install on any compatible machine, for the sake of simplicity this guide focuses only on Debian/Ubuntu/Alpine.
|
While in theory OTP releases are possbile to install on any compatible machine, for the sake of simplicity this guide focuses only on Debian/Ubuntu and Alpine.
|
||||||
|
|
||||||
### Detecting flavour
|
### Detecting flavour
|
||||||
|
|
||||||
|
@ -20,6 +20,7 @@ If your platform is supported the output will contain the flavour string, you wi
|
||||||
### Installing the required packages
|
### Installing the required packages
|
||||||
|
|
||||||
Other than things bundled in the OTP release Pleroma depends on:
|
Other than things bundled in the OTP release Pleroma depends on:
|
||||||
|
|
||||||
* curl (to download the release build)
|
* curl (to download the release build)
|
||||||
* unzip (needed to unpack release builds)
|
* unzip (needed to unpack release builds)
|
||||||
* ncurses (ERTS won't run without it)
|
* ncurses (ERTS won't run without it)
|
||||||
|
@ -27,18 +28,16 @@ Other than things bundled in the OTP release Pleroma depends on:
|
||||||
* nginx (could be swapped with another reverse proxy but this guide covers only it)
|
* nginx (could be swapped with another reverse proxy but this guide covers only it)
|
||||||
* certbot (for Let's Encrypt certificates, could be swapped with another ACME client, but this guide covers only it)
|
* certbot (for Let's Encrypt certificates, could be swapped with another ACME client, but this guide covers only it)
|
||||||
|
|
||||||
Debian/Ubuntu:
|
```sh tab="Alpine"
|
||||||
```sh
|
|
||||||
apt install curl unzip libncurses5 postgresql postgresql-contrib nginx certbot
|
|
||||||
```
|
|
||||||
Alpine:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
echo "http://nl.alpinelinux.org/alpine/latest-stable/community" >> /etc/apk/repositories
|
echo "http://nl.alpinelinux.org/alpine/latest-stable/community" >> /etc/apk/repositories
|
||||||
apk update
|
apk update
|
||||||
apk add curl unzip ncurses postgresql postgresql-contrib nginx certbot
|
apk add curl unzip ncurses postgresql postgresql-contrib nginx certbot
|
||||||
```
|
```
|
||||||
|
|
||||||
|
```sh tab="Debian/Ubuntu"
|
||||||
|
apt install curl unzip libncurses5 postgresql postgresql-contrib nginx certbot
|
||||||
|
```
|
||||||
|
|
||||||
## Setup
### Configuring PostgreSQL
#### (Optional) Installing RUM indexes

@ -48,12 +47,7 @@ apk add curl unzip ncurses postgresql postgresql-contrib nginx certbot

RUM indexes are an alternative indexing scheme that is not included in PostgreSQL by default. You can read more about them on the [Configuration page](../configuration/cheatsheet.md#rum-indexing-for-full-text-search). They are completely optional and most of the time are not worth it, especially if you are running a single user instance (unless you absolutely need ordered search results).

Debian/Ubuntu (available only on Buster/19.04):
```sh tab="Alpine"
```sh
apt install postgresql-11-rum
```
Alpine:
```sh
apk add git build-base postgresql-dev
git clone https://github.com/postgrespro/rum /tmp/rum
cd /tmp/rum

@ -62,25 +56,31 @@ make USE_PGXS=1 install

cd
rm -r /tmp/rum
```

```sh tab="Debian/Ubuntu"
# Available only on Buster/19.04
apt install postgresql-11-rum
```
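(Not part of the diff above, just a pointer.) Installing the extension alone is not enough; RUM search also has to be switched on in Pleroma's configuration. A minimal sketch, assuming the `:database` / `rum_enabled` key described in the configuration cheatsheet linked above:

```elixir
# config/prod.secret.exs — assumed key; verify against the configuration cheatsheet
config :pleroma, :database, rum_enabled: true
```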
#### (Optional) Performance configuration
For optimal performance, you may use [PGTune](https://pgtune.leopard.in.ua); don't forget to restart PostgreSQL after editing the configuration.

Debian/Ubuntu:
```sh tab="Alpine"
```sh
systemctl restart postgresql
```
Alpine:
```sh
rc-service postgresql restart
```

```sh tab="Debian/Ubuntu"
systemctl restart postgresql
```

### Installing Pleroma
```sh
# Create the Pleroma user
# Create a Pleroma user
adduser --system --shell /bin/false --home /opt/pleroma pleroma

# Set the flavour environment variable to the string you got in Detecting flavour section.
# For example if the flavour is `arm64-musl` the command will be
# For example if the flavour is `amd64-musl` the command will be
export FLAVOUR="arm64-musl"
export FLAVOUR="amd64-musl"

# Clone the release build into a temporary directory and unpack it
su pleroma -s $SHELL -lc "

@ -133,49 +133,52 @@ su pleroma -s $SHELL -lc "./bin/pleroma stop"

### Setting up nginx and getting Let's Encrypt SSL certificates

#### Get a Let's Encrypt certificate
```sh
# Get a Let's Encrypt certificate
certbot certonly --standalone --preferred-challenges http -d yourinstance.tld
```

# Copy the Pleroma nginx configuration to the nginx folder
#### Copy Pleroma nginx configuration to the nginx folder
# The location of nginx configs is dependent on the distro

# For Debian/Ubuntu:
The location of nginx configs is dependent on the distro

```sh tab="Alpine"
cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/conf.d/pleroma.conf
```

```sh tab="Debian/Ubuntu"
cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/sites-available/pleroma.nginx
ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/pleroma.nginx
# For Alpine:
```
cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/conf.d/pleroma.conf
# If your distro does not have either of those you can append
# `include /etc/nginx/pleroma.conf` to the end of the http section in /etc/nginx/nginx.conf and
cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/pleroma.conf

# Edit the nginx config replacing example.tld with your (sub)domain
If your distro does not have either of those you can append `include /etc/nginx/pleroma.conf` to the end of the http section in /etc/nginx/nginx.conf and
```sh
cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/pleroma.conf
```

#### Edit the nginx config
```sh
# Replace example.tld with your (sub)domain
$EDITOR path-to-nginx-config

# Verify that the config is valid
nginx -t
```
#### Start nginx

# Start nginx
```sh tab="Alpine"
# For Debian/Ubuntu:
systemctl start nginx
# For Alpine:
rc-service nginx start
```

At this point if you open your (sub)domain in a browser you should see a 502 error, that's because pleroma is not started yet.
```sh tab="Debian/Ubuntu"
systemctl start nginx
```

At this point if you open your (sub)domain in a browser you should see a 502 error, that's because Pleroma is not started yet.

### Setting up a system service
Debian/Ubuntu:
```sh
# Copy the service into a proper directory
cp /opt/pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service

# Start pleroma and enable it on boot
```sh tab="Alpine"
systemctl start pleroma
systemctl enable pleroma
```
Alpine:
```sh
# Copy the service into a proper directory
cp /opt/pleroma/installation/init.d/pleroma /etc/init.d/pleroma

@ -184,13 +187,22 @@ rc-service pleroma start

rc-update add pleroma
```

```sh tab="Debian/Ubuntu"
# Copy the service into a proper directory
cp /opt/pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service

# Start pleroma and enable it on boot
systemctl start pleroma
systemctl enable pleroma
```

If everything worked, you should see Pleroma-FE when visiting your domain. If that didn't happen, try reviewing the installation steps, starting Pleroma in the foreground and seeing if there are any errors.

Still doesn't work? Feel free to contact us on [#pleroma on freenode](https://webchat.freenode.net/?channels=%23pleroma) or via matrix at <https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org>; you can also [file an issue on our Gitlab](https://git.pleroma.social/pleroma/pleroma/issues/new)
Still doesn't work? Feel free to contact us on [#pleroma on freenode](https://irc.pleroma.social) or via matrix at <https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org>; you can also [file an issue on our Gitlab](https://git.pleroma.social/pleroma/pleroma-support/issues/new)

## Post installation

### Setting up auto-renew Let's Encrypt certificate
### Setting up auto-renew of the Let's Encrypt certificate
```sh
# Create the directory for webroot challenges
mkdir -p /var/lib/letsencrypt

@ -201,25 +213,8 @@ $EDITOR path-to-nginx-config

# Verify that the config is valid
nginx -t
```
Debian/Ubuntu:
```sh
# Restart nginx
systemctl restart nginx

# Ensure the webroot method and post hook is working
```sh tab="Alpine"
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --dry-run --post-hook 'systemctl nginx reload'

# Add it to the daily cron
echo '#!/bin/sh
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --post-hook "systemctl reload nginx"
' > /etc/cron.daily/renew-pleroma-cert
chmod +x /etc/cron.daily/renew-pleroma-cert

# If everything worked the output should contain /etc/cron.daily/renew-pleroma-cert
run-parts --test /etc/cron.daily
```
Alpine:
```sh
# Restart nginx
rc-service nginx restart

@ -236,15 +231,25 @@ certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --

' > /etc/periodic/daily/renew-pleroma-cert
chmod +x /etc/periodic/daily/renew-pleroma-cert

# If everything worked this should output /etc/periodic/daily/renew-pleroma-cert
# If everything worked the output should contain /etc/cron.daily/renew-pleroma-cert
run-parts --test /etc/periodic/daily
```
### Running mix tasks

Throughout the wiki and guides there are a lot of references to mix tasks. Since `mix` is a build tool, you can't just call `mix pleroma.task`; instead you should call `pleroma_ctl`, stripping the pleroma/ecto namespace.

So for example, if the task is `mix pleroma.user set admin --admin`, you should run it like this:
```sh tab="Debian/Ubuntu"
```sh
# Restart nginx
su pleroma -s $SHELL -lc "./bin/pleroma_ctl user set admin --admin"
systemctl restart nginx

# Ensure the webroot method and post hook is working
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --dry-run --post-hook 'systemctl reload nginx'

# Add it to the daily cron
echo '#!/bin/sh
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --post-hook "systemctl reload nginx"
' > /etc/cron.daily/renew-pleroma-cert
chmod +x /etc/cron.daily/renew-pleroma-cert

# If everything worked the output should contain /etc/cron.daily/renew-pleroma-cert
run-parts --test /etc/cron.daily
```

## Create your first user and set as admin

@ -270,4 +275,3 @@ But you should **always check the release notes/changelog** in case there are co

* [Backup your instance](../administration/backup.md)
* [Hardening your instance](../configuration/hardening.md)
* [How to activate mediaproxy](../configuration/howto_mediaproxy.md)
* [Updating your instance](../administration/updating.md)

@ -4,69 +4,147 @@

defmodule Mix.Tasks.Pleroma.Config do
use Mix.Task

import Mix.Pleroma

alias Pleroma.ConfigDB
alias Pleroma.Repo
alias Pleroma.Web.AdminAPI.Config
@shortdoc "Manages the location of the config"
@moduledoc File.read!("docs/administration/CLI_tasks/config.md")

def run(["migrate_to_db"]) do
start_pleroma()
migrate_to_db()
if Pleroma.Config.get([:instance, :dynamic_configuration]) do
Application.get_all_env(:pleroma)
|> Enum.reject(fn {k, _v} -> k in [Pleroma.Repo, :env] end)
|> Enum.each(fn {k, v} ->
key = to_string(k) |> String.replace("Elixir.", "")

key =
if String.starts_with?(key, "Pleroma.") do
key
else
":" <> key
end

{:ok, _} = Config.update_or_create(%{group: "pleroma", key: key, value: v})
def run(["migrate_from_db" | options]) do
Mix.shell().info("#{key} is migrated.")
end)

Mix.shell().info("Settings migrated.")
else
Mix.shell().info(
"Migration is not allowed by config. You can change this behavior in instance settings."
)
end
end

def run(["migrate_from_db", env, delete?]) do
start_pleroma()

delete? = if delete? == "true", do: true, else: false
{opts, _} =
OptionParser.parse!(options,
if Pleroma.Config.get([:instance, :dynamic_configuration]) do
strict: [env: :string, delete: :boolean],
config_path = "config/#{env}.exported_from_db.secret.exs"
aliases: [d: :delete]

{:ok, file} = File.open(config_path, [:write, :utf8])
IO.write(file, "use Mix.Config\r\n")

Repo.all(Config)
|> Enum.each(fn config ->
IO.write(
file,
"config :#{config.group}, #{config.key}, #{inspect(Config.from_binary(config.value))}\r\n\r\n"
)

if delete? do
migrate_from_db(opts)
{:ok, _} = Repo.delete(config)
Mix.shell().info("#{config.key} deleted from DB.")
end
end

@spec migrate_to_db(Path.t() | nil) :: any()
def migrate_to_db(file_path \\ nil) do
if Pleroma.Config.get([:configurable_from_database]) do
config_file =
if file_path do
file_path
else
if Pleroma.Config.get(:release) do
Pleroma.Config.get(:config_path)
else
"config/#{Pleroma.Config.get(:env)}.secret.exs"
end
end

do_migrate_to_db(config_file)
else
migration_error()
end
end

defp do_migrate_to_db(config_file) do
if File.exists?(config_file) do
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE config;")
Ecto.Adapters.SQL.query!(Repo, "ALTER SEQUENCE config_id_seq RESTART;")

custom_config =
config_file
|> read_file()
|> elem(0)

custom_config
|> Keyword.keys()
|> Enum.each(&create(&1, custom_config))
else
shell_info("To migrate settings, you must define custom settings in #{config_file}.")
end
end

defp create(group, settings) do
group
|> Pleroma.Config.Loader.filter_group(settings)
|> Enum.each(fn {key, value} ->
key = inspect(key)
{:ok, _} = ConfigDB.update_or_create(%{group: inspect(group), key: key, value: value})

shell_info("Settings for key #{key} migrated.")
end)

File.close(file)
shell_info("Settings for group :#{group} migrated.")
end

defp migrate_from_db(opts) do
if Pleroma.Config.get([:configurable_from_database]) do
env = opts[:env] || "prod"

config_path =
if Pleroma.Config.get(:release) do
:config_path
|> Pleroma.Config.get()
|> Path.dirname()
else
"config"
end
|> Path.join("#{env}.exported_from_db.secret.exs")

file = File.open!(config_path, [:write, :utf8])

IO.write(file, config_header())

ConfigDB
|> Repo.all()
|> Enum.each(&write_and_delete(&1, file, opts[:delete]))

:ok = File.close(file)
System.cmd("mix", ["format", config_path])
else
Mix.shell().info(
migration_error()
"Migration is not allowed by config. You can change this behavior in instance settings."
end
end

defp migration_error do
shell_error(
"Migration is not allowed in config. You can change this behavior by setting `configurable_from_database` to true."
)
end

if Code.ensure_loaded?(Config.Reader) do
defp config_header, do: "import Config\r\n\r\n"
defp read_file(config_file), do: Config.Reader.read_imports!(config_file)
else
defp config_header, do: "use Mix.Config\r\n\r\n"
defp read_file(config_file), do: Mix.Config.eval!(config_file)
end

defp write_and_delete(config, file, delete?) do
config
|> write(file)
|> delete(delete?)
end

defp write(config, file) do
value =
config.value
|> ConfigDB.from_binary()
|> inspect(limit: :infinity)

IO.write(file, "config #{config.group}, #{config.key}, #{value}\r\n\r\n")

config
end

defp delete(config, true) do
{:ok, _} = Repo.delete(config)
shell_info("#{config.key} deleted from DB.")
end

defp delete(_config, _), do: :ok
end
end
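The new `migrate_from_db` clause replaces the old positional `env`/`delete?` arguments with flags. A quick illustration of how the `OptionParser.parse!/2` call shown above resolves them (example argument values only):

```elixir
# Illustration of the strict/aliases parsing used by `migrate_from_db` above.
{opts, _argv} =
  OptionParser.parse!(["--env", "prod", "-d"],
    strict: [env: :string, delete: :boolean],
    aliases: [d: :delete]
  )

# opts == [env: "prod", delete: true]
```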
@ -28,7 +28,7 @@ def run(_) do

defp do_run(implementation) do
start_pleroma()

with {descriptions, _paths} <- Mix.Config.eval!("config/description.exs"),
with descriptions <- Pleroma.Config.Loader.load("config/description.exs"),
{:ok, file_path} <-
Pleroma.Docs.Generator.process(
implementation,

@ -9,6 +9,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do

@moduledoc File.read!("docs/administration/CLI_tasks/emoji.md")

def run(["ls-packs" | args]) do
Mix.Pleroma.start_pleroma()
Application.ensure_all_started(:hackney)

{options, [], []} = parse_global_opts(args)

@ -35,6 +36,7 @@ def run(["ls-packs" | args]) do

end

def run(["get-packs" | args]) do
Mix.Pleroma.start_pleroma()
Application.ensure_all_started(:hackney)

{options, pack_names, []} = parse_global_opts(args)

lib/mix/tasks/pleroma/notification_settings.ex (new file, 83 lines)

@ -0,0 +1,83 @@

defmodule Mix.Tasks.Pleroma.NotificationSettings do
  @shortdoc "Enable&Disable privacy option for push notifications"
  @moduledoc """
  Example:

  > mix pleroma.notification_settings --privacy-option=false --nickname-users="parallel588" # set false only for parallel588 user
  > mix pleroma.notification_settings --privacy-option=true # set true for all users

  """

  use Mix.Task
  import Mix.Pleroma
  import Ecto.Query

  def run(args) do
    start_pleroma()

    {options, _, _} =
      OptionParser.parse(
        args,
        strict: [
          privacy_option: :boolean,
          email_users: :string,
          nickname_users: :string
        ]
      )

    privacy_option = Keyword.get(options, :privacy_option)

    if not is_nil(privacy_option) do
      privacy_option
      |> build_query(options)
      |> Pleroma.Repo.update_all([])
    end

    shell_info("Done")
  end

  defp build_query(privacy_option, options) do
    query =
      from(u in Pleroma.User,
        update: [
          set: [
            notification_settings:
              fragment(
                "jsonb_set(notification_settings, '{privacy_option}', ?)",
                ^privacy_option
              )
          ]
        ]
      )

    user_emails =
      options
      |> Keyword.get(:email_users, "")
      |> String.split(",")
      |> Enum.map(&String.trim(&1))
      |> Enum.reject(&(&1 == ""))

    query =
      if length(user_emails) > 0 do
        where(query, [u], u.email in ^user_emails)
      else
        query
      end

    user_nicknames =
      options
      |> Keyword.get(:nickname_users, "")
      |> String.split(",")
      |> Enum.map(&String.trim(&1))
      |> Enum.reject(&(&1 == ""))

    query =
      if length(user_nicknames) > 0 do
        where(query, [u], u.nickname in ^user_nicknames)
      else
        query
      end

    query
  end
end
@ -18,6 +18,7 @@ defmodule Mix.Tasks.Pleroma.RobotsTxt do

"""
def run(["disallow_all"]) do
Mix.Pleroma.start_pleroma()
static_dir = Pleroma.Config.get([:instance, :static_dir], "instance/static/")

if !File.exists?(static_dir) do

@ -8,7 +8,6 @@ defmodule Mix.Tasks.Pleroma.User do

alias Ecto.Changeset
alias Pleroma.User
alias Pleroma.UserInviteToken
alias Pleroma.Web.OAuth

@shortdoc "Manages Pleroma users"
@moduledoc File.read!("docs/administration/CLI_tasks/user.md")

@ -354,8 +353,7 @@ def run(["sign_out", nickname]) do

start_pleroma()

with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
OAuth.Token.delete_user_tokens(user)
User.global_sign_out(user)
OAuth.Authorization.delete_user_authorizations(user)

shell_info("#{nickname} signed out from all apps.")
else

@ -373,9 +371,9 @@ def run(["list"]) do

users
|> Enum.each(fn user ->
shell_info(
"#{user.nickname} moderator: #{user.info.is_moderator}, admin: #{user.info.is_admin}, locked: #{
"#{user.nickname} moderator: #{user.is_moderator}, admin: #{user.is_admin}, locked: #{
user.info.locked
user.locked
}, deactivated: #{user.info.deactivated}"
}, deactivated: #{user.deactivated}"
)
end)
end)

@ -393,10 +391,7 @@ defp set_moderator(user, value) do

end

defp set_admin(user, value) do
{:ok, user} =
{:ok, user} = User.admin_api_update(user, %{is_admin: value})
user
|> Changeset.change(%{is_admin: value})
|> User.update_and_set_cache()

shell_info("Admin status of #{user.nickname}: #{user.is_admin}")
user

@ -12,6 +12,7 @@ defmodule Pleroma.Activity do

alias Pleroma.Notification
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.ReportNote
alias Pleroma.ThreadMute
alias Pleroma.User

@ -29,7 +30,8 @@ defmodule Pleroma.Activity do

"Follow" => "follow",
"Announce" => "reblog",
"Like" => "favourite",
"Move" => "move"
"Move" => "move",
"EmojiReaction" => "pleroma:emoji_reaction"
}

@mastodon_to_ap_notification_types for {k, v} <- @mastodon_notification_types,

@ -48,6 +50,8 @@ defmodule Pleroma.Activity do

has_one(:user_actor, User, on_delete: :nothing, foreign_key: :id)
# This is a fake relation, do not use outside of with_preloaded_bookmark/get_bookmark
has_one(:bookmark, Bookmark)
# This is a fake relation, do not use outside of with_preloaded_report_notes
has_many(:report_notes, ReportNote)
has_many(:notifications, Notification, on_delete: :delete_all)

# Attention: this is a fake relation, don't try to preload it blindly and expect it to work!

@ -114,6 +118,16 @@ def with_preloaded_bookmark(query, %User{} = user) do

def with_preloaded_bookmark(query, _), do: query

def with_preloaded_report_notes(query) do
from([a] in query,
left_join: r in ReportNote,
on: a.id == r.activity_id,
preload: [report_notes: r]
)
end

def with_preloaded_report_notes(query, _), do: query

def with_set_thread_muted_field(query, %User{} = user) do
from([a] in query,
left_join: tm in ThreadMute,

@ -299,9 +313,7 @@ def restrict_deactivated_users(query) do

from(u in User.Query.build(deactivated: true), select: u.ap_id)
|> Repo.all()

from(activity in query,
Activity.Queries.exclude_authors(query, deactivated_users)
where: activity.actor not in ^deactivated_users
)
end

defdelegate search(user, query, options \\ []), to: Pleroma.Activity.Search

@ -12,6 +12,7 @@ defmodule Pleroma.Activity.Queries do

@type query :: Ecto.Queryable.t() | Activity.t()

alias Pleroma.Activity
alias Pleroma.User

@spec by_ap_id(query, String.t()) :: query
def by_ap_id(query \\ Activity, ap_id) do

@ -29,6 +30,11 @@ def by_actor(query \\ Activity, actor) do

)
end

@spec by_author(query, String.t()) :: query
def by_author(query \\ Activity, %User{ap_id: ap_id}) do
from(a in query, where: a.actor == ^ap_id)
end

@spec by_object_id(query, String.t() | [String.t()]) :: query
def by_object_id(query \\ Activity, object_id)

@ -72,4 +78,8 @@ def exclude_type(query \\ Activity, activity_type) do

where: fragment("(?)->>'type' != ?", activity.data, ^activity_type)
)
end

def exclude_authors(query \\ Activity, actors) do
from(activity in query, where: activity.actor not in ^actors)
end
end

@ -26,18 +26,23 @@ def search(user, search_query, options \\ []) do

|> query_with(index_type, search_query)
|> maybe_restrict_local(user)
|> maybe_restrict_author(author)
|> maybe_restrict_blocked(user)
|> Pagination.fetch_paginated(%{"offset" => offset, "limit" => limit}, :offset)
|> maybe_fetch(user, search_query)
end

def maybe_restrict_author(query, %User{} = author) do
from([a, o] in query,
Activity.Queries.by_author(query, author)
where: a.actor == ^author.ap_id
)
end

def maybe_restrict_author(query, _), do: query

def maybe_restrict_blocked(query, %User{} = user) do
Activity.Queries.exclude_authors(query, User.blocked_users_ap_ids(user))
end

def maybe_restrict_blocked(query, _), do: query

defp restrict_public(q) do
from([a, o] in q,
where: fragment("?->>'type' = 'Create'", a.data),

@ -86,7 +91,7 @@ defp maybe_fetch(activities, user, search_query) do

{:ok, object} <- Fetcher.fetch_object_from_id(search_query),
%Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
true <- Visibility.visible_for_user?(activity, user) do
activities ++ [activity]
[activity | activities]
else
_ -> activities
end

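The two helpers added to `Pleroma.Activity.Queries` above are plain query builders. A hedged usage sketch, not from this diff, with placeholder actor URLs:

```elixir
# Hypothetical composition of the new helpers; the ap_ids below are made up.
alias Pleroma.Activity

blocked = ["https://example.com/users/spammer"]

Activity
|> Activity.Queries.by_author(%Pleroma.User{ap_id: "https://example.com/users/alice"})
|> Activity.Queries.exclude_authors(blocked)
|> Pleroma.Repo.all()
```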
@ -5,6 +5,7 @@

defmodule Pleroma.Application do
import Cachex.Spec
use Application
require Logger

@name Mix.Project.config()[:name]
@version Mix.Project.config()[:version]

@ -30,8 +31,11 @@ def user_agent do

# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
Pleroma.HTML.compile_scrubbers()
Pleroma.Config.DeprecationWarnings.warn()
Pleroma.Repo.check_migrations_applied!()
setup_instrumenters()
load_custom_modules()

# Define workers and child supervisors to be supervised
children =

@ -67,6 +71,28 @@ def start(_type, _args) do

Supervisor.start_link(children, opts)
end

def load_custom_modules do
dir = Pleroma.Config.get([:modules, :runtime_dir])

if dir && File.exists?(dir) do
dir
|> Pleroma.Utils.compile_dir()
|> case do
{:error, _errors, _warnings} ->
raise "Invalid custom modules"

{:ok, modules, _warnings} ->
if @env != :test do
Enum.each(modules, fn mod ->
Logger.info("Custom module loaded: #{inspect(mod)}")
end)
end

:ok
end
end
end

defp setup_instrumenters do
require Prometheus.Registry

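`load_custom_modules/0` above only does something when a runtime module directory is configured. A minimal sketch of the corresponding setting, assuming the `[:modules, :runtime_dir]` key read by the code (the path is illustrative):

```elixir
# config/prod.secret.exs — point Pleroma at a directory of extra .ex files to compile at boot
config :pleroma, :modules, runtime_dir: "instance/modules"
```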
lib/pleroma/captcha/native.ex (new file, 35 lines)

@ -0,0 +1,35 @@

# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Captcha.Native do
  import Pleroma.Web.Gettext
  alias Pleroma.Captcha.Service
  @behaviour Service

  @impl Service
  def new do
    case Captcha.get() do
      {:timeout} ->
        %{error: dgettext("errors", "Captcha timeout")}

      {:ok, answer_data, img_binary} ->
        %{
          type: :native,
          token: token(),
          url: "data:image/png;base64," <> Base.encode64(img_binary),
          answer_data: answer_data
        }
    end
  end

  @impl Service
  def validate(_token, captcha, captcha) when not is_nil(captcha), do: :ok
  def validate(_token, _captcha, _answer), do: {:error, dgettext("errors", "Invalid CAPTCHA")}

  defp token do
    10
    |> :crypto.strong_rand_bytes()
    |> Base.url_encode64(padding: false)
  end
end
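Not part of the diff: a hedged round-trip sketch of how the two callbacks above fit together (the answer text is whatever the user reads from the generated image):

```elixir
# Illustrative only — assumes the Captcha application used by new/0 is available
# and that generation did not time out.
%{token: _token, answer_data: answer, url: _data_url} = Pleroma.Captcha.Native.new()

# The second argument is the user's input, the third the stored answer_data;
# they must match exactly for the `validate(_token, captcha, captcha)` clause to apply.
:ok = Pleroma.Captcha.Native.validate("ignored-token", answer, answer)
{:error, _msg} = Pleroma.Captcha.Native.validate("ignored-token", "wrong", answer)
```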
@ -65,4 +65,16 @@ def delete(key) do

def oauth_consumer_strategies, do: get([:auth, :oauth_consumer_strategies], [])

def oauth_consumer_enabled?, do: oauth_consumer_strategies() != []

def enforce_oauth_admin_scope_usage?, do: !!get([:auth, :enforce_oauth_admin_scope_usage])

def oauth_admin_scopes(scopes) when is_list(scopes) do
Enum.flat_map(
scopes,
fn scope ->
["admin:#{scope}"] ++
if enforce_oauth_admin_scope_usage?(), do: [], else: [scope]
end
)
end
end

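A quick sketch of what the new `oauth_admin_scopes/1` returns, assuming scope enforcement is left disabled so the plain scopes are kept as fallbacks:

```elixir
# With enforce_oauth_admin_scope_usage?() == false:
Pleroma.Config.oauth_admin_scopes(["read", "write"])
# => ["admin:read", "read", "admin:write", "write"]

# With enforcement enabled, only the admin-prefixed scopes remain:
# => ["admin:read", "admin:write"]
```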
lib/pleroma/config/config_db.ex (new file, 414 lines)
@ -0,0 +1,414 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.ConfigDB do
|
||||||
|
use Ecto.Schema
|
||||||
|
|
||||||
|
import Ecto.Changeset
|
||||||
|
import Ecto.Query
|
||||||
|
import Pleroma.Web.Gettext
|
||||||
|
|
||||||
|
alias __MODULE__
|
||||||
|
alias Pleroma.Repo
|
||||||
|
|
||||||
|
@type t :: %__MODULE__{}
|
||||||
|
|
||||||
|
@full_key_update [
|
||||||
|
{:pleroma, :ecto_repos},
|
||||||
|
{:quack, :meta},
|
||||||
|
{:mime, :types},
|
||||||
|
{:cors_plug, [:max_age, :methods, :expose, :headers]},
|
||||||
|
{:auto_linker, :opts},
|
||||||
|
{:swarm, :node_blacklist},
|
||||||
|
{:logger, :backends}
|
||||||
|
]
|
||||||
|
|
||||||
|
@full_subkey_update [
|
||||||
|
{:pleroma, :assets, :mascots},
|
||||||
|
{:pleroma, :emoji, :groups},
|
||||||
|
{:pleroma, :workers, :retries},
|
||||||
|
{:pleroma, :mrf_subchain, :match_actor},
|
||||||
|
{:pleroma, :mrf_keyword, :replace}
|
||||||
|
]
|
||||||
|
|
||||||
|
@regex ~r/^~r(?'delimiter'[\/|"'([{<]{1})(?'pattern'.+)[\/|"')\]}>]{1}(?'modifier'[uismxfU]*)/u
|
||||||
|
|
||||||
|
@delimiters ["/", "|", "\"", "'", {"(", ")"}, {"[", "]"}, {"{", "}"}, {"<", ">"}]
|
||||||
|
|
||||||
|
schema "config" do
|
||||||
|
field(:key, :string)
|
||||||
|
field(:group, :string)
|
||||||
|
field(:value, :binary)
|
||||||
|
field(:db, {:array, :string}, virtual: true, default: [])
|
||||||
|
|
||||||
|
timestamps()
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec get_all_as_keyword() :: keyword()
|
||||||
|
def get_all_as_keyword do
|
||||||
|
ConfigDB
|
||||||
|
|> select([c], {c.group, c.key, c.value})
|
||||||
|
|> Repo.all()
|
||||||
|
|> Enum.reduce([], fn {group, key, value}, acc ->
|
||||||
|
group = ConfigDB.from_string(group)
|
||||||
|
key = ConfigDB.from_string(key)
|
||||||
|
value = from_binary(value)
|
||||||
|
|
||||||
|
Keyword.update(acc, group, [{key, value}], &Keyword.merge(&1, [{key, value}]))
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec get_by_params(map()) :: ConfigDB.t() | nil
|
||||||
|
def get_by_params(params), do: Repo.get_by(ConfigDB, params)
|
||||||
|
|
||||||
|
@spec changeset(ConfigDB.t(), map()) :: Changeset.t()
|
||||||
|
def changeset(config, params \\ %{}) do
|
||||||
|
params = Map.put(params, :value, transform(params[:value]))
|
||||||
|
|
||||||
|
config
|
||||||
|
|> cast(params, [:key, :group, :value])
|
||||||
|
|> validate_required([:key, :group, :value])
|
||||||
|
|> unique_constraint(:key, name: :config_group_key_index)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
||||||
|
def create(params) do
|
||||||
|
%ConfigDB{}
|
||||||
|
|> changeset(params)
|
||||||
|
|> Repo.insert()
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec update(ConfigDB.t(), map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
||||||
|
def update(%ConfigDB{} = config, %{value: value}) do
|
||||||
|
config
|
||||||
|
|> changeset(%{value: value})
|
||||||
|
|> Repo.update()
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec get_db_keys(ConfigDB.t()) :: [String.t()]
|
||||||
|
def get_db_keys(%ConfigDB{} = config) do
|
||||||
|
config.value
|
||||||
|
|> ConfigDB.from_binary()
|
||||||
|
|> get_db_keys(config.key)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec get_db_keys(keyword(), any()) :: [String.t()]
|
||||||
|
def get_db_keys(value, key) do
|
||||||
|
if Keyword.keyword?(value) do
|
||||||
|
value |> Keyword.keys() |> Enum.map(&convert(&1))
|
||||||
|
else
|
||||||
|
[convert(key)]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec merge_group(atom(), atom(), keyword(), keyword()) :: keyword()
|
||||||
|
def merge_group(group, key, old_value, new_value) do
|
||||||
|
new_keys = to_map_set(new_value)
|
||||||
|
|
||||||
|
intersect_keys =
|
||||||
|
old_value |> to_map_set() |> MapSet.intersection(new_keys) |> MapSet.to_list()
|
||||||
|
|
||||||
|
merged_value = ConfigDB.merge(old_value, new_value)
|
||||||
|
|
||||||
|
@full_subkey_update
|
||||||
|
|> Enum.map(fn
|
||||||
|
{g, k, subkey} when g == group and k == key ->
|
||||||
|
if subkey in intersect_keys, do: subkey, else: []
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
[]
|
||||||
|
end)
|
||||||
|
|> List.flatten()
|
||||||
|
|> Enum.reduce(merged_value, fn subkey, acc ->
|
||||||
|
Keyword.put(acc, subkey, new_value[subkey])
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp to_map_set(keyword) do
|
||||||
|
keyword
|
||||||
|
|> Keyword.keys()
|
||||||
|
|> MapSet.new()
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec sub_key_full_update?(atom(), atom(), [Keyword.key()]) :: boolean()
|
||||||
|
def sub_key_full_update?(group, key, subkeys) do
|
||||||
|
Enum.any?(@full_subkey_update, fn {g, k, subkey} ->
|
||||||
|
g == group and k == key and subkey in subkeys
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec merge(keyword(), keyword()) :: keyword()
|
||||||
|
def merge(config1, config2) when is_list(config1) and is_list(config2) do
|
||||||
|
Keyword.merge(config1, config2, fn _, app1, app2 ->
|
||||||
|
if Keyword.keyword?(app1) and Keyword.keyword?(app2) do
|
||||||
|
Keyword.merge(app1, app2, &deep_merge/3)
|
||||||
|
else
|
||||||
|
app2
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp deep_merge(_key, value1, value2) do
|
||||||
|
if Keyword.keyword?(value1) and Keyword.keyword?(value2) do
|
||||||
|
Keyword.merge(value1, value2, &deep_merge/3)
|
||||||
|
else
|
||||||
|
value2
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec update_or_create(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
||||||
|
def update_or_create(params) do
|
||||||
|
search_opts = Map.take(params, [:group, :key])
|
||||||
|
|
||||||
|
with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
|
||||||
|
{:partial_update, true, config} <-
|
||||||
|
{:partial_update, can_be_partially_updated?(config), config},
|
||||||
|
old_value <- from_binary(config.value),
|
||||||
|
transformed_value <- do_transform(params[:value]),
|
||||||
|
{:can_be_merged, true, config} <- {:can_be_merged, is_list(transformed_value), config},
|
||||||
|
new_value <-
|
||||||
|
merge_group(
|
||||||
|
ConfigDB.from_string(config.group),
|
||||||
|
ConfigDB.from_string(config.key),
|
||||||
|
old_value,
|
||||||
|
transformed_value
|
||||||
|
) do
|
||||||
|
ConfigDB.update(config, %{value: new_value})
|
||||||
|
else
|
||||||
|
{reason, false, config} when reason in [:partial_update, :can_be_merged] ->
|
||||||
|
ConfigDB.update(config, params)
|
||||||
|
|
||||||
|
nil ->
|
||||||
|
ConfigDB.create(params)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp can_be_partially_updated?(%ConfigDB{} = config), do: not only_full_update?(config)
|
||||||
|
|
||||||
|
defp only_full_update?(%ConfigDB{} = config) do
|
||||||
|
config_group = ConfigDB.from_string(config.group)
|
||||||
|
config_key = ConfigDB.from_string(config.key)
|
||||||
|
|
||||||
|
Enum.any?(@full_key_update, fn
|
||||||
|
{group, key} when is_list(key) ->
|
||||||
|
config_group == group and config_key in key
|
||||||
|
|
||||||
|
{group, key} ->
|
||||||
|
config_group == group and config_key == key
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec delete(map()) :: {:ok, ConfigDB.t()} | {:error, Changeset.t()}
|
||||||
|
def delete(params) do
|
||||||
|
search_opts = Map.delete(params, :subkeys)
|
||||||
|
|
||||||
|
with %ConfigDB{} = config <- ConfigDB.get_by_params(search_opts),
|
||||||
|
{config, sub_keys} when is_list(sub_keys) <- {config, params[:subkeys]},
|
||||||
|
old_value <- from_binary(config.value),
|
||||||
|
keys <- Enum.map(sub_keys, &do_transform_string(&1)),
|
||||||
|
{:partial_remove, config, new_value} when new_value != [] <-
|
||||||
|
{:partial_remove, config, Keyword.drop(old_value, keys)} do
|
||||||
|
ConfigDB.update(config, %{value: new_value})
|
||||||
|
else
|
||||||
|
{:partial_remove, config, []} ->
|
||||||
|
Repo.delete(config)
|
||||||
|
|
||||||
|
{config, nil} ->
|
||||||
|
Repo.delete(config)
|
||||||
|
|
||||||
|
nil ->
|
||||||
|
err =
|
||||||
|
dgettext("errors", "Config with params %{params} not found", params: inspect(params))
|
||||||
|
|
||||||
|
{:error, err}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec from_binary(binary()) :: term()
|
||||||
|
def from_binary(binary), do: :erlang.binary_to_term(binary)
|
||||||
|
|
||||||
|
@spec from_binary_with_convert(binary()) :: any()
|
||||||
|
def from_binary_with_convert(binary) do
|
||||||
|
binary
|
||||||
|
|> from_binary()
|
||||||
|
|> do_convert()
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec from_string(String.t()) :: atom() | no_return()
|
||||||
|
def from_string(string), do: do_transform_string(string)
|
||||||
|
|
||||||
|
@spec convert(any()) :: any()
|
||||||
|
def convert(entity), do: do_convert(entity)
|
||||||
|
|
||||||
|
defp do_convert(entity) when is_list(entity) do
|
||||||
|
for v <- entity, into: [], do: do_convert(v)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert(%Regex{} = entity), do: inspect(entity)
|
||||||
|
|
||||||
|
defp do_convert(entity) when is_map(entity) do
|
||||||
|
for {k, v} <- entity, into: %{}, do: {do_convert(k), do_convert(v)}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert({:proxy_url, {type, :localhost, port}}) do
|
||||||
|
%{"tuple" => [":proxy_url", %{"tuple" => [do_convert(type), "localhost", port]}]}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert({:proxy_url, {type, host, port}}) when is_tuple(host) do
|
||||||
|
ip =
|
||||||
|
host
|
||||||
|
|> :inet_parse.ntoa()
|
||||||
|
|> to_string()
|
||||||
|
|
||||||
|
%{
|
||||||
|
"tuple" => [
|
||||||
|
":proxy_url",
|
||||||
|
%{"tuple" => [do_convert(type), ip, port]}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert({:proxy_url, {type, host, port}}) do
|
||||||
|
%{
|
||||||
|
"tuple" => [
|
||||||
|
":proxy_url",
|
||||||
|
%{"tuple" => [do_convert(type), to_string(host), port]}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert({:partial_chain, entity}), do: %{"tuple" => [":partial_chain", inspect(entity)]}
|
||||||
|
|
||||||
|
defp do_convert(entity) when is_tuple(entity) do
|
||||||
|
value =
|
||||||
|
entity
|
||||||
|
|> Tuple.to_list()
|
||||||
|
|> do_convert()
|
||||||
|
|
||||||
|
%{"tuple" => value}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity) do
|
||||||
|
entity
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert(entity)
|
||||||
|
when is_atom(entity) and entity in [:"tlsv1.1", :"tlsv1.2", :"tlsv1.3"] do
|
||||||
|
":#{entity}"
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_convert(entity) when is_atom(entity), do: inspect(entity)
|
||||||
|
|
||||||
|
defp do_convert(entity) when is_binary(entity), do: entity
|
||||||
|
|
||||||
|
@spec transform(any()) :: binary() | no_return()
|
||||||
|
def transform(entity) when is_binary(entity) or is_map(entity) or is_list(entity) do
|
||||||
|
entity
|
||||||
|
|> do_transform()
|
||||||
|
|> to_binary()
|
||||||
|
end
|
||||||
|
|
||||||
|
def transform(entity), do: to_binary(entity)
|
||||||
|
|
||||||
|
@spec transform_with_out_binary(any()) :: any()
|
||||||
|
def transform_with_out_binary(entity), do: do_transform(entity)
|
||||||
|
|
||||||
|
@spec to_binary(any()) :: binary()
|
||||||
|
def to_binary(entity), do: :erlang.term_to_binary(entity)
|
||||||
|
|
||||||
|
defp do_transform(%Regex{} = entity), do: entity
|
||||||
|
|
||||||
|
defp do_transform(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}) do
|
||||||
|
{:proxy_url, {do_transform_string(type), parse_host(host), port}}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
|
||||||
|
{partial_chain, []} =
|
||||||
|
entity
|
||||||
|
|> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "")
|
||||||
|
|> Code.eval_string()
|
||||||
|
|
||||||
|
{:partial_chain, partial_chain}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform(%{"tuple" => entity}) do
|
||||||
|
Enum.reduce(entity, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform(entity) when is_map(entity) do
|
||||||
|
for {k, v} <- entity, into: %{}, do: {do_transform(k), do_transform(v)}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform(entity) when is_list(entity) do
|
||||||
|
for v <- entity, into: [], do: do_transform(v)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform(entity) when is_binary(entity) do
|
||||||
|
entity
|
||||||
|
|> String.trim()
|
||||||
|
|> do_transform_string()
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform(entity), do: entity
|
||||||
|
|
||||||
|
defp parse_host("localhost"), do: :localhost
|
||||||
|
|
||||||
|
defp parse_host(host) do
|
||||||
|
charlist = to_charlist(host)
|
||||||
|
|
||||||
|
case :inet.parse_address(charlist) do
|
||||||
|
{:error, :einval} ->
|
||||||
|
charlist
|
||||||
|
|
||||||
|
{:ok, ip} ->
|
||||||
|
ip
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp find_valid_delimiter([], _string, _) do
|
||||||
|
raise(ArgumentError, message: "valid delimiter for Regex expression not found")
|
||||||
|
end
|
||||||
|
|
||||||
|
defp find_valid_delimiter([{leading, closing} = delimiter | others], pattern, regex_delimiter)
|
||||||
|
when is_tuple(delimiter) do
|
||||||
|
if String.contains?(pattern, closing) do
|
||||||
|
find_valid_delimiter(others, pattern, regex_delimiter)
|
||||||
|
else
|
||||||
|
{:ok, {leading, closing}}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do
|
||||||
|
if String.contains?(pattern, delimiter) do
|
||||||
|
find_valid_delimiter(others, pattern, regex_delimiter)
|
||||||
|
else
|
||||||
|
{:ok, {delimiter, delimiter}}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform_string("~r" <> _pattern = regex) do
|
||||||
|
with %{"modifier" => modifier, "pattern" => pattern, "delimiter" => regex_delimiter} <-
|
||||||
|
Regex.named_captures(@regex, regex),
|
||||||
|
{:ok, {leading, closing}} <- find_valid_delimiter(@delimiters, pattern, regex_delimiter),
|
||||||
|
{result, _} <- Code.eval_string("~r#{leading}#{pattern}#{closing}#{modifier}") do
|
||||||
|
result
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_transform_string(":" <> atom), do: String.to_atom(atom)
|
||||||
|
|
||||||
|
defp do_transform_string(value) do
|
||||||
|
if is_module_name?(value) do
|
||||||
|
String.to_existing_atom("Elixir." <> value)
|
||||||
|
else
|
||||||
|
value
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec is_module_name?(String.t()) :: boolean()
|
||||||
|
def is_module_name?(string) do
|
||||||
|
Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or
|
||||||
|
string in ["Oban", "Ueberauth", "ExSyslogger"]
|
||||||
|
end
|
||||||
|
end
|
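The `Pleroma.ConfigDB` schema above stores each setting as an inspected group/key string plus a term-to-binary value. A hedged sketch of writing and reading one entry (names and values are illustrative, following the ":atom"/"Module" string convention used by the config mix task):

```elixir
# Illustrative only — not a value taken from this diff.
{:ok, config} =
  Pleroma.ConfigDB.update_or_create(%{
    group: ":pleroma",
    key: ":instance",
    value: [name: "My instance", limit: 5_000]
  })

Pleroma.ConfigDB.from_binary(config.value)
# => [name: "My instance", limit: 5_000]
```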
lib/pleroma/config/holder.ex (new file, 16 lines)

@ -0,0 +1,16 @@

# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Config.Holder do
  @config Pleroma.Config.Loader.load_and_merge()

  @spec config() :: keyword()
  def config, do: @config

  @spec config(atom()) :: any()
  def config(group), do: @config[group]

  @spec config(atom(), atom()) :: any()
  def config(group, key), do: @config[group][key]
end
lib/pleroma/config/loader.ex (new file, 59 lines)

@ -0,0 +1,59 @@

# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Config.Loader do
  @paths ["config/config.exs", "config/#{Mix.env()}.exs"]

  @reject_keys [
    Pleroma.Repo,
    Pleroma.Web.Endpoint,
    :env,
    :configurable_from_database,
    :database,
    :swarm
  ]

  if Code.ensure_loaded?(Config.Reader) do
    @spec load(Path.t()) :: keyword()
    def load(path), do: Config.Reader.read!(path)

    defp do_merge(conf1, conf2), do: Config.Reader.merge(conf1, conf2)
  else
    # support for Elixir less than 1.9
    @spec load(Path.t()) :: keyword()
    def load(path) do
      path
      |> Mix.Config.eval!()
      |> elem(0)
    end

    defp do_merge(conf1, conf2), do: Mix.Config.merge(conf1, conf2)
  end

  @spec load_and_merge() :: keyword()
  def load_and_merge do
    all_paths =
      if Pleroma.Config.get(:release),
        do: @paths ++ ["config/releases.exs"],
        else: @paths

    all_paths
    |> Enum.map(&load(&1))
    |> Enum.reduce([], &do_merge(&2, &1))
    |> filter()
  end

  defp filter(configs) do
    configs
    |> Keyword.keys()
    |> Enum.reduce([], &Keyword.put(&2, &1, filter_group(&1, configs)))
  end

  @spec filter_group(atom(), keyword()) :: keyword()
  def filter_group(group, configs) do
    Enum.reject(configs[group], fn {key, _v} ->
      key in @reject_keys or (group == :phoenix and key == :serve_endpoints)
    end)
  end
end
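A short sketch of how the loader above is meant to be used (the path is one of the defaults from `@paths`; the result is an ordinary keyword list):

```elixir
# Illustrative: load one config file and strip the keys the loader always rejects.
config = Pleroma.Config.Loader.load("config/config.exs")

pleroma_section = Pleroma.Config.Loader.filter_group(:pleroma, config)
# pleroma_section no longer contains Pleroma.Repo, Pleroma.Web.Endpoint, :env, ...
```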
@@ -4,56 +4,111 @@
defmodule Pleroma.Config.TransferTask do
  use Task

  alias Pleroma.ConfigDB
  alias Pleroma.Repo

  require Logger

  def start_link(_) do
    load_and_update_env()
    if Pleroma.Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Repo)
    :ignore
  end

  @spec load_and_update_env([ConfigDB.t()]) :: :ok | false
  def load_and_update_env(deleted \\ []) do
    with true <- Pleroma.Config.get(:configurable_from_database),
         true <- Ecto.Adapters.SQL.table_exists?(Repo, "config"),
         started_applications <- Application.started_applications() do
      # We need to restart applications for loaded settings take effect
      in_db = Repo.all(ConfigDB)

      with_deleted = in_db ++ deleted

      with_deleted
      |> Enum.map(&merge_and_update(&1))
      |> Enum.uniq()
      # TODO: some problem with prometheus after restart!
      |> Enum.reject(&(&1 in [:pleroma, nil, :prometheus]))
      |> Enum.each(&restart(started_applications, &1))

      :ok
    end
  end

  defp merge_and_update(setting) do
    try do
      key = ConfigDB.from_string(setting.key)
      group = ConfigDB.from_string(setting.group)

      default = Pleroma.Config.Holder.config(group, key)
      merged_value = merge_value(setting, default, group, key)

      :ok = update_env(group, key, merged_value)

      if group != :logger do
        group
      else
        # change logger configuration in runtime, without restart
        if Keyword.keyword?(merged_value) and
             key not in [:compile_time_application, :backends, :compile_time_purge_matching] do
          Logger.configure_backend(key, merged_value)
        else
          Logger.configure([{key, merged_value}])
        end

        nil
      end
    rescue
      error ->
        error_msg =
          "updating env causes error, group: " <>
            inspect(setting.group) <>
            " key: " <>
            inspect(setting.key) <>
            " value: " <>
            inspect(ConfigDB.from_binary(setting.value)) <> " error: " <> inspect(error)

        Logger.warn(error_msg)

        nil
    end
  end

  defp merge_value(%{__meta__: %{state: :deleted}}, default, _group, _key), do: default

  defp merge_value(setting, default, group, key) do
    value = ConfigDB.from_binary(setting.value)

    if can_be_merged?(default, value) do
      ConfigDB.merge_group(group, key, default, value)
    else
      value
    end
  end

  defp update_env(group, key, nil), do: Application.delete_env(group, key)
  defp update_env(group, key, value), do: Application.put_env(group, key, value)

  defp restart(started_applications, app) do
    with {^app, _, _} <- List.keyfind(started_applications, app, 0),
         :ok <- Application.stop(app) do
      :ok = Application.start(app)
    else
      nil ->
        Logger.warn("#{app} is not started.")

      error ->
        error
        |> inspect()
        |> Logger.warn()
    end
  end

  defp can_be_merged?(val1, val2) when is_list(val1) and is_list(val2) do
    Keyword.keyword?(val1) and Keyword.keyword?(val2)
  end

  defp can_be_merged?(_val1, _val2), do: false
end
@@ -64,11 +64,13 @@ def mark_as_read(%User{} = user, %Conversation{} = conversation) do
   end
 
   def mark_as_read(participation) do
-    participation
-    |> read_cng(%{read: true})
-    |> Repo.update()
+    __MODULE__
+    |> where(id: ^participation.id)
+    |> update(set: [read: true])
+    |> select([p], p)
+    |> Repo.update_all([])
     |> case do
-      {:ok, participation} ->
+      {1, [participation]} ->
         participation = Repo.preload(participation, :user)
         User.set_unread_conversation_count(participation.user)
         {:ok, participation}
@@ -6,68 +6,116 @@ def process(implementation, descriptions) do
    implementation.process(descriptions)
  end

  @spec list_modules_in_dir(String.t(), String.t()) :: [module()]
  def list_modules_in_dir(dir, start) do
    with {:ok, files} <- File.ls(dir) do
      files
      |> Enum.filter(&String.ends_with?(&1, ".ex"))
      |> Enum.map(fn filename ->
        module = filename |> String.trim_trailing(".ex") |> Macro.camelize()
        String.to_existing_atom(start <> module)
      end)
    end
  end

  @doc """
  Converts:
  - atoms to strings with leading `:`
  - module names to strings, without leading `Elixir.`
  - add humanized labels to `keys` if label is not defined, e.g. `:instance` -> `Instance`
  """
  @spec convert_to_strings([map()]) :: [map()]
  def convert_to_strings(descriptions) do
    Enum.map(descriptions, &format_entity(&1))
  end

  defp format_entity(entity) do
    entity
    |> format_key()
    |> Map.put(:group, atom_to_string(entity[:group]))
    |> format_children()
  end

  defp format_key(%{key: key} = entity) do
    entity
    |> Map.put(:key, atom_to_string(key))
    |> Map.put(:label, entity[:label] || humanize(key))
  end

  defp format_key(%{group: group} = entity) do
    Map.put(entity, :label, entity[:label] || humanize(group))
  end

  defp format_key(entity), do: entity

  defp format_children(%{children: children} = entity) do
    Map.put(entity, :children, Enum.map(children, &format_child(&1)))
  end

  defp format_children(entity), do: entity

  defp format_child(%{suggestions: suggestions} = entity) do
    entity
    |> Map.put(:suggestions, format_suggestions(suggestions))
    |> format_key()
    |> format_group()
    |> format_children()
  end

  defp format_child(entity) do
    entity
    |> format_key()
    |> format_group()
    |> format_children()
  end

  defp format_group(%{group: group} = entity) do
    Map.put(entity, :group, format_suggestion(group))
  end

  defp format_group(entity), do: entity

  defp atom_to_string(entity) when is_binary(entity), do: entity

  defp atom_to_string(entity) when is_atom(entity), do: inspect(entity)

  defp humanize(entity) do
    string = inspect(entity)

    if String.starts_with?(string, ":"),
      do: Phoenix.Naming.humanize(entity),
      else: string
  end

  defp format_suggestions([]), do: []

  defp format_suggestions([suggestion | tail]) do
    [format_suggestion(suggestion) | format_suggestions(tail)]
  end

  defp format_suggestion(entity) when is_atom(entity) do
    atom_to_string(entity)
  end

  defp format_suggestion([head | tail] = entity) when is_list(entity) do
    [format_suggestion(head) | format_suggestions(tail)]
  end

  defp format_suggestion(entity) when is_tuple(entity) do
    format_suggestions(Tuple.to_list(entity)) |> List.to_tuple()
  end

  defp format_suggestion(entity), do: entity
end

defimpl Jason.Encoder, for: Tuple do
  def encode(tuple, opts), do: Jason.Encode.list(Tuple.to_list(tuple), opts)
end

defimpl Jason.Encoder, for: [Regex, Function] do
  def encode(term, opts), do: Jason.Encode.string(inspect(term), opts)
end

defimpl String.Chars, for: Regex do
  def to_string(term), do: inspect(term)
end
@@ -3,18 +3,22 @@ defmodule Pleroma.Docs.JSON do
   @spec process(keyword()) :: {:ok, String.t()}
   def process(descriptions) do
-    config_path = "docs/generate_config.json"
-
-    with {:ok, file} <- File.open(config_path, [:write, :utf8]),
-         json <- generate_json(descriptions),
+    with path <- "docs/generated_config.json",
+         {:ok, file} <- File.open(path, [:write, :utf8]),
+         formatted_descriptions <-
+           Pleroma.Docs.Generator.convert_to_strings(descriptions),
+         json <- Jason.encode!(formatted_descriptions),
          :ok <- IO.write(file, json),
          :ok <- File.close(file) do
-      {:ok, config_path}
+      {:ok, path}
     end
   end
 
-  @spec generate_json([keyword()]) :: String.t()
-  def generate_json(descriptions) do
-    Jason.encode!(descriptions)
+  def compile do
+    with config <- Pleroma.Config.Loader.load("config/description.exs") do
+      config[:pleroma][:config_description]
+      |> Pleroma.Docs.Generator.convert_to_strings()
+      |> Jason.encode!()
+    end
   end
 end
@@ -3,6 +3,23 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 
 defmodule Pleroma.HTML do
+  # Scrubbers are compiled on boot so they can be configured in OTP releases
+  #  @on_load :compile_scrubbers
+
+  def compile_scrubbers do
+    dir = Path.join(:code.priv_dir(:pleroma), "scrubbers")
+
+    dir
+    |> Pleroma.Utils.compile_dir()
+    |> case do
+      {:error, _errors, _warnings} ->
+        raise "Compiling scrubbers failed"
+
+      {:ok, _modules, _warnings} ->
+        :ok
+    end
+  end
+
   defp get_scrubbers(scrubber) when is_atom(scrubber), do: [scrubber]
   defp get_scrubbers(scrubbers) when is_list(scrubbers), do: scrubbers
   defp get_scrubbers(_), do: [Pleroma.HTML.Scrubber.Default]
@@ -99,216 +116,3 @@ def extract_first_external_url(object, content) do
     end)
   end
 end
[The remaining 213 removed lines of this hunk are the Pleroma.HTML.Scrubber.TwitterText, Pleroma.HTML.Scrubber.Default, Pleroma.HTML.Transform.MediaProxy, and Pleroma.HTML.Scrubber.LinksOnly module definitions, which this commit deletes from the Pleroma.HTML source file.]
@@ -128,17 +128,35 @@ def insert_log(%{
           {:ok, ModerationLog} | {:error, any}
   def insert_log(%{
         actor: %User{} = actor,
-        action: "report_response",
+        action: "report_note",
         subject: %Activity{} = subject,
         text: text
       }) do
     %ModerationLog{
       data: %{
         "actor" => user_to_map(actor),
-        "action" => "report_response",
+        "action" => "report_note",
         "subject" => report_to_map(subject),
-        "text" => text,
-        "message" => ""
+        "text" => text
+      }
+    }
+    |> insert_log_entry_with_message()
+  end
+
+  @spec insert_log(%{actor: User, subject: Activity, action: String.t(), text: String.t()}) ::
+          {:ok, ModerationLog} | {:error, any}
+  def insert_log(%{
+        actor: %User{} = actor,
+        action: "report_note_delete",
+        subject: %Activity{} = subject,
+        text: text
+      }) do
+    %ModerationLog{
+      data: %{
+        "actor" => user_to_map(actor),
+        "action" => "report_note_delete",
+        "subject" => report_to_map(subject),
+        "text" => text
       }
     }
     |> insert_log_entry_with_message()
@@ -556,12 +574,24 @@ def get_log_entry_message(%ModerationLog{
   def get_log_entry_message(%ModerationLog{
         data: %{
           "actor" => %{"nickname" => actor_nickname},
-          "action" => "report_response",
+          "action" => "report_note",
           "subject" => %{"id" => subject_id, "type" => "report"},
           "text" => text
         }
       }) do
-    "@#{actor_nickname} responded with '#{text}' to report ##{subject_id}"
+    "@#{actor_nickname} added note '#{text}' to report ##{subject_id}"
+  end
+
+  @spec get_log_entry_message(ModerationLog) :: String.t()
+  def get_log_entry_message(%ModerationLog{
+        data: %{
+          "actor" => %{"nickname" => actor_nickname},
+          "action" => "report_note_delete",
+          "subject" => %{"id" => subject_id, "type" => "report"},
+          "text" => text
+        }
+      }) do
+    "@#{actor_nickname} deleted note '#{text}' from report ##{subject_id}"
   end
 
   @spec get_log_entry_message(ModerationLog) :: String.t()
@@ -142,10 +142,28 @@ defp exclude_visibility(query, %{exclude_visibilities: visibility})
        when is_list(visibility) do
     if Enum.all?(visibility, &(&1 in @valid_visibilities)) do
       query
+      |> join(:left, [n, a], mutated_activity in Pleroma.Activity,
+        on:
+          fragment("?->>'context'", a.data) ==
+            fragment("?->>'context'", mutated_activity.data) and
+            fragment("(?->>'type' = 'Like' or ?->>'type' = 'Announce')", a.data, a.data) and
+            fragment("?->>'type'", mutated_activity.data) == "Create",
+        as: :mutated_activity
+      )
       |> where(
-        [n, a],
+        [n, a, mutated_activity: mutated_activity],
         not fragment(
-          "activity_visibility(?, ?, ?) = ANY (?)",
+          """
+          CASE WHEN (?->>'type') = 'Like' or (?->>'type') = 'Announce'
+          THEN (activity_visibility(?, ?, ?) = ANY (?))
+          ELSE (activity_visibility(?, ?, ?) = ANY (?)) END
+          """,
+          a.data,
+          a.data,
+          mutated_activity.actor,
+          mutated_activity.recipients,
+          mutated_activity.data,
+          ^visibility,
           a.actor,
           a.recipients,
           a.data,
@@ -160,17 +178,7 @@ defp exclude_visibility(query, %{exclude_visibilities: visibility})
 
   defp exclude_visibility(query, %{exclude_visibilities: visibility})
        when visibility in @valid_visibilities do
-    query
-    |> where(
-      [n, a],
-      not fragment(
-        "activity_visibility(?, ?, ?) = (?)",
-        a.actor,
-        a.recipients,
-        a.data,
-        ^visibility
-      )
-    )
+    exclude_visibility(query, [visibility])
   end
 
   defp exclude_visibility(query, %{exclude_visibilities: visibility})
@@ -315,7 +323,7 @@ def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = act
   end
 
   def create_notifications(%Activity{data: %{"type" => type}} = activity)
-      when type in ["Like", "Announce", "Follow", "Move"] do
+      when type in ["Like", "Announce", "Follow", "Move", "EmojiReaction"] do
     notifications =
       activity
       |> get_notified_from_activity()
@@ -346,7 +354,7 @@ def create_notification(%Activity{} = activity, %User{} = user) do
   def get_notified_from_activity(activity, local_only \\ true)
 
   def get_notified_from_activity(%Activity{data: %{"type" => type}} = activity, local_only)
-      when type in ["Create", "Like", "Announce", "Follow", "Move"] do
+      when type in ["Create", "Like", "Announce", "Follow", "Move", "EmojiReaction"] do
     []
     |> Utils.maybe_notify_to_recipients(activity)
     |> Utils.maybe_notify_mentioned_recipients(activity)
@@ -379,7 +387,7 @@ def skip?(:self, activity, user) do
   def skip?(
         :followers,
         activity,
-        %{notification_settings: %{"followers" => false}} = user
+        %{notification_settings: %{followers: false}} = user
       ) do
     actor = activity.data["actor"]
     follower = User.get_cached_by_ap_id(actor)
@@ -389,14 +397,14 @@ def skip?(
   def skip?(
         :non_followers,
         activity,
-        %{notification_settings: %{"non_followers" => false}} = user
+        %{notification_settings: %{non_followers: false}} = user
       ) do
     actor = activity.data["actor"]
     follower = User.get_cached_by_ap_id(actor)
     !User.following?(follower, user)
   end
 
-  def skip?(:follows, activity, %{notification_settings: %{"follows" => false}} = user) do
+  def skip?(:follows, activity, %{notification_settings: %{follows: false}} = user) do
     actor = activity.data["actor"]
     followed = User.get_cached_by_ap_id(actor)
     User.following?(user, followed)
@@ -405,7 +413,7 @@ def skip?(:follows, activity, %{notification_settings: %{"follows" => false}} =
   def skip?(
         :non_follows,
         activity,
-        %{notification_settings: %{"non_follows" => false}} = user
+        %{notification_settings: %{non_follows: false}} = user
       ) do
     actor = activity.data["actor"]
     followed = User.get_cached_by_ap_id(actor)
@@ -17,12 +17,33 @@ defmodule Pleroma.Object do
 
   require Logger
 
+  @type t() :: %__MODULE__{}
+
+  @derive {Jason.Encoder, only: [:data]}
+
   schema "objects" do
     field(:data, :map)
 
     timestamps()
   end
 
+  def with_joined_activity(query, activity_type \\ "Create", join_type \\ :inner) do
+    object_position = Map.get(query.aliases, :object, 0)
+
+    join(query, join_type, [{object, object_position}], a in Activity,
+      on:
+        fragment(
+          "COALESCE(?->'object'->>'id', ?->>'object') = (? ->> 'id') AND (?->>'type' = ?) ",
+          a.data,
+          a.data,
+          object.data,
+          a.data,
+          ^activity_type
+        ),
+      as: :object_activity
+    )
+  end
+
   def create(data) do
     Object.change(%Object{}, %{data: data})
     |> Repo.insert()
@@ -62,6 +83,20 @@ def get_by_ap_id(ap_id) do
     Repo.one(from(object in Object, where: fragment("(?)->>'id' = ?", object.data, ^ap_id)))
   end
 
+  @doc """
+  Get a single attachment by it's name and href
+  """
+  @spec get_attachment_by_name_and_href(String.t(), String.t()) :: Object.t() | nil
+  def get_attachment_by_name_and_href(name, href) do
+    query =
+      from(o in Object,
+        where: fragment("(?)->>'name' = ?", o.data, ^name),
+        where: fragment("(?)->>'href' = ?", o.data, ^href)
+      )
+
+    Repo.one(query)
+  end
+
   defp warn_on_no_object_preloaded(ap_id) do
     "Object.normalize() called without preloaded object (#{inspect(ap_id)}). Consider preloading the object"
     |> Logger.debug()
@@ -149,7 +184,11 @@ def delete(%Object{data: %{"id" => id}} = object) do
     with {:ok, _obj} = swap_object_with_tombstone(object),
          deleted_activity = Activity.delete_all_by_object_ap_id(id),
          {:ok, true} <- Cachex.del(:object_cache, "object:#{id}"),
-         {:ok, _} <- Cachex.del(:web_resp_cache, URI.parse(id).path) do
+         {:ok, _} <- Cachex.del(:web_resp_cache, URI.parse(id).path),
+         {:ok, _} <-
+           Pleroma.Workers.AttachmentsCleanupWorker.enqueue("cleanup_attachments", %{
+             "object" => object
+           }) do
       {:ok, object, deleted_activity}
     end
   end
@@ -117,6 +117,9 @@ def fetch_object_from_id!(id, options \\ []) do
       {:error, %Tesla.Mock.Error{}} ->
         nil
 
+      {:error, "Object has been deleted"} ->
+        nil
+
       e ->
         Logger.error("Error while fetching #{id}: #{inspect(e)}")
         nil
@@ -154,7 +157,7 @@ defp maybe_date_fetch(headers, date) do
   end
 
   def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do
-    Logger.info("Fetching object #{id} via AP")
+    Logger.debug("Fetching object #{id} via AP")
 
     date = Pleroma.Signature.signed_date()
@@ -13,60 +13,66 @@ defmodule Pleroma.Pagination do
  alias Pleroma.Repo

  @default_limit 20
  @page_keys ["max_id", "min_id", "limit", "since_id", "order"]

  def page_keys, do: @page_keys

  def fetch_paginated(query, params, type \\ :keyset, table_binding \\ nil)

  def fetch_paginated(query, %{"total" => true} = params, :keyset, table_binding) do
    total = Repo.aggregate(query, :count, :id)

    %{
      total: total,
      items: fetch_paginated(query, Map.drop(params, ["total"]), :keyset, table_binding)
    }
  end

  def fetch_paginated(query, params, :keyset, table_binding) do
    options = cast_params(params)

    query
    |> paginate(options, :keyset, table_binding)
    |> Repo.all()
    |> enforce_order(options)
  end

  def fetch_paginated(query, %{"total" => true} = params, :offset, table_binding) do
    total =
      query
      |> Ecto.Query.exclude(:left_join)
      |> Repo.aggregate(:count, :id)

    %{
      total: total,
      items: fetch_paginated(query, Map.drop(params, ["total"]), :offset, table_binding)
    }
  end

  def fetch_paginated(query, params, :offset, table_binding) do
    options = cast_params(params)

    query
    |> paginate(options, :offset, table_binding)
    |> Repo.all()
  end

  def paginate(query, options, method \\ :keyset, table_binding \\ nil)

  def paginate(query, options, :keyset, table_binding) do
    query
    |> restrict(:min_id, options, table_binding)
    |> restrict(:since_id, options, table_binding)
    |> restrict(:max_id, options, table_binding)
    |> restrict(:order, options, table_binding)
    |> restrict(:limit, options, table_binding)
  end

  def paginate(query, options, :offset, table_binding) do
    query
    |> restrict(:order, options, table_binding)
    |> restrict(:offset, options, table_binding)
    |> restrict(:limit, options, table_binding)
  end

  defp cast_params(params) do
@@ -75,7 +81,8 @@ defp cast_params(params) do
       since_id: :string,
       max_id: :string,
       offset: :integer,
-      limit: :integer
+      limit: :integer,
+      skip_order: :boolean
     }
 
     params =
@@ -88,38 +95,48 @@ defp cast_params(params) do
    changeset.changes
  end

  defp restrict(query, :min_id, %{min_id: min_id}, table_binding) do
    where(query, [{q, table_position(query, table_binding)}], q.id > ^min_id)
  end

  defp restrict(query, :since_id, %{since_id: since_id}, table_binding) do
    where(query, [{q, table_position(query, table_binding)}], q.id > ^since_id)
  end

  defp restrict(query, :max_id, %{max_id: max_id}, table_binding) do
    where(query, [{q, table_position(query, table_binding)}], q.id < ^max_id)
  end

  defp restrict(query, :order, %{skip_order: true}, _), do: query

  defp restrict(query, :order, %{min_id: _}, table_binding) do
    order_by(
      query,
      [{u, table_position(query, table_binding)}],
      fragment("? asc nulls last", u.id)
    )
  end

  defp restrict(query, :order, _options, table_binding) do
    order_by(
      query,
      [{u, table_position(query, table_binding)}],
      fragment("? desc nulls last", u.id)
    )
  end

  defp restrict(query, :offset, %{offset: offset}, _table_binding) do
    offset(query, ^offset)
  end

  defp restrict(query, :limit, options, _table_binding) do
    limit = Map.get(options, :limit, @default_limit)

    query
    |> limit(^limit)
  end

  defp restrict(query, _, _, _), do: query

  defp enforce_order(result, %{min_id: _}) do
    result
@@ -127,4 +144,10 @@ defp enforce_order(result, %{min_id: _}) do
   end
 
   defp enforce_order(result, _), do: result
+
+  defp table_position(%Ecto.Query{} = query, binding_name) do
+    Map.get(query.aliases, binding_name, 0)
+  end
+
+  defp table_position(_, _), do: 0
 end
@@ -6,6 +6,7 @@ defmodule Pleroma.Plugs.OAuthScopesPlug do
   import Plug.Conn
   import Pleroma.Web.Gettext
 
+  alias Pleroma.Config
   alias Pleroma.Plugs.EnsurePublicOrAuthenticatedPlug
 
   @behaviour Plug
@@ -15,16 +16,15 @@ def init(%{scopes: _} = options), do: options
   def call(%Plug.Conn{assigns: assigns} = conn, %{scopes: scopes} = options) do
     op = options[:op] || :|
     token = assigns[:token]
-    matched_scopes = token && filter_descendants(scopes, token.scopes)
+
+    scopes = transform_scopes(scopes, options)
+    matched_scopes = (token && filter_descendants(scopes, token.scopes)) || []
 
     cond do
-      is_nil(token) ->
-        maybe_perform_instance_privacy_check(conn, options)
-
-      op == :| && Enum.any?(matched_scopes) ->
+      token && op == :| && Enum.any?(matched_scopes) ->
         conn
 
-      op == :& && matched_scopes == scopes ->
+      token && op == :& && matched_scopes == scopes ->
         conn
 
       options[:fallback] == :proceed_unauthenticated ->
@@ -60,6 +60,15 @@ def filter_descendants(scopes, supported_scopes) do
     )
   end
 
+  @doc "Transforms scopes by applying supported options (e.g. :admin)"
+  def transform_scopes(scopes, options) do
+    if options[:admin] do
+      Config.oauth_admin_scopes(scopes)
+    else
+      scopes
+    end
+  end
+
   defp maybe_perform_instance_privacy_check(%Plug.Conn{} = conn, options) do
     if options[:skip_instance_privacy_check] do
       conn
lib/pleroma/plugs/parsers_plug.ex (new file, 21 lines)
@@ -0,0 +1,21 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Plugs.Parsers do
  @moduledoc "Initializes Plug.Parsers with upload limit set at boot time"

  @behaviour Plug

  def init(_opts) do
    Plug.Parsers.init(
      parsers: [:urlencoded, :multipart, :json],
      pass: ["*/*"],
      json_decoder: Jason,
      length: Pleroma.Config.get([:instance, :upload_limit]),
      body_reader: {Pleroma.Web.Plugs.DigestPlug, :read_body, []}
    )
  end

  defdelegate call(conn, opts), to: Plug.Parsers
end
@@ -11,11 +11,9 @@ def init(options) do
   end
 
   def call(%{assigns: %{user: %User{} = user}} = conn, _) do
-    if User.auth_active?(user) do
-      conn
-    else
-      conn
-      |> assign(:user, nil)
+    case User.account_status(user) do
+      :active -> conn
+      _ -> assign(conn, :user, nil)
     end
   end
 
@@ -5,19 +5,39 @@
 defmodule Pleroma.Plugs.UserIsAdminPlug do
   import Pleroma.Web.TranslationHelpers
   import Plug.Conn
 
   alias Pleroma.User
+  alias Pleroma.Web.OAuth
 
   def init(options) do
     options
   end
 
-  def call(%{assigns: %{user: %User{is_admin: true}}} = conn, _) do
-    conn
+  def call(%{assigns: %{user: %User{is_admin: true}} = assigns} = conn, _) do
+    token = assigns[:token]
+
+    cond do
+      not Pleroma.Config.enforce_oauth_admin_scope_usage?() ->
+        conn
+
+      token && OAuth.Scopes.contains_admin_scopes?(token.scopes) ->
+        # Note: checking for _any_ admin scope presence, not necessarily fitting requested action.
+        # Thus, controller must explicitly invoke OAuthScopesPlug to verify scope requirements.
+        # Admin might opt out of admin scope for some apps to block any admin actions from them.
+        conn
+
+      true ->
+        fail(conn)
+    end
   end
 
   def call(conn, _) do
+    fail(conn)
+  end
+
+  defp fail(conn) do
     conn
-    |> render_error(:forbidden, "User is not admin.")
-    |> halt
+    |> render_error(:forbidden, "User is not an admin or OAuth admin scope is not granted.")
+    |> halt()
   end
 end
@@ -8,6 +8,8 @@ defmodule Pleroma.Repo do
     adapter: Ecto.Adapters.Postgres,
     migration_timestamps: [type: :naive_datetime_usec]
 
+  require Logger
+
   defmodule Instrumenter do
     use Prometheus.EctoInstrumenter
   end
@@ -47,4 +49,37 @@ def get_assoc(resource, association) do
       _ -> {:error, :not_found}
     end
   end
+
+  def check_migrations_applied!() do
+    unless Pleroma.Config.get(
+             [:i_am_aware_this_may_cause_data_loss, :disable_migration_check],
+             false
+           ) do
+      Ecto.Migrator.with_repo(__MODULE__, fn repo ->
+        down_migrations =
+          Ecto.Migrator.migrations(repo)
+          |> Enum.reject(fn
+            {:up, _, _} -> true
+            {:down, _, _} -> false
+          end)
+
+        if length(down_migrations) > 0 do
+          down_migrations_text =
+            Enum.map(down_migrations, fn {:down, id, name} -> "- #{name} (#{id})\n" end)
+
+          Logger.error(
+            "The following migrations were not applied:\n#{down_migrations_text}If you want to start Pleroma anyway, set\nconfig :pleroma, :i_am_aware_this_may_cause_data_loss, disable_migration_check: true"
+          )
+
+          raise Pleroma.Repo.UnappliedMigrationsError
+        end
+      end)
+    else
+      :ok
+    end
+  end
+end
+
+defmodule Pleroma.Repo.UnappliedMigrationsError do
+  defexception message: "Unapplied Migrations detected"
 end
lib/pleroma/report_note.ex (new file, 48 lines)
@@ -0,0 +1,48 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.ReportNote do
  use Ecto.Schema

  import Ecto.Changeset
  import Ecto.Query

  alias Pleroma.Activity
  alias Pleroma.Repo
  alias Pleroma.ReportNote
  alias Pleroma.User

  @type t :: %__MODULE__{}

  schema "report_notes" do
    field(:content, :string)
    belongs_to(:user, User, type: FlakeId.Ecto.CompatType)
    belongs_to(:activity, Activity, type: FlakeId.Ecto.CompatType)

    timestamps()
  end

  @spec create(FlakeId.Ecto.CompatType.t(), FlakeId.Ecto.CompatType.t(), String.t()) ::
          {:ok, ReportNote.t()} | {:error, Changeset.t()}
  def create(user_id, activity_id, content) do
    attrs = %{
      user_id: user_id,
      activity_id: activity_id,
      content: content
    }

    %ReportNote{}
    |> cast(attrs, [:user_id, :activity_id, :content])
    |> validate_required([:user_id, :activity_id, :content])
    |> Repo.insert()
  end

  @spec destroy(FlakeId.Ecto.CompatType.t()) ::
          {:ok, ReportNote.t()} | {:error, Changeset.t()}
  def destroy(id) do
    from(r in ReportNote, where: r.id == ^id)
    |> Repo.one()
    |> Repo.delete()
  end
end
@@ -5,10 +5,12 @@
 defmodule Pleroma.Uploaders.Local do
   @behaviour Pleroma.Uploaders.Uploader
 
+  @impl true
   def get_file(_) do
     {:ok, {:static_dir, upload_path()}}
   end
 
+  @impl true
   def put_file(upload) do
     {local_path, file} =
       case Enum.reverse(Path.split(upload.path)) do
@@ -33,4 +35,15 @@ def put_file(upload) do
   def upload_path do
     Pleroma.Config.get!([__MODULE__, :uploads])
   end
+
+  @impl true
+  def delete_file(path) do
+    upload_path()
+    |> Path.join(path)
+    |> File.rm()
+    |> case do
+      :ok -> :ok
+      {:error, posix_error} -> {:error, to_string(posix_error)}
+    end
+  end
 end
@@ -1,37 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Uploaders.MDII do
  @moduledoc "Represents uploader for https://github.com/hakaba-hitoyo/minimal-digital-image-infrastructure"

  alias Pleroma.Config
  alias Pleroma.HTTP

  @behaviour Pleroma.Uploaders.Uploader

  # MDII-hosted images are never passed through the MediaPlug; only local media.
  # Delegate to Pleroma.Uploaders.Local
  def get_file(file) do
    Pleroma.Uploaders.Local.get_file(file)
  end

  def put_file(upload) do
    cgi = Config.get([Pleroma.Uploaders.MDII, :cgi])
    files = Config.get([Pleroma.Uploaders.MDII, :files])

    {:ok, file_data} = File.read(upload.tempfile)

    extension = String.split(upload.name, ".") |> List.last()
    query = "#{cgi}?#{extension}"

    with {:ok, %{status: 200, body: body}} <-
           HTTP.post(query, file_data, [], adapter: [pool: :default]) do
      remote_file_name = String.split(body) |> List.first()
      public_url = "#{files}/#{remote_file_name}.#{extension}"
      {:ok, {:url, public_url}}
    else
      _ -> Pleroma.Uploaders.Local.put_file(upload)
    end
  end
end
@@ -10,6 +10,7 @@ defmodule Pleroma.Uploaders.S3 do
 
   # The file name is re-encoded with S3's constraints here to comply with previous
   # links with less strict filenames
+  @impl true
   def get_file(file) do
     config = Config.get([__MODULE__])
     bucket = Keyword.fetch!(config, :bucket)
@@ -35,6 +36,7 @@ def get_file(file) do
       ])}}
   end
 
+  @impl true
   def put_file(%Pleroma.Upload{} = upload) do
     config = Config.get([__MODULE__])
     bucket = Keyword.get(config, :bucket)
@@ -69,6 +71,18 @@ def put_file(%Pleroma.Upload{} = upload) do
     end
   end
 
+  @impl true
+  def delete_file(file) do
+    [__MODULE__, :bucket]
+    |> Config.get()
+    |> ExAws.S3.delete_object(file)
+    |> ExAws.request()
+    |> case do
+      {:ok, %{status_code: 204}} -> :ok
+      error -> {:error, inspect(error)}
+    end
+  end
+
   @regex Regex.compile!("[^0-9a-zA-Z!.*/'()_-]")
   def strict_encode(name) do
     String.replace(name, @regex, "-")
@@ -36,6 +36,8 @@ defmodule Pleroma.Uploaders.Uploader do
   @callback put_file(Pleroma.Upload.t()) ::
               :ok | {:ok, file_spec()} | {:error, String.t()} | :wait_callback
 
+  @callback delete_file(file :: String.t()) :: :ok | {:error, String.t()}
+
   @callback http_callback(Plug.Conn.t(), Map.t()) ::
               {:ok, Plug.Conn.t()}
               | {:ok, Plug.Conn.t(), file_spec()}
@@ -43,7 +45,6 @@ defmodule Pleroma.Uploaders.Uploader do
   @optional_callbacks http_callback: 2
 
   @spec put_file(module(), Pleroma.Upload.t()) :: {:ok, file_spec()} | {:error, String.t()}
-
   def put_file(uploader, upload) do
     case uploader.put_file(upload) do
       :ok -> {:ok, {:file, upload.path}}
@ -12,6 +12,7 @@ defmodule Pleroma.User do
|
||||||
alias Comeonin.Pbkdf2
|
alias Comeonin.Pbkdf2
|
||||||
alias Ecto.Multi
|
alias Ecto.Multi
|
||||||
alias Pleroma.Activity
|
alias Pleroma.Activity
|
||||||
|
alias Pleroma.Config
|
||||||
alias Pleroma.Conversation.Participation
|
alias Pleroma.Conversation.Participation
|
||||||
alias Pleroma.Delivery
|
alias Pleroma.Delivery
|
||||||
alias Pleroma.FollowingRelationship
|
alias Pleroma.FollowingRelationship
|
||||||
|
@ -35,7 +36,7 @@ defmodule Pleroma.User do
|
||||||
require Logger
|
require Logger
|
||||||
|
|
||||||
@type t :: %__MODULE__{}
|
@type t :: %__MODULE__{}
|
||||||
|
@type account_status :: :active | :deactivated | :password_reset_pending | :confirmation_pending
|
||||||
@primary_key {:id, FlakeId.Ecto.CompatType, autogenerate: true}
|
@primary_key {:id, FlakeId.Ecto.CompatType, autogenerate: true}
|
||||||
|
|
||||||
# credo:disable-for-next-line Credo.Check.Readability.MaxLineLength
|
# credo:disable-for-next-line Credo.Check.Readability.MaxLineLength
|
||||||
|
@ -127,15 +128,13 @@ defmodule Pleroma.User do
|
||||||
field(:invisible, :boolean, default: false)
|
field(:invisible, :boolean, default: false)
|
||||||
field(:allow_following_move, :boolean, default: true)
|
field(:allow_following_move, :boolean, default: true)
|
||||||
field(:skip_thread_containment, :boolean, default: false)
|
field(:skip_thread_containment, :boolean, default: false)
|
||||||
|
field(:actor_type, :string, default: "Person")
|
||||||
field(:also_known_as, {:array, :string}, default: [])
|
field(:also_known_as, {:array, :string}, default: [])
|
||||||
|
|
||||||
field(:notification_settings, :map,
|
embeds_one(
|
||||||
default: %{
|
:notification_settings,
|
||||||
"followers" => true,
|
Pleroma.User.NotificationSetting,
|
||||||
"follows" => true,
|
on_replace: :update
|
||||||
"non_follows" => true,
|
|
||||||
"non_followers" => true
|
|
||||||
}
|
|
||||||
)
|
)
|
||||||
|
|
||||||
has_many(:notifications, Notification)
|
has_many(:notifications, Notification)
|
||||||
|
@@ -218,14 +217,21 @@ def unquote(:"#{outgoing_relation_target}_ap_ids")(user, restrict_deactivated? \
     end
   end
 
-  @doc "Returns if the user should be allowed to authenticate"
-  def auth_active?(%User{deactivated: true}), do: false
+  @doc "Returns status account"
+  @spec account_status(User.t()) :: account_status()
+  def account_status(%User{deactivated: true}), do: :deactivated
+  def account_status(%User{password_reset_pending: true}), do: :password_reset_pending
 
-  def auth_active?(%User{confirmation_pending: true}),
-    do: !Pleroma.Config.get([:instance, :account_activation_required])
+  def account_status(%User{confirmation_pending: true}) do
+    case Config.get([:instance, :account_activation_required]) do
+      true -> :confirmation_pending
+      _ -> :active
+    end
+  end
 
-  def auth_active?(%User{}), do: true
+  def account_status(%User{}), do: :active
+
+  @spec visible_for?(User.t(), User.t() | nil) :: boolean()
   def visible_for?(user, for_user \\ nil)
 
   def visible_for?(%User{invisible: true}, _), do: false
@@ -233,15 +239,17 @@ def visible_for?(%User{invisible: true}, _), do: false
   def visible_for?(%User{id: user_id}, %User{id: for_id}) when user_id == for_id, do: true
 
   def visible_for?(%User{} = user, for_user) do
-    auth_active?(user) || superuser?(for_user)
+    account_status(user) == :active || superuser?(for_user)
   end
 
   def visible_for?(_, _), do: false
 
+  @spec superuser?(User.t()) :: boolean()
   def superuser?(%User{local: true, is_admin: true}), do: true
   def superuser?(%User{local: true, is_moderator: true}), do: true
   def superuser?(_), do: false
 
+  @spec invisible?(User.t()) :: boolean()
   def invisible?(%User{invisible: true}), do: true
   def invisible?(_), do: false
 
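
A short sketch of how call sites translate from the old boolean check to the new atom-returning helper (user assumed to be a loaded %Pleroma.User{}):

# auth_active?(user) used to return a boolean; account_status/1 now returns
# :active | :deactivated | :password_reset_pending | :confirmation_pending.
case Pleroma.User.account_status(user) do
  :active -> :allow
  :password_reset_pending -> :require_password_change
  _ -> :deny
end
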
@@ -349,6 +357,7 @@ def remote_user_creation(params) do
         :following_count,
         :discoverable,
         :invisible,
+        :actor_type,
         :also_known_as
       ]
     )
@@ -399,6 +408,7 @@ def update_changeset(struct, params \\ %{}) do
         :raw_fields,
         :pleroma_settings_store,
         :discoverable,
+        :actor_type,
         :also_known_as
       ]
     )
@@ -441,6 +451,7 @@ def upgrade_changeset(struct, params \\ %{}, remote? \\ false) do
         :discoverable,
         :hide_followers_count,
         :hide_follows_count,
+        :actor_type,
         :also_known_as
       ]
     )
@@ -861,6 +872,13 @@ def get_friends(user, page \\ nil) do
     |> Repo.all()
   end
 
+  def get_friends_ap_ids(user) do
+    user
+    |> get_friends_query(nil)
+    |> select([u], u.ap_id)
+    |> Repo.all()
+  end
+
   def get_friends_ids(user, page \\ nil) do
     user
     |> get_friends_query(page)
@@ -1135,7 +1153,8 @@ def muted_notifications?(%User{} = user, %User{} = target),
   def blocks?(nil, _), do: false
 
   def blocks?(%User{} = user, %User{} = target) do
-    blocks_user?(user, target) || blocks_domain?(user, target)
+    blocks_user?(user, target) ||
+      (!User.following?(user, target) && blocks_domain?(user, target))
   end
 
   def blocks_user?(%User{} = user, %User{} = target) do
@@ -1221,20 +1240,9 @@ def deactivate(%User{} = user, status) do
   end
 
   def update_notification_settings(%User{} = user, settings) do
-    settings =
-      settings
-      |> Enum.map(fn {k, v} -> {k, v in [true, "true", "True", "1"]} end)
-      |> Map.new()
-
-    notification_settings =
-      user.notification_settings
-      |> Map.merge(settings)
-      |> Map.take(["followers", "follows", "non_follows", "non_followers"])
-
-    params = %{notification_settings: notification_settings}
-
     user
-    |> cast(params, [:notification_settings])
+    |> cast(%{notification_settings: settings}, [])
+    |> cast_embed(:notification_settings)
     |> validate_required([:notification_settings])
     |> update_and_set_cache()
   end
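
With the embedded schema, raw request parameters can be passed straight to update_notification_settings/2; NotificationSetting.changeset/2 downcases string values before the boolean cast. A sketch, assuming a loaded user:

{:ok, user} =
  Pleroma.User.update_notification_settings(user, %{
    "followers" => "false",
    "non_followers" => "True"
  })

user.notification_settings.followers
#=> false
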
@@ -1432,9 +1440,36 @@ def get_or_fetch_by_ap_id(ap_id) do
   Creates an internal service actor by URI if missing.
   Optionally takes nickname for addressing.
   """
-  def get_or_create_service_actor_by_ap_id(uri, nickname \\ nil) do
-    with user when is_nil(user) <- get_cached_by_ap_id(uri) do
-      {:ok, user} =
+  @spec get_or_create_service_actor_by_ap_id(String.t(), String.t()) :: User.t() | nil
+  def get_or_create_service_actor_by_ap_id(uri, nickname) do
+    {_, user} =
+      case get_cached_by_ap_id(uri) do
+        nil ->
+          with {:error, %{errors: errors}} <- create_service_actor(uri, nickname) do
+            Logger.error("Cannot create service actor: #{uri}/.\n#{inspect(errors)}")
+            {:error, nil}
+          end
+
+        %User{invisible: false} = user ->
+          set_invisible(user)
+
+        user ->
+          {:ok, user}
+      end
+
+    user
+  end
+
+  @spec set_invisible(User.t()) :: {:ok, User.t()}
+  defp set_invisible(user) do
+    user
+    |> change(%{invisible: true})
+    |> update_and_set_cache()
+  end
+
+  @spec create_service_actor(String.t(), String.t()) ::
+          {:ok, User.t()} | {:error, Ecto.Changeset.t()}
+  defp create_service_actor(uri, nickname) do
     %User{
       invisible: true,
       local: true,
@@ -1442,10 +1477,10 @@ def get_or_create_service_actor_by_ap_id(uri, nickname \\ nil) do
       nickname: nickname,
       follower_address: uri <> "/followers"
     }
+    |> change
+    |> unique_constraint(:nickname)
     |> Repo.insert()
-
-    user
-  end
+    |> set_cache()
   end
 
   # AP style
@@ -1858,6 +1893,12 @@ def admin_api_update(user, params) do
     |> update_and_set_cache()
   end
 
+  @doc "Signs user out of all applications"
+  def global_sign_out(user) do
+    OAuth.Authorization.delete_user_authorizations(user)
+    OAuth.Token.delete_user_tokens(user)
+  end
+
   def mascot_update(user, url) do
     user
     |> cast(%{mascot: url}, [:mascot])
lib/pleroma/user/notification_setting.ex (new file, 40 lines)
@@ -0,0 +1,40 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.User.NotificationSetting do
+  use Ecto.Schema
+  import Ecto.Changeset
+
+  @derive Jason.Encoder
+  @primary_key false
+
+  embedded_schema do
+    field(:followers, :boolean, default: true)
+    field(:follows, :boolean, default: true)
+    field(:non_follows, :boolean, default: true)
+    field(:non_followers, :boolean, default: true)
+    field(:privacy_option, :boolean, default: false)
+  end
+
+  def changeset(schema, params) do
+    schema
+    |> cast(prepare_attrs(params), [
+      :followers,
+      :follows,
+      :non_follows,
+      :non_followers,
+      :privacy_option
+    ])
+  end
+
+  defp prepare_attrs(params) do
+    Enum.reduce(params, %{}, fn
+      {k, v}, acc when is_binary(v) ->
+        Map.put(acc, k, String.downcase(v))
+
+      {k, v}, acc ->
+        Map.put(acc, k, v)
+    end)
+  end
+end
lib/pleroma/utils.ex (new file, 12 lines)
@@ -0,0 +1,12 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Utils do
+  def compile_dir(dir) when is_binary(dir) do
+    dir
+    |> File.ls!()
+    |> Enum.map(&Path.join(dir, &1))
+    |> Kernel.ParallelCompiler.compile()
+  end
+end
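
compile_dir/1 just lists a directory and hands every file to the parallel compiler, returning Kernel.ParallelCompiler.compile/1's result; a usage sketch with an illustrative path:

case Pleroma.Utils.compile_dir("priv/scrubbers") do
  {:ok, modules, _warnings} -> modules
  {:error, errors, _warnings} -> raise "compile failed: #{inspect(errors)}"
end
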
lib/pleroma/web/activity_pub/activity_pub.ex
@@ -950,6 +950,8 @@ defp restrict_blocked(query, %{"blocking_user" => %User{} = user} = opts) do
     blocked_ap_ids = opts["blocked_users_ap_ids"] || User.blocked_users_ap_ids(user)
     domain_blocks = user.domain_blocks || []
 
+    following_ap_ids = User.get_friends_ap_ids(user)
+
     query =
       if has_named_binding?(query, :object), do: query, else: Activity.with_joined_object(query)
 
@@ -964,8 +966,22 @@ defp restrict_blocked(query, %{"blocking_user" => %User{} = user} = opts) do
         activity.data,
         ^blocked_ap_ids
       ),
-      where: fragment("not (split_part(?, '/', 3) = ANY(?))", activity.actor, ^domain_blocks),
-      where: fragment("not (split_part(?->>'actor', '/', 3) = ANY(?))", o.data, ^domain_blocks)
+      where:
+        fragment(
+          "(not (split_part(?, '/', 3) = ANY(?))) or ? = ANY(?)",
+          activity.actor,
+          ^domain_blocks,
+          activity.actor,
+          ^following_ap_ids
+        ),
+      where:
+        fragment(
+          "(not (split_part(?->>'actor', '/', 3) = ANY(?))) or (?->>'actor') = ANY(?)",
+          o.data,
+          ^domain_blocks,
+          o.data,
+          ^following_ap_ids
+        )
     )
   end
 
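
Together with the User.blocks?/2 change earlier, the extra following_ap_ids clauses mean a domain block no longer hides actors the blocker explicitly follows. A rough sketch of the intended behaviour (nicknames and URLs hypothetical):

blocker = Pleroma.User.get_cached_by_nickname("alice")
followed = Pleroma.User.get_cached_by_ap_id("https://blocked.example/users/bob")

# false, provided alice follows bob and has no direct user-level block,
# even if blocked.example is on her domain-block list.
Pleroma.User.blocks?(blocker, followed)
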
@@ -1052,6 +1068,13 @@ defp maybe_preload_bookmarks(query, opts) do
     |> Activity.with_preloaded_bookmark(opts["user"])
   end
 
+  defp maybe_preload_report_notes(query, %{"preload_report_notes" => true}) do
+    query
+    |> Activity.with_preloaded_report_notes()
+  end
+
+  defp maybe_preload_report_notes(query, _), do: query
+
   defp maybe_set_thread_muted_field(query, %{"skip_preload" => true}), do: query
 
   defp maybe_set_thread_muted_field(query, opts) do
@@ -1105,6 +1128,7 @@ def fetch_activities_query(recipients, opts \\ %{}) do
     Activity
     |> maybe_preload_objects(opts)
     |> maybe_preload_bookmarks(opts)
+    |> maybe_preload_report_notes(opts)
     |> maybe_set_thread_muted_field(opts)
     |> maybe_order(opts)
     |> restrict_recipients(recipients, opts["user"])
@@ -1141,6 +1165,25 @@ def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do
     |> maybe_update_cc(list_memberships, opts["user"])
   end
 
+  @doc """
+  Fetch favorites activities of user with order by sort adds to favorites
+  """
+  @spec fetch_favourites(User.t(), map(), atom()) :: list(Activity.t())
+  def fetch_favourites(user, params \\ %{}, pagination \\ :keyset) do
+    user.ap_id
+    |> Activity.Queries.by_actor()
+    |> Activity.Queries.by_type("Like")
+    |> Activity.with_joined_object()
+    |> Object.with_joined_activity()
+    |> select([_like, object, activity], %{activity | object: object})
+    |> order_by([like, _, _], desc: like.id)
+    |> Pagination.fetch_paginated(
+      Map.merge(params, %{"skip_order" => true}),
+      pagination,
+      :object_activity
+    )
+  end
+
   defp maybe_update_cc(activities, list_memberships, %User{ap_id: user_ap_id})
        when is_list(list_memberships) and length(list_memberships) > 0 do
     Enum.map(activities, fn
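
A usage sketch for the new favourites query (user assumed loaded); it returns Like activities with their objects preloaded, newest favourite first:

favourites =
  Pleroma.Web.ActivityPub.ActivityPub.fetch_favourites(user, %{"limit" => 20})

Enum.map(favourites, & &1.object.data["id"])
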
@@ -1217,6 +1260,7 @@ defp object_to_user_data(data) do
     data = Transmogrifier.maybe_fix_user_object(data)
     discoverable = data["discoverable"] || false
     invisible = data["invisible"] || false
+    actor_type = data["type"] || "Person"
 
     user_data = %{
       ap_id: data["id"],
@@ -1232,6 +1276,7 @@ defp object_to_user_data(data) do
       follower_address: data["followers"],
       following_address: data["following"],
       bio: data["summary"],
+      actor_type: actor_type,
       also_known_as: Map.get(data, "alsoKnownAs", [])
     }
 
@@ -1253,27 +1298,25 @@ defp object_to_user_data(data) do
   def fetch_follow_information_for_user(user) do
     with {:ok, following_data} <-
            Fetcher.fetch_and_contain_remote_object_from_id(user.following_address),
-         following_count when is_integer(following_count) <- following_data["totalItems"],
          {:ok, hide_follows} <- collection_private(following_data),
          {:ok, followers_data} <-
            Fetcher.fetch_and_contain_remote_object_from_id(user.follower_address),
-         followers_count when is_integer(followers_count) <- followers_data["totalItems"],
          {:ok, hide_followers} <- collection_private(followers_data) do
       {:ok,
        %{
          hide_follows: hide_follows,
-         follower_count: followers_count,
-         following_count: following_count,
+         follower_count: normalize_counter(followers_data["totalItems"]),
+         following_count: normalize_counter(following_data["totalItems"]),
          hide_followers: hide_followers
        }}
     else
-      {:error, _} = e ->
-        e
-
-      e ->
-        {:error, e}
+      {:error, _} = e -> e
+      e -> {:error, e}
     end
   end
 
+  defp normalize_counter(counter) when is_integer(counter), do: counter
+  defp normalize_counter(_), do: 0
+
   defp maybe_update_follow_information(data) do
     with {:enabled, true} <-
@@ -1294,24 +1337,18 @@ defp maybe_update_follow_information(data) do
     end
   end
 
+  defp collection_private(%{"first" => %{"type" => type}})
+       when type in ["CollectionPage", "OrderedCollectionPage"],
+       do: {:ok, false}
+
   defp collection_private(%{"first" => first}) do
-    if is_map(first) and
-         first["type"] in ["CollectionPage", "OrderedCollectionPage"] do
-      {:ok, false}
-    else
     with {:ok, %{"type" => type}} when type in ["CollectionPage", "OrderedCollectionPage"] <-
            Fetcher.fetch_and_contain_remote_object_from_id(first) do
       {:ok, false}
     else
-      {:error, {:ok, %{status: code}}} when code in [401, 403] ->
-        {:ok, true}
-
-      {:error, _} = e ->
-        e
-
-      e ->
-        {:error, e}
-      end
+      {:error, {:ok, %{status: code}}} when code in [401, 403] -> {:ok, true}
+      {:error, _} = e -> e
+      e -> {:error, e}
     end
   end
 
@@ -1332,6 +1369,10 @@ def fetch_and_prepare_user_from_ap_id(ap_id) do
          data <- maybe_update_follow_information(data) do
       {:ok, data}
     else
+      {:error, "Object has been deleted"} = e ->
+        Logger.debug("Could not decode user at fetch #{ap_id}, #{inspect(e)}")
+        {:error, e}
+
       e ->
         Logger.error("Could not decode user at fetch #{ap_id}, #{inspect(e)}")
         {:error, e}
lib/pleroma/web/activity_pub/activity_pub_controller.ex
@@ -257,7 +257,7 @@ def inbox(%{assigns: %{valid_signature: true}} = conn, params) do
 
   # only accept relayed Creates
   def inbox(conn, %{"type" => "Create"} = params) do
-    Logger.info(
+    Logger.debug(
       "Signature missing or not from author, relayed Create message, fetching object from source"
     )
 
@@ -270,11 +270,11 @@ def inbox(conn, params) do
     headers = Enum.into(conn.req_headers, %{})
 
     if String.contains?(headers["signature"], params["actor"]) do
-      Logger.info(
+      Logger.debug(
         "Signature validation error for: #{params["actor"]}, make sure you are forwarding the HTTP Host header!"
       )
 
-      Logger.info(inspect(conn.req_headers))
+      Logger.debug(inspect(conn.req_headers))
     end
 
     json(conn, dgettext("errors", "error"))
lib/pleroma/web/activity_pub/mrf/drop_policy.ex
@@ -9,7 +9,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.DropPolicy do
 
   @impl true
   def filter(object) do
-    Logger.info("REJECTING #{inspect(object)}")
+    Logger.debug("REJECTING #{inspect(object)}")
     {:reject, object}
   end
 
lib/pleroma/web/activity_pub/mrf/media_proxy_warming_policy.ex
@@ -18,7 +18,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
   ]
 
   def perform(:prefetch, url) do
-    Logger.info("Prefetching #{inspect(url)}")
+    Logger.debug("Prefetching #{inspect(url)}")
 
     url
     |> MediaProxy.url()
lib/pleroma/web/activity_pub/mrf/vocabulary_policy.ex
@@ -20,7 +20,7 @@ def filter(%{"type" => message_type} = message) do
     with accepted_vocabulary <- Pleroma.Config.get([:mrf_vocabulary, :accept]),
          rejected_vocabulary <- Pleroma.Config.get([:mrf_vocabulary, :reject]),
          true <-
-           length(accepted_vocabulary) == 0 || Enum.member?(accepted_vocabulary, message_type),
+           Enum.empty?(accepted_vocabulary) || Enum.member?(accepted_vocabulary, message_type),
          false <-
            length(rejected_vocabulary) > 0 && Enum.member?(rejected_vocabulary, message_type),
          {:ok, _} <- filter(message["object"]) do
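
Enum.empty?/1 is a style fix only; the policy still reads the same configuration keys, e.g.:

config :pleroma, :mrf_vocabulary,
  accept: [],
  reject: ["Announce"]
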
lib/pleroma/web/activity_pub/publisher.ex
@@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.Publisher do
   alias Pleroma.HTTP
   alias Pleroma.Instances
   alias Pleroma.Object
+  alias Pleroma.Repo
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.Relay
   alias Pleroma.Web.ActivityPub.Transmogrifier
@@ -47,7 +48,7 @@ def is_representable?(%Activity{} = activity) do
   * `id`: the ActivityStreams URI of the message
   """
   def publish_one(%{inbox: inbox, json: json, actor: %User{} = actor, id: id} = params) do
-    Logger.info("Federating #{id} to #{inbox}")
+    Logger.debug("Federating #{id} to #{inbox}")
     %{host: host, path: path} = URI.parse(inbox)
 
     digest = "SHA-256=" <> (:crypto.hash(:sha256, json) |> Base.encode64())
@@ -188,12 +189,15 @@ def publish(%User{} = actor, %{data: %{"bcc" => bcc}} = activity)
 
     recipients = recipients(actor, activity)
 
+    inboxes =
       recipients
       |> Enum.filter(&User.ap_enabled?/1)
       |> Enum.map(fn %{source_data: data} -> data["inbox"] end)
       |> Enum.filter(fn inbox -> should_federate?(inbox, public) end)
       |> Instances.filter_reachable()
-    |> Enum.each(fn {inbox, unreachable_since} ->
+
+    Repo.checkout(fn ->
+      Enum.each(inboxes, fn {inbox, unreachable_since} ->
       %User{ap_id: ap_id} =
         Enum.find(recipients, fn %{source_data: data} -> data["inbox"] == inbox end)
@@ -214,6 +218,7 @@ def publish(%User{} = actor, %{data: %{"bcc" => bcc}} = activity)
           unreachable_since: unreachable_since
         })
       end)
+    end)
   end
 
   @doc """
@@ -223,7 +228,7 @@ def publish(%User{} = actor, %Activity{} = activity) do
     public = is_public?(activity)
 
     if public && Config.get([:instance, :allow_relay]) do
-      Logger.info(fn -> "Relaying #{activity.data["id"]} out" end)
+      Logger.debug(fn -> "Relaying #{activity.data["id"]} out" end)
       Relay.publish(activity)
     end
 
@@ -259,6 +264,10 @@ def gather_webfinger_links(%User{} = user) do
         "rel" => "self",
         "type" => "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"",
         "href" => user.ap_id
+      },
+      %{
+        "rel" => "http://ostatus.org/schema/1.0/subscribe",
+        "template" => "#{Pleroma.Web.base_url()}/ostatus_subscribe?acct={uri}"
       }
     ]
   end
lib/pleroma/web/activity_pub/relay.ex
@@ -9,10 +9,12 @@ defmodule Pleroma.Web.ActivityPub.Relay do
   alias Pleroma.Web.ActivityPub.ActivityPub
   require Logger
 
+  @relay_nickname "relay"
+
   def get_actor do
     actor =
       relay_ap_id()
-      |> User.get_or_create_service_actor_by_ap_id()
+      |> User.get_or_create_service_actor_by_ap_id(@relay_nickname)
 
     actor
   end
lib/pleroma/web/activity_pub/transmogrifier.ex
@@ -397,7 +397,7 @@ def handle_incoming(
         %{"type" => "Create", "object" => %{"type" => objtype} = object} = data,
         options
       )
-      when objtype in ["Article", "Note", "Video", "Page", "Question", "Answer"] do
+      when objtype in ["Article", "Event", "Note", "Video", "Page", "Question", "Answer"] do
     actor = Containment.get_actor(data)
 
     data =
@@ -658,24 +658,8 @@ def handle_incoming(
     with %User{ap_id: ^actor_id} = actor <- User.get_cached_by_ap_id(object["id"]) do
       {:ok, new_user_data} = ActivityPub.user_data_from_user_object(object)
 
-      locked = new_user_data[:locked] || false
-      attachment = get_in(new_user_data, [:source_data, "attachment"]) || []
-      invisible = new_user_data[:invisible] || false
-
-      fields =
-        attachment
-        |> Enum.filter(fn %{"type" => t} -> t == "PropertyValue" end)
-        |> Enum.map(fn fields -> Map.take(fields, ["name", "value"]) end)
-
-      update_data =
-        new_user_data
-        |> Map.take([:avatar, :banner, :bio, :name, :also_known_as])
-        |> Map.put(:fields, fields)
-        |> Map.put(:locked, locked)
-        |> Map.put(:invisible, invisible)
-
       actor
-      |> User.upgrade_changeset(update_data, true)
+      |> User.upgrade_changeset(new_user_data, true)
       |> User.update_and_set_cache()
 
       ActivityPub.update(%{
lib/pleroma/web/activity_pub/utils.ex
@@ -22,7 +22,16 @@ defmodule Pleroma.Web.ActivityPub.Utils do
   require Logger
   require Pleroma.Constants
 
-  @supported_object_types ["Article", "Note", "Video", "Page", "Question", "Answer", "Audio"]
+  @supported_object_types [
+    "Article",
+    "Note",
+    "Event",
+    "Video",
+    "Page",
+    "Question",
+    "Answer",
+    "Audio"
+  ]
   @strip_status_report_states ~w(closed resolved)
   @supported_report_states ~w(open closed resolved)
   @valid_visibilities ~w(public unlisted private direct)
@@ -303,19 +312,12 @@ def make_emoji_reaction_data(user, object, emoji, activity_id) do
     |> Map.put("content", emoji)
   end
 
-  @spec update_element_in_object(String.t(), list(any), Object.t()) ::
+  @spec update_element_in_object(String.t(), list(any), Object.t(), integer() | nil) ::
           {:ok, Object.t()} | {:error, Ecto.Changeset.t()}
-  def update_element_in_object(property, element, object) do
+  def update_element_in_object(property, element, object, count \\ nil) do
     length =
-      if is_map(element) do
-        element
-        |> Map.values()
-        |> List.flatten()
-        |> length()
-      else
-        element
-        |> length()
-      end
+      count ||
+        length(element)
 
     data =
       Map.merge(
@@ -335,29 +337,60 @@ def add_emoji_reaction_to_object(
         %Activity{data: %{"content" => emoji, "actor" => actor}},
         object
       ) do
-    reactions = object.data["reactions"] || %{}
-    emoji_actors = reactions[emoji] || []
-    new_emoji_actors = [actor | emoji_actors] |> Enum.uniq()
-    new_reactions = Map.put(reactions, emoji, new_emoji_actors)
-    update_element_in_object("reaction", new_reactions, object)
+    reactions = get_cached_emoji_reactions(object)
+
+    new_reactions =
+      case Enum.find_index(reactions, fn [candidate, _] -> emoji == candidate end) do
+        nil ->
+          reactions ++ [[emoji, [actor]]]
+
+        index ->
+          List.update_at(
+            reactions,
+            index,
+            fn [emoji, users] -> [emoji, Enum.uniq([actor | users])] end
+          )
+      end
+
+    count = emoji_count(new_reactions)
+
+    update_element_in_object("reaction", new_reactions, object, count)
+  end
+
+  def emoji_count(reactions_list) do
+    Enum.reduce(reactions_list, 0, fn [_, users], acc -> acc + length(users) end)
   end
 
   def remove_emoji_reaction_from_object(
         %Activity{data: %{"content" => emoji, "actor" => actor}},
         object
       ) do
-    reactions = object.data["reactions"] || %{}
-    emoji_actors = reactions[emoji] || []
-    new_emoji_actors = List.delete(emoji_actors, actor)
+    reactions = get_cached_emoji_reactions(object)
 
     new_reactions =
-      if new_emoji_actors == [] do
-        Map.delete(reactions, emoji)
-      else
-        Map.put(reactions, emoji, new_emoji_actors)
+      case Enum.find_index(reactions, fn [candidate, _] -> emoji == candidate end) do
+        nil ->
+          reactions
+
+        index ->
+          List.update_at(
+            reactions,
+            index,
+            fn [emoji, users] -> [emoji, List.delete(users, actor)] end
+          )
+          |> Enum.reject(fn [_, users] -> Enum.empty?(users) end)
       end
 
-    update_element_in_object("reaction", new_reactions, object)
+    count = emoji_count(new_reactions)
+    update_element_in_object("reaction", new_reactions, object, count)
+  end
+
+  def get_cached_emoji_reactions(object) do
+    if is_list(object.data["reactions"]) do
+      object.data["reactions"]
+    else
+      []
+    end
   end
 
   @spec add_like_to_object(Activity.t(), Object.t()) ::
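
The stored shape of object.data["reactions"] changes from a map of emoji => actors to an ordered list of [emoji, actors] pairs, which is what get_cached_emoji_reactions/1 and emoji_count/1 expect. A sketch with made-up actor IDs:

reactions = [
  ["🔥", ["https://example.com/users/alice"]],
  ["👍", ["https://example.com/users/bob", "https://example.com/users/carol"]]
]

Pleroma.Web.ActivityPub.Utils.emoji_count(reactions)
#=> 3
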
@@ -787,6 +820,7 @@ def get_reports(params, page, page_size) do
     params
     |> Map.put("type", "Flag")
     |> Map.put("skip_preload", true)
+    |> Map.put("preload_report_notes", true)
     |> Map.put("total", true)
     |> Map.put("limit", page_size)
     |> Map.put("offset", (page - 1) * page_size)
lib/pleroma/web/activity_pub/views/user_view.ex
@@ -91,7 +91,7 @@ def render("user.json", %{user: user}) do
 
     %{
       "id" => user.ap_id,
-      "type" => "Person",
+      "type" => user.actor_type,
       "following" => "#{user.ap_id}/following",
       "followers" => "#{user.ap_id}/followers",
       "inbox" => "#{user.ap_id}/inbox",
@@ -201,7 +201,6 @@ def render("followers.json", %{user: user} = opts) do
     %{
       "id" => "#{user.ap_id}/followers",
       "type" => "OrderedCollection",
-      "totalItems" => total,
       "first" =>
         if showing_items do
           collection(followers, "#{user.ap_id}/followers", 1, showing_items, total)
@@ -209,6 +208,7 @@ def render("followers.json", %{user: user} = opts) do
           "#{user.ap_id}/followers?page=1"
         end
     }
+    |> maybe_put_total_items(showing_count, total)
     |> Map.merge(Utils.make_json_ld_header())
   end
 
@@ -251,6 +251,12 @@ def render("activity_collection_page.json", %{activities: activities, iri: iri})
     |> Map.merge(Utils.make_json_ld_header())
   end
 
+  defp maybe_put_total_items(map, false, _total), do: map
+
+  defp maybe_put_total_items(map, true, total) do
+    Map.put(map, "totalItems", total)
+  end
+
   def collection(collection, iri, page, show_items \\ true, total \\ nil) do
     offset = (page - 1) * 10
     items = Enum.slice(collection, offset, 10)
lib/pleroma/web/admin_api/admin_api_controller.ex
@@ -4,16 +4,20 @@
 
 defmodule Pleroma.Web.AdminAPI.AdminAPIController do
   use Pleroma.Web, :controller
+
+  import Pleroma.Web.ControllerHelper, only: [json_response: 3]
+
   alias Pleroma.Activity
+  alias Pleroma.ConfigDB
   alias Pleroma.ModerationLog
   alias Pleroma.Plugs.OAuthScopesPlug
+  alias Pleroma.ReportNote
   alias Pleroma.User
   alias Pleroma.UserInviteToken
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.Relay
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.AdminAPI.AccountView
-  alias Pleroma.Web.AdminAPI.Config
   alias Pleroma.Web.AdminAPI.ConfigView
   alias Pleroma.Web.AdminAPI.ModerationLogView
   alias Pleroma.Web.AdminAPI.Report
@@ -24,26 +28,22 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
   alias Pleroma.Web.MastodonAPI.StatusView
   alias Pleroma.Web.Router
 
-  import Pleroma.Web.ControllerHelper, only: [json_response: 3]
-
   require Logger
 
+  @descriptions_json Pleroma.Docs.JSON.compile()
+  @users_page_size 50
+
   plug(
     OAuthScopesPlug,
-    %{scopes: ["read:accounts"]}
-    when action in [:list_users, :user_show, :right_get, :invites]
+    %{scopes: ["read:accounts"], admin: true}
+    when action in [:list_users, :user_show, :right_get]
   )
 
   plug(
     OAuthScopesPlug,
-    %{scopes: ["write:accounts"]}
+    %{scopes: ["write:accounts"], admin: true}
     when action in [
-           :get_invite_token,
-           :revoke_invite,
-           :email_invite,
            :get_password_reset,
-           :user_follow,
-           :user_unfollow,
            :user_delete,
            :users_create,
            :user_toggle_activation,
@@ -56,42 +56,56 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
         ]
   )
 
+  plug(OAuthScopesPlug, %{scopes: ["read:invites"], admin: true} when action == :invites)
+
   plug(
     OAuthScopesPlug,
-    %{scopes: ["read:reports"]} when action in [:list_reports, :report_show]
+    %{scopes: ["write:invites"], admin: true}
+    when action in [:create_invite_token, :revoke_invite, :email_invite]
   )
 
   plug(
     OAuthScopesPlug,
-    %{scopes: ["write:reports"]}
-    when action in [:report_update_state, :report_respond]
+    %{scopes: ["write:follows"], admin: true}
+    when action in [:user_follow, :user_unfollow, :relay_follow, :relay_unfollow]
   )
 
   plug(
     OAuthScopesPlug,
-    %{scopes: ["read:statuses"]} when action == :list_user_statuses
+    %{scopes: ["read:reports"], admin: true}
+    when action in [:list_reports, :report_show]
   )
 
   plug(
     OAuthScopesPlug,
-    %{scopes: ["write:statuses"]}
+    %{scopes: ["write:reports"], admin: true}
+    when action in [:reports_update]
+  )
+
+  plug(
+    OAuthScopesPlug,
+    %{scopes: ["read:statuses"], admin: true}
+    when action == :list_user_statuses
+  )
+
+  plug(
+    OAuthScopesPlug,
+    %{scopes: ["write:statuses"], admin: true}
     when action in [:status_update, :status_delete]
   )
 
   plug(
     OAuthScopesPlug,
-    %{scopes: ["read"]}
-    when action in [:config_show, :migrate_to_db, :migrate_from_db, :list_log]
+    %{scopes: ["read"], admin: true}
+    when action in [:config_show, :migrate_from_db, :list_log]
   )
 
   plug(
     OAuthScopesPlug,
-    %{scopes: ["write"]}
-    when action in [:relay_follow, :relay_unfollow, :config_update]
+    %{scopes: ["write"], admin: true}
+    when action == :config_update
   )
 
-  @users_page_size 50
-
   action_fallback(:errors)
 
   def user_delete(%{assigns: %{user: admin}} = conn, %{"nickname" => nickname}) do
@@ -238,7 +252,7 @@ def list_instance_statuses(conn, %{"instance" => instance} = params) do
       })
 
     conn
-    |> put_view(StatusView)
+    |> put_view(Pleroma.Web.AdminAPI.StatusView)
     |> render("index.json", %{activities: activities, as: :activity})
   end
 
@@ -627,7 +641,7 @@ def get_password_reset(conn, %{"nickname" => nickname}) do
   def force_password_reset(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do
     users = nicknames |> Enum.map(&User.get_cached_by_nickname/1)
 
-    Enum.map(users, &User.force_password_reset_async/1)
+    Enum.each(users, &User.force_password_reset_async/1)
 
     ModerationLog.insert_log(%{
       actor: admin,
|
||||||
def list_reports(conn, params) do
|
def list_reports(conn, params) do
|
||||||
{page, page_size} = page_params(params)
|
{page, page_size} = page_params(params)
|
||||||
|
|
||||||
|
reports = Utils.get_reports(params, page, page_size)
|
||||||
|
|
||||||
conn
|
conn
|
||||||
|> put_view(ReportView)
|
|> put_view(ReportView)
|
||||||
|> render("index.json", %{reports: Utils.get_reports(params, page, page_size)})
|
|> render("index.json", %{reports: reports})
|
||||||
end
|
end
|
||||||
|
|
||||||
def list_grouped_reports(conn, _params) do
|
def list_grouped_reports(conn, _params) do
|
||||||
|
@ -687,32 +703,39 @@ def reports_update(%{assigns: %{user: admin}} = conn, %{"reports" => reports}) d
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def report_respond(%{assigns: %{user: user}} = conn, %{"id" => id} = params) do
|
def report_notes_create(%{assigns: %{user: user}} = conn, %{
|
||||||
with false <- is_nil(params["status"]),
|
"id" => report_id,
|
||||||
%Activity{} <- Activity.get_by_id(id) do
|
"content" => content
|
||||||
params =
|
}) do
|
||||||
params
|
with {:ok, _} <- ReportNote.create(user.id, report_id, content) do
|
||||||
|> Map.put("in_reply_to_status_id", id)
|
|
||||||
|> Map.put("visibility", "direct")
|
|
||||||
|
|
||||||
{:ok, activity} = CommonAPI.post(user, params)
|
|
||||||
|
|
||||||
ModerationLog.insert_log(%{
|
ModerationLog.insert_log(%{
|
||||||
action: "report_response",
|
action: "report_note",
|
||||||
actor: user,
|
actor: user,
|
||||||
subject: activity,
|
subject: Activity.get_by_id(report_id),
|
||||||
text: params["status"]
|
text: content
|
||||||
})
|
})
|
||||||
|
|
||||||
conn
|
json_response(conn, :no_content, "")
|
||||||
|> put_view(StatusView)
|
|
||||||
|> render("show.json", %{activity: activity})
|
|
||||||
else
|
else
|
||||||
true ->
|
_ -> json_response(conn, :bad_request, "")
|
||||||
{:param_cast, nil}
|
end
|
||||||
|
end
|
||||||
|
|
||||||
nil ->
|
def report_notes_delete(%{assigns: %{user: user}} = conn, %{
|
||||||
{:error, :not_found}
|
"id" => note_id,
|
||||||
|
"report_id" => report_id
|
||||||
|
}) do
|
||||||
|
with {:ok, note} <- ReportNote.destroy(note_id) do
|
||||||
|
ModerationLog.insert_log(%{
|
||||||
|
action: "report_note_delete",
|
||||||
|
actor: user,
|
||||||
|
subject: Activity.get_by_id(report_id),
|
||||||
|
text: note.content
|
||||||
|
})
|
||||||
|
|
||||||
|
json_response(conn, :no_content, "")
|
||||||
|
else
|
||||||
|
_ -> json_response(conn, :bad_request, "")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -764,50 +787,133 @@ def list_log(conn, params) do
|
||||||
|> render("index.json", %{log: log})
|
|> render("index.json", %{log: log})
|
||||||
end
|
end
|
||||||
|
|
||||||
def migrate_to_db(conn, _params) do
|
def config_descriptions(conn, _params) do
|
||||||
Mix.Tasks.Pleroma.Config.run(["migrate_to_db"])
|
conn
|
||||||
json(conn, %{})
|
|> Plug.Conn.put_resp_content_type("application/json")
|
||||||
|
|> Plug.Conn.send_resp(200, @descriptions_json)
|
||||||
end
|
end
|
||||||
|
|
||||||
def migrate_from_db(conn, _params) do
|
def migrate_from_db(conn, _params) do
|
||||||
Mix.Tasks.Pleroma.Config.run(["migrate_from_db", Pleroma.Config.get(:env), "true"])
|
with :ok <- configurable_from_database(conn) do
|
||||||
|
Mix.Tasks.Pleroma.Config.run([
|
||||||
|
"migrate_from_db",
|
||||||
|
"--env",
|
||||||
|
to_string(Pleroma.Config.get(:env)),
|
||||||
|
"-d"
|
||||||
|
])
|
||||||
|
|
||||||
json(conn, %{})
|
json(conn, %{})
|
||||||
end
|
end
|
||||||
|
end
|
||||||
|
|
||||||
def config_show(conn, _params) do
|
def config_show(conn, %{"only_db" => true}) do
|
||||||
configs = Pleroma.Repo.all(Config)
|
with :ok <- configurable_from_database(conn) do
|
||||||
|
configs = Pleroma.Repo.all(ConfigDB)
|
||||||
|
|
||||||
|
if configs == [] do
|
||||||
|
errors(
|
||||||
|
conn,
|
||||||
|
{:error, "To use configuration from database migrate your settings to database."}
|
||||||
|
)
|
||||||
|
else
|
||||||
conn
|
conn
|
||||||
|> put_view(ConfigView)
|
|> put_view(ConfigView)
|
||||||
|> render("index.json", %{configs: configs})
|
|> render("index.json", %{configs: configs})
|
||||||
end
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def config_show(conn, _params) do
|
||||||
|
with :ok <- configurable_from_database(conn) do
|
||||||
|
configs = ConfigDB.get_all_as_keyword()
|
||||||
|
|
||||||
|
if configs == [] do
|
||||||
|
errors(
|
||||||
|
conn,
|
||||||
|
{:error, "To use configuration from database migrate your settings to database."}
|
||||||
|
)
|
||||||
|
else
|
||||||
|
merged =
|
||||||
|
Pleroma.Config.Holder.config()
|
||||||
|
|> ConfigDB.merge(configs)
|
||||||
|
|> Enum.map(fn {group, values} ->
|
||||||
|
Enum.map(values, fn {key, value} ->
|
||||||
|
db =
|
||||||
|
if configs[group][key] do
|
||||||
|
ConfigDB.get_db_keys(configs[group][key], key)
|
||||||
|
end
|
||||||
|
|
||||||
|
db_value = configs[group][key]
|
||||||
|
|
||||||
|
merged_value =
|
||||||
|
if !is_nil(db_value) and Keyword.keyword?(db_value) and
|
||||||
|
ConfigDB.sub_key_full_update?(group, key, Keyword.keys(db_value)) do
|
||||||
|
ConfigDB.merge_group(group, key, value, db_value)
|
||||||
|
else
|
||||||
|
value
|
||||||
|
end
|
||||||
|
|
||||||
|
setting = %{
|
||||||
|
group: ConfigDB.convert(group),
|
||||||
|
key: ConfigDB.convert(key),
|
||||||
|
value: ConfigDB.convert(merged_value)
|
||||||
|
}
|
||||||
|
|
||||||
|
if db, do: Map.put(setting, :db, db), else: setting
|
||||||
|
end)
|
||||||
|
end)
|
||||||
|
|> List.flatten()
|
||||||
|
|
||||||
|
json(conn, %{configs: merged})
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
def config_update(conn, %{"configs" => configs}) do
|
def config_update(conn, %{"configs" => configs}) do
|
||||||
updated =
|
with :ok <- configurable_from_database(conn) do
|
||||||
if Pleroma.Config.get([:instance, :dynamic_configuration]) do
|
{_errors, results} =
|
||||||
updated =
|
|
||||||
Enum.map(configs, fn
|
Enum.map(configs, fn
|
||||||
%{"group" => group, "key" => key, "delete" => "true"} = params ->
|
%{"group" => group, "key" => key, "delete" => true} = params ->
|
||||||
{:ok, config} = Config.delete(%{group: group, key: key, subkeys: params["subkeys"]})
|
ConfigDB.delete(%{group: group, key: key, subkeys: params["subkeys"]})
|
||||||
config
|
|
||||||
|
|
||||||
%{"group" => group, "key" => key, "value" => value} ->
|
%{"group" => group, "key" => key, "value" => value} ->
|
||||||
{:ok, config} = Config.update_or_create(%{group: group, key: key, value: value})
|
ConfigDB.update_or_create(%{group: group, key: key, value: value})
|
||||||
config
|
|
||||||
end)
|
end)
|
||||||
|> Enum.reject(&is_nil(&1))
|
|> Enum.split_with(fn result -> elem(result, 0) == :error end)
|
||||||
|
|
||||||
Pleroma.Config.TransferTask.load_and_update_env()
|
{deleted, updated} =
|
||||||
Mix.Tasks.Pleroma.Config.run(["migrate_from_db", Pleroma.Config.get(:env), "false"])
|
results
|
||||||
updated
|
|> Enum.map(fn {:ok, config} ->
|
||||||
else
|
Map.put(config, :db, ConfigDB.get_db_keys(config))
|
||||||
[]
|
end)
|
||||||
end
|
|> Enum.split_with(fn config ->
|
||||||
|
Ecto.get_meta(config, :state) == :deleted
|
||||||
|
end)
|
||||||
|
|
||||||
|
Pleroma.Config.TransferTask.load_and_update_env(deleted)
|
||||||
|
|
||||||
|
Mix.Tasks.Pleroma.Config.run([
|
||||||
|
"migrate_from_db",
|
||||||
|
"--env",
|
||||||
|
to_string(Pleroma.Config.get(:env))
|
||||||
|
])
|
||||||
|
|
||||||
conn
|
conn
|
||||||
|> put_view(ConfigView)
|
|> put_view(ConfigView)
|
||||||
|> render("index.json", %{configs: updated})
|
|> render("index.json", %{configs: updated})
|
||||||
end
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp configurable_from_database(conn) do
|
||||||
|
if Pleroma.Config.get(:configurable_from_database) do
|
||||||
|
:ok
|
||||||
|
else
|
||||||
|
errors(
|
||||||
|
conn,
|
||||||
|
{:error, "To use this endpoint you need to enable configuration from database."}
|
||||||
|
)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
def reload_emoji(conn, _params) do
|
def reload_emoji(conn, _params) do
|
||||||
Pleroma.Emoji.reload()
|
Pleroma.Emoji.reload()
|
||||||
|
|
|
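
These endpoints now refuse to run unless configurable_from_database is enabled, and config_update receives a list of per-key changes where delete is a real boolean. A sketch of the request body shape the controller pattern-matches on (the ":group"/":key"/tuple encoding follows the existing admin API convention and is illustrative here):

%{
  "configs" => [
    %{
      "group" => ":pleroma",
      "key" => ":instance",
      "value" => [%{"tuple" => [":name", "My instance"]}]
    },
    %{"group" => ":pleroma", "key" => ":mrf_simple", "delete" => true, "subkeys" => [":reject"]}
  ]
}
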
@ -1,182 +0,0 @@
|
||||||
# Pleroma: A lightweight social networking server
|
|
||||||
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
|
||||||
|
|
||||||
defmodule Pleroma.Web.AdminAPI.Config do
|
|
||||||
use Ecto.Schema
|
|
||||||
import Ecto.Changeset
|
|
||||||
import Pleroma.Web.Gettext
|
|
||||||
alias __MODULE__
|
|
||||||
alias Pleroma.Repo
|
|
||||||
|
|
||||||
@type t :: %__MODULE__{}
|
|
||||||
|
|
||||||
schema "config" do
|
|
||||||
field(:key, :string)
|
|
||||||
field(:group, :string)
|
|
||||||
field(:value, :binary)
|
|
||||||
|
|
||||||
timestamps()
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec get_by_params(map()) :: Config.t() | nil
|
|
||||||
def get_by_params(params), do: Repo.get_by(Config, params)
|
|
||||||
|
|
||||||
@spec changeset(Config.t(), map()) :: Changeset.t()
|
|
||||||
def changeset(config, params \\ %{}) do
|
|
||||||
config
|
|
||||||
|> cast(params, [:key, :group, :value])
|
|
||||||
|> validate_required([:key, :group, :value])
|
|
||||||
|> unique_constraint(:key, name: :config_group_key_index)
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec create(map()) :: {:ok, Config.t()} | {:error, Changeset.t()}
|
|
||||||
def create(params) do
|
|
||||||
%Config{}
|
|
||||||
|> changeset(Map.put(params, :value, transform(params[:value])))
|
|
||||||
|> Repo.insert()
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec update(Config.t(), map()) :: {:ok, Config} | {:error, Changeset.t()}
|
|
||||||
def update(%Config{} = config, %{value: value}) do
|
|
||||||
config
|
|
||||||
|> change(value: transform(value))
|
|
||||||
|> Repo.update()
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec update_or_create(map()) :: {:ok, Config.t()} | {:error, Changeset.t()}
|
|
||||||
def update_or_create(params) do
|
|
||||||
with %Config{} = config <- Config.get_by_params(Map.take(params, [:group, :key])) do
|
|
||||||
Config.update(config, params)
|
|
||||||
else
|
|
||||||
nil -> Config.create(params)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec delete(map()) :: {:ok, Config.t()} | {:error, Changeset.t()}
|
|
||||||
def delete(params) do
|
|
||||||
with %Config{} = config <- Config.get_by_params(Map.delete(params, :subkeys)) do
|
|
||||||
if params[:subkeys] do
|
|
||||||
updated_value =
|
|
||||||
Keyword.drop(
|
|
||||||
:erlang.binary_to_term(config.value),
|
|
||||||
Enum.map(params[:subkeys], &do_transform_string(&1))
|
|
||||||
)
|
|
||||||
|
|
||||||
Config.update(config, %{value: updated_value})
|
|
||||||
else
|
|
||||||
Repo.delete(config)
|
|
||||||
{:ok, nil}
|
|
||||||
end
|
|
||||||
else
|
|
||||||
nil ->
|
|
||||||
err =
|
|
||||||
dgettext("errors", "Config with params %{params} not found", params: inspect(params))
|
|
||||||
|
|
||||||
{:error, err}
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec from_binary(binary()) :: term()
|
|
||||||
def from_binary(binary), do: :erlang.binary_to_term(binary)
|
|
||||||
|
|
||||||
  @spec from_binary_with_convert(binary()) :: any()
  def from_binary_with_convert(binary) do
    from_binary(binary)
    |> do_convert()
  end

  defp do_convert(entity) when is_list(entity) do
    for v <- entity, into: [], do: do_convert(v)
  end

  defp do_convert(%Regex{} = entity), do: inspect(entity)

  defp do_convert(entity) when is_map(entity) do
    for {k, v} <- entity, into: %{}, do: {do_convert(k), do_convert(v)}
  end

  defp do_convert({:dispatch, [entity]}), do: %{"tuple" => [":dispatch", [inspect(entity)]]}
  defp do_convert({:partial_chain, entity}), do: %{"tuple" => [":partial_chain", inspect(entity)]}

  defp do_convert(entity) when is_tuple(entity),
    do: %{"tuple" => do_convert(Tuple.to_list(entity))}

  defp do_convert(entity) when is_boolean(entity) or is_number(entity) or is_nil(entity),
    do: entity

  defp do_convert(entity) when is_atom(entity) do
    string = to_string(entity)

    if String.starts_with?(string, "Elixir."),
      do: do_convert(string),
      else: ":" <> string
  end

  defp do_convert("Elixir." <> module_name), do: module_name

  defp do_convert(entity) when is_binary(entity), do: entity

  @spec transform(any()) :: binary()
  def transform(entity) when is_binary(entity) or is_map(entity) or is_list(entity) do
    :erlang.term_to_binary(do_transform(entity))
  end

  def transform(entity), do: :erlang.term_to_binary(entity)

  defp do_transform(%Regex{} = entity), do: entity

  defp do_transform(%{"tuple" => [":dispatch", [entity]]}) do
    {dispatch_settings, []} = do_eval(entity)
    {:dispatch, [dispatch_settings]}
  end

  defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
    {partial_chain, []} = do_eval(entity)
    {:partial_chain, partial_chain}
  end

  defp do_transform(%{"tuple" => entity}) do
    Enum.reduce(entity, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end)
  end

  defp do_transform(entity) when is_map(entity) do
    for {k, v} <- entity, into: %{}, do: {do_transform(k), do_transform(v)}
  end

  defp do_transform(entity) when is_list(entity) do
    for v <- entity, into: [], do: do_transform(v)
  end

  defp do_transform(entity) when is_binary(entity) do
    String.trim(entity)
    |> do_transform_string()
  end

  defp do_transform(entity), do: entity

  defp do_transform_string("~r/" <> pattern) do
    modificator = String.split(pattern, "/") |> List.last()
    pattern = String.trim_trailing(pattern, "/" <> modificator)

    case modificator do
      "" -> ~r/#{pattern}/
      "i" -> ~r/#{pattern}/i
      "u" -> ~r/#{pattern}/u
      "s" -> ~r/#{pattern}/s
    end
  end

  defp do_transform_string(":" <> atom), do: String.to_atom(atom)

  defp do_transform_string(value) do
    if String.starts_with?(value, "Pleroma") or String.starts_with?(value, "Phoenix"),
      do: String.to_existing_atom("Elixir." <> value),
      else: value
  end

  defp do_eval(entity) do
    cleaned_string = String.replace(entity, ~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "")
    Code.eval_string(cleaned_string, [], requires: [], macros: [])
  end
end
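For orientation: transform/1 above packs the JSON-friendly values exchanged over the admin config API into Erlang external-term binaries, and from_binary_with_convert/1 turns stored terms back into that JSON-friendly shape. A minimal sketch of the round trip, assuming these functions live in Pleroma.ConfigDB (as the config_view change below suggests) and that from_binary/1, defined elsewhere in the module, is essentially :erlang.binary_to_term/1; the sample value is illustrative:

  # ":uploader" becomes the atom :uploader and "Pleroma.Uploaders.Local" becomes the
  # module alias before the map is packed with :erlang.term_to_binary/1.
  binary = Pleroma.ConfigDB.transform(%{":uploader" => "Pleroma.Uploaders.Local"})

  # Converting back re-stringifies atoms and modules for the API; regexes travel as
  # strings such as "~r/example/i" and tuples as %{"tuple" => [...]} maps.
  %{":uploader" => "Pleroma.Uploaders.Local"} = Pleroma.ConfigDB.from_binary_with_convert(binary)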
@@ -12,10 +12,16 @@ def render("index.json", %{configs: configs}) do
  end

  def render("show.json", %{config: config}) do
-    %{
+    map = %{
      key: config.key,
      group: config.group,
-      value: Pleroma.Web.AdminAPI.Config.from_binary_with_convert(config.value)
+      value: Pleroma.ConfigDB.from_binary_with_convert(config.value)
    }
+
+    if config.db != [] do
+      Map.put(map, :db, config.db)
+    else
+      map
+    end
  end
end
@@ -39,7 +39,8 @@ def render("show.json", %{report: report, user: user, account: account, statuses
      content: content,
      created_at: created_at,
      statuses: StatusView.render("index.json", %{activities: statuses, as: :activity}),
-      state: report.data["state"]
+      state: report.data["state"],
+      notes: render(__MODULE__, "index_notes.json", %{notes: report.report_notes})
    }
  end

@@ -69,6 +70,28 @@ def render("index_grouped.json", %{groups: groups}) do
    }
  end

+  def render("index_notes.json", %{notes: notes}) when is_list(notes) do
+    Enum.map(notes, &render(__MODULE__, "show_note.json", &1))
+  end
+
+  def render("index_notes.json", _), do: []
+
+  def render("show_note.json", %{
+        id: id,
+        content: content,
+        user_id: user_id,
+        inserted_at: inserted_at
+      }) do
+    user = User.get_by_id(user_id)
+
+    %{
+      id: id,
+      content: content,
+      user: merge_account_views(user),
+      created_at: Utils.to_masto_date(inserted_at)
+    }
+  end
+
  defp merge_account_views(%User{} = user) do
    Pleroma.Web.MastodonAPI.AccountView.render("show.json", %{user: user})
    |> Map.merge(Pleroma.Web.AdminAPI.AccountView.render("show.json", %{user: user}))
42 lib/pleroma/web/admin_api/views/status_view.ex Normal file

@@ -0,0 +1,42 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.AdminAPI.StatusView do
+  use Pleroma.Web, :view
+
+  require Pleroma.Constants
+
+  alias Pleroma.User
+
+  def render("index.json", opts) do
+    render_many(opts.activities, __MODULE__, "show.json", opts)
+  end
+
+  def render("show.json", %{activity: %{data: %{"object" => _object}} = activity} = opts) do
+    user = get_user(activity.data["actor"])
+
+    Pleroma.Web.MastodonAPI.StatusView.render("show.json", opts)
+    |> Map.merge(%{account: merge_account_views(user)})
+  end
+
+  defp merge_account_views(%User{} = user) do
+    Pleroma.Web.MastodonAPI.AccountView.render("show.json", %{user: user})
+    |> Map.merge(Pleroma.Web.AdminAPI.AccountView.render("show.json", %{user: user}))
+  end
+
+  defp merge_account_views(_), do: %{}
+
+  defp get_user(ap_id) do
+    cond do
+      user = User.get_cached_by_ap_id(ap_id) ->
+        user
+
+      user = User.get_by_guessed_nickname(ap_id) ->
+        user
+
+      true ->
+        User.error_user(ap_id)
+    end
+  end
+end
@@ -85,9 +85,13 @@ def delete(activity_id, user) do
  def repeat(id_or_ap_id, user, params \\ %{}) do
    with %Activity{} = activity <- get_by_id_or_ap_id(id_or_ap_id),
         object <- Object.normalize(activity),
-         nil <- Utils.get_existing_announce(user.ap_id, object),
+         announce_activity <- Utils.get_existing_announce(user.ap_id, object),
         public <- public_announce?(object, params) do
+      if announce_activity do
+        {:ok, announce_activity, object}
+      else
        ActivityPub.announce(user, object, nil, true, public)
+      end
    else
      _ -> {:error, dgettext("errors", "Could not repeat")}
    end
@@ -105,8 +109,12 @@ def unrepeat(id_or_ap_id, user) do
  def favorite(id_or_ap_id, user) do
    with %Activity{} = activity <- get_by_id_or_ap_id(id_or_ap_id),
         object <- Object.normalize(activity),
-         nil <- Utils.get_existing_like(user.ap_id, object) do
+         like_activity <- Utils.get_existing_like(user.ap_id, object) do
+      if like_activity do
+        {:ok, like_activity, object}
+      else
        ActivityPub.like(user, object)
+      end
    else
      _ -> {:error, dgettext("errors", "Could not favorite")}
    end
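The two hunks above make repeating and favouriting idempotent: when an announce or like already exists, it is handed back instead of falling through to the error branch. A rough sketch of the intended behaviour, with return shapes assumed rather than taken from the diff:

  # First call goes through ActivityPub.announce/5 as before.
  {:ok, _} = CommonAPI.repeat(activity.id, user)

  # Repeating again no longer yields {:error, "Could not repeat"};
  # the existing announce activity is returned instead.
  {:ok, %Activity{} = _existing_announce, _object} = CommonAPI.repeat(activity.id, user)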
@@ -59,16 +59,9 @@ defmodule Pleroma.Web.Endpoint do

  plug(Pleroma.Plugs.TrailingFormatPlug)
  plug(Plug.RequestId)
-  plug(Plug.Logger)
+  plug(Plug.Logger, log: :debug)

-  plug(
-    Plug.Parsers,
-    parsers: [:urlencoded, :multipart, :json],
-    pass: ["*/*"],
-    json_decoder: Jason,
-    length: Pleroma.Config.get([:instance, :upload_limit]),
-    body_reader: {Pleroma.Web.Plugs.DigestPlug, :read_body, []}
-  )
+  plug(Pleroma.Plugs.Parsers)

  plug(Plug.MethodOverride)
  plug(Plug.Head)
@@ -58,7 +58,7 @@ def perform(:publish, activity) do
  end

  def perform(:incoming_ap_doc, params) do
-    Logger.info("Handling incoming AP activity")
+    Logger.debug("Handling incoming AP activity")

    params = Utils.normalize_params(params)

@@ -71,13 +71,13 @@ def perform(:incoming_ap_doc, params) do
      {:ok, activity}
    else
      %Activity{} ->
-        Logger.info("Already had #{params["id"]}")
+        Logger.debug("Already had #{params["id"]}")
        :error

      _e ->
        # Just drop those for now
-        Logger.info("Unhandled activity")
-        Logger.info(Jason.encode!(params, pretty: true))
+        Logger.debug("Unhandled activity")
+        Logger.debug(Jason.encode!(params, pretty: true))
        :error
    end
  end
@@ -47,7 +47,7 @@ def publish(%User{} = user, %Activity{} = activity) do
    Config.get([:instance, :federation_publisher_modules])
    |> Enum.each(fn module ->
      if module.is_representable?(activity) do
-        Logger.info("Publishing #{activity.data["id"]} using #{inspect(module)}")
+        Logger.debug("Publishing #{activity.data["id"]} using #{inspect(module)}")
        module.publish(user, activity)
      end
    end)
@@ -20,19 +20,22 @@ defmodule Pleroma.Web.MastoFEController do
  plug(Pleroma.Plugs.EnsurePublicOrAuthenticatedPlug when action != :index)

  @doc "GET /web/*path"
-  def index(%{assigns: %{user: user}} = conn, _params) do
-    token = get_session(conn, :oauth_token)
+  def index(%{assigns: %{user: user, token: token}} = conn, _params)
+      when not is_nil(user) and not is_nil(token) do

-    if user && token do
    conn
    |> put_layout(false)
-    |> render("index.html", token: token, user: user, custom_emojis: Pleroma.Emoji.get_all())
-    else
+    |> render("index.html",
+      token: token.token,
+      user: user,
+      custom_emojis: Pleroma.Emoji.get_all()
+    )
+  end
+
+  def index(conn, _params) do
    conn
    |> put_session(:return_to, conn.request_path)
    |> redirect(to: "/web/login")
  end
-  end

  @doc "GET /web/manifest.json"
  def manifest(conn, _params) do
@@ -188,6 +188,7 @@ def update_credentials(%{assigns: %{user: original_user}} = conn, params) do
        {:ok, Map.merge(user.pleroma_settings_store, value)}
      end)
      |> add_if_present(params, "default_scope", :default_scope)
+      |> add_if_present(params, "actor_type", :actor_type)

    emojis_text = (user_params["display_name"] || "") <> (user_params["note"] || "")

@@ -23,6 +23,23 @@ defmodule Pleroma.Web.MastodonAPI.NotificationController do
  plug(Pleroma.Plugs.EnsurePublicOrAuthenticatedPlug)

  # GET /api/v1/notifications
+  def index(conn, %{"account_id" => account_id} = params) do
+    case Pleroma.User.get_cached_by_id(account_id) do
+      %{ap_id: account_ap_id} ->
+        params =
+          params
+          |> Map.delete("account_id")
+          |> Map.put("account_ap_id", account_ap_id)
+
+        index(conn, params)
+
+      _ ->
+        conn
+        |> put_status(:not_found)
+        |> json(%{"error" => "Account is not found"})
+    end
+  end
+
  def index(%{assigns: %{user: user}} = conn, params) do
    notifications = MastodonAPI.get_notifications(user, params)

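Together with the account_ap_id plumbing in the MastodonAPI and notification query changes further down, this adds an account_id filter to the notifications endpoint. A hypothetical exchange, with the ids and ap_id purely illustrative:

  # GET /api/v1/notifications?account_id=9nBWhdTNnQi2MBWEEA
  # The new clause rewrites the params to
  #   %{"account_ap_id" => "https://example.tld/users/alice", ...}
  # and re-enters index/2, so only notifications caused by that actor are returned.
  #
  # GET /api/v1/notifications?account_id=unknown
  # => 404, %{"error" => "Account is not found"}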
@@ -43,7 +43,7 @@ defp do_search(version, %{assigns: %{user: user}} = conn, %{"q" => query} = para
    result =
      default_values
      |> Enum.map(fn {resource, default_value} ->
-        if params["type"] == nil or params["type"] == resource do
+        if params["type"] in [nil, resource] do
          {resource, fn -> resource_search(version, resource, query, options) end}
        else
          {resource, fn -> default_value end}
@@ -346,15 +346,11 @@ def context(%{assigns: %{user: user}} = conn, %{"id" => id}) do

  @doc "GET /api/v1/favourites"
  def favourites(%{assigns: %{user: user}} = conn, params) do
-    params =
-      params
-      |> Map.put("type", "Create")
-      |> Map.put("favorited_by", user.ap_id)
-      |> Map.put("blocking_user", user)
-
    activities =
-      ActivityPub.fetch_activities([], params)
-      |> Enum.reverse()
+      ActivityPub.fetch_favourites(
+        user,
+        Map.take(params, Pleroma.Pagination.page_keys())
+      )

    conn
    |> add_link_headers(activities)
@@ -6,9 +6,9 @@ defmodule Pleroma.Web.MastodonAPI.SubscriptionController do
  @moduledoc "The module represents functions to manage user subscriptions."
  use Pleroma.Web, :controller

+  alias Pleroma.Web.MastodonAPI.PushSubscriptionView, as: View
  alias Pleroma.Web.Push
  alias Pleroma.Web.Push.Subscription
-  alias Pleroma.Web.MastodonAPI.PushSubscriptionView, as: View

  action_fallback(:errors)

@@ -77,10 +77,7 @@ def public(%{assigns: %{user: user}} = conn, params) do
    |> render("index.json", activities: activities, for: user, as: :activity)
  end

-  # GET /api/v1/timelines/tag/:tag
-  def hashtag(%{assigns: %{user: user}} = conn, params) do
-    local_only = truthy_param?(params["local"])
-
+  def hashtag_fetching(params, user, local_only) do
    tags =
      [params["tag"], params["any"]]
      |> List.flatten()
|
@ -98,7 +95,7 @@ def hashtag(%{assigns: %{user: user}} = conn, params) do
|
||||||
|> Map.get("none", [])
|
|> Map.get("none", [])
|
||||||
|> Enum.map(&String.downcase(&1))
|
|> Enum.map(&String.downcase(&1))
|
||||||
|
|
||||||
activities =
|
_activities =
|
||||||
params
|
params
|
||||||
|> Map.put("type", "Create")
|
|> Map.put("type", "Create")
|
||||||
|> Map.put("local_only", local_only)
|
|> Map.put("local_only", local_only)
|
||||||
|
@ -109,6 +106,13 @@ def hashtag(%{assigns: %{user: user}} = conn, params) do
|
||||||
|> Map.put("tag_all", tag_all)
|
|> Map.put("tag_all", tag_all)
|
||||||
|> Map.put("tag_reject", tag_reject)
|
|> Map.put("tag_reject", tag_reject)
|
||||||
|> ActivityPub.fetch_public_activities()
|
|> ActivityPub.fetch_public_activities()
|
||||||
|
end
|
||||||
|
|
||||||
|
# GET /api/v1/timelines/tag/:tag
|
||||||
|
def hashtag(%{assigns: %{user: user}} = conn, params) do
|
||||||
|
local_only = truthy_param?(params["local"])
|
||||||
|
|
||||||
|
activities = hashtag_fetching(params, user, local_only)
|
||||||
|
|
||||||
conn
|
conn
|
||||||
|> add_link_headers(activities, %{"local" => local_only})
|
|> add_link_headers(activities, %{"local" => local_only})
|
||||||
|
|
|
@@ -56,6 +56,7 @@ def get_notifications(user, params \\ %{}) do
    user
    |> Notification.for_user_query(options)
    |> restrict(:exclude_types, options)
+    |> restrict(:account_ap_id, options)
    |> Pagination.fetch_paginated(params)
  end

@@ -71,7 +72,8 @@ defp cast_params(params) do
      exclude_visibilities: {:array, :string},
      reblogs: :boolean,
      with_muted: :boolean,
-      with_move: :boolean
+      with_move: :boolean,
+      account_ap_id: :string
    }

    changeset = cast({%{}, param_types}, params, Map.keys(param_types))
@@ -88,5 +90,9 @@ defp restrict(query, :exclude_types, %{exclude_types: mastodon_types = [_ | _]})
    |> where([q, a], not fragment("? @> ARRAY[?->>'type']::varchar[]", ^ap_types, a.data))
  end

+  defp restrict(query, :account_ap_id, %{account_ap_id: account_ap_id}) do
+    where(query, [n, a], a.actor == ^account_ap_id)
+  end
+
  defp restrict(query, _, _), do: query
end
@@ -86,7 +86,7 @@ defp do_render("show.json", %{user: user} = opts) do
        0
      end

-    bot = (user.source_data["type"] || "Person") in ["Application", "Service"]
+    bot = user.actor_type in ["Application", "Service"]

    emojis =
      (user.source_data["tag"] || [])
|
||||||
sensitive: false,
|
sensitive: false,
|
||||||
fields: user.raw_fields,
|
fields: user.raw_fields,
|
||||||
pleroma: %{
|
pleroma: %{
|
||||||
discoverable: user.discoverable
|
discoverable: user.discoverable,
|
||||||
|
actor_type: user.actor_type
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
|
|
|
@@ -7,10 +7,6 @@ defmodule Pleroma.Web.MastodonAPI.AppView do

  alias Pleroma.Web.OAuth.App

-  @vapid_key :web_push_encryption
-             |> Application.get_env(:vapid_details, [])
-             |> Keyword.get(:public_key)
-
  def render("show.json", %{app: %App{} = app}) do
    %{
      id: app.id |> to_string,
@@ -32,8 +28,10 @@ def render("short.json", %{app: %App{website: webiste, client_name: name}}) do
  end

  defp with_vapid_key(data) do
-    if @vapid_key do
-      Map.put(data, "vapid_key", @vapid_key)
+    vapid_key = Application.get_env(:web_push_encryption, :vapid_details, [])[:public_key]
+
+    if vapid_key do
+      Map.put(data, "vapid_key", vapid_key)
    else
      data
    end
@@ -37,18 +37,37 @@ def render("show.json", %{
      }

      case mastodon_type do
-        "mention" -> put_status(response, activity, user)
-        "favourite" -> put_status(response, parent_activity, user)
-        "reblog" -> put_status(response, parent_activity, user)
-        "move" -> put_target(response, activity, user)
-        "follow" -> response
-        _ -> nil
+        "mention" ->
+          put_status(response, activity, user)
+
+        "favourite" ->
+          put_status(response, parent_activity, user)
+
+        "reblog" ->
+          put_status(response, parent_activity, user)
+
+        "move" ->
+          put_target(response, activity, user)
+
+        "follow" ->
+          response
+
+        "pleroma:emoji_reaction" ->
+          put_status(response, parent_activity, user) |> put_emoji(activity)
+
+        _ ->
+          nil
      end
    else
      _ -> nil
    end
  end

+  defp put_emoji(response, activity) do
+    response
+    |> Map.put(:emoji, activity.data["content"])
+  end
+
  defp put_status(response, activity, user) do
    Map.put(response, :status, StatusView.render("show.json", %{activity: activity, for: user}))
  end
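The new "pleroma:emoji_reaction" branch above attaches both the reacted-to status and the emoji itself to the notification. A sketch of the resulting payload shape (field values illustrative, surrounding fields assumed unchanged):

  %{
    id: "572",
    type: "pleroma:emoji_reaction",
    emoji: "🍵",
    created_at: "2019-12-06T12:00:00.000Z",
    account: %{...},
    status: %{...}
  }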
Some files were not shown because too many files have changed in this diff.