forked from AkkomaGang/akkoma

Commit 657277ffc0: Resolve conflicts

548 changed files with 7933 additions and 1976 deletions

.gitlab-ci.yml (103 changes)
@@ -16,6 +16,7 @@ stages:
   - build
   - test
   - deploy
+  - release

 before_script:
   - mix local.hex --force

@@ -42,6 +43,7 @@ docs-build:
   paths:
     - priv/static/doc

 unit-testing:
   stage: test
   services:

@@ -140,3 +142,104 @@ stop_review_app:
     - ssh-keyscan -H "pleroma.online" >> ~/.ssh/known_hosts
     - ssh -t dokku@pleroma.online -- --force apps:destroy "$CI_ENVIRONMENT_SLUG"
     - ssh -t dokku@pleroma.online -- --force postgres:destroy $(echo $CI_ENVIRONMENT_SLUG | sed -e 's/-/_/g')_db
+
+amd64:
+  stage: release
+  # TODO: Replace with upstream image when 1.9.0 comes out
+  image: rinpatch/elixir:1.9.0-rc.0
+  only: &release-only
+    - master@pleroma/pleroma
+    - develop@pleroma/pleroma
+  artifacts: &release-artifacts
+    name: "pleroma-$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA-$CI_JOB_NAME"
+    paths:
+      - release/*
+    # Ideally it would be never for master branch and with the next commit for develop,
+    # but Gitlab does not support neither `only` for artifacts
+    # nor setting it to never from .gitlab-ci.yml
+    # nor expiring with the next commit
+    expire_in: 42 yrs
+
+  cache: &release-cache
+    key: $CI_COMMIT_REF_NAME-$CI_JOB_NAME
+    paths:
+      - deps
+  variables: &release-variables
+    MIX_ENV: prod
+  before_script: &before-release
+    - echo "import Mix.Config" > config/prod.secret.exs
+    - mix local.hex --force
+    - mix local.rebar --force
+  script: &release
+    - mix deps.get --only prod
+    - mkdir release
+    - export PLEROMA_BUILD_BRANCH=$CI_COMMIT_REF_NAME
+    - mix release --path release
+
+amd64-musl:
+  stage: release
+  artifacts: *release-artifacts
+  only: *release-only
+  # TODO: Replace with upstream image when 1.9.0 comes out
+  image: rinpatch/elixir:1.9.0-rc.0-alpine
+  cache: *release-cache
+  variables: *release-variables
+  before_script: &before-release-musl
+    - apk add git gcc g++ musl-dev make
+    - echo "import Mix.Config" > config/prod.secret.exs
+    - mix local.hex --force
+    - mix local.rebar --force
+  script: *release
+
+arm:
+  stage: release
+  artifacts: *release-artifacts
+  only: *release-only
+  tags:
+    - arm32
+  # TODO: Replace with upstream image when 1.9.0 comes out
+  image: rinpatch/elixir:1.9.0-rc.0-arm
+  cache: *release-cache
+  variables: *release-variables
+  before_script: *before-release
+  script: *release
+
+arm-musl:
+  stage: release
+  artifacts: *release-artifacts
+  only: *release-only
+  tags:
+    - arm32
+  # TODO: Replace with upstream image when 1.9.0 comes out
+  image: rinpatch/elixir:1.9.0-rc.0-arm-alpine
+  cache: *release-cache
+  variables: *release-variables
+  before_script: *before-release-musl
+  script: *release
+
+arm64:
+  stage: release
+  artifacts: *release-artifacts
+  only: *release-only
+  tags:
+    - arm
+  # TODO: Replace with upstream image when 1.9.0 comes out
+  image: rinpatch/elixir:1.9.0-rc.0-arm64
+  cache: *release-cache
+  variables: *release-variables
+  before_script: *before-release
+  script: *release
+
+arm64-musl:
+  stage: release
+  artifacts: *release-artifacts
+  only: *release-only
+  tags:
+    - arm
+  # TODO: Replace with upstream image when 1.9.0 comes out
+  image: rinpatch/elixir:1.9.0-rc.0-arm64-alpine
+  cache: *release-cache
+  variables: *release-variables
+  before_script: *before-release-musl
+  script: *release

CHANGELOG.md (24 changes)
@@ -3,7 +3,11 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

-## [unreleased]
+## [1.0.0] - 2019-06-29
+### Security
+- Mastodon API: Fix display names not being sanitized
+- Rich media: Do not crawl private IP ranges
+
 ### Added
 - Digest email for inactive users
 - Add a generic settings store for frontends / clients to use.

@@ -12,6 +16,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - [MongooseIM](https://github.com/esl/MongooseIM) http authentication support.
 - LDAP authentication
 - External OAuth provider authentication
+- Support for building a release using [`mix release`](https://hexdocs.pm/mix/master/Mix.Tasks.Release.html)
 - A [job queue](https://git.pleroma.social/pleroma/pleroma_job_queue) for federation, emails, web push, etc.
 - [Prometheus](https://prometheus.io/) metrics
 - Support for Mastodon's remote interaction

@@ -19,9 +24,12 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Mix Tasks: `mix pleroma.database remove_embedded_objects`
 - Mix Tasks: `mix pleroma.database update_users_following_followers_counts`
 - Mix Tasks: `mix pleroma.user toggle_confirmed`
+- Mix Tasks: `mix pleroma.config migrate_to_db`
+- Mix Tasks: `mix pleroma.config migrate_from_db`
 - Federation: Support for `Question` and `Answer` objects
 - Federation: Support for reports
 - Configuration: `poll_limits` option
+- Configuration: `pack_extensions` option
 - Configuration: `safe_dm_mentions` option
 - Configuration: `link_name` option
 - Configuration: `fetch_initial_posts` option

@@ -29,7 +37,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Configuration: Media proxy `whitelist` option
 - Configuration: `report_uri` option
 - Configuration: `email_notifications` option
-- Configuration: `limit_unauthenticated_to_local_content` option
+- Configuration: `limit_to_local_content` option
 - Pleroma API: User subscriptions
 - Pleroma API: Healthcheck endpoint
 - Pleroma API: `/api/v1/pleroma/mascot` per-user frontend mascot configuration endpoints

@@ -38,7 +46,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Admin API: added filters (role, tags, email, name) for users endpoint
 - Admin API: Endpoints for managing reports
 - Admin API: Endpoints for deleting and changing the scope of individual reported statuses
+- Admin API: Endpoints to view and change config settings.
 - AdminFE: initial release with basic user management accessible at /pleroma/admin/
+- Mastodon API: Add chat token to `verify_credentials` response
+- Mastodon API: Add background image setting to `update_credentials`
 - Mastodon API: [Scheduled statuses](https://docs.joinmastodon.org/api/rest/scheduled-statuses/)
 - Mastodon API: `/api/v1/notifications/destroy_multiple` (glitch-soc extension)
 - Mastodon API: `/api/v1/pleroma/accounts/:id/favourites` (API extension)

@@ -55,9 +66,16 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - MRF: Support for stripping avatars and banner images from specific instances (`mrf_simple`)
 - MRF: Support for running subchains.
 - Configuration: `skip_thread_containment` option
+- Configuration: `rate_limit` option. See `Pleroma.Plugs.RateLimiter` documentation for details.
+- MRF: Support for filtering out likely spam messages by rejecting posts from new users that contain links.
+- Configuration: `ignore_hosts` option
+- Configuration: `ignore_tld` option
+- Configuration: default syslog tag "Pleroma" is now lowercased to "pleroma"

 ### Changed
+- **Breaking:** bind to 127.0.0.1 instead of 0.0.0.0 by default
 - **Breaking:** Configuration: move from Pleroma.Mailer to Pleroma.Emails.Mailer
+- Thread containment / test for complete visibility will be skipped by default.
 - Enforcement of OAuth scopes
 - Add multiple use/time expiring invite token
 - Restyled OAuth pages to fit with Pleroma's default theme

@@ -66,6 +84,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Federation: Expand the audience of delete activities to all recipients of the deleted object
 - Federation: Removed `inReplyToStatusId` from objects
 - Configuration: Dedupe enabled by default
+- Configuration: Default log level in `prod` environment is now set to `warn`
 - Configuration: Added `extra_cookie_attrs` for setting non-standard cookie attributes. Defaults to ["SameSite=Lax"] so that remote follows work.
 - Timelines: Messages involving people you have blocked will be excluded from the timeline in all cases instead of just repeats.
 - Admin API: Move the user related API to `api/pleroma/admin/users`

@@ -91,6 +110,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Posts which are marked sensitive or tagged nsfw no longer have link previews.
 - HTTP connection timeout is now set to 10 seconds.
 - Respond with a 404 Not implemented JSON error message when requested API is not implemented
+- Rich Media: crawl only https URLs.

 ### Fixed
 - Follow requests don't get 'stuck' anymore.

@@ -15,9 +15,12 @@ For clients it supports both the [GNU Social API with Qvitter extensions](https:
 If you want to run your own server, feel free to contact us at @lain@pleroma.soykaf.com or in our dev chat at #pleroma on freenode or via matrix at <https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org>.

 ## Installation
+**Note:** The guide below may be outdated and in most cases shouldn't be used. Instead check out our [wiki](https://docs.pleroma.social) for platform-specific installation instructions, most likely [Installing on Linux using OTP releases](https://docs.pleroma.social/otp_en.html) is the guide you need.
+
+### OS/Distro packages
+Currently Pleroma is not packaged by any OS/Distros, but feel free to reach out to us at [#pleroma-dev on freenode](https://webchat.freenode.net/?channels=%23pleroma-dev) or via matrix at <https://matrix.heldscal.la/#/room/#freenode_#pleroma-dev:matrix.org> for assistance. If you want to change default options in your Pleroma package, please **discuss it with us first**.
+
 ### Docker

 While we don’t provide docker files, other people have written very good ones. Take a look at <https://github.com/angristan/docker-pleroma> or <https://github.com/sn0w/pleroma-docker>.

 ### Dependencies
@@ -99,6 +99,7 @@

 config :pleroma, :emoji,
   shortcode_globs: ["/emoji/custom/**/*.png"],
+  pack_extensions: [".png", ".gif"],
   groups: [
     # Put groups that have higher priority than defaults here. Example in `docs/config/custom_emoji.md`
     Custom: ["/emoji/*.png", "/emoji/**/*.png"]

@@ -139,6 +140,7 @@
   instrumenters: [Pleroma.Web.Endpoint.Instrumenter],
   url: [host: "localhost"],
   http: [
+    ip: {127, 0, 0, 1},
     dispatch: [
       {:_,
        [

@@ -167,7 +169,7 @@

 config :logger, :ex_syslogger,
   level: :debug,
-  ident: "Pleroma",
+  ident: "pleroma",
   format: "$metadata[$level] $message",
   metadata: [:request_id]

@@ -244,10 +246,9 @@
   safe_dm_mentions: false,
   healthcheck: false,
   remote_post_retention_days: 90,
-  skip_thread_containment: false,
-  limit_unauthenticated_to_local_content: true
+  skip_thread_containment: true,
+  limit_to_local_content: :unauthenticated,
+  dynamic_configuration: false

-config :pleroma, :app_account_creation, enabled: true, max_requests: 25, interval: 1800

 config :pleroma, :markup,
   # XXX - unfortunately, inline images must be enabled by default right now, because

@@ -330,7 +331,10 @@

 config :pleroma, :mrf_subchain, match_actor: %{}

-config :pleroma, :rich_media, enabled: true
+config :pleroma, :rich_media,
+  enabled: true,
+  ignore_hosts: [],
+  ignore_tld: ["local", "localdomain", "lan"]

 config :pleroma, :media_proxy,
   enabled: false,

@@ -362,8 +366,8 @@
   third_party_engine:
     "http://vinayaka.distsn.org/cgi-bin/vinayaka-user-match-suggestions-api.cgi?{{host}}+{{user}}",
   timeout: 300_000,
-  limit: 23,
+  limit: 40,
-  web: "https://vinayaka.distsn.org/?{{host}}+{{user}}"
+  web: "https://vinayaka.distsn.org"

 config :pleroma, :http_security,
   enabled: true,

@@ -443,6 +447,8 @@
   opts: [
     scheme: true,
     extra: true,
+    # TODO: Set to :no_scheme when it works properly
+    validate_tld: true,
     class: false,
     strip_prefix: false,
     new_window: false,

@@ -508,9 +514,15 @@

 config :pleroma, :database, rum_enabled: false

+config :pleroma, :env, Mix.env()
+
 config :http_signatures,
   adapter: Pleroma.Signature

+config :pleroma, :rate_limit,
+  search: [{1000, 10}, {1000, 30}],
+  app_account_creation: {1_800_000, 25}
+
 # Import environment specific config. This must remain at the bottom
 # of this file so it overrides the configuration defined above.
 import_config "#{Mix.env()}.exs"

@@ -59,3 +59,6 @@
     "!!! RUNNING IN LOCALHOST DEV MODE! !!!\nFEDERATION WON'T WORK UNTIL YOU CONFIGURE A dev.secret.exs"
   )
 end
+
+if File.exists?("./config/dev.exported_from_db.secret.exs"),
+  do: import_config("dev.exported_from_db.secret.exs")

@@ -17,8 +17,10 @@
   http: [port: 4000],
   protocol: "http"

+config :phoenix, serve_endpoints: true
+
 # Do not print debug messages in production
-config :logger, level: :info
+config :logger, level: :warn

 # ## SSL Support
 #

@@ -61,3 +63,6 @@
 # Finally import the config/prod.secret.exs
 # which should be versioned separately.
 import_config "prod.secret.exs"
+
+if File.exists?("./config/prod.exported_from_db.secret.exs"),
+  do: import_config("prod.exported_from_db.secret.exs")

config/releases.exs (new file, 19 lines)

import Config

config :pleroma, :instance, static_dir: "/var/lib/pleroma/static"
config :pleroma, Pleroma.Uploaders.Local, uploads: "/var/lib/pleroma/uploads"

config_path = System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs"

if File.exists?(config_path) do
  import_config config_path
else
  warning = [
    IO.ANSI.red(),
    IO.ANSI.bright(),
    "!!! #{config_path} not found! Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file",
    IO.ANSI.reset()
  ]

  IO.puts(warning)
end

@@ -27,7 +27,8 @@

 config :pleroma, :instance,
   email: "admin@example.com",
-  notify_email: "noreply@example.com"
+  notify_email: "noreply@example.com",
+  skip_thread_containment: false

 # Configure your database
 config :pleroma, Pleroma.Repo,

@@ -42,7 +43,11 @@
 config :pbkdf2_elixir, rounds: 1

 config :tesla, adapter: Tesla.Mock
-config :pleroma, :rich_media, enabled: false
+
+config :pleroma, :rich_media,
+  enabled: false,
+  ignore_hosts: [],
+  ignore_tld: ["local", "localdomain", "lan"]

 config :web_push_encryption, :vapid_details,
   subject: "mailto:administrator@example.com",

@@ -59,7 +64,7 @@
   total_user_limit: 3,
   enabled: false

-config :pleroma, :app_account_creation, max_requests: 5
+config :pleroma, :rate_limit, app_account_creation: {10_000, 5}

 config :pleroma, :http_security, report_uri: "https://endpoint.com"

@@ -289,7 +289,7 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
 - `limit`: optional, the number of records to retrieve
 - `since_id`: optional, returns results that are more recent than the specified id
 - `max_id`: optional, returns results that are older than the specified id
 - Response:
   - On failure: 403 Forbidden error `{"error": "error_msg"}` when requested by anonymous or non-admin
   - On success: JSON, returns a list of reports, where:
     - `account`: the user who has been reported

@@ -443,7 +443,7 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
 - Params:
   - `id`
 - Response:
   - On failure:
     - 403 Forbidden `{"error": "error_msg"}`
     - 404 Not Found `"Not found"`
   - On success: JSON, Report object (see above)

@@ -454,8 +454,8 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
 - Params:
   - `id`
   - `state`: required, the new state. Valid values are `open`, `closed` and `resolved`
 - Response:
   - On failure:
     - 400 Bad Request `"Unsupported state"`
     - 403 Forbidden `{"error": "error_msg"}`
     - 404 Not Found `"Not found"`

@@ -467,10 +467,10 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
 - Params:
   - `id`
   - `status`: required, the message
 - Response:
   - On failure:
     - 400 Bad Request `"Invalid parameters"` when `status` is missing
     - 403 Forbidden `{"error": "error_msg"}`
     - 404 Not Found `"Not found"`
   - On success: JSON, created Mastodon Status entity

@@ -540,10 +540,10 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
   - `id`
   - `sensitive`: optional, valid values are `true` or `false`
   - `visibility`: optional, valid values are `public`, `private` and `unlisted`
 - Response:
   - On failure:
     - 400 Bad Request `"Unsupported visibility"`
     - 403 Forbidden `{"error": "error_msg"}`
     - 404 Not Found `"Not found"`
   - On success: JSON, Mastodon Status entity

@@ -552,8 +552,97 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
 - Method `DELETE`
 - Params:
   - `id`
 - Response:
   - On failure:
     - 403 Forbidden `{"error": "error_msg"}`
     - 404 Not Found `"Not found"`
   - On success: 200 OK `{}`
+
+## `/api/pleroma/admin/config`
+### List config settings
+- Method `GET`
+- Params: none
+- Response:
+
+```json
+{
+  configs: [
+    {
+      "group": string,
+      "key": string,
+      "value": string or {} or [] or {"tuple": []}
+    }
+  ]
+}
+```
+
+## `/api/pleroma/admin/config`
+### Update config settings
+Module names can be passed as strings starting with `Pleroma`, e.g. `"Pleroma.Upload"`.
+Atom and boolean values are passed with a leading `:`, e.g. `":true"`, `":upload"`. This is not needed for keys.
+Integers are passed with an `i:` prefix, e.g. `"i:150"`.
+Tuples with more than two values are passed as `{"tuple": ["first_val", Pleroma.Module, []]}`.
+`{"tuple": ["some_string", "Pleroma.Some.Module", []]}` will be converted to `{"some_string", Pleroma.Some.Module, []}`.
+
+Compile-time settings (need instance reboot):
+- all settings under these keys:
+  - `:hackney_pools`
+  - `:chat`
+  - `Pleroma.Web.Endpoint`
+  - `Pleroma.Repo`
+- partial settings:
+  - `Pleroma.Captcha` -> `:seconds_valid`
+  - `Pleroma.Upload` -> `:proxy_remote`
+  - `:instance` -> `:upload_limit`
+
+- Method `POST`
+- Params:
+  - `configs` => [
+    - `group` (string)
+    - `key` (string)
+    - `value` (string, [], {} or {"tuple": []})
+    - `delete` = true (optional, set if the parameter must be deleted)
+  ]
+
+- Request (example):
+
+```json
+{
+  configs: [
+    {
+      "group": "pleroma",
+      "key": "Pleroma.Upload",
+      "value": {
+        "uploader": "Pleroma.Uploaders.Local",
+        "filters": ["Pleroma.Upload.Filter.Dedupe"],
+        "link_name": ":true",
+        "proxy_remote": ":false",
+        "proxy_opts": {
+          "redirect_on_failure": ":false",
+          "max_body_length": "i:1048576",
+          "http": {
+            "follow_redirect": ":true",
+            "pool": ":upload"
+          }
+        },
+        "dispatch": {
+          "tuple": ["/api/v1/streaming", "Pleroma.Web.MastodonAPI.WebsocketHandler", []]
+        }
+      }
+    }
+  ]
+}
+```
+
+- Response:
+
+```json
+{
+  configs: [
+    {
+      "group": string,
+      "key": string,
+      "value": string or {} or [] or {"tuple": []}
+    }
+  ]
+}
+```
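
For orientation, the request example above corresponds roughly to the Elixir configuration below once the value encoding (`":true"`, `"i:1048576"`, `{"tuple": [...]}`) is decoded; the exact container types used internally (keyword lists vs. maps) are an assumption here, not something this diff documents.

```elixir
# Rough Elixir equivalent of the JSON update request above (illustrative only).
# ":true" / ":false" decode to booleans, "i:1048576" to the integer 1_048_576.
config :pleroma, Pleroma.Upload,
  uploader: Pleroma.Uploaders.Local,
  filters: [Pleroma.Upload.Filter.Dedupe],
  link_name: true,
  proxy_remote: false,
  proxy_opts: [
    redirect_on_failure: false,
    max_body_length: 1_048_576,
    http: [follow_redirect: true, pool: :upload]
  ],
  dispatch: {"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []}
```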

@@ -44,6 +44,7 @@ Has these additional fields under the `pleroma` object:
 - `hide_followers`: boolean, true when the user has follower hiding enabled
 - `hide_follows`: boolean, true when the user has follow hiding enabled
 - `settings_store`: A generic map of settings for frontends. Opaque to the backend. Only returned in `verify_credentials` and `update_credentials`
+- `chat_token`: The token needed for Pleroma chat. Only returned in `verify_credentials`

 ### Source

@@ -84,6 +85,7 @@ Additional parameters can be added to the JSON body/Form data:
 - `default_scope` - the scope returned under `privacy` key in Source subentity
 - `pleroma_settings_store` - Opaque user settings to be saved on the backend.
 - `skip_thread_containment` - if true, skip filtering out broken threads
+- `pleroma_background_image` - sets the background image of the user.

 ### Pleroma Settings Store
 Pleroma has mechanism that allows frontends to save blobs of json for each user on the backend. This can be used to save frontend-specific settings for a user that the backend does not need to know about.

@@ -49,13 +49,6 @@ Feel free to contact us to be added to this list!
 - Platforms: iOS, Android
 - Features: No Streaming

-### Tootdon
-- Homepage: <http://tootdon.club/>, <http://blog.mastodon-tootdon.com/>
-- Source Code: ???
-- Contact: [@tootdon@mstdn.jp](https://mstdn.jp/users/tootdon)
-- Platforms: Android, iOS
-- Features: No Streaming
-
 ### Tusky
 - Homepage: <https://tuskyapp.github.io/>
 - Source Code: <https://github.com/tuskyapp/Tusky>

@@ -16,6 +16,13 @@ Note: `strip_exif` has been replaced by `Pleroma.Upload.Filter.Mogrify`.
 ## Pleroma.Uploaders.Local
 * `uploads`: Which directory to store the user-uploads in, relative to pleroma’s working directory
+
+## Pleroma.Uploaders.S3
+* `bucket`: S3 bucket name
+* `public_endpoint`: S3 endpoint that the user finally accesses (e.g. "https://s3.dualstack.ap-northeast-1.amazonaws.com")
+* `truncated_namespace`: If you use an S3-compatible service such as Digital Ocean Spaces, or serve uploads through a CDN, set this to the folder name or to `""`.
+  For example, when using a CDN with S3 virtual-host style addressing, set it to `""` and put the CDN hostname (CNAME) in `public_endpoint`.
+
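A minimal sketch of what an S3 uploader configuration could look like, based on the keys listed above; the bucket name and endpoint are placeholders, and the `uploader` key under `Pleroma.Upload` is taken from the Admin API example earlier in this commit, so verify both against your version.

```elixir
# Illustrative only: placeholder bucket/endpoint, check the keys for your setup.
config :pleroma, Pleroma.Upload, uploader: Pleroma.Uploaders.S3

config :pleroma, Pleroma.Uploaders.S3,
  bucket: "my-pleroma-uploads",
  public_endpoint: "https://s3.dualstack.ap-northeast-1.amazonaws.com",
  # leave empty when the CDN hostname is already part of public_endpoint
  truncated_namespace: ""
```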
 ## Pleroma.Upload.Filter.Mogrify

 * `args`: List of actions for the `mogrify` command like `"strip"` or `["strip", "auto-orient", {"implode", "1"}]`.

@@ -86,9 +93,11 @@ config :pleroma, Pleroma.Emails.Mailer,
 * `Pleroma.Web.ActivityPub.MRF.NoOpPolicy`: Doesn’t modify activities (default)
 * `Pleroma.Web.ActivityPub.MRF.DropPolicy`: Drops all activities. It generally doesn’t make sense to use this in production
 * `Pleroma.Web.ActivityPub.MRF.SimplePolicy`: Restrict the visibility of activities from certain instances (See ``:mrf_simple`` section)
+* `Pleroma.Web.ActivityPub.MRF.TagPolicy`: Applies policies to individual users based on tags, which can be set using pleroma-fe/admin-fe/any other app that supports the Pleroma Admin API. For example it allows marking posts from individual users nsfw (sensitive)
 * `Pleroma.Web.ActivityPub.MRF.SubchainPolicy`: Selectively runs other MRF policies when messages match (see ``:mrf_subchain`` section)
 * `Pleroma.Web.ActivityPub.MRF.RejectNonPublic`: Drops posts with non-public visibility settings (See ``:mrf_rejectnonpublic`` section)
 * `Pleroma.Web.ActivityPub.MRF.EnsureRePrepended`: Rewrites posts to ensure that replies to posts with subjects do not have an identical subject and instead begin with re:.
+* `Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy`: Rejects posts from likely spambots by rejecting posts from new users that contain links.
 * `public`: Makes the client API authenticated-mode-only except for user profiles. Useful for disabling the Local Timeline and The Whole Known Network.
 * `quarantined_instances`: List of ActivityPub instances where private (DMs, followers-only) activities will not be sent.
 * `managed_config`: Whether the config for pleroma-fe is configured in this config or in ``static/config.json``
@ -112,13 +121,9 @@ config :pleroma, Pleroma.Emails.Mailer,
|
||||||
* `healthcheck`: If set to true, system data will be shown on ``/api/pleroma/healthcheck``.
|
* `healthcheck`: If set to true, system data will be shown on ``/api/pleroma/healthcheck``.
|
||||||
* `remote_post_retention_days`: The default amount of days to retain remote posts when pruning the database.
|
* `remote_post_retention_days`: The default amount of days to retain remote posts when pruning the database.
|
||||||
* `skip_thread_containment`: Skip filter out broken threads. The default is `false`.
|
* `skip_thread_containment`: Skip filter out broken threads. The default is `false`.
|
||||||
* `limit_unauthenticated_to_local_content`: Limit unauthenticated users to search for local statutes and users only. The default is `true`.
|
* `limit_to_local_content`: Limit unauthenticated users to search for local statutes and users only. Possible values: `:unauthenticated`, `:all` and `false`. The default is `:unauthenticated`.
|
||||||
|
* `dynamic_configuration`: Allow transferring configuration to DB with the subsequent customization from Admin api.
|
||||||
|
|
||||||
## :app_account_creation
|
|
||||||
REST API for creating an account settings
|
|
||||||
* `enabled`: Enable/disable registration
|
|
||||||
* `max_requests`: Number of requests allowed for creating accounts
|
|
||||||
* `interval`: Interval for restricting requests for one ip (seconds)
|
|
||||||
|
|
||||||
## :logger
|
## :logger
|
||||||
* `backends`: `:console` is used to send logs to stdout, `{ExSyslogger, :ex_syslogger}` to log to syslog, and `Quack.Logger` to log to Slack
|
* `backends`: `:console` is used to send logs to stdout, `{ExSyslogger, :ex_syslogger}` to log to syslog, and `Quack.Logger` to log to Slack
|
||||||
|
@ -412,6 +417,8 @@ This config contains two queues: `federator_incoming` and `federator_outgoing`.
|
||||||
|
|
||||||
## :rich_media
|
## :rich_media
|
||||||
* `enabled`: if enabled the instance will parse metadata from attached links to generate link previews
|
* `enabled`: if enabled the instance will parse metadata from attached links to generate link previews
|
||||||
|
* `ignore_hosts`: list of hosts which will be ignored by the metadata parser. For example `["accounts.google.com", "xss.website"]`, defaults to `[]`.
|
||||||
|
* `ignore_tld`: list TLDs (top-level domains) which will ignore for parse metadata. default is ["local", "localdomain", "lan"]
|
||||||
|
|
||||||
## :fetch_initial_posts
|
## :fetch_initial_posts
|
||||||
* `enabled`: if enabled, when a new user is federated with, fetch some of their latest posts
|
* `enabled`: if enabled, when a new user is federated with, fetch some of their latest posts
|
||||||
|
@ -580,7 +587,7 @@ config :ueberauth, Ueberauth,
|
||||||
providers: [
|
providers: [
|
||||||
microsoft: {Ueberauth.Strategy.Microsoft, [callback_params: []]}
|
microsoft: {Ueberauth.Strategy.Microsoft, [callback_params: []]}
|
||||||
]
|
]
|
||||||
|
|
||||||
# Keycloak
|
# Keycloak
|
||||||
# Note: make sure to add `keycloak:ueberauth_keycloak_strategy` entry to `OAUTH_CONSUMER_STRATEGIES` environment variable
|
# Note: make sure to add `keycloak:ueberauth_keycloak_strategy` entry to `OAUTH_CONSUMER_STRATEGIES` environment variable
|
||||||
keycloak_url = "https://publicly-reachable-keycloak-instance.org:8080"
|
keycloak_url = "https://publicly-reachable-keycloak-instance.org:8080"
|
||||||
|
@ -611,6 +618,7 @@ Configure OAuth 2 provider capabilities:
|
||||||
|
|
||||||
## :emoji
|
## :emoji
|
||||||
* `shortcode_globs`: Location of custom emoji files. `*` can be used as a wildcard. Example `["/emoji/custom/**/*.png"]`
|
* `shortcode_globs`: Location of custom emoji files. `*` can be used as a wildcard. Example `["/emoji/custom/**/*.png"]`
|
||||||
|
* `pack_extensions`: A list of file extensions for emojis, when no emoji.txt for a pack is present. Example `[".png", ".gif"]`
|
||||||
* `groups`: Emojis are ordered in groups (tags). This is an array of key-value pairs where the key is the groupname and the value the location or array of locations. `*` can be used as a wildcard. Example `[Custom: ["/emoji/*.png", "/emoji/custom/*.png"]]`
|
* `groups`: Emojis are ordered in groups (tags). This is an array of key-value pairs where the key is the groupname and the value the location or array of locations. `*` can be used as a wildcard. Example `[Custom: ["/emoji/*.png", "/emoji/custom/*.png"]]`
|
||||||
* `default_manifest`: Location of the JSON-manifest. This manifest contains information about the emoji-packs you can download. Currently only one manifest can be added (no arrays).
|
* `default_manifest`: Location of the JSON-manifest. This manifest contains information about the emoji-packs you can download. Currently only one manifest can be added (no arrays).
|
||||||
|
|
||||||
|
@ -628,3 +636,14 @@ To enable them, both the `rum_enabled` flag has to be set and the following spec
|
||||||
`mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`
|
`mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`
|
||||||
|
|
||||||
This will probably take a long time.
|
This will probably take a long time.
|
||||||
|
|
||||||
|
## :rate_limit
|
||||||
|
|
||||||
|
A keyword list of rate limiters where a key is a limiter name and value is the limiter configuration. The basic configuration is a tuple where:
|
||||||
|
|
||||||
|
* The first element: `scale` (Integer). The time scale in milliseconds.
|
||||||
|
* The second element: `limit` (Integer). How many requests to limit in the time scale provided.
|
||||||
|
|
||||||
|
It is also possible to have different limits for unauthenticated and authenticated users: the keyword value must be a list of two tuples where the first one is a config for unauthenticated users and the second one is for authenticated.
|
||||||
|
|
||||||
|
See [`Pleroma.Plugs.RateLimiter`](Pleroma.Plugs.RateLimiter.html) documentation for examples.
|
||||||
|
|
|
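A minimal sketch of the two shapes described above, mirroring the defaults this commit adds to the main config file (same limiter names and values):

```elixir
config :pleroma, :rate_limit,
  # single limiter: at most 25 account-creation requests per 1_800_000 ms (30 minutes)
  app_account_creation: {1_800_000, 25},
  # two limiters: 10 requests/second for unauthenticated users, 30 for authenticated users
  search: [{1000, 10}, {1000, 30}]
```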

@@ -9,8 +9,8 @@ config :pleroma, :suggestions,
   third_party_engine:
     "http://vinayaka.distsn.org/cgi-bin/vinayaka-user-match-suggestions-api.cgi?{{host}}+{{user}}",
   timeout: 300_000,
-  limit: 23,
+  limit: 40,
-  web: "https://vinayaka.distsn.org/?{{host}}+{{user}}"
+  web: "https://vinayaka.distsn.org"

 ```

@@ -26,6 +26,6 @@ config :pleroma, :suggestions,
   third_party_engine:
     "http://vinayaka.distsn.org/cgi-bin/vinayaka-user-new-suggestions-api.cgi?{{host}}+{{user}}",
   timeout: 60_000,
-  limit: 23,
+  limit: 40,
   web: "https://vinayaka.distsn.org/user-new.html"
 ```

@@ -203,12 +203,12 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
 #### Further reading

 * [Admin tasks](Admin tasks)
-* [Backup your instance](Backup-your-instance)
+* [Backup your instance](backup.html)
-* [Configuration tips](General tips for customizing pleroma fe)
+* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
-* [Hardening your instance](Hardening-your-instance)
+* [Hardening your instance](hardening.html)
-* [How to activate mediaproxy](How-to-activate-mediaproxy)
+* [How to activate mediaproxy](howto_mediaproxy.html)
-* [Small Pleroma-FE customizations](Small customizations)
+* [Small Pleroma-FE customizations](small_customizations.html)
-* [Updating your instance](Updating-your-instance)
+* [Updating your instance](updating.html)

 ## Questions

@@ -201,12 +201,12 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
 #### Further reading

 * [Admin tasks](Admin tasks)
-* [Backup your instance](Backup-your-instance)
+* [Backup your instance](backup.html)
-* [Configuration tips](General tips for customizing pleroma fe)
+* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
-* [Hardening your instance](Hardening-your-instance)
+* [Hardening your instance](hardening.html)
-* [How to activate mediaproxy](How-to-activate-mediaproxy)
+* [How to activate mediaproxy](howto_mediaproxy.html)
-* [Small Pleroma-FE customizations](Small customizations)
+* [Small Pleroma-FE customizations](small_customizations.html)
-* [Updating your instance](Updating-your-instance)
+* [Updating your instance](updating.html)

 ## Questions

@@ -265,12 +265,12 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
 #### Further reading

 * [Admin tasks](Admin tasks)
-* [Backup your instance](Backup-your-instance)
+* [Backup your instance](backup.html)
-* [Configuration tips](General tips for customizing pleroma fe)
+* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
-* [Hardening your instance](Hardening-your-instance)
+* [Hardening your instance](hardening.html)
-* [How to activate mediaproxy](How-to-activate-mediaproxy)
+* [How to activate mediaproxy](howto_mediaproxy.html)
-* [Small Pleroma-FE customizations](Small customizations)
+* [Small Pleroma-FE customizations](small_customizations.html)
-* [Updating your instance](Updating-your-instance)
+* [Updating your instance](updating.html)

 ## Questions

@@ -191,12 +191,12 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
 #### Further reading

 * [Admin tasks](Admin tasks)
-* [Backup your instance](Backup-your-instance)
+* [Backup your instance](backup.html)
-* [Configuration tips](General tips for customizing pleroma fe)
+* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
-* [Hardening your instance](Hardening-your-instance)
+* [Hardening your instance](hardening.html)
-* [How to activate mediaproxy](How-to-activate-mediaproxy)
+* [How to activate mediaproxy](howto_mediaproxy.html)
-* [Small Pleroma-FE customizations](Small customizations)
+* [Small Pleroma-FE customizations](small_customizations.html)
-* [Updating your instance](Updating-your-instance)
+* [Updating your instance](updating.html)

 ## Questions

@@ -180,9 +180,13 @@ mix set_moderator username [true|false]

 #### Configuration and customization

-* [Configuration tips](General tips for customizing pleroma fe)
-* [Small Pleroma-FE customizations](Small customizations)
 * [Admin tasks](Admin tasks)
+* [Backup your instance](backup.html)
+* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
+* [Hardening your instance](hardening.html)
+* [How to activate mediaproxy](howto_mediaproxy.html)
+* [Small Pleroma-FE customizations](small_customizations.html)
+* [Updating your instance](updating.html)

 ## Questions?

@@ -284,12 +284,12 @@ If you opted to allow sudo for the `pleroma` user but would like to remove the a
 #### Further reading

 * [Admin tasks](Admin tasks)
-* [Backup your instance](Backup-your-instance)
+* [Backup your instance](backup.html)
-* [Configuration tips](General tips for customizing pleroma fe)
+* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
-* [Hardening your instance](Hardening-your-instance)
+* [Hardening your instance](hardening.html)
-* [How to activate mediaproxy](How-to-activate-mediaproxy)
+* [How to activate mediaproxy](howto_mediaproxy.html)
-* [Small Pleroma-FE customizations](Small customizations)
+* [Small Pleroma-FE customizations](small_customizations.html)
-* [Updating your instance](Updating-your-instance)
+* [Updating your instance](updating.html)

 ## Questions
151
docs/installation/migrating_from_source_otp_en.md
Normal file
151
docs/installation/migrating_from_source_otp_en.md
Normal file
|
@ -0,0 +1,151 @@
|
||||||
|
# Switching a from-source install to OTP releases
|
||||||
|
## What are OTP releases?
|
||||||
|
OTP releases are as close as you can get to binary releases with Erlang/Elixir. The release is self-contained, and provides everything needed to boot it, it is easily administered via the provided shell script to open up a remote console, start/stop/restart the release, start in the background, send remote commands, and more.
|
||||||
|
### Can I still run the develop branch if I decide to use them?
|
||||||
|
Yes, we produce builds for every commit in `develop`. However `develop` is considered unstable, please don't use it in production because of faster access to new features, unless you need them as an app developer.
|
||||||
|
## Why would one want to switch?
|
||||||
|
Benefits of OTP releases over from-source installs include:
|
||||||
|
* **Less space used.** OTP releases come without source code, build tools, have docs and debug symbols stripped from the compiled bytecode and do not cointain tests, docs, revision history.
|
||||||
|
* **Minimal system dependencies.** Excluding the database and reverse proxy, only `curl`, `unzip` and `ncurses` are needed to download and run the release. Because Erlang runtime and Elixir are shipped with Pleroma, one can use the latest BEAM optimizations and Pleroma features, without having to worry about outdated system repos or a missing `erlang-*` package.
|
||||||
|
* **Potentially less bugs and better performance.** This extends on the previous point, because we have control over exactly what gets shipped, we can tweak the VM arguments and forget about weird bugs due to Erlang/Elixir version mismatches.
|
||||||
|
* **Faster and less bug-prone mix tasks.** On a from-source install one has to wait untill a new Pleroma node is started for each mix task and they execute outside of the instance context (for example if a user was deleted via a mix task, the instance will have no knowledge of that and continue to display status count and follows before the cache expires). Mix tasks in OTP releases are executed by calling into a running instance via RPC, which solves both of these problems.
|
||||||
|
|
||||||
|
### Sounds great, how do I switch?
|
||||||
|
Currently we support Linux machines with GNU (e.g. Debian, Ubuntu) or musl (e.g. Alpine) libc and `x86_64`, `aarch64` or `armv7l` CPUs. If you are unsure, check the [Detecting flavour](otp_en.html#detecting-flavour) section in OTP install guide. If your platform is supported, proceed with the guide, if not check the [My platform is not supported](#my-platform-is-not-supported) section.
|
||||||
|
### I don't think it is worth the effort, can I stay on a from-source install?
|
||||||
|
Yes, currently there are no plans to deprecate them.
|
||||||
|
|
||||||
|
### My platform is not supported
|
||||||
|
If you think your platform is a popular choice for running Pleroma instances, or has the potential to become one, you can [file an issue on our Gitlab](https://git.pleroma.social/pleroma/pleroma/issues/new). If not, guides on how to build and update releases by yourself will be available soon.
|
||||||
|
## Pre-requisites
|
||||||
|
You will be running commands as root. If you aren't root already, please elevate your priviledges by executing `sudo su`/`su`.
|
||||||
|
|
||||||
|
The system needs to have `curl` and `unzip` installed for downloading and unpacking release builds.
|
||||||
|
|
||||||
|
Debian/Ubuntu:
|
||||||
|
```sh
|
||||||
|
apt install curl unzip
|
||||||
|
```
|
||||||
|
Alpine:
|
||||||
|
```
|
||||||
|
apk add curl unzip
|
||||||
|
|
||||||
|
```
|
||||||
|
## Moving content out of the application directory
|
||||||
|
When using OTP releases the application directory changes with every version so it would be a bother to keep content there (and also dangerous unless `--no-rm` option is used when updating). Fortunately almost all paths in Pleroma are configurable, so it is possible to move them out of there.
|
||||||
|
|
||||||
|
Pleroma should be stopped before proceeding.
|
||||||
|
|
||||||
|
### Moving uploads/custom public files directory
|
||||||
|
```sh
|
||||||
|
# Create uploads directory and set proper permissions (skip if using a remote uploader)
|
||||||
|
# Note: It does not have to be `/var/lib/pleroma/uploads`, you can configure it to be something else later
|
||||||
|
mkdir -p /var/lib/pleroma/uploads
|
||||||
|
chown -R pleroma /var/lib/pleroma
|
||||||
|
|
||||||
|
# Create custom public files directory
|
||||||
|
# Note: It does not have to be `/var/lib/pleroma/static`, you can configure it to be something else later
|
||||||
|
mkdir -p /var/lib/pleroma/static
|
||||||
|
chown -R pleroma /var/lib/pleroma
|
||||||
|
|
||||||
|
# If you use the local uploader with default settings your uploads should be located in `~pleroma/uploads`
|
||||||
|
mv ~pleroma/uploads /var/lib/pleroma/uploads
|
||||||
|
|
||||||
|
# If you have created the custom public files directory with default settings it should be located in `~pleroma/instance/static`
|
||||||
|
mv ~pleroma/instance/static /var/lib/pleroma/static
|
||||||
|
```
|
||||||
|
|
||||||
|
### Moving emoji
|
||||||
|
Assuming you have all emojis in subdirectories of `priv/static/emoji` moving them can be done with
|
||||||
|
```sh
|
||||||
|
mkdir /var/lib/pleroma/static/emoji
|
||||||
|
ls -d ~pleroma/priv/static/emoji/*/ | xargs -i sh -c 'mv "{}" "/var/lib/pleroma/static/emoji/$(basename {})"'
|
||||||
|
```
|
||||||
|
|
||||||
|
But, if for some reason you have custom emojis in the root directory you should copy the whole directory instead.
|
||||||
|
```sh
|
||||||
|
mv ~pleroma/priv/static/emoji /var/lib/pleroma/static/emoji
|
||||||
|
```
|
||||||
|
and then copy custom emojis to `/var/lib/pleroma/static/emoji/custom`.
|
||||||
|
|
||||||
|
This is needed because storing custom emojis in the root directory is deprecated, but if you just move them to `/var/lib/pleroma/static/emoji/custom` it will break emoji urls on old posts.
|
||||||
|
|
||||||
|
Note that globs have been replaced with `pack_extensions`, so if your emojis are not in png/gif you should [modify the default value](config.html#emoji).
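As a minimal sketch, assuming the `:emoji` settings keep their current shape, allowing additional file types could look like this (the exact extension list is only illustrative):

```elixir
# Illustrative only: extend the allowed emoji file extensions beyond the png/gif defaults
config :pleroma, :emoji, pack_extensions: [".png", ".gif", ".jpg", ".jpeg"]
```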
|
||||||
|
|
||||||
|
### Moving the config
|
||||||
|
```sh
|
||||||
|
# Create the config directory
|
||||||
|
# The default path for Pleroma config is /etc/pleroma/config.exs
|
||||||
|
# but it can be set via PLEROMA_CONFIG_PATH environment variable
|
||||||
|
mkdir -p /etc/pleroma
|
||||||
|
|
||||||
|
# Move the config file
|
||||||
|
mv ~pleroma/config/prod.secret.exs /etc/pleroma/config.exs
|
||||||
|
|
||||||
|
# Change `use Mix.Config` at the top to `import Config`
|
||||||
|
$EDITOR /etc/pleroma/config.exs
|
||||||
|
```
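After that edit, the top of `/etc/pleroma/config.exs` should look something like the sketch below; the endpoint block is shown only as an illustration of what typically follows, and the rest of your settings stay unchanged.

```elixir
import Config

# ...your existing settings follow unchanged, for example:
config :pleroma, Pleroma.Web.Endpoint,
  url: [host: "example.tld", scheme: "https", port: 443]
```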
|
||||||
|
## Installing the release
|
||||||
|
Before proceeding, get the flavour from [Detecting flavour](otp_en.html#detecting-flavour) section in OTP installation guide.
|
||||||
|
```sh
|
||||||
|
# Delete all files in pleroma user's directory
|
||||||
|
rm -r ~pleroma/*
|
||||||
|
|
||||||
|
# Set the flavour environment variable to the string you got in Detecting flavour section.
|
||||||
|
# For example if the flavour is `arm64-musl` the command will be
|
||||||
|
export FLAVOUR="arm64-musl"
|
||||||
|
|
||||||
|
# Clone the release build into a temporary directory and unpack it
|
||||||
|
# Replace `master` with `develop` if you want to run the develop branch
|
||||||
|
su pleroma -s $SHELL -lc "
|
||||||
|
curl 'https://git.pleroma.social/api/v4/projects/2/jobs/artifacts/master/download?job=$FLAVOUR' -o /tmp/pleroma.zip
|
||||||
|
unzip /tmp/pleroma.zip -d /tmp/
|
||||||
|
"
|
||||||
|
|
||||||
|
# Move the release to the home directory and delete temporary files
|
||||||
|
su pleroma -s $SHELL -lc "
|
||||||
|
mv /tmp/release/* ~pleroma/
|
||||||
|
rmdir /tmp/release
|
||||||
|
rm /tmp/pleroma.zip
|
||||||
|
"
|
||||||
|
|
||||||
|
# Start the instance to verify that everything is working as expected
|
||||||
|
su pleroma -s $SHELL -lc "./bin/pleroma daemon"
|
||||||
|
|
||||||
|
# Wait for about 20 seconds and query the instance endpoint; if it shows your URI, name and email correctly, you are configured correctly
|
||||||
|
sleep 20 && curl http://localhost:4000/api/v1/instance
|
||||||
|
|
||||||
|
# Stop the instance
|
||||||
|
su pleroma -s $SHELL -lc "./bin/pleroma stop"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Setting up a system service
|
||||||
|
OTP releases have different service files than from-source installs, so they need to be copied over again.
|
||||||
|
|
||||||
|
Debian/Ubuntu:
|
||||||
|
```sh
|
||||||
|
# Copy the service into a proper directory
|
||||||
|
cp ~pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service
|
||||||
|
|
||||||
|
# Reload service files
|
||||||
|
systemctl daemon-reload
|
||||||
|
|
||||||
|
# Reenable pleroma to start on boot
|
||||||
|
systemctl reenable pleroma
|
||||||
|
|
||||||
|
# Start pleroma
|
||||||
|
systemctl start pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
Alpine:
|
||||||
|
```sh
|
||||||
|
# Copy the service into a proper directory
|
||||||
|
cp -f ~pleroma/installation/init.d/pleroma /etc/init.d/pleroma
|
||||||
|
|
||||||
|
# Start pleroma
|
||||||
|
rc-service pleroma start
|
||||||
|
```
|
||||||
|
## Running mix tasks
|
||||||
|
Refer to the [Running mix tasks](otp_en.html#running-mix-tasks) section of the OTP release installation guide.
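In short, a task such as `mix pleroma.user set admin --admin` is invoked through `pleroma_ctl`, for example:

```sh
su pleroma -s $SHELL -lc "./bin/pleroma_ctl user set admin --admin"
```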
|
||||||
|
## Updating
|
||||||
|
Refer to the [Updating](otp_en.html#updating) section of the OTP release installation guide.
|
261
docs/installation/otp_en.md
Normal file
|
@ -0,0 +1,261 @@
|
||||||
|
# Installing on Linux using OTP releases
|
||||||
|
|
||||||
|
## Pre-requisites
|
||||||
|
* A machine running Linux with GNU (e.g. Debian, Ubuntu) or musl (e.g. Alpine) libc and `x86_64`, `aarch64` or `armv7l` CPU, you have root access to. If you are not sure if it's compatible see [Detecting flavour section](#detecting-flavour) below
|
||||||
|
* A (sub)domain pointed to the machine
|
||||||
|
|
||||||
|
You will be running commands as root. If you aren't root already, please elevate your privileges by executing `sudo su`/`su`.
|
||||||
|
|
||||||
|
While in theory OTP releases can be installed on any compatible machine, for the sake of simplicity this guide focuses only on Debian/Ubuntu/Alpine.
|
||||||
|
|
||||||
|
### Detecting flavour
|
||||||
|
|
||||||
|
Paste the following into the shell:
|
||||||
|
```sh
|
||||||
|
arch="$(uname -m)";if [ "$arch" = "x86_64" ];then arch="amd64";elif [ "$arch" = "armv7l" ];then arch="arm";elif [ "$arch" = "aarch64" ];then arch="arm64";else echo "Unsupported arch: $arch">&2;fi;if getconf GNU_LIBC_VERSION>/dev/null;then libc_postfix="";elif [ "$(ldd 2>&1|head -c 9)" = "musl libc" ];then libc_postfix="-musl";elif [ "$(find /lib/libc.musl*|wc -l)" ];then libc_postfix="-musl";else echo "Unsupported libc">&2;fi;echo "$arch$libc_postfix"
|
||||||
|
```
|
||||||
|
|
||||||
|
If your platform is supported, the output will contain the flavour string; you will need it later. If not, this just means that we don't build releases for your platform, and you can still try installing from source.
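For readability, an equivalent step-by-step form of the one-liner above (same logic, just expanded) is:

```sh
# Expanded form of the flavour detection one-liner
arch="$(uname -m)"
case "$arch" in
  x86_64)  arch="amd64" ;;
  armv7l)  arch="arm"   ;;
  aarch64) arch="arm64" ;;
  *) echo "Unsupported arch: $arch" >&2 ;;
esac

if getconf GNU_LIBC_VERSION >/dev/null 2>&1; then
  libc_postfix=""
elif [ "$(ldd 2>&1 | head -c 9)" = "musl libc" ]; then
  libc_postfix="-musl"
elif [ "$(find /lib/libc.musl* 2>/dev/null | wc -l)" -gt 0 ]; then
  libc_postfix="-musl"
else
  echo "Unsupported libc" >&2
fi

echo "$arch$libc_postfix"
```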
|
||||||
|
|
||||||
|
### Installing the required packages
|
||||||
|
|
||||||
|
Other than things bundled in the OTP release Pleroma depends on:
|
||||||
|
* curl (to download the release build)
|
||||||
|
* unzip (needed to unpack release builds)
|
||||||
|
* ncurses (ERTS won't run without it)
|
||||||
|
* PostgreSQL (also utilizes extensions in postgresql-contrib)
|
||||||
|
* nginx (could be swapped with another reverse proxy, but this guide only covers nginx)
|
||||||
|
* certbot (for Let's Encrypt certificates; could be swapped with another ACME client, but this guide only covers certbot)
|
||||||
|
|
||||||
|
Debian/Ubuntu:
|
||||||
|
```sh
|
||||||
|
apt install curl unzip libncurses5 postgresql postgresql-contrib nginx certbot
|
||||||
|
```
|
||||||
|
Alpine:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
echo "http://nl.alpinelinux.org/alpine/latest-stable/community" >> /etc/apk/repositories
|
||||||
|
apk update
|
||||||
|
apk add curl unzip ncurses postgresql postgresql-contrib nginx certbot
|
||||||
|
```
|
||||||
|
|
||||||
|
## Setup
|
||||||
|
### Configuring PostgreSQL
|
||||||
|
#### (Optional) Installing RUM indexes
|
||||||
|
RUM indexes are an alternative indexing scheme that is not included in PostgreSQL by default. You can read more about them on the [Configuration page](config.html#rum-indexing-for-full-text-search). They are completely optional and most of the time are not worth it, especially if you are running a single user instance (unless you absolutely need ordered search results).
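If you do install them, they are later enabled by running the optional RUM migrations shown in the install steps plus a single config switch. A minimal sketch, assuming the default `:database` settings:

```elixir
# Illustrative: turn on RUM-backed full-text search
# (only after installing the extension and running the optional RUM migrations)
config :pleroma, :database, rum_enabled: true
```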
|
||||||
|
|
||||||
|
Debian/Ubuntu (available only on Buster/19.04):
|
||||||
|
```sh
|
||||||
|
apt install postgresql-11-rum
|
||||||
|
```
|
||||||
|
Alpine:
|
||||||
|
```sh
|
||||||
|
apk add git build-base postgresql-dev
|
||||||
|
git clone https://github.com/postgrespro/rum /tmp/rum
|
||||||
|
cd /tmp/rum
|
||||||
|
make USE_PGXS=1
|
||||||
|
make USE_PGXS=1 install
|
||||||
|
cd
|
||||||
|
rm -r /tmp/rum
|
||||||
|
```
|
||||||
|
#### (Optional) Performance configuration
|
||||||
|
For optimal performance, you may use [PGTune](https://pgtune.leopard.in.ua); don't forget to restart PostgreSQL after editing the configuration.
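As a hedged illustration of the kind of values PGTune suggests (example numbers for a machine with roughly 2 GB RAM and 2 cores; do not copy them blindly, use PGTune's output for your own hardware):

```sh
# Illustrative only: ALTER SYSTEM persists the settings to postgresql.auto.conf
su postgres -s $SHELL -lc "psql -c \"ALTER SYSTEM SET shared_buffers = '512MB';\""
su postgres -s $SHELL -lc "psql -c \"ALTER SYSTEM SET effective_cache_size = '1536MB';\""
```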
|
||||||
|
|
||||||
|
Debian/Ubuntu:
|
||||||
|
```sh
|
||||||
|
systemctl restart postgresql
|
||||||
|
```
|
||||||
|
Alpine:
|
||||||
|
```sh
|
||||||
|
rc-service postgresql restart
|
||||||
|
```
|
||||||
|
### Installing Pleroma
|
||||||
|
```sh
|
||||||
|
# Create the Pleroma user
|
||||||
|
adduser --system --shell /bin/false --home /opt/pleroma pleroma
|
||||||
|
|
||||||
|
# Set the flavour environment variable to the string you got in Detecting flavour section.
|
||||||
|
# For example if the flavour is `arm64-musl` the command will be
|
||||||
|
export FLAVOUR="arm64-musl"
|
||||||
|
|
||||||
|
# Clone the release build into a temporary directory and unpack it
|
||||||
|
su pleroma -s $SHELL -lc "
|
||||||
|
curl 'https://git.pleroma.social/api/v4/projects/2/jobs/artifacts/master/download?job=$FLAVOUR' -o /tmp/pleroma.zip
|
||||||
|
unzip /tmp/pleroma.zip -d /tmp/
|
||||||
|
"
|
||||||
|
|
||||||
|
# Move the release to the home directory and delete temporary files
|
||||||
|
su pleroma -s $SHELL -lc "
|
||||||
|
mv /tmp/release/* /opt/pleroma
|
||||||
|
rmdir /tmp/release
|
||||||
|
rm /tmp/pleroma.zip
|
||||||
|
"
|
||||||
|
# Create uploads directory and set proper permissions (skip if planning to use a remote uploader)
|
||||||
|
# Note: It does not have to be `/var/lib/pleroma/uploads`, the config generator will ask about the upload directory later
|
||||||
|
|
||||||
|
mkdir -p /var/lib/pleroma/uploads
|
||||||
|
chown -R pleroma /var/lib/pleroma
|
||||||
|
|
||||||
|
# Create custom public files directory (custom emojis, frontend bundle overrides, robots.txt, etc.)
|
||||||
|
# Note: It does not have to be `/var/lib/pleroma/static`, the config generator will ask about the custom public files directory later
|
||||||
|
mkdir -p /var/lib/pleroma/static
|
||||||
|
chown -R pleroma /var/lib/pleroma
|
||||||
|
|
||||||
|
# Create a config directory
|
||||||
|
mkdir -p /etc/pleroma
|
||||||
|
chown -R pleroma /etc/pleroma
|
||||||
|
|
||||||
|
# Run the config generator
|
||||||
|
su pleroma -s $SHELL -lc "./bin/pleroma_ctl instance gen --output /etc/pleroma/config.exs --output-psql /tmp/setup_db.psql"
|
||||||
|
|
||||||
|
# Create the postgres database
|
||||||
|
su postgres -s $SHELL -lc "psql -f /tmp/setup_db.psql"
|
||||||
|
|
||||||
|
# Create the database schema
|
||||||
|
su pleroma -s $SHELL -lc "./bin/pleroma_ctl migrate"
|
||||||
|
|
||||||
|
# If you have installed RUM indexes, uncomment and run
|
||||||
|
# su pleroma -s $SHELL -lc "./bin/pleroma_ctl migrate --migrations-path priv/repo/optional_migrations/rum_indexing/"
|
||||||
|
|
||||||
|
# Start the instance to verify that everything is working as expected
|
||||||
|
su pleroma -s $SHELL -lc "./bin/pleroma daemon"
|
||||||
|
|
||||||
|
# Wait for about 20 seconds and query the instance endpoint; if it shows your URI, name and email correctly, you are configured correctly
|
||||||
|
sleep 20 && curl http://localhost:4000/api/v1/instance
|
||||||
|
|
||||||
|
# Stop the instance
|
||||||
|
su pleroma -s $SHELL -lc "./bin/pleroma stop"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Setting up nginx and getting Let's Encrypt SSL certificates
|
||||||
|
|
||||||
|
```sh
|
||||||
|
# Get a Let's Encrypt certificate
|
||||||
|
certbot certonly --standalone --preferred-challenges http -d yourinstance.tld
|
||||||
|
|
||||||
|
# Copy the Pleroma nginx configuration to the nginx folder
|
||||||
|
# The location of nginx configs is dependent on the distro
|
||||||
|
|
||||||
|
# For Debian/Ubuntu:
|
||||||
|
cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/sites-available/pleroma.nginx
|
||||||
|
ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/pleroma.nginx
|
||||||
|
# For Alpine:
|
||||||
|
cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/conf.d/pleroma.conf
|
||||||
|
# If your distro does not have either of those you can append
|
||||||
|
# `include /etc/nginx/pleroma.conf` to the end of the http section in /etc/nginx/nginx.conf and
|
||||||
|
cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/pleroma.conf
|
||||||
|
|
||||||
|
# Edit the nginx config replacing example.tld with your (sub)domain
|
||||||
|
$EDITOR path-to-nginx-config
|
||||||
|
|
||||||
|
# Verify that the config is valid
|
||||||
|
nginx -t
|
||||||
|
|
||||||
|
# Start nginx
|
||||||
|
# For Debian/Ubuntu:
|
||||||
|
systemctl start nginx
|
||||||
|
# For Alpine:
|
||||||
|
rc-service nginx start
|
||||||
|
```
|
||||||
|
|
||||||
|
At this point, if you open your (sub)domain in a browser you should see a 502 error; that's because Pleroma is not started yet.
|
||||||
|
|
||||||
|
### Setting up a system service
|
||||||
|
Debian/Ubuntu:
|
||||||
|
```sh
|
||||||
|
# Copy the service into a proper directory
|
||||||
|
cp /opt/pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service
|
||||||
|
|
||||||
|
# Start pleroma and enable it on boot
|
||||||
|
systemctl start pleroma
|
||||||
|
systemctl enable pleroma
|
||||||
|
```
|
||||||
|
Alpine:
|
||||||
|
```sh
|
||||||
|
# Copy the service into a proper directory
|
||||||
|
cp /opt/pleroma/installation/init.d/pleroma /etc/init.d/pleroma
|
||||||
|
|
||||||
|
# Start pleroma and enable it on boot
|
||||||
|
rc-service pleroma start
|
||||||
|
rc-update add pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
If everything worked, you should see Pleroma-FE when visiting your domain. If that didn't happen, try reviewing the installation steps, starting Pleroma in the foreground and checking whether there are any errors.
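For example, running the release in the foreground (stop the service first) prints any startup errors straight to the terminal:

```sh
# Stop the service, then run Pleroma in the foreground to watch the logs
systemctl stop pleroma          # Debian/Ubuntu; on Alpine: rc-service pleroma stop
su pleroma -s $SHELL -lc "./bin/pleroma start"
```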
|
||||||
|
|
||||||
|
Still doesn't work? Feel free to contact us on [#pleroma on freenode](https://webchat.freenode.net/?channels=%23pleroma) or via Matrix at <https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org>; you can also [file an issue on our Gitlab](https://git.pleroma.social/pleroma/pleroma/issues/new).
|
||||||
|
|
||||||
|
## Post installation
|
||||||
|
|
||||||
|
### Setting up auto-renew Let's Encrypt certificate
|
||||||
|
```sh
|
||||||
|
# Create the directory for webroot challenges
|
||||||
|
mkdir -p /var/lib/letsencrypt
|
||||||
|
|
||||||
|
# Uncomment the webroot method
|
||||||
|
$EDITOR path-to-nginx-config
|
||||||
|
|
||||||
|
# Verify that the config is valid
|
||||||
|
nginx -t
|
||||||
|
```
|
||||||
|
Debian/Ubuntu:
|
||||||
|
```sh
|
||||||
|
# Restart nginx
|
||||||
|
systemctl restart nginx
|
||||||
|
|
||||||
|
# Ensure the webroot method and post hook are working
|
||||||
|
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --dry-run --post-hook 'systemctl reload nginx'
|
||||||
|
|
||||||
|
# Add it to the daily cron
|
||||||
|
echo '#!/bin/sh
|
||||||
|
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --post-hook "systemctl reload nginx"
|
||||||
|
' > /etc/cron.daily/renew-pleroma-cert
|
||||||
|
chmod +x /etc/cron.daily/renew-pleroma-cert
|
||||||
|
|
||||||
|
# If everything worked the output should contain /etc/cron.daily/renew-pleroma-cert
|
||||||
|
run-parts --test /etc/cron.daily
|
||||||
|
```
|
||||||
|
Alpine:
|
||||||
|
```sh
|
||||||
|
# Restart nginx
|
||||||
|
rc-service nginx restart
|
||||||
|
|
||||||
|
# Start the cron daemon and make it start on boot
|
||||||
|
rc-service crond start
|
||||||
|
rc-update add crond
|
||||||
|
|
||||||
|
# Ensure the webroot method and post hook are working
|
||||||
|
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --dry-run --post-hook 'rc-service nginx reload'
|
||||||
|
|
||||||
|
# Add it to the daily cron
|
||||||
|
echo '#!/bin/sh
|
||||||
|
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --post-hook "rc-service nginx reload"
|
||||||
|
' > /etc/periodic/daily/renew-pleroma-cert
|
||||||
|
chmod +x /etc/periodic/daily/renew-pleroma-cert
|
||||||
|
|
||||||
|
# If everything worked this should output /etc/periodic/daily/renew-pleroma-cert
|
||||||
|
run-parts --test /etc/periodic/daily
|
||||||
|
```
|
||||||
|
### Running mix tasks
|
||||||
|
Throughout the wiki and guides there are a lot of references to mix tasks. Since `mix` is a build tool, you can't just call `mix pleroma.task`; instead, you should call `pleroma_ctl`, stripping the `pleroma`/`ecto` namespace.
|
||||||
|
|
||||||
|
So for example, if the task is `mix pleroma.user set admin --admin`, you should run it like this:
|
||||||
|
```sh
|
||||||
|
su pleroma -s $SHELL -lc "./bin/pleroma_ctl user set admin --admin"
|
||||||
|
```
|
||||||
|
### Updating
|
||||||
|
Generally, doing the following is enough:
|
||||||
|
```sh
|
||||||
|
# Download the new release
|
||||||
|
su pleroma -s $SHELL -lc "./bin/pleroma_ctl update"
|
||||||
|
|
||||||
|
# Migrate the database, you are advised to stop the instance before doing that
|
||||||
|
su pleroma -s $SHELL -lc "./bin/pleroma_ctl migrate"
|
||||||
|
```
|
||||||
|
But you should **always check the release notes/changelog** in case there are config deprecations, special update steps, etc.
|
||||||
|
|
||||||
|
## Further reading
|
||||||
|
* [Configuration](config.html)
|
||||||
|
* [Pleroma's base config.exs](https://git.pleroma.social/pleroma/pleroma/blob/master/config/config.exs)
|
||||||
|
* [Hardening your instance](hardening.html)
|
||||||
|
* [Pleroma Clients](clients.html)
|
||||||
|
* [Emoji pack manager](Mix.Tasks.Pleroma.Emoji.html)
|
|
@ -14,17 +14,19 @@ server {
|
||||||
|
|
||||||
listen 80;
|
listen 80;
|
||||||
listen [::]:80;
|
listen [::]:80;
|
||||||
return 301 https://$server_name$request_uri;
|
|
||||||
|
|
||||||
# Uncomment this if you need to use the 'webroot' method with certbot. Make sure
|
# Uncomment this if you need to use the 'webroot' method with certbot. Make sure
|
||||||
# that the directory exists and that it is accessible by the webserver. If you followed
|
# that the directory exists and that it is accessible by the webserver. If you followed
|
||||||
# the guide, you already ran 'sudo mkdir -p /var/lib/letsencrypt' to create the folder.
|
# the guide, you already ran 'mkdir -p /var/lib/letsencrypt' to create the folder.
|
||||||
# You may need to load this file with the ssl server block commented out, run certbot
|
# You may need to load this file with the ssl server block commented out, run certbot
|
||||||
# to get the certificate, and then uncomment it.
|
# to get the certificate, and then uncomment it.
|
||||||
#
|
#
|
||||||
# location ~ /\.well-known/acme-challenge {
|
# location ~ /\.well-known/acme-challenge {
|
||||||
# root /var/lib/letsencrypt/.well-known/acme-challenge;
|
# root /var/lib/letsencrypt/;
|
||||||
# }
|
# }
|
||||||
|
location / {
|
||||||
|
return 301 https://$server_name$request_uri;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
# Enable SSL session caching for improved performance
|
# Enable SSL session caching for improved performance
|
||||||
|
|
67
lib/mix/pleroma.ex
Normal file
|
@ -0,0 +1,67 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Mix.Pleroma do
|
||||||
|
@doc "Common functions to be reused in mix tasks"
|
||||||
|
def start_pleroma do
|
||||||
|
Application.put_env(:phoenix, :serve_endpoints, false, persistent: true)
|
||||||
|
{:ok, _} = Application.ensure_all_started(:pleroma)
|
||||||
|
end
|
||||||
|
|
||||||
|
def load_pleroma do
|
||||||
|
Application.load(:pleroma)
|
||||||
|
end
|
||||||
|
|
||||||
|
def get_option(options, opt, prompt, defval \\ nil, defname \\ nil) do
|
||||||
|
Keyword.get(options, opt) || shell_prompt(prompt, defval, defname)
|
||||||
|
end
|
||||||
|
|
||||||
|
def shell_prompt(prompt, defval \\ nil, defname \\ nil) do
|
||||||
|
prompt_message = "#{prompt} [#{defname || defval}] "
|
||||||
|
|
||||||
|
input =
|
||||||
|
if mix_shell?(),
|
||||||
|
do: Mix.shell().prompt(prompt_message),
|
||||||
|
else: :io.get_line(prompt_message)
|
||||||
|
|
||||||
|
case input do
|
||||||
|
"\n" ->
|
||||||
|
case defval do
|
||||||
|
nil ->
|
||||||
|
shell_prompt(prompt, defval, defname)
|
||||||
|
|
||||||
|
defval ->
|
||||||
|
defval
|
||||||
|
end
|
||||||
|
|
||||||
|
input ->
|
||||||
|
String.trim(input)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def shell_yes?(message) do
|
||||||
|
if mix_shell?(),
|
||||||
|
do: Mix.shell().yes?("Continue?"),
|
||||||
|
else: shell_prompt(message, "Continue?") in ~w(Yn Y y)
|
||||||
|
end
|
||||||
|
|
||||||
|
def shell_info(message) do
|
||||||
|
if mix_shell?(),
|
||||||
|
do: Mix.shell().info(message),
|
||||||
|
else: IO.puts(message)
|
||||||
|
end
|
||||||
|
|
||||||
|
def shell_error(message) do
|
||||||
|
if mix_shell?(),
|
||||||
|
do: Mix.shell().error(message),
|
||||||
|
else: IO.puts(:stderr, message)
|
||||||
|
end
|
||||||
|
|
||||||
|
@doc "Performs a safe check whether `Mix.shell/0` is available (does not raise if Mix is not loaded)"
|
||||||
|
def mix_shell?, do: :erlang.function_exported(Mix, :shell, 0)
|
||||||
|
|
||||||
|
def escape_sh_path(path) do
|
||||||
|
~S(') <> String.replace(path, ~S('), ~S(\')) <> ~S(')
|
||||||
|
end
|
||||||
|
end
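For illustration only (the module name, option and prompt below are made up, not taken from this code), a task built on these helpers might be structured like:

```elixir
# Hypothetical task using the Mix.Pleroma helpers defined above
defmodule Mix.Tasks.Pleroma.Example do
  use Mix.Task
  import Mix.Pleroma

  def run(args) do
    # Boot the application so the task can use repos, config, etc.
    start_pleroma()

    {options, _args, _invalid} = OptionParser.parse(args, strict: [name: :string])

    # Falls back to prompting the user when --name is not given
    name = get_option(options, :name, "What name should be used?", "default")

    shell_info("Using name: #{name}")
  end
end
```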
|
|
@ -1,9 +1,9 @@
|
||||||
defmodule Mix.Tasks.Pleroma.Benchmark do
|
defmodule Mix.Tasks.Pleroma.Benchmark do
|
||||||
|
import Mix.Pleroma
|
||||||
use Mix.Task
|
use Mix.Task
|
||||||
alias Mix.Tasks.Pleroma.Common
|
|
||||||
|
|
||||||
def run(["search"]) do
|
def run(["search"]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
Benchee.run(%{
|
Benchee.run(%{
|
||||||
"search" => fn ->
|
"search" => fn ->
|
||||||
|
@ -13,7 +13,7 @@ def run(["search"]) do
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["tag"]) do
|
def run(["tag"]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
Benchee.run(%{
|
Benchee.run(%{
|
||||||
"tag" => fn ->
|
"tag" => fn ->
|
|
@ -1,28 +0,0 @@
|
||||||
# Pleroma: A lightweight social networking server
|
|
||||||
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
|
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
|
||||||
|
|
||||||
defmodule Mix.Tasks.Pleroma.Common do
|
|
||||||
@doc "Common functions to be reused in mix tasks"
|
|
||||||
def start_pleroma do
|
|
||||||
Mix.Task.run("app.start")
|
|
||||||
end
|
|
||||||
|
|
||||||
def get_option(options, opt, prompt, defval \\ nil, defname \\ nil) do
|
|
||||||
Keyword.get(options, opt) ||
|
|
||||||
case Mix.shell().prompt("#{prompt} [#{defname || defval}]") do
|
|
||||||
"\n" ->
|
|
||||||
case defval do
|
|
||||||
nil -> get_option(options, opt, prompt, defval)
|
|
||||||
defval -> defval
|
|
||||||
end
|
|
||||||
|
|
||||||
opt ->
|
|
||||||
opt |> String.trim()
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def escape_sh_path(path) do
|
|
||||||
~S(') <> String.replace(path, ~S('), ~S(\')) <> ~S(')
|
|
||||||
end
|
|
||||||
end
|
|
79
lib/mix/tasks/pleroma/config.ex
Normal file
|
@ -0,0 +1,79 @@
|
||||||
|
defmodule Mix.Tasks.Pleroma.Config do
|
||||||
|
use Mix.Task
|
||||||
|
import Mix.Pleroma
|
||||||
|
alias Pleroma.Repo
|
||||||
|
alias Pleroma.Web.AdminAPI.Config
|
||||||
|
@shortdoc "Manages the location of the config"
|
||||||
|
@moduledoc """
|
||||||
|
Manages the location of the config.
|
||||||
|
|
||||||
|
## Transfers config from file to DB.
|
||||||
|
|
||||||
|
mix pleroma.config migrate_to_db
|
||||||
|
|
||||||
|
## Transfers config from DB to file.
|
||||||
|
|
||||||
|
mix pleroma.config migrate_from_db ENV
|
||||||
|
"""
|
||||||
|
|
||||||
|
def run(["migrate_to_db"]) do
|
||||||
|
start_pleroma()
|
||||||
|
|
||||||
|
if Pleroma.Config.get([:instance, :dynamic_configuration]) do
|
||||||
|
Application.get_all_env(:pleroma)
|
||||||
|
|> Enum.reject(fn {k, _v} -> k in [Pleroma.Repo, :env] end)
|
||||||
|
|> Enum.each(fn {k, v} ->
|
||||||
|
key = to_string(k) |> String.replace("Elixir.", "")
|
||||||
|
{:ok, _} = Config.update_or_create(%{group: "pleroma", key: key, value: v})
|
||||||
|
Mix.shell().info("#{key} is migrated.")
|
||||||
|
end)
|
||||||
|
|
||||||
|
Mix.shell().info("Settings migrated.")
|
||||||
|
else
|
||||||
|
Mix.shell().info(
|
||||||
|
"Migration is not allowed by config. You can change this behavior in instance settings."
|
||||||
|
)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def run(["migrate_from_db", env, delete?]) do
|
||||||
|
start_pleroma()
|
||||||
|
|
||||||
|
delete? = if delete? == "true", do: true, else: false
|
||||||
|
|
||||||
|
if Pleroma.Config.get([:instance, :dynamic_configuration]) do
|
||||||
|
config_path = "config/#{env}.exported_from_db.secret.exs"
|
||||||
|
|
||||||
|
{:ok, file} = File.open(config_path, [:write])
|
||||||
|
IO.write(file, "use Mix.Config\r\n")
|
||||||
|
|
||||||
|
Repo.all(Config)
|
||||||
|
|> Enum.each(fn config ->
|
||||||
|
mark =
|
||||||
|
if String.starts_with?(config.key, "Pleroma.") or
|
||||||
|
String.starts_with?(config.key, "Ueberauth"),
|
||||||
|
do: ",",
|
||||||
|
else: ":"
|
||||||
|
|
||||||
|
IO.write(
|
||||||
|
file,
|
||||||
|
"config :#{config.group}, #{config.key}#{mark} #{
|
||||||
|
inspect(Config.from_binary(config.value))
|
||||||
|
}\r\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
if delete? do
|
||||||
|
{:ok, _} = Repo.delete(config)
|
||||||
|
Mix.shell().info("#{config.key} deleted from DB.")
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
|
||||||
|
File.close(file)
|
||||||
|
System.cmd("mix", ["format", config_path])
|
||||||
|
else
|
||||||
|
Mix.shell().info(
|
||||||
|
"Migration is not allowed by config. You can change this behavior in instance settings."
|
||||||
|
)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
|
@ -3,12 +3,12 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Mix.Tasks.Pleroma.Database do
|
defmodule Mix.Tasks.Pleroma.Database do
|
||||||
alias Mix.Tasks.Pleroma.Common
|
|
||||||
alias Pleroma.Conversation
|
alias Pleroma.Conversation
|
||||||
alias Pleroma.Object
|
alias Pleroma.Object
|
||||||
alias Pleroma.Repo
|
alias Pleroma.Repo
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
require Logger
|
require Logger
|
||||||
|
import Mix.Pleroma
|
||||||
use Mix.Task
|
use Mix.Task
|
||||||
|
|
||||||
@shortdoc "A collection of database related tasks"
|
@shortdoc "A collection of database related tasks"
|
||||||
|
@ -45,7 +45,7 @@ def run(["remove_embedded_objects" | args]) do
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
Logger.info("Removing embedded objects")
|
Logger.info("Removing embedded objects")
|
||||||
|
|
||||||
Repo.query!(
|
Repo.query!(
|
||||||
|
@ -66,12 +66,12 @@ def run(["remove_embedded_objects" | args]) do
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["bump_all_conversations"]) do
|
def run(["bump_all_conversations"]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
Conversation.bump_for_all_activities()
|
Conversation.bump_for_all_activities()
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["update_users_following_followers_counts"]) do
|
def run(["update_users_following_followers_counts"]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
users = Repo.all(User)
|
users = Repo.all(User)
|
||||||
Enum.each(users, &User.remove_duplicated_following/1)
|
Enum.each(users, &User.remove_duplicated_following/1)
|
||||||
|
@ -89,7 +89,7 @@ def run(["prune_objects" | args]) do
|
||||||
]
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
deadline = Pleroma.Config.get([:instance, :remote_post_retention_days])
|
deadline = Pleroma.Config.get([:instance, :remote_post_retention_days])
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
defmodule Mix.Tasks.Pleroma.Digest do
|
defmodule Mix.Tasks.Pleroma.Digest do
|
||||||
use Mix.Task
|
use Mix.Task
|
||||||
alias Mix.Tasks.Pleroma.Common
|
|
||||||
|
|
||||||
@shortdoc "Manages digest emails"
|
@shortdoc "Manages digest emails"
|
||||||
@moduledoc """
|
@moduledoc """
|
||||||
|
@ -14,7 +13,7 @@ defmodule Mix.Tasks.Pleroma.Digest do
|
||||||
Example: ``mix pleroma.digest test donaldtheduck 2019-05-20``
|
Example: ``mix pleroma.digest test donaldtheduck 2019-05-20``
|
||||||
"""
|
"""
|
||||||
def run(["test", nickname | opts]) do
|
def run(["test", nickname | opts]) do
|
||||||
Common.start_pleroma()
|
Mix.Pleroma.start_pleroma()
|
||||||
|
|
||||||
user = Pleroma.User.get_by_nickname(nickname)
|
user = Pleroma.User.get_by_nickname(nickname)
|
||||||
|
|
||||||
|
|
49
lib/mix/tasks/pleroma/ecto/ecto.ex
Normal file
|
@ -0,0 +1,49 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
defmodule Mix.Tasks.Pleroma.Ecto do
|
||||||
|
@doc """
|
||||||
|
Ensures the given repository's migrations path exists on the file system.
|
||||||
|
"""
|
||||||
|
@spec ensure_migrations_path(Ecto.Repo.t(), Keyword.t()) :: String.t()
|
||||||
|
def ensure_migrations_path(repo, opts) do
|
||||||
|
path = opts[:migrations_path] || Path.join(source_repo_priv(repo), "migrations")
|
||||||
|
|
||||||
|
path =
|
||||||
|
case Path.type(path) do
|
||||||
|
:relative ->
|
||||||
|
Path.join(Application.app_dir(:pleroma), path)
|
||||||
|
|
||||||
|
:absolute ->
|
||||||
|
path
|
||||||
|
end
|
||||||
|
|
||||||
|
if not File.dir?(path) do
|
||||||
|
raise_missing_migrations(Path.relative_to_cwd(path), repo)
|
||||||
|
end
|
||||||
|
|
||||||
|
path
|
||||||
|
end
|
||||||
|
|
||||||
|
@doc """
|
||||||
|
Returns the private repository path relative to the source.
|
||||||
|
"""
|
||||||
|
def source_repo_priv(repo) do
|
||||||
|
config = repo.config()
|
||||||
|
priv = config[:priv] || "priv/#{repo |> Module.split() |> List.last() |> Macro.underscore()}"
|
||||||
|
Path.join(Application.app_dir(:pleroma), priv)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp raise_missing_migrations(path, repo) do
|
||||||
|
raise("""
|
||||||
|
Could not find migrations directory #{inspect(path)}
|
||||||
|
for repo #{inspect(repo)}.
|
||||||
|
This may be because you are in a new project and the
|
||||||
|
migration directory has not been created yet. Creating an
|
||||||
|
empty directory at the path above will fix this error.
|
||||||
|
If you expected existing migrations to be found, please
|
||||||
|
make sure your repository has been properly configured
|
||||||
|
and the configured path exists.
|
||||||
|
""")
|
||||||
|
end
|
||||||
|
end
|
63
lib/mix/tasks/pleroma/ecto/migrate.ex
Normal file
|
@ -0,0 +1,63 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Mix.Tasks.Pleroma.Ecto.Migrate do
|
||||||
|
use Mix.Task
|
||||||
|
import Mix.Pleroma
|
||||||
|
require Logger
|
||||||
|
|
||||||
|
@shortdoc "Wrapper on `ecto.migrate` task."
|
||||||
|
|
||||||
|
@aliases [
|
||||||
|
n: :step,
|
||||||
|
v: :to
|
||||||
|
]
|
||||||
|
|
||||||
|
@switches [
|
||||||
|
all: :boolean,
|
||||||
|
step: :integer,
|
||||||
|
to: :integer,
|
||||||
|
quiet: :boolean,
|
||||||
|
log_sql: :boolean,
|
||||||
|
strict_version_order: :boolean,
|
||||||
|
migrations_path: :string
|
||||||
|
]
|
||||||
|
|
||||||
|
@moduledoc """
|
||||||
|
Changes the `Logger` level to `:info` before starting the migration.
|
||||||
|
Changes level back when migration ends.
|
||||||
|
|
||||||
|
## Start migration
|
||||||
|
|
||||||
|
mix pleroma.ecto.migrate [OPTIONS]
|
||||||
|
|
||||||
|
Options:
|
||||||
|
- see https://hexdocs.pm/ecto/2.0.0/Mix.Tasks.Ecto.Migrate.html
|
||||||
|
"""
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def run(args \\ []) do
|
||||||
|
load_pleroma()
|
||||||
|
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
|
||||||
|
|
||||||
|
opts =
|
||||||
|
if opts[:to] || opts[:step] || opts[:all],
|
||||||
|
do: opts,
|
||||||
|
else: Keyword.put(opts, :all, true)
|
||||||
|
|
||||||
|
opts =
|
||||||
|
if opts[:quiet],
|
||||||
|
do: Keyword.merge(opts, log: false, log_sql: false),
|
||||||
|
else: opts
|
||||||
|
|
||||||
|
path = Mix.Tasks.Pleroma.Ecto.ensure_migrations_path(Pleroma.Repo, opts)
|
||||||
|
|
||||||
|
level = Logger.level()
|
||||||
|
Logger.configure(level: :info)
|
||||||
|
|
||||||
|
{:ok, _, _} = Ecto.Migrator.with_repo(Pleroma.Repo, &Ecto.Migrator.run(&1, path, :up, opts))
|
||||||
|
|
||||||
|
Logger.configure(level: level)
|
||||||
|
end
|
||||||
|
end
|
67
lib/mix/tasks/pleroma/ecto/rollback.ex
Normal file
|
@ -0,0 +1,67 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Mix.Tasks.Pleroma.Ecto.Rollback do
|
||||||
|
use Mix.Task
|
||||||
|
import Mix.Pleroma
|
||||||
|
require Logger
|
||||||
|
@shortdoc "Wrapper on `ecto.rollback` task"
|
||||||
|
|
||||||
|
@aliases [
|
||||||
|
n: :step,
|
||||||
|
v: :to
|
||||||
|
]
|
||||||
|
|
||||||
|
@switches [
|
||||||
|
all: :boolean,
|
||||||
|
step: :integer,
|
||||||
|
to: :integer,
|
||||||
|
start: :boolean,
|
||||||
|
quiet: :boolean,
|
||||||
|
log_sql: :boolean,
|
||||||
|
migrations_path: :string
|
||||||
|
]
|
||||||
|
|
||||||
|
@moduledoc """
|
||||||
|
Changes the `Logger` level to `:info` before starting the rollback.
|
||||||
|
Changes level back when rollback ends.
|
||||||
|
|
||||||
|
## Start rollback
|
||||||
|
|
||||||
|
mix pleroma.ecto.rollback
|
||||||
|
|
||||||
|
Options:
|
||||||
|
- see https://hexdocs.pm/ecto/2.0.0/Mix.Tasks.Ecto.Rollback.html
|
||||||
|
"""
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def run(args \\ []) do
|
||||||
|
load_pleroma()
|
||||||
|
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
|
||||||
|
|
||||||
|
opts =
|
||||||
|
if opts[:to] || opts[:step] || opts[:all],
|
||||||
|
do: opts,
|
||||||
|
else: Keyword.put(opts, :step, 1)
|
||||||
|
|
||||||
|
opts =
|
||||||
|
if opts[:quiet],
|
||||||
|
do: Keyword.merge(opts, log: false, log_sql: false),
|
||||||
|
else: opts
|
||||||
|
|
||||||
|
path = Mix.Tasks.Pleroma.Ecto.ensure_migrations_path(Pleroma.Repo, opts)
|
||||||
|
|
||||||
|
level = Logger.level()
|
||||||
|
Logger.configure(level: :info)
|
||||||
|
|
||||||
|
if Pleroma.Config.get(:env) == :test do
|
||||||
|
Logger.info("Rollback succesfully")
|
||||||
|
else
|
||||||
|
{:ok, _, _} =
|
||||||
|
Ecto.Migrator.with_repo(Pleroma.Repo, &Ecto.Migrator.run(&1, path, :down, opts))
|
||||||
|
end
|
||||||
|
|
||||||
|
Logger.configure(level: level)
|
||||||
|
end
|
||||||
|
end
|
|
@ -55,15 +55,13 @@ defmodule Mix.Tasks.Pleroma.Emoji do
|
||||||
are extracted).
|
are extracted).
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@default_manifest Pleroma.Config.get!([:emoji, :default_manifest])
|
|
||||||
|
|
||||||
def run(["ls-packs" | args]) do
|
def run(["ls-packs" | args]) do
|
||||||
Application.ensure_all_started(:hackney)
|
Application.ensure_all_started(:hackney)
|
||||||
|
|
||||||
{options, [], []} = parse_global_opts(args)
|
{options, [], []} = parse_global_opts(args)
|
||||||
|
|
||||||
manifest =
|
manifest =
|
||||||
fetch_manifest(if options[:manifest], do: options[:manifest], else: @default_manifest)
|
fetch_manifest(if options[:manifest], do: options[:manifest], else: default_manifest())
|
||||||
|
|
||||||
Enum.each(manifest, fn {name, info} ->
|
Enum.each(manifest, fn {name, info} ->
|
||||||
to_print = [
|
to_print = [
|
||||||
|
@ -88,7 +86,7 @@ def run(["get-packs" | args]) do
|
||||||
|
|
||||||
{options, pack_names, []} = parse_global_opts(args)
|
{options, pack_names, []} = parse_global_opts(args)
|
||||||
|
|
||||||
manifest_url = if options[:manifest], do: options[:manifest], else: @default_manifest
|
manifest_url = if options[:manifest], do: options[:manifest], else: default_manifest()
|
||||||
|
|
||||||
manifest = fetch_manifest(manifest_url)
|
manifest = fetch_manifest(manifest_url)
|
||||||
|
|
||||||
|
@ -298,4 +296,6 @@ defp client do
|
||||||
|
|
||||||
Tesla.client(middleware)
|
Tesla.client(middleware)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp default_manifest, do: Pleroma.Config.get!([:emoji, :default_manifest])
|
||||||
end
|
end
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
|
|
||||||
defmodule Mix.Tasks.Pleroma.Instance do
|
defmodule Mix.Tasks.Pleroma.Instance do
|
||||||
use Mix.Task
|
use Mix.Task
|
||||||
alias Mix.Tasks.Pleroma.Common
|
import Mix.Pleroma
|
||||||
|
|
||||||
@shortdoc "Manages Pleroma instance"
|
@shortdoc "Manages Pleroma instance"
|
||||||
@moduledoc """
|
@moduledoc """
|
||||||
|
@ -29,7 +29,11 @@ defmodule Mix.Tasks.Pleroma.Instance do
|
||||||
- `--dbname DBNAME` - the name of the database to use
|
- `--dbname DBNAME` - the name of the database to use
|
||||||
- `--dbuser DBUSER` - the user (aka role) to use for the database connection
|
- `--dbuser DBUSER` - the user (aka role) to use for the database connection
|
||||||
- `--dbpass DBPASS` - the password to use for the database connection
|
- `--dbpass DBPASS` - the password to use for the database connection
|
||||||
|
- `--rum Y/N` - Whether to enable RUM indexes
|
||||||
- `--indexable Y/N` - Allow/disallow indexing site by search engines
|
- `--indexable Y/N` - Allow/disallow indexing site by search engines
|
||||||
|
- `--db-configurable Y/N` - Allow/disallow configuring instance from admin part
|
||||||
|
- `--uploads-dir` - the directory uploads go in when using a local uploader
|
||||||
|
- `--static-dir` - the directory custom public files should be read from (custom emojis, frontend bundle overrides, robots.txt, etc.)
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def run(["gen" | rest]) do
|
def run(["gen" | rest]) do
|
||||||
|
@ -48,7 +52,11 @@ def run(["gen" | rest]) do
|
||||||
dbname: :string,
|
dbname: :string,
|
||||||
dbuser: :string,
|
dbuser: :string,
|
||||||
dbpass: :string,
|
dbpass: :string,
|
||||||
indexable: :string
|
rum: :string,
|
||||||
|
indexable: :string,
|
||||||
|
db_configurable: :string,
|
||||||
|
uploads_dir: :string,
|
||||||
|
static_dir: :string
|
||||||
],
|
],
|
||||||
aliases: [
|
aliases: [
|
||||||
o: :output,
|
o: :output,
|
||||||
|
@ -68,7 +76,7 @@ def run(["gen" | rest]) do
|
||||||
if proceed? do
|
if proceed? do
|
||||||
[domain, port | _] =
|
[domain, port | _] =
|
||||||
String.split(
|
String.split(
|
||||||
Common.get_option(
|
get_option(
|
||||||
options,
|
options,
|
||||||
:domain,
|
:domain,
|
||||||
"What domain will your instance use? (e.g pleroma.soykaf.com)"
|
"What domain will your instance use? (e.g pleroma.soykaf.com)"
|
||||||
|
@ -77,16 +85,16 @@ def run(["gen" | rest]) do
|
||||||
) ++ [443]
|
) ++ [443]
|
||||||
|
|
||||||
name =
|
name =
|
||||||
Common.get_option(
|
get_option(
|
||||||
options,
|
options,
|
||||||
:instance_name,
|
:instance_name,
|
||||||
"What is the name of your instance? (e.g. Pleroma/Soykaf)"
|
"What is the name of your instance? (e.g. Pleroma/Soykaf)"
|
||||||
)
|
)
|
||||||
|
|
||||||
email = Common.get_option(options, :admin_email, "What is your admin email address?")
|
email = get_option(options, :admin_email, "What is your admin email address?")
|
||||||
|
|
||||||
notify_email =
|
notify_email =
|
||||||
Common.get_option(
|
get_option(
|
||||||
options,
|
options,
|
||||||
:notify_email,
|
:notify_email,
|
||||||
"What email address do you want to use for sending email notifications?",
|
"What email address do you want to use for sending email notifications?",
|
||||||
|
@ -94,21 +102,27 @@ def run(["gen" | rest]) do
|
||||||
)
|
)
|
||||||
|
|
||||||
indexable =
|
indexable =
|
||||||
Common.get_option(
|
get_option(
|
||||||
options,
|
options,
|
||||||
:indexable,
|
:indexable,
|
||||||
"Do you want search engines to index your site? (y/n)",
|
"Do you want search engines to index your site? (y/n)",
|
||||||
"y"
|
"y"
|
||||||
) === "y"
|
) === "y"
|
||||||
|
|
||||||
dbhost =
|
db_configurable? =
|
||||||
Common.get_option(options, :dbhost, "What is the hostname of your database?", "localhost")
|
get_option(
|
||||||
|
options,
|
||||||
|
:db_configurable,
|
||||||
|
"Do you want to store the configuration in the database (allows controlling it from admin-fe)? (y/n)",
|
||||||
|
"n"
|
||||||
|
) === "y"
|
||||||
|
|
||||||
dbname =
|
dbhost = get_option(options, :dbhost, "What is the hostname of your database?", "localhost")
|
||||||
Common.get_option(options, :dbname, "What is the name of your database?", "pleroma_dev")
|
|
||||||
|
dbname = get_option(options, :dbname, "What is the name of your database?", "pleroma")
|
||||||
|
|
||||||
dbuser =
|
dbuser =
|
||||||
Common.get_option(
|
get_option(
|
||||||
options,
|
options,
|
||||||
:dbuser,
|
:dbuser,
|
||||||
"What is the user used to connect to your database?",
|
"What is the user used to connect to your database?",
|
||||||
|
@ -116,7 +130,7 @@ def run(["gen" | rest]) do
|
||||||
)
|
)
|
||||||
|
|
||||||
dbpass =
|
dbpass =
|
||||||
Common.get_option(
|
get_option(
|
||||||
options,
|
options,
|
||||||
:dbpass,
|
:dbpass,
|
||||||
"What is the password used to connect to your database?",
|
"What is the password used to connect to your database?",
|
||||||
|
@ -124,14 +138,39 @@ def run(["gen" | rest]) do
|
||||||
"autogenerated"
|
"autogenerated"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
rum_enabled =
|
||||||
|
get_option(
|
||||||
|
options,
|
||||||
|
:rum,
|
||||||
|
"Would you like to use RUM indices?",
|
||||||
|
"n"
|
||||||
|
) === "y"
|
||||||
|
|
||||||
|
uploads_dir =
|
||||||
|
get_option(
|
||||||
|
options,
|
||||||
|
:upload_dir,
|
||||||
|
"What directory should media uploads go in (when using the local uploader)?",
|
||||||
|
Pleroma.Config.get([Pleroma.Uploaders.Local, :uploads])
|
||||||
|
)
|
||||||
|
|
||||||
|
static_dir =
|
||||||
|
get_option(
|
||||||
|
options,
|
||||||
|
:static_dir,
|
||||||
|
"What directory should custom public files be read from (custom emojis, frontend bundle overrides, robots.txt, etc.)?",
|
||||||
|
Pleroma.Config.get([:instance, :static_dir])
|
||||||
|
)
|
||||||
|
|
||||||
secret = :crypto.strong_rand_bytes(64) |> Base.encode64() |> binary_part(0, 64)
|
secret = :crypto.strong_rand_bytes(64) |> Base.encode64() |> binary_part(0, 64)
|
||||||
jwt_secret = :crypto.strong_rand_bytes(64) |> Base.encode64() |> binary_part(0, 64)
|
jwt_secret = :crypto.strong_rand_bytes(64) |> Base.encode64() |> binary_part(0, 64)
|
||||||
signing_salt = :crypto.strong_rand_bytes(8) |> Base.encode64() |> binary_part(0, 8)
|
signing_salt = :crypto.strong_rand_bytes(8) |> Base.encode64() |> binary_part(0, 8)
|
||||||
{web_push_public_key, web_push_private_key} = :crypto.generate_key(:ecdh, :prime256v1)
|
{web_push_public_key, web_push_private_key} = :crypto.generate_key(:ecdh, :prime256v1)
|
||||||
|
template_dir = Application.app_dir(:pleroma, "priv") <> "/templates"
|
||||||
|
|
||||||
result_config =
|
result_config =
|
||||||
EEx.eval_file(
|
EEx.eval_file(
|
||||||
"sample_config.eex" |> Path.expand(__DIR__),
|
template_dir <> "/sample_config.eex",
|
||||||
domain: domain,
|
domain: domain,
|
||||||
port: port,
|
port: port,
|
||||||
email: email,
|
email: email,
|
||||||
|
@ -141,47 +180,39 @@ def run(["gen" | rest]) do
|
||||||
dbname: dbname,
|
dbname: dbname,
|
||||||
dbuser: dbuser,
|
dbuser: dbuser,
|
||||||
dbpass: dbpass,
|
dbpass: dbpass,
|
||||||
version: Pleroma.Mixfile.project() |> Keyword.get(:version),
|
|
||||||
secret: secret,
|
secret: secret,
|
||||||
jwt_secret: jwt_secret,
|
jwt_secret: jwt_secret,
|
||||||
signing_salt: signing_salt,
|
signing_salt: signing_salt,
|
||||||
web_push_public_key: Base.url_encode64(web_push_public_key, padding: false),
|
web_push_public_key: Base.url_encode64(web_push_public_key, padding: false),
|
||||||
web_push_private_key: Base.url_encode64(web_push_private_key, padding: false)
|
web_push_private_key: Base.url_encode64(web_push_private_key, padding: false),
|
||||||
|
db_configurable?: db_configurable?,
|
||||||
|
static_dir: static_dir,
|
||||||
|
uploads_dir: uploads_dir,
|
||||||
|
rum_enabled: rum_enabled
|
||||||
)
|
)
|
||||||
|
|
||||||
result_psql =
|
result_psql =
|
||||||
EEx.eval_file(
|
EEx.eval_file(
|
||||||
"sample_psql.eex" |> Path.expand(__DIR__),
|
template_dir <> "/sample_psql.eex",
|
||||||
dbname: dbname,
|
dbname: dbname,
|
||||||
dbuser: dbuser,
|
dbuser: dbuser,
|
||||||
dbpass: dbpass
|
dbpass: dbpass,
|
||||||
|
rum_enabled: rum_enabled
|
||||||
)
|
)
|
||||||
|
|
||||||
Mix.shell().info(
|
shell_info("Writing config to #{config_path}.")
|
||||||
"Writing config to #{config_path}. You should rename it to config/prod.secret.exs or config/dev.secret.exs."
|
|
||||||
)
|
|
||||||
|
|
||||||
File.write(config_path, result_config)
|
File.write(config_path, result_config)
|
||||||
Mix.shell().info("Writing #{psql_path}.")
|
shell_info("Writing the postgres script to #{psql_path}.")
|
||||||
File.write(psql_path, result_psql)
|
File.write(psql_path, result_psql)
|
||||||
|
|
||||||
write_robots_txt(indexable)
|
write_robots_txt(indexable, template_dir)
|
||||||
|
|
||||||
Mix.shell().info(
|
shell_info(
|
||||||
"\n" <>
|
"\n All files successfully written! Refer to the installation instructions for your platform for next steps"
|
||||||
"""
|
|
||||||
To get started:
|
|
||||||
1. Verify the contents of the generated files.
|
|
||||||
2. Run `sudo -u postgres psql -f #{Common.escape_sh_path(psql_path)}`.
|
|
||||||
""" <>
|
|
||||||
if config_path in ["config/dev.secret.exs", "config/prod.secret.exs"] do
|
|
||||||
""
|
|
||||||
else
|
|
||||||
"3. Run `mv #{Common.escape_sh_path(config_path)} 'config/prod.secret.exs'`."
|
|
||||||
end
|
|
||||||
)
|
)
|
||||||
else
|
else
|
||||||
Mix.shell().error(
|
shell_error(
|
||||||
"The task would have overwritten the following files:\n" <>
|
"The task would have overwritten the following files:\n" <>
|
||||||
(Enum.map(paths, &"- #{&1}\n") |> Enum.join("")) <>
|
(Enum.map(paths, &"- #{&1}\n") |> Enum.join("")) <>
|
||||||
"Rerun with `--force` to overwrite them."
|
"Rerun with `--force` to overwrite them."
|
||||||
|
@ -189,10 +220,10 @@ def run(["gen" | rest]) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp write_robots_txt(indexable) do
|
defp write_robots_txt(indexable, template_dir) do
|
||||||
robots_txt =
|
robots_txt =
|
||||||
EEx.eval_file(
|
EEx.eval_file(
|
||||||
Path.expand("robots_txt.eex", __DIR__),
|
template_dir <> "/robots_txt.eex",
|
||||||
indexable: indexable
|
indexable: indexable
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -206,10 +237,10 @@ defp write_robots_txt(indexable) do
|
||||||
|
|
||||||
if File.exists?(robots_txt_path) do
|
if File.exists?(robots_txt_path) do
|
||||||
File.cp!(robots_txt_path, "#{robots_txt_path}.bak")
|
File.cp!(robots_txt_path, "#{robots_txt_path}.bak")
|
||||||
Mix.shell().info("Backing up existing robots.txt to #{robots_txt_path}.bak")
|
shell_info("Backing up existing robots.txt to #{robots_txt_path}.bak")
|
||||||
end
|
end
|
||||||
|
|
||||||
File.write(robots_txt_path, robots_txt)
|
File.write(robots_txt_path, robots_txt)
|
||||||
Mix.shell().info("Writing #{robots_txt_path}.")
|
shell_info("Writing #{robots_txt_path}.")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
|
|
||||||
defmodule Mix.Tasks.Pleroma.Relay do
|
defmodule Mix.Tasks.Pleroma.Relay do
|
||||||
use Mix.Task
|
use Mix.Task
|
||||||
alias Mix.Tasks.Pleroma.Common
|
import Mix.Pleroma
|
||||||
alias Pleroma.Web.ActivityPub.Relay
|
alias Pleroma.Web.ActivityPub.Relay
|
||||||
|
|
||||||
@shortdoc "Manages remote relays"
|
@shortdoc "Manages remote relays"
|
||||||
|
@ -24,24 +24,24 @@ defmodule Mix.Tasks.Pleroma.Relay do
|
||||||
Example: ``mix pleroma.relay unfollow https://example.org/relay``
|
Example: ``mix pleroma.relay unfollow https://example.org/relay``
|
||||||
"""
|
"""
|
||||||
def run(["follow", target]) do
|
def run(["follow", target]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
with {:ok, _activity} <- Relay.follow(target) do
|
with {:ok, _activity} <- Relay.follow(target) do
|
||||||
# put this task to sleep to allow the genserver to push out the messages
|
# put this task to sleep to allow the genserver to push out the messages
|
||||||
:timer.sleep(500)
|
:timer.sleep(500)
|
||||||
else
|
else
|
||||||
{:error, e} -> Mix.shell().error("Error while following #{target}: #{inspect(e)}")
|
{:error, e} -> shell_error("Error while following #{target}: #{inspect(e)}")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["unfollow", target]) do
|
def run(["unfollow", target]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
with {:ok, _activity} <- Relay.unfollow(target) do
|
with {:ok, _activity} <- Relay.unfollow(target) do
|
||||||
# put this task to sleep to allow the genserver to push out the messages
|
# put this task to sleep to allow the genserver to push out the messages
|
||||||
:timer.sleep(500)
|
:timer.sleep(500)
|
||||||
else
|
else
|
||||||
{:error, e} -> Mix.shell().error("Error while following #{target}: #{inspect(e)}")
|
{:error, e} -> shell_error("Error while following #{target}: #{inspect(e)}")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
|
|
||||||
defmodule Mix.Tasks.Pleroma.Uploads do
|
defmodule Mix.Tasks.Pleroma.Uploads do
|
||||||
use Mix.Task
|
use Mix.Task
|
||||||
alias Mix.Tasks.Pleroma.Common
|
import Mix.Pleroma
|
||||||
alias Pleroma.Upload
|
alias Pleroma.Upload
|
||||||
alias Pleroma.Uploaders.Local
|
alias Pleroma.Uploaders.Local
|
||||||
require Logger
|
require Logger
|
||||||
|
@ -24,7 +24,7 @@ defmodule Mix.Tasks.Pleroma.Uploads do
|
||||||
"""
|
"""
|
||||||
def run(["migrate_local", target_uploader | args]) do
|
def run(["migrate_local", target_uploader | args]) do
|
||||||
delete? = Enum.member?(args, "--delete")
|
delete? = Enum.member?(args, "--delete")
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
local_path = Pleroma.Config.get!([Local, :uploads])
|
local_path = Pleroma.Config.get!([Local, :uploads])
|
||||||
uploader = Module.concat(Pleroma.Uploaders, target_uploader)
|
uploader = Module.concat(Pleroma.Uploaders, target_uploader)
|
||||||
|
|
||||||
|
@ -38,10 +38,10 @@ def run(["migrate_local", target_uploader | args]) do
|
||||||
Pleroma.Config.put([Upload, :uploader], uploader)
|
Pleroma.Config.put([Upload, :uploader], uploader)
|
||||||
end
|
end
|
||||||
|
|
||||||
Mix.shell().info("Migrating files from local #{local_path} to #{to_string(uploader)}")
|
shell_info("Migrating files from local #{local_path} to #{to_string(uploader)}")
|
||||||
|
|
||||||
if delete? do
|
if delete? do
|
||||||
Mix.shell().info(
|
shell_info(
|
||||||
"Attention: uploaded files will be deleted, hope you have backups! (--delete ; cancel with ^C)"
|
"Attention: uploaded files will be deleted, hope you have backups! (--delete ; cancel with ^C)"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -78,7 +78,7 @@ def run(["migrate_local", target_uploader | args]) do
|
||||||
|> Enum.filter(& &1)
|
|> Enum.filter(& &1)
|
||||||
|
|
||||||
total_count = length(uploads)
|
total_count = length(uploads)
|
||||||
Mix.shell().info("Found #{total_count} uploads")
|
shell_info("Found #{total_count} uploads")
|
||||||
|
|
||||||
uploads
|
uploads
|
||||||
|> Task.async_stream(
|
|> Task.async_stream(
|
||||||
|
@ -90,7 +90,7 @@ def run(["migrate_local", target_uploader | args]) do
|
||||||
:ok
|
:ok
|
||||||
|
|
||||||
error ->
|
error ->
|
||||||
Mix.shell().error("failed to upload #{inspect(upload.path)}: #{inspect(error)}")
|
shell_error("failed to upload #{inspect(upload.path)}: #{inspect(error)}")
|
||||||
end
|
end
|
||||||
end,
|
end,
|
||||||
timeout: 150_000
|
timeout: 150_000
|
||||||
|
@ -99,10 +99,10 @@ def run(["migrate_local", target_uploader | args]) do
|
||||||
# credo:disable-for-next-line Credo.Check.Warning.UnusedEnumOperation
|
# credo:disable-for-next-line Credo.Check.Warning.UnusedEnumOperation
|
||||||
|> Enum.reduce(0, fn done, count ->
|
|> Enum.reduce(0, fn done, count ->
|
||||||
count = count + length(done)
|
count = count + length(done)
|
||||||
Mix.shell().info("Uploaded #{count}/#{total_count} files")
|
shell_info("Uploaded #{count}/#{total_count} files")
|
||||||
count
|
count
|
||||||
end)
|
end)
|
||||||
|
|
||||||
Mix.shell().info("Done!")
|
shell_info("Done!")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -5,9 +5,10 @@
|
||||||
defmodule Mix.Tasks.Pleroma.User do
|
defmodule Mix.Tasks.Pleroma.User do
|
||||||
use Mix.Task
|
use Mix.Task
|
||||||
import Ecto.Changeset
|
import Ecto.Changeset
|
||||||
alias Mix.Tasks.Pleroma.Common
|
import Mix.Pleroma
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
alias Pleroma.UserInviteToken
|
alias Pleroma.UserInviteToken
|
||||||
|
alias Pleroma.Web.OAuth
|
||||||
|
|
||||||
@shortdoc "Manages Pleroma users"
|
@shortdoc "Manages Pleroma users"
|
||||||
@moduledoc """
|
@moduledoc """
|
||||||
|
@ -49,6 +50,10 @@ defmodule Mix.Tasks.Pleroma.User do
|
||||||
|
|
||||||
mix pleroma.user delete_activities NICKNAME
|
mix pleroma.user delete_activities NICKNAME
|
||||||
|
|
||||||
|
## Sign user out from all applications (delete user's OAuth tokens and authorizations).
|
||||||
|
|
||||||
|
mix pleroma.user sign_out NICKNAME
|
||||||
|
|
||||||
## Deactivate or activate the user's account.
|
## Deactivate or activate the user's account.
|
||||||
|
|
||||||
mix pleroma.user toggle_activated NICKNAME
|
mix pleroma.user toggle_activated NICKNAME
|
||||||
|
@ -115,7 +120,7 @@ def run(["new", nickname, email | rest]) do
|
||||||
admin? = Keyword.get(options, :admin, false)
|
admin? = Keyword.get(options, :admin, false)
|
||||||
assume_yes? = Keyword.get(options, :assume_yes, false)
|
assume_yes? = Keyword.get(options, :assume_yes, false)
|
||||||
|
|
||||||
Mix.shell().info("""
|
shell_info("""
|
||||||
A user will be created with the following information:
|
A user will be created with the following information:
|
||||||
- nickname: #{nickname}
|
- nickname: #{nickname}
|
||||||
- email: #{email}
|
- email: #{email}
|
||||||
|
@ -128,10 +133,10 @@ def run(["new", nickname, email | rest]) do
|
||||||
- admin: #{if(admin?, do: "true", else: "false")}
|
- admin: #{if(admin?, do: "true", else: "false")}
|
||||||
""")
|
""")
|
||||||
|
|
||||||
proceed? = assume_yes? or Mix.shell().yes?("Continue?")
|
proceed? = assume_yes? or shell_yes?("Continue?")
|
||||||
|
|
||||||
if proceed? do
|
if proceed? do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
params = %{
|
params = %{
|
||||||
nickname: nickname,
|
nickname: nickname,
|
||||||
|
@ -145,7 +150,7 @@ def run(["new", nickname, email | rest]) do
|
||||||
changeset = User.register_changeset(%User{}, params, need_confirmation: false)
|
changeset = User.register_changeset(%User{}, params, need_confirmation: false)
|
||||||
{:ok, _user} = User.register(changeset)
|
{:ok, _user} = User.register(changeset)
|
||||||
|
|
||||||
Mix.shell().info("User #{nickname} created")
|
shell_info("User #{nickname} created")
|
||||||
|
|
||||||
if moderator? do
|
if moderator? do
|
||||||
run(["set", nickname, "--moderator"])
|
run(["set", nickname, "--moderator"])
|
||||||
|
@ -159,64 +164,64 @@ def run(["new", nickname, email | rest]) do
|
||||||
run(["reset_password", nickname])
|
run(["reset_password", nickname])
|
||||||
end
|
end
|
||||||
else
|
else
|
||||||
Mix.shell().info("User will not be created.")
|
shell_info("User will not be created.")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["rm", nickname]) do
|
def run(["rm", nickname]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
|
with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
|
||||||
User.perform(:delete, user)
|
User.perform(:delete, user)
|
||||||
Mix.shell().info("User #{nickname} deleted.")
|
shell_info("User #{nickname} deleted.")
|
||||||
else
|
else
|
||||||
_ ->
|
_ ->
|
||||||
Mix.shell().error("No local user #{nickname}")
|
shell_error("No local user #{nickname}")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["toggle_activated", nickname]) do
|
def run(["toggle_activated", nickname]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
with %User{} = user <- User.get_cached_by_nickname(nickname) do
|
with %User{} = user <- User.get_cached_by_nickname(nickname) do
|
||||||
{:ok, user} = User.deactivate(user, !user.info.deactivated)
|
{:ok, user} = User.deactivate(user, !user.info.deactivated)
|
||||||
|
|
||||||
Mix.shell().info(
|
shell_info(
|
||||||
"Activation status of #{nickname}: #{if(user.info.deactivated, do: "de", else: "")}activated"
|
"Activation status of #{nickname}: #{if(user.info.deactivated, do: "de", else: "")}activated"
|
||||||
)
|
)
|
||||||
else
|
else
|
||||||
_ ->
|
_ ->
|
||||||
Mix.shell().error("No user #{nickname}")
|
shell_error("No user #{nickname}")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["reset_password", nickname]) do
|
def run(["reset_password", nickname]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
with %User{local: true} = user <- User.get_cached_by_nickname(nickname),
|
with %User{local: true} = user <- User.get_cached_by_nickname(nickname),
|
||||||
{:ok, token} <- Pleroma.PasswordResetToken.create_token(user) do
|
{:ok, token} <- Pleroma.PasswordResetToken.create_token(user) do
|
||||||
Mix.shell().info("Generated password reset token for #{user.nickname}")
|
shell_info("Generated password reset token for #{user.nickname}")
|
||||||
|
|
||||||
IO.puts(
|
IO.puts(
|
||||||
"URL: #{
|
"URL: #{
|
||||||
Pleroma.Web.Router.Helpers.util_url(
|
Pleroma.Web.Router.Helpers.reset_password_url(
|
||||||
Pleroma.Web.Endpoint,
|
Pleroma.Web.Endpoint,
|
||||||
:show_password_reset,
|
:reset,
|
||||||
token.token
|
token.token
|
||||||
)
|
)
|
||||||
}"
|
}"
|
||||||
)
|
)
|
||||||
else
|
else
|
||||||
_ ->
|
_ ->
|
||||||
Mix.shell().error("No local user #{nickname}")
|
shell_error("No local user #{nickname}")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["unsubscribe", nickname]) do
|
def run(["unsubscribe", nickname]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
with %User{} = user <- User.get_cached_by_nickname(nickname) do
|
with %User{} = user <- User.get_cached_by_nickname(nickname) do
|
||||||
Mix.shell().info("Deactivating #{user.nickname}")
|
shell_info("Deactivating #{user.nickname}")
|
||||||
User.deactivate(user)
|
User.deactivate(user)
|
||||||
|
|
||||||
{:ok, friends} = User.get_friends(user)
|
{:ok, friends} = User.get_friends(user)
|
||||||
|
@ -224,7 +229,7 @@ def run(["unsubscribe", nickname]) do
|
||||||
Enum.each(friends, fn friend ->
|
Enum.each(friends, fn friend ->
|
||||||
user = User.get_cached_by_id(user.id)
|
user = User.get_cached_by_id(user.id)
|
||||||
|
|
||||||
Mix.shell().info("Unsubscribing #{friend.nickname} from #{user.nickname}")
|
shell_info("Unsubscribing #{friend.nickname} from #{user.nickname}")
|
||||||
User.unfollow(user, friend)
|
User.unfollow(user, friend)
|
||||||
end)
|
end)
|
||||||
|
|
||||||
|
@ -233,16 +238,16 @@ def run(["unsubscribe", nickname]) do
|
||||||
user = User.get_cached_by_id(user.id)
|
user = User.get_cached_by_id(user.id)
|
||||||
|
|
||||||
if Enum.empty?(user.following) do
|
if Enum.empty?(user.following) do
|
||||||
Mix.shell().info("Successfully unsubscribed all followers from #{user.nickname}")
|
shell_info("Successfully unsubscribed all followers from #{user.nickname}")
|
||||||
end
|
end
|
||||||
else
|
else
|
||||||
_ ->
|
_ ->
|
||||||
Mix.shell().error("No user #{nickname}")
|
shell_error("No user #{nickname}")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["set", nickname | rest]) do
|
def run(["set", nickname | rest]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
{options, [], []} =
|
{options, [], []} =
|
||||||
OptionParser.parse(
|
OptionParser.parse(
|
||||||
|
@ -274,33 +279,33 @@ def run(["set", nickname | rest]) do
|
||||||
end
|
end
|
||||||
else
|
else
|
||||||
_ ->
|
_ ->
|
||||||
Mix.shell().error("No local user #{nickname}")
|
shell_error("No local user #{nickname}")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["tag", nickname | tags]) do
|
def run(["tag", nickname | tags]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
with %User{} = user <- User.get_cached_by_nickname(nickname) do
|
with %User{} = user <- User.get_cached_by_nickname(nickname) do
|
||||||
user = user |> User.tag(tags)
|
user = user |> User.tag(tags)
|
||||||
|
|
||||||
Mix.shell().info("Tags of #{user.nickname}: #{inspect(tags)}")
|
shell_info("Tags of #{user.nickname}: #{inspect(tags)}")
|
||||||
else
|
else
|
||||||
_ ->
|
_ ->
|
||||||
Mix.shell().error("Could not change user tags for #{nickname}")
|
shell_error("Could not change user tags for #{nickname}")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["untag", nickname | tags]) do
|
def run(["untag", nickname | tags]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
with %User{} = user <- User.get_cached_by_nickname(nickname) do
|
with %User{} = user <- User.get_cached_by_nickname(nickname) do
|
||||||
user = user |> User.untag(tags)
|
user = user |> User.untag(tags)
|
||||||
|
|
||||||
Mix.shell().info("Tags of #{user.nickname}: #{inspect(tags)}")
|
shell_info("Tags of #{user.nickname}: #{inspect(tags)}")
|
||||||
else
|
else
|
||||||
_ ->
|
_ ->
|
||||||
Mix.shell().error("Could not change user tags for #{nickname}")
|
shell_error("Could not change user tags for #{nickname}")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -321,14 +326,12 @@ def run(["invite" | rest]) do
|
||||||
end)
|
end)
|
||||||
|> Enum.into(%{})
|
|> Enum.into(%{})
|
||||||
|
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
with {:ok, val} <- options[:expires_at],
|
with {:ok, val} <- options[:expires_at],
|
||||||
options = Map.put(options, :expires_at, val),
|
options = Map.put(options, :expires_at, val),
|
||||||
{:ok, invite} <- UserInviteToken.create_invite(options) do
|
{:ok, invite} <- UserInviteToken.create_invite(options) do
|
||||||
Mix.shell().info(
|
shell_info("Generated user invite token " <> String.replace(invite.invite_type, "_", " "))
|
||||||
"Generated user invite token " <> String.replace(invite.invite_type, "_", " ")
|
|
||||||
)
|
|
||||||
|
|
||||||
url =
|
url =
|
||||||
Pleroma.Web.Router.Helpers.redirect_url(
|
Pleroma.Web.Router.Helpers.redirect_url(
|
||||||
|
@ -340,14 +343,14 @@ def run(["invite" | rest]) do
|
||||||
IO.puts(url)
|
IO.puts(url)
|
||||||
else
|
else
|
||||||
error ->
|
error ->
|
||||||
Mix.shell().error("Could not create invite token: #{inspect(error)}")
|
shell_error("Could not create invite token: #{inspect(error)}")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["invites"]) do
|
def run(["invites"]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
Mix.shell().info("Invites list:")
|
shell_info("Invites list:")
|
||||||
|
|
||||||
UserInviteToken.list_invites()
|
UserInviteToken.list_invites()
|
||||||
|> Enum.each(fn invite ->
|
|> Enum.each(fn invite ->
|
||||||
|
@ -361,7 +364,7 @@ def run(["invites"]) do
|
||||||
" | Max use: #{max_use} Left use: #{max_use - invite.uses}"
|
" | Max use: #{max_use} Left use: #{max_use - invite.uses}"
|
||||||
end
|
end
|
||||||
|
|
||||||
Mix.shell().info(
|
shell_info(
|
||||||
"ID: #{invite.id} | Token: #{invite.token} | Token type: #{invite.invite_type} | Used: #{
|
"ID: #{invite.id} | Token: #{invite.token} | Token type: #{invite.invite_type} | Used: #{
|
||||||
invite.used
|
invite.used
|
||||||
}#{expire_info}#{using_info}"
|
}#{expire_info}#{using_info}"
|
||||||
|
@ -370,40 +373,54 @@ def run(["invites"]) do
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["revoke_invite", token]) do
|
def run(["revoke_invite", token]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
with {:ok, invite} <- UserInviteToken.find_by_token(token),
|
with {:ok, invite} <- UserInviteToken.find_by_token(token),
|
||||||
{:ok, _} <- UserInviteToken.update_invite(invite, %{used: true}) do
|
{:ok, _} <- UserInviteToken.update_invite(invite, %{used: true}) do
|
||||||
Mix.shell().info("Invite for token #{token} was revoked.")
|
shell_info("Invite for token #{token} was revoked.")
|
||||||
else
|
else
|
||||||
_ -> Mix.shell().error("No invite found with token #{token}")
|
_ -> shell_error("No invite found with token #{token}")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["delete_activities", nickname]) do
|
def run(["delete_activities", nickname]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
|
with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
|
||||||
{:ok, _} = User.delete_user_activities(user)
|
{:ok, _} = User.delete_user_activities(user)
|
||||||
Mix.shell().info("User #{nickname} statuses deleted.")
|
shell_info("User #{nickname} statuses deleted.")
|
||||||
else
|
else
|
||||||
_ ->
|
_ ->
|
||||||
Mix.shell().error("No local user #{nickname}")
|
shell_error("No local user #{nickname}")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["toggle_confirmed", nickname]) do
|
def run(["toggle_confirmed", nickname]) do
|
||||||
Common.start_pleroma()
|
start_pleroma()
|
||||||
|
|
||||||
with %User{} = user <- User.get_cached_by_nickname(nickname) do
|
with %User{} = user <- User.get_cached_by_nickname(nickname) do
|
||||||
{:ok, user} = User.toggle_confirmation(user)
|
{:ok, user} = User.toggle_confirmation(user)
|
||||||
|
|
||||||
message = if user.info.confirmation_pending, do: "needs", else: "doesn't need"
|
message = if user.info.confirmation_pending, do: "needs", else: "doesn't need"
|
||||||
|
|
||||||
Mix.shell().info("#{nickname} #{message} confirmation.")
|
shell_info("#{nickname} #{message} confirmation.")
|
||||||
else
|
else
|
||||||
_ ->
|
_ ->
|
||||||
Mix.shell().error("No local user #{nickname}")
|
shell_error("No local user #{nickname}")
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def run(["sign_out", nickname]) do
|
||||||
|
start_pleroma()
|
||||||
|
|
||||||
|
with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
|
||||||
|
OAuth.Token.delete_user_tokens(user)
|
||||||
|
OAuth.Authorization.delete_user_authorizations(user)
|
||||||
|
|
||||||
|
shell_info("#{nickname} signed out from all apps.")
|
||||||
|
else
|
||||||
|
_ ->
|
||||||
|
shell_error("No local user #{nickname}")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -416,7 +433,7 @@ defp set_moderator(user, value) do
|
||||||
|
|
||||||
{:ok, user} = User.update_and_set_cache(user_cng)
|
{:ok, user} = User.update_and_set_cache(user_cng)
|
||||||
|
|
||||||
Mix.shell().info("Moderator status of #{user.nickname}: #{user.info.is_moderator}")
|
shell_info("Moderator status of #{user.nickname}: #{user.info.is_moderator}")
|
||||||
user
|
user
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -429,7 +446,7 @@ defp set_admin(user, value) do
|
||||||
|
|
||||||
{:ok, user} = User.update_and_set_cache(user_cng)
|
{:ok, user} = User.update_and_set_cache(user_cng)
|
||||||
|
|
||||||
Mix.shell().info("Admin status of #{user.nickname}: #{user.info.is_admin}")
|
shell_info("Admin status of #{user.nickname}: #{user.info.is_admin}")
|
||||||
user
|
user
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -442,7 +459,7 @@ defp set_locked(user, value) do
|
||||||
|
|
||||||
{:ok, user} = User.update_and_set_cache(user_cng)
|
{:ok, user} = User.update_and_set_cache(user_cng)
|
||||||
|
|
||||||
Mix.shell().info("Locked status of #{user.nickname}: #{user.info.locked}")
|
shell_info("Locked status of #{user.nickname}: #{user.info.locked}")
|
||||||
user
|
user
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -39,8 +39,7 @@ defp query_with(q, :gin, search_query) do
         "to_tsvector('english', ?->>'content') @@ plainto_tsquery('english', ?)",
         o.data,
         ^search_query
-      ),
-      order_by: [desc: :id]
+      )
     )
   end

@ -56,18 +55,19 @@ defp query_with(q, :rum, search_query) do
     )
   end

-  # users can search everything
-  defp maybe_restrict_local(q, %User{}), do: q
-
-  # unauthenticated users can only search local activities
-  defp maybe_restrict_local(q, _) do
-    if Pleroma.Config.get([:instance, :limit_unauthenticated_to_local_content], true) do
-      where(q, local: true)
-    else
-      q
-    end
-  end
+  defp maybe_restrict_local(q, user) do
+    limit = Pleroma.Config.get([:instance, :limit_to_local_content], :unauthenticated)
+
+    case {limit, user} do
+      {:all, _} -> restrict_local(q)
+      {:unauthenticated, %User{}} -> q
+      {:unauthenticated, _} -> restrict_local(q)
+      {false, _} -> q
+    end
+  end
+
+  defp restrict_local(q), do: where(q, local: true)

   defp maybe_fetch(activities, user, search_query) do
     with true <- Regex.match?(~r/https?:/, search_query),
          {:ok, object} <- Fetcher.fetch_object_from_id(search_query),
|
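The boolean `:limit_unauthenticated_to_local_content` setting is replaced here by `:limit_to_local_content`, whose three accepted values map directly onto the `case` clauses above. A hedged sketch of the corresponding config (values illustrative):

    config :pleroma, :instance,
      # :unauthenticated - only logged-in users may search remote content (the default)
      # :all             - searches are restricted to local content for everyone
      # false            - no restriction at all
      limit_to_local_content: :unauthenticated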
|
|
@ -31,6 +31,7 @@ def start(_type, _args) do
       [
         # Start the Ecto repository
         %{id: Pleroma.Repo, start: {Pleroma.Repo, :start_link, []}, type: :supervisor},
+        %{id: Pleroma.Config.TransferTask, start: {Pleroma.Config.TransferTask, :start_link, []}},
         %{id: Pleroma.Emoji, start: {Pleroma.Emoji, :start_link, []}},
         %{id: Pleroma.Captcha, start: {Pleroma.Captcha, :start_link, []}},
         %{

@ -180,7 +181,6 @@ defp setup_instrumenters do
       Pleroma.Repo.Instrumenter.setup()
     end

-    Prometheus.Registry.register_collector(:prometheus_process_collector)
     Pleroma.Web.Endpoint.MetricsExporter.setup()
     Pleroma.Web.Endpoint.PipelineInstrumenter.setup()
     Pleroma.Web.Endpoint.Instrumenter.setup()

@ -193,14 +193,14 @@ def enabled_hackney_pools do
       else
         []
       end ++
-      if Pleroma.Config.get([Pleroma.Uploader, :proxy_remote]) do
+      if Pleroma.Config.get([Pleroma.Upload, :proxy_remote]) do
         [:upload]
       else
         []
       end
   end

-  if Mix.env() == :test do
+  if Pleroma.Config.get(:env) == :test do
     defp streamer_child, do: []
     defp chat_child, do: []
   else
|
|
lib/pleroma/config/transfer_task.ex (new file, 55 lines)
@ -0,0 +1,55 @@
defmodule Pleroma.Config.TransferTask do
  use Task
  alias Pleroma.Web.AdminAPI.Config

  def start_link do
    load_and_update_env()
    if Pleroma.Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Pleroma.Repo)
    :ignore
  end

  def load_and_update_env do
    if Pleroma.Config.get([:instance, :dynamic_configuration]) and
         Ecto.Adapters.SQL.table_exists?(Pleroma.Repo, "config") do
      for_restart =
        Pleroma.Repo.all(Config)
        |> Enum.map(&update_env(&1))

      # We need to restart applications for loaded settings take effect
      for_restart
      |> Enum.reject(&(&1 in [:pleroma, :ok]))
      |> Enum.each(fn app ->
        Application.stop(app)
        :ok = Application.start(app)
      end)
    end
  end

  defp update_env(setting) do
    try do
      key =
        if String.starts_with?(setting.key, "Pleroma.") do
          "Elixir." <> setting.key
        else
          setting.key
        end

      group = String.to_existing_atom(setting.group)

      Application.put_env(
        group,
        String.to_existing_atom(key),
        Config.from_binary(setting.value)
      )

      group
    rescue
      e ->
        require Logger

        Logger.warn(
          "updating env causes error, key: #{inspect(setting.key)}, error: #{inspect(e)}"
        )
    end
  end
end
|
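To illustrate how the transfer task maps one stored row onto the application environment, here is a minimal sketch of the net effect for a single setting. The group and key strings are illustrative, and `Config.from_binary/1` is assumed to decode whatever the admin API stored:

    # update_env/1 prefixes module-style keys with "Elixir." before converting them to atoms:
    group = String.to_existing_atom("pleroma")                        # => :pleroma
    key = String.to_existing_atom("Elixir." <> "Pleroma.Captcha")     # => Pleroma.Captcha
    Application.put_env(group, key, enabled: false)

    Application.get_env(:pleroma, Pleroma.Captcha)
    # => [enabled: false]

Settings belonging to applications other than `:pleroma` cause those applications to be stopped and restarted so the new values take effect, as shown in `load_and_update_env/0` above.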
|
@ -59,10 +59,10 @@ def mark_as_unread(participation) do
   def for_user(user, params \\ %{}) do
     from(p in __MODULE__,
       where: p.user_id == ^user.id,
-      order_by: [desc: p.updated_at]
+      order_by: [desc: p.updated_at],
+      preload: [conversation: [:users]]
     )
     |> Pleroma.Pagination.fetch_paginated(params)
-    |> Repo.preload(conversation: [:users])
   end

   def for_user_with_last_activity_id(user, params \\ %{}) do
|
|
|
@ -23,13 +23,8 @@ defp recipient(email, nil), do: email
   defp recipient(email, name), do: {name, email}
   defp recipient(%Pleroma.User{} = user), do: recipient(user.email, user.name)

-  def password_reset_email(user, password_reset_token) when is_binary(password_reset_token) do
-    password_reset_url =
-      Router.Helpers.util_url(
-        Endpoint,
-        :show_password_reset,
-        password_reset_token
-      )
+  def password_reset_email(user, token) when is_binary(token) do
+    password_reset_url = Router.Helpers.reset_password_url(Endpoint, :reset, token)

     html_body = """
     <h3>Reset your password at #{instance_name()}</h3>
|
|
|
@ -22,7 +22,6 @@ defmodule Pleroma.Emoji do
|
||||||
|
|
||||||
@ets __MODULE__.Ets
|
@ets __MODULE__.Ets
|
||||||
@ets_options [:ordered_set, :protected, :named_table, {:read_concurrency, true}]
|
@ets_options [:ordered_set, :protected, :named_table, {:read_concurrency, true}]
|
||||||
@groups Pleroma.Config.get([:emoji, :groups])
|
|
||||||
|
|
||||||
@doc false
|
@doc false
|
||||||
def start_link do
|
def start_link do
|
||||||
|
@ -87,6 +86,8 @@ defp load do
|
||||||
"emoji"
|
"emoji"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
emoji_groups = Pleroma.Config.get([:emoji, :groups])
|
||||||
|
|
||||||
case File.ls(emoji_dir_path) do
|
case File.ls(emoji_dir_path) do
|
||||||
{:error, :enoent} ->
|
{:error, :enoent} ->
|
||||||
# The custom emoji directory doesn't exist,
|
# The custom emoji directory doesn't exist,
|
||||||
|
@ -98,7 +99,9 @@ defp load do
|
||||||
Logger.error("Could not access the custom emoji directory #{emoji_dir_path}: #{e}")
|
Logger.error("Could not access the custom emoji directory #{emoji_dir_path}: #{e}")
|
||||||
|
|
||||||
{:ok, results} ->
|
{:ok, results} ->
|
||||||
grouped = Enum.group_by(results, &File.dir?/1)
|
grouped =
|
||||||
|
Enum.group_by(results, fn file -> File.dir?(Path.join(emoji_dir_path, file)) end)
|
||||||
|
|
||||||
packs = grouped[true] || []
|
packs = grouped[true] || []
|
||||||
files = grouped[false] || []
|
files = grouped[false] || []
|
||||||
|
|
||||||
|
@ -116,7 +119,7 @@ defp load do
|
||||||
emojis =
|
emojis =
|
||||||
Enum.flat_map(
|
Enum.flat_map(
|
||||||
packs,
|
packs,
|
||||||
fn pack -> load_pack(Path.join(emoji_dir_path, pack)) end
|
fn pack -> load_pack(Path.join(emoji_dir_path, pack), emoji_groups) end
|
||||||
)
|
)
|
||||||
|
|
||||||
true = :ets.insert(@ets, emojis)
|
true = :ets.insert(@ets, emojis)
|
||||||
|
@ -127,9 +130,9 @@ defp load do
|
||||||
shortcode_globs = Pleroma.Config.get([:emoji, :shortcode_globs], [])
|
shortcode_globs = Pleroma.Config.get([:emoji, :shortcode_globs], [])
|
||||||
|
|
||||||
emojis =
|
emojis =
|
||||||
(load_from_file("config/emoji.txt") ++
|
(load_from_file("config/emoji.txt", emoji_groups) ++
|
||||||
load_from_file("config/custom_emoji.txt") ++
|
load_from_file("config/custom_emoji.txt", emoji_groups) ++
|
||||||
load_from_globs(shortcode_globs))
|
load_from_globs(shortcode_globs, emoji_groups))
|
||||||
|> Enum.reject(fn value -> value == nil end)
|
|> Enum.reject(fn value -> value == nil end)
|
||||||
|
|
||||||
true = :ets.insert(@ets, emojis)
|
true = :ets.insert(@ets, emojis)
|
||||||
|
@ -137,23 +140,25 @@ defp load do
|
||||||
:ok
|
:ok
|
||||||
end
|
end
|
||||||
|
|
||||||
defp load_pack(pack_dir) do
|
defp load_pack(pack_dir, emoji_groups) do
|
||||||
pack_name = Path.basename(pack_dir)
|
pack_name = Path.basename(pack_dir)
|
||||||
|
|
||||||
emoji_txt = Path.join(pack_dir, "emoji.txt")
|
emoji_txt = Path.join(pack_dir, "emoji.txt")
|
||||||
|
|
||||||
if File.exists?(emoji_txt) do
|
if File.exists?(emoji_txt) do
|
||||||
load_from_file(emoji_txt)
|
load_from_file(emoji_txt, emoji_groups)
|
||||||
else
|
else
|
||||||
|
extensions = Pleroma.Config.get([:emoji, :pack_extensions])
|
||||||
|
|
||||||
Logger.info(
|
Logger.info(
|
||||||
"No emoji.txt found for pack \"#{pack_name}\", assuming all .png files are emoji"
|
"No emoji.txt found for pack \"#{pack_name}\", assuming all #{Enum.join(extensions, ", ")} files are emoji"
|
||||||
)
|
)
|
||||||
|
|
||||||
make_shortcode_to_file_map(pack_dir, [".png"])
|
make_shortcode_to_file_map(pack_dir, extensions)
|
||||||
|> Enum.map(fn {shortcode, rel_file} ->
|
|> Enum.map(fn {shortcode, rel_file} ->
|
||||||
filename = Path.join("/emoji/#{pack_name}", rel_file)
|
filename = Path.join("/emoji/#{pack_name}", rel_file)
|
||||||
|
|
||||||
{shortcode, filename, [to_string(match_extra(@groups, filename))]}
|
{shortcode, filename, [to_string(match_extra(emoji_groups, filename))]}
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
@ -182,21 +187,21 @@ def find_all_emoji(dir, exts) do
|
||||||
|> Enum.filter(fn f -> Path.extname(f) in exts end)
|
|> Enum.filter(fn f -> Path.extname(f) in exts end)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp load_from_file(file) do
|
defp load_from_file(file, emoji_groups) do
|
||||||
if File.exists?(file) do
|
if File.exists?(file) do
|
||||||
load_from_file_stream(File.stream!(file))
|
load_from_file_stream(File.stream!(file), emoji_groups)
|
||||||
else
|
else
|
||||||
[]
|
[]
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp load_from_file_stream(stream) do
|
defp load_from_file_stream(stream, emoji_groups) do
|
||||||
stream
|
stream
|
||||||
|> Stream.map(&String.trim/1)
|
|> Stream.map(&String.trim/1)
|
||||||
|> Stream.map(fn line ->
|
|> Stream.map(fn line ->
|
||||||
case String.split(line, ~r/,\s*/) do
|
case String.split(line, ~r/,\s*/) do
|
||||||
[name, file] ->
|
[name, file] ->
|
||||||
{name, file, [to_string(match_extra(@groups, file))]}
|
{name, file, [to_string(match_extra(emoji_groups, file))]}
|
||||||
|
|
||||||
[name, file | tags] ->
|
[name, file | tags] ->
|
||||||
{name, file, tags}
|
{name, file, tags}
|
||||||
|
@ -208,7 +213,7 @@ defp load_from_file_stream(stream) do
|
||||||
|> Enum.to_list()
|
|> Enum.to_list()
|
||||||
end
|
end
|
||||||
|
|
||||||
defp load_from_globs(globs) do
|
defp load_from_globs(globs, emoji_groups) do
|
||||||
static_path = Path.join(:code.priv_dir(:pleroma), "static")
|
static_path = Path.join(:code.priv_dir(:pleroma), "static")
|
||||||
|
|
||||||
paths =
|
paths =
|
||||||
|
@ -219,7 +224,7 @@ defp load_from_globs(globs) do
|
||||||
|> Enum.concat()
|
|> Enum.concat()
|
||||||
|
|
||||||
Enum.map(paths, fn path ->
|
Enum.map(paths, fn path ->
|
||||||
tag = match_extra(@groups, Path.join("/", Path.relative_to(path, static_path)))
|
tag = match_extra(emoji_groups, Path.join("/", Path.relative_to(path, static_path)))
|
||||||
shortcode = Path.basename(path, Path.extname(path))
|
shortcode = Path.basename(path, Path.extname(path))
|
||||||
external_path = Path.join("/", Path.relative_to(path, static_path))
|
external_path = Path.join("/", Path.relative_to(path, static_path))
|
||||||
{shortcode, external_path, [to_string(tag)]}
|
{shortcode, external_path, [to_string(tag)]}
|
||||||
|
|
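The emoji loader above now reads `[:emoji, :groups]` and the new `[:emoji, :pack_extensions]` at load time instead of baking them into a compile-time module attribute, so changed config is picked up without recompiling. A sketch of the config shape these calls expect (the group name and globs are illustrative, not taken from this diff):

    config :pleroma, :emoji,
      shortcode_globs: ["/emoji/custom/**/*.png"],
      # used when a pack ships no emoji.txt; previously the extension list was hardcoded to [".png"]
      pack_extensions: [".png", ".gif"],
      groups: [
        Custom: ["/emoji/custom/*.png", "/emoji/custom/**/*.png"]
      ]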
lib/pleroma/helpers/uri_helper.ex (new file, 27 lines)
@ -0,0 +1,27 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Helpers.UriHelper do
  def append_uri_params(uri, appended_params) do
    uri = URI.parse(uri)
    appended_params = for {k, v} <- appended_params, into: %{}, do: {to_string(k), v}
    existing_params = URI.query_decoder(uri.query || "") |> Enum.into(%{})
    updated_params_keys = Enum.uniq(Map.keys(existing_params) ++ Map.keys(appended_params))

    updated_params =
      for k <- updated_params_keys, do: {k, appended_params[k] || existing_params[k]}

    uri
    |> Map.put(:query, URI.encode_query(updated_params))
    |> URI.to_string()
  end

  def append_param_if_present(%{} = params, param_name, param_value) do
    if param_value do
      Map.put(params, param_name, param_value)
    else
      params
    end
  end
end
|
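A usage sketch for the helper above; appended values win over existing values for the same key, and atom keys are stringified:

    Pleroma.Helpers.UriHelper.append_uri_params("https://example.com/notice?page=1", %{max_id: 42})
    # => "https://example.com/notice?page=1&max_id=42"

    Pleroma.Helpers.UriHelper.append_param_if_present(%{"limit" => 20}, :max_id, nil)
    # => %{"limit" => 20}   (nil values are simply not added)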
|
@ -89,7 +89,7 @@ def extract_first_external_url(object, content) do
     Cachex.fetch!(:scrubber_cache, key, fn _key ->
       result =
         content
-        |> Floki.filter_out("a.mention")
+        |> Floki.filter_out("a.mention,a.hashtag,a[rel~=\"tag\"]")
         |> Floki.attribute("a", "href")
         |> Enum.at(0)
|
|
||||||
|
|
|
@ -13,7 +13,7 @@ def set_consistently_unreachable(url_or_host),

   def reachability_datetime_threshold do
     federation_reachability_timeout_days =
-      Pleroma.Config.get(:instance)[:federation_reachability_timeout_days] || 0
+      Pleroma.Config.get([:instance, :federation_reachability_timeout_days], 0)

     if federation_reachability_timeout_days > 0 do
       NaiveDateTime.add(
|
|
|
@ -13,6 +13,8 @@ defmodule Pleroma.Notification do
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
alias Pleroma.Web.CommonAPI
|
alias Pleroma.Web.CommonAPI
|
||||||
alias Pleroma.Web.CommonAPI.Utils
|
alias Pleroma.Web.CommonAPI.Utils
|
||||||
|
alias Pleroma.Web.Push
|
||||||
|
alias Pleroma.Web.Streamer
|
||||||
|
|
||||||
import Ecto.Query
|
import Ecto.Query
|
||||||
import Ecto.Changeset
|
import Ecto.Changeset
|
||||||
|
@ -149,8 +151,7 @@ def dismiss(%{id: user_id} = _user, id) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def create_notifications(%Activity{data: %{"to" => _, "type" => type}} = activity)
|
def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity) do
|
||||||
when type in ["Create", "Like", "Announce", "Follow"] do
|
|
||||||
object = Object.normalize(activity)
|
object = Object.normalize(activity)
|
||||||
|
|
||||||
unless object && object.data["type"] == "Answer" do
|
unless object && object.data["type"] == "Answer" do
|
||||||
|
@ -162,6 +163,13 @@ def create_notifications(%Activity{data: %{"to" => _, "type" => type}} = activit
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def create_notifications(%Activity{data: %{"to" => _, "type" => type}} = activity)
|
||||||
|
when type in ["Like", "Announce", "Follow"] do
|
||||||
|
users = get_notified_from_activity(activity)
|
||||||
|
notifications = Enum.map(users, fn user -> create_notification(activity, user) end)
|
||||||
|
{:ok, notifications}
|
||||||
|
end
|
||||||
|
|
||||||
def create_notifications(_), do: {:ok, []}
|
def create_notifications(_), do: {:ok, []}
|
||||||
|
|
||||||
# TODO move to sql, too.
|
# TODO move to sql, too.
|
||||||
|
@ -169,8 +177,9 @@ def create_notification(%Activity{} = activity, %User{} = user) do
|
||||||
unless skip?(activity, user) do
|
unless skip?(activity, user) do
|
||||||
notification = %Notification{user_id: user.id, activity: activity}
|
notification = %Notification{user_id: user.id, activity: activity}
|
||||||
{:ok, notification} = Repo.insert(notification)
|
{:ok, notification} = Repo.insert(notification)
|
||||||
Pleroma.Web.Streamer.stream("user", notification)
|
Streamer.stream("user", notification)
|
||||||
Pleroma.Web.Push.send(notification)
|
Streamer.stream("user:notification", notification)
|
||||||
|
Push.send(notification)
|
||||||
notification
|
notification
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,3 +1,7 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
 defmodule Pleroma.Object.Containment do
   @moduledoc """
   This module contains some useful functions for containing objects to specific
|
|
|
@ -85,6 +85,9 @@ def fetch_and_contain_remote_object_from_id(id) do
          :ok <- Containment.contain_origin_from_id(id, data) do
       {:ok, data}
     else
+      {:ok, %{status: code}} when code in [404, 410] ->
+        {:error, "Object has been deleted"}
+
       e ->
         {:error, e}
     end
|
|
|
@ -37,6 +37,7 @@ def used_changeset(struct) do
     |> put_change(:used, true)
   end

+  @spec reset_password(binary(), map()) :: {:ok, User.t()} | {:error, binary()}
   def reset_password(token, data) do
     with %{used: false} = token <- Repo.get_by(PasswordResetToken, %{token: token}),
          %User{} = user <- User.get_cached_by_id(token.user_id),
|
@ -56,14 +56,14 @@ defp csp_string do
     connect_src = "connect-src 'self' #{static_url} #{websocket_url}"

     connect_src =
-      if Mix.env() == :dev do
+      if Pleroma.Config.get(:env) == :dev do
         connect_src <> " http://localhost:3035/"
       else
         connect_src
       end

     script_src =
-      if Mix.env() == :dev do
+      if Pleroma.Config.get(:env) == :dev do
         "script-src 'self' 'unsafe-eval'"
       else
         "script-src 'self'"
|
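`Mix.env/0` is not available in an OTP release (Mix is a build-time tool), so these checks move to a value read from the application environment. A sketch of how that value is presumably seeded at build time (the exact config line is an assumption, not part of this diff):

    # config/config.exs (assumed)
    config :pleroma, :env, Mix.env()

    # At runtime, including inside a release where Mix is absent:
    Pleroma.Config.get(:env) == :dev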
|
lib/pleroma/plugs/idempotency_plug.ex (new file, 84 lines)
@ -0,0 +1,84 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Plugs.IdempotencyPlug do
  import Phoenix.Controller, only: [json: 2]
  import Plug.Conn

  @behaviour Plug

  @impl true
  def init(opts), do: opts

  # Sending idempotency keys in `GET` and `DELETE` requests has no effect
  # and should be avoided, as these requests are idempotent by definition.

  @impl true
  def call(%{method: method} = conn, _) when method in ["POST", "PUT", "PATCH"] do
    case get_req_header(conn, "idempotency-key") do
      [key] -> process_request(conn, key)
      _ -> conn
    end
  end

  def call(conn, _), do: conn

  def process_request(conn, key) do
    case Cachex.get(:idempotency_cache, key) do
      {:ok, nil} ->
        cache_resposnse(conn, key)

      {:ok, record} ->
        send_cached(conn, key, record)

      {atom, message} when atom in [:ignore, :error] ->
        render_error(conn, message)
    end
  end

  defp cache_resposnse(conn, key) do
    register_before_send(conn, fn conn ->
      [request_id] = get_resp_header(conn, "x-request-id")
      content_type = get_content_type(conn)

      record = {request_id, content_type, conn.status, conn.resp_body}
      {:ok, _} = Cachex.put(:idempotency_cache, key, record)

      conn
      |> put_resp_header("idempotency-key", key)
      |> put_resp_header("x-original-request-id", request_id)
    end)
  end

  defp send_cached(conn, key, record) do
    {request_id, content_type, status, body} = record

    conn
    |> put_resp_header("idempotency-key", key)
    |> put_resp_header("idempotent-replayed", "true")
    |> put_resp_header("x-original-request-id", request_id)
    |> put_resp_content_type(content_type)
    |> send_resp(status, body)
    |> halt()
  end

  defp render_error(conn, message) do
    conn
    |> put_status(:unprocessable_entity)
    |> json(%{error: message})
    |> halt()
  end

  defp get_content_type(conn) do
    [content_type] = get_resp_header(conn, "content-type")

    if String.contains?(content_type, ";") do
      content_type
      |> String.split(";")
      |> hd()
    else
      content_type
    end
  end
end
|
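A sketch of the request flow, driving the plug directly with `Plug.Test` (the path, body, and key are illustrative; in the real pipeline the plug runs before the controller):

    opts = Pleroma.Plugs.IdempotencyPlug.init([])

    conn =
      Plug.Test.conn(:post, "/api/v1/statuses", ~s({"status": "hi"}))
      |> Plug.Conn.put_req_header("idempotency-key", "demo-key-123")
      |> Pleroma.Plugs.IdempotencyPlug.call(opts)

    # On a cache miss the plug registers a before_send hook that stores
    # {request_id, content_type, status, body} in :idempotency_cache under "demo-key-123".
    # A later request with the same key is answered from that cache entry and carries the
    # "idempotent-replayed: true" and "x-original-request-id" response headers.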
@ -1,36 +0,0 @@
|
||||||
# Pleroma: A lightweight social networking server
|
|
||||||
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
|
||||||
|
|
||||||
defmodule Pleroma.Plugs.RateLimitPlug do
|
|
||||||
import Phoenix.Controller, only: [json: 2]
|
|
||||||
import Plug.Conn
|
|
||||||
|
|
||||||
def init(opts), do: opts
|
|
||||||
|
|
||||||
def call(conn, opts) do
|
|
||||||
enabled? = Pleroma.Config.get([:app_account_creation, :enabled])
|
|
||||||
|
|
||||||
case check_rate(conn, Map.put(opts, :enabled, enabled?)) do
|
|
||||||
{:ok, _count} -> conn
|
|
||||||
{:error, _count} -> render_error(conn)
|
|
||||||
%Plug.Conn{} = conn -> conn
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defp check_rate(conn, %{enabled: true} = opts) do
|
|
||||||
max_requests = opts[:max_requests]
|
|
||||||
bucket_name = conn.remote_ip |> Tuple.to_list() |> Enum.join(".")
|
|
||||||
|
|
||||||
ExRated.check_rate(bucket_name, opts[:interval] * 1000, max_requests)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp check_rate(conn, _), do: conn
|
|
||||||
|
|
||||||
defp render_error(conn) do
|
|
||||||
conn
|
|
||||||
|> put_status(:forbidden)
|
|
||||||
|> json(%{error: "Rate limit exceeded."})
|
|
||||||
|> halt()
|
|
||||||
end
|
|
||||||
end
|
|
lib/pleroma/plugs/rate_limiter.ex (new file, 94 lines)
@ -0,0 +1,94 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Plugs.RateLimiter do
  @moduledoc """

  ## Configuration

  A keyword list of rate limiters where a key is a limiter name and value is the limiter configuration. The basic configuration is a tuple where:

  * The first element: `scale` (Integer). The time scale in milliseconds.
  * The second element: `limit` (Integer). How many requests to limit in the time scale provided.

  It is also possible to have different limits for unauthenticated and authenticated users: the keyword value must be a list of two tuples where the first one is a config for unauthenticated users and the second one is for authenticated.

  To disable a limiter set its value to `nil`.

  ### Example

      config :pleroma, :rate_limit,
        one: {1000, 10},
        two: [{10_000, 10}, {10_000, 50}],
        foobar: nil

  Here we have three limiters:

  * `one` which is not over 10req/1s
  * `two` which has two limits: 10req/10s for unauthenticated users and 50req/10s for authenticated users
  * `foobar` which is disabled

  ## Usage

  Inside a controller:

      plug(Pleroma.Plugs.RateLimiter, :one when action == :one)
      plug(Pleroma.Plugs.RateLimiter, :two when action in [:two, :three])

  or inside a router pipeline:

      pipeline :api do
        ...
        plug(Pleroma.Plugs.RateLimiter, :one)
        ...
      end
  """

  import Phoenix.Controller, only: [json: 2]
  import Plug.Conn

  alias Pleroma.User

  def init(limiter_name) do
    case Pleroma.Config.get([:rate_limit, limiter_name]) do
      nil -> nil
      config -> {limiter_name, config}
    end
  end

  # do not limit if there is no limiter configuration
  def call(conn, nil), do: conn

  def call(conn, opts) do
    case check_rate(conn, opts) do
      {:ok, _count} -> conn
      {:error, _count} -> render_error(conn)
    end
  end

  defp check_rate(%{assigns: %{user: %User{id: user_id}}}, {limiter_name, [_, {scale, limit}]}) do
    ExRated.check_rate("#{limiter_name}:#{user_id}", scale, limit)
  end

  defp check_rate(conn, {limiter_name, [{scale, limit} | _]}) do
    ExRated.check_rate("#{limiter_name}:#{ip(conn)}", scale, limit)
  end

  defp check_rate(conn, {limiter_name, {scale, limit}}) do
    check_rate(conn, {limiter_name, [{scale, limit}]})
  end

  def ip(%{remote_ip: remote_ip}) do
    remote_ip
    |> Tuple.to_list()
    |> Enum.join(".")
  end

  defp render_error(conn) do
    conn
    |> put_status(:too_many_requests)
    |> json(%{error: "Throttled"})
    |> halt()
  end
end
|
@ -36,7 +36,7 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
         conn
     end

-    config = Pleroma.Config.get([Pleroma.Upload])
+    config = Pleroma.Config.get(Pleroma.Upload)

     with uploader <- Keyword.fetch!(config, :uploader),
          proxy_remote = Keyword.get(config, :proxy_remote, false),
|
|
lib/pleroma/release_tasks.ex (new file, 66 lines)
@ -0,0 +1,66 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.ReleaseTasks do
  @repo Pleroma.Repo

  def run(args) do
    [task | args] = String.split(args)

    case task do
      "migrate" -> migrate(args)
      "create" -> create()
      "rollback" -> rollback(args)
      task -> mix_task(task, args)
    end
  end

  defp mix_task(task, args) do
    Application.load(:pleroma)
    {:ok, modules} = :application.get_key(:pleroma, :modules)

    module =
      Enum.find(modules, fn module ->
        module = Module.split(module)

        match?(["Mix", "Tasks", "Pleroma" | _], module) and
          String.downcase(List.last(module)) == task
      end)

    if module do
      module.run(args)
    else
      IO.puts("The task #{task} does not exist")
    end
  end

  def migrate(args) do
    Mix.Tasks.Pleroma.Ecto.Migrate.run(args)
  end

  def rollback(args) do
    Mix.Tasks.Pleroma.Ecto.Rollback.run(args)
  end

  def create do
    Application.load(:pleroma)

    case @repo.__adapter__.storage_up(@repo.config) do
      :ok ->
        IO.puts("The database for #{inspect(@repo)} has been created")

      {:error, :already_up} ->
        IO.puts("The database for #{inspect(@repo)} has already been created")

      {:error, term} when is_binary(term) ->
        IO.puts(:stderr, "The database for #{inspect(@repo)} couldn't be created: #{term}")

      {:error, term} ->
        IO.puts(
          :stderr,
          "The database for #{inspect(@repo)} couldn't be created: #{inspect(term)}"
        )
    end
  end
end
|
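Mix is not loaded inside an OTP release, so `run/1` takes the whole task string and dispatches by hand. The usual entry point is the release's `eval` command; the wrapper shown below is an assumption, only `Pleroma.ReleaseTasks.run/1` itself comes from this diff:

    # e.g. invoked as: bin/pleroma eval 'Pleroma.ReleaseTasks.run("migrate")'
    Pleroma.ReleaseTasks.run("migrate")
    # -> Mix.Tasks.Pleroma.Ecto.Migrate.run([])

    Pleroma.ReleaseTasks.run("user new admin admin@example.test --admin")
    # -> any other word falls through to mix_task/2, which looks up Mix.Tasks.Pleroma.User
    #    among the compiled modules and calls its run/1 with the remaining arguments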
lib/pleroma/repo_streamer.ex (new file, 34 lines)
@ -0,0 +1,34 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.RepoStreamer do
  alias Pleroma.Repo
  import Ecto.Query

  def chunk_stream(query, chunk_size) do
    Stream.unfold(0, fn
      :halt ->
        {[], :halt}

      last_id ->
        query
        |> order_by(asc: :id)
        |> where([r], r.id > ^last_id)
        |> limit(^chunk_size)
        |> Repo.all()
        |> case do
          [] ->
            {[], :halt}

          records ->
            last_id = List.last(records).id
            {records, last_id}
        end
    end)
    |> Stream.take_while(fn
      [] -> false
      _ -> true
    end)
  end
end
|
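A usage sketch for `chunk_stream/2`: each stream element is a list of up to `chunk_size` rows, fetched id-ascending with a fresh query per chunk, so no long-running transaction or database cursor is held open (the schema used here is illustrative):

    Pleroma.Activity
    |> Pleroma.RepoStreamer.chunk_stream(50)
    |> Stream.each(fn chunk -> IO.puts("processing #{length(chunk)} activities") end)
    |> Stream.run()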
|
@ -146,7 +146,7 @@ defp request(method, url, headers, hackney_opts) do
     Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
     method = method |> String.downcase() |> String.to_existing_atom()

-    case :hackney.request(method, url, headers, "", hackney_opts) do
+    case hackney().request(method, url, headers, "", hackney_opts) do
       {:ok, code, headers, client} when code in @valid_resp_codes ->
         {:ok, code, downcase_headers(headers), client}

@ -196,7 +196,7 @@ defp chunk_reply(conn, client, opts, sent_so_far, duration) do
            duration,
            Keyword.get(opts, :max_read_duration, @max_read_duration)
          ),
-         {:ok, data} <- :hackney.stream_body(client),
+         {:ok, data} <- hackney().stream_body(client),
          {:ok, duration} <- increase_read_duration(duration),
          sent_so_far = sent_so_far + byte_size(data),
          :ok <- body_size_constraint(sent_so_far, Keyword.get(opts, :max_body_size)),

@ -377,4 +377,6 @@ defp increase_read_duration({previous_duration, started})
   defp increase_read_duration(_) do
     {:ok, :no_duration_limit, :no_duration_limit}
   end
+
+  defp hackney, do: Pleroma.Config.get(:hackney, :hackney)
 end
|
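The `hackney()` indirection lets the HTTP client module be swapped via config, which is mainly useful for mocking in tests; production keeps the `:hackney` default. A sketch (the mock module name is an assumption, not part of this diff):

    # config/test.exs (illustrative)
    config :pleroma, :hackney, Pleroma.ReverseProxy.ClientMock

    # The proxy will then call Pleroma.ReverseProxy.ClientMock.request/5 and
    # .stream_body/1 in place of the :hackney module.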
|
|
@ -9,12 +9,14 @@ defmodule Pleroma.User do
   import Ecto.Query

   alias Comeonin.Pbkdf2
+  alias Ecto.Multi
   alias Pleroma.Activity
   alias Pleroma.Keys
   alias Pleroma.Notification
   alias Pleroma.Object
   alias Pleroma.Registration
   alias Pleroma.Repo
+  alias Pleroma.RepoStreamer
   alias Pleroma.User
   alias Pleroma.Web
   alias Pleroma.Web.ActivityPub.ActivityPub

@ -194,27 +196,24 @@ def upgrade_changeset(struct, params \\ %{}) do
   end

-  def password_update_changeset(struct, params) do
-    changeset =
-      struct
-      |> cast(params, [:password, :password_confirmation])
-      |> validate_required([:password, :password_confirmation])
-      |> validate_confirmation(:password)
-
-    OAuth.Token.delete_user_tokens(struct)
-    OAuth.Authorization.delete_user_authorizations(struct)
-
-    if changeset.valid? do
-      hashed = Pbkdf2.hashpwsalt(changeset.changes[:password])
-
-      changeset
-      |> put_change(:password_hash, hashed)
-    else
-      changeset
-    end
-  end
+  def password_update_changeset(struct, params) do
+    struct
+    |> cast(params, [:password, :password_confirmation])
+    |> validate_required([:password, :password_confirmation])
+    |> validate_confirmation(:password)
+    |> put_password_hash
+  end

-  def reset_password(user, data) do
-    update_and_set_cache(password_update_changeset(user, data))
-  end
+  def reset_password(%User{id: user_id} = user, data) do
+    multi =
+      Multi.new()
+      |> Multi.update(:user, password_update_changeset(user, data))
+      |> Multi.delete_all(:tokens, OAuth.Token.Query.get_by_user(user_id))
+      |> Multi.delete_all(:auth, OAuth.Authorization.delete_by_user_query(user))
+
+    case Repo.transaction(multi) do
+      {:ok, %{user: user} = _} -> set_cache(user)
+      {:error, _, changeset, _} -> {:error, changeset}
+    end
+  end

   def register_changeset(struct, params \\ %{}, opts \\ []) do
||||||
|
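For reference, the `Ecto.Multi` above commits the password change and the token/authorization deletions atomically instead of deleting tokens as a side effect of building a changeset. A minimal sketch of the result shapes (counts illustrative; `multi` is the value built in `reset_password/2`):

    {:ok, %{user: %Pleroma.User{}, tokens: {2, nil}, auth: {1, nil}}} = Pleroma.Repo.transaction(multi)

    # If any step fails, nothing is written and the transaction returns
    # {:error, failed_operation, failed_value, changes_so_far}, which
    # reset_password/2 surfaces as {:error, changeset}.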
@ -250,12 +249,11 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do
|
||||||
end
|
end
|
||||||
|
|
||||||
if changeset.valid? do
|
if changeset.valid? do
|
||||||
hashed = Pbkdf2.hashpwsalt(changeset.changes[:password])
|
|
||||||
ap_id = User.ap_id(%User{nickname: changeset.changes[:nickname]})
|
ap_id = User.ap_id(%User{nickname: changeset.changes[:nickname]})
|
||||||
followers = User.ap_followers(%User{nickname: changeset.changes[:nickname]})
|
followers = User.ap_followers(%User{nickname: changeset.changes[:nickname]})
|
||||||
|
|
||||||
changeset
|
changeset
|
||||||
|> put_change(:password_hash, hashed)
|
|> put_password_hash
|
||||||
|> put_change(:ap_id, ap_id)
|
|> put_change(:ap_id, ap_id)
|
||||||
|> unique_constraint(:ap_id)
|
|> unique_constraint(:ap_id)
|
||||||
|> put_change(:following, [followers])
|
|> put_change(:following, [followers])
|
||||||
|
@ -933,18 +931,24 @@ def delete(%User{} = user),
|
||||||
|
|
||||||
@spec perform(atom(), User.t()) :: {:ok, User.t()}
|
@spec perform(atom(), User.t()) :: {:ok, User.t()}
|
||||||
def perform(:delete, %User{} = user) do
|
def perform(:delete, %User{} = user) do
|
||||||
{:ok, user} = User.deactivate(user)
|
|
||||||
|
|
||||||
# Remove all relationships
|
# Remove all relationships
|
||||||
{:ok, followers} = User.get_followers(user)
|
{:ok, followers} = User.get_followers(user)
|
||||||
|
|
||||||
Enum.each(followers, fn follower -> User.unfollow(follower, user) end)
|
Enum.each(followers, fn follower ->
|
||||||
|
ActivityPub.unfollow(follower, user)
|
||||||
|
User.unfollow(follower, user)
|
||||||
|
end)
|
||||||
|
|
||||||
{:ok, friends} = User.get_friends(user)
|
{:ok, friends} = User.get_friends(user)
|
||||||
|
|
||||||
Enum.each(friends, fn followed -> User.unfollow(user, followed) end)
|
Enum.each(friends, fn followed ->
|
||||||
|
ActivityPub.unfollow(user, followed)
|
||||||
|
User.unfollow(user, followed)
|
||||||
|
end)
|
||||||
|
|
||||||
delete_user_activities(user)
|
delete_user_activities(user)
|
||||||
|
|
||||||
|
{:ok, _user} = Repo.delete(user)
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec perform(atom(), User.t()) :: {:ok, User.t()}
|
@spec perform(atom(), User.t()) :: {:ok, User.t()}
|
||||||
|
@ -1017,18 +1021,35 @@ def follow_import(%User{} = follower, followed_identifiers) when is_list(followe
|
||||||
])
|
])
|
||||||
|
|
||||||
def delete_user_activities(%User{ap_id: ap_id} = user) do
|
def delete_user_activities(%User{ap_id: ap_id} = user) do
|
||||||
stream =
|
ap_id
|
||||||
ap_id
|
|> Activity.query_by_actor()
|
||||||
|> Activity.query_by_actor()
|
|> RepoStreamer.chunk_stream(50)
|
||||||
|> Repo.stream()
|
|> Stream.each(fn activities ->
|
||||||
|
Enum.each(activities, &delete_activity(&1))
|
||||||
Repo.transaction(fn -> Enum.each(stream, &delete_activity(&1)) end, timeout: :infinity)
|
end)
|
||||||
|
|> Stream.run()
|
||||||
|
|
||||||
{:ok, user}
|
{:ok, user}
|
||||||
end
|
end
|
||||||
|
|
||||||
defp delete_activity(%{data: %{"type" => "Create"}} = activity) do
|
defp delete_activity(%{data: %{"type" => "Create"}} = activity) do
|
||||||
Object.normalize(activity) |> ActivityPub.delete()
|
activity
|
||||||
|
|> Object.normalize()
|
||||||
|
|> ActivityPub.delete()
|
||||||
|
end
|
||||||
|
|
||||||
|
defp delete_activity(%{data: %{"type" => "Like"}} = activity) do
|
||||||
|
user = get_cached_by_ap_id(activity.actor)
|
||||||
|
object = Object.normalize(activity)
|
||||||
|
|
||||||
|
ActivityPub.unlike(user, object)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp delete_activity(%{data: %{"type" => "Announce"}} = activity) do
|
||||||
|
user = get_cached_by_ap_id(activity.actor)
|
||||||
|
object = Object.normalize(activity)
|
||||||
|
|
||||||
|
ActivityPub.unannounce(user, object)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp delete_activity(_activity), do: "Doing nothing"
|
defp delete_activity(_activity), do: "Doing nothing"
|
||||||
|
@ -1037,9 +1058,7 @@ def html_filter_policy(%User{info: %{no_rich_text: true}}) do
|
||||||
Pleroma.HTML.Scrubber.TwitterText
|
Pleroma.HTML.Scrubber.TwitterText
|
||||||
end
|
end
|
||||||
|
|
||||||
@default_scrubbers Pleroma.Config.get([:markup, :scrub_policy])
|
def html_filter_policy(_), do: Pleroma.Config.get([:markup, :scrub_policy])
|
||||||
|
|
||||||
def html_filter_policy(_), do: @default_scrubbers
|
|
||||||
|
|
||||||
def fetch_by_ap_id(ap_id) do
|
def fetch_by_ap_id(ap_id) do
|
||||||
ap_try = ActivityPub.make_user_from_ap_id(ap_id)
|
ap_try = ActivityPub.make_user_from_ap_id(ap_id)
|
||||||
|
@ -1402,4 +1421,12 @@ def get_ap_ids_by_nicknames(nicknames) do
|
||||||
end
|
end
|
||||||
|
|
||||||
defdelegate search(query, opts \\ []), to: User.Search
|
defdelegate search(query, opts \\ []), to: User.Search
|
||||||
|
|
||||||
|
defp put_password_hash(
|
||||||
|
%Ecto.Changeset{valid?: true, changes: %{password: password}} = changeset
|
||||||
|
) do
|
||||||
|
change(changeset, password_hash: Pbkdf2.hashpwsalt(password))
|
||||||
|
end
|
||||||
|
|
||||||
|
defp put_password_hash(changeset), do: changeset
|
||||||
end
|
end
|
||||||
|
|
|
@ -7,74 +7,97 @@ defmodule Pleroma.User.Search do
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
import Ecto.Query
|
import Ecto.Query
|
||||||
|
|
||||||
def search(query, opts \\ []) do
|
@similarity_threshold 0.25
|
||||||
|
@limit 20
|
||||||
|
|
||||||
|
def search(query_string, opts \\ []) do
|
||||||
resolve = Keyword.get(opts, :resolve, false)
|
resolve = Keyword.get(opts, :resolve, false)
|
||||||
|
following = Keyword.get(opts, :following, false)
|
||||||
|
result_limit = Keyword.get(opts, :limit, @limit)
|
||||||
|
offset = Keyword.get(opts, :offset, 0)
|
||||||
|
|
||||||
   for_user = Keyword.get(opts, :for_user)

   # Strip the beginning @ off if there is a query
-  query = String.trim_leading(query, "@")
+  query_string = String.trim_leading(query_string, "@")

-  maybe_resolve(resolve, for_user, query)
+  maybe_resolve(resolve, for_user, query_string)

   {:ok, results} =
     Repo.transaction(fn ->
-      Ecto.Adapters.SQL.query(Repo, "select set_limit(0.25)", [])
+      Ecto.Adapters.SQL.query(
+        Repo,
+        "select set_limit(#{@similarity_threshold})",
+        []
+      )

-      query
-      |> search_query(for_user)
+      query_string
+      |> search_query(for_user, following)
+      |> paginate(result_limit, offset)
       |> Repo.all()
     end)

   results
 end

-defp maybe_resolve(true, %User{}, query) do
-  User.get_or_fetch(query)
-end
-
-defp maybe_resolve(true, _, query) do
-  unless restrict_local?(), do: User.get_or_fetch(query)
-end
-
-defp maybe_resolve(_, _, _), do: :noop
-
-defp search_query(query, for_user) do
-  query
-  |> union_query()
+defp search_query(query_string, for_user, following) do
+  for_user
+  |> base_query(following)
+  |> search_subqueries(query_string)
+  |> union_subqueries
   |> distinct_query()
   |> boost_search_rank_query(for_user)
   |> subquery()
   |> order_by(desc: :search_rank)
-  |> limit(20)
   |> maybe_restrict_local(for_user)
 end

-defp restrict_local? do
-  Pleroma.Config.get([:instance, :limit_unauthenticated_to_local_content], true)
-end
+defp base_query(_user, false), do: User
+defp base_query(user, true), do: User.get_followers_query(user)
+
+defp paginate(query, limit, offset) do
+  from(q in query, limit: ^limit, offset: ^offset)
+end

-defp union_query(query) do
-  fts_subquery = fts_search_subquery(query)
-  trigram_subquery = trigram_search_subquery(query)
-
+defp union_subqueries({fts_subquery, trigram_subquery}) do
   from(s in trigram_subquery, union_all: ^fts_subquery)
 end

+defp search_subqueries(base_query, query_string) do
+  {
+    fts_search_subquery(base_query, query_string),
+    trigram_search_subquery(base_query, query_string)
+  }
+end
+
 defp distinct_query(q) do
   from(s in subquery(q), order_by: s.search_type, distinct: s.id)
 end

-# unauthenticated users can only search local activities
-defp maybe_restrict_local(q, %User{}), do: q
-
-defp maybe_restrict_local(q, _) do
-  if restrict_local?() do
-    where(q, [u], u.local == true)
-  else
-    q
-  end
-end
+defp maybe_resolve(true, user, query) do
+  case {limit(), user} do
+    {:all, _} -> :noop
+    {:unauthenticated, %User{}} -> User.get_or_fetch(query)
+    {:unauthenticated, _} -> :noop
+    {false, _} -> User.get_or_fetch(query)
+  end
+end
+
+defp maybe_resolve(_, _, _), do: :noop
+
+defp maybe_restrict_local(q, user) do
+  case {limit(), user} do
+    {:all, _} -> restrict_local(q)
+    {:unauthenticated, %User{}} -> q
+    {:unauthenticated, _} -> restrict_local(q)
+    {false, _} -> q
+  end
+end
+
+defp limit, do: Pleroma.Config.get([:instance, :limit_to_local_content], :unauthenticated)
+
+defp restrict_local(q), do: where(q, [u], u.local == true)

 defp boost_search_rank_query(query, nil), do: query

 defp boost_search_rank_query(query, for_user) do
@@ -103,7 +126,8 @@ defp boost_search_rank_query(query, for_user) do
   )
 end

-defp fts_search_subquery(term, query \\ User) do
+@spec fts_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t()
+defp fts_search_subquery(query, term) do
   processed_query =
     term
     |> String.replace(~r/\W+/, " ")
@@ -145,9 +169,10 @@ defp fts_search_subquery(term, query \\ User) do
   |> User.restrict_deactivated()
 end

-defp trigram_search_subquery(term) do
+@spec trigram_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t()
+defp trigram_search_subquery(query, term) do
   from(
-    u in User,
+    u in query,
     select_merge: %{
       # ^1 gives 'Postgrex expected a binary, got 1' for some weird reason
       search_type: fragment("?", 1),
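A rough sketch of how the reworked options are meant to be threaded through the search (the option names come from the hunks above and from the new SearchController; the query and user are hypothetical):

  # resolve: fetch remote accounts, following: search only accounts the user follows,
  # limit/offset: handled by paginate/3, for_user: rank boosting and local-only limits.
  User.search("lain",
    resolve: true,
    following: false,
    limit: 10,
    offset: 0,
    for_user: user
  )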
@@ -189,6 +189,22 @@ def stream_out_participations(participations) do
     end)
   end

+  def stream_out_participations(%Object{data: %{"context" => context}}, user) do
+    with %Conversation{} = conversation <- Conversation.get_for_ap_id(context),
+         conversation = Repo.preload(conversation, :participations),
+         last_activity_id =
+           fetch_latest_activity_id_for_context(conversation.ap_id, %{
+             "user" => user,
+             "blocking_user" => user
+           }) do
+      if last_activity_id do
+        stream_out_participations(conversation.participations)
+      end
+    end
+  end
+
+  def stream_out_participations(_, _), do: :noop
+
   def stream_out(activity) do
     public = "https://www.w3.org/ns/activitystreams#Public"

@@ -401,7 +417,8 @@ def delete(%Object{data: %{"id" => id, "actor" => actor}} = object, local \\ tru
            "to" => to,
            "deleted_activity_id" => activity && activity.id
          },
-         {:ok, activity} <- insert(data, local),
+         {:ok, activity} <- insert(data, local, false),
+         stream_out_participations(object, user),
          _ <- decrease_replies_count_if_reply(object),
          # Changing note count prior to enqueuing federation task in order to avoid
          # race conditions on updating user.info
48  lib/pleroma/web/activity_pub/mrf/anti_link_spam_policy.ex  Normal file
@@ -0,0 +1,48 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy do
+  alias Pleroma.User
+
+  require Logger
+
+  # has the user successfully posted before?
+  defp old_user?(%User{} = u) do
+    u.info.note_count > 0 || u.info.follower_count > 0
+  end
+
+  # does the post contain links?
+  defp contains_links?(%{"content" => content} = _object) do
+    content
+    |> Floki.filter_out("a.mention,a.hashtag,a[rel~=\"tag\"],a.zrl")
+    |> Floki.attribute("a", "href")
+    |> length() > 0
+  end
+
+  defp contains_links?(_), do: false
+
+  def filter(%{"type" => "Create", "actor" => actor, "object" => object} = message) do
+    with {:ok, %User{} = u} <- User.get_or_fetch_by_ap_id(actor),
+         {:contains_links, true} <- {:contains_links, contains_links?(object)},
+         {:old_user, true} <- {:old_user, old_user?(u)} do
+      {:ok, message}
+    else
+      {:contains_links, false} ->
+        {:ok, message}
+
+      {:old_user, false} ->
+        {:reject, nil}
+
+      {:error, _} ->
+        {:reject, nil}
+
+      e ->
+        Logger.warn("[MRF anti-link-spam] WTF: unhandled error #{inspect(e)}")
+        {:reject, nil}
+    end
+  end
+
+  # in all other cases, pass through
+  def filter(message), do: {:ok, message}
+end
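A minimal sketch of how the new policy behaves, assuming a hypothetical Create activity; whether the actor counts as an "old user" depends on what User.get_or_fetch_by_ap_id/1 returns at runtime:

  alias Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy

  # A post without links always passes through via the {:contains_links, false} branch.
  {:ok, _} =
    AntiLinkSpamPolicy.filter(%{
      "type" => "Create",
      "actor" => "https://example.com/users/someone",
      "object" => %{"content" => "hello world"}
    })

  # A link-bearing post from an account with note_count == 0 and follower_count == 0
  # hits the {:old_user, false} branch and is rejected:
  # {:reject, nil} = AntiLinkSpamPolicy.filter(link_bearing_message)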
@@ -88,7 +88,7 @@ defp should_federate?(inbox, public) do
       true
     else
       inbox_info = URI.parse(inbox)
-      !Enum.member?(Pleroma.Config.get([:instance, :quarantined_instances], []), inbox_info.host)
+      !Enum.member?(Config.get([:instance, :quarantined_instances], []), inbox_info.host)
     end
   end
@@ -339,7 +339,7 @@ def fix_content_map(object), do: object
   def fix_type(%{"inReplyTo" => reply_id} = object) when is_binary(reply_id) do
     reply = Object.normalize(reply_id)

-    if reply.data["type"] == "Question" and object["name"] do
+    if reply && (reply.data["type"] == "Question" and object["name"]) do
       Map.put(object, "type", "Answer")
     else
       object
@@ -151,16 +151,18 @@ def get_notified_from_object(object) do

   def create_context(context) do
     context = context || generate_id("contexts")
-    changeset = Object.context_mapping(context)

-    case Repo.insert(changeset) do
-      {:ok, object} ->
+    # Ecto has problems accessing the constraint inside the jsonb,
+    # so we explicitly check for the existed object before insert
+    object = Object.get_cached_by_ap_id(context)
+
+    with true <- is_nil(object),
+         changeset <- Object.context_mapping(context),
+         {:ok, inserted_object} <- Repo.insert(changeset) do
+      inserted_object
+    else
+      _ ->
         object
-
-      # This should be solved by an upsert, but it seems ecto
-      # has problems accessing the constraint inside the jsonb.
-      {:error, _} ->
-        Object.get_cached_by_ap_id(context)
     end
   end
@@ -10,6 +10,8 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.Relay
   alias Pleroma.Web.AdminAPI.AccountView
+  alias Pleroma.Web.AdminAPI.Config
+  alias Pleroma.Web.AdminAPI.ConfigView
   alias Pleroma.Web.AdminAPI.ReportView
   alias Pleroma.Web.AdminAPI.Search
   alias Pleroma.Web.CommonAPI

@@ -362,6 +364,41 @@ def status_delete(%{assigns: %{user: user}} = conn, %{"id" => id}) do
     end
   end

+  def config_show(conn, _params) do
+    configs = Pleroma.Repo.all(Config)
+
+    conn
+    |> put_view(ConfigView)
+    |> render("index.json", %{configs: configs})
+  end
+
+  def config_update(conn, %{"configs" => configs}) do
+    updated =
+      if Pleroma.Config.get([:instance, :dynamic_configuration]) do
+        updated =
+          Enum.map(configs, fn
+            %{"group" => group, "key" => key, "value" => value} ->
+              {:ok, config} = Config.update_or_create(%{group: group, key: key, value: value})
+              config
+
+            %{"group" => group, "key" => key, "delete" => "true"} ->
+              {:ok, _} = Config.delete(%{group: group, key: key})
+              nil
+          end)
+          |> Enum.reject(&is_nil(&1))
+
+        Pleroma.Config.TransferTask.load_and_update_env()
+        Mix.Tasks.Pleroma.Config.run(["migrate_from_db", Pleroma.Config.get(:env), "false"])
+        updated
+      else
+        []
+      end
+
+    conn
+    |> put_view(ConfigView)
+    |> render("index.json", %{configs: updated})
+  end
+
   def errors(conn, {:error, :not_found}) do
     conn
     |> put_status(404)
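A hedged sketch of the request shape config_update/2 expects; the group/key/value entries are illustrative, not taken from the diff:

  params = %{
    "configs" => [
      # upsert, stored through Config.update_or_create/1
      %{"group" => "pleroma", "key" => "some_key", "value" => "some value"},
      # removal, handled by Config.delete/1
      %{"group" => "pleroma", "key" => "other_key", "delete" => "true"}
    ]
  }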
160  lib/pleroma/web/admin_api/config.ex  Normal file
@@ -0,0 +1,160 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.AdminAPI.Config do
+  use Ecto.Schema
+  import Ecto.Changeset
+  alias __MODULE__
+  alias Pleroma.Repo
+
+  @type t :: %__MODULE__{}
+
+  schema "config" do
+    field(:key, :string)
+    field(:group, :string)
+    field(:value, :binary)
+
+    timestamps()
+  end
+
+  @spec get_by_params(map()) :: Config.t() | nil
+  def get_by_params(params), do: Repo.get_by(Config, params)
+
+  @spec changeset(Config.t(), map()) :: Changeset.t()
+  def changeset(config, params \\ %{}) do
+    config
+    |> cast(params, [:key, :group, :value])
+    |> validate_required([:key, :group, :value])
+    |> unique_constraint(:key, name: :config_group_key_index)
+  end
+
+  @spec create(map()) :: {:ok, Config.t()} | {:error, Changeset.t()}
+  def create(params) do
+    %Config{}
+    |> changeset(Map.put(params, :value, transform(params[:value])))
+    |> Repo.insert()
+  end
+
+  @spec update(Config.t(), map()) :: {:ok, Config} | {:error, Changeset.t()}
+  def update(%Config{} = config, %{value: value}) do
+    config
+    |> change(value: transform(value))
+    |> Repo.update()
+  end
+
+  @spec update_or_create(map()) :: {:ok, Config.t()} | {:error, Changeset.t()}
+  def update_or_create(params) do
+    with %Config{} = config <- Config.get_by_params(Map.take(params, [:group, :key])) do
+      Config.update(config, params)
+    else
+      nil -> Config.create(params)
+    end
+  end
+
+  @spec delete(map()) :: {:ok, Config.t()} | {:error, Changeset.t()}
+  def delete(params) do
+    with %Config{} = config <- Config.get_by_params(params) do
+      Repo.delete(config)
+    else
+      nil -> {:error, "Config with params #{inspect(params)} not found"}
+    end
+  end
+
+  @spec from_binary(binary()) :: term()
+  def from_binary(value), do: :erlang.binary_to_term(value)
+
+  @spec from_binary_to_map(binary()) :: any()
+  def from_binary_to_map(binary) do
+    from_binary(binary)
+    |> do_convert()
+  end
+
+  defp do_convert([{k, v}] = value) when is_list(value) and length(value) == 1,
+    do: %{k => do_convert(v)}
+
+  defp do_convert(values) when is_list(values), do: for(val <- values, do: do_convert(val))
+
+  defp do_convert({k, v} = value) when is_tuple(value),
+    do: %{k => do_convert(v)}
+
+  defp do_convert(value) when is_tuple(value), do: %{"tuple" => do_convert(Tuple.to_list(value))}
+
+  defp do_convert(value) when is_binary(value) or is_map(value) or is_number(value), do: value
+
+  defp do_convert(value) when is_atom(value) do
+    string = to_string(value)
+
+    if String.starts_with?(string, "Elixir."),
+      do: String.trim_leading(string, "Elixir."),
+      else: value
+  end
+
+  @spec transform(any()) :: binary()
+  def transform(%{"tuple" => _} = entity), do: :erlang.term_to_binary(do_transform(entity))
+
+  def transform(entity) when is_map(entity) do
+    tuples =
+      for {k, v} <- entity,
+          into: [],
+          do: {if(is_atom(k), do: k, else: String.to_atom(k)), do_transform(v)}
+
+    Enum.reject(tuples, fn {_k, v} -> is_nil(v) end)
+    |> Enum.sort()
+    |> :erlang.term_to_binary()
+  end
+
+  def transform(entity) when is_list(entity) do
+    list = Enum.map(entity, &do_transform(&1))
+    :erlang.term_to_binary(list)
+  end
+
+  def transform(entity), do: :erlang.term_to_binary(entity)
+
+  defp do_transform(%Regex{} = value) when is_map(value), do: value
+
+  defp do_transform(%{"tuple" => [k, values] = entity}) when length(entity) == 2 do
+    {do_transform(k), do_transform(values)}
+  end
+
+  defp do_transform(%{"tuple" => values}) do
+    Enum.reduce(values, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end)
+  end
+
+  defp do_transform(value) when is_map(value) do
+    values = for {key, val} <- value, into: [], do: {String.to_atom(key), do_transform(val)}
+
+    Enum.sort(values)
+  end
+
+  defp do_transform(value) when is_list(value) do
+    Enum.map(value, &do_transform(&1))
+  end
+
+  defp do_transform(entity) when is_list(entity) and length(entity) == 1, do: hd(entity)
+
+  defp do_transform(value) when is_binary(value) do
+    String.trim(value)
+    |> do_transform_string()
+  end
+
+  defp do_transform(value), do: value
+
+  defp do_transform_string(value) when byte_size(value) == 0, do: nil
+
+  defp do_transform_string(value) do
+    cond do
+      String.starts_with?(value, "Pleroma") or String.starts_with?(value, "Phoenix") ->
+        String.to_existing_atom("Elixir." <> value)
+
+      String.starts_with?(value, ":") ->
+        String.replace(value, ":", "") |> String.to_existing_atom()
+
+      String.starts_with?(value, "i:") ->
+        String.replace(value, "i:", "") |> String.to_integer()
+
+      true ->
+        value
+    end
+  end
+end
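A small round-trip sketch of the transform/from_binary helpers above (illustrative values):

  alias Pleroma.Web.AdminAPI.Config

  binary = Config.transform(%{"limit" => "i:5000"})
  # internally the map becomes the keyword list [limit: 5000], then :erlang.term_to_binary/1

  Config.from_binary(binary)
  # => [limit: 5000]

  Config.from_binary_to_map(binary)
  # => %{limit: 5000}  (a single-pair keyword list collapses into a map)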
17  lib/pleroma/web/admin_api/views/config_view.ex  Normal file
@@ -0,0 +1,17 @@
+defmodule Pleroma.Web.AdminAPI.ConfigView do
+  use Pleroma.Web, :view
+
+  def render("index.json", %{configs: configs}) do
+    %{
+      configs: render_many(configs, __MODULE__, "show.json", as: :config)
+    }
+  end
+
+  def render("show.json", %{config: config}) do
+    %{
+      key: config.key,
+      group: config.group,
+      value: Pleroma.Web.AdminAPI.Config.from_binary_to_map(config.value)
+    }
+  end
+end
@@ -5,6 +5,7 @@
 defmodule Pleroma.Web.AdminAPI.ReportView do
   use Pleroma.Web, :view
   alias Pleroma.Activity
+  alias Pleroma.HTML
   alias Pleroma.User
   alias Pleroma.Web.CommonAPI.Utils
   alias Pleroma.Web.MastodonAPI.AccountView

@@ -23,6 +24,13 @@ def render("show.json", %{report: report}) do
     [account_ap_id | status_ap_ids] = report.data["object"]
     account = User.get_cached_by_ap_id(account_ap_id)

+    content =
+      unless is_nil(report.data["content"]) do
+        HTML.filter_tags(report.data["content"])
+      else
+        nil
+      end
+
     statuses =
       Enum.map(status_ap_ids, fn ap_id ->
         Activity.get_by_ap_id_with_object(ap_id)

@@ -32,7 +40,7 @@ def render("show.json", %{report: report}) do
       id: report.id,
       account: AccountView.render("account.json", %{user: account}),
       actor: AccountView.render("account.json", %{user: user}),
-      content: report.data["content"],
+      content: content,
       created_at: created_at,
       statuses: StatusView.render("index.json", %{activities: statuses, as: :activity}),
       state: report.data["state"]
@@ -212,7 +212,7 @@ def post(user, %{"status" => status} = data) do
          cw <- data["spoiler_text"] || "",
          sensitive <- data["sensitive"] || Enum.member?(tags, {"#nsfw", "nsfw"}),
          full_payload <- String.trim(status <> cw),
-         length when length in 1..limit <- String.length(full_payload),
+         :ok <- validate_character_limit(full_payload, attachments, limit),
          object <-
            make_note_data(
              user.ap_id,

@@ -247,6 +247,8 @@ def post(user, %{"status" => status} = data) do

       res
     else
+      {:private_to_public, true} -> {:error, "The message visibility must be direct"}
+      {:error, _} = e -> e
       e -> {:error, e}
     end
   end
@@ -504,4 +504,18 @@ def make_answer_data(%User{ap_id: ap_id}, object, name) do
       "inReplyTo" => object.data["id"]
     }
   end
+
+  def validate_character_limit(full_payload, attachments, limit) do
+    length = String.length(full_payload)
+
+    if length < limit do
+      if length > 0 or Enum.count(attachments) > 0 do
+        :ok
+      else
+        {:error, "Cannot post an empty status without attachments"}
+      end
+    else
+      {:error, "The status is over the character limit"}
+    end
+  end
 end
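Illustrative calls to the new helper, assuming a 5000-character limit:

  validate_character_limit("hello", [], 5000)
  # => :ok
  validate_character_limit("", [%{}], 5000)
  # => :ok (empty text is allowed when at least one attachment is present)
  validate_character_limit("", [], 5000)
  # => {:error, "Cannot post an empty status without attachments"}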
@@ -15,4 +15,22 @@ def json_response(conn, status, json) do
     |> put_status(status)
     |> json(json)
   end
+
+  @spec fetch_integer_param(map(), String.t(), integer() | nil) :: integer() | nil
+  def fetch_integer_param(params, name, default \\ nil) do
+    params
+    |> Map.get(name, default)
+    |> param_to_integer(default)
+  end
+
+  defp param_to_integer(val, _) when is_integer(val), do: val
+
+  defp param_to_integer(val, default) when is_binary(val) do
+    case Integer.parse(val) do
+      {res, _} -> res
+      _ -> default
+    end
+  end
+
+  defp param_to_integer(_, default), do: default
 end
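Illustrative behavior of the new helper:

  fetch_integer_param(%{"limit" => "40"}, "limit")        # => 40
  fetch_integer_param(%{"limit" => 40}, "limit")          # => 40
  fetch_integer_param(%{}, "limit", 20)                   # => 20
  fetch_integer_param(%{"limit" => "bogus"}, "limit")     # => nil (falls back to the default)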
@@ -91,7 +91,7 @@ defmodule Pleroma.Web.Endpoint do
     Plug.Session,
     store: :cookie,
     key: cookie_name,
-    signing_salt: {Pleroma.Config, :get, [[__MODULE__, :signing_salt], "CqaoopA2"]},
+    signing_salt: Pleroma.Config.get([__MODULE__, :signing_salt], "CqaoopA2"),
     http_only: true,
     secure: secure_cookies,
     extra: extra
@@ -15,7 +15,9 @@ def init(args) do

   def start_link do
     enabled =
-      if Mix.env() == :test, do: true, else: Pleroma.Config.get([__MODULE__, :enabled], false)
+      if Pleroma.Config.get(:env) == :test,
+        do: true,
+        else: Pleroma.Config.get([__MODULE__, :enabled], false)

     if enabled do
       Logger.info("Starting retry queue")

@@ -219,7 +221,7 @@ def handle_info(unknown, state) do
     {:noreply, state}
   end

-  if Mix.env() == :test do
+  if Pleroma.Config.get(:env) == :test do
     defp growth_function(_retries) do
       _shutit = Pleroma.Config.get([__MODULE__, :initial_timeout])
       DateTime.to_unix(DateTime.utc_now()) - 1
@@ -46,14 +46,8 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIController do

   require Logger

-  plug(
-    Pleroma.Plugs.RateLimitPlug,
-    %{
-      max_requests: Config.get([:app_account_creation, :max_requests]),
-      interval: Config.get([:app_account_creation, :interval])
-    }
-    when action in [:account_register]
-  )
+  plug(Pleroma.Plugs.RateLimiter, :app_account_creation when action == :account_register)
+  plug(Pleroma.Plugs.RateLimiter, :search when action in [:search, :search2, :account_search])

   @local_mastodon_name "Mastodon-Local"

@@ -142,6 +136,14 @@ def update_credentials(%{assigns: %{user: user}} = conn, params) do
           _ -> :error
         end
       end)
+      |> add_if_present(params, "pleroma_background_image", :background, fn value ->
+        with %Plug.Upload{} <- value,
+             {:ok, object} <- ActivityPub.upload(value, type: :background) do
+          {:ok, object.data}
+        else
+          _ -> :error
+        end
+      end)
       |> Map.put(:emoji, user_info_emojis)

     info_cng = User.Info.profile_update(user.info, info_params)

@@ -166,8 +168,15 @@ def update_credentials(%{assigns: %{user: user}} = conn, params) do
   end

   def verify_credentials(%{assigns: %{user: user}} = conn, _) do
+    chat_token = Phoenix.Token.sign(conn, "user socket", user.id)
+
     account =
-      AccountView.render("account.json", %{user: user, for: user, with_pleroma_settings: true})
+      AccountView.render("account.json", %{
+        user: user,
+        for: user,
+        with_pleroma_settings: true,
+        with_chat_token: chat_token
+      })

     json(conn, account)
   end
@@ -445,12 +454,26 @@ def get_poll(%{assigns: %{user: user}} = conn, %{"id" => id}) do
     end
   end

+  defp get_cached_vote_or_vote(user, object, choices) do
+    idempotency_key = "polls:#{user.id}:#{object.data["id"]}"
+
+    {_, res} =
+      Cachex.fetch(:idempotency_cache, idempotency_key, fn _ ->
+        case CommonAPI.vote(user, object, choices) do
+          {:error, _message} = res -> {:ignore, res}
+          res -> {:commit, res}
+        end
+      end)
+
+    res
+  end
+
   def poll_vote(%{assigns: %{user: user}} = conn, %{"id" => id, "choices" => choices}) do
     with %Object{} = object <- Object.get_by_id(id),
          true <- object.data["type"] == "Question",
          %Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
          true <- Visibility.visible_for_user?(activity, user),
-         {:ok, _activities, object} <- CommonAPI.vote(user, object, choices) do
+         {:ok, _activities, object} <- get_cached_vote_or_vote(user, object, choices) do
       conn
       |> put_view(StatusView)
       |> try_render("poll.json", %{object: object, for: user})
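A sketch of the intended idempotency behavior, assuming the :idempotency_cache Cachex cache is running and user/object are hypothetical:

  # First vote: CommonAPI.vote/3 runs and its {:ok, ...} result is committed under
  # the "polls:<user id>:<poll ap_id>" key.
  {:ok, _activities, object} = get_cached_vote_or_vote(user, object, ["0"])

  # A repeated vote on the same poll by the same user returns the cached result
  # instead of voting twice; error tuples are returned with :ignore and not cached.
  {:ok, _activities, _object} = get_cached_vote_or_vote(user, object, ["0"])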
@@ -521,15 +544,6 @@ def delete_scheduled_status(%{assigns: %{user: user}} = conn, %{"id" => schedule
     end
   end

-  def post_status(conn, %{"status" => "", "media_ids" => media_ids} = params)
-      when length(media_ids) > 0 do
-    params =
-      params
-      |> Map.put("status", ".")
-
-    post_status(conn, params)
-  end
-
   def post_status(%{assigns: %{user: user}} = conn, %{"status" => _} = params) do
     params =
       params

@@ -547,18 +561,13 @@ def post_status(%{assigns: %{user: user}} = conn, %{"status" => _} = params) do
     else
       params = Map.drop(params, ["scheduled_at"])

-      case get_cached_status_or_post(conn, params) do
-        {:ignore, message} ->
-          conn
-          |> put_status(422)
-          |> json(%{error: message})
-
+      case CommonAPI.post(user, params) do
         {:error, message} ->
           conn
-          |> put_status(422)
+          |> put_status(:unprocessable_entity)
           |> json(%{error: message})

-        {_, activity} ->
+        {:ok, activity} ->
           conn
           |> put_view(StatusView)
           |> try_render("status.json", %{activity: activity, for: user, as: :activity})

@@ -566,21 +575,6 @@ def post_status(%{assigns: %{user: user}} = conn, %{"status" => _} = params) do
     end
   end

-  defp get_cached_status_or_post(%{assigns: %{user: user}} = conn, params) do
-    idempotency_key =
-      case get_req_header(conn, "idempotency-key") do
-        [key] -> key
-        _ -> Ecto.UUID.generate()
-      end
-
-    Cachex.fetch(:idempotency_cache, idempotency_key, fn _ ->
-      case CommonAPI.post(user, params) do
-        {:ok, activity} -> activity
-        {:error, message} -> {:ignore, message}
-      end
-    end)
-  end
-
   def delete_status(%{assigns: %{user: user}} = conn, %{"id" => id}) do
     with {:ok, %Activity{}} <- CommonAPI.delete(id, user) do
       json(conn, %{})
@@ -830,7 +824,7 @@ def favourited_by(%{assigns: %{user: user}} = conn, %{"id" => id}) do

       conn
       |> put_view(AccountView)
-      |> render(AccountView, "accounts.json", %{for: user, users: users, as: :user})
+      |> render("accounts.json", %{for: user, users: users, as: :user})
     else
       _ -> json(conn, [])
     end
@@ -1124,58 +1118,6 @@ def unsubscribe(%{assigns: %{user: user}} = conn, %{"id" => id}) do
     end
   end

-  def search2(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
-    accounts = User.search(query, resolve: params["resolve"] == "true", for_user: user)
-    statuses = Activity.search(user, query)
-    tags_path = Web.base_url() <> "/tag/"
-
-    tags =
-      query
-      |> String.split()
-      |> Enum.uniq()
-      |> Enum.filter(fn tag -> String.starts_with?(tag, "#") end)
-      |> Enum.map(fn tag -> String.slice(tag, 1..-1) end)
-      |> Enum.map(fn tag -> %{name: tag, url: tags_path <> tag} end)
-
-    res = %{
-      "accounts" => AccountView.render("accounts.json", users: accounts, for: user, as: :user),
-      "statuses" =>
-        StatusView.render("index.json", activities: statuses, for: user, as: :activity),
-      "hashtags" => tags
-    }
-
-    json(conn, res)
-  end
-
-  def search(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
-    accounts = User.search(query, resolve: params["resolve"] == "true", for_user: user)
-    statuses = Activity.search(user, query)
-
-    tags =
-      query
-      |> String.split()
-      |> Enum.uniq()
-      |> Enum.filter(fn tag -> String.starts_with?(tag, "#") end)
-      |> Enum.map(fn tag -> String.slice(tag, 1..-1) end)
-
-    res = %{
-      "accounts" => AccountView.render("accounts.json", users: accounts, for: user, as: :user),
-      "statuses" =>
-        StatusView.render("index.json", activities: statuses, for: user, as: :activity),
-      "hashtags" => tags
-    }
-
-    json(conn, res)
-  end
-
-  def account_search(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
-    accounts = User.search(query, resolve: params["resolve"] == "true", for_user: user)
-
-    res = AccountView.render("accounts.json", users: accounts, for: user, as: :user)
-
-    json(conn, res)
-  end
-
   def favourites(%{assigns: %{user: user}} = conn, params) do
     params =
       params
79  lib/pleroma/web/mastodon_api/search_controller.ex  Normal file
@@ -0,0 +1,79 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.MastodonAPI.SearchController do
+  use Pleroma.Web, :controller
+  alias Pleroma.Activity
+  alias Pleroma.User
+  alias Pleroma.Web
+  alias Pleroma.Web.MastodonAPI.AccountView
+  alias Pleroma.Web.MastodonAPI.StatusView
+
+  alias Pleroma.Web.ControllerHelper
+
+  require Logger
+
+  plug(Pleroma.Plugs.RateLimiter, :search when action in [:search, :search2, :account_search])
+
+  def search2(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
+    accounts = User.search(query, search_options(params, user))
+    statuses = Activity.search(user, query)
+    tags_path = Web.base_url() <> "/tag/"
+
+    tags =
+      query
+      |> String.split()
+      |> Enum.uniq()
+      |> Enum.filter(fn tag -> String.starts_with?(tag, "#") end)
+      |> Enum.map(fn tag -> String.slice(tag, 1..-1) end)
+      |> Enum.map(fn tag -> %{name: tag, url: tags_path <> tag} end)
+
+    res = %{
+      "accounts" => AccountView.render("accounts.json", users: accounts, for: user, as: :user),
+      "statuses" =>
+        StatusView.render("index.json", activities: statuses, for: user, as: :activity),
+      "hashtags" => tags
+    }
+
+    json(conn, res)
+  end
+
+  def search(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
+    accounts = User.search(query, search_options(params, user))
+    statuses = Activity.search(user, query)
+
+    tags =
+      query
+      |> String.split()
+      |> Enum.uniq()
+      |> Enum.filter(fn tag -> String.starts_with?(tag, "#") end)
+      |> Enum.map(fn tag -> String.slice(tag, 1..-1) end)
+
+    res = %{
+      "accounts" => AccountView.render("accounts.json", users: accounts, for: user, as: :user),
+      "statuses" =>
+        StatusView.render("index.json", activities: statuses, for: user, as: :activity),
+      "hashtags" => tags
+    }
+
+    json(conn, res)
+  end
+
+  def account_search(%{assigns: %{user: user}} = conn, %{"q" => query} = params) do
+    accounts = User.search(query, search_options(params, user))
+    res = AccountView.render("accounts.json", users: accounts, for: user, as: :user)
+
+    json(conn, res)
+  end
+
+  defp search_options(params, user) do
+    [
+      resolve: params["resolve"] == "true",
+      following: params["following"] == "true",
+      limit: ControllerHelper.fetch_integer_param(params, "limit"),
+      offset: ControllerHelper.fetch_integer_param(params, "offset"),
+      for_user: user
+    ]
+  end
+end
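Illustrative output of search_options/2 for a hypothetical request:

  search_options(%{"q" => "lain", "resolve" => "true", "limit" => "10", "offset" => "0"}, user)
  # => [resolve: true, following: false, limit: 10, offset: 0, for_user: user]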
@@ -66,6 +66,8 @@ def render("relationships.json", %{user: user, targets: targets}) do
   end

   defp do_render("account.json", %{user: user} = opts) do
+    display_name = HTML.strip_tags(user.name || user.nickname)
+
     image = User.avatar_url(user) |> MediaProxy.url()
     header = User.banner_url(user) |> MediaProxy.url()
     user_info = User.get_cached_user_info(user)

@@ -96,7 +98,7 @@ defp do_render("account.json", %{user: user} = opts) do
       id: to_string(user.id),
       username: username_from_nickname(user.nickname),
       acct: user.nickname,
-      display_name: user.name || user.nickname,
+      display_name: display_name,
       locked: user_info.locked,
       created_at: Utils.to_masto_date(user.inserted_at),
       followers_count: user_info.follower_count,

@@ -125,13 +127,15 @@ defp do_render("account.json", %{user: user} = opts) do
         hide_follows: user.info.hide_follows,
         hide_favorites: user.info.hide_favorites,
         relationship: relationship,
-        skip_thread_containment: user.info.skip_thread_containment
+        skip_thread_containment: user.info.skip_thread_containment,
+        background_image: image_url(user.info.background) |> MediaProxy.url()
       }
     }
     |> maybe_put_role(user, opts[:for])
     |> maybe_put_settings(user, opts[:for], user_info)
     |> maybe_put_notification_settings(user, opts[:for])
     |> maybe_put_settings_store(user, opts[:for], opts)
+    |> maybe_put_chat_token(user, opts[:for], opts)
   end

   defp username_from_nickname(string) when is_binary(string) do

@@ -163,6 +167,15 @@ defp maybe_put_settings_store(data, %User{info: info, id: id}, %User{id: id}, %{

   defp maybe_put_settings_store(data, _, _, _), do: data

+  defp maybe_put_chat_token(data, %User{id: id}, %User{id: id}, %{
+         with_chat_token: token
+       }) do
+    data
+    |> Kernel.put_in([:pleroma, :chat_token], token)
+  end
+
+  defp maybe_put_chat_token(data, _, _, _), do: data
+
   defp maybe_put_role(data, %User{info: %{show_role: true}} = user, _) do
     data
     |> Kernel.put_in([:pleroma, :is_admin], user.info.is_admin)

@@ -182,4 +195,7 @@ defp maybe_put_notification_settings(data, %User{id: user_id} = user, %User{id:
   end

   defp maybe_put_notification_settings(data, _, _), do: data
+
+  defp image_url(%{"url" => [%{"href" => href} | _]}), do: href
+  defp image_url(_), do: nil
 end
@@ -17,6 +17,7 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
     "public:media",
     "public:local:media",
     "user",
+    "user:notification",
     "direct",
     "list",
     "hashtag"
@@ -76,14 +76,16 @@ def use_token(%Authorization{used: false, valid_until: valid_until} = auth) do
   def use_token(%Authorization{used: true}), do: {:error, "already used"}

   @spec delete_user_authorizations(User.t()) :: {integer(), any()}
-  def delete_user_authorizations(%User{id: user_id}) do
-    from(
-      a in Pleroma.Web.OAuth.Authorization,
-      where: a.user_id == ^user_id
-    )
+  def delete_user_authorizations(%User{} = user) do
+    user
+    |> delete_by_user_query
     |> Repo.delete_all()
   end

+  def delete_by_user_query(%User{id: user_id}) do
+    from(a in __MODULE__, where: a.user_id == ^user_id)
+  end
+
   @doc "gets auth for app by token"
   @spec get_by_token(App.t(), String.t()) :: {:ok, t()} | {:error, :not_found}
   def get_by_token(%App{id: app_id} = _app, token) do
@@ -5,6 +5,7 @@
 defmodule Pleroma.Web.OAuth.OAuthController do
   use Pleroma.Web, :controller

+  alias Pleroma.Helpers.UriHelper
   alias Pleroma.Registration
   alias Pleroma.Repo
   alias Pleroma.User

@@ -26,34 +27,25 @@ defmodule Pleroma.Web.OAuth.OAuthController do

   action_fallback(Pleroma.Web.OAuth.FallbackController)

+  @oob_token_redirect_uri "urn:ietf:wg:oauth:2.0:oob"
+
   # Note: this definition is only called from error-handling methods with `conn.params` as 2nd arg
-  def authorize(conn, %{"authorization" => _} = params) do
+  def authorize(%Plug.Conn{} = conn, %{"authorization" => _} = params) do
     {auth_attrs, params} = Map.pop(params, "authorization")
     authorize(conn, Map.merge(params, auth_attrs))
   end

-  def authorize(%{assigns: %{token: %Token{} = token}} = conn, params) do
+  def authorize(%Plug.Conn{assigns: %{token: %Token{}}} = conn, params) do
     if ControllerHelper.truthy_param?(params["force_login"]) do
       do_authorize(conn, params)
     else
-      redirect_uri =
-        if is_binary(params["redirect_uri"]) do
-          params["redirect_uri"]
-        else
-          app = Repo.preload(token, :app).app
-
-          app.redirect_uris
-          |> String.split()
-          |> Enum.at(0)
-        end
-
-      redirect(conn, external: redirect_uri(conn, redirect_uri))
+      handle_existing_authorization(conn, params)
     end
   end

-  def authorize(conn, params), do: do_authorize(conn, params)
+  def authorize(%Plug.Conn{} = conn, params), do: do_authorize(conn, params)

-  defp do_authorize(conn, params) do
+  defp do_authorize(%Plug.Conn{} = conn, params) do
     app = Repo.get_by(App, client_id: params["client_id"])
     available_scopes = (app && app.scopes) || []
     scopes = Scopes.fetch_scopes(params, available_scopes)
@@ -70,8 +62,41 @@ defp do_authorize(conn, params) do
     })
   end

+  defp handle_existing_authorization(
+         %Plug.Conn{assigns: %{token: %Token{} = token}} = conn,
+         %{"redirect_uri" => @oob_token_redirect_uri}
+       ) do
+    render(conn, "oob_token_exists.html", %{token: token})
+  end
+
+  defp handle_existing_authorization(
+         %Plug.Conn{assigns: %{token: %Token{} = token}} = conn,
+         %{} = params
+       ) do
+    app = Repo.preload(token, :app).app
+
+    redirect_uri =
+      if is_binary(params["redirect_uri"]) do
+        params["redirect_uri"]
+      else
+        default_redirect_uri(app)
+      end
+
+    if redirect_uri in String.split(app.redirect_uris) do
+      redirect_uri = redirect_uri(conn, redirect_uri)
+      url_params = %{access_token: token.token}
+      url_params = UriHelper.append_param_if_present(url_params, :state, params["state"])
+      url = UriHelper.append_uri_params(redirect_uri, url_params)
+      redirect(conn, external: url)
+    else
+      conn
+      |> put_flash(:error, "Unlisted redirect_uri.")
+      |> redirect(external: redirect_uri(conn, redirect_uri))
+    end
+  end
+
   def create_authorization(
-        conn,
+        %Plug.Conn{} = conn,
         %{"authorization" => _} = params,
         opts \\ []
       ) do
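A rough sketch of the redirect URL construction used above; UriHelper itself is not part of this hunk, so its exact behavior is an assumption here:

  url_params = %{access_token: token.token}
  url_params = UriHelper.append_param_if_present(url_params, :state, params["state"])
  UriHelper.append_uri_params("https://app.example/callback", url_params)
  # => roughly "https://app.example/callback?access_token=...&state=..." when "state"
  #    was sent, and "...?access_token=..." otherwise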
@@ -83,35 +108,33 @@ def create_authorization(
     end
   end

-  def after_create_authorization(conn, auth, %{
+  def after_create_authorization(%Plug.Conn{} = conn, %Authorization{} = auth, %{
+        "authorization" => %{"redirect_uri" => @oob_token_redirect_uri}
+      }) do
+    render(conn, "oob_authorization_created.html", %{auth: auth})
+  end
+
+  def after_create_authorization(%Plug.Conn{} = conn, %Authorization{} = auth, %{
         "authorization" => %{"redirect_uri" => redirect_uri} = auth_attrs
       }) do
-    redirect_uri = redirect_uri(conn, redirect_uri)
+    app = Repo.preload(auth, :app).app

-    if redirect_uri == "urn:ietf:wg:oauth:2.0:oob" do
-      render(conn, "results.html", %{
-        auth: auth
-      })
-    else
-      connector = if String.contains?(redirect_uri, "?"), do: "&", else: "?"
-      url = "#{redirect_uri}#{connector}"
-      url_params = %{:code => auth.token}
-
-      url_params =
-        if auth_attrs["state"] do
-          Map.put(url_params, :state, auth_attrs["state"])
-        else
-          url_params
-        end
-
-      url = "#{url}#{Plug.Conn.Query.encode(url_params)}"
-
+    # An extra safety measure before we redirect (also done in `do_create_authorization/2`)
+    if redirect_uri in String.split(app.redirect_uris) do
+      redirect_uri = redirect_uri(conn, redirect_uri)
+      url_params = %{code: auth.token}
+      url_params = UriHelper.append_param_if_present(url_params, :state, auth_attrs["state"])
+      url = UriHelper.append_uri_params(redirect_uri, url_params)
       redirect(conn, external: url)
+    else
+      conn
+      |> put_flash(:error, "Unlisted redirect_uri.")
+      |> redirect(external: redirect_uri(conn, redirect_uri))
     end
   end

   defp handle_create_authorization_error(
-         conn,
+         %Plug.Conn{} = conn,
          {:error, scopes_issue},
          %{"authorization" => _} = params
       )
@@ -125,7 +148,7 @@ defp handle_create_authorization_error(
   end

   defp handle_create_authorization_error(
-         conn,
+         %Plug.Conn{} = conn,
          {:auth_active, false},
          %{"authorization" => _} = params
       ) do

@@ -137,13 +160,13 @@ defp handle_create_authorization_error(
     |> authorize(params)
   end

-  defp handle_create_authorization_error(conn, error, %{"authorization" => _}) do
+  defp handle_create_authorization_error(%Plug.Conn{} = conn, error, %{"authorization" => _}) do
     Authenticator.handle_error(conn, error)
   end

   @doc "Renew access_token with refresh_token"
   def token_exchange(
-        conn,
+        %Plug.Conn{} = conn,
         %{"grant_type" => "refresh_token", "refresh_token" => token} = _params
       ) do
     with {:ok, app} <- Token.Utils.fetch_app(conn),

@@ -159,7 +182,7 @@ def token_exchange(
     end
   end

-  def token_exchange(conn, %{"grant_type" => "authorization_code"} = params) do
+  def token_exchange(%Plug.Conn{} = conn, %{"grant_type" => "authorization_code"} = params) do
     with {:ok, app} <- Token.Utils.fetch_app(conn),
          fixed_token = Token.Utils.fix_padding(params["code"]),
          {:ok, auth} <- Authorization.get_by_token(app, fixed_token),

@@ -176,7 +199,7 @@ def token_exchange(conn, %{"grant_type" => "authorization_code"} = params) do
   end

   def token_exchange(
-        conn,
+        %Plug.Conn{} = conn,
         %{"grant_type" => "password"} = params
       ) do
     with {:ok, %User{} = user} <- Authenticator.get_user(conn),

@@ -207,7 +230,7 @@ def token_exchange(
   end

   def token_exchange(
-        conn,
+        %Plug.Conn{} = conn,
         %{"grant_type" => "password", "name" => name, "password" => _password} = params
       ) do
     params =

@@ -218,7 +241,7 @@ def token_exchange(
     token_exchange(conn, params)
   end

-  def token_exchange(conn, %{"grant_type" => "client_credentials"} = _params) do
+  def token_exchange(%Plug.Conn{} = conn, %{"grant_type" => "client_credentials"} = _params) do
     with {:ok, app} <- Token.Utils.fetch_app(conn),
          {:ok, auth} <- Authorization.create_authorization(app, %User{}),
          {:ok, token} <- Token.exchange_token(app, auth) do

@@ -231,9 +254,9 @@ def token_exchange(conn, %{"grant_type" => "client_credentials"} = _params) do
   end

   # Bad request
-  def token_exchange(conn, params), do: bad_request(conn, params)
+  def token_exchange(%Plug.Conn{} = conn, params), do: bad_request(conn, params)

-  def token_revoke(conn, %{"token" => _token} = params) do
+  def token_revoke(%Plug.Conn{} = conn, %{"token" => _token} = params) do
     with {:ok, app} <- Token.Utils.fetch_app(conn),
          {:ok, _token} <- RevokeToken.revoke(app, params) do
       json(conn, %{})

@@ -244,17 +267,20 @@ def token_revoke(conn, %{"token" => _token} = params) do
     end
   end

-  def token_revoke(conn, params), do: bad_request(conn, params)
+  def token_revoke(%Plug.Conn{} = conn, params), do: bad_request(conn, params)

   # Response for bad request
-  defp bad_request(conn, _) do
+  defp bad_request(%Plug.Conn{} = conn, _) do
     conn
     |> put_status(500)
     |> json(%{error: "Bad request"})
   end

   @doc "Prepares OAuth request to provider for Ueberauth"
-  def prepare_request(conn, %{"provider" => provider, "authorization" => auth_attrs}) do
+  def prepare_request(%Plug.Conn{} = conn, %{
+        "provider" => provider,
+        "authorization" => auth_attrs
+      }) do
     scope =
       auth_attrs
       |> Scopes.fetch_scopes([])
@ -275,7 +301,7 @@ def prepare_request(conn, %{"provider" => provider, "authorization" => auth_attr
|
||||||
redirect(conn, to: o_auth_path(conn, :request, provider, params))
|
     redirect(conn, to: o_auth_path(conn, :request, provider, params))
   end
 end

-  def request(conn, params) do
+  def request(%Plug.Conn{} = conn, params) do
     message =
       if params["provider"] do
         "Unsupported OAuth provider: #{params["provider"]}."
@@ -288,7 +314,7 @@ def request(conn, params) do
     |> redirect(to: "/")
   end

-  def callback(%{assigns: %{ueberauth_failure: failure}} = conn, params) do
+  def callback(%Plug.Conn{assigns: %{ueberauth_failure: failure}} = conn, params) do
     params = callback_params(params)
     messages = for e <- Map.get(failure, :errors, []), do: e.message
     message = Enum.join(messages, "; ")
@@ -298,7 +324,7 @@ def callback(%{assigns: %{ueberauth_failure: failure}} = conn, params) do
     |> redirect(external: redirect_uri(conn, params["redirect_uri"]))
   end

-  def callback(conn, params) do
+  def callback(%Plug.Conn{} = conn, params) do
     params = callback_params(params)

     with {:ok, registration} <- Authenticator.get_registration(conn) do
@@ -316,7 +342,7 @@ def callback(conn, params) do
       })

       conn
-      |> put_session(:registration_id, registration.id)
+      |> put_session_registration_id(registration.id)
       |> registration_details(%{"authorization" => registration_params})
     end
   else
@@ -333,7 +359,7 @@ defp callback_params(%{"state" => state} = params) do
     Map.merge(params, Jason.decode!(state))
   end

-  def registration_details(conn, %{"authorization" => auth_attrs}) do
+  def registration_details(%Plug.Conn{} = conn, %{"authorization" => auth_attrs}) do
     render(conn, "register.html", %{
       client_id: auth_attrs["client_id"],
       redirect_uri: auth_attrs["redirect_uri"],
@@ -344,7 +370,7 @@ def registration_details(conn, %{"authorization" => auth_attrs}) do
     })
   end

-  def register(conn, %{"authorization" => _, "op" => "connect"} = params) do
+  def register(%Plug.Conn{} = conn, %{"authorization" => _, "op" => "connect"} = params) do
     with registration_id when not is_nil(registration_id) <- get_session_registration_id(conn),
          %Registration{} = registration <- Repo.get(Registration, registration_id),
          {_, {:ok, auth}} <-
@@ -363,7 +389,7 @@ def register(conn, %{"authorization" => _, "op" => "connect"} = params) do
     end
   end

-  def register(conn, %{"authorization" => _, "op" => "register"} = params) do
+  def register(%Plug.Conn{} = conn, %{"authorization" => _, "op" => "register"} = params) do
     with registration_id when not is_nil(registration_id) <- get_session_registration_id(conn),
          %Registration{} = registration <- Repo.get(Registration, registration_id),
          {:ok, user} <- Authenticator.create_from_registration(conn, registration) do
@@ -399,7 +425,7 @@ def register(conn, %{"authorization" => _, "op" => "register"} = params) do
   end

   defp do_create_authorization(
-         conn,
+         %Plug.Conn{} = conn,
          %{
            "authorization" =>
              %{
@@ -420,13 +446,13 @@ defp do_create_authorization(
   end

   # Special case: Local MastodonFE
-  defp redirect_uri(conn, "."), do: mastodon_api_url(conn, :login)
+  defp redirect_uri(%Plug.Conn{} = conn, "."), do: mastodon_api_url(conn, :login)

-  defp redirect_uri(_conn, redirect_uri), do: redirect_uri
+  defp redirect_uri(%Plug.Conn{}, redirect_uri), do: redirect_uri

-  defp get_session_registration_id(conn), do: get_session(conn, :registration_id)
+  defp get_session_registration_id(%Plug.Conn{} = conn), do: get_session(conn, :registration_id)

-  defp put_session_registration_id(conn, registration_id),
+  defp put_session_registration_id(%Plug.Conn{} = conn, registration_id),
     do: put_session(conn, :registration_id, registration_id)

   @spec validate_scopes(App.t(), map()) ::
@@ -436,4 +462,10 @@ defp validate_scopes(app, params) do
     |> Scopes.fetch_scopes(app.scopes)
     |> Scopes.validates(app.scopes)
   end
+
+  def default_redirect_uri(%App{} = app) do
+    app.redirect_uris
+    |> String.split()
+    |> Enum.at(0)
+  end
 end
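Note: the recurring change in this controller is narrowing the function heads from a bare `conn` to a `%Plug.Conn{}` match. A minimal sketch of the effect (illustrative module and names, not part of the patch):

# Illustrative only: matching the %Plug.Conn{} struct in the head raises
# FunctionClauseError for anything that is not a Plug connection, instead of
# silently accepting an arbitrary term.
defmodule StructHeadSketch do
  def request(%Plug.Conn{} = conn, _params) do
    conn
  end
end

# StructHeadSketch.request(Plug.Test.conn(:get, "/"), %{})  # => %Plug.Conn{...}
# StructHeadSketch.request(%{}, %{})                        # => ** (FunctionClauseError)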
@@ -14,7 +14,6 @@ defmodule Pleroma.Web.OAuth.Token do
   alias Pleroma.Web.OAuth.Token
   alias Pleroma.Web.OAuth.Token.Query

-  @expires_in Pleroma.Config.get([:oauth2, :token_expires_in], 600)
   @type t :: %__MODULE__{}

   schema "oauth_tokens" do
@@ -78,7 +77,7 @@ defp put_refresh_token(changeset, attrs) do

   defp put_valid_until(changeset, attrs) do
     expires_in =
-      Map.get(attrs, :valid_until, NaiveDateTime.add(NaiveDateTime.utc_now(), @expires_in))
+      Map.get(attrs, :valid_until, NaiveDateTime.add(NaiveDateTime.utc_now(), expires_in()))

     changeset
     |> change(%{valid_until: expires_in})
@@ -123,4 +122,6 @@ def is_expired?(%__MODULE__{valid_until: valid_until}) do
   end

   def is_expired?(_), do: false
+
+  defp expires_in, do: Pleroma.Config.get([:oauth2, :token_expires_in], 600)
 end
@@ -4,15 +4,13 @@ defmodule Pleroma.Web.OAuth.Token.Response do
   alias Pleroma.User
   alias Pleroma.Web.OAuth.Token.Utils

-  @expires_in Pleroma.Config.get([:oauth2, :token_expires_in], 600)
-
   @doc false
   def build(%User{} = user, token, opts \\ %{}) do
     %{
       token_type: "Bearer",
       access_token: token.token,
       refresh_token: token.refresh_token,
-      expires_in: @expires_in,
+      expires_in: expires_in(),
       scope: Enum.join(token.scopes, " "),
       me: user.ap_id
     }
@@ -25,8 +23,10 @@ def build_for_client_credentials(token) do
       access_token: token.token,
       refresh_token: token.refresh_token,
       created_at: Utils.format_created_at(token),
-      expires_in: @expires_in,
+      expires_in: expires_in(),
       scope: Enum.join(token.scopes, " ")
     }
   end
+
+  defp expires_in, do: Pleroma.Config.get([:oauth2, :token_expires_in], 600)
 end
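Note: both OAuth token modules drop the compile-time `@expires_in` attribute in favour of a private `expires_in/0` call, so `[:oauth2, :token_expires_in]` is read when a token is issued rather than being baked in at compile time. A rough sketch of the difference, using stand-in app and key names:

# Sketch (stand-in :my_app / :token_expires_in): a module attribute freezes
# the value at compile time, while a function call reads the current config
# at runtime, so the setting can change without recompiling the module.
defmodule ExpirySketch do
  defp expires_in, do: Application.get_env(:my_app, :token_expires_in, 600)

  def valid_until(now \\ NaiveDateTime.utc_now()) do
    # 600-second default, overridable through application config.
    NaiveDateTime.add(now, expires_in())
  end
end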
@@ -10,7 +10,7 @@ defmodule Pleroma.Web.RelMe do
       with_body: true
     ]

-  if Mix.env() == :test do
+  if Pleroma.Config.get(:env) == :test do
     def parse(url) when is_binary(url), do: parse_url(url)
   else
     def parse(url) when is_binary(url) do
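Note: `Mix.env()` is replaced with `Pleroma.Config.get(:env)` here and in several files below because Mix is not available inside an OTP release; the build-time environment has to be captured into the application environment instead. A sketch of how that capture presumably looks in config (the actual config file is not part of this diff):

# config/config.exs (sketch; assumed mechanism, not shown in this commit) —
# store the build-time Mix env in the application environment so that
# Pleroma.Config.get(:env) can read it from a release, where Mix is absent.
use Mix.Config

config :pleroma, env: Mix.env()

# At runtime, Pleroma.Config.get(:env) is then roughly equivalent to
# Application.get_env(:pleroma, :env).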
@@ -4,25 +4,53 @@

 defmodule Pleroma.Web.RichMedia.Helpers do
   alias Pleroma.Activity
+  alias Pleroma.Config
   alias Pleroma.HTML
   alias Pleroma.Object
   alias Pleroma.Web.RichMedia.Parser

+  @spec validate_page_url(any()) :: :ok | :error
   defp validate_page_url(page_url) when is_binary(page_url) do
-    if AutoLinker.Parser.is_url?(page_url, true) do
-      URI.parse(page_url) |> validate_page_url
-    else
-      :error
+    validate_tld = Application.get_env(:auto_linker, :opts)[:validate_tld]
+
+    page_url
+    |> AutoLinker.Parser.url?(scheme: true, validate_tld: validate_tld)
+    |> parse_uri(page_url)
+  end
+
+  defp validate_page_url(%URI{host: host, scheme: scheme, authority: authority})
+       when scheme == "https" and not is_nil(authority) do
+    cond do
+      host in Config.get([:rich_media, :ignore_hosts], []) ->
+        :error
+
+      get_tld(host) in Config.get([:rich_media, :ignore_tld], []) ->
+        :error
+
+      true ->
+        :ok
     end
   end

-  defp validate_page_url(%URI{authority: nil}), do: :error
-  defp validate_page_url(%URI{scheme: nil}), do: :error
-  defp validate_page_url(%URI{}), do: :ok
   defp validate_page_url(_), do: :error

+  defp parse_uri(true, url) do
+    url
+    |> URI.parse()
+    |> validate_page_url
+  end
+
+  defp parse_uri(_, _), do: :error
+
+  defp get_tld(host) do
+    host
+    |> String.split(".")
+    |> Enum.reverse()
+    |> hd
+  end
+
   def fetch_data_for_activity(%Activity{data: %{"type" => "Create"}} = activity) do
-    with true <- Pleroma.Config.get([:rich_media, :enabled]),
+    with true <- Config.get([:rich_media, :enabled]),
          %Object{} = object <- Object.normalize(activity),
          false <- object.data["sensitive"] || false,
          {:ok, page_url} <- HTML.extract_first_external_url(object, object.data["content"]),
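Note: URL validation now pipes through `AutoLinker.Parser.url?/2` and a `cond` over the `:ignore_hosts` / `:ignore_tld` rich-media settings. A standalone copy of the `get_tld/1` idea added above, for illustration (hypothetical host and config values):

defmodule TldSketch do
  # The "TLD" here is simply the last dot-separated label of the host.
  def get_tld(host) do
    host
    |> String.split(".")
    |> Enum.reverse()
    |> hd()
  end
end

# TldSketch.get_tld("media.example.org")  # => "org"
# With ignore_tld set to e.g. ["onion"], a host like "foo.onion" is rejected.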
@@ -18,7 +18,7 @@ defmodule Pleroma.Web.RichMedia.Parser do

   def parse(nil), do: {:error, "No URL provided"}

-  if Mix.env() == :test do
+  if Pleroma.Config.get(:env) == :test do
     def parse(url), do: parse_url(url)
   else
     def parse(url) do
@@ -1,15 +1,19 @@
 defmodule Pleroma.Web.RichMedia.Parsers.MetaTagsParser do
   def parse(html, data, prefix, error_message, key_name, value_name \\ "content") do
-    with elements = [_ | _] <- get_elements(html, key_name, prefix),
-         meta_data =
-           Enum.reduce(elements, data, fn el, acc ->
-             attributes = normalize_attributes(el, prefix, key_name, value_name)
+    meta_data =
+      html
+      |> get_elements(key_name, prefix)
+      |> Enum.reduce(data, fn el, acc ->
+        attributes = normalize_attributes(el, prefix, key_name, value_name)

         Map.merge(acc, attributes)
-      end) do
-      {:ok, meta_data}
+      end)
+      |> maybe_put_title(html)
+
+    if Enum.empty?(meta_data) do
+      {:error, error_message}
     else
-      _e -> {:error, error_message}
+      {:ok, meta_data}
     end
   end

@@ -27,4 +31,19 @@ defp normalize_attributes(html_node, prefix, key_name, value_name) do

     %{String.to_atom(data[key_name]) => data[value_name]}
   end
+
+  defp maybe_put_title(%{title: _} = meta, _), do: meta
+
+  defp maybe_put_title(meta, html) when meta != %{} do
+    case get_page_title(html) do
+      "" -> meta
+      title -> Map.put_new(meta, :title, title)
+    end
+  end
+
+  defp maybe_put_title(meta, _), do: meta
+
+  defp get_page_title(html) do
+    Floki.find(html, "title") |> Floki.text()
+  end
 end
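Note: `maybe_put_title/2` falls back to the page's `<title>` element when the collected meta tags carry no `:title`. A quick usage sketch of that fallback with hypothetical HTML:

# Hypothetical HTML; Floki.find/2 accepts a raw HTML binary in the Floki
# version pinned by this project (0.20.x).
html = "<html><head><title>Example page</title></head><body></body></html>"

Floki.find(html, "title") |> Floki.text()
# => "Example page"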
@@ -27,6 +27,7 @@ defmodule Pleroma.Web.Router do
     plug(Pleroma.Plugs.UserEnabledPlug)
     plug(Pleroma.Plugs.SetUserSessionIdPlug)
     plug(Pleroma.Plugs.EnsureUserKeyPlug)
+    plug(Pleroma.Plugs.IdempotencyPlug)
   end

   pipeline :authenticated_api do
@@ -41,6 +42,7 @@ defmodule Pleroma.Web.Router do
     plug(Pleroma.Plugs.UserEnabledPlug)
     plug(Pleroma.Plugs.SetUserSessionIdPlug)
     plug(Pleroma.Plugs.EnsureAuthenticatedPlug)
+    plug(Pleroma.Plugs.IdempotencyPlug)
   end

   pipeline :admin_api do
@@ -57,6 +59,7 @@ defmodule Pleroma.Web.Router do
     plug(Pleroma.Plugs.SetUserSessionIdPlug)
     plug(Pleroma.Plugs.EnsureAuthenticatedPlug)
     plug(Pleroma.Plugs.UserIsAdminPlug)
+    plug(Pleroma.Plugs.IdempotencyPlug)
   end

   pipeline :mastodon_html do
@@ -133,8 +136,8 @@ defmodule Pleroma.Web.Router do
   scope "/api/pleroma", Pleroma.Web.TwitterAPI do
     pipe_through(:pleroma_api)

-    get("/password_reset/:token", UtilController, :show_password_reset)
-    post("/password_reset", UtilController, :password_reset)
+    get("/password_reset/:token", PasswordController, :reset, as: :reset_password)
+    post("/password_reset", PasswordController, :do_reset, as: :reset_password)
     get("/emoji", UtilController, :emoji)
     get("/captcha", UtilController, :captcha)
     get("/healthcheck", UtilController, :healthcheck)
@@ -202,6 +205,9 @@ defmodule Pleroma.Web.Router do

     put("/statuses/:id", AdminAPIController, :status_update)
     delete("/statuses/:id", AdminAPIController, :status_delete)
+
+    get("/config", AdminAPIController, :config_show)
+    post("/config", AdminAPIController, :config_update)
   end

   scope "/", Pleroma.Web.TwitterAPI do
@@ -412,7 +418,7 @@ defmodule Pleroma.Web.Router do

     get("/trends", MastodonAPIController, :empty_array)

-    get("/accounts/search", MastodonAPIController, :account_search)
+    get("/accounts/search", SearchController, :account_search)

     scope [] do
       pipe_through(:oauth_read_or_public)
@@ -431,7 +437,7 @@ defmodule Pleroma.Web.Router do
       get("/accounts/:id/following", MastodonAPIController, :following)
       get("/accounts/:id", MastodonAPIController, :user)

-      get("/search", MastodonAPIController, :search)
+      get("/search", SearchController, :search)

       get("/pleroma/accounts/:id/favourites", MastodonAPIController, :user_favourites)
     end
@@ -439,7 +445,7 @@ defmodule Pleroma.Web.Router do

   scope "/api/v2", Pleroma.Web.MastodonAPI do
     pipe_through([:api, :oauth_read_or_public])
-    get("/search", MastodonAPIController, :search2)
+    get("/search", SearchController, :search2)
   end

   scope "/api", Pleroma.Web do
@@ -606,12 +612,6 @@ defmodule Pleroma.Web.Router do
     get("/mailer/unsubscribe/:token", Mailer.SubscriptionController, :unsubscribe)
   end

-  scope "/", Pleroma.Web do
-    pipe_through(:oembed)
-
-    get("/oembed", OEmbed.OEmbedController, :url)
-  end
-
   pipeline :activitypub do
     plug(:accepts, ["activity+json", "json"])
     plug(Pleroma.Web.Plugs.HTTPSignaturePlug)
@@ -701,7 +701,7 @@ defmodule Pleroma.Web.Router do
     get("/:sig/:url/:filename", MediaProxyController, :remote)
   end

-  if Mix.env() == :dev do
+  if Pleroma.Config.get(:env) == :dev do
     scope "/dev" do
       pipe_through([:mailbox_preview])

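Note: `Pleroma.Plugs.IdempotencyPlug` is added to the API pipelines, presumably keyed on the `Idempotency-Key` request header so a retried POST is not applied twice. The sketch below is a generic illustration of that idea under those assumptions, not Pleroma's actual plug:

defmodule IdempotencySketch do
  # Generic illustration: remember the first response produced for a given
  # Idempotency-Key and replay it for retries carrying the same key.
  import Plug.Conn
  @behaviour Plug

  @table :idempotency_sketch

  def init(opts), do: opts

  def call(conn, _opts) do
    case get_req_header(conn, "idempotency-key") do
      [key] -> handle_key(ensure_table(conn), key)
      _ -> conn
    end
  end

  defp handle_key(conn, key) do
    case :ets.lookup(@table, key) do
      [{^key, status, body}] ->
        # Replay the cached response and stop the pipeline.
        conn |> send_resp(status, body) |> halt()

      [] ->
        register_before_send(conn, fn conn ->
          :ets.insert(@table, {key, conn.status, conn.resp_body})
          conn
        end)
    end
  end

  defp ensure_table(conn) do
    if :ets.whereis(@table) == :undefined do
      :ets.new(@table, [:named_table, :public])
    end

    conn
  end
end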
@@ -146,7 +146,7 @@ def publish_one(%{recipient: url, feed: feed} = params) when is_binary(url) do
       do: Instances.set_reachable(url)

     Logger.debug(fn -> "Pushed to #{url}, code #{code}" end)
-    :ok
+    {:ok, code}
   else
     e ->
       unless params[:unreachable_since], do: Instances.set_reachable(url)
@@ -110,23 +110,18 @@ def handle_cast(%{action: :stream, topic: "list", item: item}, topics) do
     {:noreply, topics}
   end

-  def handle_cast(%{action: :stream, topic: "user", item: %Notification{} = item}, topics) do
-    topic = "user:#{item.user_id}"
-
-    Enum.each(topics[topic] || [], fn socket ->
-      json =
-        %{
-          event: "notification",
-          payload:
-            NotificationView.render("show.json", %{
-              notification: item,
-              for: socket.assigns["user"]
-            })
-            |> Jason.encode!()
-        }
-        |> Jason.encode!()
-
-      send(socket.transport_pid, {:text, json})
+  def handle_cast(
+        %{action: :stream, topic: topic, item: %Notification{} = item},
+        topics
+      )
+      when topic in ["user", "user:notification"] do
+    topics
+    |> Map.get("#{topic}:#{item.user_id}", [])
+    |> Enum.each(fn socket ->
+      send(
+        socket.transport_pid,
+        {:text, represent_notification(socket.assigns[:user], item)}
+      )
     end)

     {:noreply, topics}
@@ -216,6 +211,20 @@ def represent_conversation(%Participation{} = participation) do
     |> Jason.encode!()
   end

+  @spec represent_notification(User.t(), Notification.t()) :: binary()
+  defp represent_notification(%User{} = user, %Notification{} = notify) do
+    %{
+      event: "notification",
+      payload:
+        NotificationView.render(
+          "show.json",
+          %{notification: notify, for: user}
+        )
+        |> Jason.encode!()
+    }
+    |> Jason.encode!()
+  end
+
   def push_to_socket(topics, topic, %Activity{data: %{"type" => "Announce"}} = item) do
     Enum.each(topics[topic] || [], fn socket ->
       # Get the current user so we have up-to-date blocks etc.
@@ -274,7 +283,7 @@ def push_to_socket(topics, topic, item) do
     end)
   end

-  defp internal_topic(topic, socket) when topic in ~w[user direct] do
+  defp internal_topic(topic, socket) when topic in ~w[user user:notification direct] do
     "#{topic}:#{socket.assigns[:user].id}"
   end
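Note: the notification payload rendering moves into `represent_notification/2`, and the same handler now serves both the `user` and `user:notification` streams. As the extracted function shows, the payload is double-encoded; a small illustration with hypothetical data:

# The rendered notification is JSON-encoded first, then embedded as a
# *string* field of the outer streaming event, so clients decode twice —
# once for the frame, once for the payload.
payload = Jason.encode!(%{id: "1", type: "mention"})
frame = Jason.encode!(%{event: "notification", payload: payload})
# Jason.decode!(frame)["payload"] is a string containing JSON, not a map.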
@@ -0,0 +1,2 @@
+<h1>Authorization exists</h1>
+<h2>Access token is <%= @token.token %></h2>
@@ -1,5 +1,5 @@
 <h2>Password Reset for <%= @user.nickname %></h2>
-<%= form_for @conn, util_path(@conn, :password_reset), [as: "data"], fn f -> %>
+<%= form_for @conn, reset_password_path(@conn, :do_reset), [as: "data"], fn f -> %>
   <div class="form-row">
     <%= label f, :password, "Password" %>
     <%= password_input f, :password %>
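Note: the template now calls `reset_password_path/2`, which exists because the new routes are declared with `as: :reset_password`. A minimal sketch of how that option names the generated helper (hypothetical standalone router, not the project's router):

defmodule ResetRouterSketch do
  use Phoenix.Router

  # Both routes share one helper name, reset_password_path, in the generated
  # Helpers module, instead of the default controller-derived name.
  get("/password_reset/:token", Pleroma.Web.TwitterAPI.PasswordController, :reset,
    as: :reset_password
  )

  post("/password_reset", Pleroma.Web.TwitterAPI.PasswordController, :do_reset,
    as: :reset_password
  )
end

# ResetRouterSketch.Helpers.reset_password_path(conn, :reset, token)
# ResetRouterSketch.Helpers.reset_password_path(conn, :do_reset)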
@@ -0,0 +1,37 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.TwitterAPI.PasswordController do
+  @moduledoc """
+  The module containts functions for reset password.
+  """
+
+  use Pleroma.Web, :controller
+
+  require Logger
+
+  alias Pleroma.PasswordResetToken
+  alias Pleroma.Repo
+  alias Pleroma.User
+
+  def reset(conn, %{"token" => token}) do
+    with %{used: false} = token <- Repo.get_by(PasswordResetToken, %{token: token}),
+         %User{} = user <- User.get_cached_by_id(token.user_id) do
+      render(conn, "reset.html", %{
+        token: token,
+        user: user
+      })
+    else
+      _e -> render(conn, "invalid_token.html")
+    end
+  end
+
+  def do_reset(conn, %{"data" => data}) do
+    with {:ok, _} <- PasswordResetToken.reset_password(data["token"], data) do
+      render(conn, "reset_success.html")
+    else
+      _e -> render(conn, "reset_failed.html")
+    end
+  end
+end
@@ -11,8 +11,6 @@ defmodule Pleroma.Web.TwitterAPI.UtilController do
   alias Pleroma.Activity
   alias Pleroma.Emoji
   alias Pleroma.Notification
-  alias Pleroma.PasswordResetToken
-  alias Pleroma.Repo
   alias Pleroma.User
   alias Pleroma.Web
   alias Pleroma.Web.ActivityPub.ActivityPub
@@ -20,26 +18,6 @@ defmodule Pleroma.Web.TwitterAPI.UtilController do
   alias Pleroma.Web.OStatus
   alias Pleroma.Web.WebFinger

-  def show_password_reset(conn, %{"token" => token}) do
-    with %{used: false} = token <- Repo.get_by(PasswordResetToken, %{token: token}),
-         %User{} = user <- User.get_cached_by_id(token.user_id) do
-      render(conn, "password_reset.html", %{
-        token: token,
-        user: user
-      })
-    else
-      _e -> render(conn, "invalid_token.html")
-    end
-  end
-
-  def password_reset(conn, %{"data" => data}) do
-    with {:ok, _} <- PasswordResetToken.reset_password(data["token"], data) do
-      render(conn, "password_reset_success.html")
-    else
-      _e -> render(conn, "password_reset_failed.html")
-    end
-  end
-
   def help_test(conn, _params) do
     json(conn, "ok")
   end
lib/pleroma/web/twitter_api/views/password_view.ex (new file, 8 lines)
@@ -0,0 +1,8 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.TwitterAPI.PasswordView do
+  use Pleroma.Web, :view
+  import Phoenix.HTML.Form
+end
@@ -13,7 +13,7 @@ def render("404.json", _assigns) do
   def render("500.json", assigns) do
     Logger.error("Internal server error: #{inspect(assigns[:reason])}")

-    if Mix.env() != :prod do
+    if Pleroma.Config.get(:env) != :prod do
       %{errors: %{detail: "Internal server error", reason: inspect(assigns[:reason])}}
     else
       %{errors: %{detail: "Internal server error"}}
mix.exs (51 lines changed)
@@ -4,7 +4,7 @@ defmodule Pleroma.Mixfile do
   def project do
     [
       app: :pleroma,
-      version: version("0.9.0"),
+      version: version("1.0.0"),
       elixir: "~> 1.7",
       elixirc_paths: elixirc_paths(Mix.env()),
       compilers: [:phoenix, :gettext] ++ Mix.compilers(),
@@ -32,10 +32,31 @@ def project do
        ],
        main: "readme",
        output: "priv/static/doc"
+      ],
+      releases: [
+        pleroma: [
+          include_executables_for: [:unix],
+          applications: [ex_syslogger: :load, syslog: :load],
+          steps: [:assemble, &copy_files/1, &copy_nginx_config/1]
+        ]
       ]
     ]
   end

+  def copy_files(%{path: target_path} = release) do
+    File.cp_r!("./rel/files", target_path)
+    release
+  end
+
+  def copy_nginx_config(%{path: target_path} = release) do
+    File.cp!(
+      "./installation/pleroma.nginx",
+      Path.join([target_path, "installation", "pleroma.nginx"])
+    )
+
+    release
+  end
+
   # Configuration for the OTP application.
   #
   # Type `mix help compile.app` for more information.
@@ -73,7 +94,7 @@ defp oauth_deps do
   # Type `mix help deps` for examples and options.
   defp deps do
     [
-      {:phoenix, "~> 1.4.1"},
+      {:phoenix, "~> 1.4.8"},
       {:plug_cowboy, "~> 2.0"},
       {:phoenix_pubsub, "~> 1.1"},
       {:phoenix_ecto, "~> 4.0"},
@@ -96,7 +117,7 @@ defp deps do
       {:ex_aws, "~> 2.0"},
      {:ex_aws_s3, "~> 2.0"},
      {:earmark, "~> 1.3"},
-      {:bbcode, "~> 0.1"},
+      {:bbcode, "~> 0.1.1"},
      {:ex_machina, "~> 2.3", only: :test},
      {:credo, "~> 0.9.3", only: [:dev, :test]},
      {:mock, "~> 0.3.3", only: :test},
@@ -115,7 +136,7 @@ defp deps do
      {:ueberauth, "~> 0.4"},
      {:auto_linker,
       git: "https://git.pleroma.social/pleroma/auto_linker.git",
-      ref: "c00c4e75b35367fa42c95ffd9b8c455bf9995829"},
+      ref: "95e8188490e97505c56636c1379ffdf036c1fdde"},
      {:http_signatures,
       git: "https://git.pleroma.social/pleroma/http_signatures.git",
       ref: "9789401987096ead65646b52b5a2ca6bf52fc531"},
@@ -125,14 +146,13 @@ defp deps do
      {:prometheus_plugs, "~> 1.1"},
      {:prometheus_phoenix, "~> 1.2"},
      {:prometheus_ecto, "~> 1.4"},
-     {:prometheus_process_collector, "~> 1.4"},
      {:recon, github: "ferd/recon", tag: "2.4.0"},
      {:quack, "~> 0.1.1"},
      {:quantum, "~> 2.3"},
      {:joken, "~> 2.0"},
      {:benchee, "~> 1.0"},
      {:esshd, "~> 0.1.0", runtime: Application.get_env(:esshd, :enabled, false)},
-     {:ex_rated, "~> 1.2"},
+     {:ex_rated, "~> 1.3"},
      {:plug_static_index_html, "~> 1.0.0"},
      {:excoveralls, "~> 0.11.1", only: :test}
     ] ++ oauth_deps()
@@ -146,6 +166,8 @@ defp deps do
   # See the documentation for `Mix` for more info on aliases.
   defp aliases do
     [
+      "ecto.migrate": ["pleroma.ecto.migrate"],
+      "ecto.rollback": ["pleroma.ecto.rollback"],
       "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
       "ecto.reset": ["ecto.drop", "ecto.setup"],
       test: ["ecto.create --quiet", "ecto.migrate", "test"]
@@ -168,7 +190,9 @@ defp version(version) do
          ahead <- String.replace(describe, tag, "") do
       {String.replace_prefix(tag, "v", ""), if(ahead != "", do: String.trim(ahead))}
     else
-      _ -> {nil, nil}
+      _ ->
+        {commit_hash, 0} = System.cmd("git", ["rev-parse", "--short", "HEAD"])
+        {nil, "-0-g" <> String.trim(commit_hash)}
     end

     if git_tag && version != git_tag do
@@ -195,7 +219,18 @@ defp version(version) do
         string -> "+" <> string
       end).()

-    [version, git_pre_release, build]
+    branch_name =
+      with {branch_name, 0} <- System.cmd("git", ["rev-parse", "--abbrev-ref", "HEAD"]),
+           branch_name <- System.get_env("PLEROMA_BUILD_BRANCH") || branch_name,
+           true <- branch_name != "master" do
+        branch_name =
+          String.trim(branch_name)
+          |> String.replace(~r/[^0-9a-z\-\.]+/i, "-")
+
+        "-" <> branch_name
+      end
+
+    [version, git_pre_release, branch_name, build]
     |> Enum.filter(fn string -> string && string != "" end)
     |> Enum.join()
   end
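Note: the custom release steps receive and return the `%Mix.Release{}` struct, and the version string gains a branch-name component (fed by `PLEROMA_BUILD_BRANCH`). A rough illustration of how the final version string is assembled, using hypothetical component values:

# nil or empty parts are filtered out before joining, as in the pipeline above.
components = ["1.0.0", "-0-gabc1234", "-develop", "+build.42"]

components
|> Enum.filter(fn string -> string && string != "" end)
|> Enum.join()
# => "1.0.0-0-gabc1234-develop+build.42"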
mix.lock (14 lines changed)
@@ -1,8 +1,8 @@
 %{
   "accept": {:hex, :accept, "0.3.5", "b33b127abca7cc948bbe6caa4c263369abf1347cfa9d8e699c6d214660f10cd1", [:rebar3], [], "hexpm"},
-  "auto_linker": {:git, "https://git.pleroma.social/pleroma/auto_linker.git", "c00c4e75b35367fa42c95ffd9b8c455bf9995829", [ref: "c00c4e75b35367fa42c95ffd9b8c455bf9995829"]},
+  "auto_linker": {:git, "https://git.pleroma.social/pleroma/auto_linker.git", "95e8188490e97505c56636c1379ffdf036c1fdde", [ref: "95e8188490e97505c56636c1379ffdf036c1fdde"]},
   "base64url": {:hex, :base64url, "0.0.1", "36a90125f5948e3afd7be97662a1504b934dd5dac78451ca6e9abf85a10286be", [:rebar], [], "hexpm"},
-  "bbcode": {:hex, :bbcode, "0.1.0", "400e618b640b635261611d7fb7f79d104917fc5b084aae371ab6b08477cb035b", [:mix], [{:nimble_parsec, "~> 0.5", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"},
+  "bbcode": {:hex, :bbcode, "0.1.1", "0023e2c7814119b2e620b7add67182e3f6019f92bfec9a22da7e99821aceba70", [:mix], [{:nimble_parsec, "~> 0.5", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"},
   "benchee": {:hex, :benchee, "1.0.1", "66b211f9bfd84bd97e6d1beaddf8fc2312aaabe192f776e8931cb0c16f53a521", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}], "hexpm"},
   "bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm"},
   "cachex": {:hex, :cachex, "3.0.3", "4e2d3e05814a5738f5ff3903151d5c25636d72a3527251b753f501ad9c657967", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm"},
@@ -30,7 +30,7 @@
   "ex_aws_s3": {:hex, :ex_aws_s3, "2.0.1", "9e09366e77f25d3d88c5393824e613344631be8db0d1839faca49686e99b6704", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm"},
   "ex_doc": {:hex, :ex_doc, "0.20.2", "1bd0dfb0304bade58beb77f20f21ee3558cc3c753743ae0ddbb0fd7ba2912331", [:mix], [{:earmark, "~> 1.3", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.10", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"},
   "ex_machina": {:hex, :ex_machina, "2.3.0", "92a5ad0a8b10ea6314b876a99c8c9e3f25f4dde71a2a835845b136b9adaf199a", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm"},
-  "ex_rated": {:hex, :ex_rated, "1.3.2", "6aeb32abb46ea6076f417a9ce8cb1cf08abf35fb2d42375beaad4dd72b550bf1", [:mix], [{:ex2ms, "~> 1.5", [hex: :ex2ms, repo: "hexpm", optional: false]}], "hexpm"},
+  "ex_rated": {:hex, :ex_rated, "1.3.3", "30ecbdabe91f7eaa9d37fa4e81c85ba420f371babeb9d1910adbcd79ec798d27", [:mix], [{:ex2ms, "~> 1.5", [hex: :ex2ms, repo: "hexpm", optional: false]}], "hexpm"},
   "ex_syslogger": {:git, "https://github.com/slashmili/ex_syslogger.git", "f3963399047af17e038897c69e20d552e6899e1d", [tag: "1.4.0"]},
   "excoveralls": {:hex, :excoveralls, "0.11.1", "dd677fbdd49114fdbdbf445540ec735808250d56b011077798316505064edb2c", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"},
   "floki": {:hex, :floki, "0.20.4", "be42ac911fece24b4c72f3b5846774b6e61b83fe685c2fc9d62093277fb3bc86", [:mix], [{:html_entities, "~> 0.4.0", [hex: :html_entities, repo: "hexpm", optional: false]}, {:mochiweb, "~> 2.15", [hex: :mochiweb, repo: "hexpm", optional: false]}], "hexpm"},
@@ -59,14 +59,14 @@
   "mogrify": {:hex, :mogrify, "0.6.1", "de1b527514f2d95a7bbe9642eb556061afb337e220cf97adbf3a4e6438ed70af", [:mix], [], "hexpm"},
   "nimble_parsec": {:hex, :nimble_parsec, "0.5.0", "90e2eca3d0266e5c53f8fbe0079694740b9c91b6747f2b7e3c5d21966bba8300", [:mix], [], "hexpm"},
   "parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm"},
-  "pbkdf2_elixir": {:hex, :pbkdf2_elixir, "0.12.4", "8dd29ed783f2e12195d7e0a4640effc0a7c37e6537da491f1db01839eee6d053", [:mix], [], "hexpm"},
+  "pbkdf2_elixir": {:hex, :pbkdf2_elixir, "0.12.3", "6706a148809a29c306062862c803406e88f048277f6e85b68faf73291e820b84", [:mix], [], "hexpm"},
-  "phoenix": {:hex, :phoenix, "1.4.3", "8eed4a64ff1e12372cd634724bddd69185938f52c18e1396ebac76375d85677d", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 1.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 1.0 or ~> 2.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}], "hexpm"},
+  "phoenix": {:hex, :phoenix, "1.4.8", "c72dc3adeb49c70eb963a0ea24f7a064ec1588e651e84e1b7ad5ed8253c0b4a2", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 1.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.8.1 or ~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 1.0 or ~> 2.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
   "phoenix_ecto": {:hex, :phoenix_ecto, "4.0.0", "c43117a136e7399ea04ecaac73f8f23ee0ffe3e07acfcb8062fe5f4c9f0f6531", [:mix], [{:ecto, "~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.9", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
-  "phoenix_html": {:hex, :phoenix_html, "2.13.2", "f5d27c9b10ce881a60177d2b5227314fc60881e6b66b41dfe3349db6ed06cf57", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
+  "phoenix_html": {:hex, :phoenix_html, "2.13.1", "fa8f034b5328e2dfa0e4131b5569379003f34bc1fafdaa84985b0b9d2f12e68b", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
   "phoenix_pubsub": {:hex, :phoenix_pubsub, "1.1.2", "496c303bdf1b2e98a9d26e89af5bba3ab487ba3a3735f74bf1f4064d2a845a3e", [:mix], [], "hexpm"},
   "phoenix_swoosh": {:hex, :phoenix_swoosh, "0.2.0", "a7e0b32077cd6d2323ae15198839b05d9caddfa20663fd85787479e81f89520e", [:mix], [{:phoenix, "~> 1.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.2", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:swoosh, "~> 0.1", [hex: :swoosh, repo: "hexpm", optional: false]}], "hexpm"},
   "pleroma_job_queue": {:hex, :pleroma_job_queue, "0.2.0", "879e660aa1cebe8dc6f0aaaa6aa48b4875e89cd961d4a585fd128e0773b31a18", [:mix], [], "hexpm"},
-  "plug": {:hex, :plug, "1.7.2", "d7b7db7fbd755e8283b6c0a50be71ec0a3d67d9213d74422d9372effc8e87fd1", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}], "hexpm"},
+  "plug": {:hex, :plug, "1.8.2", "0bcce1daa420f189a6491f3940cc77ea7fb1919761175c9c3b59800d897440fc", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm"},
   "plug_cowboy": {:hex, :plug_cowboy, "2.0.2", "6055f16868cc4882b24b6e1d63d2bada94fb4978413377a3b32ac16c18dffba2", [:mix], [{:cowboy, "~> 2.5", [hex: :cowboy, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
   "plug_crypto": {:hex, :plug_crypto, "1.0.0", "18e49317d3fa343f24620ed22795ec29d4a5e602d52d1513ccea0b07d8ea7d4d", [:mix], [], "hexpm"},
   "plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
Some files were not shown because too many files have changed in this diff.