forked from AkkomaGang/akkoma
Merge branch 'develop' into feature/matstodon-statuses-by-name
Commit ffb4eb9779
872 changed files with 18887 additions and 3937 deletions
.buildpacks (new file, 1 line)

@@ -0,0 +1 @@
+https://github.com/hashnuke/heroku-buildpack-elixir
.gitlab-ci.yml (153 lines changed)

@@ -16,6 +16,7 @@ stages:
   - build
   - test
   - deploy
+  - release

 before_script:
   - mix local.hex --force

@@ -34,6 +35,7 @@ docs-build:
     - develop@pleroma/pleroma
   variables:
     MIX_ENV: dev
+    PLEROMA_BUILD_ENV: prod
   script:
     - mix deps.get
     - mix compile

@@ -42,6 +44,7 @@ docs-build:
     paths:
       - priv/static/doc

 unit-testing:
   stage: test
   services:

@@ -52,8 +55,7 @@ unit-testing:
     - mix deps.get
     - mix ecto.create
     - mix ecto.migrate
-    - mix test --trace --preload-modules
-    - mix coveralls
+    - mix coveralls --trace --preload-modules

 unit-testing-rum:
   stage: test

@@ -95,3 +97,150 @@ docs-deploy:
     - eval $(ssh-agent -s)
     - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
     - rsync -hrvz --delete -e "ssh -p ${SSH_PORT}" priv/static/doc/ "${SSH_USER_HOST_LOCATION}/${CI_COMMIT_REF_NAME}"
+
+review_app:
+  image: alpine:3.9
+  stage: deploy
+  before_script:
+    - apk update && apk add openssh-client git
+  when: manual
+  environment:
+    name: review/$CI_COMMIT_REF_NAME
+    url: https://$CI_ENVIRONMENT_SLUG.pleroma.online/
+    on_stop: stop_review_app
+  only:
+    - branches
+  except:
+    - master
+    - develop
+  script:
+    - echo "$CI_ENVIRONMENT_SLUG"
+    - mkdir -p ~/.ssh
+    - eval $(ssh-agent -s)
+    - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
+    - ssh-keyscan -H "pleroma.online" >> ~/.ssh/known_hosts
+    - (ssh -t dokku@pleroma.online -- apps:create "$CI_ENVIRONMENT_SLUG") || true
+    - ssh -t dokku@pleroma.online -- config:set "$CI_ENVIRONMENT_SLUG" APP_NAME="$CI_ENVIRONMENT_SLUG" APP_HOST="$CI_ENVIRONMENT_SLUG.pleroma.online" MIX_ENV=dokku
+    - (ssh -t dokku@pleroma.online -- postgres:create $(echo $CI_ENVIRONMENT_SLUG | sed -e 's/-/_/g')_db) || true
+    - (ssh -t dokku@pleroma.online -- postgres:link $(echo $CI_ENVIRONMENT_SLUG | sed -e 's/-/_/g')_db "$CI_ENVIRONMENT_SLUG") || true
+    - (ssh -t dokku@pleroma.online -- certs:add "$CI_ENVIRONMENT_SLUG" /home/dokku/server.crt /home/dokku/server.key) || true
+    - git push -f dokku@pleroma.online:$CI_ENVIRONMENT_SLUG $CI_COMMIT_SHA:refs/heads/master
+
+stop_review_app:
+  image: alpine:3.9
+  stage: deploy
+  before_script:
+    - apk update && apk add openssh-client git
+  when: manual
+  environment:
+    name: review/$CI_COMMIT_REF_NAME
+    action: stop
+  script:
+    - echo "$CI_ENVIRONMENT_SLUG"
+    - mkdir -p ~/.ssh
+    - eval $(ssh-agent -s)
+    - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
+    - ssh-keyscan -H "pleroma.online" >> ~/.ssh/known_hosts
+    - ssh -t dokku@pleroma.online -- --force apps:destroy "$CI_ENVIRONMENT_SLUG"
+    - ssh -t dokku@pleroma.online -- --force postgres:destroy $(echo $CI_ENVIRONMENT_SLUG | sed -e 's/-/_/g')_db
+
+amd64:
+  stage: release
+  # TODO: Replace with upstream image when 1.9.0 comes out
+  image: rinpatch/elixir:1.9.0-rc.0
+  only: &release-only
+    - master@pleroma/pleroma
+    - develop@pleroma/pleroma
+  artifacts: &release-artifacts
+    name: "pleroma-$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA-$CI_JOB_NAME"
+    paths:
+      - release/*
+    # Ideally it would be never for master branch and with the next commit for develop,
+    # but Gitlab does not support neither `only` for artifacts
+    # nor setting it to never from .gitlab-ci.yml
+    # nor expiring with the next commit
+    expire_in: 42 yrs
+
+  cache: &release-cache
+    key: $CI_COMMIT_REF_NAME-$CI_JOB_NAME
+    paths:
+      - deps
+  variables: &release-variables
+    MIX_ENV: prod
+  before_script: &before-release
+    - echo "import Mix.Config" > config/prod.secret.exs
+    - mix local.hex --force
+    - mix local.rebar --force
+  script: &release
+    - mix deps.get --only prod
+    - mkdir release
+    - export PLEROMA_BUILD_BRANCH=$CI_COMMIT_REF_NAME
+    - mix release --path release
+
+amd64-musl:
+  stage: release
+  artifacts: *release-artifacts
+  only: *release-only
+  # TODO: Replace with upstream image when 1.9.0 comes out
+  image: rinpatch/elixir:1.9.0-rc.0-alpine
+  cache: *release-cache
+  variables: *release-variables
+  before_script: &before-release-musl
+    - apk add git gcc g++ musl-dev make
+    - echo "import Mix.Config" > config/prod.secret.exs
+    - mix local.hex --force
+    - mix local.rebar --force
+  script: *release
+
+arm:
+  stage: release
+  artifacts: *release-artifacts
+  only: *release-only
+  tags:
+    - arm32
+  # TODO: Replace with upstream image when 1.9.0 comes out
+  image: rinpatch/elixir:1.9.0-rc.0-arm
+  cache: *release-cache
+  variables: *release-variables
+  before_script: *before-release
+  script: *release
+
+arm-musl:
+  stage: release
+  artifacts: *release-artifacts
+  only: *release-only
+  tags:
+    - arm32
+  # TODO: Replace with upstream image when 1.9.0 comes out
+  image: rinpatch/elixir:1.9.0-rc.0-arm-alpine
+  cache: *release-cache
+  variables: *release-variables
+  before_script: *before-release-musl
+  script: *release
+
+arm64:
+  stage: release
+  artifacts: *release-artifacts
+  only: *release-only
+  tags:
+    - arm
+  # TODO: Replace with upstream image when 1.9.0 comes out
+  image: rinpatch/elixir:1.9.0-rc.0-arm64
+  cache: *release-cache
+  variables: *release-variables
+  before_script: *before-release
+  script: *release
+
+arm64-musl:
+  stage: release
+  artifacts: *release-artifacts
+  only: *release-only
+  tags:
+    - arm
+  # TODO: Replace with upstream image when 1.9.0 comes out
+  image: rinpatch/elixir:1.9.0-rc.0-arm64-alpine
+  cache: *release-cache
+  variables: *release-variables
+  before_script: *before-release-musl
+  script: *release
.mailmap (new file, 2 lines)

@@ -0,0 +1,2 @@
+Ariadne Conill <ariadne@dereferenced.org> <nenolod@dereferenced.org>
+Ariadne Conill <ariadne@dereferenced.org> <nenolod@gmail.com>
CC-BY-NC-ND-4.0 (deleted, 403 lines)

@@ -1,403 +0,0 @@
-Attribution-NonCommercial-NoDerivatives 4.0 International
-[... full text of the Creative Commons BY-NC-ND 4.0 license removed ...]
CHANGELOG.md (81 lines changed)
@@ -3,12 +3,64 @@ All notable changes to this project will be documented in this file.

 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

-## [unreleased]
+## [Unreleased]
+### Changed
+- **Breaking:** Configuration: A setting to explicitly disable the mailer was added, defaulting to true; if you are using a mailer, add `config :pleroma, Pleroma.Emails.Mailer, enabled: true` to your config
+- Configuration: OpenGraph and TwitterCard providers enabled by default
+- Configuration: Filter.AnonymizeFilename added ability to retain file extension with custom text
+- Federation: Return 403 errors when trying to request pages from a user's follower/following collections if they have `hide_followers`/`hide_follows` set
+- NodeInfo: Return `skipThreadContainment` in `metadata` for the `skip_thread_containment` option
+- Mastodon API: Unsubscribe followers when they unfollow a user
+
+### Fixed
+- Not being able to pin unlisted posts
+- Metadata rendering errors resulting in the entire page being inaccessible
+- Federation/MediaProxy not working with instances that have wrong certificate order
+- Mastodon API: Handling of search timeouts (`/api/v1/search` and `/api/v2/search`)
+- Mastodon API: Embedded relationships not being properly rendered in the Account entity of Status entity
+- Mastodon API: Add `account_id`, `type`, `offset`, and `limit` to search API (`/api/v1/search` and `/api/v2/search`)
+- ActivityPub C2S: follower/following collection pages being inaccessible even when authenticated if `hide_followers`/`hide_follows` was set
+
 ### Added
+- MRF: Support for priming the mediaproxy cache (`Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy`)
+- MRF: Support for excluding specific domains from Transparency.
+- Configuration: `federation_incoming_replies_max_depth` option
+- Mastodon API: Support for the [`tagged` filter](https://github.com/tootsuite/mastodon/pull/9755) in [`GET /api/v1/accounts/:id/statuses`](https://docs.joinmastodon.org/api/rest/accounts/#get-api-v1-accounts-id-statuses)
+- Mastodon API, streaming: Add support for passing the token in the `Sec-WebSocket-Protocol` header
+- Mastodon API, extension: Ability to reset avatar, profile banner, and background
+- Mastodon API: Add support for categories for custom emojis by reusing the group feature. <https://github.com/tootsuite/mastodon/pull/11196>
+- Mastodon API: Add support for muting/unmuting notifications
+- Admin API: Return users' tags when querying reports
+- Admin API: Return avatar and display name when querying users
+- Admin API: Allow querying user by ID
+- Admin API: Added support for `tuples`.
+- Added synchronization of following/followers counters for external users
+- Configuration: `enabled` option for `Pleroma.Emails.Mailer`, defaulting to `false`.
+- Configuration: Pleroma.Plugs.RateLimiter `bucket_name`, `params` options.
+- Addressable lists
+
+### Changed
+- Configuration: Filter.AnonymizeFilename added ability to retain file extension with custom text
+- Admin API: changed json structure for saving config settings.
+- RichMedia: parsers and their order are configured in `rich_media` config.
+
+## [1.0.1] - 2019-07-14
+### Security
+- OStatus: fix an object spoofing vulnerability.
+
+## [1.0.0] - 2019-06-29
+### Security
+- Mastodon API: Fix display names not being sanitized
+- Rich media: Do not crawl private IP ranges
+
+### Added
+- Add a generic settings store for frontends / clients to use.
+- Explicit addressing option for posting.
 - Optional SSH access mode. (Needs `erlang-ssh` package on some distributions).
 - [MongooseIM](https://github.com/esl/MongooseIM) http authentication support.
 - LDAP authentication
 - External OAuth provider authentication
+- Support for building a release using [`mix release`](https://hexdocs.pm/mix/master/Mix.Tasks.Release.html)
 - A [job queue](https://git.pleroma.social/pleroma/pleroma_job_queue) for federation, emails, web push, etc.
 - [Prometheus](https://prometheus.io/) metrics
 - Support for Mastodon's remote interaction
@@ -16,13 +68,19 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Mix Tasks: `mix pleroma.database remove_embedded_objects`
 - Mix Tasks: `mix pleroma.database update_users_following_followers_counts`
 - Mix Tasks: `mix pleroma.user toggle_confirmed`
+- Mix Tasks: `mix pleroma.config migrate_to_db`
+- Mix Tasks: `mix pleroma.config migrate_from_db`
+- Federation: Support for `Question` and `Answer` objects
 - Federation: Support for reports
+- Configuration: `poll_limits` option
+- Configuration: `pack_extensions` option
 - Configuration: `safe_dm_mentions` option
 - Configuration: `link_name` option
 - Configuration: `fetch_initial_posts` option
 - Configuration: `notify_email` option
 - Configuration: Media proxy `whitelist` option
 - Configuration: `report_uri` option
+- Configuration: `limit_to_local_content` option
 - Pleroma API: User subscriptions
 - Pleroma API: Healthcheck endpoint
 - Pleroma API: `/api/v1/pleroma/mascot` per-user frontend mascot configuration endpoints

@@ -31,12 +89,16 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Admin API: added filters (role, tags, email, name) for users endpoint
 - Admin API: Endpoints for managing reports
 - Admin API: Endpoints for deleting and changing the scope of individual reported statuses
+- Admin API: Endpoints to view and change config settings.
 - AdminFE: initial release with basic user management accessible at /pleroma/admin/
+- Mastodon API: Add chat token to `verify_credentials` response
+- Mastodon API: Add background image setting to `update_credentials`
 - Mastodon API: [Scheduled statuses](https://docs.joinmastodon.org/api/rest/scheduled-statuses/)
 - Mastodon API: `/api/v1/notifications/destroy_multiple` (glitch-soc extension)
 - Mastodon API: `/api/v1/pleroma/accounts/:id/favourites` (API extension)
 - Mastodon API: [Reports](https://docs.joinmastodon.org/api/rest/reports/)
 - Mastodon API: `POST /api/v1/accounts` (account creation API)
+- Mastodon API: [Polls](https://docs.joinmastodon.org/api/rest/polls/)
 - ActivityPub C2S: OAuth endpoints
 - Metadata: RelMe provider
 - OAuth: added support for refresh tokens

@@ -45,9 +107,18 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - OAuth: added job to clean expired access tokens
 - MRF: Support for rejecting reports from specific instances (`mrf_simple`)
 - MRF: Support for stripping avatars and banner images from specific instances (`mrf_simple`)
+- MRF: Support for running subchains.
+- Configuration: `skip_thread_containment` option
+- Configuration: `rate_limit` option. See `Pleroma.Plugs.RateLimiter` documentation for details.
+- MRF: Support for filtering out likely spam messages by rejecting posts from new users that contain links.
+- Configuration: `ignore_hosts` option
+- Configuration: `ignore_tld` option
+- Configuration: default syslog tag "Pleroma" is now lowercased to "pleroma"

 ### Changed
+- **Breaking:** bind to 127.0.0.1 instead of 0.0.0.0 by default
 - **Breaking:** Configuration: move from Pleroma.Mailer to Pleroma.Emails.Mailer
+- Thread containment / test for complete visibility will be skipped by default.
 - Enforcement of OAuth scopes
 - Add multiple use/time expiring invite token
 - Restyled OAuth pages to fit with Pleroma's default theme

@@ -56,6 +127,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Federation: Expand the audience of delete activities to all recipients of the deleted object
 - Federation: Removed `inReplyToStatusId` from objects
 - Configuration: Dedupe enabled by default
+- Configuration: Default log level in `prod` environment is now set to `warn`
 - Configuration: Added `extra_cookie_attrs` for setting non-standard cookie attributes. Defaults to ["SameSite=Lax"] so that remote follows work.
 - Timelines: Messages involving people you have blocked will be excluded from the timeline in all cases instead of just repeats.
 - Admin API: Move the user related API to `api/pleroma/admin/users`

@@ -81,8 +153,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Posts which are marked sensitive or tagged nsfw no longer have link previews.
 - HTTP connection timeout is now set to 10 seconds.
 - Respond with a 404 Not implemented JSON error message when requested API is not implemented
+- Rich Media: crawl only https URLs.

 ### Fixed
+- Follow requests don't get 'stuck' anymore.
 - Added an FTS index on objects. Running `vacuum analyze` and setting a larger `work_mem` is recommended.
 - Followers counter not being updated when a follower is blocked
 - Deactivated users being able to request an access token

@@ -112,12 +186,17 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Mastodon API: Correct `reblogged`, `favourited`, and `bookmarked` values in the reblog status JSON
 - Mastodon API: Exposing default scope of the user to anyone
 - Mastodon API: Make `irreversible` field default to `false` [`POST /api/v1/filters`]
+- Mastodon API: Replace missing non-nullable Card attributes with empty strings
 - User-Agent is now sent correctly for all HTTP requests.
 - MRF: Simple policy now properly delists imported or relayed statuses

 ## Removed
 - Configuration: `config :pleroma, :fe` in favor of the more flexible `config :pleroma, :frontend_configurations`

+## [0.9.99999] - 2019-05-31
+### Security
+- Mastodon API: Fix lists leaking private posts
+
 ## [0.9.9999] - 2019-04-05
 ### Security
 - Mastodon API: Fix content warnings skipping HTML sanitization
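The breaking mailer change listed above means existing instances have to opt back in explicitly after upgrading. A minimal sketch of the relevant secret-config lines (the adapter shown is the repository default; real deployments would substitute their own mail settings):

```elixir
use Mix.Config

# Opt back in after the upgrade: the mailer is now disabled unless
# `enabled: true` is set explicitly (see the changelog entry above).
config :pleroma, Pleroma.Emails.Mailer,
  enabled: true,
  adapter: Swoosh.Adapters.Sendmail
```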
Procfile (new file, 2 lines)

@@ -0,0 +1,2 @@
+web: mix phx.server
+release: mix ecto.migrate
README.md

@@ -15,9 +15,12 @@ For clients it supports both the [GNU Social API with Qvitter extensions](https:
 If you want to run your own server, feel free to contact us at @lain@pleroma.soykaf.com or in our dev chat at #pleroma on freenode or via matrix at <https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org>.

 ## Installation
+**Note:** The guide below may be outdated and in most cases shouldn't be used. Instead check out our [wiki](https://docs.pleroma.social) for platform-specific installation instructions, most likely [Installing on Linux using OTP releases](https://docs.pleroma.social/otp_en.html) is the guide you need.
+
+### OS/Distro packages
+Currently Pleroma is not packaged by any OS/Distros, but feel free to reach out to us at [#pleroma-dev on freenode](https://webchat.freenode.net/?channels=%23pleroma-dev) or via matrix at <https://matrix.heldscal.la/#/room/#freenode_#pleroma-dev:matrix.org> for assistance. If you want to change default options in your Pleroma package, please **discuss it with us first**.
+
 ### Docker

 While we don’t provide docker files, other people have written very good ones. Take a look at <https://github.com/angristan/docker-pleroma> or <https://github.com/sn0w/pleroma-docker>.

 ### Dependencies
config/config.exs

@@ -99,6 +99,7 @@
 config :pleroma, :emoji,
   shortcode_globs: ["/emoji/custom/**/*.png"],
+  pack_extensions: [".png", ".gif"],
   groups: [
     # Put groups that have higher priority than defaults here. Example in `docs/config/custom_emoji.md`
     Custom: ["/emoji/*.png", "/emoji/**/*.png"]

@@ -139,6 +140,7 @@
   instrumenters: [Pleroma.Web.Endpoint.Instrumenter],
   url: [host: "localhost"],
   http: [
+    ip: {127, 0, 0, 1},
     dispatch: [
       {:_,
        [

@@ -167,7 +169,7 @@
 config :logger, :ex_syslogger,
   level: :debug,
-  ident: "Pleroma",
+  ident: "pleroma",
   format: "$metadata[$level] $message",
   metadata: [:request_id]

@@ -192,6 +194,8 @@
   send_user_agent: true,
   adapter: [
     ssl_options: [
+      # Workaround for remote server certificate chain issues
+      partial_chain: &:hackney_connect.partial_chain/1,
       # We don't support TLS v1.3 yet
       versions: [:tlsv1, :"tlsv1.1", :"tlsv1.2"]
     ]

@@ -208,8 +212,15 @@
   avatar_upload_limit: 2_000_000,
   background_upload_limit: 4_000_000,
   banner_upload_limit: 4_000_000,
+  poll_limits: %{
+    max_options: 20,
+    max_option_chars: 200,
+    min_expiration: 0,
+    max_expiration: 365 * 24 * 60 * 60
+  },
   registrations_open: true,
   federating: true,
+  federation_incoming_replies_max_depth: 100,
   federation_reachability_timeout_days: 7,
   federation_publisher_modules: [
     Pleroma.Web.ActivityPub.Publisher,

@@ -229,6 +240,7 @@
     "text/bbcode"
   ],
   mrf_transparency: true,
+  mrf_transparency_exclusions: [],
   autofollowed_nicknames: [],
   max_pinned_statuses: 1,
   no_attachment_links: false,

@@ -237,9 +249,11 @@
   max_report_comment_size: 1000,
   safe_dm_mentions: false,
   healthcheck: false,
-  remote_post_retention_days: 90
-
-config :pleroma, :app_account_creation, enabled: true, max_requests: 25, interval: 1800
+  remote_post_retention_days: 90,
+  skip_thread_containment: true,
+  limit_to_local_content: :unauthenticated,
+  dynamic_configuration: false,
+  external_user_synchronization: true

 config :pleroma, :markup,
   # XXX - unfortunately, inline images must be enabled by default right now, because
@@ -320,7 +334,17 @@
   federated_timeline_removal: [],
   replace: []

-config :pleroma, :rich_media, enabled: true
+config :pleroma, :mrf_subchain, match_actor: %{}
+
+config :pleroma, :rich_media,
+  enabled: true,
+  ignore_hosts: [],
+  ignore_tld: ["local", "localdomain", "lan"],
+  parsers: [
+    Pleroma.Web.RichMedia.Parsers.TwitterCard,
+    Pleroma.Web.RichMedia.Parsers.OGP,
+    Pleroma.Web.RichMedia.Parsers.OEmbed
+  ]

 config :pleroma, :media_proxy,
   enabled: false,
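Since Mix config entries for the same key are merged, an instance should only need to override the Rich Media keys it cares about and can keep the default parser list above. A minimal sketch with hypothetical host values (not part of this commit):

```elixir
use Mix.Config

# Hypothetical secret-config override: skip link previews for an internal
# host and one extra TLD, keeping the parsers defined in config.exs.
config :pleroma, :rich_media,
  ignore_hosts: ["intranet.example.internal"],
  ignore_tld: ["local", "localdomain", "lan", "home"]
```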
@@ -344,7 +368,11 @@
   port: 9999

 config :pleroma, Pleroma.Web.Metadata,
-  providers: [Pleroma.Web.Metadata.Providers.RelMe],
+  providers: [
+    Pleroma.Web.Metadata.Providers.OpenGraph,
+    Pleroma.Web.Metadata.Providers.TwitterCard,
+    Pleroma.Web.Metadata.Providers.RelMe
+  ],
   unfurl_nsfw: false

 config :pleroma, :suggestions,

@@ -352,8 +380,8 @@
   third_party_engine:
     "http://vinayaka.distsn.org/cgi-bin/vinayaka-user-match-suggestions-api.cgi?{{host}}+{{user}}",
   timeout: 300_000,
-  limit: 23,
-  web: "https://vinayaka.distsn.org/?{{host}}+{{user}}"
+  limit: 40,
+  web: "https://vinayaka.distsn.org"

 config :pleroma, :http_security,
   enabled: true,

@@ -433,6 +461,8 @@
   opts: [
     scheme: true,
     extra: true,
+    # TODO: Set to :no_scheme when it works properly
+    validate_tld: true,
     class: false,
     strip_prefix: false,
     new_window: false,
@@ -453,7 +483,11 @@
 config :esshd,
   enabled: false

-oauth_consumer_strategies = String.split(System.get_env("OAUTH_CONSUMER_STRATEGIES") || "")
+oauth_consumer_strategies =
+  System.get_env("OAUTH_CONSUMER_STRATEGIES")
+  |> to_string()
+  |> String.split()
+  |> Enum.map(&hd(String.split(&1, ":")))

 ueberauth_providers =
   for strategy <- oauth_consumer_strategies do
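The rewritten pipeline above tolerates an unset variable (via `to_string/1`) and strips anything after a colon in each entry, which appears to let a strategy name carry a dependency suffix. A standalone illustration with a hypothetical value (not taken from the diff):

```elixir
# Hypothetical OAUTH_CONSUMER_STRATEGIES value; the colon part stands in for a
# strategy that also names its package, and is dropped by the pipeline.
env_value = "twitter keycloak:ueberauth_keycloak_strategy"

strategies =
  env_value
  |> to_string()
  |> String.split()
  |> Enum.map(&hd(String.split(&1, ":")))

IO.inspect(strategies)
# => ["twitter", "keycloak"]
```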
@@ -469,7 +503,7 @@
 config :pleroma, :auth, oauth_consumer_strategies: oauth_consumer_strategies

-config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Sendmail
+config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Sendmail, enabled: false

 config :prometheus, Pleroma.Web.Endpoint.MetricsExporter, path: "/api/pleroma/app_metrics"

@@ -486,9 +520,17 @@
 config :pleroma, :database, rum_enabled: false

+config :pleroma, :env, Mix.env()
+
 config :http_signatures,
   adapter: Pleroma.Signature

+config :pleroma, :rate_limit,
+  search: [{1000, 10}, {1000, 30}],
+  app_account_creation: {1_800_000, 25},
+  statuses_actions: {10_000, 15},
+  status_id_action: {60_000, 3}
+
 # Import environment specific config. This must remain at the bottom
 # of this file so it overrides the configuration defined above.
 import_config "#{Mix.env()}.exs"
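Each new `:rate_limit` value appears to be an `{interval_in_ms, limit}` tuple; `{1_800_000, 25}` matches the removed `:app_account_creation` block (25 requests per 1800 seconds), and a two-element list such as the `search` entry presumably provides separate buckets, with the looser one for authenticated clients. That reading is inferred from the values above rather than stated in the diff. A sketch of tightening one bucket in a hypothetical secret config:

```elixir
use Mix.Config

# Hypothetical override: allow only 10 account-creation requests per hour
# instead of the default 25 per 30 minutes ({interval_ms, limit}).
config :pleroma, :rate_limit,
  app_account_creation: {3_600_000, 10}
```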
config/dev.exs

@@ -59,3 +59,6 @@
     "!!! RUNNING IN LOCALHOST DEV MODE! !!!\nFEDERATION WON'T WORK UNTIL YOU CONFIGURE A dev.secret.exs"
   )
 end
+
+if File.exists?("./config/dev.exported_from_db.secret.exs"),
+  do: import_config("dev.exported_from_db.secret.exs")
config/dokku.exs (new file, 25 lines)

@@ -0,0 +1,25 @@
+use Mix.Config
+
+config :pleroma, Pleroma.Web.Endpoint,
+  http: [
+    port: String.to_integer(System.get_env("PORT") || "4000"),
+    protocol_options: [max_request_line_length: 8192, max_header_value_length: 8192]
+  ],
+  protocol: "http",
+  secure_cookie_flag: false,
+  url: [host: System.get_env("APP_HOST"), scheme: "https", port: 443],
+  secret_key_base: "+S+ULgf7+N37c/lc9K66SMphnjQIRGklTu0BRr2vLm2ZzvK0Z6OH/PE77wlUNtvP"
+
+database_url =
+  System.get_env("DATABASE_URL") ||
+    raise """
+    environment variable DATABASE_URL is missing.
+    For example: ecto://USER:PASS@HOST/DATABASE
+    """
+
+config :pleroma, Pleroma.Repo,
+  # ssl: true,
+  url: database_url,
+  pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")
+
+config :pleroma, :instance, name: "#{System.get_env("APP_NAME")} CI Instance"
config/prod.exs

@@ -17,8 +17,10 @@
   http: [port: 4000],
   protocol: "http"

+config :phoenix, serve_endpoints: true
+
 # Do not print debug messages in production
-config :logger, level: :info
+config :logger, level: :warn

 # ## SSL Support
 #

@@ -61,3 +63,6 @@
 # Finally import the config/prod.secret.exs
 # which should be versioned separately.
 import_config "prod.secret.exs"
+
+if File.exists?("./config/prod.exported_from_db.secret.exs"),
+  do: import_config("prod.exported_from_db.secret.exs")
19  config/releases.exs  Normal file

@@ -0,0 +1,19 @@
import Config

config :pleroma, :instance, static_dir: "/var/lib/pleroma/static"
config :pleroma, Pleroma.Uploaders.Local, uploads: "/var/lib/pleroma/uploads"

config_path = System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs"

if File.exists?(config_path) do
  import_config config_path
else
  warning = [
    IO.ANSI.red(),
    IO.ANSI.bright(),
    "!!! #{config_path} not found! Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file",
    IO.ANSI.reset()
  ]

  IO.puts(warning)
end
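The file imported above is the runtime configuration for OTP releases, by default `/etc/pleroma/config.exs`. A minimal sketch of such a file (host and credentials are placeholders; a real config carries more settings, e.g. the endpoint secrets):

```
import Config

config :pleroma, Pleroma.Web.Endpoint,
  url: [host: "example.com", scheme: "https", port: 443]

config :pleroma, Pleroma.Repo,
  username: "pleroma",
  password: "change-me",
  database: "pleroma",
  hostname: "localhost"
```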
@@ -17,15 +17,19 @@
# Print only warnings and errors during test
config :logger, level: :warn

config :pleroma, :auth, oauth_consumer_strategies: []

config :pleroma, Pleroma.Upload, filters: [], link_name: false

config :pleroma, Pleroma.Uploaders.Local, uploads: "test/uploads"

config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Test, enabled: true

config :pleroma, :instance,
  email: "admin@example.com",
  notify_email: "noreply@example.com",
  skip_thread_containment: false,
  federating: false

# Configure your database
config :pleroma, Pleroma.Repo,

@@ -40,7 +44,11 @@
config :pbkdf2_elixir, rounds: 1

config :tesla, adapter: Tesla.Mock

config :pleroma, :rich_media,
  enabled: false,
  ignore_hosts: [],
  ignore_tld: ["local", "localdomain", "lan"]

config :web_push_encryption, :vapid_details,
  subject: "mailto:administrator@example.com",

@@ -57,7 +65,9 @@
  total_user_limit: 3,
  enabled: false

config :pleroma, :rate_limit,
  search: [{1000, 30}, {1000, 30}],
  app_account_creation: {10_000, 5}

config :pleroma, :http_security, report_uri: "https://endpoint.com"

@@ -67,6 +77,8 @@
config :pleroma, :database, rum_enabled: rum_enabled
IO.puts("RUM enabled: #{rum_enabled}")

config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock

try do
  import_config "test.secret.exs"
rescue
@@ -38,7 +38,9 @@ Authentication is required and the user must be an admin.
      "moderator": bool
    },
    "local": bool,
    "tags": array,
    "avatar": string,
    "display_name": string
  },
  ...
]

@@ -174,13 +176,13 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
- `nickname`
- `status` BOOLEAN field, false value means deactivation.

## `/api/pleroma/admin/users/:nickname_or_id`

### Retrieve the details of a user

- Method: `GET`
- Params:
  - `nickname` or `id`
- Response:
  - On failure: `Not found`
  - On success: JSON of the user

@@ -331,6 +333,7 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
    "pleroma": {},
    "sensitive": false
  },
  "tags": ["force_unlisted"],
  "statuses_count": 3,
  "url": "https://pleroma.example.org/users/user",
  "username": "user"

@@ -366,6 +369,7 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
    "pleroma": {},
    "sensitive": false
  },
  "tags": ["force_unlisted"],
  "statuses_count": 1,
  "url": "https://pleroma.example.org/users/lain",
  "username": "lain"

@@ -557,3 +561,94 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is ret
- 403 Forbidden `{"error": "error_msg"}`
- 404 Not Found `"Not found"`
- On success: 200 OK `{}`

## `/api/pleroma/admin/config`
### List config settings
- Method `GET`
- Params: none
- Response:

```json
{
  configs: [
    {
      "group": string,
      "key": string or string with leading `:` for atoms,
      "value": string or {} or [] or {"tuple": []}
    }
  ]
}
```

## `/api/pleroma/admin/config`
### Update config settings
Module names can be passed as strings starting with `Pleroma`, e.g. `"Pleroma.Upload"`.
Atom keys and values can be passed with a leading `:`, e.g. `":upload"`.
Tuples can be passed as `{"tuple": ["first_val", Pleroma.Module, []]}`;
`{"tuple": ["some_string", "Pleroma.Some.Module", []]}` will be converted to `{"some_string", Pleroma.Some.Module, []}`.
Keywords can be passed as lists of two-element tuples, e.g.
`[{"tuple": ["first_val", Pleroma.Module]}, {"tuple": ["second_val", true]}]`.

Compile time settings (need instance reboot):
- all settings under these keys:
  - `:hackney_pools`
  - `:chat`
  - `Pleroma.Web.Endpoint`
  - `Pleroma.Repo`
- partial settings:
  - `Pleroma.Captcha` -> `:seconds_valid`
  - `Pleroma.Upload` -> `:proxy_remote`
  - `:instance` -> `:upload_limit`

- Method `POST`
- Params:
  - `configs` => [
    - `group` (string)
    - `key` (string or string with leading `:` for atoms)
    - `value` (string, [], {} or {"tuple": []})
    - `delete` = true (optional, if the parameter must be deleted)
  ]

- Request (example):

```json
{
  configs: [
    {
      "group": "pleroma",
      "key": "Pleroma.Upload",
      "value": [
        {"tuple": [":uploader", "Pleroma.Uploaders.Local"]},
        {"tuple": [":filters", ["Pleroma.Upload.Filter.Dedupe"]]},
        {"tuple": [":link_name", true]},
        {"tuple": [":proxy_remote", false]},
        {"tuple": [":proxy_opts", [
          {"tuple": [":redirect_on_failure", false]},
          {"tuple": [":max_body_length", 1048576]},
          {"tuple": [":http": [
            {"tuple": [":follow_redirect", true]},
            {"tuple": [":pool", ":upload"]}
          ]]}
        ]]},
        {"tuple": [":dispatch", {
          "tuple": ["/api/v1/streaming", "Pleroma.Web.MastodonAPI.WebsocketHandler", []]
        }]}
      ]
    }
  ]
}
```

- Response:

```json
{
  configs: [
    {
      "group": string,
      "key": string or string with leading `:` for atoms,
      "value": string or {} or [] or {"tuple": []}
    }
  ]
}
```
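As a sketch of the `delete` flag documented above, a request body that removes a previously stored setting might look like this (group and key are illustrative):

```json
{
  "configs": [
    {
      "group": "pleroma",
      "key": ":rate_limit",
      "delete": true
    }
  ]
}
```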
@@ -16,9 +16,11 @@ Adding the parameter `with_muted=true` to the timeline queries will also return
## Statuses

- `visibility`: has an additional possible value `list`

Has these additional fields under the `pleroma` object:

- `local`: true if the post was made on the local instance
- `conversation_id`: the ID of the conversation the status is associated with (if any)
- `in_reply_to_account_acct`: the `acct` property of the User entity for the replied-to user (if any)
- `content`: a map consisting of alternate representations of the `content` property, with the key being its MIME type. Currently the only alternate representation supported is `text/plain`

@@ -46,6 +48,8 @@ Has these additional fields under the `pleroma` object:
- `confirmation_pending`: boolean, true if a new user account is waiting on email confirmation to be activated
- `hide_followers`: boolean, true when the user has follower hiding enabled
- `hide_follows`: boolean, true when the user has follow hiding enabled
- `settings_store`: A generic map of settings for frontends. Opaque to the backend. Only returned in `verify_credentials` and `update_credentials`
- `chat_token`: The token needed for Pleroma chat. Only returned in `verify_credentials`

### Source

@@ -72,6 +76,8 @@ Additional parameters can be added to the JSON body/Form data:

- `preview`: boolean, if set to `true` the post won't actually be posted, but the status entity will still be rendered back. This could be useful for previewing rich text/custom emoji, for example.
- `content_type`: string, the MIME type of the status; it is transformed into HTML by the backend. You can get the list of supported MIME types from the nodeinfo endpoint.
- `to`: A list of nicknames (like `lain@soykaf.club` or `lain` on the local server) that will be used to determine who is going to be addressed by this post. Using this will disable the implicit addressing by mentioned names in the `status` body; only the people in the `to` list will be addressed. The normal rules for post visibility are not affected by this and will still apply.
- `visibility`: string, besides standard MastoAPI values (`direct`, `private`, `unlisted` or `public`) it can be used to address a List by setting it to `list:LIST_ID`.

## PATCH `/api/v1/update_credentials`

@@ -83,6 +89,16 @@ Additional parameters can be added to the JSON body/Form data:
- `hide_favorites` - if true, the user's favorites timeline will be hidden
- `show_role` - if true, the user's role (e.g. admin, moderator) will be exposed to anyone in the API
- `default_scope` - the scope returned under the `privacy` key in the Source subentity
- `pleroma_settings_store` - Opaque user settings to be saved on the backend.
- `skip_thread_containment` - if true, skip filtering out broken threads
- `pleroma_background_image` - sets the background image of the user.

### Pleroma Settings Store
Pleroma has a mechanism that allows frontends to save blobs of JSON for each user on the backend. This can be used to save frontend-specific settings for a user that the backend does not need to know about.

The parameter should have the form `{frontend_name: {...}}`, with `frontend_name` identifying your type of client, e.g. `pleroma_fe`. It will overwrite everything under this property, but will not overwrite other frontends' settings.

This information is returned in the `verify_credentials` endpoint.
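For example, a `PATCH /api/v1/update_credentials` body using the settings store might look like the following sketch; the `pleroma_fe` key and the nested values are hypothetical, since the blob is opaque to the backend:

```json
{
  "pleroma_settings_store": {
    "pleroma_fe": {
      "theme": "pleroma-dark",
      "collapseMessageWithSubject": true
    }
  }
}
```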
## Authentication
@@ -126,20 +126,6 @@ Request parameters can be passed via [query strings](https://en.wikipedia.org/wi
## `/api/pleroma/admin/`…
See [Admin-API](Admin-API.md)

-## `/api/v1/pleroma/flavour/:flavour`
-* Method `POST`
-* Authentication: required
-* Response: JSON string. Returns the user flavour or the default one on success, otherwise returns `{"error": "error_msg"}`
-* Example response: "glitch"
-* Note: This is intended to be used only by mastofe
-
-## `/api/v1/pleroma/flavour`
-* Method `GET`
-* Authentication: required
-* Response: JSON string. Returns the user flavour or the default one.
-* Example response: "glitch"
-* Note: This is intended to be used only by mastofe
-
## `/api/pleroma/notifications/read`
### Mark a single notification as read
* Method `POST`

@@ -252,6 +238,13 @@ See [Admin-API](Admin-API.md)
]
```

## `/api/v1/pleroma/accounts/update_*`
### Set and clear account avatar, banner, and background

- PATCH `/api/v1/pleroma/accounts/update_avatar`: Set/clear user avatar image
- PATCH `/api/v1/pleroma/accounts/update_banner`: Set/clear user banner image
- PATCH `/api/v1/pleroma/accounts/update_background`: Set/clear user background image

## `/api/v1/pleroma/mascot`
### Gets user mascot image
* Method `GET`
|
||||||
- Platforms: iOS, Android
|
- Platforms: iOS, Android
|
||||||
- Features: No Streaming
|
- Features: No Streaming
|
||||||
|
|
||||||
### Tootdon
|
|
||||||
- Homepage: <http://tootdon.club/>, <http://blog.mastodon-tootdon.com/>
|
|
||||||
- Source Code: ???
|
|
||||||
- Contact: [@tootdon@mstdn.jp](https://mstdn.jp/users/tootdon)
|
|
||||||
- Platforms: Android, iOS
|
|
||||||
- Features: No Streaming
|
|
||||||
|
|
||||||
### Tusky
|
### Tusky
|
||||||
- Homepage: <https://tuskyapp.github.io/>
|
- Homepage: <https://tuskyapp.github.io/>
|
||||||
- Source Code: <https://github.com/tuskyapp/Tusky>
|
- Source Code: <https://github.com/tuskyapp/Tusky>
|
||||||
|
|
|
@@ -16,6 +16,13 @@ Note: `strip_exif` has been replaced by `Pleroma.Upload.Filter.Mogrify`.
## Pleroma.Uploaders.Local
* `uploads`: Which directory to store the user-uploads in, relative to pleroma’s working directory

## Pleroma.Uploaders.S3
* `bucket`: S3 bucket name
* `public_endpoint`: the S3 endpoint that the user ultimately accesses (e.g. "https://s3.dualstack.ap-northeast-1.amazonaws.com")
* `truncated_namespace`: If you use an S3-compatible service such as Digital Ocean Spaces, or serve uploads through a CDN, set this to the folder name or to `""`. For example, when using CDN-to-S3 virtual host format, set `""`; in that case, put the CDN's CNAME host in `public_endpoint`.
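For reference, a minimal sketch of an S3 uploader configuration. The bucket name and endpoint are placeholders, and selecting the backend via the `:uploader` key of `Pleroma.Upload` is an assumption here, not something this section documents:

```
config :pleroma, Pleroma.Upload,
  # assumed way to select the storage backend
  uploader: Pleroma.Uploaders.S3

config :pleroma, Pleroma.Uploaders.S3,
  bucket: "my-pleroma-uploads",
  public_endpoint: "https://s3.dualstack.ap-northeast-1.amazonaws.com",
  truncated_namespace: ""
```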
## Pleroma.Upload.Filter.Mogrify

* `args`: List of actions for the `mogrify` command, like `"strip"` or `["strip", "auto-orient", {"implode", "1"}]`.

@@ -29,11 +36,12 @@ No specific configuration.
This filter replaces the filename (not the path) of an upload. For complete obfuscation, add
`Pleroma.Upload.Filter.Dedupe` before AnonymizeFilename.

* `text`: Text to replace filenames in links. If empty, `{random}.extension` will be used. You can get the original filename extension by using `{extension}`, for example `custom-file-name.{extension}`.

## Pleroma.Emails.Mailer
* `adapter`: one of the mail adapters listed in the [Swoosh readme](https://github.com/swoosh/swoosh#adapters), or `Swoosh.Adapters.Local` for an in-memory mailbox.
* `api_key` / `password` and/or other adapter-specific settings, per the above documentation.
* `enabled`: Allows enabling/disabling sending emails. Default: `false`.

An example for the Sendgrid adapter:

@@ -71,23 +79,34 @@ config :pleroma, Pleroma.Emails.Mailer,
* `avatar_upload_limit`: File size limit of user’s profile avatars
* `background_upload_limit`: File size limit of user’s profile backgrounds
* `banner_upload_limit`: File size limit of user’s profile banners
* `poll_limits`: A map with poll limits for **local** polls
  * `max_options`: Maximum number of options
  * `max_option_chars`: Maximum number of characters per option
  * `min_expiration`: Minimum expiration time (in seconds)
  * `max_expiration`: Maximum expiration time (in seconds)
* `registrations_open`: Enable registrations for anyone; invitations can be enabled when false.
* `invites_enabled`: Enable user invitations for admins (depends on `registrations_open: false`).
* `account_activation_required`: Require users to confirm their emails before signing in.
* `federating`: Enable federation with other instances
* `federation_incoming_replies_max_depth`: Max. depth of reply-to activities fetching on incoming federation, to prevent out-of-memory situations while fetching very long threads. If set to `nil`, threads of any depth will be fetched. Lower this value if you experience out-of-memory crashes.
* `federation_reachability_timeout_days`: Timeout (in days) of each external federation target being unreachable prior to pausing federating to it.
* `allow_relay`: Enable Pleroma’s Relay, which makes it possible to follow a whole instance
* `rewrite_policy`: Message Rewrite Policy, either one or a list. Here are the ones available by default:
  * `Pleroma.Web.ActivityPub.MRF.NoOpPolicy`: Doesn’t modify activities (default)
  * `Pleroma.Web.ActivityPub.MRF.DropPolicy`: Drops all activities. It generally doesn’t make sense to use in production
  * `Pleroma.Web.ActivityPub.MRF.SimplePolicy`: Restrict the visibility of activities from certain instances (See ``:mrf_simple`` section)
  * `Pleroma.Web.ActivityPub.MRF.TagPolicy`: Applies policies to individual users based on tags, which can be set using pleroma-fe/admin-fe/any other app that supports the Pleroma Admin API. For example it allows marking posts from individual users nsfw (sensitive)
  * `Pleroma.Web.ActivityPub.MRF.SubchainPolicy`: Selectively runs other MRF policies when messages match (see ``:mrf_subchain`` section)
  * `Pleroma.Web.ActivityPub.MRF.RejectNonPublic`: Drops posts with non-public visibility settings (See ``:mrf_rejectnonpublic`` section)
  * `Pleroma.Web.ActivityPub.MRF.EnsureRePrepended`: Rewrites posts to ensure that replies to posts with subjects do not have an identical subject and instead begin with re:.
  * `Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy`: Rejects posts from likely spambots by rejecting posts from new users that contain links.
  * `Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy`: Crawls attachments using their MediaProxy URLs so that the MediaProxy cache is primed.
* `public`: Makes the client API available in authenticated mode only, except for user profiles. Useful for disabling the Local Timeline and The Whole Known Network.
* `quarantined_instances`: List of ActivityPub instances where private (DMs, followers-only) activities will not be sent.
* `managed_config`: Whether the config for pleroma-fe is configured in this config or in ``static/config.json``
* `allowed_post_formats`: MIME-type list of formats allowed to be posted (transformed into HTML)
* `mrf_transparency`: Make the content of your Message Rewrite Facility settings public (via nodeinfo).
* `mrf_transparency_exclusions`: Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value.
* `scope_copy`: Copy the scope (private/unlisted/public) in replies to posts by default.
* `subject_line_behavior`: Allows changing the default behaviour of subject lines in replies. Valid values:
  * "email": Copy and prepend re:, as in email.

@@ -102,15 +121,15 @@ config :pleroma, Pleroma.Emails.Mailer,
* `welcome_message`: A message that will be sent to newly registered users as a direct message.
* `welcome_user_nickname`: The nickname of the local user that sends the welcome message.
* `max_report_comment_size`: The maximum size of the report comment (Default: `1000`)
* `safe_dm_mentions`: If set to true, only mentions at the beginning of a post will be used to address people in direct messages. This is to prevent accidental mentioning of people when talking about them (e.g. "@friend hey i really don't like @enemy"). Default: `false`.
* `healthcheck`: If set to true, system data will be shown on ``/api/pleroma/healthcheck``.
* `remote_post_retention_days`: The default amount of days to retain remote posts when pruning the database.
* `skip_thread_containment`: Skip filtering out broken threads. The default is `false`.
* `limit_to_local_content`: Limit unauthenticated users to search for local statuses and users only. Possible values: `:unauthenticated`, `:all` and `false`. The default is `:unauthenticated`.
* `dynamic_configuration`: Allow transferring configuration to the DB with subsequent customization from the Admin API.
* `external_user_synchronization`: Enable following/followers counter synchronization for external users.

-## :app_account_creation
-REST API for creating an account settings
-* `enabled`: Enable/disable registration
-* `max_requests`: Number of requests allowed for creating accounts
-* `interval`: Interval for restricting requests for one ip (seconds)
-
## :logger
* `backends`: `:console` is used to send logs to stdout, `{ExSyslogger, :ex_syslogger}` to log to syslog, and `Quack.Logger` to log to Slack

@@ -224,6 +243,21 @@ relates to mascots on the mastodon frontend
* `avatar_removal`: List of instances to strip avatars from
* `banner_removal`: List of instances to strip banners from

## :mrf_subchain
This policy processes messages through an alternate pipeline when a given message matches certain criteria.
All criteria are configured as a map of regular expressions to lists of policy modules.

* `match_actor`: Matches a series of regular expressions against the actor field.

Example:

```
config :pleroma, :mrf_subchain,
  match_actor: %{
    ~r/https:\/\/example.com/s => [Pleroma.Web.ActivityPub.MRF.DropPolicy]
  }
```

## :mrf_rejectnonpublic
* `allow_followersonly`: whether to allow followers-only posts
* `allow_direct`: whether to allow direct messages

@@ -251,7 +285,7 @@ relates to mascots on the mastodon frontend

## Pleroma.Web.Endpoint
`Phoenix` endpoint configuration; all configuration options can be viewed [here](https://hexdocs.pm/phoenix/Phoenix.Endpoint.html#module-dynamic-configuration), only common options are listed here
* `http` - a list containing http protocol configuration; all configuration options can be viewed [here](https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html#module-options), only common options are listed here. For deployment using docker, you need to set this to `[ip: {0,0,0,0}, port: 4000]` to make pleroma accessible from other containers (such as your nginx server); see the sketch below.
  - `ip` - a tuple consisting of 4 integers
  - `port`
* `url` - a list containing the configuration for generating urls, accepts
|
||||||
|
|
||||||
## :rich_media
|
## :rich_media
|
||||||
* `enabled`: if enabled the instance will parse metadata from attached links to generate link previews
|
* `enabled`: if enabled the instance will parse metadata from attached links to generate link previews
|
||||||
|
* `ignore_hosts`: list of hosts which will be ignored by the metadata parser. For example `["accounts.google.com", "xss.website"]`, defaults to `[]`.
|
||||||
|
* `ignore_tld`: list TLDs (top-level domains) which will ignore for parse metadata. default is ["local", "localdomain", "lan"]
|
||||||
|
* `parsers`: list of Rich Media parsers
|
||||||
|
|
||||||
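A sketch combining the options above (the ignored hosts are the illustrative values from the description; the TLD list matches the stated default):

```
config :pleroma, :rich_media,
  enabled: true,
  # never fetch previews from these hosts
  ignore_hosts: ["accounts.google.com", "xss.website"],
  # skip metadata parsing for these top-level domains
  ignore_tld: ["local", "localdomain", "lan"]
```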
## :fetch_initial_posts
* `enabled`: if enabled, when a new user is federated with, fetch some of their latest posts

@@ -492,7 +529,7 @@ Authentication / authorization settings.

* `auth_template`: authentication form template. By default it's `show.html`, which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/show.html.eex`.
* `oauth_consumer_template`: OAuth consumer mode authentication form template. By default it's `consumer.html`, which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/consumer.html.eex`.
* `oauth_consumer_strategies`: the list of enabled OAuth consumer strategies; by default it's set by the `OAUTH_CONSUMER_STRATEGIES` environment variable. Each entry in this space-delimited string should be of the format `<strategy>` or `<strategy>:<dependency>` (e.g. `twitter` or `keycloak:ueberauth_keycloak_strategy` in case the dependency is named differently than `ueberauth_<strategy>`).

## OAuth consumer mode

@@ -545,6 +582,24 @@ config :ueberauth, Ueberauth,
  providers: [
    microsoft: {Ueberauth.Strategy.Microsoft, [callback_params: []]}
  ]

# Keycloak
# Note: make sure to add the `keycloak:ueberauth_keycloak_strategy` entry to the `OAUTH_CONSUMER_STRATEGIES` environment variable
keycloak_url = "https://publicly-reachable-keycloak-instance.org:8080"

config :ueberauth, Ueberauth.Strategy.Keycloak.OAuth,
  client_id: System.get_env("KEYCLOAK_CLIENT_ID"),
  client_secret: System.get_env("KEYCLOAK_CLIENT_SECRET"),
  site: keycloak_url,
  authorize_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/auth",
  token_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/token",
  userinfo_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/userinfo",
  token_method: :post

config :ueberauth, Ueberauth,
  providers: [
    keycloak: {Ueberauth.Strategy.Keycloak, [uid_field: :email]}
  ]
```

## OAuth 2.0 provider - :oauth2

@@ -558,6 +613,7 @@ Configure OAuth 2 provider capabilities:

## :emoji
* `shortcode_globs`: Location of custom emoji files. `*` can be used as a wildcard. Example `["/emoji/custom/**/*.png"]`
* `pack_extensions`: A list of file extensions for emojis, used when no emoji.txt for a pack is present. Example `[".png", ".gif"]`
* `groups`: Emojis are ordered in groups (tags). This is an array of key-value pairs where the key is the group name and the value the location or array of locations. `*` can be used as a wildcard. Example `[Custom: ["/emoji/*.png", "/emoji/custom/*.png"]]`
* `default_manifest`: Location of the JSON manifest. This manifest contains information about the emoji packs you can download. Currently only one manifest can be added (no arrays).
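Putting the `:emoji` options above together, a configuration sketch could look like this (the paths and extensions are the illustrative values from the descriptions):

```
config :pleroma, :emoji,
  shortcode_globs: ["/emoji/custom/**/*.png"],
  # used when a pack ships no emoji.txt
  pack_extensions: [".png", ".gif"],
  groups: [
    Custom: ["/emoji/*.png", "/emoji/custom/*.png"]
  ]
```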
@@ -575,3 +631,21 @@ To enable them, both the `rum_enabled` flag has to be set and the following spec
`mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`

This will probably take a long time.

## :rate_limit

A keyword list of rate limiters, where a key is a limiter name and the value is the limiter configuration. The basic configuration is a tuple where:

* The first element: `scale` (Integer). The time scale in milliseconds.
* The second element: `limit` (Integer). How many requests to limit in the time scale provided.

It is also possible to have different limits for unauthenticated and authenticated users: the keyword value must be a list of two tuples, where the first one is the config for unauthenticated users and the second one is for authenticated users, as sketched below.

See the [`Pleroma.Plugs.RateLimiter`](Pleroma.Plugs.RateLimiter.html) documentation for examples.

Supported rate limiters:

* `:search` for the search requests (account & status search etc.)
* `:app_account_creation` for registering user accounts from the same IP address
* `:statuses_actions` for create / delete / fav / unfav / reblog / unreblog actions on any statuses
* `:status_id_action` for fav / unfav or reblog / unreblog actions on the same status by the same user
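To make the two forms concrete, here is a sketch of a `:rate_limit` entry using the default values shown in `config/config.exs` above:

```
config :pleroma, :rate_limit,
  # single tuple: {scale_ms, limit} applies to everyone (25 registrations per 30 minutes)
  app_account_creation: {1_800_000, 25},
  # two tuples: first for unauthenticated users, second for authenticated users
  search: [{1000, 10}, {1000, 30}]
```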
@@ -9,8 +9,8 @@ config :pleroma, :suggestions,
  third_party_engine:
    "http://vinayaka.distsn.org/cgi-bin/vinayaka-user-match-suggestions-api.cgi?{{host}}+{{user}}",
  timeout: 300_000,
-  limit: 23,
-  web: "https://vinayaka.distsn.org/?{{host}}+{{user}}"
+  limit: 40,
+  web: "https://vinayaka.distsn.org"

```

@@ -26,6 +26,6 @@ config :pleroma, :suggestions,
  third_party_engine:
    "http://vinayaka.distsn.org/cgi-bin/vinayaka-user-new-suggestions-api.cgi?{{host}}+{{user}}",
  timeout: 60_000,
-  limit: 23,
+  limit: 40,
  web: "https://vinayaka.distsn.org/user-new.html"
```
@@ -87,7 +87,7 @@ sudo adduser -S -s /bin/false -h /opt/pleroma -H pleroma
```shell
sudo mkdir -p /opt/pleroma
sudo chown -R pleroma:pleroma /opt/pleroma
sudo -Hu pleroma git clone -b master https://git.pleroma.social/pleroma/pleroma /opt/pleroma
```

* Change to the new directory:

@@ -202,13 +202,12 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
#### Further reading

* [Backup your instance](backup.html)
* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
* [Hardening your instance](hardening.html)
* [How to activate mediaproxy](howto_mediaproxy.html)
* [Small Pleroma-FE customizations](small_customizations.html)
* [Updating your instance](updating.html)

## Questions
@@ -66,7 +66,7 @@ sudo useradd -r -s /bin/false -m -d /var/lib/pleroma -U pleroma
```shell
sudo mkdir -p /opt/pleroma
sudo chown -R pleroma:pleroma /opt/pleroma
sudo -Hu pleroma git clone -b master https://git.pleroma.social/pleroma/pleroma /opt/pleroma
```

* Change to the new directory:

@@ -200,13 +200,12 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
#### Further reading

* [Backup your instance](backup.html)
* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
* [Hardening your instance](hardening.html)
* [How to activate mediaproxy](howto_mediaproxy.html)
* [Small Pleroma-FE customizations](small_customizations.html)
* [Updating your instance](updating.html)

## Questions
@@ -143,7 +143,7 @@ sudo useradd -r -s /bin/false -m -d /var/lib/pleroma -U pleroma
```shell
sudo mkdir -p /opt/pleroma
sudo chown -R pleroma:pleroma /opt/pleroma
sudo -Hu pleroma git clone -b master https://git.pleroma.social/pleroma/pleroma /opt/pleroma
```

* Change to the new directory:

@@ -264,13 +264,12 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
#### Further reading

* [Backup your instance](backup.html)
* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
* [Hardening your instance](hardening.html)
* [How to activate mediaproxy](howto_mediaproxy.html)
* [Small Pleroma-FE customizations](small_customizations.html)
* [Updating your instance](updating.html)

## Questions
@@ -68,7 +68,7 @@ sudo useradd -r -s /bin/false -m -d /var/lib/pleroma -U pleroma
```shell
sudo mkdir -p /opt/pleroma
sudo chown -R pleroma:pleroma /opt/pleroma
sudo -Hu pleroma git clone -b master https://git.pleroma.social/pleroma/pleroma /opt/pleroma
```

* Change to the new directory:

@@ -190,13 +190,12 @@ sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress
#### Further reading

* [Backup your instance](backup.html)
* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
* [Hardening your instance](hardening.html)
* [How to activate mediaproxy](howto_mediaproxy.html)
* [Small Pleroma-FE customizations](small_customizations.html)
* [Updating your instance](updating.html)

## Questions
@@ -69,7 +69,7 @@ cd ~

* Clone the Git repository:
```
git clone -b master https://git.pleroma.social/pleroma/pleroma
```

* Change to the new directory:

@@ -180,9 +180,12 @@ mix set_moderator username [true|false]

#### Configuration and customization

* [Backup your instance](backup.html)
* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
* [Hardening your instance](hardening.html)
* [How to activate mediaproxy](howto_mediaproxy.html)
* [Small Pleroma-FE customizations](small_customizations.html)
* [Updating your instance](updating.html)

## Questions?
@@ -106,7 +106,7 @@ It is highly recommended you use your own fork for the `https://path/to/repo` pa
```shell
pleroma$ cd ~
pleroma$ git clone -b master https://path/to/repo
```

* Change to the new directory:

@@ -283,13 +283,12 @@ If you opted to allow sudo for the `pleroma` user but would like to remove the a
#### Further reading

* [Backup your instance](backup.html)
* [Configuration tips](general-tips-for-customizing-pleroma-fe.html)
* [Hardening your instance](hardening.html)
* [How to activate mediaproxy](howto_mediaproxy.html)
* [Small Pleroma-FE customizations](small_customizations.html)
* [Updating your instance](updating.html)

## Questions
153  docs/installation/migrating_from_source_otp_en.md  Normal file

@@ -0,0 +1,153 @@
# Switching a from-source install to OTP releases

## What are OTP releases?
OTP releases are as close as you can get to binary releases with Erlang/Elixir. The release is self-contained and provides everything needed to boot it; it is easily administered via the provided shell script to open up a remote console, start/stop/restart the release, start in the background, send remote commands, and more.

### Can I still run the develop branch if I decide to use them?
Yes, we produce builds for every commit in `develop`. However, `develop` is considered unstable, so please don't run it in production just to get faster access to new features, unless you need them as an app developer.

## Why would one want to switch?
Benefits of OTP releases over from-source installs include:
* **Less space used.** OTP releases come without source code or build tools, have docs and debug symbols stripped from the compiled bytecode, and do not contain tests, docs, or revision history.
* **Minimal system dependencies.** Excluding the database and reverse proxy, only `curl`, `unzip` and `ncurses` are needed to download and run the release. Because the Erlang runtime and Elixir are shipped with Pleroma, one can use the latest BEAM optimizations and Pleroma features without having to worry about outdated system repos or a missing `erlang-*` package.
* **Potentially fewer bugs and better performance.** This extends the previous point: because we have control over exactly what gets shipped, we can tweak the VM arguments and forget about weird bugs due to Erlang/Elixir version mismatches.
* **Faster and less bug-prone mix tasks.** On a from-source install, one has to wait until a new Pleroma node is started for each mix task, and they execute outside of the instance context (for example, if a user was deleted via a mix task, the instance will have no knowledge of that and continue to display the status count and follows until the cache expires). Mix tasks in OTP releases are executed by calling into a running instance via RPC, which solves both of these problems.

### Sounds great, how do I switch?
Currently we support Linux machines with GNU (e.g. Debian, Ubuntu) or musl (e.g. Alpine) libc and `x86_64`, `aarch64` or `armv7l` CPUs. If you are unsure, check the [Detecting flavour](otp_en.html#detecting-flavour) section in the OTP install guide. If your platform is supported, proceed with the guide; if not, check the [My platform is not supported](#my-platform-is-not-supported) section.

### I don't think it is worth the effort, can I stay on a from-source install?
Yes, currently there are no plans to deprecate them.

### My platform is not supported
If you think your platform is a popular choice for running Pleroma instances, or has the potential to become one, you can [file an issue on our Gitlab](https://git.pleroma.social/pleroma/pleroma/issues/new). If not, guides on how to build and update releases by yourself will be available soon.

## Pre-requisites
You will be running commands as root. If you aren't root already, please elevate your privileges by executing `sudo su`/`su`.

The system needs to have `curl` and `unzip` installed for downloading and unpacking release builds.

Debian/Ubuntu:
```sh
apt install curl unzip
```
Alpine:
```sh
apk add curl unzip
```

## Moving content out of the application directory
When using OTP releases the application directory changes with every version, so it would be a bother to keep content there (and also dangerous unless the `--no-rm` option is used when updating). Fortunately almost all paths in Pleroma are configurable, so it is possible to move them out of there.

Pleroma should be stopped before proceeding.

### Moving uploads/custom public files directory
```sh
# Create uploads directory and set proper permissions (skip if using a remote uploader)
# Note: It does not have to be `/var/lib/pleroma/uploads`, you can configure it to be something else later
mkdir -p /var/lib/pleroma/uploads
chown -R pleroma /var/lib/pleroma

# Create custom public files directory
# Note: It does not have to be `/var/lib/pleroma/static`, you can configure it to be something else later
mkdir -p /var/lib/pleroma/static
chown -R pleroma /var/lib/pleroma

# If you use the local uploader with default settings your uploads should be located in `~pleroma/uploads`
mv ~pleroma/uploads/* /var/lib/pleroma/uploads

# If you have created the custom public files directory with default settings it should be located in `~pleroma/instance/static`
mv ~pleroma/instance/static /var/lib/pleroma/static
```

### Moving emoji
Assuming you have all emojis in subdirectories of `priv/static/emoji`, moving them can be done with
```sh
mkdir /var/lib/pleroma/static/emoji
ls -d ~pleroma/priv/static/emoji/*/ | xargs -i sh -c 'mv "{}" "/var/lib/pleroma/static/emoji/$(basename {})"'
```

But if for some reason you have custom emojis in the root directory, you should copy the whole directory instead.
```sh
mv ~pleroma/priv/static/emoji /var/lib/pleroma/static/emoji
```
and then copy custom emojis to `/var/lib/pleroma/static/emoji/custom`.

This is needed because storing custom emojis in the root directory is deprecated, but if you just move them to `/var/lib/pleroma/static/emoji/custom` it will break emoji urls on old posts.

Note that globs have been replaced with `pack_extensions`, so if your emojis are not in png/gif you should [modify the default value](config.html#emoji).

### Moving the config
```sh
# Create the config directory
# The default path for Pleroma config is /etc/pleroma/config.exs
# but it can be set via the PLEROMA_CONFIG_PATH environment variable
mkdir -p /etc/pleroma

# Move the config file
mv ~pleroma/config/prod.secret.exs /etc/pleroma/config.exs

# Change `use Mix.Config` at the top to `import Config`
$EDITOR /etc/pleroma/config.exs
```
## Installing the release
Before proceeding, get the flavour from the [Detecting flavour](otp_en.html#detecting-flavour) section in the OTP installation guide.
```sh
# Delete all files in pleroma user's directory
rm -r ~pleroma/*

# Set the flavour environment variable to the string you got in the Detecting flavour section.
# For example, if the flavour is `arm64-musl` the command will be
export FLAVOUR="arm64-musl"

# Clone the release build into a temporary directory and unpack it
# Replace `master` with `develop` if you want to run the develop branch
su pleroma -s $SHELL -lc "
curl 'https://git.pleroma.social/api/v4/projects/2/jobs/artifacts/master/download?job=$FLAVOUR' -o /tmp/pleroma.zip
unzip /tmp/pleroma.zip -d /tmp/
"

# Move the release to the home directory and delete temporary files
su pleroma -s $SHELL -lc "
mv /tmp/release/* ~pleroma/
rmdir /tmp/release
rm /tmp/pleroma.zip
"

# Start the instance to verify that everything is working as expected
su pleroma -s $SHELL -lc "./bin/pleroma daemon"

# Wait for about 20 seconds and query the instance endpoint; if it shows your uri, name and email correctly, you are configured correctly
sleep 20 && curl http://localhost:4000/api/v1/instance

# Stop the instance
su pleroma -s $SHELL -lc "./bin/pleroma stop"
```

## Setting up a system service
OTP releases have different service files than from-source installs, so they need to be copied over again.

**Warning:** The service files assume the pleroma user's home directory is `/opt/pleroma`, please make sure all paths fit your installation.

Debian/Ubuntu:
```sh
# Copy the service into a proper directory
cp ~pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service

# Reload service files
systemctl daemon-reload

# Reenable pleroma to start on boot
systemctl reenable pleroma

# Start pleroma
systemctl start pleroma
```

Alpine:
```sh
# Copy the service into a proper directory
cp -f ~pleroma/installation/init.d/pleroma /etc/init.d/pleroma

# Start pleroma
rc-service pleroma start
```
## Running mix tasks
Refer to the [Running mix tasks](otp_en.html#running-mix-tasks) section of the OTP release installation guide.
## Updating
Refer to the [Updating](otp_en.html#updating) section of the OTP release installation guide.
@ -58,7 +58,7 @@ Clone the repository:

 ```
 $ cd /home/pleroma
-$ git clone https://git.pleroma.social/pleroma/pleroma.git
+$ git clone -b master https://git.pleroma.social/pleroma/pleroma.git
 ```

 Configure Pleroma. Note that you need a domain name at this point:
@ -29,7 +29,7 @@ This creates a "pleroma" login class and sets higher values than default for dat

 Create the \_pleroma user, assign it the pleroma login class and create its home directory (/home/\_pleroma/): `useradd -m -L pleroma _pleroma`

 #### Clone pleroma's directory
-Enter a shell as the \_pleroma user. As root, run `su _pleroma -;cd`. Then clone the repository with `git clone https://git.pleroma.social/pleroma/pleroma.git`. Pleroma is now installed in /home/\_pleroma/pleroma/, it will be configured and started at the end of this guide.
+Enter a shell as the \_pleroma user. As root, run `su _pleroma -;cd`. Then clone the repository with `git clone -b master https://git.pleroma.social/pleroma/pleroma.git`. Pleroma is now installed in /home/\_pleroma/pleroma/, it will be configured and started at the end of this guide.

 #### Postgresql
 Start a shell as the \_postgresql user (as root run `su _postgresql -`) then run the `initdb` command to initialize postgresql:
@ -44,7 +44,7 @@ Switch to the pleroma user and go to your home directory:

 Download pleroma's source code:

-`$ git clone https://git.pleroma.social/pleroma/pleroma.git`
+`$ git clone -b master https://git.pleroma.social/pleroma/pleroma.git`

 `$ cd pleroma`
261 docs/installation/otp_en.md Normal file
@ -0,0 +1,261 @@
# Installing on Linux using OTP releases

## Pre-requisites
* A machine running Linux with GNU (e.g. Debian, Ubuntu) or musl (e.g. Alpine) libc and an `x86_64`, `aarch64` or `armv7l` CPU, which you have root access to. If you are not sure if it's compatible, see the [Detecting flavour section](#detecting-flavour) below
* A (sub)domain pointed to the machine

You will be running commands as root. If you aren't root already, please elevate your privileges by executing `sudo su`/`su`.

While in theory OTP releases are possible to install on any compatible machine, for the sake of simplicity this guide focuses only on Debian/Ubuntu/Alpine.

### Detecting flavour

Paste the following into the shell:
```sh
arch="$(uname -m)";if [ "$arch" = "x86_64" ];then arch="amd64";elif [ "$arch" = "armv7l" ];then arch="arm";elif [ "$arch" = "aarch64" ];then arch="arm64";else echo "Unsupported arch: $arch">&2;fi;if getconf GNU_LIBC_VERSION>/dev/null;then libc_postfix="";elif [ "$(ldd 2>&1|head -c 9)" = "musl libc" ];then libc_postfix="-musl";elif [ "$(find /lib/libc.musl*|wc -l)" ];then libc_postfix="-musl";else echo "Unsupported libc">&2;fi;echo "$arch$libc_postfix"
```

If your platform is supported, the output will contain the flavour string; you will need it later. If not, this just means that we don't build releases for your platform, you can still try installing from source.

### Installing the required packages

Other than things bundled in the OTP release, Pleroma depends on:
* curl (to download the release build)
* unzip (needed to unpack release builds)
* ncurses (ERTS won't run without it)
* PostgreSQL (also utilizes extensions in postgresql-contrib)
* nginx (could be swapped with another reverse proxy but this guide covers only it)
* certbot (for Let's Encrypt certificates, could be swapped with another ACME client, but this guide covers only it)

Debian/Ubuntu:
```sh
apt install curl unzip libncurses5 postgresql postgresql-contrib nginx certbot
```
Alpine:

```sh
echo "http://nl.alpinelinux.org/alpine/latest-stable/community" >> /etc/apk/repositories
apk update
apk add curl unzip ncurses postgresql postgresql-contrib nginx certbot
```

## Setup
### Configuring PostgreSQL
#### (Optional) Installing RUM indexes
RUM indexes are an alternative indexing scheme that is not included in PostgreSQL by default. You can read more about them on the [Configuration page](config.html#rum-indexing-for-full-text-search). They are completely optional and most of the time are not worth it, especially if you are running a single user instance (unless you absolutely need ordered search results).

Debian/Ubuntu (available only on Buster/19.04):
```sh
apt install postgresql-11-rum
```
Alpine:
```sh
apk add git build-base postgresql-dev
git clone https://github.com/postgrespro/rum /tmp/rum
cd /tmp/rum
make USE_PGXS=1
make USE_PGXS=1 install
cd
rm -r /tmp/rum
```
#### (Optional) Performance configuration
For optimal performance, you may use [PGTune](https://pgtune.leopard.in.ua); don't forget to restart postgresql after editing the configuration

Debian/Ubuntu:
```sh
systemctl restart postgresql
```
Alpine:
```sh
rc-service postgresql restart
```
### Installing Pleroma
```sh
# Create the Pleroma user
adduser --system --shell /bin/false --home /opt/pleroma pleroma

# Set the flavour environment variable to the string you got in the Detecting flavour section.
# For example, if the flavour is `arm64-musl` the command will be
export FLAVOUR="arm64-musl"

# Clone the release build into a temporary directory and unpack it
su pleroma -s $SHELL -lc "
curl 'https://git.pleroma.social/api/v4/projects/2/jobs/artifacts/master/download?job=$FLAVOUR' -o /tmp/pleroma.zip
unzip /tmp/pleroma.zip -d /tmp/
"

# Move the release to the home directory and delete temporary files
su pleroma -s $SHELL -lc "
mv /tmp/release/* /opt/pleroma
rmdir /tmp/release
rm /tmp/pleroma.zip
"

# Create uploads directory and set proper permissions (skip if planning to use a remote uploader)
# Note: It does not have to be `/var/lib/pleroma/uploads`, the config generator will ask about the upload directory later
mkdir -p /var/lib/pleroma/uploads
chown -R pleroma /var/lib/pleroma

# Create custom public files directory (custom emojis, frontend bundle overrides, robots.txt, etc.)
# Note: It does not have to be `/var/lib/pleroma/static`, the config generator will ask about the custom public files directory later
mkdir -p /var/lib/pleroma/static
chown -R pleroma /var/lib/pleroma

# Create a config directory
mkdir -p /etc/pleroma
chown -R pleroma /etc/pleroma

# Run the config generator
su pleroma -s $SHELL -lc "./bin/pleroma_ctl instance gen --output /etc/pleroma/config.exs --output-psql /tmp/setup_db.psql"

# Create the postgres database
su postgres -s $SHELL -lc "psql -f /tmp/setup_db.psql"

# Create the database schema
su pleroma -s $SHELL -lc "./bin/pleroma_ctl migrate"

# If you have installed RUM indexes, uncomment and run
# su pleroma -s $SHELL -lc "./bin/pleroma_ctl migrate --migrations-path priv/repo/optional_migrations/rum_indexing/"

# Start the instance to verify that everything is working as expected
su pleroma -s $SHELL -lc "./bin/pleroma daemon"

# Wait for about 20 seconds and query the instance endpoint; if it shows your uri, name and email correctly, you are configured correctly
sleep 20 && curl http://localhost:4000/api/v1/instance

# Stop the instance
su pleroma -s $SHELL -lc "./bin/pleroma stop"
```

### Setting up nginx and getting Let's Encrypt SSL certificates

```sh
# Get a Let's Encrypt certificate
certbot certonly --standalone --preferred-challenges http -d yourinstance.tld

# Copy the Pleroma nginx configuration to the nginx folder
# The location of nginx configs is dependent on the distro

# For Debian/Ubuntu:
cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/sites-available/pleroma.nginx
ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/pleroma.nginx
# For Alpine:
cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/conf.d/pleroma.conf
# If your distro does not have either of those you can append
# `include /etc/nginx/pleroma.conf` to the end of the http section in /etc/nginx/nginx.conf and
cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/pleroma.conf

# Edit the nginx config replacing example.tld with your (sub)domain
$EDITOR path-to-nginx-config

# Verify that the config is valid
nginx -t

# Start nginx
# For Debian/Ubuntu:
systemctl start nginx
# For Alpine:
rc-service nginx start
```

At this point, if you open your (sub)domain in a browser you should see a 502 error; that's because Pleroma is not started yet.

### Setting up a system service
Debian/Ubuntu:
```sh
# Copy the service into a proper directory
cp /opt/pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service

# Start pleroma and enable it on boot
systemctl start pleroma
systemctl enable pleroma
```
Alpine:
```sh
# Copy the service into a proper directory
cp /opt/pleroma/installation/init.d/pleroma /etc/init.d/pleroma

# Start pleroma and enable it on boot
rc-service pleroma start
rc-update add pleroma
```

If everything worked, you should see Pleroma-FE when visiting your domain. If that didn't happen, try reviewing the installation steps, starting Pleroma in the foreground and seeing if there are any errors.

Still doesn't work? Feel free to contact us on [#pleroma on freenode](https://webchat.freenode.net/?channels=%23pleroma) or via matrix at <https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org>, you can also [file an issue on our Gitlab](https://git.pleroma.social/pleroma/pleroma/issues/new)

## Post installation

### Setting up auto-renew Let's Encrypt certificate
```sh
# Create the directory for webroot challenges
mkdir -p /var/lib/letsencrypt

# Uncomment the webroot method
$EDITOR path-to-nginx-config

# Verify that the config is valid
nginx -t
```
Debian/Ubuntu:
```sh
# Restart nginx
systemctl restart nginx

# Ensure the webroot method and post hook are working
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --dry-run --post-hook 'systemctl reload nginx'

# Add it to the daily cron
echo '#!/bin/sh
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --post-hook "systemctl reload nginx"
' > /etc/cron.daily/renew-pleroma-cert
chmod +x /etc/cron.daily/renew-pleroma-cert

# If everything worked the output should contain /etc/cron.daily/renew-pleroma-cert
run-parts --test /etc/cron.daily
```
Alpine:
```sh
# Restart nginx
rc-service nginx restart

# Start the cron daemon and make it start on boot
rc-service crond start
rc-update add crond

# Ensure the webroot method and post hook are working
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --dry-run --post-hook 'rc-service nginx reload'

# Add it to the daily cron
echo '#!/bin/sh
certbot renew --cert-name yourinstance.tld --webroot -w /var/lib/letsencrypt/ --post-hook "rc-service nginx reload"
' > /etc/periodic/daily/renew-pleroma-cert
chmod +x /etc/periodic/daily/renew-pleroma-cert

# If everything worked this should output /etc/periodic/daily/renew-pleroma-cert
run-parts --test /etc/periodic/daily
```
### Running mix tasks
Throughout the wiki and guides there are a lot of references to mix tasks. Since `mix` is a build tool, you can't just call `mix pleroma.task`; instead you should call `pleroma_ctl`, stripping the pleroma/ecto namespace.

So for example, if the task is `mix pleroma.user set admin --admin`, you should run it like this:
```sh
su pleroma -s $SHELL -lc "./bin/pleroma_ctl user set admin --admin"
```
### Updating
Generally, doing the following is enough:
```sh
# Download the new release
su pleroma -s $SHELL -lc "./bin/pleroma_ctl update"

# Migrate the database, you are advised to stop the instance before doing that
su pleroma -s $SHELL -lc "./bin/pleroma_ctl migrate"
```
But you should **always check the release notes/changelog** in case there are config deprecations, special update steps, etc.

## Further reading
* [Configuration](config.html)
* [Pleroma's base config.exs](https://git.pleroma.social/pleroma/pleroma/blob/master/config/config.exs)
* [Hardening your instance](hardening.html)
* [Pleroma Clients](clients.html)
* [Emoji pack manager](Mix.Tasks.Pleroma.Emoji.html)
2 elixir_buildpack.config Normal file
@ -0,0 +1,2 @@
elixir_version=1.8.2
erlang_version=21.3.7
932 installation/pleroma-mongooseim.cfg Executable file
@ -0,0 +1,932 @@
|
||||||
|
%%%
|
||||||
|
%%% ejabberd configuration file
|
||||||
|
%%%
|
||||||
|
%%%'
|
||||||
|
|
||||||
|
%%% The parameters used in this configuration file are explained in more detail
|
||||||
|
%%% in the ejabberd Installation and Operation Guide.
|
||||||
|
%%% Please consult the Guide in case of doubts, it is included with
|
||||||
|
%%% your copy of ejabberd, and is also available online at
|
||||||
|
%%% http://www.process-one.net/en/ejabberd/docs/
|
||||||
|
|
||||||
|
%%% This configuration file contains Erlang terms.
|
||||||
|
%%% In case you want to understand the syntax, here are the concepts:
|
||||||
|
%%%
|
||||||
|
%%% - The character to comment a line is %
|
||||||
|
%%%
|
||||||
|
%%% - Each term ends in a dot, for example:
|
||||||
|
%%% override_global.
|
||||||
|
%%%
|
||||||
|
%%% - A tuple has a fixed definition, its elements are
|
||||||
|
%%% enclosed in {}, and separated with commas:
|
||||||
|
%%% {loglevel, 4}.
|
||||||
|
%%%
|
||||||
|
%%% - A list can have as many elements as you want,
|
||||||
|
%%% and is enclosed in [], for example:
|
||||||
|
%%% [http_poll, web_admin, tls]
|
||||||
|
%%%
|
||||||
|
%%% Pay attention that list elements are delimited with commas,
|
||||||
|
%%% but no comma is allowed after the last list element. This will
|
||||||
|
%%% give a syntax error unlike in more lenient languages (e.g. Python).
|
||||||
|
%%%
|
||||||
|
%%% - A keyword of ejabberd is a word in lowercase.
|
||||||
|
%%% Strings are enclosed in "" and can contain spaces, dots, ...
|
||||||
|
%%% {language, "en"}.
|
||||||
|
%%% {ldap_rootdn, "dc=example,dc=com"}.
|
||||||
|
%%%
|
||||||
|
%%% - This term includes a tuple, a keyword, a list, and two strings:
|
||||||
|
%%% {hosts, ["jabber.example.net", "im.example.com"]}.
|
||||||
|
%%%
|
||||||
|
%%% - This config is preprocessed during release generation by a tool which
|
||||||
|
%%% interprets double curly braces as substitution markers, so avoid this
|
||||||
|
%%% syntax in this file (though it's valid Erlang).
|
||||||
|
%%%
|
||||||
|
%%% So this is OK (though arguably looks quite ugly):
|
||||||
|
%%% { {s2s_addr, "example-host.net"}, {127,0,0,1} }.
|
||||||
|
%%%
|
||||||
|
%%% And I can't give an example of what's not OK exactly because
|
||||||
|
%%% of this rule.
|
||||||
|
%%%
|
||||||
|
|
||||||
|
|
||||||
|
%%%. =======================
|
||||||
|
%%%' OVERRIDE STORED OPTIONS
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Override the old values stored in the database.
|
||||||
|
%%
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Override global options (shared by all ejabberd nodes in a cluster).
|
||||||
|
%%
|
||||||
|
%%override_global.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Override local options (specific for this particular ejabberd node).
|
||||||
|
%%
|
||||||
|
%%override_local.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Remove the Access Control Lists before new ones are added.
|
||||||
|
%%
|
||||||
|
%%override_acls.
|
||||||
|
|
||||||
|
|
||||||
|
%%%. =========
|
||||||
|
%%%' DEBUGGING
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% loglevel: Verbosity of log files generated by ejabberd.
|
||||||
|
%% 0: No ejabberd log at all (not recommended)
|
||||||
|
%% 1: Critical
|
||||||
|
%% 2: Error
|
||||||
|
%% 3: Warning
|
||||||
|
%% 4: Info
|
||||||
|
%% 5: Debug
|
||||||
|
%%
|
||||||
|
{loglevel, 3}.
|
||||||
|
|
||||||
|
%%%. ================
|
||||||
|
%%%' SERVED HOSTNAMES
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% hosts: Domains served by ejabberd.
|
||||||
|
%% You can define one or several, for example:
|
||||||
|
%% {hosts, ["example.net", "example.com", "example.org"]}.
|
||||||
|
%%
|
||||||
|
{hosts, ["pleroma.soykaf.com"] }.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% route_subdomains: Delegate subdomains to other XMPP servers.
|
||||||
|
%% For example, if this ejabberd serves example.org and you want
|
||||||
|
%% to allow communication with an XMPP server called im.example.org.
|
||||||
|
%%
|
||||||
|
%%{route_subdomains, s2s}.
|
||||||
|
|
||||||
|
|
||||||
|
%%%. ===============
|
||||||
|
%%%' LISTENING PORTS
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% listen: The ports ejabberd will listen on, which service each is handled
|
||||||
|
%% by and what options to start it with.
|
||||||
|
%%
|
||||||
|
{listen,
|
||||||
|
[
|
||||||
|
%% BOSH and WS endpoints over HTTP
|
||||||
|
{ 5280, ejabberd_cowboy, [
|
||||||
|
{num_acceptors, 10},
|
||||||
|
{transport_options, [{max_connections, 1024}]},
|
||||||
|
{modules, [
|
||||||
|
|
||||||
|
{"_", "/http-bind", mod_bosh},
|
||||||
|
{"_", "/ws-xmpp", mod_websockets, [{ejabberd_service, [
|
||||||
|
{access, all},
|
||||||
|
{shaper_rule, fast},
|
||||||
|
{ip, {127, 0, 0, 1}},
|
||||||
|
{password, "secret"}]}
|
||||||
|
%% Uncomment to enable connection dropping or/and server-side pings
|
||||||
|
%{timeout, 600000}, {ping_rate, 2000}
|
||||||
|
]}
|
||||||
|
%% Uncomment to serve static files
|
||||||
|
%{"_", "/static/[...]", cowboy_static,
|
||||||
|
% {dir, "/var/www", [{mimetypes, cow_mimetypes, all}]}
|
||||||
|
%},
|
||||||
|
|
||||||
|
%% Example usage of mod_revproxy
|
||||||
|
|
||||||
|
%% {"_", "/[...]", mod_revproxy, [{timeout, 5000},
|
||||||
|
%% % time limit for upstream to respond
|
||||||
|
%% {body_length, 8000000},
|
||||||
|
%% % maximum body size (may be infinity)
|
||||||
|
%% {custom_headers, [{<<"header">>,<<"value">>}]}
|
||||||
|
%% % list of extra headers that are sent to upstream
|
||||||
|
%% ]}
|
||||||
|
|
||||||
|
%% Example usage of mod_cowboy
|
||||||
|
|
||||||
|
%% {"_", "/[...]", mod_cowboy, [{http, mod_revproxy,
|
||||||
|
%% [{timeout, 5000},
|
||||||
|
%% % time limit for upstream to respond
|
||||||
|
%% {body_length, 8000000},
|
||||||
|
%% % maximum body size (may be infinity)
|
||||||
|
%% {custom_headers, [{<<"header">>,<<"value">>}]}
|
||||||
|
%% % list of extra headers that are sent to upstream
|
||||||
|
%% ]},
|
||||||
|
%% {ws, xmpp, mod_websockets}
|
||||||
|
%% ]}
|
||||||
|
]}
|
||||||
|
]},
|
||||||
|
|
||||||
|
%% BOSH and WS endpoints over HTTPS
|
||||||
|
{ 5285, ejabberd_cowboy, [
|
||||||
|
{num_acceptors, 10},
|
||||||
|
{transport_options, [{max_connections, 1024}]},
|
||||||
|
{ssl, [{certfile, "priv/ssl/fullchain.pem"}, {keyfile, "priv/ssl/privkey.pem"}, {password, ""}]},
|
||||||
|
{modules, [
|
||||||
|
{"_", "/http-bind", mod_bosh},
|
||||||
|
{"_", "/ws-xmpp", mod_websockets, [
|
||||||
|
%% Uncomment to enable connection dropping or/and server-side pings
|
||||||
|
%{timeout, 600000}, {ping_rate, 60000}
|
||||||
|
]}
|
||||||
|
%% Uncomment to serve static files
|
||||||
|
%{"_", "/static/[...]", cowboy_static,
|
||||||
|
% {dir, "/var/www", [{mimetypes, cow_mimetypes, all}]}
|
||||||
|
%},
|
||||||
|
]}
|
||||||
|
]},
|
||||||
|
|
||||||
|
%% MongooseIM HTTP API: it's important to start it on localhost
|
||||||
|
%% or some private interface only (not accessible from the outside)
|
||||||
|
%% At least start it on different port which will be hidden behind firewall
|
||||||
|
|
||||||
|
{ {8088, "127.0.0.1"} , ejabberd_cowboy, [
|
||||||
|
{num_acceptors, 10},
|
||||||
|
{transport_options, [{max_connections, 1024}]},
|
||||||
|
{modules, [
|
||||||
|
{"localhost", "/api", mongoose_api_admin, []}
|
||||||
|
]}
|
||||||
|
]},
|
||||||
|
|
||||||
|
{ 8089 , ejabberd_cowboy, [
|
||||||
|
{num_acceptors, 10},
|
||||||
|
{transport_options, [{max_connections, 1024}]},
|
||||||
|
{protocol_options, [{compress, true}]},
|
||||||
|
{ssl, [{certfile, "priv/ssl/fullchain.pem"}, {keyfile, "priv/ssl/privkey.pem"}, {password, ""}]},
|
||||||
|
{modules, [
|
||||||
|
{"_", "/api/sse", lasse_handler, [mongoose_client_api_sse]},
|
||||||
|
{"_", "/api/messages/[:with]", mongoose_client_api_messages, []},
|
||||||
|
{"_", "/api/contacts/[:jid]", mongoose_client_api_contacts, []},
|
||||||
|
{"_", "/api/rooms/[:id]", mongoose_client_api_rooms, []},
|
||||||
|
{"_", "/api/rooms/[:id]/config", mongoose_client_api_rooms_config, []},
|
||||||
|
{"_", "/api/rooms/:id/users/[:user]", mongoose_client_api_rooms_users, []},
|
||||||
|
{"_", "/api/rooms/[:id]/messages", mongoose_client_api_rooms_messages, []}
|
||||||
|
]}
|
||||||
|
]},
|
||||||
|
|
||||||
|
%% The following HTTP API is deprecated, the new one above should be used instead
|
||||||
|
|
||||||
|
{ {5288, "127.0.0.1"} , ejabberd_cowboy, [
|
||||||
|
{num_acceptors, 10},
|
||||||
|
{transport_options, [{max_connections, 1024}]},
|
||||||
|
{modules, [
|
||||||
|
{"localhost", "/api", mongoose_api, [{handlers, [mongoose_api_metrics,
|
||||||
|
mongoose_api_users]}]}
|
||||||
|
]}
|
||||||
|
]},
|
||||||
|
|
||||||
|
{ 5222, ejabberd_c2s, [
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% If TLS is compiled in and you installed a SSL
|
||||||
|
%% certificate, specify the full path to the
|
||||||
|
%% file and uncomment this line:
|
||||||
|
%%
|
||||||
|
{certfile, "priv/ssl/both.pem"}, starttls,
|
||||||
|
|
||||||
|
%%{zlib, 10000},
|
||||||
|
%% https://www.openssl.org/docs/apps/ciphers.html#CIPHER_STRINGS
|
||||||
|
%% {ciphers, "DEFAULT:!EXPORT:!LOW:!SSLv2"},
|
||||||
|
{access, c2s},
|
||||||
|
{shaper, c2s_shaper},
|
||||||
|
{max_stanza_size, 65536},
|
||||||
|
{protocol_options, ["no_sslv3"]}
|
||||||
|
|
||||||
|
]},
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% To enable the old SSL connection method on port 5223:
|
||||||
|
%%
|
||||||
|
%%{5223, ejabberd_c2s, [
|
||||||
|
%% {access, c2s},
|
||||||
|
%% {shaper, c2s_shaper},
|
||||||
|
%% {certfile, "/path/to/ssl.pem"}, tls,
|
||||||
|
%% {max_stanza_size, 65536}
|
||||||
|
%% ]},
|
||||||
|
|
||||||
|
{ 5269, ejabberd_s2s_in, [
|
||||||
|
{shaper, s2s_shaper},
|
||||||
|
{max_stanza_size, 131072},
|
||||||
|
{protocol_options, ["no_sslv3"]}
|
||||||
|
|
||||||
|
]}
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% ejabberd_service: Interact with external components (transports, ...)
|
||||||
|
%%
|
||||||
|
,{8888, ejabberd_service, [
|
||||||
|
{access, all},
|
||||||
|
{shaper_rule, fast},
|
||||||
|
{ip, {127, 0, 0, 1}},
|
||||||
|
{password, "secret"}
|
||||||
|
]}
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% ejabberd_stun: Handles STUN Binding requests
|
||||||
|
%%
|
||||||
|
%%{ {3478, udp}, ejabberd_stun, []}
|
||||||
|
|
||||||
|
]}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% s2s_use_starttls: Enable STARTTLS + Dialback for S2S connections.
|
||||||
|
%% Allowed values are: false optional required required_trusted
|
||||||
|
%% You must specify a certificate file.
|
||||||
|
%%
|
||||||
|
{s2s_use_starttls, optional}.
|
||||||
|
%%
|
||||||
|
%% s2s_certfile: Specify a certificate file.
|
||||||
|
%%
|
||||||
|
{s2s_certfile, "priv/ssl/both.pem"}.
|
||||||
|
|
||||||
|
%% https://www.openssl.org/docs/apps/ciphers.html#CIPHER_STRINGS
|
||||||
|
%% {s2s_ciphers, "DEFAULT:!EXPORT:!LOW:!SSLv2"}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% domain_certfile: Specify a different certificate for each served hostname.
|
||||||
|
%%
|
||||||
|
%%{domain_certfile, "example.org", "/path/to/example_org.pem"}.
|
||||||
|
%%{domain_certfile, "example.com", "/path/to/example_com.pem"}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% S2S whitelist or blacklist
|
||||||
|
%%
|
||||||
|
%% Default s2s policy for undefined hosts.
|
||||||
|
%%
|
||||||
|
{s2s_default_policy, deny }.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Allow or deny communication with specific servers.
|
||||||
|
%%
|
||||||
|
%%{ {s2s_host, "goodhost.org"}, allow}.
|
||||||
|
%%{ {s2s_host, "badhost.org"}, deny}.
|
||||||
|
|
||||||
|
{outgoing_s2s_port, 5269 }.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% IP addresses predefined for specific hosts to skip DNS lookups.
|
||||||
|
%% Ports defined here take precedence over outgoing_s2s_port.
|
||||||
|
%% Examples:
|
||||||
|
%%
|
||||||
|
%% { {s2s_addr, "example-host.net"}, {127,0,0,1} }.
|
||||||
|
%% { {s2s_addr, "example-host.net"}, { {127,0,0,1}, 5269 } }.
|
||||||
|
%% { {s2s_addr, "example-host.net"}, { {127,0,0,1}, 5269 } }.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Outgoing S2S options
|
||||||
|
%%
|
||||||
|
%% Preferred address families (which to try first) and connect timeout
|
||||||
|
%% in milliseconds.
|
||||||
|
%%
|
||||||
|
%%{outgoing_s2s_options, [ipv4, ipv6], 10000}.
|
||||||
|
%%
|
||||||
|
%%%. ==============
|
||||||
|
%%%' SESSION BACKEND
|
||||||
|
|
||||||
|
%%{sm_backend, {mnesia, []}}.
|
||||||
|
|
||||||
|
%% Requires {redis, global, default, ..., ...} outgoing pool
|
||||||
|
%%{sm_backend, {redis, []}}.
|
||||||
|
|
||||||
|
{sm_backend, {mnesia, []} }.
|
||||||
|
|
||||||
|
|
||||||
|
%%%. ==============
|
||||||
|
%%%' AUTHENTICATION
|
||||||
|
|
||||||
|
%% Advertised SASL mechanisms
|
||||||
|
{sasl_mechanisms, [cyrsasl_plain]}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% auth_method: Method used to authenticate the users.
|
||||||
|
%% The default method is the internal.
|
||||||
|
%% If you want to use a different method,
|
||||||
|
%% comment this line and enable the correct ones.
|
||||||
|
%%
|
||||||
|
%% {auth_method, internal }.
|
||||||
|
{auth_method, http }.
|
||||||
|
{auth_opts, [
|
||||||
|
{http, global, auth, [{workers, 50}], [{server, "https://pleroma.soykaf.com"}]},
|
||||||
|
{password_format, plain} % default
|
||||||
|
%% {password_format, scram}
|
||||||
|
|
||||||
|
%% {scram_iterations, 4096} % default
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% For auth_http:
|
||||||
|
%% {basic_auth, "user:password"}
|
||||||
|
%% {path_prefix, "/"} % default
|
||||||
|
%% auth_http requires {http, Host | global, auth, ..., ...} outgoing pool.
|
||||||
|
%%
|
||||||
|
%% For auth_external
|
||||||
|
%%{extauth_program, "/path/to/authentication/script"}.
|
||||||
|
%%
|
||||||
|
%% For auth_jwt
|
||||||
|
%% {jwt_secret_source, "/path/to/file"},
|
||||||
|
%% {jwt_algorithm, "RS256"},
|
||||||
|
%% {jwt_username_key, user}
|
||||||
|
%% For cyrsasl_external
|
||||||
|
%% {authenticate_with_cn, false}
|
||||||
|
{cyrsasl_external, standard}
|
||||||
|
]}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Authentication using external script
|
||||||
|
%% Make sure the script is executable by ejabberd.
|
||||||
|
%%
|
||||||
|
%%{auth_method, external}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Authentication using RDBMS
|
||||||
|
%% Remember to setup a database in the next section.
|
||||||
|
%%
|
||||||
|
%%{auth_method, rdbms}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Authentication using LDAP
|
||||||
|
%%
|
||||||
|
%%{auth_method, ldap}.
|
||||||
|
%%
|
||||||
|
|
||||||
|
%% List of LDAP servers:
|
||||||
|
%%{ldap_servers, ["localhost"]}.
|
||||||
|
%%
|
||||||
|
%% Encryption of connection to LDAP servers:
|
||||||
|
%%{ldap_encrypt, none}.
|
||||||
|
%%{ldap_encrypt, tls}.
|
||||||
|
%%
|
||||||
|
%% Port to connect to on LDAP servers:
|
||||||
|
%%{ldap_port, 389}.
|
||||||
|
%%{ldap_port, 636}.
|
||||||
|
%%
|
||||||
|
%% LDAP manager:
|
||||||
|
%%{ldap_rootdn, "dc=example,dc=com"}.
|
||||||
|
%%
|
||||||
|
%% Password of LDAP manager:
|
||||||
|
%%{ldap_password, "******"}.
|
||||||
|
%%
|
||||||
|
%% Search base of LDAP directory:
|
||||||
|
%%{ldap_base, "dc=example,dc=com"}.
|
||||||
|
%%
|
||||||
|
%% LDAP attribute that holds user ID:
|
||||||
|
%%{ldap_uids, [{"mail", "%u@mail.example.org"}]}.
|
||||||
|
%%
|
||||||
|
%% LDAP filter:
|
||||||
|
%%{ldap_filter, "(objectClass=shadowAccount)"}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Anonymous login support:
|
||||||
|
%% auth_method: anonymous
|
||||||
|
%% anonymous_protocol: sasl_anon | login_anon | both
|
||||||
|
%% allow_multiple_connections: true | false
|
||||||
|
%%
|
||||||
|
%%{host_config, "public.example.org", [{auth_method, anonymous},
|
||||||
|
%% {allow_multiple_connections, false},
|
||||||
|
%% {anonymous_protocol, sasl_anon}]}.
|
||||||
|
%%
|
||||||
|
%% To use both anonymous and internal authentication:
|
||||||
|
%%
|
||||||
|
%%{host_config, "public.example.org", [{auth_method, [internal, anonymous]}]}.
|
||||||
|
|
||||||
|
|
||||||
|
%%%. ==============
|
||||||
|
%%%' OUTGOING CONNECTIONS (e.g. DB)
|
||||||
|
|
||||||
|
%% Here you may configure all outgoing connections used by MongooseIM,
|
||||||
|
%% e.g. to RDBMS (such as MySQL), Riak or external HTTP components.
|
||||||
|
%% Default MongooseIM configuration uses only Mnesia (non-Mnesia extensions are disabled),
|
||||||
|
%% so no options here are uncommented out of the box.
|
||||||
|
%% This section includes configuration examples; for comprehensive guide
|
||||||
|
%% please consult MongooseIM documentation, page "Outgoing connections":
|
||||||
|
%% - doc/advanced-configuration/outgoing-connections.md
|
||||||
|
%% - https://mongooseim.readthedocs.io/en/latest/advanced-configuration/outgoing-connections/
|
||||||
|
|
||||||
|
|
||||||
|
{outgoing_pools, [
|
||||||
|
% {riak, global, default, [{workers, 5}], [{address, "127.0.0.1"}, {port, 8087}]},
|
||||||
|
% {elastic, global, default, [], [{host, "elastic.host.com"}, {port, 9042}]},
|
||||||
|
{http, global, auth, [{workers, 50}], [{server, "https://pleroma.soykaf.com"}]}
|
||||||
|
% {cassandra, global, default, [{workers, 100}], [{servers, [{"server1", 9042}]}, {keyspace, "big_mongooseim"}]},
|
||||||
|
% {rdbms, global, default, [{workers, 10}], [{server, {mysql, "server", 3306, "database", "username", "password"}}]}
|
||||||
|
]}.
|
||||||
|
|
||||||
|
%% More examples that may be added to outgoing_pools list:
|
||||||
|
%%
|
||||||
|
%% == MySQL ==
|
||||||
|
%% {rdbms, global, default, [{workers, 10}],
|
||||||
|
%% [{server, {mysql, "server", 3306, "database", "username", "password"}},
|
||||||
|
%% {keepalive_interval, 10}]},
|
||||||
|
%% keepalive_interval is optional
|
||||||
|
|
||||||
|
%% == PostgreSQL ==
|
||||||
|
%% {rdbms, global, default, [{workers, 10}],
|
||||||
|
%% [{server, {pgsql, "server", 5432, "database", "username", "password"}}]},
|
||||||
|
|
||||||
|
%% == ODBC (MSSQL) ==
|
||||||
|
%% {rdbms, global, default, [{workers, 10}],
|
||||||
|
%% [{server, "DSN=mongooseim;UID=mongooseim;PWD=mongooseim"}]},
|
||||||
|
|
||||||
|
%% == Elastic Search ==
|
||||||
|
%% {elastic, global, default, [], [{host, "elastic.host.com"}, {port, 9042}]},
|
||||||
|
|
||||||
|
%% == Riak ==
|
||||||
|
%% {riak, global, default, [{workers, 20}], [{address, "127.0.0.1"}, {port, 8087}]},
|
||||||
|
|
||||||
|
%% == HTTP ==
|
||||||
|
%% {http, global, conn1, [{workers, 50}], [{server, "http://server:8080"}]},
|
||||||
|
|
||||||
|
%% == Cassandra ==
|
||||||
|
%% {cassandra, global, default, [{workers, 100}],
|
||||||
|
%% [
|
||||||
|
%% {servers, [
|
||||||
|
%% {"cassandra_server1.example.com", 9042},
|
||||||
|
%% {"cassandra_server2.example.com", 9042},
|
||||||
|
%% {"cassandra_server3.example.com", 9042},
|
||||||
|
%% {"cassandra_server4.example.com", 9042}
|
||||||
|
%% ]},
|
||||||
|
%% {keyspace, "big_mongooseim"}
|
||||||
|
%% ]}
|
||||||
|
|
||||||
|
%% == Extra options ==
|
||||||
|
%%
|
||||||
|
%% If you use PostgreSQL, have a large database, and need a
|
||||||
|
%% faster but inexact replacement for "select count(*) from users"
|
||||||
|
%%
|
||||||
|
%%{pgsql_users_number_estimate, true}.
|
||||||
|
%%
|
||||||
|
%% rdbms_server_type specifies what database is used over the RDBMS layer
|
||||||
|
%% Can take values mssql, pgsql, mysql
|
||||||
|
%% In some cases (for example for MAM with pgsql) it is required to set proper value.
|
||||||
|
%%
|
||||||
|
%% {rdbms_server_type, pgsql}.
|
||||||
|
|
||||||
|
%%%. ===============
|
||||||
|
%%%' TRAFFIC SHAPERS
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% The "normal" shaper limits traffic speed to 1000 B/s
|
||||||
|
%%
|
||||||
|
{shaper, normal, {maxrate, 1000}}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% The "fast" shaper limits traffic speed to 50000 B/s
|
||||||
|
%%
|
||||||
|
{shaper, fast, {maxrate, 50000}}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% This option specifies the maximum number of elements in the queue
|
||||||
|
%% of the FSM. Refer to the documentation for details.
|
||||||
|
%%
|
||||||
|
{max_fsm_queue, 1000}.
|
||||||
|
|
||||||
|
%%%. ====================
|
||||||
|
%%%' ACCESS CONTROL LISTS
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% The 'admin' ACL grants administrative privileges to XMPP accounts.
|
||||||
|
%% You can put here as many accounts as you want.
|
||||||
|
%%
|
||||||
|
%{acl, admin, {user, "alice", "localhost"}}.
|
||||||
|
%{acl, admin, {user, "a", "localhost"}}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Blocked users
|
||||||
|
%%
|
||||||
|
%%{acl, blocked, {user, "baduser", "example.org"}}.
|
||||||
|
%%{acl, blocked, {user, "test"}}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Local users: don't modify this line.
|
||||||
|
%%
|
||||||
|
{acl, local, {user_regexp, ""}}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% More examples of ACLs
|
||||||
|
%%
|
||||||
|
%%{acl, jabberorg, {server, "jabber.org"}}.
|
||||||
|
%%{acl, aleksey, {user, "aleksey", "jabber.ru"}}.
|
||||||
|
%%{acl, test, {user_regexp, "^test"}}.
|
||||||
|
%%{acl, test, {user_glob, "test*"}}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Define specific ACLs in a virtual host.
|
||||||
|
%%
|
||||||
|
%%{host_config, "localhost",
|
||||||
|
%% [
|
||||||
|
%% {acl, admin, {user, "bob-local", "localhost"}}
|
||||||
|
%% ]
|
||||||
|
%%}.
|
||||||
|
|
||||||
|
%%%. ============
|
||||||
|
%%%' ACCESS RULES
|
||||||
|
|
||||||
|
%% Maximum number of simultaneous sessions allowed for a single user:
|
||||||
|
{access, max_user_sessions, [{10, all}]}.
|
||||||
|
|
||||||
|
%% Maximum number of offline messages that users can have:
|
||||||
|
{access, max_user_offline_messages, [{5000, admin}, {100, all}]}.
|
||||||
|
|
||||||
|
%% This rule allows access only for local users:
|
||||||
|
{access, local, [{allow, local}]}.
|
||||||
|
|
||||||
|
%% Only non-blocked users can use c2s connections:
|
||||||
|
{access, c2s, [{deny, blocked},
|
||||||
|
{allow, all}]}.
|
||||||
|
|
||||||
|
%% For C2S connections, all users except admins use the "normal" shaper
|
||||||
|
{access, c2s_shaper, [{none, admin},
|
||||||
|
{normal, all}]}.
|
||||||
|
|
||||||
|
%% All S2S connections use the "fast" shaper
|
||||||
|
{access, s2s_shaper, [{fast, all}]}.
|
||||||
|
|
||||||
|
%% Admins of this server are also admins of the MUC service:
|
||||||
|
{access, muc_admin, [{allow, admin}]}.
|
||||||
|
|
||||||
|
%% Only accounts of the local ejabberd server can create rooms:
|
||||||
|
{access, muc_create, [{allow, local}]}.
|
||||||
|
|
||||||
|
%% All users are allowed to use the MUC service:
|
||||||
|
{access, muc, [{allow, all}]}.
|
||||||
|
|
||||||
|
%% In-band registration allows registration of any possible username.
|
||||||
|
%% To disable in-band registration, replace 'allow' with 'deny'.
|
||||||
|
{access, register, [{allow, all}]}.
|
||||||
|
|
||||||
|
%% By default the frequency of account registrations from the same IP
|
||||||
|
%% is limited to 1 account every 10 minutes. To disable, specify: infinity
|
||||||
|
{registration_timeout, infinity}.
|
||||||
|
|
||||||
|
%% Default settings for MAM.
|
||||||
|
%% To set non-standard value, replace 'default' with 'allow' or 'deny'.
|
||||||
|
%% Only user can access his/her archive by default.
|
||||||
|
%% An online user can read room's archive by default.
|
||||||
|
%% Only an owner can change settings and purge messages by default.
|
||||||
|
%% Empty list (i.e. `[]`) means `[{deny, all}]`.
|
||||||
|
{access, mam_set_prefs, [{default, all}]}.
|
||||||
|
{access, mam_get_prefs, [{default, all}]}.
|
||||||
|
{access, mam_lookup_messages, [{default, all}]}.
|
||||||
|
{access, mam_purge_single_message, [{default, all}]}.
|
||||||
|
{access, mam_purge_multiple_messages, [{default, all}]}.
|
||||||
|
|
||||||
|
%% 1 command of the specified type per second.
|
||||||
|
{shaper, mam_shaper, {maxrate, 1}}.
|
||||||
|
%% This shaper is primarily for Mnesia overload protection during stress testing.
|
||||||
|
%% The limit is 1000 operations of each type per second.
|
||||||
|
{shaper, mam_global_shaper, {maxrate, 1000}}.
|
||||||
|
|
||||||
|
{access, mam_set_prefs_shaper, [{mam_shaper, all}]}.
|
||||||
|
{access, mam_get_prefs_shaper, [{mam_shaper, all}]}.
|
||||||
|
{access, mam_lookup_messages_shaper, [{mam_shaper, all}]}.
|
||||||
|
{access, mam_purge_single_message_shaper, [{mam_shaper, all}]}.
|
||||||
|
{access, mam_purge_multiple_messages_shaper, [{mam_shaper, all}]}.
|
||||||
|
|
||||||
|
{access, mam_set_prefs_global_shaper, [{mam_global_shaper, all}]}.
|
||||||
|
{access, mam_get_prefs_global_shaper, [{mam_global_shaper, all}]}.
|
||||||
|
{access, mam_lookup_messages_global_shaper, [{mam_global_shaper, all}]}.
|
||||||
|
{access, mam_purge_single_message_global_shaper, [{mam_global_shaper, all}]}.
|
||||||
|
{access, mam_purge_multiple_messages_global_shaper, [{mam_global_shaper, all}]}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Define specific Access Rules in a virtual host.
|
||||||
|
%%
|
||||||
|
%%{host_config, "localhost",
|
||||||
|
%% [
|
||||||
|
%% {access, c2s, [{allow, admin}, {deny, all}]},
|
||||||
|
%% {access, register, [{deny, all}]}
|
||||||
|
%% ]
|
||||||
|
%%}.
|
||||||
|
|
||||||
|
%%%. ================
|
||||||
|
%%%' DEFAULT LANGUAGE
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% language: Default language used for server messages.
|
||||||
|
%%
|
||||||
|
{language, "en"}.
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Set a different default language in a virtual host.
|
||||||
|
%%
|
||||||
|
%%{host_config, "localhost",
|
||||||
|
%% [{language, "ru"}]
|
||||||
|
%%}.
|
||||||
|
|
||||||
|
%%%. ================
|
||||||
|
%%%' MISCELLANEOUS
|
||||||
|
|
||||||
|
{all_metrics_are_global, false }.
|
||||||
|
|
||||||
|
%%%. ========
|
||||||
|
%%%' SERVICES
|
||||||
|
|
||||||
|
%% Unlike modules, services are started per node and provide either features which are not
|
||||||
|
%% related to any particular host, or backend stuff which is used by modules.
|
||||||
|
%% This is handled by `mongoose_service` module.
|
||||||
|
|
||||||
|
{services,
|
||||||
|
[
|
||||||
|
{service_admin_extra, [{submods, [node, accounts, sessions, vcard,
|
||||||
|
roster, last, private, stanza, stats]}]}
|
||||||
|
]
|
||||||
|
}.
|
||||||
|
|
||||||
|
%%%. =======
|
||||||
|
%%%' MODULES
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Modules enabled in all mongooseim virtual hosts.
|
||||||
|
%% For list of possible modules options, check documentation.
|
||||||
|
%%
|
||||||
|
{modules,
|
||||||
|
[
|
||||||
|
|
||||||
|
%% The format for a single route is as follows:
|
||||||
|
%% {Host, Path, Method, Upstream}
|
||||||
|
%%
|
||||||
|
%% "_" can be used as wildcard for Host, Path and Method
|
||||||
|
%% Upstream can be either host (just http(s)://host:port) or uri
|
||||||
|
%% The difference is that host upstreams append whole path while
|
||||||
|
%% uri upstreams append only remainder that follows the matched Path
|
||||||
|
%% (this behaviour is similar to nginx's proxy_pass rules)
|
||||||
|
%%
|
||||||
|
%% Bindings can be used to match certain parts of host or path.
|
||||||
|
%% They will be later overlaid with parts of the upstream uri.
|
||||||
|
%%
|
||||||
|
%% {mod_revproxy,
|
||||||
|
%% [{routes, [{"www.erlang-solutions.com", "/admin", "_",
|
||||||
|
%% "https://www.erlang-solutions.com/"},
|
||||||
|
%% {":var.com", "/:var", "_", "http://localhost:8080/"},
|
||||||
|
%% {":domain.com", "/", "_", "http://localhost:8080/:domain"}]
|
||||||
|
%% }]},
|
||||||
|
|
||||||
|
% {mod_http_upload, [
|
||||||
|
%% Set max file size in bytes. Defaults to 10 MB.
|
||||||
|
%% Disabled if value is `undefined`.
|
||||||
|
% {max_file_size, 1024},
|
||||||
|
%% Use S3 storage backend
|
||||||
|
% {backend, s3},
|
||||||
|
%% Set options for S3 backend
|
||||||
|
% {s3, [
|
||||||
|
% {bucket_url, "http://s3-eu-west-1.amazonaws.com/konbucket2"},
|
||||||
|
% {region, "eu-west-1"},
|
||||||
|
% {access_key_id, "AKIAIAOAONIULXQGMOUA"},
|
||||||
|
% {secret_access_key, "dGhlcmUgYXJlIG5vIGVhc3RlciBlZ2dzIGhlcmVf"}
|
||||||
|
% ]}
|
||||||
|
% ]},
|
||||||
|
|
||||||
|
{mod_adhoc, []},
|
||||||
|
|
||||||
|
{mod_disco, [{users_can_see_hidden_services, false}]},
|
||||||
|
{mod_commands, []},
|
||||||
|
{mod_muc_commands, []},
|
||||||
|
{mod_muc_light_commands, []},
|
||||||
|
{mod_last, []},
|
||||||
|
{mod_stream_management, [
|
||||||
|
% default 100
|
||||||
|
% size of a buffer of unacked messages
|
||||||
|
% {buffer_max, 100}
|
||||||
|
|
||||||
|
% default 1 - server sends the ack request after each stanza
|
||||||
|
% {ack_freq, 1}
|
||||||
|
|
||||||
|
% default: 600 seconds
|
||||||
|
% {resume_timeout, 600}
|
||||||
|
]},
|
||||||
|
%% {mod_muc_light, [{host, "muclight.@HOST@"}]},
|
||||||
|
%% {mod_muc, [{host, "muc.@HOST@"},
|
||||||
|
%% {access, muc},
|
||||||
|
%% {access_create, muc_create}
|
||||||
|
%% ]},
|
||||||
|
%% {mod_muc_log, [
|
||||||
|
%% {outdir, "/tmp/muclogs"},
|
||||||
|
%% {access_log, muc}
|
||||||
|
%% ]},
|
||||||
|
{mod_offline, [{access_max_user_messages, max_user_offline_messages}]},
|
||||||
|
{mod_privacy, []},
|
||||||
|
{mod_blocking, []},
|
||||||
|
{mod_private, []},
|
||||||
|
% {mod_private, [{backend, mnesia}]},
|
||||||
|
% {mod_private, [{backend, rdbms}]},
|
||||||
|
% {mod_register, [
|
||||||
|
% %%
|
||||||
|
% %% Set the minimum informational entropy for passwords.
|
||||||
|
% %%
|
||||||
|
% %%{password_strength, 32},
|
||||||
|
%
|
||||||
|
% %%
|
||||||
|
% %% After successful registration, the user receives
|
||||||
|
% %% a message with this subject and body.
|
||||||
|
% %%
|
||||||
|
% {welcome_message, {""}},
|
||||||
|
%
|
||||||
|
% %%
|
||||||
|
% %% When a user registers, send a notification to
|
||||||
|
% %% these XMPP accounts.
|
||||||
|
% %%
|
||||||
|
%
|
||||||
|
%
|
||||||
|
% %%
|
||||||
|
% %% Only clients in the server machine can register accounts
|
||||||
|
% %%
|
||||||
|
% {ip_access, [{allow, "127.0.0.0/8"},
|
||||||
|
% {deny, "0.0.0.0/0"}]},
|
||||||
|
%
|
||||||
|
% %%
|
||||||
|
% %% Local c2s or remote s2s users cannot register accounts
|
||||||
|
% %%
|
||||||
|
% %%{access_from, deny},
|
||||||
|
%
|
||||||
|
% {access, register}
|
||||||
|
% ]},
|
||||||
|
{mod_roster, []},
|
||||||
|
{mod_sic, []},
|
||||||
|
{mod_vcard, [%{matches, 1},
|
||||||
|
%{search, true},
|
||||||
|
%{ldap_search_operator, 'or'}, %% either 'or' or 'and'
|
||||||
|
%{ldap_binary_search_fields, [<<"PHOTO">>]},
|
||||||
|
%% list of binary search fields (as in vcard after mapping)
|
||||||
|
{host, "vjud.@HOST@"}
|
||||||
|
]},
|
||||||
|
{mod_bosh, []},
|
||||||
|
{mod_carboncopy, []}
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Message Archive Management (MAM, XEP-0313) for registered users and
|
||||||
|
%% Multi-User chats (MUCs).
|
||||||
|
%%
|
||||||
|
|
||||||
|
% {mod_mam_meta, [
|
||||||
|
%% Use RDBMS backend (default)
|
||||||
|
% {backend, rdbms},
|
||||||
|
|
||||||
|
%% Do not store user preferences (default)
|
||||||
|
% {user_prefs_store, false},
|
||||||
|
%% Store user preferences in RDBMS
|
||||||
|
% {user_prefs_store, rdbms},
|
||||||
|
%% Store user preferences in Mnesia (recommended).
|
||||||
|
%% The preferences store will be called each time, as a message is routed.
|
||||||
|
%% That is why Mnesia is better suited for this job.
|
||||||
|
% {user_prefs_store, mnesia},
|
||||||
|
|
||||||
|
%% Enables a pool of asynchronous writers. (default)
|
||||||
|
%% Messages will be grouped together based on archive id.
|
||||||
|
% {async_writer, true},
|
||||||
|
|
||||||
|
%% Cache information about users (default)
|
||||||
|
% {cache_users, true},
|
||||||
|
|
||||||
|
%% Enable archivization for private messages (default)
|
||||||
|
% {pm, [
|
||||||
|
%% Top-level options can be overridden here if needed, for example:
|
||||||
|
% {async_writer, false}
|
||||||
|
% ]},
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Message Archive Management (MAM) for multi-user chats (MUC).
|
||||||
|
%% Enable XEP-0313 for "muc.@HOST@".
|
||||||
|
%%
|
||||||
|
% {muc, [
|
||||||
|
% {host, "muc.@HOST@"}
|
||||||
|
%% As with pm, top-level options can be overridden for MUC archive
|
||||||
|
% ]},
|
||||||
|
%
|
||||||
|
%% Do not use a <stanza-id/> element (by default stanzaid is used)
|
||||||
|
% no_stanzaid_element,
|
||||||
|
% ]},
|
||||||
|
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% MAM configuration examples
|
||||||
|
%%
|
||||||
|
|
||||||
|
%% Only MUC, no user-defined preferences, good performance.
|
||||||
|
% {mod_mam_meta, [
|
||||||
|
% {backend, rdbms},
|
||||||
|
% {pm, false},
|
||||||
|
% {muc, [
|
||||||
|
% {host, "muc.@HOST@"}
|
||||||
|
% ]}
|
||||||
|
% ]},
|
||||||
|
|
||||||
|
%% Only archives for c2c messages, good performance.
|
||||||
|
% {mod_mam_meta, [
|
||||||
|
% {backend, rdbms},
|
||||||
|
% {pm, [
|
||||||
|
% {user_prefs_store, mnesia}
|
||||||
|
% ]}
|
||||||
|
% ]},
|
||||||
|
|
||||||
|
%% Basic configuration for c2c messages, bad performance, easy to debug.
|
||||||
|
% {mod_mam_meta, [
|
||||||
|
% {backend, rdbms},
|
||||||
|
% {async_writer, false},
|
||||||
|
% {cache_users, false}
|
||||||
|
% ]},
|
||||||
|
|
||||||
|
%% Cassandra archive for c2c and MUC conversations.
|
||||||
|
%% No custom settings supported (always archive).
|
||||||
|
% {mod_mam_meta, [
|
||||||
|
% {backend, cassandra},
|
||||||
|
% {user_prefs_store, cassandra},
|
||||||
|
% {muc, [{host, "muc.@HOST@"}]}
|
||||||
|
% ]}
|
||||||
|
|
||||||
|
% {mod_event_pusher, [
|
||||||
|
% {backends, [
|
||||||
|
% %%
|
||||||
|
% %% Configuration for Amazon SNS notifications.
|
||||||
|
% %%
|
||||||
|
% {sns, [
|
||||||
|
% %% AWS credentials, region and host configuration
|
||||||
|
% {access_key_id, "AKIAJAZYHOIPY6A2PESA"},
|
||||||
|
% {secret_access_key, "c3RvcCBsb29raW5nIGZvciBlYXN0ZXIgZWdncyxr"},
|
||||||
|
% {region, "eu-west-1"},
|
||||||
|
% {account_id, "251423380551"},
|
||||||
|
% {region, "eu-west-1"},
|
||||||
|
% {sns_host, "sns.eu-west-1.amazonaws.com"},
|
||||||
|
%
|
||||||
|
% %% Messages from this MUC host will be sent to the SNS topic
|
||||||
|
% {muc_host, "muc.@HOST@"},
|
||||||
|
%
|
||||||
|
% %% Plugin module for defining custom message attributes and user identification
|
||||||
|
% {plugin_module, mod_event_pusher_sns_defaults},
|
||||||
|
%
|
||||||
|
% %% Topic name configurations. Removing a topic will disable this specific SNS notification
|
||||||
|
% {presence_updates_topic, "user_presence_updated-dev-1"}, %% For presence updates
|
||||||
|
% {pm_messages_topic, "user_message_sent-dev-1"}, %% For private chat messages
|
||||||
|
% {muc_messages_topic, "user_messagegroup_sent-dev-1"} %% For group chat messages
|
||||||
|
%
|
||||||
|
% %% Pool options
|
||||||
|
% {pool_size, 100}, %% Worker pool size for publishing notifications
|
||||||
|
% {publish_retry_count, 2}, %% Retry count in case of publish error
|
||||||
|
% {publish_retry_time_ms, 50} %% Base exponential backoff time (in ms) for publish errors
|
||||||
|
% ]}
|
||||||
|
% ]}
|
||||||
|
|
||||||
|
]}.
|
||||||
|
|
||||||
|
|
||||||
|
%%
|
||||||
|
%% Enable modules with custom options in a specific virtual host
|
||||||
|
%%
|
||||||
|
%%{host_config, "localhost",
|
||||||
|
%% [{ {add, modules},
|
||||||
|
%% [
|
||||||
|
%% {mod_some_module, []}
|
||||||
|
%% ]
|
||||||
|
%% }
|
||||||
|
%% ]}.
|
||||||
|
|
||||||
|
%%%.
|
||||||
|
%%%'
|
||||||
|
|
||||||
|
%%% $Id$
|
||||||
|
|
||||||
|
%%% Local Variables:
|
||||||
|
%%% mode: erlang
|
||||||
|
%%% End:
|
||||||
|
%%% vim: set filetype=erlang tabstop=8 foldmarker=%%%',%%%. foldmethod=marker:
|
||||||
|
%%%.

@ -14,17 +14,19 @@ server {

     listen 80;
     listen [::]:80;
-    return 301 https://$server_name$request_uri;

     # Uncomment this if you need to use the 'webroot' method with certbot. Make sure
     # that the directory exists and that it is accessible by the webserver. If you followed
-    # the guide, you already ran 'sudo mkdir -p /var/lib/letsencrypt' to create the folder.
+    # the guide, you already ran 'mkdir -p /var/lib/letsencrypt' to create the folder.
     # You may need to load this file with the ssl server block commented out, run certbot
     # to get the certificate, and then uncomment it.
     #
     # location ~ /\.well-known/acme-challenge {
-    #     root /var/lib/letsencrypt/.well-known/acme-challenge;
+    #     root /var/lib/letsencrypt/;
     # }
+    location / {
+        return 301 https://$server_name$request_uri;
+    }
 }

 # Enable SSL session caching for improved performance

@ -1,3 +1,7 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
 defmodule Pleroma.Healthcheck do
   @moduledoc """
   Module collects metrics about app and assign healthy status.

@ -29,13 +33,13 @@ def system_info do
   end

   defp assign_db_info(healthcheck) do
-    database = Application.get_env(:pleroma, Repo)[:database]
+    database = Pleroma.Config.get([Repo, :database])

     query =
       "select state, count(pid) from pg_stat_activity where datname = '#{database}' group by state;"

     result = Repo.query!(query)
-    pool_size = Application.get_env(:pleroma, Repo)[:pool_size]
+    pool_size = Pleroma.Config.get([Repo, :pool_size])

     db_info =
       Enum.reduce(result.rows, %{active: 0, idle: 0}, fn [state, cnt], states ->
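The hunk above swaps direct `Application.get_env/2` lookups for the `Pleroma.Config` wrapper. As a hedged sketch of the two access styles, assuming (as the diff implies) that `Pleroma.Config.get/1` resolves the same keys from the `:pleroma` application environment and that `Repo` is the aliased `Pleroma.Repo`:

```elixir
# Old style: read the Repo settings straight out of the application environment
database = Application.get_env(:pleroma, Repo)[:database]
pool_size = Application.get_env(:pleroma, Repo)[:pool_size]

# New style: Pleroma.Config takes a key path and performs the lookup
database = Pleroma.Config.get([Repo, :database])
pool_size = Pleroma.Config.get([Repo, :pool_size])
```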
@ -1,3 +1,7 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
 Postgrex.Types.define(
   Pleroma.PostgresTypes,
   [] ++ Ecto.Adapters.Postgres.extensions(),
67 lib/mix/pleroma.ex Normal file
@ -0,0 +1,67 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Mix.Pleroma do
  @doc "Common functions to be reused in mix tasks"
  def start_pleroma do
    Application.put_env(:phoenix, :serve_endpoints, false, persistent: true)
    {:ok, _} = Application.ensure_all_started(:pleroma)
  end

  def load_pleroma do
    Application.load(:pleroma)
  end

  def get_option(options, opt, prompt, defval \\ nil, defname \\ nil) do
    Keyword.get(options, opt) || shell_prompt(prompt, defval, defname)
  end

  def shell_prompt(prompt, defval \\ nil, defname \\ nil) do
    prompt_message = "#{prompt} [#{defname || defval}] "

    input =
      if mix_shell?(),
        do: Mix.shell().prompt(prompt_message),
        else: :io.get_line(prompt_message)

    case input do
      "\n" ->
        case defval do
          nil ->
            shell_prompt(prompt, defval, defname)

          defval ->
            defval
        end

      input ->
        String.trim(input)
    end
  end

  def shell_yes?(message) do
    if mix_shell?(),
      do: Mix.shell().yes?("Continue?"),
      else: shell_prompt(message, "Continue?") in ~w(Yn Y y)
  end

  def shell_info(message) do
    if mix_shell?(),
      do: Mix.shell().info(message),
      else: IO.puts(message)
  end

  def shell_error(message) do
    if mix_shell?(),
      do: Mix.shell().error(message),
      else: IO.puts(:stderr, message)
  end

  @doc "Performs a safe check whether `Mix.shell/0` is available (does not raise if Mix is not loaded)"
  def mix_shell?, do: :erlang.function_exported(Mix, :shell, 0)

  def escape_sh_path(path) do
    ~S(') <> String.replace(path, ~S('), ~S(\')) <> ~S(')
  end
end
|
|
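For context, a hypothetical task (not part of this commit) showing how the new Mix.Pleroma helpers are meant to be used from a mix task; the task name, option and prompt text are made up for illustration:

    defmodule Mix.Tasks.Pleroma.Hello do
      use Mix.Task
      import Mix.Pleroma

      @shortdoc "Prints a greeting for a configured instance"
      def run(args) do
        start_pleroma()

        {options, [], []} = OptionParser.parse(args, strict: [name: :string])
        name = get_option(options, :name, "What is the instance name?", "Pleroma")

        shell_info("Hello from #{name}!")
      end
    end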
@@ -1,19 +1,23 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
 defmodule Mix.Tasks.Pleroma.Benchmark do
+  import Mix.Pleroma
   use Mix.Task
-  alias Mix.Tasks.Pleroma.Common

   def run(["search"]) do
-    Common.start_pleroma()
+    start_pleroma()

     Benchee.run(%{
       "search" => fn ->
-        Pleroma.Web.MastodonAPI.MastodonAPIController.status_search(nil, "cofe")
+        Pleroma.Activity.search(nil, "cofe")
       end
     })
   end

   def run(["tag"]) do
-    Common.start_pleroma()
+    start_pleroma()

     Benchee.run(%{
       "tag" => fn ->
@@ -1,28 +0,0 @@ (lib/mix/tasks/pleroma/common.ex deleted)
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Mix.Tasks.Pleroma.Common do
-  @doc "Common functions to be reused in mix tasks"
-  def start_pleroma do
-    Mix.Task.run("app.start")
-  end
-
-  def get_option(options, opt, prompt, defval \\ nil, defname \\ nil) do
-    Keyword.get(options, opt) ||
-      case Mix.shell().prompt("#{prompt} [#{defname || defval}]") do
-        "\n" ->
-          case defval do
-            nil -> get_option(options, opt, prompt, defval)
-            defval -> defval
-          end
-
-        opt ->
-          opt |> String.trim()
-      end
-  end
-
-  def escape_sh_path(path) do
-    ~S(') <> String.replace(path, ~S('), ~S(\')) <> ~S(')
-  end
-end
83 lib/mix/tasks/pleroma/config.ex Normal file
@@ -0,0 +1,83 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Mix.Tasks.Pleroma.Config do
  use Mix.Task
  import Mix.Pleroma
  alias Pleroma.Repo
  alias Pleroma.Web.AdminAPI.Config
  @shortdoc "Manages the location of the config"
  @moduledoc """
  Manages the location of the config.

  ## Transfers config from file to DB.

      mix pleroma.config migrate_to_db

  ## Transfers config from DB to file.

      mix pleroma.config migrate_from_db ENV
  """

  def run(["migrate_to_db"]) do
    start_pleroma()

    if Pleroma.Config.get([:instance, :dynamic_configuration]) do
      Application.get_all_env(:pleroma)
      |> Enum.reject(fn {k, _v} -> k in [Pleroma.Repo, :env] end)
      |> Enum.each(fn {k, v} ->
        key = to_string(k) |> String.replace("Elixir.", "")

        key =
          if String.starts_with?(key, "Pleroma.") do
            key
          else
            ":" <> key
          end

        {:ok, _} = Config.update_or_create(%{group: "pleroma", key: key, value: v})
        Mix.shell().info("#{key} is migrated.")
      end)

      Mix.shell().info("Settings migrated.")
    else
      Mix.shell().info(
        "Migration is not allowed by config. You can change this behavior in instance settings."
      )
    end
  end

  def run(["migrate_from_db", env, delete?]) do
    start_pleroma()

    delete? = if delete? == "true", do: true, else: false

    if Pleroma.Config.get([:instance, :dynamic_configuration]) do
      config_path = "config/#{env}.exported_from_db.secret.exs"

      {:ok, file} = File.open(config_path, [:write])
      IO.write(file, "use Mix.Config\r\n")

      Repo.all(Config)
      |> Enum.each(fn config ->
        IO.write(
          file,
          "config :#{config.group}, #{config.key}, #{inspect(Config.from_binary(config.value))}\r\n\r\n"
        )

        if delete? do
          {:ok, _} = Repo.delete(config)
          Mix.shell().info("#{config.key} deleted from DB.")
        end
      end)

      File.close(file)
      System.cmd("mix", ["format", config_path])
    else
      Mix.shell().info(
        "Migration is not allowed by config. You can change this behavior in instance settings."
      )
    end
  end
end
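Illustrative only: assuming a single stored row with group "pleroma", key ":instance" and a keyword value, migrate_from_db would write a config/ENV.exported_from_db.secret.exs roughly along these lines (the values are invented for the example):

    use Mix.Config

    config :pleroma, :instance, name: "My instance", limit: 5000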
@@ -3,12 +3,12 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Mix.Tasks.Pleroma.Database do
-  alias Mix.Tasks.Pleroma.Common
   alias Pleroma.Conversation
   alias Pleroma.Object
   alias Pleroma.Repo
   alias Pleroma.User
   require Logger
+  import Mix.Pleroma
   use Mix.Task

   @shortdoc "A collection of database related tasks"
@@ -45,7 +45,7 @@ def run(["remove_embedded_objects" | args]) do
       ]
     )

-    Common.start_pleroma()
+    start_pleroma()
     Logger.info("Removing embedded objects")

     Repo.query!(
@@ -66,12 +66,12 @@ def run(["remove_embedded_objects" | args]) do
   end

   def run(["bump_all_conversations"]) do
-    Common.start_pleroma()
+    start_pleroma()
     Conversation.bump_for_all_activities()
   end

   def run(["update_users_following_followers_counts"]) do
-    Common.start_pleroma()
+    start_pleroma()

     users = Repo.all(User)
     Enum.each(users, &User.remove_duplicated_following/1)
@@ -89,7 +89,7 @@ def run(["prune_objects" | args]) do
       ]
     )

-    Common.start_pleroma()
+    start_pleroma()

     deadline = Pleroma.Config.get([:instance, :remote_post_retention_days])
50 lib/mix/tasks/pleroma/ecto/ecto.ex Normal file
@@ -0,0 +1,50 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Mix.Tasks.Pleroma.Ecto do
  @doc """
  Ensures the given repository's migrations path exists on the file system.
  """
  @spec ensure_migrations_path(Ecto.Repo.t(), Keyword.t()) :: String.t()
  def ensure_migrations_path(repo, opts) do
    path = opts[:migrations_path] || Path.join(source_repo_priv(repo), "migrations")

    path =
      case Path.type(path) do
        :relative ->
          Path.join(Application.app_dir(:pleroma), path)

        :absolute ->
          path
      end

    if not File.dir?(path) do
      raise_missing_migrations(Path.relative_to_cwd(path), repo)
    end

    path
  end

  @doc """
  Returns the private repository path relative to the source.
  """
  def source_repo_priv(repo) do
    config = repo.config()
    priv = config[:priv] || "priv/#{repo |> Module.split() |> List.last() |> Macro.underscore()}"
    Path.join(Application.app_dir(:pleroma), priv)
  end

  defp raise_missing_migrations(path, repo) do
    raise("""
    Could not find migrations directory #{inspect(path)}
    for repo #{inspect(repo)}.
    This may be because you are in a new project and the
    migration directory has not been created yet. Creating an
    empty directory at the path above will fix this error.
    If you expected existing migrations to be found, please
    make sure your repository has been properly configured
    and the configured path exists.
    """)
  end
end
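A quick sketch of how ensure_migrations_path/2 resolves paths: a relative or default path is joined onto the :pleroma application directory, while an absolute --migrations-path is returned as-is once the directory exists. The absolute path below is an assumption, not from the commit:

    # resolves to <app_dir(:pleroma)>/priv/repo/migrations with the default repo config
    default_path = Mix.Tasks.Pleroma.Ecto.ensure_migrations_path(Pleroma.Repo, [])

    # an explicit absolute directory is used unchanged
    Mix.Tasks.Pleroma.Ecto.ensure_migrations_path(
      Pleroma.Repo,
      migrations_path: "/opt/pleroma/migrations"
    )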
63 lib/mix/tasks/pleroma/ecto/migrate.ex Normal file
@@ -0,0 +1,63 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Mix.Tasks.Pleroma.Ecto.Migrate do
  use Mix.Task
  import Mix.Pleroma
  require Logger

  @shortdoc "Wrapper on `ecto.migrate` task."

  @aliases [
    n: :step,
    v: :to
  ]

  @switches [
    all: :boolean,
    step: :integer,
    to: :integer,
    quiet: :boolean,
    log_sql: :boolean,
    strict_version_order: :boolean,
    migrations_path: :string
  ]

  @moduledoc """
  Changes `Logger` level to `:info` before start migration.
  Changes level back when migration ends.

  ## Start migration

      mix pleroma.ecto.migrate [OPTIONS]

  Options:
    - see https://hexdocs.pm/ecto/2.0.0/Mix.Tasks.Ecto.Migrate.html
  """

  @impl true
  def run(args \\ []) do
    load_pleroma()
    {opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)

    opts =
      if opts[:to] || opts[:step] || opts[:all],
        do: opts,
        else: Keyword.put(opts, :all, true)

    opts =
      if opts[:quiet],
        do: Keyword.merge(opts, log: false, log_sql: false),
        else: opts

    path = Mix.Tasks.Pleroma.Ecto.ensure_migrations_path(Pleroma.Repo, opts)

    level = Logger.level()
    Logger.configure(level: :info)

    {:ok, _, _} = Ecto.Migrator.with_repo(Pleroma.Repo, &Ecto.Migrator.run(&1, path, :up, opts))

    Logger.configure(level: level)
  end
end
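Typical invocations of the wrapper, sketched under the assumption that the mix or release environment is already set up (the migration version number is a placeholder):

    # mix pleroma.ecto.migrate                      # defaults to --all
    # mix pleroma.ecto.migrate -n 1                 # run a single step
    # mix pleroma.ecto.migrate --to 20190501120000  # migrate up to a given version
    Mix.Tasks.Pleroma.Ecto.Migrate.run(["--step", "1"])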
67 lib/mix/tasks/pleroma/ecto/rollback.ex Normal file
@@ -0,0 +1,67 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Mix.Tasks.Pleroma.Ecto.Rollback do
  use Mix.Task
  import Mix.Pleroma
  require Logger
  @shortdoc "Wrapper on `ecto.rollback` task"

  @aliases [
    n: :step,
    v: :to
  ]

  @switches [
    all: :boolean,
    step: :integer,
    to: :integer,
    start: :boolean,
    quiet: :boolean,
    log_sql: :boolean,
    migrations_path: :string
  ]

  @moduledoc """
  Changes `Logger` level to `:info` before start rollback.
  Changes level back when rollback ends.

  ## Start rollback

      mix pleroma.ecto.rollback

  Options:
    - see https://hexdocs.pm/ecto/2.0.0/Mix.Tasks.Ecto.Rollback.html
  """

  @impl true
  def run(args \\ []) do
    load_pleroma()
    {opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)

    opts =
      if opts[:to] || opts[:step] || opts[:all],
        do: opts,
        else: Keyword.put(opts, :step, 1)

    opts =
      if opts[:quiet],
        do: Keyword.merge(opts, log: false, log_sql: false),
        else: opts

    path = Mix.Tasks.Pleroma.Ecto.ensure_migrations_path(Pleroma.Repo, opts)

    level = Logger.level()
    Logger.configure(level: :info)

    if Pleroma.Config.get(:env) == :test do
      Logger.info("Rollback successfully")
    else
      {:ok, _, _} =
        Ecto.Migrator.with_repo(Pleroma.Repo, &Ecto.Migrator.run(&1, path, :down, opts))
    end

    Logger.configure(level: level)
  end
end
@@ -55,15 +55,13 @@ defmodule Mix.Tasks.Pleroma.Emoji do
   are extracted).
   """

-  @default_manifest Pleroma.Config.get!([:emoji, :default_manifest])
-
   def run(["ls-packs" | args]) do
     Application.ensure_all_started(:hackney)

     {options, [], []} = parse_global_opts(args)

     manifest =
-      fetch_manifest(if options[:manifest], do: options[:manifest], else: @default_manifest)
+      fetch_manifest(if options[:manifest], do: options[:manifest], else: default_manifest())

     Enum.each(manifest, fn {name, info} ->
       to_print = [
@@ -88,7 +86,7 @@ def run(["get-packs" | args]) do

     {options, pack_names, []} = parse_global_opts(args)

-    manifest_url = if options[:manifest], do: options[:manifest], else: @default_manifest
+    manifest_url = if options[:manifest], do: options[:manifest], else: default_manifest()

     manifest = fetch_manifest(manifest_url)

@@ -298,4 +296,6 @@ defp client do

     Tesla.client(middleware)
   end
+
+  defp default_manifest, do: Pleroma.Config.get!([:emoji, :default_manifest])
 end
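The point of replacing @default_manifest with default_manifest/0 is when the value is read: a module attribute is resolved once, at compile time, while a function call picks up the runtime configuration. A minimal sketch of the difference, using the same config key as the diff (the module names are invented for illustration):

    defmodule CompileTimeExample do
      # evaluated once, when the module is compiled
      @default_manifest Pleroma.Config.get!([:emoji, :default_manifest])
      def manifest, do: @default_manifest
    end

    defmodule RuntimeExample do
      # evaluated on every call, so config changed after compilation is honoured
      def manifest, do: Pleroma.Config.get!([:emoji, :default_manifest])
    end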
@@ -4,7 +4,7 @@
 defmodule Mix.Tasks.Pleroma.Instance do
   use Mix.Task
-  alias Mix.Tasks.Pleroma.Common
+  import Mix.Pleroma

   @shortdoc "Manages Pleroma instance"
   @moduledoc """
@@ -29,7 +29,13 @@ defmodule Mix.Tasks.Pleroma.Instance do
   - `--dbname DBNAME` - the name of the database to use
   - `--dbuser DBUSER` - the user (aka role) to use for the database connection
   - `--dbpass DBPASS` - the password to use for the database connection
+  - `--rum Y/N` - Whether to enable RUM indexes
   - `--indexable Y/N` - Allow/disallow indexing site by search engines
+  - `--db-configurable Y/N` - Allow/disallow configuring instance from admin part
+  - `--uploads-dir` - the directory uploads go in when using a local uploader
+  - `--static-dir` - the directory custom public files should be read from (custom emojis, frontend bundle overrides, robots.txt, etc.)
+  - `--listen-ip` - the ip the app should listen to, defaults to 127.0.0.1
+  - `--listen-port` - the port the app should listen to, defaults to 4000
   """

   def run(["gen" | rest]) do
@@ -48,7 +54,13 @@ def run(["gen" | rest]) do
           dbname: :string,
           dbuser: :string,
           dbpass: :string,
-          indexable: :string
+          rum: :string,
+          indexable: :string,
+          db_configurable: :string,
+          uploads_dir: :string,
+          static_dir: :string,
+          listen_ip: :string,
+          listen_port: :string
         ],
         aliases: [
           o: :output,
@@ -68,7 +80,7 @@ def run(["gen" | rest]) do
     if proceed? do
       [domain, port | _] =
         String.split(
-          Common.get_option(
+          get_option(
             options,
             :domain,
             "What domain will your instance use? (e.g pleroma.soykaf.com)"
@@ -77,16 +89,16 @@ def run(["gen" | rest]) do
         ) ++ [443]

       name =
-        Common.get_option(
+        get_option(
           options,
           :instance_name,
           "What is the name of your instance? (e.g. Pleroma/Soykaf)"
         )

-      email = Common.get_option(options, :admin_email, "What is your admin email address?")
+      email = get_option(options, :admin_email, "What is your admin email address?")

       notify_email =
-        Common.get_option(
+        get_option(
           options,
           :notify_email,
           "What email address do you want to use for sending email notifications?",
@@ -94,21 +106,27 @@ def run(["gen" | rest]) do
         )

       indexable =
-        Common.get_option(
+        get_option(
           options,
           :indexable,
           "Do you want search engines to index your site? (y/n)",
           "y"
         ) === "y"

-      dbhost =
-        Common.get_option(options, :dbhost, "What is the hostname of your database?", "localhost")
-
-      dbname =
-        Common.get_option(options, :dbname, "What is the name of your database?", "pleroma_dev")
+      db_configurable? =
+        get_option(
+          options,
+          :db_configurable,
+          "Do you want to store the configuration in the database (allows controlling it from admin-fe)? (y/n)",
+          "n"
+        ) === "y"
+
+      dbhost = get_option(options, :dbhost, "What is the hostname of your database?", "localhost")
+
+      dbname = get_option(options, :dbname, "What is the name of your database?", "pleroma")

       dbuser =
-        Common.get_option(
+        get_option(
           options,
           :dbuser,
           "What is the user used to connect to your database?",
@@ -116,7 +134,7 @@ def run(["gen" | rest]) do
         )

       dbpass =
-        Common.get_option(
+        get_option(
           options,
           :dbpass,
           "What is the password used to connect to your database?",
@@ -124,13 +142,54 @@ def run(["gen" | rest]) do
           "autogenerated"
         )

+      rum_enabled =
+        get_option(
+          options,
+          :rum,
+          "Would you like to use RUM indices?",
+          "n"
+        ) === "y"
+
+      listen_port =
+        get_option(
+          options,
+          :listen_port,
+          "What port will the app listen to (leave it if you are using the default setup with nginx)?",
+          4000
+        )
+
+      listen_ip =
+        get_option(
+          options,
+          :listen_ip,
+          "What ip will the app listen to (leave it if you are using the default setup with nginx)?",
+          "127.0.0.1"
+        )
+
+      uploads_dir =
+        get_option(
+          options,
+          :uploads_dir,
+          "What directory should media uploads go in (when using the local uploader)?",
+          Pleroma.Config.get([Pleroma.Uploaders.Local, :uploads])
+        )
+
+      static_dir =
+        get_option(
+          options,
+          :static_dir,
+          "What directory should custom public files be read from (custom emojis, frontend bundle overrides, robots.txt, etc.)?",
+          Pleroma.Config.get([:instance, :static_dir])
+        )
+
       secret = :crypto.strong_rand_bytes(64) |> Base.encode64() |> binary_part(0, 64)
       signing_salt = :crypto.strong_rand_bytes(8) |> Base.encode64() |> binary_part(0, 8)
       {web_push_public_key, web_push_private_key} = :crypto.generate_key(:ecdh, :prime256v1)
+      template_dir = Application.app_dir(:pleroma, "priv") <> "/templates"

       result_config =
         EEx.eval_file(
-          "sample_config.eex" |> Path.expand(__DIR__),
+          template_dir <> "/sample_config.eex",
           domain: domain,
           port: port,
           email: email,
@@ -140,46 +199,40 @@ def run(["gen" | rest]) do
           dbname: dbname,
           dbuser: dbuser,
           dbpass: dbpass,
-          version: Pleroma.Mixfile.project() |> Keyword.get(:version),
           secret: secret,
           signing_salt: signing_salt,
           web_push_public_key: Base.url_encode64(web_push_public_key, padding: false),
-          web_push_private_key: Base.url_encode64(web_push_private_key, padding: false)
+          web_push_private_key: Base.url_encode64(web_push_private_key, padding: false),
+          db_configurable?: db_configurable?,
+          static_dir: static_dir,
+          uploads_dir: uploads_dir,
+          rum_enabled: rum_enabled,
+          listen_ip: listen_ip,
+          listen_port: listen_port
         )

       result_psql =
         EEx.eval_file(
-          "sample_psql.eex" |> Path.expand(__DIR__),
+          template_dir <> "/sample_psql.eex",
           dbname: dbname,
           dbuser: dbuser,
-          dbpass: dbpass
+          dbpass: dbpass,
+          rum_enabled: rum_enabled
         )

-      Mix.shell().info(
-        "Writing config to #{config_path}. You should rename it to config/prod.secret.exs or config/dev.secret.exs."
-      )
+      shell_info("Writing config to #{config_path}.")

       File.write(config_path, result_config)
-      Mix.shell().info("Writing #{psql_path}.")
+      shell_info("Writing the postgres script to #{psql_path}.")
       File.write(psql_path, result_psql)

-      write_robots_txt(indexable)
+      write_robots_txt(indexable, template_dir)

-      Mix.shell().info(
-        "\n" <>
-          """
-          To get started:
-          1. Verify the contents of the generated files.
-          2. Run `sudo -u postgres psql -f #{Common.escape_sh_path(psql_path)}`.
-          """ <>
-          if config_path in ["config/dev.secret.exs", "config/prod.secret.exs"] do
-            ""
-          else
-            "3. Run `mv #{Common.escape_sh_path(config_path)} 'config/prod.secret.exs'`."
-          end
-      )
+      shell_info(
+        "\n All files successfully written! Refer to the installation instructions for your platform for next steps"
+      )
     else
-      Mix.shell().error(
+      shell_error(
         "The task would have overwritten the following files:\n" <>
           (Enum.map(paths, &"- #{&1}\n") |> Enum.join("")) <>
           "Rerun with `--force` to overwrite them."
@@ -187,10 +240,10 @@ def run(["gen" | rest]) do
     end
   end

-  defp write_robots_txt(indexable) do
+  defp write_robots_txt(indexable, template_dir) do
     robots_txt =
       EEx.eval_file(
-        Path.expand("robots_txt.eex", __DIR__),
+        template_dir <> "/robots_txt.eex",
         indexable: indexable
       )

@@ -204,10 +257,10 @@ defp write_robots_txt(indexable) do
     if File.exists?(robots_txt_path) do
       File.cp!(robots_txt_path, "#{robots_txt_path}.bak")
-      Mix.shell().info("Backing up existing robots.txt to #{robots_txt_path}.bak")
+      shell_info("Backing up existing robots.txt to #{robots_txt_path}.bak")
     end

     File.write(robots_txt_path, robots_txt)
-    Mix.shell().info("Writing #{robots_txt_path}.")
+    shell_info("Writing #{robots_txt_path}.")
   end
 end
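The generator now resolves its EEx templates from priv/templates via Application.app_dir/2 instead of Path.expand(__DIR__), so the templates can also be found in compiled releases where the source tree is not present. A throwaway sketch of the same mechanism (the hello.eex template is invented for illustration; the real call passes the full assign list shown above):

    template_dir = Application.app_dir(:pleroma, "priv") <> "/templates"
    File.write!(template_dir <> "/hello.eex", "instance: <%= name %>\n")
    IO.puts(EEx.eval_file(template_dir <> "/hello.eex", name: "Pleroma/Soykaf"))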
@@ -4,7 +4,7 @@
 defmodule Mix.Tasks.Pleroma.Relay do
   use Mix.Task
-  alias Mix.Tasks.Pleroma.Common
+  import Mix.Pleroma
   alias Pleroma.Web.ActivityPub.Relay

   @shortdoc "Manages remote relays"
@@ -24,24 +24,24 @@ defmodule Mix.Tasks.Pleroma.Relay do
   Example: ``mix pleroma.relay unfollow https://example.org/relay``
   """
   def run(["follow", target]) do
-    Common.start_pleroma()
+    start_pleroma()

     with {:ok, _activity} <- Relay.follow(target) do
       # put this task to sleep to allow the genserver to push out the messages
       :timer.sleep(500)
     else
-      {:error, e} -> Mix.shell().error("Error while following #{target}: #{inspect(e)}")
+      {:error, e} -> shell_error("Error while following #{target}: #{inspect(e)}")
     end
   end

   def run(["unfollow", target]) do
-    Common.start_pleroma()
+    start_pleroma()

     with {:ok, _activity} <- Relay.unfollow(target) do
       # put this task to sleep to allow the genserver to push out the messages
       :timer.sleep(500)
     else
-      {:error, e} -> Mix.shell().error("Error while following #{target}: #{inspect(e)}")
+      {:error, e} -> shell_error("Error while following #{target}: #{inspect(e)}")
     end
   end
 end
@@ -4,7 +4,7 @@
 defmodule Mix.Tasks.Pleroma.Uploads do
   use Mix.Task
-  alias Mix.Tasks.Pleroma.Common
+  import Mix.Pleroma
   alias Pleroma.Upload
   alias Pleroma.Uploaders.Local
   require Logger
@@ -24,7 +24,7 @@ defmodule Mix.Tasks.Pleroma.Uploads do
   """
   def run(["migrate_local", target_uploader | args]) do
     delete? = Enum.member?(args, "--delete")
-    Common.start_pleroma()
+    start_pleroma()
     local_path = Pleroma.Config.get!([Local, :uploads])
     uploader = Module.concat(Pleroma.Uploaders, target_uploader)

@@ -38,10 +38,10 @@ def run(["migrate_local", target_uploader | args]) do
       Pleroma.Config.put([Upload, :uploader], uploader)
     end

-    Mix.shell().info("Migrating files from local #{local_path} to #{to_string(uploader)}")
+    shell_info("Migrating files from local #{local_path} to #{to_string(uploader)}")

     if delete? do
-      Mix.shell().info(
+      shell_info(
         "Attention: uploaded files will be deleted, hope you have backups! (--delete ; cancel with ^C)"
       )

@@ -78,7 +78,7 @@ def run(["migrate_local", target_uploader | args]) do
     |> Enum.filter(& &1)

     total_count = length(uploads)
-    Mix.shell().info("Found #{total_count} uploads")
+    shell_info("Found #{total_count} uploads")

     uploads
     |> Task.async_stream(
@@ -90,7 +90,7 @@ def run(["migrate_local", target_uploader | args]) do
           :ok

         error ->
-          Mix.shell().error("failed to upload #{inspect(upload.path)}: #{inspect(error)}")
+          shell_error("failed to upload #{inspect(upload.path)}: #{inspect(error)}")
       end
     end,
     timeout: 150_000
@@ -99,10 +99,10 @@ def run(["migrate_local", target_uploader | args]) do
     # credo:disable-for-next-line Credo.Check.Warning.UnusedEnumOperation
     |> Enum.reduce(0, fn done, count ->
       count = count + length(done)
-      Mix.shell().info("Uploaded #{count}/#{total_count} files")
+      shell_info("Uploaded #{count}/#{total_count} files")
       count
     end)

-    Mix.shell().info("Done!")
+    shell_info("Done!")
   end
 end
@@ -5,9 +5,10 @@
 defmodule Mix.Tasks.Pleroma.User do
   use Mix.Task
   import Ecto.Changeset
-  alias Mix.Tasks.Pleroma.Common
+  import Mix.Pleroma
   alias Pleroma.User
   alias Pleroma.UserInviteToken
+  alias Pleroma.Web.OAuth

   @shortdoc "Manages Pleroma users"
   @moduledoc """
@@ -49,6 +50,10 @@ defmodule Mix.Tasks.Pleroma.User do

       mix pleroma.user delete_activities NICKNAME

+  ## Sign user out from all applications (delete user's OAuth tokens and authorizations).
+
+      mix pleroma.user sign_out NICKNAME
+
   ## Deactivate or activate the user's account.

       mix pleroma.user toggle_activated NICKNAME
@@ -115,7 +120,7 @@ def run(["new", nickname, email | rest]) do
     admin? = Keyword.get(options, :admin, false)
     assume_yes? = Keyword.get(options, :assume_yes, false)

-    Mix.shell().info("""
+    shell_info("""
     A user will be created with the following information:
     - nickname: #{nickname}
     - email: #{email}
@@ -128,10 +133,10 @@ def run(["new", nickname, email | rest]) do
     - admin: #{if(admin?, do: "true", else: "false")}
     """)

-    proceed? = assume_yes? or Mix.shell().yes?("Continue?")
+    proceed? = assume_yes? or shell_yes?("Continue?")

     if proceed? do
-      Common.start_pleroma()
+      start_pleroma()

       params = %{
         nickname: nickname,
@@ -145,7 +150,7 @@ def run(["new", nickname, email | rest]) do
       changeset = User.register_changeset(%User{}, params, need_confirmation: false)
       {:ok, _user} = User.register(changeset)

-      Mix.shell().info("User #{nickname} created")
+      shell_info("User #{nickname} created")

       if moderator? do
         run(["set", nickname, "--moderator"])
@@ -159,64 +164,64 @@ def run(["new", nickname, email | rest]) do
         run(["reset_password", nickname])
       end
     else
-      Mix.shell().info("User will not be created.")
+      shell_info("User will not be created.")
     end
   end

   def run(["rm", nickname]) do
-    Common.start_pleroma()
+    start_pleroma()

     with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
       User.perform(:delete, user)
-      Mix.shell().info("User #{nickname} deleted.")
+      shell_info("User #{nickname} deleted.")
     else
       _ ->
-        Mix.shell().error("No local user #{nickname}")
+        shell_error("No local user #{nickname}")
     end
   end

   def run(["toggle_activated", nickname]) do
-    Common.start_pleroma()
+    start_pleroma()

     with %User{} = user <- User.get_cached_by_nickname(nickname) do
       {:ok, user} = User.deactivate(user, !user.info.deactivated)

-      Mix.shell().info(
+      shell_info(
         "Activation status of #{nickname}: #{if(user.info.deactivated, do: "de", else: "")}activated"
       )
     else
       _ ->
-        Mix.shell().error("No user #{nickname}")
+        shell_error("No user #{nickname}")
     end
   end

   def run(["reset_password", nickname]) do
-    Common.start_pleroma()
+    start_pleroma()

     with %User{local: true} = user <- User.get_cached_by_nickname(nickname),
          {:ok, token} <- Pleroma.PasswordResetToken.create_token(user) do
-      Mix.shell().info("Generated password reset token for #{user.nickname}")
+      shell_info("Generated password reset token for #{user.nickname}")

       IO.puts(
         "URL: #{
-          Pleroma.Web.Router.Helpers.util_url(
+          Pleroma.Web.Router.Helpers.reset_password_url(
             Pleroma.Web.Endpoint,
-            :show_password_reset,
+            :reset,
             token.token
           )
         }"
       )
     else
       _ ->
-        Mix.shell().error("No local user #{nickname}")
+        shell_error("No local user #{nickname}")
     end
   end

   def run(["unsubscribe", nickname]) do
-    Common.start_pleroma()
+    start_pleroma()

     with %User{} = user <- User.get_cached_by_nickname(nickname) do
-      Mix.shell().info("Deactivating #{user.nickname}")
+      shell_info("Deactivating #{user.nickname}")
       User.deactivate(user)

       {:ok, friends} = User.get_friends(user)
@@ -224,7 +229,7 @@ def run(["unsubscribe", nickname]) do
       Enum.each(friends, fn friend ->
         user = User.get_cached_by_id(user.id)

-        Mix.shell().info("Unsubscribing #{friend.nickname} from #{user.nickname}")
+        shell_info("Unsubscribing #{friend.nickname} from #{user.nickname}")
         User.unfollow(user, friend)
       end)

@@ -233,16 +238,16 @@ def run(["unsubscribe", nickname]) do
       user = User.get_cached_by_id(user.id)

       if Enum.empty?(user.following) do
-        Mix.shell().info("Successfully unsubscribed all followers from #{user.nickname}")
+        shell_info("Successfully unsubscribed all followers from #{user.nickname}")
       end
     else
       _ ->
-        Mix.shell().error("No user #{nickname}")
+        shell_error("No user #{nickname}")
     end
   end

   def run(["set", nickname | rest]) do
-    Common.start_pleroma()
+    start_pleroma()

     {options, [], []} =
       OptionParser.parse(
@@ -274,33 +279,33 @@ def run(["set", nickname | rest]) do
       end
     else
       _ ->
-        Mix.shell().error("No local user #{nickname}")
+        shell_error("No local user #{nickname}")
     end
   end

   def run(["tag", nickname | tags]) do
-    Common.start_pleroma()
+    start_pleroma()

     with %User{} = user <- User.get_cached_by_nickname(nickname) do
       user = user |> User.tag(tags)

-      Mix.shell().info("Tags of #{user.nickname}: #{inspect(tags)}")
+      shell_info("Tags of #{user.nickname}: #{inspect(tags)}")
     else
       _ ->
-        Mix.shell().error("Could not change user tags for #{nickname}")
+        shell_error("Could not change user tags for #{nickname}")
     end
   end

   def run(["untag", nickname | tags]) do
-    Common.start_pleroma()
+    start_pleroma()

     with %User{} = user <- User.get_cached_by_nickname(nickname) do
       user = user |> User.untag(tags)

-      Mix.shell().info("Tags of #{user.nickname}: #{inspect(tags)}")
+      shell_info("Tags of #{user.nickname}: #{inspect(tags)}")
     else
       _ ->
-        Mix.shell().error("Could not change user tags for #{nickname}")
+        shell_error("Could not change user tags for #{nickname}")
     end
   end

@@ -321,14 +326,12 @@ def run(["invite" | rest]) do
       end)
       |> Enum.into(%{})

-    Common.start_pleroma()
+    start_pleroma()

     with {:ok, val} <- options[:expires_at],
          options = Map.put(options, :expires_at, val),
          {:ok, invite} <- UserInviteToken.create_invite(options) do
-      Mix.shell().info(
-        "Generated user invite token " <> String.replace(invite.invite_type, "_", " ")
-      )
+      shell_info("Generated user invite token " <> String.replace(invite.invite_type, "_", " "))

       url =
         Pleroma.Web.Router.Helpers.redirect_url(
@@ -340,14 +343,14 @@ def run(["invite" | rest]) do
       IO.puts(url)
     else
       error ->
-        Mix.shell().error("Could not create invite token: #{inspect(error)}")
+        shell_error("Could not create invite token: #{inspect(error)}")
     end
   end

   def run(["invites"]) do
-    Common.start_pleroma()
+    start_pleroma()

-    Mix.shell().info("Invites list:")
+    shell_info("Invites list:")

     UserInviteToken.list_invites()
     |> Enum.each(fn invite ->
@@ -361,7 +364,7 @@ def run(["invites"]) do
           " | Max use: #{max_use}    Left use: #{max_use - invite.uses}"
       end

-      Mix.shell().info(
+      shell_info(
         "ID: #{invite.id} | Token: #{invite.token} | Token type: #{invite.invite_type} | Used: #{
           invite.used
         }#{expire_info}#{using_info}"
@@ -370,40 +373,54 @@ def run(["invites"]) do
     end
   end

   def run(["revoke_invite", token]) do
-    Common.start_pleroma()
+    start_pleroma()

     with {:ok, invite} <- UserInviteToken.find_by_token(token),
          {:ok, _} <- UserInviteToken.update_invite(invite, %{used: true}) do
-      Mix.shell().info("Invite for token #{token} was revoked.")
+      shell_info("Invite for token #{token} was revoked.")
     else
-      _ -> Mix.shell().error("No invite found with token #{token}")
+      _ -> shell_error("No invite found with token #{token}")
     end
   end

   def run(["delete_activities", nickname]) do
-    Common.start_pleroma()
+    start_pleroma()

     with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
       {:ok, _} = User.delete_user_activities(user)
-      Mix.shell().info("User #{nickname} statuses deleted.")
+      shell_info("User #{nickname} statuses deleted.")
     else
       _ ->
-        Mix.shell().error("No local user #{nickname}")
+        shell_error("No local user #{nickname}")
     end
   end

   def run(["toggle_confirmed", nickname]) do
-    Common.start_pleroma()
+    start_pleroma()

     with %User{} = user <- User.get_cached_by_nickname(nickname) do
       {:ok, user} = User.toggle_confirmation(user)

       message = if user.info.confirmation_pending, do: "needs", else: "doesn't need"

-      Mix.shell().info("#{nickname} #{message} confirmation.")
+      shell_info("#{nickname} #{message} confirmation.")
     else
       _ ->
-        Mix.shell().error("No local user #{nickname}")
+        shell_error("No local user #{nickname}")
+    end
+  end
+
+  def run(["sign_out", nickname]) do
+    start_pleroma()
+
+    with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
+      OAuth.Token.delete_user_tokens(user)
+      OAuth.Authorization.delete_user_authorizations(user)
+
+      shell_info("#{nickname} signed out from all apps.")
+    else
+      _ ->
+        shell_error("No local user #{nickname}")
     end
   end

@@ -416,7 +433,7 @@ defp set_moderator(user, value) do

     {:ok, user} = User.update_and_set_cache(user_cng)

-    Mix.shell().info("Moderator status of #{user.nickname}: #{user.info.is_moderator}")
+    shell_info("Moderator status of #{user.nickname}: #{user.info.is_moderator}")
     user
   end

@@ -429,7 +446,7 @@ defp set_admin(user, value) do

     {:ok, user} = User.update_and_set_cache(user_cng)

-    Mix.shell().info("Admin status of #{user.nickname}: #{user.info.is_admin}")
+    shell_info("Admin status of #{user.nickname}: #{user.info.is_admin}")
     user
   end

@@ -442,7 +459,7 @@ defp set_locked(user, value) do

     {:ok, user} = User.update_and_set_cache(user_cng)

-    Mix.shell().info("Locked status of #{user.nickname}: #{user.info.locked}")
+    shell_info("Locked status of #{user.nickname}: #{user.info.locked}")
     user
   end
 end
@@ -343,4 +343,6 @@ def restrict_deactivated_users(query) do
       )
     )
   end
+
+  defdelegate search(user, query, options \\ []), to: Pleroma.Activity.Search
 end
92 lib/pleroma/activity/search.ex Normal file
@@ -0,0 +1,92 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Activity.Search do
  alias Pleroma.Activity
  alias Pleroma.Object.Fetcher
  alias Pleroma.Pagination
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.Visibility

  import Ecto.Query

  def search(user, search_query, options \\ []) do
    index_type = if Pleroma.Config.get([:database, :rum_enabled]), do: :rum, else: :gin
    limit = Enum.min([Keyword.get(options, :limit), 40])
    offset = Keyword.get(options, :offset, 0)
    author = Keyword.get(options, :author)

    Activity
    |> Activity.with_preloaded_object()
    |> Activity.restrict_deactivated_users()
    |> restrict_public()
    |> query_with(index_type, search_query)
    |> maybe_restrict_local(user)
    |> maybe_restrict_author(author)
    |> Pagination.fetch_paginated(%{"offset" => offset, "limit" => limit}, :offset)
    |> maybe_fetch(user, search_query)
  end

  def maybe_restrict_author(query, %User{} = author) do
    from([a, o] in query,
      where: a.actor == ^author.ap_id
    )
  end

  def maybe_restrict_author(query, _), do: query

  defp restrict_public(q) do
    from([a, o] in q,
      where: fragment("?->>'type' = 'Create'", a.data),
      where: "https://www.w3.org/ns/activitystreams#Public" in a.recipients
    )
  end

  defp query_with(q, :gin, search_query) do
    from([a, o] in q,
      where:
        fragment(
          "to_tsvector('english', ?->>'content') @@ plainto_tsquery('english', ?)",
          o.data,
          ^search_query
        )
    )
  end

  defp query_with(q, :rum, search_query) do
    from([a, o] in q,
      where:
        fragment(
          "? @@ plainto_tsquery('english', ?)",
          o.fts_content,
          ^search_query
        ),
      order_by: [fragment("? <=> now()::date", o.inserted_at)]
    )
  end

  defp maybe_restrict_local(q, user) do
    limit = Pleroma.Config.get([:instance, :limit_to_local_content], :unauthenticated)

    case {limit, user} do
      {:all, _} -> restrict_local(q)
      {:unauthenticated, %User{}} -> q
      {:unauthenticated, _} -> restrict_local(q)
      {false, _} -> q
    end
  end

  defp restrict_local(q), do: where(q, local: true)

  defp maybe_fetch(activities, user, search_query) do
    with true <- Regex.match?(~r/https?:/, search_query),
         {:ok, object} <- Fetcher.fetch_object_from_id(search_query),
         %Activity{} = activity <- Activity.get_create_by_object_ap_id(object.data["id"]),
         true <- Visibility.visible_for_user?(activity, user) do
      activities ++ [activity]
    else
      _ -> activities
    end
  end
end
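With the defdelegate added to Pleroma.Activity, callers such as the benchmark task now go through this module. A hedged usage sketch (the query string and nickname are examples only, not from the commit):

    # anonymous search, paginated by offset
    activities = Pleroma.Activity.search(nil, "cofe", limit: 20, offset: 0)

    # restrict results to a single author
    author = Pleroma.User.get_cached_by_nickname("lain")
    Pleroma.Activity.search(nil, "cofe", author: author)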
@ -4,7 +4,6 @@
|
||||||
|
|
||||||
defmodule Pleroma.Application do
|
defmodule Pleroma.Application do
|
||||||
use Application
|
use Application
|
||||||
import Supervisor.Spec
|
|
||||||
|
|
||||||
@name Mix.Project.config()[:name]
|
@name Mix.Project.config()[:name]
|
||||||
@version Mix.Project.config()[:version]
|
@version Mix.Project.config()[:version]
|
||||||
|
@ -31,21 +30,26 @@ def start(_type, _args) do
|
||||||
children =
|
children =
|
||||||
[
|
[
|
||||||
# Start the Ecto repository
|
# Start the Ecto repository
|
||||||
supervisor(Pleroma.Repo, []),
|
%{id: Pleroma.Repo, start: {Pleroma.Repo, :start_link, []}, type: :supervisor},
|
||||||
worker(Pleroma.Emoji, []),
|
%{id: Pleroma.Config.TransferTask, start: {Pleroma.Config.TransferTask, :start_link, []}},
|
||||||
worker(Pleroma.Captcha, []),
|
%{id: Pleroma.Emoji, start: {Pleroma.Emoji, :start_link, []}},
|
||||||
worker(
|
%{id: Pleroma.Captcha, start: {Pleroma.Captcha, :start_link, []}},
|
||||||
Cachex,
|
%{
|
||||||
|
id: :cachex_used_captcha_cache,
|
||||||
|
start:
|
||||||
|
{Cachex, :start_link,
|
||||||
[
|
[
|
||||||
:used_captcha_cache,
|
:used_captcha_cache,
|
||||||
[
|
[
|
||||||
ttl_interval: :timer.seconds(Pleroma.Config.get!([Pleroma.Captcha, :seconds_valid]))
|
ttl_interval:
|
||||||
|
:timer.seconds(Pleroma.Config.get!([Pleroma.Captcha, :seconds_valid]))
|
||||||
]
|
]
|
||||||
],
|
]}
|
||||||
id: :cachex_used_captcha_cache
|
},
|
||||||
),
|
%{
|
||||||
worker(
|
id: :cachex_user,
|
||||||
Cachex,
|
start:
|
||||||
|
{Cachex, :start_link,
|
||||||
[
|
[
|
||||||
:user_cache,
|
:user_cache,
|
||||||
[
|
[
|
||||||
|
@ -53,11 +57,12 @@ def start(_type, _args) do
|
||||||
ttl_interval: 1000,
|
ttl_interval: 1000,
|
||||||
limit: 2500
|
limit: 2500
|
||||||
]
|
]
|
||||||
],
|
]}
|
||||||
id: :cachex_user
|
},
|
||||||
),
|
%{
|
||||||
worker(
|
id: :cachex_object,
|
||||||
Cachex,
|
start:
|
||||||
|
{Cachex, :start_link,
|
||||||
[
|
[
|
||||||
:object_cache,
|
:object_cache,
|
||||||
[
|
[
|
||||||
|
@ -65,32 +70,35 @@ def start(_type, _args) do
|
||||||
ttl_interval: 1000,
|
ttl_interval: 1000,
|
||||||
limit: 2500
|
limit: 2500
|
||||||
]
|
]
|
||||||
],
|
]}
|
||||||
id: :cachex_object
|
},
|
||||||
),
|
%{
|
||||||
worker(
|
id: :cachex_rich_media,
|
||||||
Cachex,
|
start:
|
||||||
|
{Cachex, :start_link,
|
||||||
[
|
[
|
||||||
:rich_media_cache,
|
:rich_media_cache,
|
||||||
[
|
[
|
||||||
default_ttl: :timer.minutes(120),
|
default_ttl: :timer.minutes(120),
|
||||||
limit: 5000
|
limit: 5000
|
||||||
]
|
]
|
||||||
],
|
]}
|
||||||
id: :cachex_rich_media
|
},
|
||||||
),
|
%{
|
||||||
worker(
|
id: :cachex_scrubber,
|
||||||
Cachex,
|
start:
|
||||||
|
{Cachex, :start_link,
|
||||||
[
|
[
|
||||||
:scrubber_cache,
|
:scrubber_cache,
|
||||||
[
|
[
|
||||||
limit: 2500
|
limit: 2500
|
||||||
]
|
]
|
||||||
],
|
]}
|
||||||
id: :cachex_scrubber
|
},
|
||||||
),
|
%{
|
||||||
worker(
|
id: :cachex_idem,
|
||||||
Cachex,
|
start:
|
||||||
|
{Cachex, :start_link,
|
||||||
[
|
[
|
||||||
:idempotency_cache,
|
:idempotency_cache,
|
||||||
[
|
[
|
||||||
|
@@ -101,26 +109,49 @@ def start(_type, _args) do
              ),
              limit: 2500
            ]
-          ],
-          id: :cachex_idem
-        ),
-        worker(Pleroma.FlakeId, []),
-        worker(Pleroma.ScheduledActivityWorker, [])
+          ]}
+        },
+        %{id: Pleroma.FlakeId, start: {Pleroma.FlakeId, :start_link, []}},
+        %{
+          id: Pleroma.ScheduledActivityWorker,
+          start: {Pleroma.ScheduledActivityWorker, :start_link, []}
+        }
      ] ++
        hackney_pool_children() ++
        [
-          worker(Pleroma.Web.Federator.RetryQueue, []),
-          worker(Pleroma.Web.OAuth.Token.CleanWorker, []),
-          worker(Pleroma.Stats, []),
-          worker(Task, [&Pleroma.Web.Push.init/0], restart: :temporary, id: :web_push_init),
-          worker(Task, [&Pleroma.Web.Federator.init/0], restart: :temporary, id: :federator_init)
+          %{
+            id: Pleroma.Web.Federator.RetryQueue,
+            start: {Pleroma.Web.Federator.RetryQueue, :start_link, []}
+          },
+          %{
+            id: Pleroma.Web.OAuth.Token.CleanWorker,
+            start: {Pleroma.Web.OAuth.Token.CleanWorker, :start_link, []}
+          },
+          %{
+            id: Pleroma.Stats,
+            start: {Pleroma.Stats, :start_link, []}
+          },
+          %{
+            id: :web_push_init,
+            start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
+            restart: :temporary
+          },
+          %{
+            id: :federator_init,
+            start: {Task, :start_link, [&Pleroma.Web.Federator.init/0]},
+            restart: :temporary
+          }
        ] ++
        streamer_child() ++
        chat_child() ++
        [
          # Start the endpoint when the application starts
-          supervisor(Pleroma.Web.Endpoint, []),
-          worker(Pleroma.Gopher.Server, [])
+          %{
+            id: Pleroma.Web.Endpoint,
+            start: {Pleroma.Web.Endpoint, :start_link, []},
+            type: :supervisor
+          },
+          %{id: Pleroma.Gopher.Server, start: {Pleroma.Gopher.Server, :start_link, []}}
        ]

    # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
@@ -144,7 +175,6 @@ defp setup_instrumenters do
      Pleroma.Repo.Instrumenter.setup()
    end

-    Prometheus.Registry.register_collector(:prometheus_process_collector)
    Pleroma.Web.Endpoint.MetricsExporter.setup()
    Pleroma.Web.Endpoint.PipelineInstrumenter.setup()
    Pleroma.Web.Endpoint.Instrumenter.setup()
@@ -157,24 +187,29 @@ def enabled_hackney_pools do
      else
        []
      end ++
-      if Pleroma.Config.get([Pleroma.Uploader, :proxy_remote]) do
+      if Pleroma.Config.get([Pleroma.Upload, :proxy_remote]) do
        [:upload]
      else
        []
      end
  end

-  if Mix.env() == :test do
+  if Pleroma.Config.get(:env) == :test do
    defp streamer_child, do: []
    defp chat_child, do: []
  else
    defp streamer_child do
-      [worker(Pleroma.Web.Streamer, [])]
+      [%{id: Pleroma.Web.Streamer, start: {Pleroma.Web.Streamer, :start_link, []}}]
    end

    defp chat_child do
      if Pleroma.Config.get([:chat, :enabled]) do
-        [worker(Pleroma.Web.ChatChannel.ChatChannelState, [])]
+        [
+          %{
+            id: Pleroma.Web.ChatChannel.ChatChannelState,
+            start: {Pleroma.Web.ChatChannel.ChatChannelState, :start_link, []}
+          }
+        ]
      else
        []
      end
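The hunks above replace the deprecated Supervisor.Spec helpers (worker/2 and supervisor/2) with plain child-spec maps. A minimal sketch of the equivalence, using an illustrative MyWorker module that is not part of this commit:

    # Old style (required `import Supervisor.Spec`, deprecated since Elixir 1.5):
    #   worker(MyWorker, [], restart: :temporary)
    #
    # New style: a bare map that Supervisor.start_link/2 accepts directly.
    children = [
      %{
        id: MyWorker,
        start: {MyWorker, :start_link, []},
        restart: :temporary
      }
    ]

    Supervisor.start_link(children, strategy: :one_for_one)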
@ -1,3 +1,7 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.BBS.Authenticator do
|
defmodule Pleroma.BBS.Authenticator do
|
||||||
use Sshd.PasswordAuthenticator
|
use Sshd.PasswordAuthenticator
|
||||||
alias Comeonin.Pbkdf2
|
alias Comeonin.Pbkdf2
|
||||||
|
|
|
@ -1,3 +1,7 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.BBS.Handler do
|
defmodule Pleroma.BBS.Handler do
|
||||||
use Sshd.ShellHandler
|
use Sshd.ShellHandler
|
||||||
alias Pleroma.Activity
|
alias Pleroma.Activity
|
||||||
|
|
|
@ -1,3 +1,7 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.Bookmark do
|
defmodule Pleroma.Bookmark do
|
||||||
use Ecto.Schema
|
use Ecto.Schema
|
||||||
|
|
||||||
|
|
|
@ -3,6 +3,8 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.Captcha do
|
defmodule Pleroma.Captcha do
|
||||||
|
import Pleroma.Web.Gettext
|
||||||
|
|
||||||
alias Calendar.DateTime
|
alias Calendar.DateTime
|
||||||
alias Plug.Crypto.KeyGenerator
|
alias Plug.Crypto.KeyGenerator
|
||||||
alias Plug.Crypto.MessageEncryptor
|
alias Plug.Crypto.MessageEncryptor
|
||||||
|
@ -83,10 +85,11 @@ def handle_call({:validate, token, captcha, answer_data}, _from, state) do
|
||||||
with {:ok, data} <- MessageEncryptor.decrypt(answer_data, secret, sign_secret),
|
with {:ok, data} <- MessageEncryptor.decrypt(answer_data, secret, sign_secret),
|
||||||
%{at: at, answer_data: answer_md5} <- :erlang.binary_to_term(data) do
|
%{at: at, answer_data: answer_md5} <- :erlang.binary_to_term(data) do
|
||||||
try do
|
try do
|
||||||
if DateTime.before?(at, valid_if_after), do: throw({:error, "CAPTCHA expired"})
|
if DateTime.before?(at, valid_if_after),
|
||||||
|
do: throw({:error, dgettext("errors", "CAPTCHA expired")})
|
||||||
|
|
||||||
if not is_nil(Cachex.get!(:used_captcha_cache, token)),
|
if not is_nil(Cachex.get!(:used_captcha_cache, token)),
|
||||||
do: throw({:error, "CAPTCHA already used"})
|
do: throw({:error, dgettext("errors", "CAPTCHA already used")})
|
||||||
|
|
||||||
res = method().validate(token, captcha, answer_md5)
|
res = method().validate(token, captcha, answer_md5)
|
||||||
# Throw if an error occurs
|
# Throw if an error occurs
|
||||||
|
@ -101,7 +104,7 @@ def handle_call({:validate, token, captcha, answer_data}, _from, state) do
|
||||||
:throw, e -> e
|
:throw, e -> e
|
||||||
end
|
end
|
||||||
else
|
else
|
||||||
_ -> {:error, "Invalid answer data"}
|
_ -> {:error, dgettext("errors", "Invalid answer data")}
|
||||||
end
|
end
|
||||||
|
|
||||||
{:reply, result, state}
|
{:reply, result, state}
|
||||||
|
|
|
@ -3,6 +3,7 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.Captcha.Kocaptcha do
|
defmodule Pleroma.Captcha.Kocaptcha do
|
||||||
|
import Pleroma.Web.Gettext
|
||||||
alias Pleroma.Captcha.Service
|
alias Pleroma.Captcha.Service
|
||||||
@behaviour Service
|
@behaviour Service
|
||||||
|
|
||||||
|
@ -12,7 +13,7 @@ def new do
|
||||||
|
|
||||||
case Tesla.get(endpoint <> "/new") do
|
case Tesla.get(endpoint <> "/new") do
|
||||||
{:error, _} ->
|
{:error, _} ->
|
||||||
%{error: "Kocaptcha service unavailable"}
|
%{error: dgettext("errors", "Kocaptcha service unavailable")}
|
||||||
|
|
||||||
{:ok, res} ->
|
{:ok, res} ->
|
||||||
json_resp = Jason.decode!(res.body)
|
json_resp = Jason.decode!(res.body)
|
||||||
|
@ -32,6 +33,6 @@ def validate(_token, captcha, answer_data) do
|
||||||
if not is_nil(captcha) and
|
if not is_nil(captcha) and
|
||||||
:crypto.hash(:md5, captcha) |> Base.encode16() == String.upcase(answer_data),
|
:crypto.hash(:md5, captcha) |> Base.encode16() == String.upcase(answer_data),
|
||||||
do: :ok,
|
do: :ok,
|
||||||
else: {:error, "Invalid CAPTCHA"}
|
else: {:error, dgettext("errors", "Invalid CAPTCHA")}
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@@ -38,7 +38,7 @@ def put([key], value), do: put(key, value)

  def put([parent_key | keys], value) do
    parent =
-      Application.get_env(:pleroma, parent_key)
+      Application.get_env(:pleroma, parent_key, [])
      |> put_in(keys, value)

    Application.put_env(:pleroma, parent_key, parent)
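The change above passes a default of [] so a nested key can be written even when the parent key has no configuration yet. A hedged usage sketch (the group and key names are illustrative, not taken from this commit):

    # Assumes no :illustrative_group config exists yet; the [] default lets put_in/3 succeed.
    Pleroma.Config.put([:illustrative_group, :enabled], true)
    Pleroma.Config.get([:illustrative_group, :enabled])
    # => true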
lib/pleroma/config/transfer_task.ex (new file, 59 lines)
@@ -0,0 +1,59 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Config.TransferTask do
+  use Task
+  alias Pleroma.Web.AdminAPI.Config
+
+  def start_link do
+    load_and_update_env()
+    if Pleroma.Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Pleroma.Repo)
+    :ignore
+  end
+
+  def load_and_update_env do
+    if Pleroma.Config.get([:instance, :dynamic_configuration]) and
+         Ecto.Adapters.SQL.table_exists?(Pleroma.Repo, "config") do
+      for_restart =
+        Pleroma.Repo.all(Config)
+        |> Enum.map(&update_env(&1))
+
+      # We need to restart applications for loaded settings take effect
+      for_restart
+      |> Enum.reject(&(&1 in [:pleroma, :ok]))
+      |> Enum.each(fn app ->
+        Application.stop(app)
+        :ok = Application.start(app)
+      end)
+    end
+  end
+
+  defp update_env(setting) do
+    try do
+      key =
+        if String.starts_with?(setting.key, "Pleroma.") do
+          "Elixir." <> setting.key
+        else
+          String.trim_leading(setting.key, ":")
+        end
+
+      group = String.to_existing_atom(setting.group)
+
+      Application.put_env(
+        group,
+        String.to_existing_atom(key),
+        Config.from_binary(setting.value)
+      )
+
+      group
+    rescue
+      e ->
+        require Logger
+
+        Logger.warn(
+          "updating env causes error, key: #{inspect(setting.key)}, error: #{inspect(e)}"
+        )
+    end
+  end
+end
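The transfer task only does anything when database-backed configuration is switched on; a hedged sketch of the setting it checks (the key name comes from the code above, the file and value are assumptions about a typical deployment):

    # config/prod.secret.exs
    config :pleroma, :instance, dynamic_configuration: true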
@ -49,7 +49,7 @@ def create_or_bump_for(activity, opts \\ []) do
|
||||||
with true <- Pleroma.Web.ActivityPub.Visibility.is_direct?(activity),
|
with true <- Pleroma.Web.ActivityPub.Visibility.is_direct?(activity),
|
||||||
"Create" <- activity.data["type"],
|
"Create" <- activity.data["type"],
|
||||||
object <- Pleroma.Object.normalize(activity),
|
object <- Pleroma.Object.normalize(activity),
|
||||||
"Note" <- object.data["type"],
|
true <- object.data["type"] in ["Note", "Question"],
|
||||||
ap_id when is_binary(ap_id) and byte_size(ap_id) > 0 <- object.data["context"] do
|
ap_id when is_binary(ap_id) and byte_size(ap_id) > 0 <- object.data["context"] do
|
||||||
{:ok, conversation} = create_for_ap_id(ap_id)
|
{:ok, conversation} = create_for_ap_id(ap_id)
|
||||||
|
|
||||||
|
|
|
@ -59,10 +59,10 @@ def mark_as_unread(participation) do
|
||||||
def for_user(user, params \\ %{}) do
|
def for_user(user, params \\ %{}) do
|
||||||
from(p in __MODULE__,
|
from(p in __MODULE__,
|
||||||
where: p.user_id == ^user.id,
|
where: p.user_id == ^user.id,
|
||||||
order_by: [desc: p.updated_at]
|
order_by: [desc: p.updated_at],
|
||||||
|
preload: [conversation: [:users]]
|
||||||
)
|
)
|
||||||
|> Pleroma.Pagination.fetch_paginated(params)
|
|> Pleroma.Pagination.fetch_paginated(params)
|
||||||
|> Repo.preload(conversation: [:users])
|
|
||||||
end
|
end
|
||||||
|
|
||||||
def for_user_with_last_activity_id(user, params \\ %{}) do
|
def for_user_with_last_activity_id(user, params \\ %{}) do
|
||||||
|
@ -79,5 +79,6 @@ def for_user_with_last_activity_id(user, params \\ %{}) do
|
||||||
| last_activity_id: activity_id
|
| last_activity_id: activity_id
|
||||||
}
|
}
|
||||||
end)
|
end)
|
||||||
|
|> Enum.filter(& &1.last_activity_id)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@@ -3,11 +3,58 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Emails.Mailer do
-  use Swoosh.Mailer, otp_app: :pleroma
+  @moduledoc """
+  Defines the Pleroma mailer.
+
+  The module contains functions to delivery email using Swoosh.Mailer.
+  """
+
+  alias Swoosh.DeliveryError
+
+  @otp_app :pleroma
+  @mailer_config [otp: :pleroma]
+
+  @spec enabled?() :: boolean()
+  def enabled?, do: Pleroma.Config.get([__MODULE__, :enabled])
+
+  @doc "add email to queue"
   def deliver_async(email, config \\ []) do
     PleromaJobQueue.enqueue(:mailer, __MODULE__, [:deliver_async, email, config])
   end

+  @doc "callback to perform send email from queue"
   def perform(:deliver_async, email, config), do: deliver(email, config)

+  @spec deliver(Swoosh.Email.t(), Keyword.t()) :: {:ok, term} | {:error, term}
+  def deliver(email, config \\ [])
+
+  def deliver(email, config) do
+    case enabled?() do
+      true -> Swoosh.Mailer.deliver(email, parse_config(config))
+      false -> {:error, :deliveries_disabled}
+    end
+  end
+
+  @spec deliver!(Swoosh.Email.t(), Keyword.t()) :: term | no_return
+  def deliver!(email, config \\ [])
+
+  def deliver!(email, config) do
+    case deliver(email, config) do
+      {:ok, result} -> result
+      {:error, reason} -> raise DeliveryError, reason: reason
+    end
+  end
+
+  @on_load :validate_dependency
+
+  @doc false
+  def validate_dependency do
+    parse_config([])
+    |> Keyword.get(:adapter)
+    |> Swoosh.Mailer.validate_dependency()
+  end
+
+  defp parse_config(config) do
+    Swoosh.Mailer.parse_config(@otp_app, __MODULE__, @mailer_config, config)
+  end
 end
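With the rewrite above, delivery is gated on the [Pleroma.Emails.Mailer, :enabled] setting instead of going straight through `use Swoosh.Mailer`. A hedged usage sketch (the message built here is purely illustrative):

    import Swoosh.Email

    email =
      new()
      |> to("someone@example.com")
      |> from("pleroma@example.com")
      |> subject("Illustrative message")
      |> text_body("Hello")

    # Synchronous: {:ok, result}, or {:error, :deliveries_disabled} when mailing is off.
    Pleroma.Emails.Mailer.deliver(email)

    # Queued through PleromaJobQueue; the worker later calls perform(:deliver_async, ...).
    Pleroma.Emails.Mailer.deliver_async(email)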
@@ -23,13 +23,8 @@ defp recipient(email, nil), do: email
  defp recipient(email, name), do: {name, email}
  defp recipient(%Pleroma.User{} = user), do: recipient(user.email, user.name)

-  def password_reset_email(user, password_reset_token) when is_binary(password_reset_token) do
-    password_reset_url =
-      Router.Helpers.util_url(
-        Endpoint,
-        :show_password_reset,
-        password_reset_token
-      )
+  def password_reset_email(user, token) when is_binary(token) do
+    password_reset_url = Router.Helpers.reset_password_url(Endpoint, :reset, token)

    html_body = """
    <h3>Reset your password at #{instance_name()}</h3>
@ -22,7 +22,6 @@ defmodule Pleroma.Emoji do
|
||||||
|
|
||||||
@ets __MODULE__.Ets
|
@ets __MODULE__.Ets
|
||||||
@ets_options [:ordered_set, :protected, :named_table, {:read_concurrency, true}]
|
@ets_options [:ordered_set, :protected, :named_table, {:read_concurrency, true}]
|
||||||
@groups Application.get_env(:pleroma, :emoji)[:groups]
|
|
||||||
|
|
||||||
@doc false
|
@doc false
|
||||||
def start_link do
|
def start_link do
|
||||||
|
@ -87,6 +86,8 @@ defp load do
|
||||||
"emoji"
|
"emoji"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
emoji_groups = Pleroma.Config.get([:emoji, :groups])
|
||||||
|
|
||||||
case File.ls(emoji_dir_path) do
|
case File.ls(emoji_dir_path) do
|
||||||
{:error, :enoent} ->
|
{:error, :enoent} ->
|
||||||
# The custom emoji directory doesn't exist,
|
# The custom emoji directory doesn't exist,
|
||||||
|
@ -97,14 +98,28 @@ defp load do
|
||||||
# There was some other error
|
# There was some other error
|
||||||
Logger.error("Could not access the custom emoji directory #{emoji_dir_path}: #{e}")
|
Logger.error("Could not access the custom emoji directory #{emoji_dir_path}: #{e}")
|
||||||
|
|
||||||
{:ok, packs} ->
|
{:ok, results} ->
|
||||||
|
grouped =
|
||||||
|
Enum.group_by(results, fn file -> File.dir?(Path.join(emoji_dir_path, file)) end)
|
||||||
|
|
||||||
|
packs = grouped[true] || []
|
||||||
|
files = grouped[false] || []
|
||||||
|
|
||||||
# Print the packs we've found
|
# Print the packs we've found
|
||||||
Logger.info("Found emoji packs: #{Enum.join(packs, ", ")}")
|
Logger.info("Found emoji packs: #{Enum.join(packs, ", ")}")
|
||||||
|
|
||||||
|
if not Enum.empty?(files) do
|
||||||
|
Logger.warn(
|
||||||
|
"Found files in the emoji folder. These will be ignored, please move them to a subdirectory\nFound files: #{
|
||||||
|
Enum.join(files, ", ")
|
||||||
|
}"
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
emojis =
|
emojis =
|
||||||
Enum.flat_map(
|
Enum.flat_map(
|
||||||
packs,
|
packs,
|
||||||
fn pack -> load_pack(Path.join(emoji_dir_path, pack)) end
|
fn pack -> load_pack(Path.join(emoji_dir_path, pack), emoji_groups) end
|
||||||
)
|
)
|
||||||
|
|
||||||
true = :ets.insert(@ets, emojis)
|
true = :ets.insert(@ets, emojis)
|
||||||
|
@ -112,12 +127,12 @@ defp load do
|
||||||
|
|
||||||
# Compat thing for old custom emoji handling & default emoji,
|
# Compat thing for old custom emoji handling & default emoji,
|
||||||
# it should run even if there are no emoji packs
|
# it should run even if there are no emoji packs
|
||||||
shortcode_globs = Application.get_env(:pleroma, :emoji)[:shortcode_globs] || []
|
shortcode_globs = Pleroma.Config.get([:emoji, :shortcode_globs], [])
|
||||||
|
|
||||||
emojis =
|
emojis =
|
||||||
(load_from_file("config/emoji.txt") ++
|
(load_from_file("config/emoji.txt", emoji_groups) ++
|
||||||
load_from_file("config/custom_emoji.txt") ++
|
load_from_file("config/custom_emoji.txt", emoji_groups) ++
|
||||||
load_from_globs(shortcode_globs))
|
load_from_globs(shortcode_globs, emoji_groups))
|
||||||
|> Enum.reject(fn value -> value == nil end)
|
|> Enum.reject(fn value -> value == nil end)
|
||||||
|
|
||||||
true = :ets.insert(@ets, emojis)
|
true = :ets.insert(@ets, emojis)
|
||||||
|
@ -125,23 +140,25 @@ defp load do
|
||||||
:ok
|
:ok
|
||||||
end
|
end
|
||||||
|
|
||||||
defp load_pack(pack_dir) do
|
defp load_pack(pack_dir, emoji_groups) do
|
||||||
pack_name = Path.basename(pack_dir)
|
pack_name = Path.basename(pack_dir)
|
||||||
|
|
||||||
emoji_txt = Path.join(pack_dir, "emoji.txt")
|
emoji_txt = Path.join(pack_dir, "emoji.txt")
|
||||||
|
|
||||||
if File.exists?(emoji_txt) do
|
if File.exists?(emoji_txt) do
|
||||||
load_from_file(emoji_txt)
|
load_from_file(emoji_txt, emoji_groups)
|
||||||
else
|
else
|
||||||
|
extensions = Pleroma.Config.get([:emoji, :pack_extensions])
|
||||||
|
|
||||||
Logger.info(
|
Logger.info(
|
||||||
"No emoji.txt found for pack \"#{pack_name}\", assuming all .png files are emoji"
|
"No emoji.txt found for pack \"#{pack_name}\", assuming all #{Enum.join(extensions, ", ")} files are emoji"
|
||||||
)
|
)
|
||||||
|
|
||||||
make_shortcode_to_file_map(pack_dir, [".png"])
|
make_shortcode_to_file_map(pack_dir, extensions)
|
||||||
|> Enum.map(fn {shortcode, rel_file} ->
|
|> Enum.map(fn {shortcode, rel_file} ->
|
||||||
filename = Path.join("/emoji/#{pack_name}", rel_file)
|
filename = Path.join("/emoji/#{pack_name}", rel_file)
|
||||||
|
|
||||||
{shortcode, filename, [to_string(match_extra(@groups, filename))]}
|
{shortcode, filename, [to_string(match_extra(emoji_groups, filename))]}
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
@ -170,21 +187,21 @@ def find_all_emoji(dir, exts) do
|
||||||
|> Enum.filter(fn f -> Path.extname(f) in exts end)
|
|> Enum.filter(fn f -> Path.extname(f) in exts end)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp load_from_file(file) do
|
defp load_from_file(file, emoji_groups) do
|
||||||
if File.exists?(file) do
|
if File.exists?(file) do
|
||||||
load_from_file_stream(File.stream!(file))
|
load_from_file_stream(File.stream!(file), emoji_groups)
|
||||||
else
|
else
|
||||||
[]
|
[]
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp load_from_file_stream(stream) do
|
defp load_from_file_stream(stream, emoji_groups) do
|
||||||
stream
|
stream
|
||||||
|> Stream.map(&String.trim/1)
|
|> Stream.map(&String.trim/1)
|
||||||
|> Stream.map(fn line ->
|
|> Stream.map(fn line ->
|
||||||
case String.split(line, ~r/,\s*/) do
|
case String.split(line, ~r/,\s*/) do
|
||||||
[name, file] ->
|
[name, file] ->
|
||||||
{name, file, [to_string(match_extra(@groups, file))]}
|
{name, file, [to_string(match_extra(emoji_groups, file))]}
|
||||||
|
|
||||||
[name, file | tags] ->
|
[name, file | tags] ->
|
||||||
{name, file, tags}
|
{name, file, tags}
|
||||||
|
@ -196,7 +213,7 @@ defp load_from_file_stream(stream) do
|
||||||
|> Enum.to_list()
|
|> Enum.to_list()
|
||||||
end
|
end
|
||||||
|
|
||||||
defp load_from_globs(globs) do
|
defp load_from_globs(globs, emoji_groups) do
|
||||||
static_path = Path.join(:code.priv_dir(:pleroma), "static")
|
static_path = Path.join(:code.priv_dir(:pleroma), "static")
|
||||||
|
|
||||||
paths =
|
paths =
|
||||||
|
@ -207,7 +224,7 @@ defp load_from_globs(globs) do
|
||||||
|> Enum.concat()
|
|> Enum.concat()
|
||||||
|
|
||||||
Enum.map(paths, fn path ->
|
Enum.map(paths, fn path ->
|
||||||
tag = match_extra(@groups, Path.join("/", Path.relative_to(path, static_path)))
|
tag = match_extra(emoji_groups, Path.join("/", Path.relative_to(path, static_path)))
|
||||||
shortcode = Path.basename(path, Path.extname(path))
|
shortcode = Path.basename(path, Path.extname(path))
|
||||||
external_path = Path.join("/", Path.relative_to(path, static_path))
|
external_path = Path.join("/", Path.relative_to(path, static_path))
|
||||||
{shortcode, external_path, [to_string(tag)]}
|
{shortcode, external_path, [to_string(tag)]}
|
||||||
|
|
lib/pleroma/helpers/uri_helper.ex (new file, 27 lines)
@@ -0,0 +1,27 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Helpers.UriHelper do
+  def append_uri_params(uri, appended_params) do
+    uri = URI.parse(uri)
+    appended_params = for {k, v} <- appended_params, into: %{}, do: {to_string(k), v}
+    existing_params = URI.query_decoder(uri.query || "") |> Enum.into(%{})
+    updated_params_keys = Enum.uniq(Map.keys(existing_params) ++ Map.keys(appended_params))
+
+    updated_params =
+      for k <- updated_params_keys, do: {k, appended_params[k] || existing_params[k]}
+
+    uri
+    |> Map.put(:query, URI.encode_query(updated_params))
+    |> URI.to_string()
+  end
+
+  def append_param_if_present(%{} = params, param_name, param_value) do
+    if param_value do
+      Map.put(params, param_name, param_value)
+    else
+      params
+    end
+  end
+end
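A hedged usage sketch of the helper added above (the URL, parameter name, and id value are illustrative):

    Pleroma.Helpers.UriHelper.append_uri_params(
      "https://example.com/api/v1/timelines/home?limit=20",
      %{max_id: "9gZ5VYhDG3cDJjSlgw"}
    )
    # existing params are kept and the new one is merged in, e.g.
    # "https://example.com/api/v1/timelines/home?limit=20&max_id=9gZ5VYhDG3cDJjSlgw"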
@ -89,7 +89,7 @@ def extract_first_external_url(object, content) do
|
||||||
Cachex.fetch!(:scrubber_cache, key, fn _key ->
|
Cachex.fetch!(:scrubber_cache, key, fn _key ->
|
||||||
result =
|
result =
|
||||||
content
|
content
|
||||||
|> Floki.filter_out("a.mention")
|
|> Floki.filter_out("a.mention,a.hashtag,a[rel~=\"tag\"]")
|
||||||
|> Floki.attribute("a", "href")
|
|> Floki.attribute("a", "href")
|
||||||
|> Enum.at(0)
|
|> Enum.at(0)
|
||||||
|
|
||||||
|
@ -104,7 +104,6 @@ defmodule Pleroma.HTML.Scrubber.TwitterText do
|
||||||
paragraphs, breaks and links are allowed through the filter.
|
paragraphs, breaks and links are allowed through the filter.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@markup Application.get_env(:pleroma, :markup)
|
|
||||||
@valid_schemes Pleroma.Config.get([:uri_schemes, :valid_schemes], [])
|
@valid_schemes Pleroma.Config.get([:uri_schemes, :valid_schemes], [])
|
||||||
|
|
||||||
require HtmlSanitizeEx.Scrubber.Meta
|
require HtmlSanitizeEx.Scrubber.Meta
|
||||||
|
@ -142,9 +141,7 @@ defmodule Pleroma.HTML.Scrubber.TwitterText do
|
||||||
Meta.allow_tag_with_these_attributes("span", [])
|
Meta.allow_tag_with_these_attributes("span", [])
|
||||||
|
|
||||||
# allow inline images for custom emoji
|
# allow inline images for custom emoji
|
||||||
@allow_inline_images Keyword.get(@markup, :allow_inline_images)
|
if Pleroma.Config.get([:markup, :allow_inline_images]) do
|
||||||
|
|
||||||
if @allow_inline_images do
|
|
||||||
# restrict img tags to http/https only, because of MediaProxy.
|
# restrict img tags to http/https only, because of MediaProxy.
|
||||||
Meta.allow_tag_with_uri_attributes("img", ["src"], ["http", "https"])
|
Meta.allow_tag_with_uri_attributes("img", ["src"], ["http", "https"])
|
||||||
|
|
||||||
|
@ -168,7 +165,6 @@ defmodule Pleroma.HTML.Scrubber.Default do
|
||||||
# credo:disable-for-previous-line
|
# credo:disable-for-previous-line
|
||||||
# No idea how to fix this one…
|
# No idea how to fix this one…
|
||||||
|
|
||||||
@markup Application.get_env(:pleroma, :markup)
|
|
||||||
@valid_schemes Pleroma.Config.get([:uri_schemes, :valid_schemes], [])
|
@valid_schemes Pleroma.Config.get([:uri_schemes, :valid_schemes], [])
|
||||||
|
|
||||||
Meta.remove_cdata_sections_before_scrub()
|
Meta.remove_cdata_sections_before_scrub()
|
||||||
|
@ -213,7 +209,7 @@ defmodule Pleroma.HTML.Scrubber.Default do
|
||||||
Meta.allow_tag_with_this_attribute_values("span", "class", ["h-card"])
|
Meta.allow_tag_with_this_attribute_values("span", "class", ["h-card"])
|
||||||
Meta.allow_tag_with_these_attributes("span", [])
|
Meta.allow_tag_with_these_attributes("span", [])
|
||||||
|
|
||||||
@allow_inline_images Keyword.get(@markup, :allow_inline_images)
|
@allow_inline_images Pleroma.Config.get([:markup, :allow_inline_images])
|
||||||
|
|
||||||
if @allow_inline_images do
|
if @allow_inline_images do
|
||||||
# restrict img tags to http/https only, because of MediaProxy.
|
# restrict img tags to http/https only, because of MediaProxy.
|
||||||
|
@ -228,9 +224,7 @@ defmodule Pleroma.HTML.Scrubber.Default do
|
||||||
])
|
])
|
||||||
end
|
end
|
||||||
|
|
||||||
@allow_tables Keyword.get(@markup, :allow_tables)
|
if Pleroma.Config.get([:markup, :allow_tables]) do
|
||||||
|
|
||||||
if @allow_tables do
|
|
||||||
Meta.allow_tag_with_these_attributes("table", [])
|
Meta.allow_tag_with_these_attributes("table", [])
|
||||||
Meta.allow_tag_with_these_attributes("tbody", [])
|
Meta.allow_tag_with_these_attributes("tbody", [])
|
||||||
Meta.allow_tag_with_these_attributes("td", [])
|
Meta.allow_tag_with_these_attributes("td", [])
|
||||||
|
@ -239,9 +233,7 @@ defmodule Pleroma.HTML.Scrubber.Default do
|
||||||
Meta.allow_tag_with_these_attributes("tr", [])
|
Meta.allow_tag_with_these_attributes("tr", [])
|
||||||
end
|
end
|
||||||
|
|
||||||
@allow_headings Keyword.get(@markup, :allow_headings)
|
if Pleroma.Config.get([:markup, :allow_headings]) do
|
||||||
|
|
||||||
if @allow_headings do
|
|
||||||
Meta.allow_tag_with_these_attributes("h1", [])
|
Meta.allow_tag_with_these_attributes("h1", [])
|
||||||
Meta.allow_tag_with_these_attributes("h2", [])
|
Meta.allow_tag_with_these_attributes("h2", [])
|
||||||
Meta.allow_tag_with_these_attributes("h3", [])
|
Meta.allow_tag_with_these_attributes("h3", [])
|
||||||
|
@ -249,9 +241,7 @@ defmodule Pleroma.HTML.Scrubber.Default do
|
||||||
Meta.allow_tag_with_these_attributes("h5", [])
|
Meta.allow_tag_with_these_attributes("h5", [])
|
||||||
end
|
end
|
||||||
|
|
||||||
@allow_fonts Keyword.get(@markup, :allow_fonts)
|
if Pleroma.Config.get([:markup, :allow_fonts]) do
|
||||||
|
|
||||||
if @allow_fonts do
|
|
||||||
Meta.allow_tag_with_these_attributes("font", ["face"])
|
Meta.allow_tag_with_these_attributes("font", ["face"])
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -29,7 +29,7 @@ def new(opts \\ []) do
|
||||||
|
|
||||||
# fetch Hackney options
|
# fetch Hackney options
|
||||||
#
|
#
|
||||||
defp hackney_options(opts) do
|
def hackney_options(opts) do
|
||||||
options = Keyword.get(opts, :adapter, [])
|
options = Keyword.get(opts, :adapter, [])
|
||||||
adapter_options = Pleroma.Config.get([:http, :adapter], [])
|
adapter_options = Pleroma.Config.get([:http, :adapter], [])
|
||||||
proxy_url = Pleroma.Config.get([:http, :proxy_url], nil)
|
proxy_url = Pleroma.Config.get([:http, :proxy_url], nil)
|
||||||
|
|
|
@ -65,13 +65,7 @@ defp process_sni_options(options, url) do
|
||||||
end
|
end
|
||||||
|
|
||||||
def process_request_options(options) do
|
def process_request_options(options) do
|
||||||
config = Application.get_env(:pleroma, :http, [])
|
Keyword.merge(Pleroma.HTTP.Connection.hackney_options([]), options)
|
||||||
proxy = Keyword.get(config, :proxy_url, nil)
|
|
||||||
|
|
||||||
case proxy do
|
|
||||||
nil -> options
|
|
||||||
_ -> options ++ [proxy: proxy]
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
|
|
|
@ -1,3 +1,7 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.Instances do
|
defmodule Pleroma.Instances do
|
||||||
@moduledoc "Instances context."
|
@moduledoc "Instances context."
|
||||||
|
|
||||||
|
@ -13,7 +17,7 @@ def set_consistently_unreachable(url_or_host),
|
||||||
|
|
||||||
def reachability_datetime_threshold do
|
def reachability_datetime_threshold do
|
||||||
federation_reachability_timeout_days =
|
federation_reachability_timeout_days =
|
||||||
Pleroma.Config.get(:instance)[:federation_reachability_timeout_days] || 0
|
Pleroma.Config.get([:instance, :federation_reachability_timeout_days], 0)
|
||||||
|
|
||||||
if federation_reachability_timeout_days > 0 do
|
if federation_reachability_timeout_days > 0 do
|
||||||
NaiveDateTime.add(
|
NaiveDateTime.add(
|
||||||
|
|
|
@ -1,3 +1,7 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.Instances.Instance do
|
defmodule Pleroma.Instances.Instance do
|
||||||
@moduledoc "Instance."
|
@moduledoc "Instance."
|
||||||
|
|
||||||
|
|
|
@@ -35,10 +35,12 @@ def generate_rsa_pem do
  end

  def keys_from_pem(pem) do
-    [private_key_code] = :public_key.pem_decode(pem)
-    private_key = :public_key.pem_entry_decode(private_key_code)
-    {:RSAPrivateKey, _, modulus, exponent, _, _, _, _, _, _, _} = private_key
-    public_key = {:RSAPublicKey, modulus, exponent}
-    {:ok, private_key, public_key}
+    with [private_key_code] <- :public_key.pem_decode(pem),
+         private_key <- :public_key.pem_entry_decode(private_key_code),
+         {:RSAPrivateKey, _, modulus, exponent, _, _, _, _, _, _, _} <- private_key do
+      {:ok, private_key, {:RSAPublicKey, modulus, exponent}}
+    else
+      error -> {:error, error}
+    end
  end
end
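With the change above, a malformed PEM comes back as an error tuple instead of raising a MatchError. A hedged sketch of how a caller might branch on it (the pem variable is assumed to hold a PEM-encoded RSA key):

    case keys_from_pem(pem) do
      {:ok, private_key, public_key} ->
        # use the decoded :public_key records
        {private_key, public_key}

      {:error, reason} ->
        # e.g. an empty pem_decode result for a bad PEM
        {:error, reason}
    end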
@ -16,6 +16,7 @@ defmodule Pleroma.List do
|
||||||
belongs_to(:user, User, type: Pleroma.FlakeId)
|
belongs_to(:user, User, type: Pleroma.FlakeId)
|
||||||
field(:title, :string)
|
field(:title, :string)
|
||||||
field(:following, {:array, :string}, default: [])
|
field(:following, {:array, :string}, default: [])
|
||||||
|
field(:ap_id, :string)
|
||||||
|
|
||||||
timestamps()
|
timestamps()
|
||||||
end
|
end
|
||||||
|
@ -55,6 +56,10 @@ def get(id, %{id: user_id} = _user) do
|
||||||
Repo.one(query)
|
Repo.one(query)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def get_by_ap_id(ap_id) do
|
||||||
|
Repo.get_by(__MODULE__, ap_id: ap_id)
|
||||||
|
end
|
||||||
|
|
||||||
def get_following(%Pleroma.List{following: following} = _list) do
|
def get_following(%Pleroma.List{following: following} = _list) do
|
||||||
q =
|
q =
|
||||||
from(
|
from(
|
||||||
|
@ -105,7 +110,14 @@ def rename(%Pleroma.List{} = list, title) do
|
||||||
|
|
||||||
def create(title, %User{} = creator) do
|
def create(title, %User{} = creator) do
|
||||||
list = %Pleroma.List{user_id: creator.id, title: title}
|
list = %Pleroma.List{user_id: creator.id, title: title}
|
||||||
Repo.insert(list)
|
|
||||||
|
Repo.transaction(fn ->
|
||||||
|
list = Repo.insert!(list)
|
||||||
|
|
||||||
|
list
|
||||||
|
|> change(ap_id: "#{creator.ap_id}/lists/#{list.id}")
|
||||||
|
|> Repo.update!()
|
||||||
|
end)
|
||||||
end
|
end
|
||||||
|
|
||||||
def follow(%Pleroma.List{following: following} = list, %User{} = followed) do
|
def follow(%Pleroma.List{following: following} = list, %User{} = followed) do
|
||||||
|
@ -125,4 +137,19 @@ def update_follows(%Pleroma.List{} = list, attrs) do
|
||||||
|> follow_changeset(attrs)
|
|> follow_changeset(attrs)
|
||||||
|> Repo.update()
|
|> Repo.update()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def memberships(%User{follower_address: follower_address}) do
|
||||||
|
Pleroma.List
|
||||||
|
|> where([l], ^follower_address in l.following)
|
||||||
|
|> select([l], l.ap_id)
|
||||||
|
|> Repo.all()
|
||||||
|
end
|
||||||
|
|
||||||
|
def memberships(_), do: []
|
||||||
|
|
||||||
|
def member?(%Pleroma.List{following: following}, %User{follower_address: follower_address}) do
|
||||||
|
Enum.member?(following, follower_address)
|
||||||
|
end
|
||||||
|
|
||||||
|
def member?(_, _), do: false
|
||||||
end
|
end
|
||||||
|
|
|
@ -11,8 +11,9 @@ defmodule Pleroma.Notification do
|
||||||
alias Pleroma.Pagination
|
alias Pleroma.Pagination
|
||||||
alias Pleroma.Repo
|
alias Pleroma.Repo
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
alias Pleroma.Web.CommonAPI
|
|
||||||
alias Pleroma.Web.CommonAPI.Utils
|
alias Pleroma.Web.CommonAPI.Utils
|
||||||
|
alias Pleroma.Web.Push
|
||||||
|
alias Pleroma.Web.Streamer
|
||||||
|
|
||||||
import Ecto.Query
|
import Ecto.Query
|
||||||
import Ecto.Changeset
|
import Ecto.Changeset
|
||||||
|
@ -30,7 +31,8 @@ def changeset(%Notification{} = notification, attrs) do
|
||||||
|> cast(attrs, [:seen])
|
|> cast(attrs, [:seen])
|
||||||
end
|
end
|
||||||
|
|
||||||
def for_user_query(user) do
|
def for_user_query(user, opts) do
|
||||||
|
query =
|
||||||
Notification
|
Notification
|
||||||
|> where(user_id: ^user.id)
|
|> where(user_id: ^user.id)
|
||||||
|> where(
|
|> where(
|
||||||
|
@ -50,11 +52,26 @@ def for_user_query(user) do
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|> preload([n, a, o], activity: {a, object: o})
|
|> preload([n, a, o], activity: {a, object: o})
|
||||||
|
|
||||||
|
if opts[:with_muted] do
|
||||||
|
query
|
||||||
|
else
|
||||||
|
where(query, [n, a], a.actor not in ^user.info.muted_notifications)
|
||||||
|
|> where([n, a], a.actor not in ^user.info.blocks)
|
||||||
|
|> where(
|
||||||
|
[n, a],
|
||||||
|
fragment("substring(? from '.*://([^/]*)')", a.actor) not in ^user.info.domain_blocks
|
||||||
|
)
|
||||||
|
|> join(:left, [n, a], tm in Pleroma.ThreadMute,
|
||||||
|
on: tm.user_id == ^user.id and tm.context == fragment("?->>'context'", a.data)
|
||||||
|
)
|
||||||
|
|> where([n, a, o, tm], is_nil(tm.user_id))
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def for_user(user, opts \\ %{}) do
|
def for_user(user, opts \\ %{}) do
|
||||||
user
|
user
|
||||||
|> for_user_query()
|
|> for_user_query(opts)
|
||||||
|> Pagination.fetch_paginated(opts)
|
|> Pagination.fetch_paginated(opts)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -125,10 +142,21 @@ def dismiss(%{id: user_id} = _user, id) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def create_notifications(%Activity{data: %{"to" => _, "type" => type}} = activity)
|
def create_notifications(%Activity{data: %{"to" => _, "type" => "Create"}} = activity) do
|
||||||
when type in ["Create", "Like", "Announce", "Follow"] do
|
object = Object.normalize(activity)
|
||||||
users = get_notified_from_activity(activity)
|
|
||||||
|
|
||||||
|
unless object && object.data["type"] == "Answer" do
|
||||||
|
users = get_notified_from_activity(activity)
|
||||||
|
notifications = Enum.map(users, fn user -> create_notification(activity, user) end)
|
||||||
|
{:ok, notifications}
|
||||||
|
else
|
||||||
|
{:ok, []}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def create_notifications(%Activity{data: %{"to" => _, "type" => type}} = activity)
|
||||||
|
when type in ["Like", "Announce", "Follow"] do
|
||||||
|
users = get_notified_from_activity(activity)
|
||||||
notifications = Enum.map(users, fn user -> create_notification(activity, user) end)
|
notifications = Enum.map(users, fn user -> create_notification(activity, user) end)
|
||||||
{:ok, notifications}
|
{:ok, notifications}
|
||||||
end
|
end
|
||||||
|
@ -140,8 +168,9 @@ def create_notification(%Activity{} = activity, %User{} = user) do
|
||||||
unless skip?(activity, user) do
|
unless skip?(activity, user) do
|
||||||
notification = %Notification{user_id: user.id, activity: activity}
|
notification = %Notification{user_id: user.id, activity: activity}
|
||||||
{:ok, notification} = Repo.insert(notification)
|
{:ok, notification} = Repo.insert(notification)
|
||||||
Pleroma.Web.Streamer.stream("user", notification)
|
Streamer.stream("user", notification)
|
||||||
Pleroma.Web.Push.send(notification)
|
Streamer.stream("user:notification", notification)
|
||||||
|
Push.send(notification)
|
||||||
notification
|
notification
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
@ -165,32 +194,24 @@ def get_notified_from_activity(
|
||||||
|
|
||||||
def get_notified_from_activity(_, _local_only), do: []
|
def get_notified_from_activity(_, _local_only), do: []
|
||||||
|
|
||||||
|
@spec skip?(Activity.t(), User.t()) :: boolean()
|
||||||
def skip?(activity, user) do
|
def skip?(activity, user) do
|
||||||
[:self, :blocked, :local, :muted, :followers, :follows, :recently_followed]
|
[
|
||||||
|
:self,
|
||||||
|
:followers,
|
||||||
|
:follows,
|
||||||
|
:non_followers,
|
||||||
|
:non_follows,
|
||||||
|
:recently_followed
|
||||||
|
]
|
||||||
|> Enum.any?(&skip?(&1, activity, user))
|
|> Enum.any?(&skip?(&1, activity, user))
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@spec skip?(atom(), Activity.t(), User.t()) :: boolean()
|
||||||
def skip?(:self, activity, user) do
|
def skip?(:self, activity, user) do
|
||||||
activity.data["actor"] == user.ap_id
|
activity.data["actor"] == user.ap_id
|
||||||
end
|
end
|
||||||
|
|
||||||
def skip?(:blocked, activity, user) do
|
|
||||||
actor = activity.data["actor"]
|
|
||||||
User.blocks?(user, %{ap_id: actor})
|
|
||||||
end
|
|
||||||
|
|
||||||
def skip?(:local, %{local: true}, %{info: %{notification_settings: %{"local" => false}}}),
|
|
||||||
do: true
|
|
||||||
|
|
||||||
def skip?(:local, %{local: false}, %{info: %{notification_settings: %{"remote" => false}}}),
|
|
||||||
do: true
|
|
||||||
|
|
||||||
def skip?(:muted, activity, user) do
|
|
||||||
actor = activity.data["actor"]
|
|
||||||
|
|
||||||
User.mutes?(user, %{ap_id: actor}) or CommonAPI.thread_muted?(user, activity)
|
|
||||||
end
|
|
||||||
|
|
||||||
def skip?(
|
def skip?(
|
||||||
:followers,
|
:followers,
|
||||||
activity,
|
activity,
|
||||||
|
@ -201,12 +222,32 @@ def skip?(
|
||||||
User.following?(follower, user)
|
User.following?(follower, user)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def skip?(
|
||||||
|
:non_followers,
|
||||||
|
activity,
|
||||||
|
%{info: %{notification_settings: %{"non_followers" => false}}} = user
|
||||||
|
) do
|
||||||
|
actor = activity.data["actor"]
|
||||||
|
follower = User.get_cached_by_ap_id(actor)
|
||||||
|
!User.following?(follower, user)
|
||||||
|
end
|
||||||
|
|
||||||
def skip?(:follows, activity, %{info: %{notification_settings: %{"follows" => false}}} = user) do
|
def skip?(:follows, activity, %{info: %{notification_settings: %{"follows" => false}}} = user) do
|
||||||
actor = activity.data["actor"]
|
actor = activity.data["actor"]
|
||||||
followed = User.get_cached_by_ap_id(actor)
|
followed = User.get_cached_by_ap_id(actor)
|
||||||
User.following?(user, followed)
|
User.following?(user, followed)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def skip?(
|
||||||
|
:non_follows,
|
||||||
|
activity,
|
||||||
|
%{info: %{notification_settings: %{"non_follows" => false}}} = user
|
||||||
|
) do
|
||||||
|
actor = activity.data["actor"]
|
||||||
|
followed = User.get_cached_by_ap_id(actor)
|
||||||
|
!User.following?(user, followed)
|
||||||
|
end
|
||||||
|
|
||||||
def skip?(:recently_followed, %{data: %{"type" => "Follow"}} = activity, user) do
|
def skip?(:recently_followed, %{data: %{"type" => "Follow"}} = activity, user) do
|
||||||
actor = activity.data["actor"]
|
actor = activity.data["actor"]
|
||||||
|
|
||||||
|
|
|
@ -35,50 +35,55 @@ def change(struct, params \\ %{}) do
|
||||||
|> unique_constraint(:ap_id, name: :objects_unique_apid_index)
|
|> unique_constraint(:ap_id, name: :objects_unique_apid_index)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def get_by_id(nil), do: nil
|
||||||
|
def get_by_id(id), do: Repo.get(Object, id)
|
||||||
|
|
||||||
def get_by_ap_id(nil), do: nil
|
def get_by_ap_id(nil), do: nil
|
||||||
|
|
||||||
def get_by_ap_id(ap_id) do
|
def get_by_ap_id(ap_id) do
|
||||||
Repo.one(from(object in Object, where: fragment("(?)->>'id' = ?", object.data, ^ap_id)))
|
Repo.one(from(object in Object, where: fragment("(?)->>'id' = ?", object.data, ^ap_id)))
|
||||||
end
|
end
|
||||||
|
|
||||||
def normalize(_, fetch_remote \\ true)
|
defp warn_on_no_object_preloaded(ap_id) do
|
||||||
|
"Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object"
|
||||||
|
|> Logger.debug()
|
||||||
|
|
||||||
|
Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}")
|
||||||
|
end
|
||||||
|
|
||||||
|
def normalize(_, fetch_remote \\ true, options \\ [])
|
||||||
|
|
||||||
# If we pass an Activity to Object.normalize(), we can try to use the preloaded object.
|
# If we pass an Activity to Object.normalize(), we can try to use the preloaded object.
|
||||||
# Use this whenever possible, especially when walking graphs in an O(N) loop!
|
# Use this whenever possible, especially when walking graphs in an O(N) loop!
|
||||||
def normalize(%Object{} = object, _), do: object
|
def normalize(%Object{} = object, _, _), do: object
|
||||||
def normalize(%Activity{object: %Object{} = object}, _), do: object
|
def normalize(%Activity{object: %Object{} = object}, _, _), do: object
|
||||||
|
|
||||||
# A hack for fake activities
|
# A hack for fake activities
|
||||||
def normalize(%Activity{data: %{"object" => %{"fake" => true} = data}}, _) do
|
def normalize(%Activity{data: %{"object" => %{"fake" => true} = data}}, _, _) do
|
||||||
%Object{id: "pleroma:fake_object_id", data: data}
|
%Object{id: "pleroma:fake_object_id", data: data}
|
||||||
end
|
end
|
||||||
|
|
||||||
# Catch and log Object.normalize() calls where the Activity's child object is not
|
# No preloaded object
|
||||||
# preloaded.
|
def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote, _) do
|
||||||
def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote) do
|
warn_on_no_object_preloaded(ap_id)
|
||||||
Logger.debug(
|
|
||||||
"Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object!"
|
|
||||||
)
|
|
||||||
|
|
||||||
Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}")
|
|
||||||
|
|
||||||
normalize(ap_id, fetch_remote)
|
normalize(ap_id, fetch_remote)
|
||||||
end
|
end
|
||||||
|
|
||||||
def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote) do
|
# No preloaded object
|
||||||
Logger.debug(
|
def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote, _) do
|
||||||
"Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object!"
|
warn_on_no_object_preloaded(ap_id)
|
||||||
)
|
|
||||||
|
|
||||||
Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}")
|
|
||||||
|
|
||||||
normalize(ap_id, fetch_remote)
|
normalize(ap_id, fetch_remote)
|
||||||
end
|
end
|
||||||
|
|
||||||
# Old way, try fetching the object through cache.
|
# Old way, try fetching the object through cache.
|
||||||
def normalize(%{"id" => ap_id}, fetch_remote), do: normalize(ap_id, fetch_remote)
|
def normalize(%{"id" => ap_id}, fetch_remote, _), do: normalize(ap_id, fetch_remote)
|
||||||
def normalize(ap_id, false) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id)
|
def normalize(ap_id, false, _) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id)
|
||||||
def normalize(ap_id, true) when is_binary(ap_id), do: Fetcher.fetch_object_from_id!(ap_id)
|
|
||||||
def normalize(_, _), do: nil
|
def normalize(ap_id, true, options) when is_binary(ap_id) do
|
||||||
|
Fetcher.fetch_object_from_id!(ap_id, options)
|
||||||
|
end
|
||||||
|
|
||||||
|
def normalize(_, _, _), do: nil
|
||||||
|
|
||||||
# Owned objects can only be mutated by their owner
|
# Owned objects can only be mutated by their owner
|
||||||
def authorize_mutation(%Object{data: %{"actor" => actor}}, %User{ap_id: ap_id}),
|
def authorize_mutation(%Object{data: %{"actor" => actor}}, %User{ap_id: ap_id}),
|
||||||
|
@ -195,4 +200,34 @@ def decrease_replies_count(ap_id) do
|
||||||
_ -> {:error, "Not found"}
|
_ -> {:error, "Not found"}
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def increase_vote_count(ap_id, name) do
|
||||||
|
with %Object{} = object <- Object.normalize(ap_id),
|
||||||
|
"Question" <- object.data["type"] do
|
||||||
|
multiple = Map.has_key?(object.data, "anyOf")
|
||||||
|
|
||||||
|
options =
|
||||||
|
(object.data["anyOf"] || object.data["oneOf"] || [])
|
||||||
|
|> Enum.map(fn
|
||||||
|
%{"name" => ^name} = option ->
|
||||||
|
Kernel.update_in(option["replies"]["totalItems"], &(&1 + 1))
|
||||||
|
|
||||||
|
option ->
|
||||||
|
option
|
||||||
|
end)
|
||||||
|
|
||||||
|
data =
|
||||||
|
if multiple do
|
||||||
|
Map.put(object.data, "anyOf", options)
|
||||||
|
else
|
||||||
|
Map.put(object.data, "oneOf", options)
|
||||||
|
end
|
||||||
|
|
||||||
|
object
|
||||||
|
|> Object.change(%{data: data})
|
||||||
|
|> update_and_set_cache()
|
||||||
|
else
|
||||||
|
_ -> :noop
|
||||||
|
end
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,3 +1,7 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.Object.Containment do
|
defmodule Pleroma.Object.Containment do
|
||||||
@moduledoc """
|
@moduledoc """
|
||||||
This module contains some useful functions for containing objects to specific
|
This module contains some useful functions for containing objects to specific
|
||||||
|
@ -44,6 +48,9 @@ def contain_origin(id, %{"actor" => _actor} = params) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def contain_origin(id, %{"attributedTo" => actor} = params),
|
||||||
|
do: contain_origin(id, Map.put(params, "actor", actor))
|
||||||
|
|
||||||
def contain_origin_from_id(_id, %{"id" => nil}), do: :error
|
def contain_origin_from_id(_id, %{"id" => nil}), do: :error
|
||||||
|
|
||||||
def contain_origin_from_id(id, %{"id" => other_id} = _params) do
|
def contain_origin_from_id(id, %{"id" => other_id} = _params) do
|
||||||
|
@ -56,4 +63,9 @@ def contain_origin_from_id(id, %{"id" => other_id} = _params) do
|
||||||
:error
|
:error
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def contain_child(%{"object" => %{"id" => id, "attributedTo" => _} = object}),
|
||||||
|
do: contain_origin(id, object)
|
||||||
|
|
||||||
|
def contain_child(_), do: :ok
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,3 +1,7 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.Object.Fetcher do
|
defmodule Pleroma.Object.Fetcher do
|
||||||
alias Pleroma.HTTP
|
alias Pleroma.HTTP
|
||||||
alias Pleroma.Object
|
alias Pleroma.Object
|
||||||
|
@ -22,39 +26,45 @@ defp reinject_object(data) do
|
||||||
|
|
||||||
# TODO:
|
# TODO:
|
||||||
# This will create a Create activity, which we need internally at the moment.
|
# This will create a Create activity, which we need internally at the moment.
|
||||||
def fetch_object_from_id(id) do
|
def fetch_object_from_id(id, options \\ []) do
|
||||||
if object = Object.get_cached_by_ap_id(id) do
|
if object = Object.get_cached_by_ap_id(id) do
|
||||||
{:ok, object}
|
{:ok, object}
|
||||||
else
|
else
|
||||||
Logger.info("Fetching #{id} via AP")
|
Logger.info("Fetching #{id} via AP")
|
||||||
|
|
||||||
with {:ok, data} <- fetch_and_contain_remote_object_from_id(id),
|
with {:fetch, {:ok, data}} <- {:fetch, fetch_and_contain_remote_object_from_id(id)},
|
||||||
nil <- Object.normalize(data, false),
|
{:normalize, nil} <- {:normalize, Object.normalize(data, false)},
|
||||||
params <- %{
|
params <- %{
|
||||||
"type" => "Create",
|
"type" => "Create",
|
||||||
"to" => data["to"],
|
"to" => data["to"],
|
||||||
"cc" => data["cc"],
|
"cc" => data["cc"],
|
||||||
|
# Should we seriously keep this attributedTo thing?
|
||||||
"actor" => data["actor"] || data["attributedTo"],
|
"actor" => data["actor"] || data["attributedTo"],
|
||||||
"object" => data
|
"object" => data
|
||||||
},
|
},
|
||||||
:ok <- Containment.contain_origin(id, params),
|
{:containment, :ok} <- {:containment, Containment.contain_origin(id, params)},
|
||||||
{:ok, activity} <- Transmogrifier.handle_incoming(params),
|
{:ok, activity} <- Transmogrifier.handle_incoming(params, options),
|
||||||
{:object, _data, %Object{} = object} <-
|
{:object, _data, %Object{} = object} <-
|
||||||
{:object, data, Object.normalize(activity, false)} do
|
{:object, data, Object.normalize(activity, false)} do
|
||||||
{:ok, object}
|
{:ok, object}
|
||||||
else
|
else
|
||||||
|
{:containment, _} ->
|
||||||
|
{:error, "Object containment failed."}
|
||||||
|
|
||||||
{:error, {:reject, nil}} ->
|
{:error, {:reject, nil}} ->
|
||||||
{:reject, nil}
|
{:reject, nil}
|
||||||
|
|
||||||
{:object, data, nil} ->
|
{:object, data, nil} ->
|
||||||
reinject_object(data)
|
reinject_object(data)
|
||||||
|
|
||||||
object = %Object{} ->
|
{:normalize, object = %Object{}} ->
|
||||||
{:ok, object}
|
{:ok, object}
|
||||||
|
|
||||||
_e ->
|
_e ->
|
||||||
|
# Only fallback when receiving a fetch/normalization error with ActivityPub
|
||||||
Logger.info("Couldn't get object via AP, trying out OStatus fetching...")
|
Logger.info("Couldn't get object via AP, trying out OStatus fetching...")
|
||||||
|
|
||||||
|
# FIXME: OStatus Object Containment?
|
||||||
case OStatus.fetch_activity_from_url(id) do
|
case OStatus.fetch_activity_from_url(id) do
|
||||||
{:ok, [activity | _]} -> {:ok, Object.normalize(activity, false)}
|
{:ok, [activity | _]} -> {:ok, Object.normalize(activity, false)}
|
||||||
e -> e
|
e -> e
|
||||||
|
@ -63,8 +73,8 @@ def fetch_object_from_id(id) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def fetch_object_from_id!(id) do
|
def fetch_object_from_id!(id, options \\ []) do
|
||||||
with {:ok, object} <- fetch_object_from_id(id) do
|
with {:ok, object} <- fetch_object_from_id(id, options) do
|
||||||
object
|
object
|
||||||
else
|
else
|
||||||
_e ->
|
_e ->
|
||||||
|
@ -85,6 +95,9 @@ def fetch_and_contain_remote_object_from_id(id) do
|
||||||
:ok <- Containment.contain_origin_from_id(id, data) do
|
:ok <- Containment.contain_origin_from_id(id, data) do
|
||||||
{:ok, data}
|
{:ok, data}
|
||||||
else
|
else
|
||||||
|
{:ok, %{status: code}} when code in [404, 410] ->
|
||||||
|
{:error, "Object has been deleted"}
|
||||||
|
|
||||||
e ->
|
e ->
|
||||||
{:error, e}
|
{:error, e}
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,3 +1,7 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.ObjectTombstone do
|
defmodule Pleroma.ObjectTombstone do
|
||||||
@enforce_keys [:id, :formerType, :deleted]
|
@enforce_keys [:id, :formerType, :deleted]
|
||||||
defstruct [:id, :formerType, :deleted, type: "Tombstone"]
|
defstruct [:id, :formerType, :deleted, type: "Tombstone"]
|
||||||
|
|
|
@@ -1,3 +1,7 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
 defmodule Pleroma.Pagination do
   @moduledoc """
   Implements Mastodon-compatible pagination.
@@ -10,16 +14,28 @@ defmodule Pleroma.Pagination do

  @default_limit 20

-  def fetch_paginated(query, params) do
+  def fetch_paginated(query, params, type \\ :keyset)
+
+  def fetch_paginated(query, params, :keyset) do
    options = cast_params(params)

    query
-    |> paginate(options)
+    |> paginate(options, :keyset)
    |> Repo.all()
    |> enforce_order(options)
  end

-  def paginate(query, options) do
+  def fetch_paginated(query, params, :offset) do
+    options = cast_params(params)
+
+    query
+    |> paginate(options, :offset)
+    |> Repo.all()
+  end
+
+  def paginate(query, options, method \\ :keyset)
+
+  def paginate(query, options, :keyset) do
    query
    |> restrict(:min_id, options)
    |> restrict(:since_id, options)
@@ -28,11 +44,18 @@ def paginate(query, options) do
    |> restrict(:limit, options)
  end

+  def paginate(query, options, :offset) do
+    query
+    |> restrict(:offset, options)
+    |> restrict(:limit, options)
+  end
+
  defp cast_params(params) do
    param_types = %{
      min_id: :string,
      since_id: :string,
      max_id: :string,
+      offset: :integer,
      limit: :integer
    }

@@ -66,6 +89,10 @@ defp restrict(query, :order, _options) do
    order_by(query, [u], fragment("? desc nulls last", u.id))
  end

+  defp restrict(query, :offset, %{offset: offset}) do
+    offset(query, ^offset)
+  end
+
  defp restrict(query, :limit, options) do
    limit = Map.get(options, :limit, @default_limit)
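The module now supports both keyset (min_id/since_id/max_id) and offset pagination. A hedged usage sketch, assuming `query` holds an Ecto query and with illustrative parameter values:

    # Keyset pagination (the default), Mastodon-style timeline params:
    Pleroma.Pagination.fetch_paginated(query, %{"max_id" => "9gZ5VYhDG3cDJjSlgw", "limit" => 20})

    # Offset pagination, e.g. for endpoints that page by position:
    Pleroma.Pagination.fetch_paginated(query, %{"offset" => 40, "limit" => 20}, :offset)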
@@ -37,6 +37,7 @@ def used_changeset(struct) do
    |> put_change(:used, true)
  end

+  @spec reset_password(binary(), map()) :: {:ok, User.t()} | {:error, binary()}
  def reset_password(token, data) do
    with %{used: false} = token <- Repo.get_by(PasswordResetToken, %{token: token}),
         %User{} = user <- User.get_cached_by_id(token.user_id),
@@ -6,11 +6,26 @@ defmodule Pleroma.Plugs.AuthenticationPlug do
   alias Comeonin.Pbkdf2
   import Plug.Conn
   alias Pleroma.User

+  require Logger
+
   def init(options) do
     options
   end

+  def checkpw(password, password_hash) do
+    cond do
+      String.starts_with?(password_hash, "$pbkdf2") ->
+        Pbkdf2.checkpw(password, password_hash)
+
+      String.starts_with?(password_hash, "$6") ->
+        :crypt.crypt(password, password_hash) == password_hash
+
+      true ->
+        Logger.error("Password hash not recognized")
+        false
+    end
+  end
+
   def call(%{assigns: %{user: %User{}}} = conn, _), do: conn

   def call(

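As a quick illustration of the dispatch in checkpw/2 above — the hash strings are truncated placeholders, not real hashes:

    Pleroma.Plugs.AuthenticationPlug.checkpw(password, "$pbkdf2-sha512$...")  # verified with Comeonin.Pbkdf2
    Pleroma.Plugs.AuthenticationPlug.checkpw(password, "$6$...")              # legacy crypt(3) SHA-512 hash, checked via :crypt
    Pleroma.Plugs.AuthenticationPlug.checkpw(password, "something-else")      # logs "Password hash not recognized" and returns false
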
@@ -4,6 +4,7 @@

 defmodule Pleroma.Plugs.EnsureAuthenticatedPlug do
   import Plug.Conn
+  import Pleroma.Web.TranslationHelpers
   alias Pleroma.User

   def init(options) do
@@ -16,8 +17,7 @@ def call(%{assigns: %{user: %User{}}} = conn, _) do

   def call(conn, _) do
     conn
-    |> put_resp_content_type("application/json")
-    |> send_resp(403, Jason.encode!(%{error: "Invalid credentials."}))
+    |> render_error(:forbidden, "Invalid credentials.")
     |> halt
   end
 end

@@ -3,6 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Plugs.EnsurePublicOrAuthenticatedPlug do
+  import Pleroma.Web.TranslationHelpers
   import Plug.Conn
   alias Pleroma.Config
   alias Pleroma.User
@@ -23,8 +24,7 @@ def call(conn, _) do

       {false, _} ->
         conn
-        |> put_resp_content_type("application/json")
-        |> send_resp(403, Jason.encode!(%{error: "This resource requires authentication."}))
+        |> render_error(:forbidden, "This resource requires authentication.")
         |> halt
     end
   end

@@ -10,7 +10,7 @@ def init(options) do
   end

   def call(conn, _opts) do
-    if Keyword.get(Application.get_env(:pleroma, :instance), :federating) do
+    if Pleroma.Config.get([:instance, :federating]) do
       conn
     else
       conn

@@ -56,14 +56,14 @@ defp csp_string do
     connect_src = "connect-src 'self' #{static_url} #{websocket_url}"

     connect_src =
-      if Mix.env() == :dev do
+      if Pleroma.Config.get(:env) == :dev do
         connect_src <> " http://localhost:3035/"
       else
         connect_src
       end

     script_src =
-      if Mix.env() == :dev do
+      if Pleroma.Config.get(:env) == :dev do
         "script-src 'self' 'unsafe-eval'"
       else
         "script-src 'self'"

lib/pleroma/plugs/idempotency_plug.ex (new file)
@@ -0,0 +1,84 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Plugs.IdempotencyPlug do
  import Phoenix.Controller, only: [json: 2]
  import Plug.Conn

  @behaviour Plug

  @impl true
  def init(opts), do: opts

  # Sending idempotency keys in `GET` and `DELETE` requests has no effect
  # and should be avoided, as these requests are idempotent by definition.

  @impl true
  def call(%{method: method} = conn, _) when method in ["POST", "PUT", "PATCH"] do
    case get_req_header(conn, "idempotency-key") do
      [key] -> process_request(conn, key)
      _ -> conn
    end
  end

  def call(conn, _), do: conn

  def process_request(conn, key) do
    case Cachex.get(:idempotency_cache, key) do
      {:ok, nil} ->
        cache_resposnse(conn, key)

      {:ok, record} ->
        send_cached(conn, key, record)

      {atom, message} when atom in [:ignore, :error] ->
        render_error(conn, message)
    end
  end

  defp cache_resposnse(conn, key) do
    register_before_send(conn, fn conn ->
      [request_id] = get_resp_header(conn, "x-request-id")
      content_type = get_content_type(conn)

      record = {request_id, content_type, conn.status, conn.resp_body}
      {:ok, _} = Cachex.put(:idempotency_cache, key, record)

      conn
      |> put_resp_header("idempotency-key", key)
      |> put_resp_header("x-original-request-id", request_id)
    end)
  end

  defp send_cached(conn, key, record) do
    {request_id, content_type, status, body} = record

    conn
    |> put_resp_header("idempotency-key", key)
    |> put_resp_header("idempotent-replayed", "true")
    |> put_resp_header("x-original-request-id", request_id)
    |> put_resp_content_type(content_type)
    |> send_resp(status, body)
    |> halt()
  end

  defp render_error(conn, message) do
    conn
    |> put_status(:unprocessable_entity)
    |> json(%{error: message})
    |> halt()
  end

  defp get_content_type(conn) do
    [content_type] = get_resp_header(conn, "content-type")

    if String.contains?(content_type, ";") do
      content_type
      |> String.split(";")
      |> hd()
    else
      content_type
    end
  end
end

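A sketch of how this plug would typically be wired up and what a client observes; the pipeline placement shown here is an assumption, not part of this diff:

    # somewhere in the router/endpoint, ahead of the controllers that create content
    plug(Pleroma.Plugs.IdempotencyPlug)

    # the first POST carrying `Idempotency-Key: <key>` runs normally and its response is cached;
    # a retry with the same key is answered from the cache with the original status and body,
    # plus `Idempotent-Replayed: true` and `X-Original-Request-Id` headers.
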
@@ -4,6 +4,7 @@

 defmodule Pleroma.Plugs.OAuthScopesPlug do
   import Plug.Conn
+  import Pleroma.Web.Gettext

   @behaviour Plug

@@ -30,11 +31,14 @@ def call(%Plug.Conn{assigns: assigns} = conn, %{scopes: scopes} = options) do

       true ->
         missing_scopes = scopes -- token.scopes
-        error_message = "Insufficient permissions: #{Enum.join(missing_scopes, " #{op} ")}."
+        permissions = Enum.join(missing_scopes, " #{op} ")
+
+        error_message =
+          dgettext("errors", "Insufficient permissions: %{permissions}.", permissions: permissions)

         conn
         |> put_resp_content_type("application/json")
-        |> send_resp(403, Jason.encode!(%{error: error_message}))
+        |> send_resp(:forbidden, Jason.encode!(%{error: error_message}))
         |> halt()
     end
   end

Pleroma.Plugs.RateLimitPlug (file removed)
@@ -1,36 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Plugs.RateLimitPlug do
-  import Phoenix.Controller, only: [json: 2]
-  import Plug.Conn
-
-  def init(opts), do: opts
-
-  def call(conn, opts) do
-    enabled? = Pleroma.Config.get([:app_account_creation, :enabled])
-
-    case check_rate(conn, Map.put(opts, :enabled, enabled?)) do
-      {:ok, _count} -> conn
-      {:error, _count} -> render_error(conn)
-      %Plug.Conn{} = conn -> conn
-    end
-  end
-
-  defp check_rate(conn, %{enabled: true} = opts) do
-    max_requests = opts[:max_requests]
-    bucket_name = conn.remote_ip |> Tuple.to_list() |> Enum.join(".")
-
-    ExRated.check_rate(bucket_name, opts[:interval] * 1000, max_requests)
-  end
-
-  defp check_rate(conn, _), do: conn
-
-  defp render_error(conn) do
-    conn
-    |> put_status(:forbidden)
-    |> json(%{error: "Rate limit exceeded."})
-    |> halt()
-  end
-end

lib/pleroma/plugs/rate_limiter.ex (new file)
@@ -0,0 +1,131 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Plugs.RateLimiter do
  @moduledoc """

  ## Configuration

  A keyword list of rate limiters where a key is a limiter name and value is the limiter configuration. The basic configuration is a tuple where:

  * The first element: `scale` (Integer). The time scale in milliseconds.
  * The second element: `limit` (Integer). How many requests to limit in the time scale provided.

  It is also possible to have different limits for unauthenticated and authenticated users: the keyword value must be a list of two tuples where the first one is a config for unauthenticated users and the second one is for authenticated.

  To disable a limiter set its value to `nil`.

  ### Example

      config :pleroma, :rate_limit,
        one: {1000, 10},
        two: [{10_000, 10}, {10_000, 50}],
        foobar: nil

  Here we have three limiters:

  * `one` which is not over 10req/1s
  * `two` which has two limits: 10req/10s for unauthenticated users and 50req/10s for authenticated users
  * `foobar` which is disabled

  ## Usage

  Allowed syntax:

      plug(Pleroma.Plugs.RateLimiter, :limiter_name)
      plug(Pleroma.Plugs.RateLimiter, {:limiter_name, options})

  Allowed options:

  * `bucket_name` overrides bucket name (e.g. to have a separate limit for a set of actions)
  * `params` appends values of specified request params (e.g. ["id"]) to bucket name

  Inside a controller:

      plug(Pleroma.Plugs.RateLimiter, :one when action == :one)
      plug(Pleroma.Plugs.RateLimiter, :two when action in [:two, :three])

      plug(
        Pleroma.Plugs.RateLimiter,
        {:status_id_action, bucket_name: "status_id_action:fav_unfav", params: ["id"]}
        when action in ~w(fav_status unfav_status)a
      )

  or inside a router pipeline:

      pipeline :api do
        ...
        plug(Pleroma.Plugs.RateLimiter, :one)
        ...
      end
  """
  import Pleroma.Web.TranslationHelpers
  import Plug.Conn

  alias Pleroma.User

  def init(limiter_name) when is_atom(limiter_name) do
    init({limiter_name, []})
  end

  def init({limiter_name, opts}) do
    case Pleroma.Config.get([:rate_limit, limiter_name]) do
      nil -> nil
      config -> {limiter_name, config, opts}
    end
  end

  # Do not limit if there is no limiter configuration
  def call(conn, nil), do: conn

  def call(conn, settings) do
    case check_rate(conn, settings) do
      {:ok, _count} ->
        conn

      {:error, _count} ->
        render_throttled_error(conn)
    end
  end

  defp bucket_name(conn, limiter_name, opts) do
    bucket_name = opts[:bucket_name] || limiter_name

    if params_names = opts[:params] do
      params_values = for p <- Enum.sort(params_names), do: conn.params[p]
      Enum.join([bucket_name] ++ params_values, ":")
    else
      bucket_name
    end
  end

  defp check_rate(
         %{assigns: %{user: %User{id: user_id}}} = conn,
         {limiter_name, [_, {scale, limit}], opts}
       ) do
    bucket_name = bucket_name(conn, limiter_name, opts)
    ExRated.check_rate("#{bucket_name}:#{user_id}", scale, limit)
  end

  defp check_rate(conn, {limiter_name, [{scale, limit} | _], opts}) do
    bucket_name = bucket_name(conn, limiter_name, opts)
    ExRated.check_rate("#{bucket_name}:#{ip(conn)}", scale, limit)
  end

  defp check_rate(conn, {limiter_name, {scale, limit}, opts}) do
    check_rate(conn, {limiter_name, [{scale, limit}, {scale, limit}], opts})
  end

  def ip(%{remote_ip: remote_ip}) do
    remote_ip
    |> Tuple.to_list()
    |> Enum.join(".")
  end

  defp render_throttled_error(conn) do
    conn
    |> render_error(:too_many_requests, "Throttled")
    |> halt()
  end
end

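Following the moduledoc's example config, a rough sketch of what init/1 resolves to and how call/2 then behaves (return shapes taken from the code above):

    Pleroma.Plugs.RateLimiter.init(:foobar)
    # => nil — call/2 passes the conn through untouched

    Pleroma.Plugs.RateLimiter.init({:two, params: ["id"]})
    # => {:two, [{10_000, 10}, {10_000, 50}], [params: ["id"]]}
    # authenticated requests count against a "two:<param values>:<user_id>" bucket,
    # everything else against "two:<param values>:<ip>"
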
lib/pleroma/plugs/set_locale_plug.ex (new file)
@@ -0,0 +1,63 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

# NOTE: this module is based on https://github.com/smeevil/set_locale
defmodule Pleroma.Plugs.SetLocalePlug do
  import Plug.Conn, only: [get_req_header: 2, assign: 3]

  def init(_), do: nil

  def call(conn, _) do
    locale = get_locale_from_header(conn) || Gettext.get_locale()
    Gettext.put_locale(locale)
    assign(conn, :locale, locale)
  end

  defp get_locale_from_header(conn) do
    conn
    |> extract_accept_language()
    |> Enum.find(&supported_locale?/1)
  end

  defp extract_accept_language(conn) do
    case get_req_header(conn, "accept-language") do
      [value | _] ->
        value
        |> String.split(",")
        |> Enum.map(&parse_language_option/1)
        |> Enum.sort(&(&1.quality > &2.quality))
        |> Enum.map(& &1.tag)
        |> Enum.reject(&is_nil/1)
        |> ensure_language_fallbacks()

      _ ->
        []
    end
  end

  defp supported_locale?(locale) do
    Pleroma.Web.Gettext
    |> Gettext.known_locales()
    |> Enum.member?(locale)
  end

  defp parse_language_option(string) do
    captures = Regex.named_captures(~r/^\s?(?<tag>[\w\-]+)(?:;q=(?<quality>[\d\.]+))?$/i, string)

    quality =
      case Float.parse(captures["quality"] || "1.0") do
        {val, _} -> val
        :error -> 1.0
      end

    %{tag: captures["tag"], quality: quality}
  end

  defp ensure_language_fallbacks(tags) do
    Enum.flat_map(tags, fn tag ->
      [language | _] = String.split(tag, "-")
      if Enum.member?(tags, language), do: [tag], else: [tag, language]
    end)
  end
end

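A short sketch of what the Accept-Language handling above yields; the header value is illustrative:

    # "de-CH,fr;q=0.8,en;q=0.5" is split, sorted by quality, and padded with base-language fallbacks:
    # ["de-CH", "de", "fr", "en"]
    # The first tag that appears in Gettext.known_locales(Pleroma.Web.Gettext) becomes conn.assigns.locale.
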
@@ -7,6 +7,7 @@ defmodule Pleroma.Plugs.UploadedMedia do
   """
   import Plug.Conn
+  import Pleroma.Web.Gettext
   require Logger

   @behaviour Plug
@@ -36,7 +37,7 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
       conn
     end

-    config = Pleroma.Config.get([Pleroma.Upload])
+    config = Pleroma.Config.get(Pleroma.Upload)

     with uploader <- Keyword.fetch!(config, :uploader),
          proxy_remote = Keyword.get(config, :proxy_remote, false),
@@ -45,7 +46,7 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
     else
       _ ->
         conn
-        |> send_resp(500, "Failed")
+        |> send_resp(:internal_server_error, dgettext("errors", "Failed"))
        |> halt()
     end
   end
@@ -64,7 +65,7 @@ defp get_media(conn, {:static_dir, directory}, _, opts) do
       conn
     else
       conn
-      |> send_resp(404, "Not found")
+      |> send_resp(:not_found, dgettext("errors", "Not found"))
      |> halt()
     end
   end
@@ -84,7 +85,7 @@ defp get_media(conn, unknown, _, _) do
     Logger.error("#{__MODULE__}: Unknown get startegy: #{inspect(unknown)}")

     conn
-    |> send_resp(500, "Internal Error")
+    |> send_resp(:internal_server_error, dgettext("errors", "Internal Error"))
     |> halt()
   end
 end

@@ -3,6 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Plugs.UserIsAdminPlug do
+  import Pleroma.Web.TranslationHelpers
   import Plug.Conn
   alias Pleroma.User

@@ -16,8 +17,7 @@ def call(%{assigns: %{user: %User{info: %{is_admin: true}}}} = conn, _) do

   def call(conn, _) do
     conn
-    |> put_resp_content_type("application/json")
-    |> send_resp(403, Jason.encode!(%{error: "User is not admin."}))
+    |> render_error(:forbidden, "User is not admin.")
     |> halt
   end
 end

lib/pleroma/release_tasks.ex (new file)
@@ -0,0 +1,66 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.ReleaseTasks do
  @repo Pleroma.Repo

  def run(args) do
    [task | args] = String.split(args)

    case task do
      "migrate" -> migrate(args)
      "create" -> create()
      "rollback" -> rollback(args)
      task -> mix_task(task, args)
    end
  end

  defp mix_task(task, args) do
    Application.load(:pleroma)
    {:ok, modules} = :application.get_key(:pleroma, :modules)

    module =
      Enum.find(modules, fn module ->
        module = Module.split(module)

        match?(["Mix", "Tasks", "Pleroma" | _], module) and
          String.downcase(List.last(module)) == task
      end)

    if module do
      module.run(args)
    else
      IO.puts("The task #{task} does not exist")
    end
  end

  def migrate(args) do
    Mix.Tasks.Pleroma.Ecto.Migrate.run(args)
  end

  def rollback(args) do
    Mix.Tasks.Pleroma.Ecto.Rollback.run(args)
  end

  def create do
    Application.load(:pleroma)

    case @repo.__adapter__.storage_up(@repo.config) do
      :ok ->
        IO.puts("The database for #{inspect(@repo)} has been created")

      {:error, :already_up} ->
        IO.puts("The database for #{inspect(@repo)} has already been created")

      {:error, term} when is_binary(term) ->
        IO.puts(:stderr, "The database for #{inspect(@repo)} couldn't be created: #{term}")

      {:error, term} ->
        IO.puts(
          :stderr,
          "The database for #{inspect(@repo)} couldn't be created: #{inspect(term)}"
        )
    end
  end
end

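For context, a sketch of how run/1 is meant to be driven from an OTP release; the exact release entry point is an assumption about the deployment setup, not something defined in this file:

    # e.g. bin/pleroma eval 'Pleroma.ReleaseTasks.run("migrate")'
    Pleroma.ReleaseTasks.run("migrate")   # -> Mix.Tasks.Pleroma.Ecto.Migrate.run([])
    Pleroma.ReleaseTasks.run("rollback")  # -> Mix.Tasks.Pleroma.Ecto.Rollback.run([])
    Pleroma.ReleaseTasks.run("user")      # falls through to mix_task/2, which looks up a Mix.Tasks.Pleroma.* module by name
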
lib/pleroma/repo_streamer.ex (new file)
@@ -0,0 +1,34 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.RepoStreamer do
  alias Pleroma.Repo
  import Ecto.Query

  def chunk_stream(query, chunk_size) do
    Stream.unfold(0, fn
      :halt ->
        {[], :halt}

      last_id ->
        query
        |> order_by(asc: :id)
        |> where([r], r.id > ^last_id)
        |> limit(^chunk_size)
        |> Repo.all()
        |> case do
          [] ->
            {[], :halt}

          records ->
            last_id = List.last(records).id
            {records, last_id}
        end
    end)
    |> Stream.take_while(fn
      [] -> false
      _ -> true
    end)
  end
end

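A small usage sketch, mirroring how delete_user_activities/1 uses this later in the same diff:

    ap_id
    |> Pleroma.Activity.query_by_actor()
    |> Pleroma.RepoStreamer.chunk_stream(50)
    |> Stream.each(fn chunk -> Enum.each(chunk, &IO.inspect(&1.id)) end)
    |> Stream.run()

Unlike Repo.stream/1, this pages by id in separate queries, so no long-running transaction has to be held open while the stream is consumed.
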
lib/pleroma/reverse_proxy/client.ex (new file)
@@ -0,0 +1,28 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.ReverseProxy.Client do
  @callback request(atom(), String.t(), [tuple()], String.t(), list()) ::
              {:ok, pos_integer(), [tuple()], reference() | map()}
              | {:ok, pos_integer(), [tuple()]}
              | {:ok, reference()}
              | {:error, term()}

  @callback stream_body(reference() | pid() | map()) ::
              {:ok, binary()} | :done | {:error, String.t()}

  @callback close(reference() | pid() | map()) :: :ok

  def request(method, url, headers, "", opts \\ []) do
    client().request(method, url, headers, "", opts)
  end

  def stream_body(ref), do: client().stream_body(ref)

  def close(ref), do: client().close(ref)

  defp client do
    Pleroma.Config.get([Pleroma.ReverseProxy.Client], :hackney)
  end
end

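Because client/0 reads the implementation module from config with :hackney as the default, the HTTP client can be swapped out, for instance with a mock in tests; the mock module name below is hypothetical:

    # config/test.exs
    config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock
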
@@ -61,9 +61,7 @@ defmodule Pleroma.ReverseProxy do
   * `http`: options for [hackney](https://github.com/benoitc/hackney).

   """
-  @hackney Application.get_env(:pleroma, :hackney, :hackney)
-
-  @default_hackney_options []
+  @default_hackney_options [pool: :media]

   @inline_content_types [
     "image/gif",
@@ -96,7 +94,8 @@ def call(_conn, _url, _opts \\ [])

   def call(conn = %{method: method}, url, opts) when method in @methods do
     hackney_opts =
-      @default_hackney_options
+      Pleroma.HTTP.Connection.hackney_options([])
+      |> Keyword.merge(@default_hackney_options)
       |> Keyword.merge(Keyword.get(opts, :http, []))
       |> HTTP.process_request_options()

@@ -148,7 +147,7 @@ defp request(method, url, headers, hackney_opts) do
     Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
     method = method |> String.downcase() |> String.to_existing_atom()

-    case @hackney.request(method, url, headers, "", hackney_opts) do
+    case client().request(method, url, headers, "", hackney_opts) do
       {:ok, code, headers, client} when code in @valid_resp_codes ->
         {:ok, code, downcase_headers(headers), client}

@@ -175,7 +174,7 @@ defp response(conn, client, url, status, headers, opts) do
         halt(conn)

       {:error, :closed, conn} ->
-        :hackney.close(client)
+        client().close(client)
         halt(conn)

       {:error, error, conn} ->
@@ -183,7 +182,7 @@ defp response(conn, client, url, status, headers, opts) do
           "#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}"
         )

-        :hackney.close(client)
+        client().close(client)
         halt(conn)
     end
   end
@@ -198,7 +197,7 @@ defp chunk_reply(conn, client, opts, sent_so_far, duration) do
            duration,
            Keyword.get(opts, :max_read_duration, @max_read_duration)
          ),
-         {:ok, data} <- @hackney.stream_body(client),
+         {:ok, data} <- client().stream_body(client),
          {:ok, duration} <- increase_read_duration(duration),
          sent_so_far = sent_so_far + byte_size(data),
          :ok <- body_size_constraint(sent_so_far, Keyword.get(opts, :max_body_size)),
@@ -379,4 +378,6 @@ defp increase_read_duration({previous_duration, started})
   defp increase_read_duration(_) do
     {:ok, :no_duration_limit, :no_duration_limit}
   end
+
+  defp client, do: Pleroma.ReverseProxy.Client
 end

@@ -10,10 +10,19 @@ defmodule Pleroma.Upload.Filter.AnonymizeFilename do
   """
   @behaviour Pleroma.Upload.Filter

-  def filter(upload) do
-    extension = List.last(String.split(upload.name, "."))
-    name = Pleroma.Config.get([__MODULE__, :text], random(extension))
-    {:ok, %Pleroma.Upload{upload | name: name}}
+  alias Pleroma.Config
+  alias Pleroma.Upload
+
+  def filter(%Upload{name: name} = upload) do
+    extension = List.last(String.split(name, "."))
+    name = predefined_name(extension) || random(extension)
+    {:ok, %Upload{upload | name: name}}
+  end
+
+  @spec predefined_name(String.t()) :: String.t() | nil
+  defp predefined_name(extension) do
+    with name when not is_nil(name) <- Config.get([__MODULE__, :text]),
+         do: String.replace(name, "{extension}", extension)
   end

   defp random(extension) do

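This is the configuration the new predefined_name/1 clause reads; as a sketch, with it every upload keeps a fixed name but its original extension, and without the :text key the filter falls back to random/1 as before:

    config :pleroma, Pleroma.Upload.Filter.AnonymizeFilename, text: "upload.{extension}"
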
Pleroma.Uploaders.Swift, Pleroma.Uploaders.Swift.Client and Pleroma.Uploaders.Swift.Keystone (files removed: 19, 29 and 51 deleted lines)

The OpenStack Swift uploader is removed in this diff. The deleted modules were:

* Pleroma.Uploaders.Swift.Keystone — an HTTPoison.Base client that built URLs from the configured :auth_url, JSON-decoded response bodies, and fetched an auth token via POST /tokens with the configured username, password and tenant_id (returning the token id on a 200 response and "" otherwise).
* Pleroma.Uploaders.Swift.Client — an HTTPoison.Base client that built URLs from the configured :storage_url and uploaded a file with PUT, passing the Keystone token as X-Auth-Token; it returned {:ok, {:file, filename}} on 201, {:error, "Unauthorized, Bad Token"} on 401, and {:error, "Swift Upload Error"} on other failures.
* Pleroma.Uploaders.Swift — the Pleroma.Uploaders.Uploader implementation whose get_file/1 joined the configured :object_url with the file name and whose put_file/1 delegated to Client.upload_file/3.

@@ -3,6 +3,8 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Uploaders.Uploader do
+  import Pleroma.Web.Gettext
+
   @moduledoc """
   Defines the contract to put and get an uploaded file to any backend.
   """
@@ -66,7 +68,7 @@ defp handle_callback(uploader, upload) do
         {:error, error}
     end
   after
-    30_000 -> {:error, "Uploader callback timeout"}
+    30_000 -> {:error, dgettext("errors", "Uploader callback timeout")}
   end
 end
 end

@@ -9,12 +9,14 @@ defmodule Pleroma.User do
   import Ecto.Query

   alias Comeonin.Pbkdf2
+  alias Ecto.Multi
   alias Pleroma.Activity
   alias Pleroma.Keys
   alias Pleroma.Notification
   alias Pleroma.Object
   alias Pleroma.Registration
   alias Pleroma.Repo
+  alias Pleroma.RepoStreamer
   alias Pleroma.User
   alias Pleroma.Web
   alias Pleroma.Web.ActivityPub.ActivityPub
@@ -50,6 +52,7 @@ defmodule Pleroma.User do
     field(:avatar, :map)
     field(:local, :boolean, default: true)
     field(:follower_address, :string)
+    field(:following_address, :string)
     field(:search_rank, :float, virtual: true)
     field(:search_type, :integer, virtual: true)
     field(:tags, {:array, :string}, default: [])

|
||||||
def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
|
def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
|
||||||
def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"
|
def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"
|
||||||
|
|
||||||
def user_info(%User{} = user) do
|
@spec ap_following(User.t()) :: Sring.t()
|
||||||
|
def ap_following(%User{following_address: fa}) when is_binary(fa), do: fa
|
||||||
|
def ap_following(%User{} = user), do: "#{ap_id(user)}/following"
|
||||||
|
|
||||||
|
def user_info(%User{} = user, args \\ %{}) do
|
||||||
|
following_count =
|
||||||
|
if args[:following_count], do: args[:following_count], else: following_count(user)
|
||||||
|
|
||||||
|
follower_count =
|
||||||
|
if args[:follower_count], do: args[:follower_count], else: user.info.follower_count
|
||||||
|
|
||||||
%{
|
%{
|
||||||
following_count: following_count(user),
|
|
||||||
note_count: user.info.note_count,
|
note_count: user.info.note_count,
|
||||||
follower_count: user.info.follower_count,
|
|
||||||
locked: user.info.locked,
|
locked: user.info.locked,
|
||||||
confirmation_pending: user.info.confirmation_pending,
|
confirmation_pending: user.info.confirmation_pending,
|
||||||
default_scope: user.info.default_scope
|
default_scope: user.info.default_scope
|
||||||
}
|
}
|
||||||
|
|> Map.put(:following_count, following_count)
|
||||||
|
|> Map.put(:follower_count, follower_count)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def set_info_cache(user, args) do
|
||||||
|
Cachex.put(:user_cache, "user_info:#{user.id}", user_info(user, args))
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec restrict_deactivated(Ecto.Query.t()) :: Ecto.Query.t()
|
||||||
def restrict_deactivated(query) do
|
def restrict_deactivated(query) do
|
||||||
from(u in query,
|
from(u in query,
|
||||||
where: not fragment("? \\? 'deactivated' AND ?->'deactivated' @> 'true'", u.info, u.info)
|
where: not fragment("? \\? 'deactivated' AND ?->'deactivated' @> 'true'", u.info, u.info)
|
||||||
|
@@ -150,9 +168,10 @@ def remote_user_creation(params) do

     if changes.valid? do
       case info_cng.changes[:source_data] do
-        %{"followers" => followers} ->
+        %{"followers" => followers, "following" => following} ->
           changes
           |> put_change(:follower_address, followers)
+          |> put_change(:following_address, following)

         _ ->
           followers = User.ap_followers(%User{nickname: changes.changes[:nickname]})

|
||||||
|> User.Info.user_upgrade(params[:info])
|
|> User.Info.user_upgrade(params[:info])
|
||||||
|
|
||||||
struct
|
struct
|
||||||
|> cast(params, [:bio, :name, :follower_address, :avatar, :last_refreshed_at])
|
|> cast(params, [
|
||||||
|
:bio,
|
||||||
|
:name,
|
||||||
|
:follower_address,
|
||||||
|
:following_address,
|
||||||
|
:avatar,
|
||||||
|
:last_refreshed_at
|
||||||
|
])
|
||||||
|> unique_constraint(:nickname)
|
|> unique_constraint(:nickname)
|
||||||
|> validate_format(:nickname, local_nickname_regex())
|
|> validate_format(:nickname, local_nickname_regex())
|
||||||
|> validate_length(:bio, max: 5000)
|
|> validate_length(:bio, max: 5000)
|
||||||
|
@ -193,27 +219,24 @@ def upgrade_changeset(struct, params \\ %{}) do
|
||||||
end
|
end
|
||||||
|
|
||||||
def password_update_changeset(struct, params) do
|
def password_update_changeset(struct, params) do
|
||||||
changeset =
|
|
||||||
struct
|
struct
|
||||||
|> cast(params, [:password, :password_confirmation])
|
|> cast(params, [:password, :password_confirmation])
|
||||||
|> validate_required([:password, :password_confirmation])
|
|> validate_required([:password, :password_confirmation])
|
||||||
|> validate_confirmation(:password)
|
|> validate_confirmation(:password)
|
||||||
|
|> put_password_hash
|
||||||
OAuth.Token.delete_user_tokens(struct)
|
|
||||||
OAuth.Authorization.delete_user_authorizations(struct)
|
|
||||||
|
|
||||||
if changeset.valid? do
|
|
||||||
hashed = Pbkdf2.hashpwsalt(changeset.changes[:password])
|
|
||||||
|
|
||||||
changeset
|
|
||||||
|> put_change(:password_hash, hashed)
|
|
||||||
else
|
|
||||||
changeset
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
||||||
def reset_password(user, data) do
|
def reset_password(%User{id: user_id} = user, data) do
|
||||||
update_and_set_cache(password_update_changeset(user, data))
|
multi =
|
||||||
|
Multi.new()
|
||||||
|
|> Multi.update(:user, password_update_changeset(user, data))
|
||||||
|
|> Multi.delete_all(:tokens, OAuth.Token.Query.get_by_user(user_id))
|
||||||
|
|> Multi.delete_all(:auth, OAuth.Authorization.delete_by_user_query(user))
|
||||||
|
|
||||||
|
case Repo.transaction(multi) do
|
||||||
|
{:ok, %{user: user} = _} -> set_cache(user)
|
||||||
|
{:error, _, changeset, _} -> {:error, changeset}
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def register_changeset(struct, params \\ %{}, opts \\ []) do
|
def register_changeset(struct, params \\ %{}, opts \\ []) do
|
||||||
|
@ -249,12 +272,11 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do
|
||||||
end
|
end
|
||||||
|
|
||||||
if changeset.valid? do
|
if changeset.valid? do
|
||||||
hashed = Pbkdf2.hashpwsalt(changeset.changes[:password])
|
|
||||||
ap_id = User.ap_id(%User{nickname: changeset.changes[:nickname]})
|
ap_id = User.ap_id(%User{nickname: changeset.changes[:nickname]})
|
||||||
followers = User.ap_followers(%User{nickname: changeset.changes[:nickname]})
|
followers = User.ap_followers(%User{nickname: changeset.changes[:nickname]})
|
||||||
|
|
||||||
changeset
|
changeset
|
||||||
|> put_change(:password_hash, hashed)
|
|> put_password_hash
|
||||||
|> put_change(:ap_id, ap_id)
|
|> put_change(:ap_id, ap_id)
|
||||||
|> unique_constraint(:ap_id)
|
|> unique_constraint(:ap_id)
|
||||||
|> put_change(:following, [followers])
|
|> put_change(:following, [followers])
|
||||||
|
@ -324,14 +346,6 @@ def maybe_direct_follow(%User{} = follower, %User{} = followed) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def maybe_follow(%User{} = follower, %User{info: _info} = followed) do
|
|
||||||
if not following?(follower, followed) do
|
|
||||||
follow(follower, followed)
|
|
||||||
else
|
|
||||||
{:ok, follower}
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
@doc "A mass follow for local users. Respects blocks in both directions but does not create activities."
|
@doc "A mass follow for local users. Respects blocks in both directions but does not create activities."
|
||||||
@spec follow_all(User.t(), list(User.t())) :: {atom(), User.t()}
|
@spec follow_all(User.t(), list(User.t())) :: {atom(), User.t()}
|
||||||
def follow_all(follower, followeds) do
|
def follow_all(follower, followeds) do
|
||||||
|
@ -366,14 +380,12 @@ def follow_all(follower, followeds) do
|
||||||
end
|
end
|
||||||
|
|
||||||
def follow(%User{} = follower, %User{info: info} = followed) do
|
def follow(%User{} = follower, %User{info: info} = followed) do
|
||||||
user_config = Application.get_env(:pleroma, :user)
|
deny_follow_blocked = Pleroma.Config.get([:user, :deny_follow_blocked])
|
||||||
deny_follow_blocked = Keyword.get(user_config, :deny_follow_blocked)
|
|
||||||
|
|
||||||
ap_followers = followed.follower_address
|
ap_followers = followed.follower_address
|
||||||
|
|
||||||
cond do
|
cond do
|
||||||
following?(follower, followed) or info.deactivated ->
|
info.deactivated ->
|
||||||
{:error, "Could not follow user: #{followed.nickname} is already on your list."}
|
{:error, "Could not follow user: You are deactivated."}
|
||||||
|
|
||||||
deny_follow_blocked and blocks?(followed, follower) ->
|
deny_follow_blocked and blocks?(followed, follower) ->
|
||||||
{:error, "Could not follow user: #{followed.nickname} blocked you."}
|
{:error, "Could not follow user: #{followed.nickname} blocked you."}
|
||||||
|
@ -737,126 +749,13 @@ def get_recipients_from_activity(%Activity{recipients: to}) do
|
||||||
|> Repo.all()
|
|> Repo.all()
|
||||||
end
|
end
|
||||||
|
|
||||||
def search(query, resolve \\ false, for_user \\ nil) do
|
@spec mute(User.t(), User.t(), boolean()) :: {:ok, User.t()} | {:error, String.t()}
|
||||||
# Strip the beginning @ off if there is a query
|
def mute(muter, %User{ap_id: ap_id}, notifications? \\ true) do
|
||||||
query = String.trim_leading(query, "@")
|
info = muter.info
|
||||||
|
|
||||||
if resolve, do: get_or_fetch(query)
|
|
||||||
|
|
||||||
{:ok, results} =
|
|
||||||
Repo.transaction(fn ->
|
|
||||||
Ecto.Adapters.SQL.query(Repo, "select set_limit(0.25)", [])
|
|
||||||
Repo.all(search_query(query, for_user))
|
|
||||||
end)
|
|
||||||
|
|
||||||
results
|
|
||||||
end
|
|
||||||
|
|
||||||
def search_query(query, for_user) do
|
|
||||||
fts_subquery = fts_search_subquery(query)
|
|
||||||
trigram_subquery = trigram_search_subquery(query)
|
|
||||||
union_query = from(s in trigram_subquery, union_all: ^fts_subquery)
|
|
||||||
distinct_query = from(s in subquery(union_query), order_by: s.search_type, distinct: s.id)
|
|
||||||
|
|
||||||
from(s in subquery(boost_search_rank_query(distinct_query, for_user)),
|
|
||||||
order_by: [desc: s.search_rank],
|
|
||||||
limit: 20
|
|
||||||
)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp boost_search_rank_query(query, nil), do: query
|
|
||||||
|
|
||||||
defp boost_search_rank_query(query, for_user) do
|
|
||||||
friends_ids = get_friends_ids(for_user)
|
|
||||||
followers_ids = get_followers_ids(for_user)
|
|
||||||
|
|
||||||
from(u in subquery(query),
|
|
||||||
select_merge: %{
|
|
||||||
search_rank:
|
|
||||||
fragment(
|
|
||||||
"""
|
|
||||||
CASE WHEN (?) THEN (?) * 1.3
|
|
||||||
WHEN (?) THEN (?) * 1.2
|
|
||||||
WHEN (?) THEN (?) * 1.1
|
|
||||||
ELSE (?) END
|
|
||||||
""",
|
|
||||||
u.id in ^friends_ids and u.id in ^followers_ids,
|
|
||||||
u.search_rank,
|
|
||||||
u.id in ^friends_ids,
|
|
||||||
u.search_rank,
|
|
||||||
u.id in ^followers_ids,
|
|
||||||
u.search_rank,
|
|
||||||
u.search_rank
|
|
||||||
)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp fts_search_subquery(term, query \\ User) do
|
|
||||||
processed_query =
|
|
||||||
term
|
|
||||||
|> String.replace(~r/\W+/, " ")
|
|
||||||
|> String.trim()
|
|
||||||
|> String.split()
|
|
||||||
|> Enum.map(&(&1 <> ":*"))
|
|
||||||
|> Enum.join(" | ")
|
|
||||||
|
|
||||||
from(
|
|
||||||
u in query,
|
|
||||||
select_merge: %{
|
|
||||||
search_type: ^0,
|
|
||||||
search_rank:
|
|
||||||
fragment(
|
|
||||||
"""
|
|
||||||
ts_rank_cd(
|
|
||||||
setweight(to_tsvector('simple', regexp_replace(?, '\\W', ' ', 'g')), 'A') ||
|
|
||||||
setweight(to_tsvector('simple', regexp_replace(coalesce(?, ''), '\\W', ' ', 'g')), 'B'),
|
|
||||||
to_tsquery('simple', ?),
|
|
||||||
32
|
|
||||||
)
|
|
||||||
""",
|
|
||||||
u.nickname,
|
|
||||||
u.name,
|
|
||||||
^processed_query
|
|
||||||
)
|
|
||||||
},
|
|
||||||
where:
|
|
||||||
fragment(
|
|
||||||
"""
|
|
||||||
(setweight(to_tsvector('simple', regexp_replace(?, '\\W', ' ', 'g')), 'A') ||
|
|
||||||
setweight(to_tsvector('simple', regexp_replace(coalesce(?, ''), '\\W', ' ', 'g')), 'B')) @@ to_tsquery('simple', ?)
|
|
||||||
""",
|
|
||||||
u.nickname,
|
|
||||||
u.name,
|
|
||||||
^processed_query
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|> restrict_deactivated()
|
|
||||||
end
|
|
||||||
|
|
||||||
defp trigram_search_subquery(term) do
|
|
||||||
from(
|
|
||||||
u in User,
|
|
||||||
select_merge: %{
|
|
||||||
# ^1 gives 'Postgrex expected a binary, got 1' for some weird reason
|
|
||||||
search_type: fragment("?", 1),
|
|
||||||
search_rank:
|
|
||||||
fragment(
|
|
||||||
"similarity(?, trim(? || ' ' || coalesce(?, '')))",
|
|
||||||
^term,
|
|
||||||
u.nickname,
|
|
||||||
u.name
|
|
||||||
)
|
|
||||||
},
|
|
||||||
where: fragment("trim(? || ' ' || coalesce(?, '')) % ?", u.nickname, u.name, ^term)
|
|
||||||
)
|
|
||||||
|> restrict_deactivated()
|
|
||||||
end
|
|
||||||
|
|
||||||
def mute(muter, %User{ap_id: ap_id}) do
|
|
||||||
info_cng =
|
info_cng =
|
||||||
muter.info
|
User.Info.add_to_mutes(info, ap_id)
|
||||||
|> User.Info.add_to_mutes(ap_id)
|
|> User.Info.add_to_muted_notifications(info, ap_id, notifications?)
|
||||||
|
|
||||||
cng =
|
cng =
|
||||||
change(muter)
|
change(muter)
|
||||||
|
@@ -866,9 +765,11 @@ def mute(muter, %User{ap_id: ap_id}) do
   end

   def unmute(muter, %{ap_id: ap_id}) do
+    info = muter.info
+
     info_cng =
-      muter.info
-      |> User.Info.remove_from_mutes(ap_id)
+      User.Info.remove_from_mutes(info, ap_id)
+      |> User.Info.remove_from_muted_notifications(info, ap_id)

     cng =
       change(muter)

@@ -964,15 +865,18 @@ def unblock(blocker, %{ap_id: ap_id}) do
   def mutes?(nil, _), do: false
   def mutes?(user, %{ap_id: ap_id}), do: Enum.member?(user.info.mutes, ap_id)

-  def blocks?(user, %{ap_id: ap_id}) do
-    blocks = user.info.blocks
-    domain_blocks = user.info.domain_blocks
+  @spec muted_notifications?(User.t() | nil, User.t() | map()) :: boolean()
+  def muted_notifications?(nil, _), do: false
+
+  def muted_notifications?(user, %{ap_id: ap_id}),
+    do: Enum.member?(user.info.muted_notifications, ap_id)
+
+  def blocks?(%User{info: info} = _user, %{ap_id: ap_id}) do
+    blocks = info.blocks
+    domain_blocks = info.domain_blocks
     %{host: host} = URI.parse(ap_id)

-    Enum.member?(blocks, ap_id) ||
-      Enum.any?(domain_blocks, fn domain ->
-        host == domain
-      end)
+    Enum.member?(blocks, ap_id) || Enum.any?(domain_blocks, &(&1 == host))
   end

   def subscribed_to?(user, %{ap_id: ap_id}) do

|
||||||
|
|
||||||
@spec perform(atom(), User.t()) :: {:ok, User.t()}
|
@spec perform(atom(), User.t()) :: {:ok, User.t()}
|
||||||
def perform(:delete, %User{} = user) do
|
def perform(:delete, %User{} = user) do
|
||||||
{:ok, user} = User.deactivate(user)
|
{:ok, _user} = ActivityPub.delete(user)
|
||||||
|
|
||||||
# Remove all relationships
|
# Remove all relationships
|
||||||
{:ok, followers} = User.get_followers(user)
|
{:ok, followers} = User.get_followers(user)
|
||||||
|
|
||||||
Enum.each(followers, fn follower -> User.unfollow(follower, user) end)
|
Enum.each(followers, fn follower ->
|
||||||
|
ActivityPub.unfollow(follower, user)
|
||||||
|
User.unfollow(follower, user)
|
||||||
|
end)
|
||||||
|
|
||||||
{:ok, friends} = User.get_friends(user)
|
{:ok, friends} = User.get_friends(user)
|
||||||
|
|
||||||
Enum.each(friends, fn followed -> User.unfollow(user, followed) end)
|
Enum.each(friends, fn followed ->
|
||||||
|
ActivityPub.unfollow(user, followed)
|
||||||
|
User.unfollow(user, followed)
|
||||||
|
end)
|
||||||
|
|
||||||
delete_user_activities(user)
|
delete_user_activities(user)
|
||||||
|
invalidate_cache(user)
|
||||||
|
Repo.delete(user)
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec perform(atom(), User.t()) :: {:ok, User.t()}
|
@spec perform(atom(), User.t()) :: {:ok, User.t()}
|
||||||
|
@ -1125,6 +1037,34 @@ def perform(:follow_import, %User{} = follower, followed_identifiers)
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@spec external_users_query() :: Ecto.Query.t()
|
||||||
|
def external_users_query do
|
||||||
|
User.Query.build(%{
|
||||||
|
external: true,
|
||||||
|
active: true,
|
||||||
|
order_by: :id
|
||||||
|
})
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec external_users(keyword()) :: [User.t()]
|
||||||
|
def external_users(opts \\ []) do
|
||||||
|
query =
|
||||||
|
external_users_query()
|
||||||
|
|> select([u], struct(u, [:id, :ap_id, :info]))
|
||||||
|
|
||||||
|
query =
|
||||||
|
if opts[:max_id],
|
||||||
|
do: where(query, [u], u.id > ^opts[:max_id]),
|
||||||
|
else: query
|
||||||
|
|
||||||
|
query =
|
||||||
|
if opts[:limit],
|
||||||
|
do: limit(query, ^opts[:limit]),
|
||||||
|
else: query
|
||||||
|
|
||||||
|
Repo.all(query)
|
||||||
|
end
|
||||||
|
|
||||||
def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers),
|
def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers),
|
||||||
do:
|
do:
|
||||||
PleromaJobQueue.enqueue(:background, __MODULE__, [
|
PleromaJobQueue.enqueue(:background, __MODULE__, [
|
||||||
|
@ -1142,18 +1082,35 @@ def follow_import(%User{} = follower, followed_identifiers) when is_list(followe
|
||||||
])
|
])
|
||||||
|
|
||||||
def delete_user_activities(%User{ap_id: ap_id} = user) do
|
def delete_user_activities(%User{ap_id: ap_id} = user) do
|
||||||
stream =
|
|
||||||
ap_id
|
ap_id
|
||||||
|> Activity.query_by_actor()
|
|> Activity.query_by_actor()
|
||||||
|> Repo.stream()
|
|> RepoStreamer.chunk_stream(50)
|
||||||
|
|> Stream.each(fn activities ->
|
||||||
Repo.transaction(fn -> Enum.each(stream, &delete_activity(&1)) end, timeout: :infinity)
|
Enum.each(activities, &delete_activity(&1))
|
||||||
|
end)
|
||||||
|
|> Stream.run()
|
||||||
|
|
||||||
{:ok, user}
|
{:ok, user}
|
||||||
end
|
end
|
||||||
|
|
||||||
defp delete_activity(%{data: %{"type" => "Create"}} = activity) do
|
defp delete_activity(%{data: %{"type" => "Create"}} = activity) do
|
||||||
Object.normalize(activity) |> ActivityPub.delete()
|
activity
|
||||||
|
|> Object.normalize()
|
||||||
|
|> ActivityPub.delete()
|
||||||
|
end
|
||||||
|
|
||||||
|
defp delete_activity(%{data: %{"type" => "Like"}} = activity) do
|
||||||
|
user = get_cached_by_ap_id(activity.actor)
|
||||||
|
object = Object.normalize(activity)
|
||||||
|
|
||||||
|
ActivityPub.unlike(user, object)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp delete_activity(%{data: %{"type" => "Announce"}} = activity) do
|
||||||
|
user = get_cached_by_ap_id(activity.actor)
|
||||||
|
object = Object.normalize(activity)
|
||||||
|
|
||||||
|
ActivityPub.unannounce(user, object)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp delete_activity(_activity), do: "Doing nothing"
|
defp delete_activity(_activity), do: "Doing nothing"
|
||||||
|
@@ -1162,9 +1119,7 @@ def html_filter_policy(%User{info: %{no_rich_text: true}}) do
     Pleroma.HTML.Scrubber.TwitterText
   end

-  @default_scrubbers Pleroma.Config.get([:markup, :scrub_policy])
-
-  def html_filter_policy(_), do: @default_scrubbers
+  def html_filter_policy(_), do: Pleroma.Config.get([:markup, :scrub_policy])

   def fetch_by_ap_id(ap_id) do
     ap_try = ActivityPub.make_user_from_ap_id(ap_id)

|
||||||
end
|
end
|
||||||
|
|
||||||
# OStatus Magic Key
|
# OStatus Magic Key
|
||||||
def public_key_from_info(%{magic_key: magic_key}) do
|
def public_key_from_info(%{magic_key: magic_key}) when not is_nil(magic_key) do
|
||||||
{:ok, Pleroma.Web.Salmon.decode_key(magic_key)}
|
{:ok, Pleroma.Web.Salmon.decode_key(magic_key)}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def public_key_from_info(_), do: {:error, "not found key"}
|
||||||
|
|
||||||
def get_public_key_for_ap_id(ap_id) do
|
def get_public_key_for_ap_id(ap_id) do
|
||||||
with {:ok, %User{} = user} <- get_or_fetch_by_ap_id(ap_id),
|
with {:ok, %User{} = user} <- get_or_fetch_by_ap_id(ap_id),
|
||||||
{:ok, public_key} <- public_key_from_info(user.info) do
|
{:ok, public_key} <- public_key_from_info(user.info) do
|
||||||
|
@ -1424,23 +1381,34 @@ def get_mascot(%{info: %{mascot: mascot}}) when is_nil(mascot) do
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
def ensure_keys_present(user) do
|
def ensure_keys_present(%User{info: info} = user) do
|
||||||
info = user.info
|
|
||||||
|
|
||||||
if info.keys do
|
if info.keys do
|
||||||
{:ok, user}
|
{:ok, user}
|
||||||
else
|
else
|
||||||
{:ok, pem} = Keys.generate_rsa_pem()
|
{:ok, pem} = Keys.generate_rsa_pem()
|
||||||
|
|
||||||
info_cng =
|
user
|
||||||
info
|
|> Ecto.Changeset.change()
|
||||||
|> User.Info.set_keys(pem)
|
|> Ecto.Changeset.put_embed(:info, User.Info.set_keys(info, pem))
|
||||||
|
|> update_and_set_cache()
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
cng =
|
def get_ap_ids_by_nicknames(nicknames) do
|
||||||
Ecto.Changeset.change(user)
|
from(u in User,
|
||||||
|> Ecto.Changeset.put_embed(:info, info_cng)
|
where: u.nickname in ^nicknames,
|
||||||
|
select: u.ap_id
|
||||||
|
)
|
||||||
|
|> Repo.all()
|
||||||
|
end
|
||||||
|
|
||||||
update_and_set_cache(cng)
|
defdelegate search(query, opts \\ []), to: User.Search
|
||||||
end
|
|
||||||
|
defp put_password_hash(
|
||||||
|
%Ecto.Changeset{valid?: true, changes: %{password: password}} = changeset
|
||||||
|
) do
|
||||||
|
change(changeset, password_hash: Pbkdf2.hashpwsalt(password))
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp put_password_hash(changeset), do: changeset
|
||||||
end
|
end
|
||||||
|
|
Some files were not shown because too many files have changed in this diff.