Merging with develop

Conflicts:
	lib/pleroma/web/activity_pub/transmogrifier.ex
	lib/pleroma/user.ex
commit bdfa3a6fa8
888 changed files with 17070 additions and 68142 deletions
.gitignore (vendored, 4 changes)
@@ -3,7 +3,6 @@
 /db
 /deps
 /*.ez
-/uploads
 /test/uploads
 /.elixir_ls
 /test/fixtures/test_tmp.txt
@@ -35,3 +34,6 @@ erl_crash.dump
 
 # Editor config
 /.vscode/
+
+# Prevent committing docs files
+/priv/static/doc/*
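A quick way to confirm the new ignore rule behaves as intended is git's standard check-ignore command; a small sketch (the sample path is hypothetical, any file under priv/static/doc/ would do):

    # Prints the .gitignore rule that matches the generated docs file, if any
    git check-ignore -v priv/static/doc/index.html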
.gitlab-ci.yml
@@ -1,9 +1,5 @@
 image: elixir:1.8.1
 
-services:
-- name: postgres:9.6.2
-  command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
-
 variables:
   POSTGRES_DB: pleroma_test
   POSTGRES_USER: postgres
@@ -17,29 +13,68 @@ cache:
   - deps
   - _build
 stages:
-  - lint
+  - build
   - test
-  - analysis
+  - deploy
 
 before_script:
   - mix local.hex --force
   - mix local.rebar --force
+
+build:
+  stage: build
+  script:
   - mix deps.get
   - mix compile --force
-  - mix ecto.create
-  - mix ecto.migrate
 
-lint:
-  stage: lint
+docs-build:
+  stage: build
+  only:
+  - master@pleroma/pleroma
+  - develop@pleroma/pleroma
+  variables:
+    MIX_ENV: dev
   script:
-  - mix format --check-formatted
+  - mix deps.get
+  - mix compile
+  - mix docs
+  artifacts:
+    paths:
+    - priv/static/doc
 
 unit-testing:
   stage: test
+  services:
+  - name: postgres:9.6.2
+    command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
   script:
+  - mix ecto.create
+  - mix ecto.migrate
   - mix test --trace --preload-modules
 
-analysis:
-  stage: analysis
+lint:
+  stage: test
   script:
+  - mix format --check-formatted
+
+analysis:
+  stage: test
+  script:
+  - mix deps.get
   - mix credo --strict --only=warnings,todo,fixme,consistency,readability
+
+
+docs-deploy:
+  stage: deploy
+  image: alpine:3.9
+  only:
+  - master@pleroma/pleroma
+  - develop@pleroma/pleroma
+  before_script:
+  - apk update && apk add openssh-client rsync
+  script:
+  - mkdir -p ~/.ssh
+  - echo "${SSH_HOST_KEY}" > ~/.ssh/known_hosts
+  - eval $(ssh-agent -s)
+  - echo "$SSH_PRIVATE_KEY" | tr -d '\r' | ssh-add -
+  - rsync -hrvz --delete -e "ssh -p ${SSH_PORT}" priv/static/doc/ "${SSH_USER_HOST_LOCATION}/${CI_COMMIT_REF_NAME}"
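For reference, the pipeline above can be approximated locally with the same mix commands. A minimal sketch, assuming Docker is available to stand in for the postgres:9.6.2 service and that the test config points at localhost with the POSTGRES_DB and POSTGRES_USER values from the CI variables (container name and port mapping are arbitrary):

    # Throwaway Postgres matching the CI service definition
    docker run -d --name pleroma_test_db -p 5432:5432 \
      -e POSTGRES_DB=pleroma_test -e POSTGRES_USER=postgres \
      postgres:9.6.2 postgres -c fsync=off -c synchronous_commit=off -c full_page_writes=off

    # Same steps as the build and unit-testing jobs
    mix local.hex --force
    mix local.rebar --force
    mix deps.get
    mix compile --force
    mix ecto.create
    mix ecto.migrate
    mix test --trace --preload-modules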
CC-BY-NC-ND-4.0 (new file, 403 lines)
@@ -0,0 +1,403 @@
[Full, unmodified text of the Creative Commons Attribution-NonCommercial-NoDerivatives 4.0 International Public License, added verbatim; the standard legalcode is published at creativecommons.org.]
CC-BY-SA-4.0 (new file, 427 lines)
@@ -0,0 +1,427 @@
[Full, unmodified text of the Creative Commons Attribution-ShareAlike 4.0 International Public License, added verbatim; the standard legalcode is published at creativecommons.org.]
CHANGELOG.md (new file, 112 lines)
@@ -0,0 +1,112 @@
# Changelog
All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

## [unreleased]
### Added
- LDAP authentication
- External OAuth provider authentication
- A [job queue](https://git.pleroma.social/pleroma/pleroma_job_queue) for federation, emails, web push, etc.
- [Prometheus](https://prometheus.io/) metrics
- Support for Mastodon's remote interaction
- Mix Tasks: `mix pleroma.database remove_embedded_objects`
- Federation: Support for reports
- Configuration: `safe_dm_mentions` option
- Configuration: `link_name` option
- Configuration: `fetch_initial_posts` option
- Configuration: `notify_email` option
- Configuration: Media proxy `whitelist` option
- Pleroma API: User subscriptions
- Pleroma API: Healthcheck endpoint
- Admin API: Endpoints for listing/revoking invite tokens
- Admin API: Endpoints for making users follow/unfollow each other
- Mastodon API: [Scheduled statuses](https://docs.joinmastodon.org/api/rest/scheduled-statuses/)
- Mastodon API: `/api/v1/notifications/destroy_multiple` (glitch-soc extension)
- Mastodon API: `/api/v1/pleroma/accounts/:id/favourites` (API extension)
- Mastodon API: [Reports](https://docs.joinmastodon.org/api/rest/reports/)
- ActivityPub C2S: OAuth endpoints
- Metadata RelMe provider
- Emoji packs and emoji pack manager

### Changed
- **Breaking:** Configuration: move from Pleroma.Mailer to Pleroma.Emails.Mailer
- Enforcement of OAuth scopes
- Add multiple use/time expiring invite token
- Restyled OAuth pages to fit with Pleroma's default theme
- Link/mention/hashtag detection is now handled by [auto_linker](https://git.pleroma.social/pleroma/auto_linker)
- NodeInfo: Return `safe_dm_mentions` feature flag
- Federation: Expand the audience of delete activities to all recipients of the deleted object
- Federation: Removed `inReplyToStatusId` from objects
- Configuration: Dedupe enabled by default
- Configuration: Added `extra_cookie_attrs` for setting non-standard cookie attributes. Defaults to ["SameSite=Lax"] so that remote follows work.
- Pleroma API: Support for emoji tags in `/api/pleroma/emoji` resulting in a breaking API change
- Timelines: Messages involving people you have blocked will be excluded from the timeline in all cases instead of just repeats.
- Mastodon API: Support for `exclude_types`, `limit` and `min_id` in `/api/v1/notifications`
- Mastodon API: Add `languages` and `registrations` to `/api/v1/instance`
- Mastodon API: Provide plaintext versions of cw/content in the Status entity
- Mastodon API: Add `pleroma.conversation_id`, `pleroma.in_reply_to_account_acct` fields to the Status entity
- Mastodon API: Add `pleroma.tags`, `pleroma.relationship{}`, `pleroma.is_moderator`, `pleroma.is_admin`, `pleroma.confirmation_pending`, `pleroma.hide_followers`, `pleroma.hide_follows`, `pleroma.hide_favorites` fields to the User entity
- Mastodon API: Add `pleroma.show_role`, `pleroma.no_rich_text` fields to the Source subentity
- Mastodon API: Add support for updating `no_rich_text`, `hide_followers`, `hide_follows`, `hide_favorites`, `show_role` in `PATCH /api/v1/update_credentials`
- Mastodon API: Add `pleroma.is_seen` to the Notification entity
- Mastodon API: Add `pleroma.local` to the Status entity
- Mastodon API: Add `preview` parameter to `POST /api/v1/statuses`
- Mastodon API: Add `with_muted` parameter to timeline endpoints
- Mastodon API: Actual reblog hiding instead of a dummy
- Mastodon API: Remove attachment limit in the Status entity
- Deps: Updated Cowboy to 2.6
- Deps: Updated Ecto to 3.0.7
- Don't ship finmoji by default, they can be installed as an emoji pack
- Mastodon API: Added support max_id & since_id for bookmark timeline endpoints.

### Fixed
- Followers counter not being updated when a follower is blocked
- Deactivated users being able to request an access token
- Limit on request body in rich media/relme parsers being ignored resulting in a possible memory leak
- proper Twitter Card generation instead of a dummy
- NodeInfo: Include admins in `staffAccounts`
- ActivityPub: Crashing when requesting empty local user's outbox
- Federation: Handling of objects without `summary` property
- Federation: Add a language tag to activities as required by ActivityStreams 2.0
- Federation: Do not federate avatar/banner if set to default allowing other servers/clients to use their defaults
- Federation: Cope with missing or explicitly nulled address lists
- Federation: Explicitly ensure activities addressed to `as:Public` become addressed to the followers collection
- Federation: Better cope with actors which do not declare a followers collection and use `as:Public` with these semantics
- Federation: Follow requests from remote users who have been blocked will be automatically rejected if appropriate
- MediaProxy: Parse name from content disposition headers even for non-whitelisted types
- MediaProxy: S3 link encoding
- Rich Media: Reject any data which cannot be explicitly encoded into JSON
- Pleroma API: Importing follows from Mastodon 2.8+
- Twitter API: Exposing default scope, `no_rich_text` of the user to anyone
- Twitter API: Returning the `role` object in user entity despite `show_role = false`
- Mastodon API: `/api/v1/favourites` serving only public activities
- Mastodon API: Reblogs having `in_reply_to_id` - `null` even when they are replies
- Mastodon API: Streaming API broadcasting wrong activity id
- Mastodon API: 500 errors when requesting a card for a private conversation
- Mastodon API: Handling of `reblogs` in `/api/v1/accounts/:id/follow`
- Mastodon API: Correct `reblogged`, `favourited`, and `bookmarked` values in the reblog status JSON
- Mastodon API: Exposing default scope of the user to anyone

## [0.9.9999] - 2019-04-05
### Security
- Mastodon API: Fix content warnings skipping HTML sanitization

## [0.9.999] - 2019-03-13
Frontend changes only.
### Added
- Added floating action button for posting status on mobile
### Changed
- Changed user-settings icon to a pencil
### Fixed
- Keyboard shortcuts activating when typing a message
- Gaps when scrolling down on a timeline after showing new

## [0.9.99] - 2019-03-08
### Changed
- Update the frontend to the 0.9.99 tag
### Fixed
- Sign the date header in federation to fix Mastodon federation.

## [0.9.9] - 2019-02-22
This is our first stable release.
COPYING (new file, 41 lines)
@@ -0,0 +1,41 @@
Unless otherwise stated this repository is copyright © 2017-2019
Pleroma Authors <https://pleroma.social/>, and is distributed under
The GNU Affero General Public License Version 3, you should have received a
copy of the license file as AGPL-3.

---

The following files are copyright © 2019 shitposter.club, and are distributed
under the Creative Commons Attribution-ShareAlike 4.0 International license,
you should have received a copy of the license file as CC-BY-SA-4.0.

priv/static/images/pleroma-fox-tan.png
priv/static/images/pleroma-fox-tan-smol.png
priv/static/images/pleroma-tan.png

---

The following files are copyright © 2017-2019 Pleroma Authors
<https://pleroma.social/>, and are distributed under the Creative Commons
Attribution-ShareAlike 4.0 International license, you should have received
a copy of the license file as CC-BY-SA-4.0.

priv/static/images/avi.png
priv/static/images/banner.png
priv/static/instance/thumbnail.jpeg

---

All photos published on Unsplash can be used for free. You can use them for
commercial and noncommercial purposes. You do not need to ask permission from
or provide credit to the photographer or Unsplash, although it is appreciated
when possible.

More precisely, Unsplash grants you an irrevocable, nonexclusive, worldwide
copyright license to download, copy, modify, distribute, perform, and use
photos from Unsplash for free, including for commercial purposes, without
permission from or attributing the photographer or Unsplash. This license
does not include the right to compile photos from Unsplash to replicate
a similar or competing service.

priv/static/images/city.jpg
10
README.md

@ -1,14 +1,16 @@

# Pleroma

**Note**: This readme as well as complete documentation is also available at <https://docs-develop.pleroma.social>

## About Pleroma

Pleroma is a microblogging server software that can federate (= exchange messages with) other servers that support the same federation standards (OStatus and ActivityPub). What that means is that you can host a server for yourself or your friends and stay in control of your online identity, but still exchange messages with people on larger servers. Pleroma will federate with all servers that implement either OStatus or ActivityPub, like Friendica, GNU Social, Hubzilla, Mastodon, Misskey, Peertube, and Pixelfed.

Pleroma is written in Elixir, is high-performance, and can run on small devices like a Raspberry Pi.

For clients it supports both the [GNU Social API with Qvitter extensions](https://twitter-api.readthedocs.io/en/latest/index.html) and the [Mastodon client API](https://github.com/tootsuite/documentation/blob/master/Using-the-API/API.md).
For clients it supports both the [GNU Social API with Qvitter extensions](https://twitter-api.readthedocs.io/en/latest/index.html) and the [Mastodon client API](https://docs.joinmastodon.org/api/guidelines/).

- [Client Applications for Pleroma](docs/Clients.md)
- [Client Applications for Pleroma](https://docs-develop.pleroma.social/clients.html)

No release has been made yet, but several servers have been online for months already. If you want to run your own server, feel free to contact us at @lain@pleroma.soykaf.com or in our dev chat at #pleroma on freenode or via matrix at <https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org>.

@ -28,7 +30,7 @@ While we don’t provide docker files, other people have written very good ones.

* Run `mix deps.get` to install elixir dependencies.
* Run `mix pleroma.instance gen`. This will ask you questions about your instance and generate a configuration file in `config/generated_config.exs`. Check that and copy it to either `config/dev.secret.exs` or `config/prod.secret.exs`. It will also create a `config/setup_db.psql`, which you should run as the PostgreSQL superuser (i.e., `sudo -u postgres psql -f config/setup_db.psql`). It will create the database, user, and password you gave `mix pleroma.instance gen` earlier, as well as set up the necessary extensions in the database. PostgreSQL superuser privileges are only needed for this step.
* For these next steps, the default will be to run pleroma using the dev configuration file, `config/dev.secret.exs`. To run them using the prod config file, prefix each command at the shell with `MIX_ENV=prod`. For example: `MIX_ENV=prod mix phx.server`. Documentation for the config can be found at [`docs/config.md`](docs/config.md)
* For these next steps, the default will be to run pleroma using the dev configuration file, `config/dev.secret.exs`. To run them using the prod config file, prefix each command at the shell with `MIX_ENV=prod`. For example: `MIX_ENV=prod mix phx.server`. Documentation for the config can be found at [`docs/config.md`](docs/config.md) in the repository, or at the "Configuration" page on <https://docs-develop.pleroma.social/config.html>
* Run `mix ecto.migrate` to run the database migrations. You will have to do this again after certain updates.
* You can check if your instance is configured correctly by running it with `mix phx.server` and checking the instance info endpoint at `/api/v1/instance`. If it shows your uri, name, and email correctly, you are configured correctly. If it shows something like `localhost:4000`, your configuration is probably wrong, unless you are running a local development setup.
* The common and convenient way for adding HTTPS is by using Nginx as a reverse proxy. You can look at an example Nginx configuration in `installation/pleroma.nginx`. If you need TLS/SSL certificates for HTTPS, you can get some for free with letsencrypt: <https://letsencrypt.org/>. The simplest way to obtain and install a certificate is to use [Certbot](https://certbot.eff.org). Depending on your specific setup, certbot may be able to get a certificate and configure your web server automatically.

@ -66,7 +68,7 @@ This is useful for running Pleroma inside Tor or I2P.

## Customization and contribution

The [Pleroma Wiki](https://git.pleroma.social/pleroma/pleroma/wikis/home) offers manuals and guides on how to further customize your instance to your liking and how you can contribute to the project.
The [Pleroma Documentation](https://docs-develop.pleroma.social/readme.html) offers manuals and guides on how to further customize your instance to your liking and how you can contribute to the project.

## Troubleshooting
config/config.exs

@ -1,3 +1,41 @@

# (ASCII-art banner omitted)
#
# !!! ATTENTION !!!
# DO NOT EDIT THIS FILE! THIS FILE CONTAINS THE DEFAULT VALUES FOR THE CON-
# FIGURATION! EDIT YOUR SECRET FILE (either prod.secret.exs, dev.secret.exs).
#
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#

@ -8,7 +46,9 @@

# General application configuration
config :pleroma, ecto_repos: [Pleroma.Repo]

config :pleroma, Pleroma.Repo, types: Pleroma.PostgresTypes
config :pleroma, Pleroma.Repo,
  types: Pleroma.PostgresTypes,
  telemetry_event: [Pleroma.Repo.Instrumenter]

config :pleroma, Pleroma.Captcha,
  enabled: false,

@ -34,7 +74,8 @@

# Upload configuration
config :pleroma, Pleroma.Upload,
  uploader: Pleroma.Uploaders.Local,
  filters: [],
  filters: [Pleroma.Upload.Filter.Dedupe],
  link_name: true,
  proxy_remote: false,
  proxy_opts: [
    redirect_on_failure: false,

@ -55,7 +96,13 @@

  cgi: "https://mdii.sakura.ne.jp/mdii-post.cgi",
  files: "https://mdii.sakura.ne.jp"

config :pleroma, :emoji, shortcode_globs: ["/emoji/custom/**/*.png"]
config :pleroma, :emoji,
  shortcode_globs: ["/emoji/custom/**/*.png"],
  groups: [
    # Put groups that have higher priority than defaults here. Example in `docs/config/custom_emoji.md`
    Custom: ["/emoji/*.png", "/emoji/**/*.png"]
  ],
  default_manifest: "https://git.pleroma.social/pleroma/emoji-index/raw/master/index.json"

config :pleroma, :uri_schemes,
  valid_schemes: [

@ -88,6 +135,7 @@

# Configures the endpoint
config :pleroma, Pleroma.Web.Endpoint,
  instrumenters: [Pleroma.Web.Endpoint.Instrumenter],
  url: [host: "localhost"],
  http: [
    dispatch: [

@ -106,7 +154,10 @@

  signing_salt: "CqaoopA2",
  render_errors: [view: Pleroma.Web.ErrorView, accepts: ~w(json)],
  pubsub: [name: Pleroma.PubSub, adapter: Phoenix.PubSub.PG2],
  secure_cookie_flag: true
  secure_cookie_flag: true,
  extra_cookie_attrs: [
    "SameSite=Lax"
  ]

# Configures Elixir's Logger
config :logger, :console,

@ -119,6 +170,11 @@

  format: "$metadata[$level] $message",
  metadata: [:request_id]

config :quack,
  level: :warn,
  meta: [:all],
  webhook_url: "https://hooks.slack.com/services/YOUR-KEY-HERE"

config :mime, :types, %{
  "application/xml" => ["xml"],
  "application/xrd+xml" => ["xrd+xml"],

@ -145,6 +201,7 @@

config :pleroma, :instance,
  name: "Pleroma",
  email: "example@example.com",
  notify_email: "noreply@example.com",
  description: "A Pleroma instance, an alternative fediverse server",
  limit: 5_000,
  remote_limit: 100_000,

@ -164,16 +221,18 @@

  allowed_post_formats: [
    "text/plain",
    "text/html",
    "text/markdown"
    "text/markdown",
    "text/bbcode"
  ],
  finmoji_enabled: true,
  mrf_transparency: true,
  autofollowed_nicknames: [],
  max_pinned_statuses: 1,
  no_attachment_links: false,
  welcome_user_nickname: nil,
  welcome_message: nil,
  max_report_comment_size: 1000
  max_report_comment_size: 1000,
  safe_dm_mentions: false,
  healthcheck: false

config :pleroma, :markup,
  # XXX - unfortunately, inline images must be enabled by default right now, because

@ -268,12 +327,11 @@

    follow_redirect: true,
    pool: :media
  ]
]
],
  whitelist: []

config :pleroma, :chat, enabled: true

config :ecto, json_library: Jason

config :phoenix, :format_encoders, json: Jason

config :pleroma, :gopher,

@ -281,7 +339,9 @@

  ip: {0, 0, 0, 0},
  port: 9999

config :pleroma, Pleroma.Web.Metadata, providers: [], unfurl_nsfw: false
config :pleroma, Pleroma.Web.Metadata,
  providers: [Pleroma.Web.Metadata.Providers.RelMe],
  unfurl_nsfw: false

config :pleroma, :suggestions,
  enabled: false,

@ -350,10 +410,13 @@

  initial_timeout: 30,
  max_retries: 5

config :pleroma, Pleroma.Jobs,
config :pleroma_job_queue, :queues,
  federator_incoming: [max_jobs: 50],
  federator_incoming: 50,
  federator_outgoing: [max_jobs: 50],
  federator_outgoing: 50,
  mailer: [max_jobs: 10]
  web_push: 50,
  mailer: 10,
  transmogrifier: 20,
  scheduled_activities: 10

config :pleroma, :fetch_initial_posts,
  enabled: false,

@ -369,6 +432,42 @@

  rel: false
]

config :pleroma, :ldap,
  enabled: System.get_env("LDAP_ENABLED") == "true",
  host: System.get_env("LDAP_HOST") || "localhost",
  port: String.to_integer(System.get_env("LDAP_PORT") || "389"),
  ssl: System.get_env("LDAP_SSL") == "true",
  sslopts: [],
  tls: System.get_env("LDAP_TLS") == "true",
  tlsopts: [],
  base: System.get_env("LDAP_BASE") || "dc=example,dc=com",
  uid: System.get_env("LDAP_UID") || "cn"

oauth_consumer_strategies = String.split(System.get_env("OAUTH_CONSUMER_STRATEGIES") || "")

ueberauth_providers =
  for strategy <- oauth_consumer_strategies do
    strategy_module_name = "Elixir.Ueberauth.Strategy.#{String.capitalize(strategy)}"
    strategy_module = String.to_atom(strategy_module_name)
    {String.to_atom(strategy), {strategy_module, [callback_params: ["state"]]}}
  end

config :ueberauth,
       Ueberauth,
       base_path: "/oauth",
       providers: ueberauth_providers

config :pleroma, :auth, oauth_consumer_strategies: oauth_consumer_strategies

config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Sendmail

config :prometheus, Pleroma.Web.Endpoint.MetricsExporter, path: "/api/pleroma/app_metrics"

config :pleroma, Pleroma.ScheduledActivity,
  daily_user_limit: 25,
  total_user_limit: 300,
  enabled: true

# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
config/dev.exs

@ -12,14 +12,13 @@

    protocol_options: [max_request_line_length: 8192, max_header_value_length: 8192]
  ],
  protocol: "http",
  secure_cookie_flag: false,
  debug_errors: true,
  code_reloader: true,
  check_origin: false,
  watchers: [],
  secure_cookie_flag: false

config :pleroma, Pleroma.Mailer, adapter: Swoosh.Adapters.Local
config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Local

# ## SSL Support
#
config/emoji.txt

@ -1,5 +1,5 @@

firefox, /emoji/Firefox.gif
firefox, /emoji/Firefox.gif, Gif,Fun
blank, /emoji/blank.png
blank, /emoji/blank.png, Fun
f_00b, /emoji/f_00b.png
f_00b11b, /emoji/f_00b11b.png
f_00b33b, /emoji/f_00b33b.png

@ -28,4 +28,3 @@ f_33b00b, /emoji/f_33b00b.png

f_33b22b, /emoji/f_33b22b.png
f_33h, /emoji/f_33h.png
f_33t, /emoji/f_33t.png
config/test.exs

@ -17,9 +17,15 @@

# Print only warnings and errors during test
config :logger, level: :warn

config :pleroma, Pleroma.Upload, filters: [], link_name: false

config :pleroma, Pleroma.Uploaders.Local, uploads: "test/uploads"

config :pleroma, Pleroma.Mailer, adapter: Swoosh.Adapters.Test
config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Test

config :pleroma, :instance,
  email: "admin@example.com",
  notify_email: "noreply@example.com"

# Configure your database
config :pleroma, Pleroma.Repo,

@ -46,7 +52,12 @@

config :web_push_encryption, :http_client, Pleroma.Web.WebPushHttpClientMock

config :pleroma, Pleroma.Jobs, testing: [max_jobs: 2]
config :pleroma_job_queue, disabled: true

config :pleroma, Pleroma.ScheduledActivity,
  daily_user_limit: 2,
  total_user_limit: 3,
  enabled: false

try do
  import_config "test.secret.exs"
@ -1,25 +0,0 @@

# Differences in Mastodon API responses from vanilla Mastodon

A Pleroma instance can be identified by "<Mastodon version> (compatible; Pleroma <version>)" present in `version` field in response from `/api/v1/instance`

## Flake IDs

Pleroma uses 128-bit ids as opposed to Mastodon's 64 bits. However just like Mastodon's ids they are sortable strings

## Attachment cap

Some apps operate under the assumption that no more than 4 attachments can be returned or uploaded. Pleroma however does not enforce any limits on attachment count neither when returning the status object nor when posting.

## Timelines

Adding the parameter `with_muted=true` to the timeline queries will also return activities by muted (not by blocked!) users.

## Statuses

Has these additional fields under the `pleroma` object:

- `local`: true if the post was made on the local instance.

## Accounts

- `/api/v1/accounts/:id`: The `id` parameter can also be the `nickname` of the user. This only works in this endpoint, not the deeper nested ones for following etc.

@ -1,110 +0,0 @@

# Pleroma API

Requests that require it can be authenticated with [an OAuth token](https://tools.ietf.org/html/rfc6749), the `_pleroma_key` cookie, or [HTTP Basic Authentication](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Authorization).

Request parameters can be passed via [query strings](https://en.wikipedia.org/wiki/Query_string) or as [form data](https://www.w3.org/TR/html401/interact/forms.html). Files must be uploaded as `multipart/form-data`.

## `/api/pleroma/emoji`
### Lists the custom emoji on that server.
* Method: `GET`
* Authentication: not required
* Params: none
* Response: JSON
* Example response: `{"kalsarikannit_f":"/finmoji/128px/kalsarikannit_f-128.png","perkele":"/finmoji/128px/perkele-128.png","blobdab":"/emoji/blobdab.png","happiness":"/finmoji/128px/happiness-128.png"}`
* Note: Same data as Mastodon API’s `/api/v1/custom_emojis` but in a different format

## `/api/pleroma/follow_import`
### Imports your follows, for example from a Mastodon CSV file.
* Method: `POST`
* Authentication: required
* Params:
    * `list`: STRING or FILE containing a whitespace-separated list of accounts to follow
* Response: HTTP 200 on success, 500 on error
* Note: Users that can't be followed are silently skipped.

## `/api/pleroma/captcha`
### Get a new captcha
* Method: `GET`
* Authentication: not required
* Params: none
* Response: Provider specific JSON, the only guaranteed parameter is `type`
* Example response: `{"type": "kocaptcha", "token": "whatever", "url": "https://captcha.kotobank.ch/endpoint"}`

## `/api/pleroma/delete_account`
### Delete an account
* Method `POST`
* Authentication: required
* Params:
    * `password`: user's password
* Response: JSON. Returns `{"status": "success"}` if the deletion was successful, `{"error": "[error message]"}` otherwise
* Example response: `{"error": "Invalid password."}`

## `/api/account/register`
### Register a new user
* Method `POST`
* Authentication: not required
* Params:
    * `nickname`
    * `fullname`
    * `bio`
    * `email`
    * `password`
    * `confirm`
    * `captcha_solution`: optional, contains provider-specific captcha solution,
    * `captcha_token`: optional, contains provider-specific captcha token
    * `token`: invite token required when the registrations aren't public.
* Response: JSON. Returns a user object on success, otherwise returns `{"error": "error_msg"}`
* Example response:
```
{
  "background_image": null,
  "cover_photo": "https://pleroma.soykaf.com/images/banner.png",
  "created_at": "Tue Dec 18 16:55:56 +0000 2018",
  "default_scope": "public",
  "description": "blushy-crushy fediverse idol + pleroma dev\nlet's be friends \nぷれろまの生徒会長。謎の外人。日本語OK. \n公主病.",
  "description_html": "blushy-crushy fediverse idol + pleroma dev.<br />let's be friends <br />ぷれろまの生徒会長。謎の外人。日本語OK. <br />公主病.",
  "favourites_count": 0,
  "fields": [],
  "followers_count": 0,
  "following": false,
  "follows_you": false,
  "friends_count": 0,
  "id": 6,
  "is_local": true,
  "locked": false,
  "name": "lain",
  "name_html": "lain",
  "no_rich_text": false,
  "pleroma": {
    "tags": []
  },
  "profile_image_url": "https://pleroma.soykaf.com/images/avi.png",
  "profile_image_url_https": "https://pleroma.soykaf.com/images/avi.png",
  "profile_image_url_original": "https://pleroma.soykaf.com/images/avi.png",
  "profile_image_url_profile_size": "https://pleroma.soykaf.com/images/avi.png",
  "rights": {
    "delete_others_notice": false
  },
  "screen_name": "lain",
  "statuses_count": 0,
  "statusnet_blocking": false,
  "statusnet_profile_url": "https://pleroma.soykaf.com/users/lain"
}
```

## `/api/pleroma/admin/`…
See [Admin-API](Admin-API.md)

## `/api/v1/pleroma/flavour/:flavour`
* Method `POST`
* Authentication: required
* Response: JSON string. Returns the user flavour or the default one on success, otherwise returns `{"error": "error_msg"}`
* Example response: "glitch"
* Note: This is intended to be used only by mastofe

## `/api/v1/pleroma/flavour`
* Method `GET`
* Authentication: required
* Response: JSON string. Returns the user flavour or the default one.
* Example response: "glitch"
* Note: This is intended to be used only by mastofe
17
docs/admin/backup.md
Normal file

@ -0,0 +1,17 @@

# Backup/Restore your instance

## Backup

1. Stop the Pleroma service.
2. Go to the working directory of Pleroma (default is `/opt/pleroma`)
3. Run `sudo -Hu postgres pg_dump -d <pleroma_db> --format=custom -f </path/to/backup_location/pleroma.pgdump>`
4. Copy `pleroma.pgdump`, `config/prod.secret.exs` and the `uploads` folder to your backup destination. If you have other modifications, copy those changes too.
5. Restart the Pleroma service.

## Restore

1. Stop the Pleroma service.
2. Go to the working directory of Pleroma (default is `/opt/pleroma`)
3. Copy the above mentioned files back to their original position.
4. Run `sudo -Hu postgres pg_restore -d <pleroma_db> -v -1 </path/to/backup_location/pleroma.pgdump>`
5. Restart the Pleroma service.
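The backup steps can be strung together into a single session. The sketch below is only an illustration: it assumes a systemd-managed service called `pleroma`, a database named `pleroma`, and `/var/backups` as the backup destination, so adjust those names to your own setup.

```sh
# Backup sketch: stop the service, dump the database, copy config and uploads, restart
sudo systemctl stop pleroma
cd /opt/pleroma
sudo -Hu postgres pg_dump -d pleroma --format=custom -f /var/backups/pleroma.pgdump
cp config/prod.secret.exs /var/backups/
cp -r uploads /var/backups/
sudo systemctl start pleroma
```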
9
docs/admin/updating.md
Normal file

@ -0,0 +1,9 @@

# Updating your instance

1. Go to the working directory of Pleroma (default is `/opt/pleroma`)
2. Run `git pull`. This pulls the latest changes from upstream.
3. Run `mix deps.get`. This pulls in any new dependencies.
4. Stop the Pleroma service.
5. Run `mix ecto.migrate`[^1]. This task performs database migrations, if there were any.
6. Start the Pleroma service.

[^1]: Prefix with `MIX_ENV=prod` to run it using the production config file.
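As a rough sketch, the whole update sequence looks like this for a systemd-managed instance running the production config; the service name `pleroma` is an assumption, not part of the instructions above.

```sh
# Update sketch: pull, fetch deps, stop, migrate, start
cd /opt/pleroma
git pull
mix deps.get
sudo systemctl stop pleroma
MIX_ENV=prod mix ecto.migrate
sudo systemctl start pleroma
```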
@ -8,10 +8,15 @@ Authentication is required and the user must be an admin.

- Method `GET`
- Query Params:
  - `query`: **string** *optional* search term
  - *optional* `query`: **string** search term
  - `local_only`: **bool** *optional* whether to return only local users
  - *optional* `filters`: **string** comma-separated string of filters:
  - `page`: **integer** *optional* page number
    - `local`: only local users
  - `page_size`: **integer** *optional* number of users per page (default is `50`)
    - `external`: only external users
    - `active`: only active users
    - `deactivated`: only deactivated users
  - *optional* `page`: **integer** page number
  - *optional* `page_size`: **integer** number of users per page (default is `50`)
- Example: `https://mypleroma.org/api/pleroma/admin/users?query=john&filters=local,active&page=1&page_size=10`
- Response:

```JSON

@ -22,7 +27,13 @@ Authentication is required and the user must be an admin.

{
  "deactivated": bool,
  "id": integer,
  "nickname": string
  "nickname": string,
  "roles": {
    "admin": bool,
    "moderator": bool
  },
  "local": bool,
  "tags": array
},
...
]
```

@ -47,6 +58,26 @@

- `password`
- Response: User’s nickname

## `/api/pleroma/admin/user/follow`
### Make a user follow another user

- Methods: `POST`
- Params:
  - `follower`: The nickname of the follower
  - `followed`: The nickname of the followed
- Response:
  - "ok"

## `/api/pleroma/admin/user/unfollow`
### Make a user unfollow another user

- Methods: `POST`
- Params:
  - `follower`: The nickname of the follower
  - `followed`: The nickname of the followed
- Response:
  - "ok"
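For illustration, both endpoints can be exercised with `curl`; the host, the admin credentials (HTTP Basic Authentication, which Pleroma accepts alongside OAuth) and the nicknames below are placeholders.

```sh
# Make "alice" follow "bob", then undo it again; each call returns "ok" on success
curl -u admin:password -X POST https://mypleroma.org/api/pleroma/admin/user/follow \
  -d "follower=alice" -d "followed=bob"
curl -u admin:password -X POST https://mypleroma.org/api/pleroma/admin/user/unfollow \
  -d "follower=alice" -d "followed=bob"
```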
## `/api/pleroma/admin/users/:nickname/toggle_activation`

### Toggle user activation

@ -99,7 +130,7 @@ Authentication is required and the user must be an admin.

Note: Available `:permission_group` is currently moderator and admin. 404 is returned when the permission group doesn’t exist.

### Get user user permission groups membership
### Get user user permission groups membership per permission group

- Method: `GET`
- Params: none

@ -138,6 +169,17 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is returned when the permission group doesn’t exist.

- `nickname`
- `status` BOOLEAN field, false value means deactivation.

## `/api/pleroma/admin/users/:nickname`

### Retrieve the details of a user

- Method: `GET`
- Params:
  - `nickname`
- Response:
  - On failure: `Not found`
  - On success: JSON of the user

## `/api/pleroma/admin/relay`

### Follow a Relay

@ -158,11 +200,64 @@ Note: Available `:permission_group` is currently moderator and admin. 404 is returned when the permission group doesn’t exist.

## `/api/pleroma/admin/invite_token`

### Get a account registeration invite token
### Get an account registration invite token

- Methods: `GET`
- Params:
  - *optional* `invite` => [
    - *optional* `max_use` (integer)
    - *optional* `expires_at` (date string e.g. "2019-04-07")
  ]
- Response: invite token (base64 string)

## `/api/pleroma/admin/invites`

### Get a list of generated invites

- Methods: `GET`
- Params: none
- Response: invite token (base64 string)
- Response:

```JSON
{
  "invites": [
    {
      "id": integer,
      "token": string,
      "used": boolean,
      "expires_at": date,
      "uses": integer,
      "max_use": integer,
      "invite_type": string (possible values: `one_time`, `reusable`, `date_limited`, `reusable_date_limited`)
    },
    ...
  ]
}
```

## `/api/pleroma/admin/revoke_invite`

### Revoke invite by token

- Methods: `POST`
- Params:
  - `token`
- Response:

```JSON
{
  "id": integer,
  "token": string,
  "used": boolean,
  "expires_at": date,
  "uses": integer,
  "max_use": integer,
  "invite_type": string (possible values: `one_time`, `reusable`, `date_limited`, `reusable_date_limited`)
}
```
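A rough usage sketch of the three invite endpoints with `curl`; the host and credentials are placeholders, and the bracketed query syntax for the optional nested `invite` fields is an assumption about how they are passed, not something stated above.

```sh
# Generate a token limited to 5 uses that expires on 2019-04-07 (nested-param syntax assumed)
curl -u admin:password "https://mypleroma.org/api/pleroma/admin/invite_token?invite[max_use]=5&invite[expires_at]=2019-04-07"
# List every invite generated so far
curl -u admin:password "https://mypleroma.org/api/pleroma/admin/invites"
# Revoke an invite by its token
curl -u admin:password -X POST "https://mypleroma.org/api/pleroma/admin/revoke_invite" -d "token=<token>"
```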
## `/api/pleroma/admin/email_invite`

@ -171,7 +266,7 @@

- Methods: `POST`
- Params:
  - `email`
  - `name`, optionnal
  - `name`, optional

## `/api/pleroma/admin/password_reset`
82
docs/api/differences_in_mastoapi_responses.md
Normal file

@ -0,0 +1,82 @@

# Differences in Mastodon API responses from vanilla Mastodon

A Pleroma instance can be identified by "<Mastodon version> (compatible; Pleroma <version>)" present in the `version` field in the response from `/api/v1/instance`

## Flake IDs

Pleroma uses 128-bit ids as opposed to Mastodon's 64 bits. However, just like Mastodon's ids, they are sortable strings

## Attachment cap

Some apps operate under the assumption that no more than 4 attachments can be returned or uploaded. Pleroma however does not enforce any limits on attachment count, neither when returning the status object nor when posting.

## Timelines

Adding the parameter `with_muted=true` to the timeline queries will also return activities by muted (not by blocked!) users.

## Statuses

Has these additional fields under the `pleroma` object:

- `local`: true if the post was made on the local instance.
- `conversation_id`: the ID of the conversation the status is associated with (if any)
- `in_reply_to_account_acct`: the `acct` property of User entity for replied user (if any)
- `content`: a map consisting of alternate representations of the `content` property with the key being its mimetype. Currently the only alternate representation supported is `text/plain`
- `spoiler_text`: a map consisting of alternate representations of the `spoiler_text` property with the key being its mimetype. Currently the only alternate representation supported is `text/plain`

## Attachments

Has these additional fields under the `pleroma` object:

- `mime_type`: mime type of the attachment.

## Accounts

- `/api/v1/accounts/:id`: The `id` parameter can also be the `nickname` of the user. This only works in this endpoint, not the deeper nested ones for following etc.

Has these additional fields under the `pleroma` object:

- `tags`: Lists an array of tags for the user
- `relationship{}`: Includes fields as documented for Mastodon API https://docs.joinmastodon.org/api/entities/#relationship
- `is_moderator`: boolean, nullable, true if user is a moderator
- `is_admin`: boolean, nullable, true if user is an admin
- `confirmation_pending`: boolean, true if a new user account is waiting on email confirmation to be activated
- `hide_followers`: boolean, true when the user has follower hiding enabled
- `hide_follows`: boolean, true when the user has follow hiding enabled

### Source

Has these additional fields under the `pleroma` object:

- `show_role`: boolean, nullable, true when the user wants their role (e.g. admin, moderator) to be shown
- `no_rich_text` - boolean, nullable, true when html tags are stripped from all statuses requested from the API

## Account Search

Behavior has changed:

- `/api/v1/accounts/search`: Does not require authentication

## Notifications

Has these additional fields under the `pleroma` object:

- `is_seen`: true if the notification was read by the user

## POST `/api/v1/statuses`

Additional parameters can be added to the JSON body/Form data:

- `preview`: boolean, if set to `true` the post won't actually be posted, but the status entity would still be rendered back. This could be useful for previewing rich text/custom emoji, for example.
- `content_type`: string, contains the MIME type of the status; it is transformed into HTML by the backend. You can get the list of the supported MIME types with the nodeinfo endpoint.
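For example, a Markdown draft can be rendered without being published by combining the two parameters; the host, token and status text below are placeholders.

```sh
# Preview a Markdown status without posting it
curl -X POST https://pleroma.example.org/api/v1/statuses \
  -H "Authorization: Bearer <token>" \
  -d "status=*hello* from pleroma" \
  -d "content_type=text/markdown" \
  -d "preview=true"
```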
## PATCH `/api/v1/update_credentials`

Additional parameters can be added to the JSON body/Form data:

- `no_rich_text` - if true, html tags are stripped from all statuses requested from the API
- `hide_followers` - if true, user's followers will be hidden
- `hide_follows` - if true, user's follows will be hidden
- `hide_favorites` - if true, user's favorites timeline will be hidden
- `show_role` - if true, user's role (e.g admin, moderator) will be exposed to anyone in the API
- `default_scope` - the scope returned under `privacy` key in Source subentity
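A minimal sketch of setting two of these flags at once; the host and token are placeholders.

```sh
# Strip rich text from fetched statuses and hide the follower list
curl -X PATCH https://pleroma.example.org/api/v1/update_credentials \
  -H "Authorization: Bearer <token>" \
  -d "no_rich_text=true" \
  -d "hide_followers=true"
```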
272
docs/api/pleroma_api.md
Normal file

@ -0,0 +1,272 @@

# Pleroma API

Requests that require it can be authenticated with [an OAuth token](https://tools.ietf.org/html/rfc6749), the `_pleroma_key` cookie, or [HTTP Basic Authentication](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Authorization).

Request parameters can be passed via [query strings](https://en.wikipedia.org/wiki/Query_string) or as [form data](https://www.w3.org/TR/html401/interact/forms.html). Files must be uploaded as `multipart/form-data`.

## `/api/pleroma/emoji`
### Lists the custom emoji on that server.
* Method: `GET`
* Authentication: not required
* Params: none
* Response: JSON
* Example response:
```json
{
  "girlpower": {
    "tags": [
      "Finmoji"
    ],
    "image_url": "/finmoji/128px/girlpower-128.png"
  },
  "education": {
    "tags": [
      "Finmoji"
    ],
    "image_url": "/finmoji/128px/education-128.png"
  },
  "finnishlove": {
    "tags": [
      "Finmoji"
    ],
    "image_url": "/finmoji/128px/finnishlove-128.png"
  }
}
```
* Note: Same data as Mastodon API’s `/api/v1/custom_emojis` but in a different format

## `/api/pleroma/follow_import`
### Imports your follows, for example from a Mastodon CSV file.
* Method: `POST`
* Authentication: required
* Params:
    * `list`: STRING or FILE containing a whitespace-separated list of accounts to follow
* Response: HTTP 200 on success, 500 on error
* Note: Users that can't be followed are silently skipped.
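For example, a Mastodon CSV export can be submitted as a file upload; the host, token and filename are placeholders.

```sh
# Import follows from a CSV file using multipart/form-data
curl -X POST https://pleroma.example.org/api/pleroma/follow_import \
  -H "Authorization: Bearer <token>" \
  -F "list=@following_accounts.csv"
```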
## `/api/pleroma/captcha`
### Get a new captcha
* Method: `GET`
* Authentication: not required
* Params: none
* Response: Provider specific JSON, the only guaranteed parameter is `type`
* Example response: `{"type": "kocaptcha", "token": "whatever", "url": "https://captcha.kotobank.ch/endpoint"}`

## `/api/pleroma/delete_account`
### Delete an account
* Method `POST`
* Authentication: required
* Params:
    * `password`: user's password
* Response: JSON. Returns `{"status": "success"}` if the deletion was successful, `{"error": "[error message]"}` otherwise
* Example response: `{"error": "Invalid password."}`
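A minimal example request; the host, token and password are placeholders.

```sh
# Delete the authenticated account
curl -X POST https://pleroma.example.org/api/pleroma/delete_account \
  -H "Authorization: Bearer <token>" \
  -d "password=<your password>"
```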
## `/api/account/register`
### Register a new user
* Method `POST`
* Authentication: not required
* Params:
    * `nickname`
    * `fullname`
    * `bio`
    * `email`
    * `password`
    * `confirm`
    * `captcha_solution`: optional, contains provider-specific captcha solution,
    * `captcha_token`: optional, contains provider-specific captcha token
    * `token`: invite token required when the registrations aren't public.
* Response: JSON. Returns a user object on success, otherwise returns `{"error": "error_msg"}`
* Example response:
```json
{
  "background_image": null,
  "cover_photo": "https://pleroma.soykaf.com/images/banner.png",
  "created_at": "Tue Dec 18 16:55:56 +0000 2018",
  "default_scope": "public",
  "description": "blushy-crushy fediverse idol + pleroma dev\nlet's be friends \nぷれろまの生徒会長。謎の外人。日本語OK. \n公主病.",
  "description_html": "blushy-crushy fediverse idol + pleroma dev.<br />let's be friends <br />ぷれろまの生徒会長。謎の外人。日本語OK. <br />公主病.",
  "favourites_count": 0,
  "fields": [],
  "followers_count": 0,
  "following": false,
  "follows_you": false,
  "friends_count": 0,
  "id": 6,
  "is_local": true,
  "locked": false,
  "name": "lain",
  "name_html": "lain",
  "no_rich_text": false,
  "pleroma": {
    "tags": []
  },
  "profile_image_url": "https://pleroma.soykaf.com/images/avi.png",
  "profile_image_url_https": "https://pleroma.soykaf.com/images/avi.png",
  "profile_image_url_original": "https://pleroma.soykaf.com/images/avi.png",
  "profile_image_url_profile_size": "https://pleroma.soykaf.com/images/avi.png",
  "rights": {
    "delete_others_notice": false
  },
  "screen_name": "lain",
  "statuses_count": 0,
  "statusnet_blocking": false,
  "statusnet_profile_url": "https://pleroma.soykaf.com/users/lain"
}
```

## `/api/pleroma/admin/`…
See [Admin-API](Admin-API.md)

## `/api/v1/pleroma/flavour/:flavour`
* Method `POST`
* Authentication: required
* Response: JSON string. Returns the user flavour or the default one on success, otherwise returns `{"error": "error_msg"}`
* Example response: "glitch"
* Note: This is intended to be used only by mastofe

## `/api/v1/pleroma/flavour`
* Method `GET`
* Authentication: required
* Response: JSON string. Returns the user flavour or the default one.
* Example response: "glitch"
* Note: This is intended to be used only by mastofe

## `/api/pleroma/notifications/read`
### Mark a single notification as read
* Method `POST`
* Authentication: required
* Params:
    * `id`: notification's id
* Response: JSON. Returns `{"status": "success"}` if the reading was successful, otherwise returns `{"error": "error_msg"}`

## `/api/v1/pleroma/accounts/:id/subscribe`
### Subscribe to receive notifications for all statuses posted by a user
* Method `POST`
* Authentication: required
* Params:
    * `id`: account id to subscribe to
* Response: JSON, returns a mastodon relationship object on success, otherwise returns `{"error": "error_msg"}`
* Example response:
```json
{
  "id": "abcdefg",
  "following": true,
  "followed_by": false,
  "blocking": false,
  "muting": false,
  "muting_notifications": false,
  "subscribing": true,
  "requested": false,
  "domain_blocking": false,
  "showing_reblogs": true,
  "endorsed": false
}
```

## `/api/v1/pleroma/accounts/:id/unsubscribe`
### Unsubscribe to stop receiving notifications from user statuses
* Method `POST`
* Authentication: required
* Params:
    * `id`: account id to unsubscribe from
* Response: JSON, returns a mastodon relationship object on success, otherwise returns `{"error": "error_msg"}`
* Example response:
```json
{
  "id": "abcdefg",
  "following": true,
  "followed_by": false,
  "blocking": false,
  "muting": false,
  "muting_notifications": false,
  "subscribing": false,
  "requested": false,
  "domain_blocking": false,
  "showing_reblogs": true,
  "endorsed": false
}
```

## `/api/v1/pleroma/accounts/:id/favourites`
### Returns favorites timeline of any user
* Method `GET`
* Authentication: not required
* Params:
    * `id`: the id of the account for whom to return results
    * `limit`: optional, the number of records to retrieve
    * `since_id`: optional, returns results that are more recent than the specified id
    * `max_id`: optional, returns results that are older than the specified id
* Response: JSON, returns a list of Mastodon Status entities on success, otherwise returns `{"error": "error_msg"}`
* Example response:
```json
[
  {
    "account": {
      "id": "9hptFmUF3ztxYh3Svg",
      "url": "https://pleroma.example.org/users/nick2",
      "username": "nick2",
      ...
    },
    "application": {"name": "Web", "website": null},
    "bookmarked": false,
    "card": null,
    "content": "This is :moominmamma: note 0",
    "created_at": "2019-04-15T15:42:15.000Z",
    "emojis": [],
    "favourited": false,
    "favourites_count": 1,
    "id": "9hptFmVJ02khbzYJaS",
    "in_reply_to_account_id": null,
    "in_reply_to_id": null,
    "language": null,
    "media_attachments": [],
    "mentions": [],
    "muted": false,
    "pinned": false,
    "pleroma": {
      "content": {"text/plain": "This is :moominmamma: note 0"},
      "conversation_id": 13679,
      "local": true,
      "spoiler_text": {"text/plain": "2hu"}
    },
    "reblog": null,
    "reblogged": false,
    "reblogs_count": 0,
    "replies_count": 0,
    "sensitive": false,
    "spoiler_text": "2hu",
    "tags": [{"name": "2hu", "url": "/tag/2hu"}],
    "uri": "https://pleroma.example.org/objects/198ed2a1-7912-4482-b559-244a0369e984",
    "url": "https://pleroma.example.org/notice/9hptFmVJ02khbzYJaS",
    "visibility": "public"
  }
]
```

## `/api/pleroma/notification_settings`
### Updates user notification settings
* Method `PUT`
* Authentication: required
* Params:
    * `followers`: BOOLEAN field, receives notifications from followers
    * `follows`: BOOLEAN field, receives notifications from people the user follows
    * `remote`: BOOLEAN field, receives notifications from people on remote instances
    * `local`: BOOLEAN field, receives notifications from people on the local instance
* Response: JSON. Returns `{"status": "success"}` if the update was successful, otherwise returns `{"error": "error_msg"}`
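For example, to keep notifications from followed and local users while muting the rest; the host and token are placeholders.

```sh
# Only receive notifications from people the user follows and from the local instance
curl -X PUT https://pleroma.example.org/api/pleroma/notification_settings \
  -H "Authorization: Bearer <token>" \
  -d "followers=false" -d "follows=true" -d "remote=false" -d "local=true"
```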
## `/api/pleroma/healthcheck`
### Healthcheck endpoint with additional system data.
* Method `GET`
* Authentication: not required
* Params: none
* Response: JSON, statuses (200 - healthy, 503 unhealthy).
* Example response:
```json
{
  "pool_size": 0, # database connection pool
  "active": 0, # active processes
  "idle": 0, # idle processes
  "memory_used": 0.00, # Memory used
  "healthy": true # Instance state
}
```

22
docs/api/prometheus.md
Normal file

@ -0,0 +1,22 @@

# Prometheus Metrics

Pleroma includes support for exporting metrics via the [prometheus_ex](https://github.com/deadtrickster/prometheus.ex) library.

## `/api/pleroma/app_metrics`
### Exports Prometheus application metrics
* Method: `GET`
* Authentication: not required
* Params: none
* Response: JSON
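Since the endpoint is unauthenticated, the exported metrics can be inspected by hand before wiring up a scraper; the host below is a placeholder.

```sh
# Fetch the exported application metrics directly
curl https://pleroma.example.org/api/pleroma/app_metrics
```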
|
||||||
|
|
||||||
|
## Grafana
|
||||||
|
### Config example
|
||||||
|
The following is a config example to use with [Grafana](https://grafana.com)
|
||||||
|
|
||||||
|
```
|
||||||
|
- job_name: 'beam'
|
||||||
|
metrics_path: /api/pleroma/app_metrics
|
||||||
|
scheme: https
|
||||||
|
static_configs:
|
||||||
|
- targets: ['pleroma.soykaf.com']
|
||||||
|
```
|
195
docs/config.md
195
docs/config.md
|
@ -6,6 +6,7 @@ If you run Pleroma with ``MIX_ENV=prod`` the file is ``prod.secret.exs``, otherw
|
||||||
## Pleroma.Upload
|
## Pleroma.Upload
|
||||||
* `uploader`: Select which `Pleroma.Uploaders` to use
|
* `uploader`: Select which `Pleroma.Uploaders` to use
|
||||||
* `filters`: List of `Pleroma.Upload.Filter` to use.
|
* `filters`: List of `Pleroma.Upload.Filter` to use.
|
||||||
|
* `link_name`: When enabled Pleroma will add a `name` parameter to the url of the upload, for example `https://instance.tld/media/corndog.png?name=corndog.png`. This is needed to provide the correct filename in Content-Disposition headers when using filters like `Pleroma.Upload.Filter.Dedupe`
|
||||||
* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host.
|
* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host.
|
||||||
* `proxy_remote`: If you\'re using a remote uploader, Pleroma will proxy media requests instead of redirecting to it.
|
* `proxy_remote`: If you\'re using a remote uploader, Pleroma will proxy media requests instead of redirecting to it.
|
||||||
* `proxy_opts`: Proxy options, see `Pleroma.ReverseProxy` documentation.
|
* `proxy_opts`: Proxy options, see `Pleroma.ReverseProxy` documentation.
|
||||||
|
@ -30,14 +31,14 @@ This filter replaces the filename (not the path) of an upload. For complete obfu

* `text`: Text to replace filenames in links. If empty, `{random}.extension` will be used.

## Pleroma.Emails.Mailer
* `adapter`: one of the mail adapters listed in [Swoosh readme](https://github.com/swoosh/swoosh#adapters), or `Swoosh.Adapters.Local` for in-memory mailbox.
* `api_key` / `password` and / or other adapter-specific settings, per the above documentation.

An example for the Sendgrid adapter:

```exs
config :pleroma, Pleroma.Emails.Mailer,
  adapter: Swoosh.Adapters.Sendgrid,
  api_key: "YOUR_API_KEY"
```
@ -45,7 +46,7 @@ config :pleroma, Pleroma.Mailer,

An example for the SMTP adapter:

```exs
config :pleroma, Pleroma.Emails.Mailer,
  adapter: Swoosh.Adapters.SMTP,
  relay: "smtp.gmail.com",
  username: "YOUR_USERNAME@gmail.com",
@ -62,6 +63,7 @@ config :pleroma, Pleroma.Mailer,
## :instance
* `name`: The instance’s name
* `email`: Email used to reach an Administrator/Moderator of the instance
* `notify_email`: Email used for notifications.
* `description`: The instance’s description, can be seen in nodeinfo and ``/api/v1/instance``
* `limit`: Posts character limit (CW/Subject included in the counter)
* `remote_limit`: Hard character limit beyond which remote posts will be dropped.

@ -85,7 +87,6 @@ config :pleroma, Pleroma.Mailer,
* `quarantined_instances`: List of ActivityPub instances where private (DMs, followers-only) activities will not be sent.
* `managed_config`: Whether the config for pleroma-fe is configured in this config or in ``static/config.json``
* `allowed_post_formats`: MIME-type list of formats allowed to be posted (transformed into HTML)
* `mrf_transparency`: Make the content of your Message Rewrite Facility settings public (via nodeinfo).
* `scope_copy`: Copy the scope (private/unlisted/public) in replies to posts by default.
* `subject_line_behavior`: Allows changing the default behaviour of subject lines in replies. Valid values:

@ -100,10 +101,12 @@ config :pleroma, Pleroma.Mailer,
* `no_attachment_links`: Set to true to disable automatically adding attachment link text to statuses
* `welcome_message`: A message that will be sent to newly registered users as a direct message.
* `welcome_user_nickname`: The nickname of the local user that sends the welcome message.
* `max_report_comment_size`: The maximum size of the report comment (Default: `1000`)
* `safe_dm_mentions`: If set to true, only mentions at the beginning of a post will be used to address people in direct messages. This is to prevent accidental mentioning of people when talking about them (e.g. "@friend hey i really don't like @enemy"). (Default: `false`)
* `healthcheck`: if set to true, system data will be shown on ``/api/pleroma/healthcheck``.
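As a reference, a minimal `:instance` sketch using only options described above could look like this; all values are placeholders.

```elixir
# Minimal sketch; every value here is a placeholder.
config :pleroma, :instance,
  name: "My Pleroma Instance",
  email: "admin@example.tld",
  notify_email: "noreply@example.tld",
  limit: 5000,
  healthcheck: true
```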
## :logger
* `backends`: `:console` is used to send logs to stdout, `{ExSyslogger, :ex_syslogger}` to log to syslog, and `Quack.Logger` to log to Slack

An example to enable ONLY ExSyslogger (f/ex in ``prod.secret.exs``) with info and debug suppressed:
```

@ -126,6 +129,24 @@ config :logger, :ex_syslogger,

See: [logger’s documentation](https://hexdocs.pm/logger/Logger.html) and [ex_syslogger’s documentation](https://hexdocs.pm/ex_syslogger/)

An example of logging info to local syslog, but warn to a Slack channel:
```
config :logger,
  backends: [ {ExSyslogger, :ex_syslogger}, Quack.Logger ],
  level: :info

config :logger, :ex_syslogger,
  level: :info,
  ident: "pleroma",
  format: "$metadata[$level] $message"

config :quack,
  level: :warn,
  meta: [:all],
  webhook_url: "https://hooks.slack.com/services/YOUR-API-KEY-HERE"
```

See the [Quack Github](https://github.com/azohra/quack) for more details

## :frontend_configurations
@ -184,11 +205,53 @@ This section is used to configure Pleroma-FE, unless ``:managed_config`` in ``:i
* `enabled`: Enables proxying of remote media to the instance’s proxy
* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host/CDN fronts.
* `proxy_opts`: All options defined in `Pleroma.ReverseProxy` documentation, defaults to `[max_body_length: (25*1_048_576)]`.
* `whitelist`: List of domains to bypass the mediaproxy
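For illustration, a sketch of a media proxy config combining the options above; the hostnames are placeholders.

```elixir
# Sketch only; hostnames are placeholders.
config :pleroma, :media_proxy,
  enabled: true,
  base_url: "https://cache.example.tld",
  whitelist: ["mycdn.example.tld"]
```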
## :gopher
* `enabled`: Enables the gopher interface
* `ip`: IP address to bind to
* `port`: Port to bind to
* `dstport`: Port advertised in urls (optional, defaults to `port`)
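A sketch of a gopher config using these options; the values are placeholders, with the IP written as a tuple like the endpoint `ip` option elsewhere in this file.

```elixir
# Sketch only; bind address and port are placeholders.
config :pleroma, :gopher,
  enabled: true,
  ip: {0, 0, 0, 0},
  port: 9999
```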
## Pleroma.Web.Endpoint
`Phoenix` endpoint configuration; all configuration options can be viewed [here](https://hexdocs.pm/phoenix/Phoenix.Endpoint.html#module-dynamic-configuration), only common options are listed here
* `http` - a list containing http protocol configuration; all configuration options can be viewed [here](https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html#module-options), only common options are listed here
  - `ip` - a tuple consisting of 4 integers
  - `port`
* `url` - a list containing the configuration for generating urls, accepts
  - `host` - the host without the scheme and a port (e.g `example.com`, not `https://example.com:2020`)
  - `scheme` - e.g `http`, `https`
  - `port`
  - `path`
* `extra_cookie_attrs` - a list of `Key=Value` strings to be added as non-standard cookie attributes. Defaults to `["SameSite=Lax"]`. See the [SameSite article](https://www.owasp.org/index.php/SameSite) on OWASP for more info.

**Important note**: if you modify anything inside these lists, default `config.exs` values will be overwritten, which may result in breakage. To make sure this does not happen, please copy the default value for the list from `config.exs` and modify/add only what you need.

Example:
```elixir
config :pleroma, Pleroma.Web.Endpoint,
  url: [host: "example.com", port: 2020, scheme: "https"],
  http: [
    # start copied from config.exs
    dispatch: [
      {:_,
       [
         {"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
         {"/websocket", Phoenix.Endpoint.CowboyWebSocket,
          {Phoenix.Transports.WebSocket,
           {Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, websocket_config}}},
         {:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}}
       ]}
      # end copied from config.exs
    ],
    port: 8080,
    ip: {127, 0, 0, 1}
  ]
```

This will make Pleroma listen on `127.0.0.1` port `8080` and generate urls starting with `https://example.com:2020`

## :activitypub
* ``accept_blocks``: Whether to accept incoming block activities from other instances
@ -250,25 +313,29 @@ You can then do
curl "http://localhost:4000/api/pleroma/admin/invite_token?admin_token=somerandomtoken"
```

## :pleroma_job_queue

[Pleroma Job Queue](https://git.pleroma.social/pleroma/pleroma_job_queue) configuration: a list of queues with maximum concurrent jobs.

Pleroma has the following queues:

* `federator_outgoing` - Outgoing federation
* `federator_incoming` - Incoming federation
* `mailer` - Email sender, see [`Pleroma.Emails.Mailer`](#pleroma-emails-mailer)
* `transmogrifier` - Transmogrifier
* `web_push` - Web push notifications
* `scheduled_activities` - Scheduled activities, see [`Pleroma.ScheduledActivities`](#pleromascheduledactivity)

Example:

```elixir
config :pleroma_job_queue, :queues,
  federator_incoming: 50,
  federator_outgoing: 50
```

This config contains two queues: `federator_incoming` and `federator_outgoing`. Both are limited to 50 concurrent jobs.

## Pleroma.Web.Federator.RetryQueue

* `enabled`: If set to `true`, failed federation jobs will be retried
@ -277,9 +344,10 @@ This config contains two queues: `federator_incoming` and `federator_outgoing`.
* `max_retries`: The maximum number of times a federation job is retried
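A sketch combining the two options documented here; the retry count is a placeholder.

```elixir
# Sketch only; max_retries value is a placeholder.
config :pleroma, Pleroma.Web.Federator.RetryQueue,
  enabled: true,
  max_retries: 5
```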
## Pleroma.Web.Metadata
* `providers`: a list of metadata providers to enable. Providers available:
  * Pleroma.Web.Metadata.Providers.OpenGraph
  * Pleroma.Web.Metadata.Providers.TwitterCard
  * Pleroma.Web.Metadata.Providers.RelMe - add links from user bio with rel=me into the `<header>` as `<link rel=me>`
* `unfurl_nsfw`: If set to `true` nsfw attachments will be shown in previews
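For example, enabling the first two providers listed above while keeping NSFW previews off could be sketched as:

```elixir
# Sketch only, using providers named in the list above.
config :pleroma, Pleroma.Web.Metadata,
  providers: [
    Pleroma.Web.Metadata.Providers.OpenGraph,
    Pleroma.Web.Metadata.Providers.TwitterCard
  ],
  unfurl_nsfw: false
```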
## :rich_media

@ -330,3 +398,98 @@ config :auto_linker,
    rel: false
  ]
```

## Pleroma.ScheduledActivity

* `daily_user_limit`: the number of scheduled activities a user is allowed to create in a single day (Default: `25`)
* `total_user_limit`: the number of scheduled activities a user is allowed to create in total (Default: `300`)
* `enabled`: whether scheduled activities are sent to the job queue to be executed
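A sketch using the defaults mentioned above:

```elixir
# Values are the documented defaults.
config :pleroma, Pleroma.ScheduledActivity,
  daily_user_limit: 25,
  total_user_limit: 300,
  enabled: true
```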
## Pleroma.Web.Auth.Authenticator

* `Pleroma.Web.Auth.PleromaAuthenticator`: default database authenticator
* `Pleroma.Web.Auth.LDAPAuthenticator`: LDAP authentication

## :ldap

Use LDAP for user authentication. When a user logs in to the Pleroma
instance, the name and password will be verified by trying to authenticate
(bind) to an LDAP server. If a user exists in the LDAP directory but there
is no account with the same name yet on the Pleroma instance then a new
Pleroma account will be created with the same name as the LDAP user name.

* `enabled`: enables LDAP authentication
* `host`: LDAP server hostname
* `port`: LDAP port, e.g. 389 or 636
* `ssl`: true to use SSL, usually implies the port 636
* `sslopts`: additional SSL options
* `tls`: true to start TLS, usually implies the port 389
* `tlsopts`: additional TLS options
* `base`: LDAP base, e.g. "dc=example,dc=com"
* `uid`: LDAP attribute name to authenticate the user, e.g. when "cn", the filter will be "cn=username,base"
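A sketch of an `:ldap` section using the options above; the host, base and uid values are placeholders taken from the examples in the list.

```elixir
# Sketch only; host, base and uid are placeholders.
config :pleroma, :ldap,
  enabled: true,
  host: "ldap.example.com",
  port: 389,
  ssl: false,
  base: "dc=example,dc=com",
  uid: "cn"
```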
## :auth

Authentication / authorization settings.

* `auth_template`: authentication form template. By default it's `show.html` which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/show.html.eex`.
* `oauth_consumer_template`: OAuth consumer mode authentication form template. By default it's `consumer.html` which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/consumer.html.eex`.
* `oauth_consumer_strategies`: the list of enabled OAuth consumer strategies; by default it's set by the OAUTH_CONSUMER_STRATEGIES environment variable.

# OAuth consumer mode

OAuth consumer mode allows sign in / sign up via external OAuth providers (e.g. Twitter, Facebook, Google, Microsoft, etc.).
Implementation is based on Ueberauth; see the list of [available strategies](https://github.com/ueberauth/ueberauth/wiki/List-of-Strategies).

Note: each strategy is shipped as a separate dependency; in order to get the strategies, run `OAUTH_CONSUMER_STRATEGIES="..." mix deps.get`,
e.g. `OAUTH_CONSUMER_STRATEGIES="twitter facebook google microsoft" mix deps.get`.
The server should also be started with `OAUTH_CONSUMER_STRATEGIES="..." mix phx.server` in case you enable any strategies.

Note: each strategy requires separate setup (on external provider side and Pleroma side). Below are the guidelines on setting up the most popular strategies.

Note: make sure that `"SameSite=Lax"` is set in `extra_cookie_attrs` when you have this feature enabled. OAuth consumer mode will not work with `"SameSite=Strict"`

* For Twitter, [register an app](https://developer.twitter.com/en/apps), configure callback URL to https://<your_host>/oauth/twitter/callback

* For Facebook, [register an app](https://developers.facebook.com/apps), configure callback URL to https://<your_host>/oauth/facebook/callback, enable Facebook Login service at https://developers.facebook.com/apps/<app_id>/fb-login/settings/

* For Google, [register an app](https://console.developers.google.com), configure callback URL to https://<your_host>/oauth/google/callback

* For Microsoft, [register an app](https://portal.azure.com), configure callback URL to https://<your_host>/oauth/microsoft/callback

Once the app is configured on the external OAuth provider side, add the app's credentials and strategy-specific settings (if any, e.g. see Microsoft below) to `config/prod.secret.exs`,
per strategy's documentation (e.g. [ueberauth_twitter](https://github.com/ueberauth/ueberauth_twitter)). Example config based on environment variables:

```
# Twitter
config :ueberauth, Ueberauth.Strategy.Twitter.OAuth,
  consumer_key: System.get_env("TWITTER_CONSUMER_KEY"),
  consumer_secret: System.get_env("TWITTER_CONSUMER_SECRET")

# Facebook
config :ueberauth, Ueberauth.Strategy.Facebook.OAuth,
  client_id: System.get_env("FACEBOOK_APP_ID"),
  client_secret: System.get_env("FACEBOOK_APP_SECRET"),
  redirect_uri: System.get_env("FACEBOOK_REDIRECT_URI")

# Google
config :ueberauth, Ueberauth.Strategy.Google.OAuth,
  client_id: System.get_env("GOOGLE_CLIENT_ID"),
  client_secret: System.get_env("GOOGLE_CLIENT_SECRET"),
  redirect_uri: System.get_env("GOOGLE_REDIRECT_URI")

# Microsoft
config :ueberauth, Ueberauth.Strategy.Microsoft.OAuth,
  client_id: System.get_env("MICROSOFT_CLIENT_ID"),
  client_secret: System.get_env("MICROSOFT_CLIENT_SECRET")

config :ueberauth, Ueberauth,
  providers: [
    microsoft: {Ueberauth.Strategy.Microsoft, [callback_params: []]}
  ]
```
||||||
|
|
||||||
|
## :emoji
* `shortcode_globs`: Location of custom emoji files. `*` can be used as a wildcard. Example `["/emoji/custom/**/*.png"]`
* `groups`: Emojis are ordered in groups (tags). This is an array of key-value pairs where the key is the group name and the value is the location or an array of locations. `*` can be used as a wildcard. Example `[Custom: ["/emoji/*.png", "/emoji/custom/*.png"]]`
* `default_manifest`: Location of the JSON-manifest. This manifest contains information about the emoji-packs you can download. Currently only one manifest can be added (no arrays).
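Putting the options together; the glob and group values are the examples given above, and the manifest URL is a hypothetical placeholder.

```elixir
# Sketch only; the manifest URL is a hypothetical placeholder.
config :pleroma, :emoji,
  shortcode_globs: ["/emoji/custom/**/*.png"],
  groups: [Custom: ["/emoji/*.png", "/emoji/custom/*.png"]],
  default_manifest: "https://example.tld/emoji-index.json"
```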
||||||
|
|
17 docs/config/General-tips-for-customizing-Pleroma-FE.md Normal file
@ -0,0 +1,17 @@
# General tips for customizing Pleroma FE
There are some configuration scripts for Pleroma BE and FE:

1. `config/prod.secret.exs`
1. `config/config.exs`
1. `priv/static/static/config.json`

`prod.secret.exs` takes precedence, `config.exs` provides the fallback/default values, and `config.json` is for instances running Pleroma-FE on a GNU social backend.

Usually all you have to do is:

1. Copy the section in the `config/config.exs` which you want to activate.
1. Paste it into `config/prod.secret.exs`.
1. Edit `config/prod.secret.exs`.
1. Restart the Pleroma daemon.

`prod.secret.exs` is for the `MIX_ENV=prod` environment; `dev.secret.exs` is for the `MIX_ENV=dev` environment.
68 docs/config/custom_emoji.md Normal file
@ -0,0 +1,68 @@
# Custom Emoji

Before you add your own custom emoji, check if they are available in an existing pack.
See `Mix.Tasks.Pleroma.Emoji` for information about emoji packs.

To add custom emoji:
* Create the `STATIC-DIR/emoji/` directory if it doesn't exist
  (`STATIC-DIR` is configurable, `instance/static/` by default)
* Create a directory with whatever name you want (`custom` is a good name to show its purpose).
  This will create a local emoji pack.
* Put your `.png` emoji files in that directory. In case of conflicts, you can create an `emoji.txt`
  file in that directory and specify a custom shortcode using the following format:
  `shortcode, file-path, tag1, tag2, etc`. One emoji per line. Note that if you do so,
  you'll have to list all other emojis in the pack too.
* Either restart Pleroma or connect to the iex session Pleroma is running in and
  run `Pleroma.Emoji.reload/0` in it.

Example:

image files (in `instance/static/emoji/custom`): `happy.png` and `sad.png`

content of `emoji.txt`:
```
happy, /emoji/custom/happy.png, Tag1,Tag2
sad, /emoji/custom/sad.png, Tag1
foo, /emoji/custom/foo.png
```

The files should be PNG (APNG is okay with `.png` for `image/png` Content-type) and under 50kb for compatibility with Mastodon.

Default file extensions and locations for emojis are set in `config.exs`. To use different locations or file extensions, add the `shortcode_globs` to your secrets file (`prod.secret.exs` or `dev.secret.exs`) and edit it. Note that not all fediverse software will show emojis with other file extensions:
```elixir
config :pleroma, :emoji, shortcode_globs: ["/emoji/custom/**/*.png", "/emoji/custom/**/*.gif"]
```

## Emoji tags (groups)

Default tags are set in `config.exs`. To set your own tags, copy the structure to your secrets file (`prod.secret.exs` or `dev.secret.exs`) and edit it.
```elixir
config :pleroma, :emoji,
  shortcode_globs: ["/emoji/custom/**/*.png"],
  groups: [
    Finmoji: "/finmoji/128px/*-128.png",
    Custom: ["/emoji/*.png", "/emoji/custom/*.png"]
  ]
```

Order of the `groups` matters, so to override default tags just put your group on top of the list. E.g:
```elixir
config :pleroma, :emoji,
  shortcode_globs: ["/emoji/custom/**/*.png"],
  groups: [
    "Finmoji special": "/finmoji/128px/a_trusted_friend-128.png", # special file
    "Cirno": "/emoji/custom/cirno*.png", # png files in /emoji/custom/ which start with `cirno`
    "Special group": "/emoji/custom/special_folder/*.png", # png files in /emoji/custom/special_folder/
    "Another group": "/emoji/custom/special_folder/*/*.png", # png files in /emoji/custom/special_folder/ subfolders
    Finmoji: "/finmoji/128px/*-128.png",
    Custom: ["/emoji/*.png", "/emoji/custom/*.png"]
  ]
```

Priority of tag assignment in emoji.txt and custom.txt:

`tag in file > special group setting in config.exs > default setting in config.exs`

Priority for globs:

`special group setting in config.exs > default setting in config.exs`
103 docs/config/hardening.md Normal file
@ -0,0 +1,103 @@
# Hardening your instance
Here are some suggestions which improve the security of parts of your Pleroma instance.

## Configuration file

These changes should go into `prod.secret.exs` or `dev.secret.exs`, depending on your `MIX_ENV` value.

### `http`

> Recommended value: `[ip: {127, 0, 0, 1}]`

This sets the Pleroma application server to only listen to the localhost interface. This way, you can only reach your server over the Internet by going through the reverse proxy. By default, Pleroma listens on all interfaces.
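In Pleroma's config this goes into the `Pleroma.Web.Endpoint` `http` options described in `docs/config.md`; a sketch, with the port as a placeholder:

```elixir
# Sketch only; port 4000 is a placeholder for your local listen port.
config :pleroma, Pleroma.Web.Endpoint,
  http: [ip: {127, 0, 0, 1}, port: 4000]
```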
### `secure_cookie_flag`

> Recommended value: `true`

This sets the `secure` flag on Pleroma’s session cookie. This makes sure that the cookie is only accepted over encrypted HTTPS connections. It also implicitly renames the cookie from `pleroma_key` to `__Host-pleroma-key`, which enforces some restrictions. (see [cookie prefixes](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie#Cookie_prefixes))

### `:http_security`

> Recommended value: `true`

This will send additional HTTP security headers to the clients, including:

* `X-XSS-Protection: "1; mode=block"`
* `X-Permitted-Cross-Domain-Policies: "none"`
* `X-Frame-Options: "DENY"`
* `X-Content-Type-Options: "nosniff"`
* `X-Download-Options: "noopen"`

A content security policy (CSP) will also be set:

```csp
content-security-policy:
  default-src 'none';
  base-uri 'self';
  frame-ancestors 'none';
  img-src 'self' data: https:;
  media-src 'self' https:;
  style-src 'self' 'unsafe-inline';
  font-src 'self';
  script-src 'self';
  connect-src 'self' wss://example.tld;
  manifest-src 'self';
  upgrade-insecure-requests;
```

#### `sts`

> Recommended value: `true`

An additional “Strict transport security” header will be sent with the configured `sts_max_age` parameter. This tells the browser that the domain should only be accessed over a secure HTTPS connection.

#### `ct_max_age`

An additional “Expect-CT” header will be sent with the configured `ct_max_age` parameter. This enforces the use of TLS certificates that are published in the certificate transparency log. (see [Expect-CT](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Expect-CT))

#### `referrer_policy`

> Recommended value: `same-origin`

If you click on a link, your browser’s request to the other site will include where it is coming from. The “Referrer policy” header tells the browser how and if it should send this information. (see [Referrer policy](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy))
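A sketch pulling together the `:http_security` options discussed in this section. The boolean values mirror the recommendations above; the `sts_max_age` and `ct_max_age` numbers are placeholders.

```elixir
# Sketch only; the max-age values are placeholders.
config :pleroma, :http_security,
  enabled: true,
  sts: true,
  sts_max_age: 31_536_000,   # placeholder: one year in seconds
  ct_max_age: 2_592_000,     # placeholder: 30 days in seconds
  referrer_policy: "same-origin"
```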
## systemd

A systemd unit example is provided at `installation/pleroma.service`.

### PrivateTmp

> Recommended value: `true`

Use private `/tmp` and `/var/tmp` folders inside a new file system namespace, which are discarded after the process stops.

### ProtectHome

> Recommended value: `true`

The `/home`, `/root`, and `/run/user` folders cannot be accessed by this service anymore. If your Pleroma user has its home folder in one of the restricted places, or uses one of these folders as its working directory, you have to set this to `false`.

### ProtectSystem

> Recommended value: `full`

Mount `/usr`, `/boot`, and `/etc` as read-only for processes invoked by this service.

### PrivateDevices

> Recommended value: `true`

Sets up a new `/dev` mount for the process and only adds API pseudo devices like `/dev/null`, `/dev/zero` or `/dev/random`, but not physical devices. This may not work on devices like the Raspberry Pi, where you need to set this to `false`.

### NoNewPrivileges

> Recommended value: `true`

Ensures that the service process and all its children can never gain new privileges through `execve()`.

### CapabilityBoundingSet

> Recommended value: `~CAP_SYS_ADMIN`

Drops the sysadmin capability from the daemon.
32 docs/config/howto_mediaproxy.md Normal file
@ -0,0 +1,32 @@
# How to activate mediaproxy
## Explanation

Without the `mediaproxy` function, Pleroma doesn't store any remote content like pictures, video etc. locally. So every time you open Pleroma, the content is loaded from the server the post came from. This can result in slowly loading content and/or increased bandwidth usage on the source server.
With the `mediaproxy` function you can use nginx's caching to cache this content, so users can access it faster, because it's loaded from your server.

## Activate it

* Edit your nginx config and add the following location:
```
location /proxy {
        proxy_cache pleroma_media_cache;
        proxy_cache_lock on;
        proxy_pass http://localhost:4000;
}
```
Also add the following on top of the configuration, outside of the `server` block:
```
proxy_cache_path /tmp/pleroma-media-cache levels=1:2 keys_zone=pleroma_media_cache:10m max_size=10g inactive=720m use_temp_path=off;
```
If you came here from one of the installation guides, take a look at the example configuration `/installation/pleroma.nginx`, where this part is already included.

* Append the following to your `prod.secret.exs` or `dev.secret.exs` (depending on which mode your instance is running in):
```
config :pleroma, :media_proxy,
      enabled: true,
      redirect_on_failure: true
      #base_url: "https://cache.pleroma.social"
```
If you want to use a subdomain to serve the files, uncomment `base_url`, change the url and add a comma after `true` in the previous line.

* Restart nginx and Pleroma
12 docs/config/howto_proxy.md Normal file
@ -0,0 +1,12 @@
# How to configure upstream proxy for federation
If you want to proxy all HTTP requests (e.g. for Tor) that Pleroma makes to an upstream proxy server, edit your config file (`dev.secret.exs` or `prod.secret.exs`) and add the following:

```
config :pleroma, :http,
  proxy_url: "127.0.0.1:8123"
```

Another way to do it: with Tor, for example, you would most likely add something like this:
```
config :pleroma, :http, proxy_url: {:socks5, :localhost, 9050}
```
31 docs/config/howto_user_recomendation.md Normal file
@ -0,0 +1,31 @@
# How to activate user recommendation (Who to follow panel)
![who-to-follow-panel-small](/uploads/9de1b1300436c32461d272945f1bc23e/who-to-follow-panel-small.png)

To show the *who to follow* panel, edit `config/prod.secret.exs` in the Pleroma backend. The following code activates the *who to follow* panel:

```elixir
config :pleroma, :suggestions,
  enabled: true,
  third_party_engine:
    "http://vinayaka.distsn.org/cgi-bin/vinayaka-user-match-suggestions-api.cgi?{{host}}+{{user}}",
  timeout: 300_000,
  limit: 23,
  web: "https://vinayaka.distsn.org/?{{host}}+{{user}}"

```

`config/config.exs` already includes this code, but `enabled:` is `false`.

`/api/v1/suggestions` is also provided when the *who to follow* panel is enabled.

For advanced customization, the following code shows newcomers of the fediverse in the *who to follow* panel:

```elixir
config :pleroma, :suggestions,
  enabled: true,
  third_party_engine:
    "http://vinayaka.distsn.org/cgi-bin/vinayaka-user-new-suggestions-api.cgi?{{host}}+{{user}}",
  timeout: 60_000,
  limit: 23,
  web: "https://vinayaka.distsn.org/user-new.html"
```
196 docs/config/i2p.md Normal file
@ -0,0 +1,196 @@
# I2P Federation and Accessibility

This guide is going to focus on the Pleroma federation aspect. The actual installation is neatly explained in the official documentation, and more likely to remain up-to-date.
It might be added to this guide if there is a need for that.

We're going to use I2PD because it is more lightweight than the official client.
Follow the documentation according to your distro: https://i2pd.readthedocs.io/en/latest/user-guide/install/#installing

How to run it: https://i2pd.readthedocs.io/en/latest/user-guide/run/

## I2P Federation

There are 2 ways to go about this.
One using the config, and one using external software (fedproxy). The external software works better so far.

### Using the Config

**Warning:** So far, every time I followed this way of federating using I2P, the rest of my federation stopped working. I'm leaving this here in case it will help with making it work.

Assuming you're running in prod, cd to your Pleroma folder and append the following to `config/prod.secret.exs`:
```
config :pleroma, :http, proxy_url: {:socks5, :localhost, 4447}
```
And then run the following:
```
su pleroma
MIX_ENV=prod mix deps.get
MIX_ENV=prod mix ecto.migrate
exit
```
You can restart I2PD here and finish if you don't wish to make your instance viewable or accessible over I2P.
```
systemctl stop i2pd.service --no-block
systemctl start i2pd.service
```
*Notice:* The stop command initiates a graceful shutdown process; i2pd stops after it finishes routing transit tunnels (up to 10 minutes).

You can change the socks proxy port in `/etc/i2pd/i2pd.conf`.

### Using Fedproxy

Fedproxy passes clearnet requests straight through to where they are going. It doesn't force anything over Tor.

To use [fedproxy](https://github.com/majestrate/fedproxy) you'll need to install Golang.
```
apt install golang
```
Use a different user than pleroma or root. Run the following to add the GOPATH to your ~/.bashrc.
```
echo "export GOPATH=/home/ren/.go" >> ~/.bashrc
```
Restart that bash session (you can exit and log back in).
Run the following to get fedproxy.
```
go get -u github.com/majestrate/fedproxy
cp $GOPATH/bin/fedproxy /usr/local/bin/fedproxy
```
And then the following to start it for I2P only.
```
fedproxy 127.0.0.1:2000 127.0.0.1:4447
```
If you want to also use it for Tor, add `127.0.0.1:9050` to that command.
You'll also need to modify your Pleroma config.

Assuming you're running in prod, cd to your Pleroma folder and append the following to `config/prod.secret.exs`:
```
config :pleroma, :http, proxy_url: {:socks5, :localhost, 2000}
```
And then run the following:
```
su pleroma
MIX_ENV=prod mix deps.get
MIX_ENV=prod mix ecto.migrate
exit
```
You can restart I2PD here and finish if you don't wish to make your instance viewable or accessible over I2P.

```
systemctl stop i2pd.service --no-block
systemctl start i2pd.service
```
*Notice:* The stop command initiates a graceful shutdown process; i2pd stops after it finishes routing transit tunnels (up to 10 minutes).

You can change the socks proxy port in `/etc/i2pd/i2pd.conf`.

## I2P Instance Access

Make your instance accessible using I2P.

Add the following to your I2PD config `/etc/i2pd/tunnels.conf`:
```
[pleroma]
type = http
host = 127.0.0.1
port = 14447
keys = pleroma.dat
```
Restart I2PD:
```
systemctl stop i2pd.service --no-block
systemctl start i2pd.service
```
*Notice:* The stop command initiates a graceful shutdown process; i2pd stops after it finishes routing transit tunnels (up to 10 minutes).

Now you'll have to find your address.
To do that you can download and use I2PD tools.[^1]
Or you'll need to access your web console on localhost:7070.
If you don't have a GUI, you'll have to SSH tunnel into it like this:
`ssh -L 7070:127.0.0.1:7070 user@ip -p port`.
Now you can access it at localhost:7070.
Go to the I2P tunnels page. Look for Server tunnels and you will see an address that ends with `.b32.i2p` next to "pleroma".
This is your site's address.

### I2P-only Instance

If creating an I2P-only instance, open `config/prod.secret.exs` and under "config :pleroma, Pleroma.Web.Endpoint," edit "https" and "port: 443" to the following:
```
url: [host: "i2paddress", scheme: "http", port: 80],
```
In addition to that, replace the existing nginx config's contents with the example below.

### Existing Instance (Clearnet Instance)

If not an I2P-only instance, add the nginx config below to your existing config at `/etc/nginx/sites-enabled/pleroma.nginx`.

And for both cases, disable CSP in Pleroma's config (STS is disabled by default) so you can define those yourself separately from the clearnet (if your instance is also on the clearnet).
Copy the following into the `config/prod.secret.exs` in your Pleroma folder (/home/pleroma/pleroma/):
```
config :pleroma, :http_security,
  enabled: false
```

Use this as the Nginx config:
```
proxy_cache_path /tmp/pleroma-media-cache levels=1:2 keys_zone=pleroma_media_cache:10m max_size=10g inactive=720m use_temp_path=off;
# The above already exists in a clearnet instance's config.
# If not, add it.

server {
    listen 127.0.0.1:14447;
    server_name youri2paddress;

    # Comment to enable logs
    access_log /dev/null;
    error_log /dev/null;

    gzip_vary on;
    gzip_proxied any;
    gzip_comp_level 6;
    gzip_buffers 16 8k;
    gzip_http_version 1.1;
    gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript application/activity+json application/atom+xml;

    client_max_body_size 16m;

    location / {

        add_header X-XSS-Protection "1; mode=block";
        add_header X-Permitted-Cross-Domain-Policies none;
        add_header X-Frame-Options DENY;
        add_header X-Content-Type-Options nosniff;
        add_header Referrer-Policy same-origin;
        add_header X-Download-Options noopen;

        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header Host $http_host;

        proxy_pass http://localhost:4000;

        client_max_body_size 16m;
    }

    location /proxy {
        proxy_cache pleroma_media_cache;
        proxy_cache_lock on;
        proxy_ignore_client_abort on;
        proxy_pass http://localhost:4000;
    }
}
```
Reload Nginx:
```
systemctl reload nginx
```

You should now be able to both access your instance using I2P and federate with other I2P instances!

[^1]: [I2PD tools](https://github.com/purplei2p/i2pd-tools) to print information about a router info file or an I2P private key, generate an I2P private key, and generate vanity addresses.

### Possible Issues

Will be added when encountered.
119 docs/config/mrf.md Normal file
@ -0,0 +1,119 @@
# Message Rewrite Facility
The Message Rewrite Facility (MRF) is a subsystem that is implemented as a series of hooks that allows the administrator to rewrite or discard messages.

Possible uses include:

* marking incoming messages with media from a given account or instance as sensitive
* rejecting messages from a specific instance
* removing/unlisting messages from the public timelines
* removing media from messages
* sending only public messages to a specific instance

The MRF provides user-configurable policies. The default policy is `NoOpPolicy`, which disables the MRF functionality. Pleroma also includes an easy to use policy called `SimplePolicy` which maps messages matching certain pre-defined criteria to actions built into the policy module.
It is possible to use multiple, active MRF policies at the same time.

## Quarantine Instances

You have the ability to prevent private / followers-only messages from federating with specific instances. This means they will only get the public or unlisted messages from your instance.

If, for example, you're using `MIX_ENV=prod` aka production mode, you would open your configuration file located in `config/prod.secret.exs` and edit or add the option under your `:instance` config object. Then you would specify the instances within quotes.
```
config :pleroma, :instance,
  [...]
  quarantined_instances: ["instance.example", "other.example"]
```

## Using `SimplePolicy`

`SimplePolicy` is capable of handling most common admin tasks.

To use `SimplePolicy`, you must enable it. Do so by adding the following to your `:instance` config object, so that it looks like this:

```
config :pleroma, :instance,
  [...]
  rewrite_policy: Pleroma.Web.ActivityPub.MRF.SimplePolicy
```

Once `SimplePolicy` is enabled, you can configure various groups in the `:mrf_simple` config object. These groups are:

* `media_removal`: Servers in this group will have media stripped from incoming messages.
* `media_nsfw`: Servers in this group will have the #nsfw tag and sensitive setting injected into incoming messages which contain media.
* `reject`: Servers in this group will have their messages rejected.
* `federated_timeline_removal`: Servers in this group will have their messages unlisted from the public timelines by flipping the `to` and `cc` fields.

Servers should be configured as lists.

### Example

This example will enable `SimplePolicy`, block media from `illegalporn.biz`, mark media as NSFW from `porn.biz` and `porn.business`, reject messages from `spam.com` and remove messages from `spam.university` from the federated timeline:

```
config :pleroma, :instance,
  rewrite_policy: [Pleroma.Web.ActivityPub.MRF.SimplePolicy]

config :pleroma, :mrf_simple,
  media_removal: ["illegalporn.biz"],
  media_nsfw: ["porn.biz", "porn.business"],
  reject: ["spam.com"],
  federated_timeline_removal: ["spam.university"]
```

### Use with Care

The effects of MRF policies can be very drastic. It is important to use this functionality carefully. Always try to talk to an admin before writing an MRF policy concerning their instance.

## Writing your own MRF Policy

As discussed above, the MRF system is a modular system that supports pluggable policies. This means that an admin may write a custom MRF policy in Elixir or any other language that runs on the Erlang VM, by specifying the module name in the `rewrite_policy` config setting.

For example, here is a sample policy module which rewrites all messages to "new message content":

```elixir
# This is a sample MRF policy which rewrites all Notes to have "new message
# content."
defmodule Site.RewritePolicy do
  @behaviour Pleroma.Web.ActivityPub.MRF

  # Catch messages which contain Note objects with actual data to filter.
  # Capture the object as `object`, the message content as `content` and the
  # message itself as `message`.
  @impl true
  def filter(%{"type" => "Create", "object" => %{"type" => "Note", "content" => content} = object} = message)
      when is_binary(content) do
    # Subject / CW is stored as summary instead of `name` like other AS2 objects
    # because of Mastodon doing it that way.
    summary = object["summary"]

    # Message edits go here.
    content = "new message content"

    # Assemble the mutated object.
    object =
      object
      |> Map.put("content", content)
      |> Map.put("summary", summary)

    # Assemble the mutated message.
    message = Map.put(message, "object", object)
    {:ok, message}
  end

  # Let all other messages through without modifying them.
  @impl true
  def filter(message), do: {:ok, message}
end
```

If you save this file as `lib/site/mrf/rewrite_policy.ex`, it will be included when you next rebuild Pleroma. You can enable it in the configuration like so:

```
config :pleroma, :instance,
  rewrite_policy: [
    Pleroma.Web.ActivityPub.MRF.SimplePolicy,
    Site.RewritePolicy
  ]
```

Please note that the Pleroma developers consider custom MRF policy modules to fall under the purview of the AGPL. As such, you are obligated to release the sources to your custom MRF policy modules upon request.
159 docs/config/onion_federation.md Normal file
@ -0,0 +1,159 @@
# Easy Onion Federation (Tor)
Tor can free people from the necessity of a domain, in addition to helping protect their privacy. As Pleroma's goal is to empower the people and let as many as possible host an instance with as little resources as possible, the ability to host an instance on a small, cheap computer like a Raspberry Pi along with Tor would be a great way to achieve that.
In addition, federating with such instances will also help further that goal.

This is a guide to show you how it can be easily done.

This guide assumes you already have Pleroma working, and that it's running on the default port 4000.
It currently only has an Nginx example.

To install Tor on Debian / Ubuntu:
```
apt -yq install tor
```
If using an old server version (older than Debian Stretch or Ubuntu 18.04), install from backports or a PPA.
I recommend using a newer server version instead.

To have the newest V3 onion addresses (which I recommend) in Debian, install Tor from backports.
If you do not have backports, uncomment the stretch-backports links at the end of `/etc/apt/sources.list`.
Then install:
```
apt update
apt -t stretch-backports -yq install tor
```
**WARNING:** Onion instances not using a Tor version supporting V3 addresses will not be able to federate with you.

Create the hidden service for your Pleroma instance in `/etc/tor/torrc`:
```
HiddenServiceDir /var/lib/tor/pleroma_hidden_service/
HiddenServicePort 80 127.0.0.1:8099
HiddenServiceVersion 3 # Remove if Tor version is below 0.3 ( tor --version )
```
Restart Tor to generate an address:
```
systemctl restart tor@default.service
```
Get the address:
```
cat /var/lib/tor/pleroma_hidden_service/hostname
```

# Federation

Next, edit your Pleroma config.
If running in prod, cd to your Pleroma directory, edit `config/prod.secret.exs`
and append this line:
```
config :pleroma, :http, proxy_url: {:socks5, :localhost, 9050}
```
In your Pleroma directory, assuming you're running prod,
run the following:
```
su pleroma
MIX_ENV=prod mix deps.get
MIX_ENV=prod mix ecto.migrate
exit
```
Restart Pleroma (if using systemd):
```
systemctl restart pleroma
```

# Tor Instance Access

Make your instance accessible using Tor.

## Tor-only Instance
If creating a Tor-only instance, open `config/prod.secret.exs` and under "config :pleroma, Pleroma.Web.Endpoint," edit "https" and "port: 443" to the following:
```
url: [host: "onionaddress", scheme: "http", port: 80],
```
In addition to that, replace the existing nginx config's contents with the example below.

## Existing Instance (Clearnet Instance)
If not a Tor-only instance,
add the nginx config below to your existing config at `/etc/nginx/sites-enabled/pleroma.nginx`.

---
For both cases, disable CSP in Pleroma's config (STS is disabled by default) so you can define those yourself separately from the clearnet (if your instance is also on the clearnet).
Copy the following into the `config/prod.secret.exs` in your Pleroma folder (/home/pleroma/pleroma/):
```
config :pleroma, :http_security,
  enabled: false
```

Use this as the Nginx config:
```
proxy_cache_path /tmp/pleroma-media-cache levels=1:2 keys_zone=pleroma_media_cache:10m max_size=10g inactive=720m use_temp_path=off;
# The above already exists in a clearnet instance's config.
# If not, add it.

server {
    listen 127.0.0.1:8099;
    server_name youronionaddress;

    # Comment to enable logs
    access_log /dev/null;
    error_log /dev/null;

    gzip_vary on;
    gzip_proxied any;
    gzip_comp_level 6;
    gzip_buffers 16 8k;
    gzip_http_version 1.1;
    gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript application/activity+json application/atom+xml;

    client_max_body_size 16m;

    location / {

        add_header X-XSS-Protection "1; mode=block";
        add_header X-Permitted-Cross-Domain-Policies none;
        add_header X-Frame-Options DENY;
        add_header X-Content-Type-Options nosniff;
        add_header Referrer-Policy same-origin;
        add_header X-Download-Options noopen;

        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
        proxy_set_header Host $http_host;

        proxy_pass http://localhost:4000;

        client_max_body_size 16m;
    }

    location /proxy {
        proxy_cache pleroma_media_cache;
        proxy_cache_lock on;
        proxy_ignore_client_abort on;
        proxy_pass http://localhost:4000;
    }
}
```
Reload Nginx:
```
systemctl reload nginx
```

You should now be able to both access your instance using Tor and federate with other Tor instances!

---

### Possible Issues

* In Debian, make sure your hidden service folder `/var/lib/tor/pleroma_hidden_service/` and its contents have debian-tor as both owner and group by using
```
ls -la /var/lib/tor/
```
If it's not, run:
```
chown -R debian-tor:debian-tor /var/lib/tor/pleroma_hidden_service/
```
* Make sure *only* the owner has *only* read and write permissions.
If not, run:
```
chmod -R 600 /var/lib/tor/pleroma_hidden_service/
```
* If you have trouble logging in to the Mastodon Frontend when using Tor, use the Tor Browser Bundle.
35
docs/config/small_customizations.md
Normal file
35
docs/config/small_customizations.md
Normal file
|
@ -0,0 +1,35 @@
|
||||||
|
# Small customizations
|
||||||
|
Replace `dev.secret.exs` with `prod.secret.exs` according to your setup.
|
||||||
|
|
||||||
|
# Thumbnail
|
||||||
|
|
||||||
|
Replace `priv/static/instance/thumbnail.jpeg` with your selfie or other neat picture. It will appear in [Pleroma Instances](http://distsn.org/pleroma-instances.html).
|
||||||
|
|
||||||
|
# Instance-specific panel
|
||||||
|
|
||||||
|
![instance-specific panel demo](/uploads/296b19ec806b130e0b49b16bfe29ce8a/image.png)
|
||||||
|
|
||||||
|
To show the instance specific panel, set `show_instance_panel` to `true` in `config/dev.secret.exs`. You can modify its content by editing `priv/static/instance/panel.html`.
|
||||||
|
|
||||||
|
# Background
|
||||||
|
|
||||||
|
You can change the background of your Pleroma instance by uploading it to `priv/static/static`, and then changing `"background"` in `config/dev.secret.exs` accordingly.
|
||||||
|
|
||||||
|
# Logo
|
||||||
|
|
||||||
|
![logo modification demo](/uploads/c70b14de60fa74245e7f0dcfa695ebff/image.png)
|
||||||
|
|
||||||
|
If you want to give a brand to your instance, look no further. You can change the logo of your instance by uploading it to `priv/static/static`, and then changing `logo` in `config/dev.secret.exs` accordingly.
|
||||||
|
|
||||||
|
# Theme
|
||||||
|
|
||||||
|
All users of your instance will be able to change the theme they use by going to the settings (the cog in the top-right hand corner). However, if you wish to change the default theme, you can do so by editing `theme` in `config/dev.secret.exs` accordingly.
|
||||||
|
|
||||||
|
# Terms of Service
|
||||||
|
|
||||||
|
Terms of Service will be shown to all users on the registration page. It's the best place where to write down the rules for your instance. You can modify the rules by changing `priv/static/static/terms-of-service.html`.
|
||||||
|
|
||||||
|
# Message Visibility
|
||||||
|
|
||||||
|
To enable message visibility options when posting like in the Mastodon frontend, set
|
||||||
|
`scope_options_enabled` to `true` in `config/dev.secret.exs`.
|
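If it helps to see them together, the frontend options mentioned on this page all live in the same configuration block. The sketch below assumes the `:fe` configuration group and the option names used above; treat the values as placeholders and adjust them to your setup.

```
config :pleroma, :fe,
  theme: "pleroma-dark",
  logo: "/static/logo.png",
  background: "/static/bg.jpg",
  show_instance_panel: true,
  scope_options_enabled: true
```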
20
docs/config/static_dir.md
Normal file
20
docs/config/static_dir.md
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
# Static Directory
|
||||||
|
|
||||||
|
Static frontend files are shipped in `priv/static/` and tracked by version control in this repository. If you want to overwrite or update these without the possibility of merge conflicts, you can write your custom versions to `instance/static/`.
|
||||||
|
|
||||||
|
```
|
||||||
|
config :pleroma, :instance,
|
||||||
|
static_dir: "instance/static/",
|
||||||
|
```
|
||||||
|
|
||||||
|
You can overwrite this value in your configuration to use a different static instance directory.
|
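For example, to override the default landing page without touching files under version control, you could copy it into the instance static directory and edit it there. This is only a sketch and assumes the stock `priv/static/index.html` shipped with your checkout.

```
mkdir -p instance/static
cp priv/static/index.html instance/static/index.html
```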
||||||
|
|
||||||
|
## robots.txt
|
||||||
|
|
||||||
|
By default, the `robots.txt` that ships in `priv/static/` is permissive. It allows well-behaved search engines to index all of your instance's URIs.
|
||||||
|
|
||||||
|
If you want to generate a restrictive `robots.txt`, you can run the following mix task. The generated `robots.txt` will be written in your instance static directory.
|
||||||
|
|
||||||
|
```
|
||||||
|
mix pleroma.robots_txt disallow_all
|
||||||
|
```
|
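The exact file the task writes may vary between versions, but a restrictive `robots.txt` generally amounts to the classic disallow-all rule:

```
User-Agent: *
Disallow: /
```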
215
docs/installation/alpine_linux_en.md
Normal file
215
docs/installation/alpine_linux_en.md
Normal file
|
@ -0,0 +1,215 @@
|
||||||
|
# Installing on Alpine Linux
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
This guide is a step-by-step installation guide for Alpine Linux. It also assumes that you have administrative rights, either as root or a user with [sudo permissions](https://www.linode.com/docs/tools-reference/custom-kernels-distros/install-alpine-linux-on-your-linode/#configuration). If you want to run this guide with root, ignore the `sudo` at the beginning of the lines, unless it calls a user like `sudo -Hu pleroma`; in this case, use `su -l <username> -s $SHELL -c 'command'` instead.
|
||||||
|
|
||||||
|
### Required packages
|
||||||
|
|
||||||
|
* `postgresql`
|
||||||
|
* `elixir`
|
||||||
|
* `erlang`
|
||||||
|
* `erlang-parsetools`
|
||||||
|
* `erlang-xmerl`
|
||||||
|
* `git`
|
||||||
|
* Development Tools
|
||||||
|
|
||||||
|
#### Optional packages used in this guide
|
||||||
|
|
||||||
|
* `nginx` (preferred, example configs for other reverse proxies can be found in the repo)
|
||||||
|
* `certbot` (or any other ACME client for Let’s Encrypt certificates)
|
||||||
|
|
||||||
|
### Prepare the system
|
||||||
|
|
||||||
|
* First make sure to have the community repository enabled:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
echo "https://nl.alpinelinux.org/alpine/latest-stable/community" | sudo tee -a /etc/apk/repository
|
||||||
|
```
|
||||||
|
|
||||||
|
* Then update the system, if not already done:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo apk update
|
||||||
|
sudo apk upgrade
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install some tools, which are needed later:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo apk add git build-base
|
||||||
|
```
|
||||||
|
|
||||||
|
### Install Elixir and Erlang
|
||||||
|
|
||||||
|
* Install Erlang and Elixir:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo apk add erlang erlang-runtime-tools erlang-xmerl elixir
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install `erlang-eldap` if you want to enable the LDAP authenticator:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo apk add erlang-eldap
|
||||||
|
```
|
||||||
|
### Install PostgreSQL
|
||||||
|
|
||||||
|
* Install Postgresql server:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo apk add postgresql postgresql-contrib
|
||||||
|
```
|
||||||
|
|
||||||
|
* Start the PostgreSQL server for the first time (the OpenRC service initializes the database directory if needed):
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo /etc/init.d/postgresql start
|
||||||
|
```
|
||||||
|
|
||||||
|
* Enable the PostgreSQL service so it also starts at boot:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo rc-update add postgresql
|
||||||
|
```
|
||||||
|
|
||||||
|
### Install PleromaBE
|
||||||
|
|
||||||
|
* Add a new system user for the Pleroma service:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo adduser -S -s /bin/false -h /opt/pleroma -H pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note**: To execute a single command as the Pleroma system user, use `sudo -Hu pleroma command`. You can also switch to a shell by using `sudo -Hu pleroma $SHELL`. If you don’t have or want `sudo` on your system, you can use `su` as root user (UID 0) for a single command by using `su -l pleroma -s $SHELL -c 'command'` and `su -l pleroma -s $SHELL` for starting a shell.
|
||||||
|
|
||||||
|
* Git clone the PleromaBE repository and make the Pleroma user the owner of the directory:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo mkdir -p /opt/pleroma
|
||||||
|
sudo chown -R pleroma:pleroma /opt/pleroma
|
||||||
|
sudo -Hu pleroma git clone https://git.pleroma.social/pleroma/pleroma /opt/pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
* Change to the new directory:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
cd /opt/pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install the dependencies for Pleroma and answer with `yes` if it asks you to install `Hex`:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma mix deps.get
|
||||||
|
```
|
||||||
|
|
||||||
|
* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen`
|
||||||
|
* Answer with `yes` if it asks you to install `rebar3`.
|
||||||
|
* This may take some time, because parts of pleroma get compiled first.
|
||||||
|
* After that, it will ask you a few questions about your instance and generate a configuration file in `config/generated_config.exs`.
|
||||||
|
|
||||||
|
* Check the configuration and, if everything looks right, rename it so Pleroma will load it (`prod.secret.exs` for production instances, `dev.secret.exs` for development instances):
|
||||||
|
|
||||||
|
```shell
|
||||||
|
mv config/{generated_config.exs,prod.secret.exs}
|
||||||
|
```
|
||||||
|
|
||||||
|
* The previous command also creates the file `config/setup_db.psql`, with which you can create the database:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu postgres psql -f config/setup_db.psql
|
||||||
|
```
|
||||||
|
|
||||||
|
* Now run the database migration:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma MIX_ENV=prod mix ecto.migrate
|
||||||
|
```
|
||||||
|
|
||||||
|
* Now you can already start Pleroma:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma MIX_ENV=prod mix phx.server
|
||||||
|
```
|
||||||
|
|
||||||
|
### Finalize installation
|
||||||
|
|
||||||
|
If you want to open your newly installed instance to the world, you should run nginx or some other webserver/proxy in front of Pleroma, and you should consider creating an OpenRC service file for Pleroma.
|
||||||
|
|
||||||
|
#### Nginx
|
||||||
|
|
||||||
|
* Install nginx, if not already done:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo apk add nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
* Set up your SSL cert, using your method of choice or certbot. If using certbot, first install it:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo apk add certbot
|
||||||
|
```
|
||||||
|
|
||||||
|
and then set it up:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo mkdir -p /var/lib/letsencrypt/
|
||||||
|
sudo certbot certonly --email <your@emailaddress> -d <yourdomain> --standalone
|
||||||
|
```
|
||||||
|
|
||||||
|
If that doesn’t work, make sure that nginx is not already running. If it still doesn’t work, try setting up nginx first (change ssl “on” to “off” and try again).
|
||||||
|
|
||||||
|
* Copy the example nginx configuration to the nginx folder
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/conf.d/pleroma.conf
|
||||||
|
```
|
||||||
|
|
||||||
|
* Before starting nginx, edit the configuration and adjust it to your needs (e.g. change the server name and the certificate paths)
|
||||||
|
* Enable and start nginx:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo rc-update add nginx
|
||||||
|
sudo service nginx start
|
||||||
|
```
|
||||||
|
|
||||||
|
If you need to renew the certificate in the future, uncomment the relevant location block in the nginx config and run:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo certbot certonly --email <your@emailaddress> -d <yourdomain> --webroot -w /var/lib/letsencrypt/
|
||||||
|
```
|
||||||
|
|
||||||
|
#### OpenRC service
|
||||||
|
|
||||||
|
* Copy example service file:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo cp /opt/pleroma/installation/init.d/pleroma /etc/init.d/pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
* Make sure it starts at boot:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo rc-update add pleroma
|
||||||
|
```
|
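You can also start the service right away instead of waiting for the next boot; this assumes the init script from the previous step was installed as `pleroma`.

```shell
sudo rc-service pleroma start
```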
||||||
|
|
||||||
|
#### Create your first user
|
||||||
|
|
||||||
|
If your instance is up and running, you can create your first user with administrative rights with the following task:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress> --admin
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Further reading
|
||||||
|
|
||||||
|
* [Admin tasks](Admin tasks)
|
||||||
|
* [Backup your instance](Backup-your-instance)
|
||||||
|
* [Configuration tips](General tips for customizing pleroma fe)
|
||||||
|
* [Hardening your instance](Hardening-your-instance)
|
||||||
|
* [How to activate mediaproxy](How-to-activate-mediaproxy)
|
||||||
|
* [Small Pleroma-FE customizations](Small customizations)
|
||||||
|
* [Updating your instance](Updating-your-instance)
|
||||||
|
|
||||||
|
## Questions
|
||||||
|
|
||||||
|
If you have questions about the installation, or if something didn’t work as it should, ask in [#pleroma:matrix.org](https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org) or in the IRC channel **#pleroma** on **Freenode**.
|
213
docs/installation/arch_linux_en.md
Normal file
213
docs/installation/arch_linux_en.md
Normal file
|
@ -0,0 +1,213 @@
|
||||||
|
# Installing on Arch Linux
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
This guide will assume that you have administrative rights, either as root or a user with [sudo permissions](https://wiki.archlinux.org/index.php/Sudo). If you want to run this guide with root, ignore the `sudo` at the beginning of the lines, unless it calls a user like `sudo -Hu pleroma`; in this case, use `su <username> -s $SHELL -c 'command'` instead.
|
||||||
|
|
||||||
|
### Required packages
|
||||||
|
|
||||||
|
* `postgresql`
|
||||||
|
* `elixir`
|
||||||
|
* `git`
|
||||||
|
* `base-devel`
|
||||||
|
|
||||||
|
#### Optional packages used in this guide
|
||||||
|
|
||||||
|
* `nginx` (preferred, example configs for other reverse proxies can be found in the repo)
|
||||||
|
* `certbot` (or any other ACME client for Let’s Encrypt certificates)
|
||||||
|
|
||||||
|
### Prepare the system
|
||||||
|
|
||||||
|
* First update the system, if not already done:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo pacman -Syu
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install some of the above mentioned programs:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo pacman -S git base-devel elixir
|
||||||
|
```
|
||||||
|
|
||||||
|
### Install PostgreSQL
|
||||||
|
|
||||||
|
[Arch Wiki article](https://wiki.archlinux.org/index.php/PostgreSQL)
|
||||||
|
|
||||||
|
* Install the `postgresql` package:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo pacman -S postgresql
|
||||||
|
```
|
||||||
|
|
||||||
|
* Initialize the database cluster:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -iu postgres initdb -D /var/lib/postgres/data
|
||||||
|
```
|
||||||
|
|
||||||
|
* Start and enable the `postgresql.service`
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo systemctl enable --now postgresql.service
|
||||||
|
```
|
||||||
|
|
||||||
|
### Install PleromaBE
|
||||||
|
|
||||||
|
* Add a new system user for the Pleroma service:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo useradd -r -s /bin/false -m -d /var/lib/pleroma -U pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note**: To execute a single command as the Pleroma system user, use `sudo -Hu pleroma command`. You can also switch to a shell by using `sudo -Hu pleroma $SHELL`. If you don’t have or want `sudo` on your system, you can use `su` as root user (UID 0) for a single command by using `su -l pleroma -s $SHELL -c 'command'` and `su -l pleroma -s $SHELL` for starting a shell.
|
||||||
|
|
||||||
|
* Git clone the PleromaBE repository and make the Pleroma user the owner of the directory:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo mkdir -p /opt/pleroma
|
||||||
|
sudo chown -R pleroma:pleroma /opt/pleroma
|
||||||
|
sudo -Hu pleroma git clone https://git.pleroma.social/pleroma/pleroma /opt/pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
* Change to the new directory:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
cd /opt/pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install the dependencies for Pleroma and answer with `yes` if it asks you to install `Hex`:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma mix deps.get
|
||||||
|
```
|
||||||
|
|
||||||
|
* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen`
|
||||||
|
* Answer with `yes` if it asks you to install `rebar3`.
|
||||||
|
* This may take some time, because parts of pleroma get compiled first.
|
||||||
|
* After that, it will ask you a few questions about your instance and generate a configuration file in `config/generated_config.exs`.
|
||||||
|
|
||||||
|
* Check the configuration and, if everything looks right, rename it so Pleroma will load it (`prod.secret.exs` for production instances, `dev.secret.exs` for development instances):
|
||||||
|
|
||||||
|
```shell
|
||||||
|
mv config/{generated_config.exs,prod.secret.exs}
|
||||||
|
```
|
||||||
|
|
||||||
|
* The previous command also creates the file `config/setup_db.psql`, with which you can create the database:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu postgres psql -f config/setup_db.psql
|
||||||
|
```
|
||||||
|
|
||||||
|
* Now run the database migration:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma MIX_ENV=prod mix ecto.migrate
|
||||||
|
```
|
||||||
|
|
||||||
|
* Now you can already start Pleroma:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma MIX_ENV=prod mix phx.server
|
||||||
|
```
|
||||||
|
|
||||||
|
### Finalize installation
|
||||||
|
|
||||||
|
If you want to open your newly installed instance to the world, you should run nginx or some other webserver/proxy in front of Pleroma, and you should consider creating a systemd service file for Pleroma.
|
||||||
|
|
||||||
|
#### Nginx
|
||||||
|
|
||||||
|
* Install nginx, if not already done:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo pacman -S nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
* Create directories for available and enabled sites:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo mkdir -p /etc/nginx/sites-{available,enabled}
|
||||||
|
```
|
||||||
|
|
||||||
|
* Append the following line at the end of the `http` block in `/etc/nginx/nginx.conf`:
|
||||||
|
|
||||||
|
```Nginx
|
||||||
|
include sites-enabled/*;
|
||||||
|
```
|
||||||
|
|
||||||
|
* Set up your SSL cert, using your method of choice or certbot. If using certbot, first install it:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo pacman -S certbot certbot-nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
and then set it up:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo mkdir -p /var/lib/letsencrypt/
|
||||||
|
sudo certbot certonly --email <your@emailaddress> -d <yourdomain> --standalone
|
||||||
|
```
|
||||||
|
|
||||||
|
If that doesn’t work, make sure that nginx is not already running. If it still doesn’t work, try setting up nginx first (change ssl “on” to “off” and try again).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
* Copy the example nginx configuration and activate it:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/sites-available/pleroma.nginx
|
||||||
|
sudo ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/pleroma.nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
* Before starting nginx, edit the configuration and adjust it to your needs (e.g. change the server name and the certificate paths)
|
||||||
|
* Enable and start nginx:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo systemctl enable --now nginx.service
|
||||||
|
```
|
||||||
|
|
||||||
|
If you need to renew the certificate in the future, uncomment the relevant location block in the nginx config and run:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo certbot certonly --email <your@emailaddress> -d <yourdomain> --webroot -w /var/lib/letsencrypt/
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Other webserver/proxies
|
||||||
|
|
||||||
|
You can find example configurations for them in `/opt/pleroma/installation/`.
|
||||||
|
|
||||||
|
#### Systemd service
|
||||||
|
|
||||||
|
* Copy example service file
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo cp /opt/pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service
|
||||||
|
```
|
||||||
|
|
||||||
|
* Edit the service file and make sure that all paths fit your installation
|
||||||
|
* Enable and start `pleroma.service`:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo systemctl enable --now pleroma.service
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Create your first user
|
||||||
|
|
||||||
|
If your instance is up and running, you can create your first user with administrative rights with the following task:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress> --admin
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Further reading
|
||||||
|
|
||||||
|
* [Admin tasks](Admin tasks)
|
||||||
|
* [Backup your instance](Backup-your-instance)
|
||||||
|
* [Configuration tips](General tips for customizing pleroma fe)
|
||||||
|
* [Hardening your instance](Hardening-your-instance)
|
||||||
|
* [How to activate mediaproxy](How-to-activate-mediaproxy)
|
||||||
|
* [Small Pleroma-FE customizations](Small customizations)
|
||||||
|
* [Updating your instance](Updating-your-instance)
|
||||||
|
|
||||||
|
## Questions
|
||||||
|
|
||||||
|
If you have questions about the installation, or if something didn’t work as it should, ask in [#pleroma:matrix.org](https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org) or in the IRC channel **#pleroma** on **Freenode**.
|
277
docs/installation/centos7_en.md
Normal file
277
docs/installation/centos7_en.md
Normal file
|
@ -0,0 +1,277 @@
|
||||||
|
# Installing on CentOS 7
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
This guide is a step-by-step installation guide for CentOS 7. It also assumes that you have administrative rights, either as root or a user with [sudo permissions](https://www.digitalocean.com/community/tutorials/how-to-create-a-sudo-user-on-centos-quickstart). If you want to run this guide with root, ignore the `sudo` at the beginning of the lines, unless it calls a user like `sudo -Hu pleroma`; in this case, use `su <username> -s $SHELL -c 'command'` instead.
|
||||||
|
|
||||||
|
### Required packages
|
||||||
|
|
||||||
|
* `postgresql` (9.6+; CentOS 7 comes with 9.2, so we will install version 11 in this guide)
|
||||||
|
* `elixir` (1.5+)
|
||||||
|
* `erlang`
|
||||||
|
* `erlang-parsetools`
|
||||||
|
* `erlang-xmerl`
|
||||||
|
* `git`
|
||||||
|
* Development Tools
|
||||||
|
|
||||||
|
#### Optional packages used in this guide
|
||||||
|
|
||||||
|
* `nginx` (preferred, example configs for other reverse proxies can be found in the repo)
|
||||||
|
* `certbot` (or any other ACME client for Let’s Encrypt certificates)
|
||||||
|
|
||||||
|
### Prepare the system
|
||||||
|
|
||||||
|
* First update the system, if not already done:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo yum update
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install some of the above mentioned programs:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo yum install wget git unzip
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install development tools:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo yum group install "Development Tools"
|
||||||
|
```
|
||||||
|
|
||||||
|
### Install Elixir and Erlang
|
||||||
|
|
||||||
|
* Add the EPEL repo:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo yum install epel-release
|
||||||
|
sudo yum -y update
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install Erlang repository:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions-1.0-1.noarch.rpm
|
||||||
|
sudo rpm -Uvh /tmp/erlang-solutions-1.0-1.noarch.rpm
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install Erlang:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo yum install erlang erlang-parsetools erlang-xmerl
|
||||||
|
```
|
||||||
|
|
||||||
|
* Download [latest Elixir release from Github](https://github.com/elixir-lang/elixir/releases/tag/v1.8.1) (Example for the newest version at the time when this manual was written)
|
||||||
|
|
||||||
|
```shell
|
||||||
|
wget -P /tmp/ https://github.com/elixir-lang/elixir/releases/download/v1.8.1/Precompiled.zip
|
||||||
|
```
|
||||||
|
|
||||||
|
* Create the folder where you want to install Elixir; we’ll use:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo mkdir -p /opt/elixir
|
||||||
|
```
|
||||||
|
|
||||||
|
* Unzip the downloaded file there:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo unzip /tmp/Precompiled.zip -d /opt/elixir
|
||||||
|
```
|
||||||
|
|
||||||
|
* Create symlinks for the pre-compiled binaries:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
for e in elixir elixirc iex mix; do sudo ln -s /opt/elixir/bin/${e} /usr/local/bin/${e}; done
|
||||||
|
```
|
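As a quick sanity check that the symlinked binaries are on your `PATH`, you can print the installed version; the exact output depends on the release you downloaded.

```shell
elixir --version
```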
||||||
|
|
||||||
|
### Install PostgreSQL
|
||||||
|
|
||||||
|
* Add the Postgresql repository:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo yum install https://download.postgresql.org/pub/repos/yum/11/redhat/rhel-7-x86_64/pgdg-centos11-11-2.noarch.rpm
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install the Postgresql server:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo yum install postgresql11-server postgresql11-contrib
|
||||||
|
```
|
||||||
|
|
||||||
|
* Initialize database:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo /usr/pgsql-11/bin/postgresql-11-setup initdb
|
||||||
|
```
|
||||||
|
|
||||||
|
* Open configuration file `/var/lib/pgsql/11/data/pg_hba.conf` and change the following lines from:
|
||||||
|
|
||||||
|
```plain
|
||||||
|
# IPv4 local connections:
|
||||||
|
host all all 127.0.0.1/32 ident
|
||||||
|
# IPv6 local connections:
|
||||||
|
host all all ::1/128 ident
|
||||||
|
```
|
||||||
|
|
||||||
|
to
|
||||||
|
|
||||||
|
```plain
|
||||||
|
# IPv4 local connections:
|
||||||
|
host all all 127.0.0.1/32 md5
|
||||||
|
# IPv6 local connections:
|
||||||
|
host all all ::1/128 md5
|
||||||
|
```
|
||||||
|
|
||||||
|
* Enable and start postgresql server:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo systemctl enable --now postgresql-11.service
|
||||||
|
```
|
||||||
|
|
||||||
|
### Install PleromaBE
|
||||||
|
|
||||||
|
* Add a new system user for the Pleroma service:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo useradd -r -s /bin/false -m -d /var/lib/pleroma -U pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note**: To execute a single command as the Pleroma system user, use `sudo -Hu pleroma command`. You can also switch to a shell by using `sudo -Hu pleroma $SHELL`. If you don’t have or want `sudo` on your system, you can use `su` as root user (UID 0) for a single command by using `su -l pleroma -s $SHELL -c 'command'` and `su -l pleroma -s $SHELL` for starting a shell.
|
||||||
|
|
||||||
|
* Git clone the PleromaBE repository and make the Pleroma user the owner of the directory:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo mkdir -p /opt/pleroma
|
||||||
|
sudo chown -R pleroma:pleroma /opt/pleroma
|
||||||
|
sudo -Hu pleroma git clone https://git.pleroma.social/pleroma/pleroma /opt/pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
* Change to the new directory:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
cd /opt/pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install the dependencies for Pleroma and answer with `yes` if it asks you to install `Hex`:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma mix deps.get
|
||||||
|
```
|
||||||
|
|
||||||
|
* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen`
|
||||||
|
* Answer with `yes` if it asks you to install `rebar3`.
|
||||||
|
* This may take some time, because parts of pleroma get compiled first.
|
||||||
|
* After that, it will ask you a few questions about your instance and generate a configuration file in `config/generated_config.exs`.
|
||||||
|
|
||||||
|
* Check the configuration and, if everything looks right, rename it so Pleroma will load it (`prod.secret.exs` for production instances, `dev.secret.exs` for development instances):
|
||||||
|
|
||||||
|
```shell
|
||||||
|
mv config/{generated_config.exs,prod.secret.exs}
|
||||||
|
```
|
||||||
|
|
||||||
|
* The previous command also creates the file `config/setup_db.psql`, with which you can create the database:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu postgres psql -f config/setup_db.psql
|
||||||
|
```
|
||||||
|
|
||||||
|
* Now run the database migration:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma MIX_ENV=prod mix ecto.migrate
|
||||||
|
```
|
||||||
|
|
||||||
|
* Now you can already start Pleroma:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma MIX_ENV=prod mix phx.server
|
||||||
|
```
|
||||||
|
|
||||||
|
### Finalize installation
|
||||||
|
|
||||||
|
If you want to open your newly installed instance to the world, you should run nginx or some other webserver/proxy in front of Pleroma, and you should consider creating a systemd service file for Pleroma.
|
||||||
|
|
||||||
|
#### Nginx
|
||||||
|
|
||||||
|
* Install nginx, if not already done:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo yum install nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
* Set up your SSL cert, using your method of choice or certbot. If using certbot, first install it:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo yum install certbot-nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
and then set it up:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo mkdir -p /var/lib/letsencrypt/
|
||||||
|
sudo certbot certonly --email <your@emailaddress> -d <yourdomain> --standalone
|
||||||
|
```
|
||||||
|
|
||||||
|
If that doesn’t work, make sure that nginx is not already running. If it still doesn’t work, try setting up nginx first (change ssl “on” to “off” and try again).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
* Copy the example nginx configuration to the nginx folder
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/conf.d/pleroma.conf
|
||||||
|
```
|
||||||
|
|
||||||
|
* Before starting nginx, edit the configuration and adjust it to your needs (e.g. change the server name and the certificate paths)
|
||||||
|
* Enable and start nginx:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo systemctl enable --now nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
If you need to renew the certificate in the future, uncomment the relevant location block in the nginx config and run:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo certbot certonly --email <your@emailaddress> -d <yourdomain> --webroot -w /var/lib/letsencrypt/
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Other webserver/proxies
|
||||||
|
|
||||||
|
You can find example configurations for them in `/opt/pleroma/installation/`.
|
||||||
|
|
||||||
|
#### Systemd service
|
||||||
|
|
||||||
|
* Copy example service file
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo cp /opt/pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service
|
||||||
|
```
|
||||||
|
|
||||||
|
* Edit the service file and make sure that all paths fit your installation
|
||||||
|
* Enable and start `pleroma.service`:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo systemctl enable --now pleroma.service
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Create your first user
|
||||||
|
|
||||||
|
If your instance is up and running, you can create your first user with administrative rights with the following task:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress> --admin
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Further reading
|
||||||
|
|
||||||
|
* [Admin tasks](Admin tasks)
|
||||||
|
* [Backup your instance](Backup-your-instance)
|
||||||
|
* [Configuration tips](General tips for customizing pleroma fe)
|
||||||
|
* [Hardening your instance](Hardening-your-instance)
|
||||||
|
* [How to activate mediaproxy](How-to-activate-mediaproxy)
|
||||||
|
* [Small Pleroma-FE customizations](Small customizations)
|
||||||
|
* [Updating your instance](Updating-your-instance)
|
||||||
|
|
||||||
|
## Questions
|
||||||
|
|
||||||
|
If you have questions about the installation, or if something didn’t work as it should, ask in [#pleroma:matrix.org](https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org) or in the IRC channel **#pleroma** on **Freenode**.
|
202
docs/installation/debian_based_en.md
Normal file
202
docs/installation/debian_based_en.md
Normal file
|
@ -0,0 +1,202 @@
|
||||||
|
# Installing on Debian Based Distributions
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
This guide will assume you are on Debian Stretch. This guide should also work with Ubuntu 16.04 and 18.04. It also assumes that you have administrative rights, either as root or a user with [sudo permissions](https://www.digitalocean.com/community/tutorials/how-to-add-delete-and-grant-sudo-privileges-to-users-on-a-debian-vps). If you want to run this guide with root, ignore the `sudo` at the beginning of the lines, unless it calls a user like `sudo -Hu pleroma`; in this case, use `su <username> -s $SHELL -c 'command'` instead.
|
||||||
|
|
||||||
|
### Required packages
|
||||||
|
|
||||||
|
* `postgresql` (9.6+, Ubuntu 16.04 comes with 9.5, you can get a newer version from [here](https://www.postgresql.org/download/linux/ubuntu/))
|
||||||
|
* `postgresql-contrib` (9.6+, same situation as above)
|
||||||
|
* `elixir` (1.5+, [install from here, Debian and Ubuntu ship older versions](https://elixir-lang.org/install.html#unix-and-unix-like) or use [asdf](https://github.com/asdf-vm/asdf) as the pleroma user)
|
||||||
|
* `erlang-dev`
|
||||||
|
* `erlang-tools`
|
||||||
|
* `erlang-parsetools`
|
||||||
|
* `erlang-eldap`, if you want to enable the LDAP authenticator
|
||||||
|
* `erlang-xmerl`
|
||||||
|
* `git`
|
||||||
|
* `build-essential`
|
||||||
|
|
||||||
|
#### Optional packages used in this guide
|
||||||
|
|
||||||
|
* `nginx` (preferred, example configs for other reverse proxies can be found in the repo)
|
||||||
|
* `certbot` (or any other ACME client for Let’s Encrypt certificates)
|
||||||
|
|
||||||
|
### Prepare the system
|
||||||
|
|
||||||
|
* First update the system, if not already done:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo apt update
|
||||||
|
sudo apt full-upgrade
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install some of the above mentioned programs:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo apt install git build-essential postgresql postgresql-contrib
|
||||||
|
```
|
||||||
|
|
||||||
|
### Install Elixir and Erlang
|
||||||
|
|
||||||
|
* Download and add the Erlang repository:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions_1.0_all.deb
|
||||||
|
sudo dpkg -i /tmp/erlang-solutions_1.0_all.deb
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install Elixir and Erlang:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install elixir erlang-dev erlang-parsetools erlang-xmerl erlang-tools
|
||||||
|
```
|
||||||
|
|
||||||
|
### Install PleromaBE
|
||||||
|
|
||||||
|
* Add a new system user for the Pleroma service:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo useradd -r -s /bin/false -m -d /var/lib/pleroma -U pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
**Note**: To execute a single command as the Pleroma system user, use `sudo -Hu pleroma command`. You can also switch to a shell by using `sudo -Hu pleroma $SHELL`. If you don’t have or want `sudo` on your system, you can use `su` as root user (UID 0) for a single command by using `su -l pleroma -s $SHELL -c 'command'` and `su -l pleroma -s $SHELL` for starting a shell.
|
||||||
|
|
||||||
|
* Git clone the PleromaBE repository and make the Pleroma user the owner of the directory:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo mkdir -p /opt/pleroma
|
||||||
|
sudo chown -R pleroma:pleroma /opt/pleroma
|
||||||
|
sudo -Hu pleroma git clone https://git.pleroma.social/pleroma/pleroma /opt/pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
* Change to the new directory:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
cd /opt/pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install the dependencies for Pleroma and answer with `yes` if it asks you to install `Hex`:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma mix deps.get
|
||||||
|
```
|
||||||
|
|
||||||
|
* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen`
|
||||||
|
* Answer with `yes` if it asks you to install `rebar3`.
|
||||||
|
* This may take some time, because parts of pleroma get compiled first.
|
||||||
|
* After that, it will ask you a few questions about your instance and generate a configuration file in `config/generated_config.exs`.
|
||||||
|
|
||||||
|
* Check the configuration and, if everything looks right, rename it so Pleroma will load it (`prod.secret.exs` for production instances, `dev.secret.exs` for development instances):
|
||||||
|
|
||||||
|
```shell
|
||||||
|
mv config/{generated_config.exs,prod.secret.exs}
|
||||||
|
```
|
||||||
|
|
||||||
|
* The previous command also creates the file `config/setup_db.psql`, with which you can create the database:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu postgres psql -f config/setup_db.psql
|
||||||
|
```
|
||||||
|
|
||||||
|
* Now run the database migration:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma MIX_ENV=prod mix ecto.migrate
|
||||||
|
```
|
||||||
|
|
||||||
|
* Now you can already start Pleroma:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma MIX_ENV=prod mix phx.server
|
||||||
|
```
|
||||||
|
|
||||||
|
### Finalize installation
|
||||||
|
|
||||||
|
If you want to open your newly installed instance to the world, you should run nginx or some other webserver/proxy in front of Pleroma, and you should consider creating a systemd service file for Pleroma.
|
||||||
|
|
||||||
|
#### Nginx
|
||||||
|
|
||||||
|
* Install nginx, if not already done:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo apt install nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
* Set up your SSL cert, using your method of choice or certbot. If using certbot, first install it:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo apt install certbot
|
||||||
|
```
|
||||||
|
|
||||||
|
and then set it up:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo mkdir -p /var/lib/letsencrypt/
|
||||||
|
sudo certbot certonly --email <your@emailaddress> -d <yourdomain> --standalone
|
||||||
|
```
|
||||||
|
|
||||||
|
If that doesn’t work, make sure that nginx is not already running. If it still doesn’t work, try setting up nginx first (change ssl “on” to “off” and try again).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
* Copy the example nginx configuration and activate it:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo cp /opt/pleroma/installation/pleroma.nginx /etc/nginx/sites-available/pleroma.nginx
|
||||||
|
sudo ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/pleroma.nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
* Before starting nginx, edit the configuration and adjust it to your needs (e.g. change the server name and the certificate paths)
|
||||||
|
* Enable and start nginx:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo systemctl enable --now nginx.service
|
||||||
|
```
|
||||||
|
|
||||||
|
If you need to renew the certificate in the future, uncomment the relevant location block in the nginx config and run:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo certbot certonly --email <your@emailaddress> -d <yourdomain> --webroot -w /var/lib/letsencrypt/
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Other webserver/proxies
|
||||||
|
|
||||||
|
You can find example configurations for them in `/opt/pleroma/installation/`.
|
||||||
|
|
||||||
|
#### Systemd service
|
||||||
|
|
||||||
|
* Copy example service file
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo cp /opt/pleroma/installation/pleroma.service /etc/systemd/system/pleroma.service
|
||||||
|
```
|
||||||
|
|
||||||
|
* Edit the service file and make sure that all paths fit your installation (a quick check is sketched after this list)
|
||||||
|
* Enable and start `pleroma.service`:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo systemctl enable --now pleroma.service
|
||||||
|
```
|
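As a quick check for the editing step above, you can list the fields that most often need adjusting; the field names below are only the usual suspects in the shipped unit file, so compare them against your copy rather than treating them as authoritative.

```shell
grep -E 'User=|WorkingDirectory=|Environment=' /etc/systemd/system/pleroma.service
```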
||||||
|
|
||||||
|
#### Create your first user
|
||||||
|
|
||||||
|
If your instance is up and running, you can create your first user with administrative rights with the following task:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
sudo -Hu pleroma MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress> --admin
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Further reading
|
||||||
|
|
||||||
|
* [Admin tasks](Admin tasks)
|
||||||
|
* [Backup your instance](Backup-your-instance)
|
||||||
|
* [Configuration tips](General tips for customizing pleroma fe)
|
||||||
|
* [Hardening your instance](Hardening-your-instance)
|
||||||
|
* [How to activate mediaproxy](How-to-activate-mediaproxy)
|
||||||
|
* [Small Pleroma-FE customizations](Small customizations)
|
||||||
|
* [Updating your instance](Updating-your-instance)
|
||||||
|
|
||||||
|
## Questions
|
||||||
|
|
||||||
|
If you have questions about the installation, or if something didn’t work as it should, ask in [#pleroma:matrix.org](https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org) or in the IRC channel **#pleroma** on **Freenode**.
|
191
docs/installation/debian_based_jp.md
Normal file
191
docs/installation/debian_based_jp.md
Normal file
|
@ -0,0 +1,191 @@
|
||||||
|
# Pleromaの入れ方
|
||||||
|
## 日本語訳について
|
||||||
|
|
||||||
|
この記事は [Installing on Debian based distributions](Installing on Debian based distributions) の日本語訳です。何かがおかしいと思ったら、原文を見てください。
|
||||||
|
|
||||||
|
## インストール
|
||||||
|
|
||||||
|
このガイドはDebian Stretchを仮定しています。Ubuntu 16.04でも可能です。
|
||||||
|
|
||||||
|
### 必要なソフトウェア
|
||||||
|
|
||||||
|
- PostgreSQL 9.6+ (postgresql-contrib-9.6 または他のバージョンの PSQL をインストールしてください)
|
||||||
|
- Elixir 1.5 以上 ([Debianのリポジトリからインストールしないこと!!! ここからインストールすること!](https://elixir-lang.org/install.html#unix-and-unix-like))。または [asdf](https://github.com/asdf-vm/asdf) を pleroma ユーザーでインストール。
|
||||||
|
- erlang-dev
|
||||||
|
- erlang-tools
|
||||||
|
- erlang-parsetools
|
||||||
|
- erlang-xmerl (Jessieではバックポートからインストールすること!)
|
||||||
|
- git
|
||||||
|
- build-essential
|
||||||
|
- openssh
|
||||||
|
- openssl
|
||||||
|
- nginx prefered (Apacheも動くかもしれませんが、誰もテストしていません!)
|
||||||
|
- certbot (または何らかのACME Let's encryptクライアント)
|
||||||
|
|
||||||
|
### システムを準備する
|
||||||
|
|
||||||
|
* まずシステムをアップデートしてください。
|
||||||
|
```
|
||||||
|
apt update && apt dist-upgrade
|
||||||
|
```
|
||||||
|
|
||||||
|
* 複数のツールとpostgresqlをインストールします。あとで必要になるので。
|
||||||
|
```
|
||||||
|
apt install git build-essential openssl ssh sudo postgresql-9.6 postgresql-contrib-9.6
|
||||||
|
```
|
||||||
|
(postgresqlのバージョンは、あなたのディストロにあわせて変えてください。または、バージョン番号がいらないかもしれません。)
|
||||||
|
|
||||||
|
### ElixirとErlangをインストールします
|
||||||
|
|
||||||
|
* Erlangのリポジトリをダウンロードおよびインストールします。
|
||||||
|
```
|
||||||
|
wget -P /tmp/ https://packages.erlang-solutions.com/erlang-solutions_1.0_all.deb && sudo dpkg -i /tmp/erlang-solutions_1.0_all.deb
|
||||||
|
```
|
||||||
|
|
||||||
|
* ElixirとErlangをインストールします、
|
||||||
|
```
|
||||||
|
apt update && apt install elixir erlang-dev erlang-parsetools erlang-xmerl erlang-tools
|
||||||
|
```
|
||||||
|
|
||||||
|
### Pleroma BE (バックエンド) をインストールします
|
||||||
|
|
||||||
|
* 新しいユーザーを作ります。
|
||||||
|
```
|
||||||
|
adduser pleroma
|
||||||
|
```
|
||||||
|
(Give it any password you want, make it STRONG)
|
||||||
|
|
||||||
|
* 新しいユーザーをsudoグループに入れます。
|
||||||
|
```
|
||||||
|
usermod -aG sudo pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
* 新しいユーザーに変身し、ホームディレクトリに移動します。
|
||||||
|
```
|
||||||
|
su pleroma
|
||||||
|
cd ~
|
||||||
|
```
|
||||||
|
|
||||||
|
* Gitリポジトリをクローンします。
|
||||||
|
```
|
||||||
|
git clone https://git.pleroma.social/pleroma/pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
* 新しいディレクトリに移動します。
|
||||||
|
```
|
||||||
|
cd pleroma/
|
||||||
|
```
|
||||||
|
|
||||||
|
* Pleromaが依存するパッケージをインストールします。Hexをインストールしてもよいか聞かれたら、yesを入力してください。
|
||||||
|
```
|
||||||
|
mix deps.get
|
||||||
|
```
|
||||||
|
|
||||||
|
* コンフィギュレーションを生成します。
|
||||||
|
```
|
||||||
|
mix pleroma.instance gen
|
||||||
|
```
|
||||||
|
* rebar3をインストールしてもよいか聞かれたら、yesを入力してください。
|
||||||
|
* この処理には時間がかかります。私もよく分かりませんが、何らかのコンパイルが行われているようです。
|
||||||
|
* あなたのインスタンスについて、いくつかの質問があります。その回答は `config/generated_config.exs` というコンフィギュレーションファイルに保存されます。
|
||||||
|
|
||||||
|
**注意**: メディアプロクシを有効にすると回答して、なおかつ、キャッシュのURLは空欄のままにしている場合は、`generated_config.exs` を編集して、`base_url` で始まる行をコメントアウトまたは削除してください。そして、上にある行の `true` の後にあるコンマを消してください。
|
||||||
|
|
||||||
|
* コンフィギュレーションを確認して、もし問題なければ、ファイル名を変更してください。
|
||||||
|
```
|
||||||
|
mv config/{generated_config.exs,prod.secret.exs}
|
||||||
|
```
|
||||||
|
|
||||||
|
* これまでのコマンドで、すでに `config/setup_db.psql` というファイルが作られています。このファイルをもとに、データベースを作成します。
|
||||||
|
```
|
||||||
|
sudo su postgres -c 'psql -f config/setup_db.psql'
|
||||||
|
```
|
||||||
|
|
||||||
|
* そして、データベースのミグレーションを実行します。
|
||||||
|
```
|
||||||
|
MIX_ENV=prod mix ecto.migrate
|
||||||
|
```
|
||||||
|
|
||||||
|
* Pleromaを起動できるようになりました。
|
||||||
|
```
|
||||||
|
MIX_ENV=prod mix phx.server
|
||||||
|
```
|
||||||
|
|
||||||
|
### インストールを終わらせる
|
||||||
|
|
||||||
|
あなたの新しいインスタンスを世界に向けて公開するには、nginxまたは何らかのウェブサーバー (プロクシ) を使用する必要があります。また、Pleroma のためにシステムサービスファイルを作成する必要があります。
|
||||||
|
|
||||||
|
#### Nginx
|
||||||
|
|
||||||
|
* まだインストールしていないなら、nginxをインストールします。
|
||||||
|
```
|
||||||
|
apt install nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
* SSLをセットアップします。他の方法でもよいですが、ここではcertbotを説明します。
|
||||||
|
certbotを使うならば、まずそれをインストールします。
|
||||||
|
```
|
||||||
|
apt install certbot
|
||||||
|
```
|
||||||
|
そしてセットアップします。
|
||||||
|
```
|
||||||
|
mkdir -p /var/lib/letsencrypt/.well-known
|
||||||
|
% certbot certonly --email your@emailaddress --webroot -w /var/lib/letsencrypt/ -d yourdomain
|
||||||
|
```
|
||||||
|
もしうまくいかないときは、先にnginxを設定してください。ssl "on" を "off" に変えてから再試行してください。
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
* nginxコンフィギュレーションの例をnginxフォルダーにコピーします。
|
||||||
|
```
|
||||||
|
cp /home/pleroma/pleroma/installation/pleroma.nginx /etc/nginx/sites-enabled/pleroma.nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
* nginxを起動する前に、コンフィギュレーションを編集してください。例えば、サーバー名、証明書のパスなどを変更する必要があります。
|
||||||
|
* nginxを再起動します。
|
||||||
|
```
|
||||||
|
systemctl reload nginx.service
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Systemd サービス
|
||||||
|
|
||||||
|
* サービスファイルの例をコピーします。
|
||||||
|
```
|
||||||
|
cp /home/pleroma/pleroma/installation/pleroma.service /usr/lib/systemd/system/pleroma.service
|
||||||
|
```
|
||||||
|
|
||||||
|
* サービスファイルを変更します。すべてのパスが正しいことを確認してください。また、`[Service]` セクションに以下の行があることを確認してください。
|
||||||
|
```
|
||||||
|
Environment="MIX_ENV=prod"
|
||||||
|
```
|
||||||
|
|
||||||
|
* `pleroma.service` を enable および start してください。
|
||||||
|
```
|
||||||
|
systemctl enable --now pleroma.service
|
||||||
|
```
|
||||||
|
|
||||||
|
#### モデレーターを作る
|
||||||
|
|
||||||
|
新たにユーザーを作ったら、モデレーター権限を与えたいかもしれません。以下のタスクで可能です。
|
||||||
|
```
|
||||||
|
mix set_moderator username [true|false]
|
||||||
|
```
|
||||||
|
|
||||||
|
モデレーターはすべてのポストを消すことができます。将来的には他のことも可能になるかもしれません。
|
||||||
|
|
||||||
|
#### メディアプロクシを有効にする
|
||||||
|
|
||||||
|
`generate_config` でメディアプロクシを有効にしているなら、すでにメディアプロクシが動作しています。あとから設定を変更したいなら、[How to activate mediaproxy](How-to-activate-mediaproxy) を見てください。
|
||||||
|
|
||||||
|
#### コンフィギュレーションとカスタマイズ
|
||||||
|
|
||||||
|
* [Configuration tips](General tips for customizing pleroma fe)
|
||||||
|
* [Small Pleroma-FE customizations](Small customizations)
|
||||||
|
* [Admin tasks](Admin tasks)
|
||||||
|
|
||||||
|
## 質問ある?
|
||||||
|
|
||||||
|
インストールについて質問がある、もしくは、うまくいかないときは、以下のところで質問できます。
|
||||||
|
|
||||||
|
* [#pleroma:matrix.org](https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org)
|
||||||
|
* **Freenode** の **#pleroma** IRCチャンネル
|
296
docs/installation/gentoo_en.md
Normal file
296
docs/installation/gentoo_en.md
Normal file
|
@ -0,0 +1,296 @@
|
||||||
|
# Installing on Gentoo GNU/Linux
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
This guide will assume that you have administrative rights, either as root or a user with [sudo permissions](https://wiki.gentoo.org/wiki/Sudo). Lines that begin with `#` indicate that they should be run as the superuser. Lines using `$` should be run as the indicated user, e.g. `pleroma$` should be run as the `pleroma` user.
|
||||||
|
|
||||||
|
### Configuring your hostname (optional)
|
||||||
|
|
||||||
|
If you would like your prompt to permanently include your host/domain, change `/etc/conf.d/hostname` to your hostname. You can reboot or use the `hostname` command to make immediate changes.
|
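As a sketch, assuming your instance will live at `pleroma.example.tld`, the change could look like this (run as the superuser, per this guide's conventions):

```shell
# echo 'hostname="pleroma.example.tld"' > /etc/conf.d/hostname
# hostname pleroma.example.tld
```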
||||||
|
|
||||||
|
### Your make.conf, package.use, and USE flags
|
||||||
|
|
||||||
|
The only specific USE flag you should need is the `uuid` flag for `dev-db/postgresql`. Add the following line to any new file in `/etc/portage/package.use`. If you would like a suggested name for the file, either `postgresql` or `pleroma` would do fine, depending on how you like to arrange your package.use flags.
|
||||||
|
|
||||||
|
```text
|
||||||
|
dev-db/postgresql uuid
|
||||||
|
```
|
||||||
|
|
||||||
|
You could opt to add `USE="uuid"` to `/etc/portage/make.conf` if you'd rather set this as a global USE flags, but this flags does unrelated things in other packages, so keep that in mind if you elect to do so.
|
||||||
|
|
||||||
|
Double-check your compiler flags in `/etc/portage/make.conf`. If you require any special compilation flags or would like to set up remote builds, now is the time to do so. Be sure that your CFLAGS and MAKEOPTS make sense for the platform you are using. It is not recommended to go above `-O2` or to use risky optimization flags for a production server.
|
||||||
|
|
||||||
|
### Installing a cron daemon
|
||||||
|
|
||||||
|
Gentoo quite pointedly does not come with a cron daemon installed, and as such it is recommended you install one to automate certbot renewals and to allow other system administration tasks to be run automatically. Gentoo has [a whole wide world of cron options](https://wiki.gentoo.org/wiki/Cron) but if you just want A Cron That Works, `emerge --ask virtual/cron` will install the default cron implementation (probably cronie), which will work just fine. For the purposes of this guide, we will be doing just that.
|
||||||
|
|
||||||
|
### Required ebuilds
|
||||||
|
|
||||||
|
* `dev-db/postgresql`
|
||||||
|
* `dev-lang/elixir`
|
||||||
|
* `dev-vcs/git`
|
||||||
|
|
||||||
|
#### Optional ebuilds used in this guide
|
||||||
|
|
||||||
|
* `www-servers/nginx` (preferred, example configs for other reverse proxies can be found in the repo)
|
||||||
|
* `app-crypt/certbot` (or any other ACME client for Let’s Encrypt certificates)
|
||||||
|
* `app-crypt/certbot-nginx` (nginx certbot plugin that allows use of the all-powerful `--nginx` flag on certbot)
|
||||||
|
|
||||||
|
### Prepare the system
|
||||||
|
|
||||||
|
* First ensure that you have the latest copy of the portage ebuilds if you have not synced them yet:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# emaint sync -a
|
||||||
|
```
|
||||||
|
|
||||||
|
* Emerge all the required and suggested software in one go:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# emerge --ask dev-db/postgresql dev-lang/elixir dev-vcs/git www-servers/nginx app-crypt/certbot app-crypt/certbot-nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
If you would not like to install the optional packages, remove them from this line.
|
||||||
|
|
||||||
|
If you're running this from a low-powered virtual machine, it should work, though it will take some time. There were no issues on a VPS with a single core and 1GB of RAM; if you are using an even more limited device and run into issues, you can try creating a swapfile or use a more powerful machine running Gentoo to [cross build](https://wiki.gentoo.org/wiki/Cross_build_environment). If you have a wait ahead of you, now would be a good time to take a break, stretch a bit, refresh your beverage of choice and/or get a snack, and reply to Arch users' posts with "I use Gentoo btw" as we do.
|
||||||
|
|
||||||
|
### Install PostgreSQL
|
||||||
|
|
||||||
|
[Gentoo Wiki article](https://wiki.gentoo.org/wiki/PostgreSQL) as well as [PostgreSQL QuickStart](https://wiki.gentoo.org/wiki/PostgreSQL/QuickStart) might be worth a quick glance, as the way Gentoo handles postgres is slightly unusual, with built-in capability to have two different databases running, for testing and live use or whatever other purpose. While it is still straightforward to install, it does mean that the version numbers used in this guide might change for future updates, so keep an eye out for the output you get from `emerge` to ensure you are using the correct ones.
|
||||||
|
|
||||||
|
* Install postgresql if you have not done so already:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# emerge --ask dev-db/postgresql
|
||||||
|
```
|
||||||
|
|
||||||
|
Ensure that `/etc/conf.d/postgresql-11` has the encoding you want (it defaults to UTF8 which is probably what you want) and make any adjustments to the data directory if you find it necessary. Be sure to adjust the number at the end depending on what version of postgres you actually installed.
|
||||||
|
|
||||||
|
* Initialize the database cluster
|
||||||
|
|
||||||
|
The output from emerging postgresql should give you a command for initializing the postgres database. The default slot should be indicated in this command, ensure that it matches the command below.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# emerge --config dev-db/postgresql:11
|
||||||
|
```
|
||||||
|
|
||||||
|
* Start postgres and enable the system service
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# /etc/init.d/postgresql-11 start
|
||||||
|
# rc-update add postgresql-11 default
|
||||||
|
```
|
||||||
|
|
||||||
|
### A note on licenses, the AGPL, and deployment procedures
|
||||||
|
|
||||||
|
If you do not plan to make any modifications to your Pleroma instance, cloning directly from the main repo will get you what you need. However, if you plan on doing any contributions to upstream development, making changes or modifications to your instance, making custom themes, or want to play around--and let's be honest here, if you're using Gentoo that is most likely you--you will save yourself a lot of headache later if you take the time right now to fork the Pleroma repo and use that in the following section.
|
||||||
|
|
||||||
|
Not only does this make it much easier to deploy changes you make, as you can commit and pull from upstream and all that good stuff from the comfort of your local machine then simply `git pull` on your instance server when you're ready to deploy, it also ensures you are compliant with the Affero General Public Licence that Pleroma is licenced under, which stipulates that all network services provided with modified AGPL code must publish their changes on a publicly available internet service and for free. It also makes it much easier to ask for help from and provide help to your fellow Pleroma admins if your public repo always reflects what you are running because it is part of your deployment procedure.
|
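A minimal sketch of that workflow, assuming you have already pushed a fork somewhere you control, is to keep the official repository as an `upstream` remote so you can pull its changes into your fork before deploying:

```shell
pleroma$ git remote add upstream https://git.pleroma.social/pleroma/pleroma
pleroma$ git fetch upstream
pleroma$ git merge upstream/develop   # or whichever branch you deploy from
```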
||||||
|
|
||||||
|
### Install PleromaBE
|
||||||
|
|
||||||
|
* Add a new system user for the Pleroma service and set up default directories:
|
||||||
|
|
||||||
|
Remove `,wheel` if you do not want this user to be able to use `sudo`; however, note that being able to `sudo` as the `pleroma` user will make finishing the installation and common maintenance tasks somewhat easier:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# useradd -m -G users,wheel -s /bin/bash pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
Optional: If you are using sudo, review your sudo setup to ensure it works for you. The `/etc/sudoers` file has a lot of options and examples to help you, and [the Gentoo sudo guide](https://wiki.gentoo.org/wiki/Sudo) has more information. Finishing this installation will be somewhat easier if you have a way to sudo from the `pleroma` user, but it might be best to not allow that user to sudo during normal operation, and as such there will be a reminder at the end of this guide to double check if you would like to lock down the `pleroma` user after initial setup.
|
||||||
|
|
||||||
|
**Note**: To execute a single command as the Pleroma system user, use `sudo -Hu pleroma command`. You can also switch to a shell by using `sudo -Hu pleroma $SHELL`. If you don't have or want `sudo` or would like to use the system as the `pleroma` user for instance maintenance tasks, you can simply use `su - pleroma` to switch to the `pleroma` user.
|
||||||
|
|
||||||
|
* Git clone the PleromaBE repository and make the Pleroma user the owner of the directory:
|
||||||
|
|
||||||
|
It is highly recommended that you use your own fork for the `https://path/to/repo` part below; however, if you foolishly decide to forego using your own fork, the primary repo `https://git.pleroma.social/pleroma/pleroma` will work here.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pleroma$ cd ~
|
||||||
|
pleroma$ git clone https://path/to/repo
|
||||||
|
```
|
||||||
|
|
||||||
|
* Change to the new directory:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pleroma$ cd ~/pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
* Install the dependencies for Pleroma and answer with `yes` if it asks you to install `Hex`:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pleroma$ mix deps.get
|
||||||
|
```
|
||||||
|
|
||||||
|
* Generate the configuration:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pleroma$ mix pleroma.instance gen
|
||||||
|
```
|
||||||
|
|
||||||
|
* Answer with `yes` if it asks you to install `rebar3`.
|
||||||
|
|
||||||
|
* This part precompiles some parts of Pleroma, so it might take a few moments
|
||||||
|
|
||||||
|
* After that, it will ask you a few questions about your instance and generate a configuration file in `config/generated_config.exs`.
|
||||||
|
|
||||||
|
* Spend some time with `generated_config.exs` to ensure that everything is in order. If you plan on using an S3-compatible service to store your local media, that can be done here. You will likely mostly be using `prod.secret.exs` for a production instance, however if you would like to set up a development environment, make a copy to `dev.secret.exs` and adjust settings as needed as well.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pleroma$ mv config/generated_config.exs config/prod.secret.exs
|
||||||
|
```
|
||||||
|
|
||||||
|
* The previous command also creates the file `config/setup_db.psql`, with which you can create the database. Ensure that it is using the correct database name on the `CREATE DATABASE` and the `\c` lines, then run the postgres script:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pleroma$ sudo -Hu postgres psql -f config/setup_db.psql
|
||||||
|
```
|
||||||
|
|
||||||
|
* Now run the database migration:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pleroma$ MIX_ENV=prod mix ecto.migrate
|
||||||
|
```
|
||||||
|
|
||||||
|
* Now you can already start Pleroma:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pleroma$ MIX_ENV=prod mix phx.server
|
||||||
|
```
|
||||||
|
|
||||||
|
It probably won't work over the public internet quite yet, however, as we still need to set up a web server to proxy to the Pleroma application, as well as configure SSL.
|
||||||
|
|
||||||
|
### Finalize installation
|
||||||
|
|
||||||
|
Assuming you want to open your newly installed federated social network to, well, the federation, you should run nginx or some other webserver/proxy in front of Pleroma. It is also a good idea to set up Pleroma to run as a system service.
|
||||||
|
|
||||||
|
#### Nginx
|
||||||
|
|
||||||
|
* Install nginx, if not already done:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# emerge --ask www-servers/nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
* Create directories for available and enabled sites:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# mkdir -p /etc/nginx/sites-{available,enabled}
|
||||||
|
```
|
||||||
|
|
||||||
|
* Append the following line at the end of the `http` block in `/etc/nginx/nginx.conf`:
|
||||||
|
|
||||||
|
```Nginx
|
||||||
|
include sites-enabled/*;
|
||||||
|
```
|
||||||
|
|
||||||
|
* Setup your SSL cert, using your method of choice or certbot. If using certbot, install it if you haven't already:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# emerge --ask app-crypt/certbot app-crypt/certbot-nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
and then set it up:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# mkdir -p /var/lib/letsencrypt/
|
||||||
|
# certbot certonly --email <your@emailaddress> -d <yourdomain> --standalone
|
||||||
|
```
|
||||||
|
|
||||||
|
If that doesn't work the first time, add `--dry-run` to further attempts to avoid being ratelimited as you identify the issue, and do not remove it until the dry run succeeds. If that doesn’t work, make sure, that nginx is not already running. If it still doesn’t work, try setting up nginx first (change ssl “on” to “off” and try again). Often the answer to issues with certbot is to use the `--nginx` flag once you have nginx up and running.
|
||||||
|
|
||||||
|
If you are using any additional subdomains, such as for a media proxy, you can re-run the same command with the subdomain in question. When it comes time to renew later, you will not need to run multiple times for each domain, one renew will handle it.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
* Copy the example nginx configuration and activate it:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# cp /home/pleroma/pleroma/installation/pleroma.nginx /etc/nginx/sites-available/
|
||||||
|
# ln -s /etc/nginx/sites-available/pleroma.nginx /etc/nginx/sites-enabled/pleroma.nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
* Take some time to ensure that your nginx config is correct
|
||||||
|
|
||||||
|
Replace all instances of `example.tld` with your instance's public URL. If for whatever reason you made changes to the port that your pleroma app runs on, be sure that is reflected in your configuration.
|
||||||
|
|
||||||
|
Pay special attention to the line that begins with `ssl_ecdh_curve`. It is stongly advised to comment that line out so that OpenSSL will use its full capabilities, and it is also possible you are running OpenSSL 1.0.2 necessitating that you do this.
|
||||||
|
|
||||||
|
* Enable and start nginx:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# rc-update add nginx default
|
||||||
|
# /etc/init.d/nginx start
|
||||||
|
```
|
||||||
|
|
||||||
|
If you are using certbot, it is HIGHLY recommend you set up a cron job that renews your certificate, and that you install the suggested `certbot-nginx` plugin. If you don't do these things, you only have yourself to blame when your instance breaks suddenly because you forgot about it.
|
||||||
|
|
||||||
|
First, ensure that the command you will be installing into your crontab works.
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# /usr/bin/certbot renew --nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
Assuming not much time has passed since you got certbot working a few steps ago, you should get a message for all domains you installed certificates for saying `Cert not yet due for renewal`.
|
||||||
|
|
||||||
|
Now, run crontab as a superuser with `crontab -e` or `sudo crontab -e` as appropriate, and add the following line to your cron:
|
||||||
|
|
||||||
|
```cron
|
||||||
|
0 0 1 * * /usr/bin/certbot renew --nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
This will run certbot on the first of the month at midnight. If you'd rather run more frequently, it's not a bad idea, feel free to go for it.
|
||||||
|
|
||||||
|
#### Other webserver/proxies
|
||||||
|
|
||||||
|
If you would like to use other webservers or proxies, there are example configurations for some popular alternatives in `/home/pleroma/pleroma/installation/`. You can, of course, check out [the Gentoo wiki](https://wiki.gentoo.org) for more information on installing and configuring said alternatives.
|
||||||
|
|
||||||
|
#### Create the uploads folder
|
||||||
|
|
||||||
|
Even if you are using S3, Pleroma needs someplace to store media posted on your instance. If you are using the `/home/pleroma/pleroma` root folder suggested by this guide, simply:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pleroma$ mkdir -p ~/pleroma/uploads
|
||||||
|
```
|
||||||
|
|
||||||
|
#### init.d service
|
||||||
|
|
||||||
|
* Copy example service file
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# cp /home/pleroma/pleroma/installation/init.d/pleroma /etc/init.d/
|
||||||
|
```
|
||||||
|
|
||||||
|
* Be sure to take a look at this service file and make sure that all paths fit your installation
|
||||||
|
|
||||||
|
* Enable and start `pleroma`:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
# rc-update add pleroma default
|
||||||
|
# /etc/init.d/pleroma start
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Create your first user
|
||||||
|
|
||||||
|
If your instance is up and running, you can create your first user with administrative rights with the following task:
|
||||||
|
|
||||||
|
```shell
|
||||||
|
pleroma$ MIX_ENV=prod mix pleroma.user new <username> <your@emailaddress> --admin
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Privilege cleanup
|
||||||
|
|
||||||
|
If you opted to allow sudo for the `pleroma` user but would like to remove the ability for greater security, now might be a good time to edit `/etc/sudoers` and/or change the groups the `pleroma` user belongs to. Be sure to restart the pleroma service afterwards to ensure it picks up on the changes.
|
||||||
|
|
||||||
|
#### Further reading
|
||||||
|
|
||||||
|
* [Admin tasks](Admin tasks)
|
||||||
|
* [Backup your instance](Backup-your-instance)
|
||||||
|
* [Configuration tips](General tips for customizing pleroma fe)
|
||||||
|
* [Hardening your instance](Hardening-your-instance)
|
||||||
|
* [How to activate mediaproxy](How-to-activate-mediaproxy)
|
||||||
|
* [Small Pleroma-FE customizations](Small customizations)
|
||||||
|
* [Updating your instance](Updating-your-instance)
|
||||||
|
|
||||||
|
## Questions
|
||||||
|
|
||||||
|
Questions about the installation or didn’t it work as it should be, ask in [#pleroma:matrix.org](https://matrix.heldscal.la/#/room/#freenode_#pleroma:matrix.org) or IRC Channel **#pleroma** on **Freenode**.
|
198
docs/installation/netbsd_en.md
Normal file
198
docs/installation/netbsd_en.md
Normal file
|
@ -0,0 +1,198 @@
|
||||||
|
# Installing on NetBSD
|
||||||
|
|
||||||
|
## Required software
|
||||||
|
|
||||||
|
pkgin should have been installed by the NetBSD installer if you selected
|
||||||
|
the right options. If it isn't installed, install it using pkg_add.
|
||||||
|
|
||||||
|
Note that `postgresql11-contrib` is needed for the Postgres extensions
|
||||||
|
Pleroma uses.
|
||||||
|
|
||||||
|
The `mksh` shell is needed to run the Elixir `mix` script.
|
||||||
|
|
||||||
|
`# pkgin install acmesh elixir git-base git-docs mksh nginx postgresql11-server postgresql11-client postgresql11-contrib sudo`
|
||||||
|
|
||||||
|
You can also build these packages using pkgsrc:
|
||||||
|
```
|
||||||
|
databases/postgresql11-contrib
|
||||||
|
databases/postgresql11-client
|
||||||
|
databases/postgresql11-server
|
||||||
|
devel/git-base
|
||||||
|
devel/git-docs
|
||||||
|
lang/elixir
|
||||||
|
security/acmesh
|
||||||
|
security/sudo
|
||||||
|
shells/mksh
|
||||||
|
www/nginx
|
||||||
|
```
|
||||||
|
|
||||||
|
Copy the rc.d scripts to the right directory:
|
||||||
|
|
||||||
|
```
|
||||||
|
# cp /usr/pkg/share/examples/rc.d/nginx /usr/pkg/share/examples/rc.d/pgsql /etc/rc.d
|
||||||
|
```
|
||||||
|
|
||||||
|
Add nginx and Postgres to `/etc/rc.conf`:
|
||||||
|
|
||||||
|
```
|
||||||
|
nginx=YES
|
||||||
|
pgsql=YES
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuring postgres
|
||||||
|
|
||||||
|
First, run `# /etc/rc.d/pgsql start`. Then, `$ sudo -Hu pgsql -g pgsql createdb`.
|
||||||
|
|
||||||
|
## Configuring Pleroma
|
||||||
|
|
||||||
|
Create a user for Pleroma:
|
||||||
|
|
||||||
|
```
|
||||||
|
# groupadd pleroma
|
||||||
|
# useradd -d /home/pleroma -m -g pleroma -s /usr/pkg/bin/mksh pleroma
|
||||||
|
# echo 'export LC_ALL="en_GB.UTF-8"' >> /home/pleroma/.profile
|
||||||
|
# su -l pleroma -c $SHELL
|
||||||
|
```
|
||||||
|
|
||||||
|
Clone the repository:
|
||||||
|
|
||||||
|
```
|
||||||
|
$ cd /home/pleroma
|
||||||
|
$ git clone https://git.pleroma.social/pleroma/pleroma.git
|
||||||
|
```
|
||||||
|
|
||||||
|
Configure Pleroma. Note that you need a domain name at this point:
|
||||||
|
|
||||||
|
```
|
||||||
|
$ cd /home/pleroma/pleroma
|
||||||
|
$ mix deps.get
|
||||||
|
$ mix pleroma.instance gen # You will be asked a few questions here.
|
||||||
|
```
|
||||||
|
|
||||||
|
Since Postgres is configured, we can now initialize the database. There should
|
||||||
|
now be a file in `config/setup_db.psql` that makes this easier. Edit it, and
|
||||||
|
*change the password* to a password of your choice. Make sure it is secure, since
|
||||||
|
it'll be protecting your database. Now initialize the database:
|
||||||
|
|
||||||
|
```
|
||||||
|
$ sudo -Hu pgsql -g pgsql psql -f config/setup_db.psql
|
||||||
|
```
|
||||||
|
|
||||||
|
Postgres allows connections from all users without a password by default. To
|
||||||
|
fix this, edit `/usr/pkg/pgsql/data/pg_hba.conf`. Change every `trust` to
|
||||||
|
`password`.
|
||||||
|
|
||||||
|
Once this is done, restart Postgres with `# /etc/rc.d/pgsql restart`.
|
||||||
|
|
||||||
|
Run the database migrations.
|
||||||
|
You will need to do this whenever you update with `git pull`:
|
||||||
|
|
||||||
|
```
|
||||||
|
$ MIX_ENV=prod mix ecto.migrate
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuring nginx
|
||||||
|
|
||||||
|
Install the example configuration file
|
||||||
|
`/home/pleroma/pleroma/installation/pleroma.nginx` to
|
||||||
|
`/usr/pkg/etc/nginx.conf`.
|
||||||
|
|
||||||
|
Note that it will need to be wrapped in a `http {}` block. You should add
|
||||||
|
settings for the nginx daemon outside of the http block, for example:
|
||||||
|
|
||||||
|
```
|
||||||
|
user nginx nginx;
|
||||||
|
error_log /var/log/nginx/error.log;
|
||||||
|
worker_processes 4;
|
||||||
|
|
||||||
|
events {
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Edit the defaults:
|
||||||
|
|
||||||
|
* Change `ssl_certificate` and `ssl_trusted_certificate` to
|
||||||
|
`/etc/nginx/tls/fullchain`.
|
||||||
|
* Change `ssl_certificate_key` to `/etc/nginx/tls/key`.
|
||||||
|
* Change `example.tld` to your instance's domain name.
|
||||||
|
|
||||||
|
## Configuring acme.sh
|
||||||
|
|
||||||
|
We'll be using acme.sh in Stateless Mode for TLS certificate renewal.
|
||||||
|
|
||||||
|
First, get your account fingerprint:
|
||||||
|
|
||||||
|
```
|
||||||
|
$ sudo -Hu nginx -g nginx acme.sh --register-account
|
||||||
|
```
|
||||||
|
|
||||||
|
You need to add the following to your nginx configuration for the server
|
||||||
|
running on port 80:
|
||||||
|
|
||||||
|
```
|
||||||
|
location ~ ^/\.well-known/acme-challenge/([-_a-zA-Z0-9]+)$ {
|
||||||
|
default_type text/plain;
|
||||||
|
return 200 "$1.6fXAG9VyG0IahirPEU2ZerUtItW2DHzDzD9wZaEKpqd";
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Replace the string after after `$1.` with your fingerprint.
|
||||||
|
|
||||||
|
Start nginx:
|
||||||
|
|
||||||
|
```
|
||||||
|
# /etc/rc.d/nginx start
|
||||||
|
```
|
||||||
|
|
||||||
|
It should now be possible to issue a cert (replace `example.com`
|
||||||
|
with your domain name):
|
||||||
|
|
||||||
|
```
|
||||||
|
$ sudo -Hu nginx -g nginx acme.sh --issue -d example.com --stateless
|
||||||
|
```
|
||||||
|
|
||||||
|
Let's add auto-renewal to `/etc/daily.local`
|
||||||
|
(replace `example.com` with your domain):
|
||||||
|
|
||||||
|
```
|
||||||
|
/usr/pkg/bin/sudo -Hu nginx -g nginx \
|
||||||
|
/usr/pkg/sbin/acme.sh -r \
|
||||||
|
-d example.com \
|
||||||
|
--cert-file /etc/nginx/tls/cert \
|
||||||
|
--key-file /etc/nginx/tls/key \
|
||||||
|
--ca-file /etc/nginx/tls/ca \
|
||||||
|
--fullchain-file /etc/nginx/tls/fullchain \
|
||||||
|
--stateless
|
||||||
|
```
|
||||||
|
|
||||||
|
## Creating a startup script for Pleroma
|
||||||
|
|
||||||
|
Copy the startup script to the correct location and make sure it's executable:
|
||||||
|
|
||||||
|
```
|
||||||
|
# cp /home/pleroma/pleroma/installation/netbsd/rc.d/pleroma /etc/rc.d/pleroma
|
||||||
|
# chmod +x /etc/rc.d/pleroma
|
||||||
|
```
|
||||||
|
|
||||||
|
Add the following to `/etc/rc.conf`:
|
||||||
|
|
||||||
|
```
|
||||||
|
pleroma=YES
|
||||||
|
pleroma_home="/home/pleroma"
|
||||||
|
pleroma_user="pleroma"
|
||||||
|
```
|
||||||
|
|
||||||
|
Run `# /etc/rc.d/pleroma start` to start Pleroma.
|
||||||
|
|
||||||
|
## Conclusion
|
||||||
|
|
||||||
|
Restart nginx with `# /etc/rc.d/nginx restart` and you should be up and running.
|
||||||
|
|
||||||
|
If you need further help, contact niaa on freenode.
|
||||||
|
|
||||||
|
Make sure your time is in sync, or other instances will receive your posts with
|
||||||
|
incorrect timestamps. You should have ntpd running.
|
||||||
|
|
||||||
|
## Instances running NetBSD
|
||||||
|
|
||||||
|
* <https://catgirl.science>
|
222
docs/installation/openbsd_en.md
Normal file
222
docs/installation/openbsd_en.md
Normal file
|
@ -0,0 +1,222 @@
|
||||||
|
# Installing on OpenBSD
|
||||||
|
This guide describes the installation and configuration of pleroma (and the required software to run it) on a single OpenBSD 6.4 server.
|
||||||
|
For any additional information regarding commands and configuration files mentioned here, check the man pages [online](https://man.openbsd.org/) or directly on your server with the man command.
|
||||||
|
|
||||||
|
#### Required software
|
||||||
|
The following packages need to be installed:
|
||||||
|
* elixir
|
||||||
|
* gmake
|
||||||
|
* ImageMagick
|
||||||
|
* git
|
||||||
|
* postgresql-server
|
||||||
|
* postgresql-contrib
|
||||||
|
|
||||||
|
To install them, run the following command (with doas or as root):
|
||||||
|
`pkg_add elixir gmake ImageMagick git postgresql-server postgresql-contrib`
|
||||||
|
|
||||||
|
Pleroma requires a reverse proxy, OpenBSD has relayd in base (and is used in this guide) and packages/ports are available for nginx (www/nginx) and apache (www/apache-httpd). Independently of the reverse proxy, [acme-client(1)](https://man.openbsd.org/acme-client) can be used to get a certificate from Let's Encrypt.
|
||||||
|
|
||||||
|
#### Creating the pleroma user
|
||||||
|
Pleroma will be run by a dedicated user, \_pleroma. Before creating it, insert the following lines in login.conf:
|
||||||
|
```
|
||||||
|
pleroma:\
|
||||||
|
:datasize-max=1536M:\
|
||||||
|
:datasize-cur=1536M:\
|
||||||
|
:openfiles-max=4096
|
||||||
|
```
|
||||||
|
This creates a "pleroma" login class and sets higher values than default for datasize and openfiles (see [login.conf(5)](https://man.openbsd.org/login.conf)), this is required to avoid having pleroma crash some time after starting.
|
||||||
|
|
||||||
|
Create the \_pleroma user, assign it the pleroma login class and create its home directory (/home/\_pleroma/): `useradd -m -L pleroma _pleroma`
|
||||||
|
|
||||||
|
#### Clone pleroma's directory
|
||||||
|
Enter a shell as the \_pleroma user. As root, run `su _pleroma -;cd`. Then clone the repository with `git clone https://git.pleroma.social/pleroma/pleroma.git`. Pleroma is now installed in /home/\_pleroma/pleroma/, it will be configured and started at the end of this guide.
|
||||||
|
|
||||||
|
#### Postgresql
|
||||||
|
Start a shell as the \_postgresql user (as root run `su _postgresql -` then run the `initdb` command to initialize postgresql:
|
||||||
|
If you wish to not use the default location for postgresql's data (/var/postgresql/data), add the following switch at the end of the command: `-D <path>` and modify the `datadir` variable in the /etc/rc.d/postgresql script.
|
||||||
|
|
||||||
|
When this is done, enable postgresql so that it starts on boot and start it. As root, run:
|
||||||
|
```
|
||||||
|
rcctl enable postgresql
|
||||||
|
rcctl start postgresql
|
||||||
|
```
|
||||||
|
To check that it started properly and didn't fail right after starting, you can run `ps aux | grep postgres`, there should be multiple lines of output.
|
||||||
|
|
||||||
|
#### httpd
|
||||||
|
httpd will have three fuctions:
|
||||||
|
* redirect requests trying to reach the instance over http to the https URL
|
||||||
|
* serve a robots.txt file
|
||||||
|
* get Let's Encrypt certificates, with acme-client
|
||||||
|
|
||||||
|
Insert the following config in httpd.conf:
|
||||||
|
```
|
||||||
|
# $OpenBSD: httpd.conf,v 1.17 2017/04/16 08:50:49 ajacoutot Exp $
|
||||||
|
|
||||||
|
ext_inet="<IPv4 address>"
|
||||||
|
ext_inet6="<IPv6 address>"
|
||||||
|
|
||||||
|
server "default" {
|
||||||
|
listen on $ext_inet port 80 # Comment to disable listening on IPv4
|
||||||
|
listen on $ext_inet6 port 80 # Comment to disable listening on IPv6
|
||||||
|
listen on 127.0.0.1 port 80 # Do NOT comment this line
|
||||||
|
|
||||||
|
log syslog
|
||||||
|
directory no index
|
||||||
|
|
||||||
|
location "/.well-known/acme-challenge/*" {
|
||||||
|
root "/acme"
|
||||||
|
request strip 2
|
||||||
|
}
|
||||||
|
|
||||||
|
location "/robots.txt" { root "/htdocs/local/" }
|
||||||
|
location "/*" { block return 302 "https://$HTTP_HOST$REQUEST_URI" }
|
||||||
|
}
|
||||||
|
|
||||||
|
types {
|
||||||
|
include "/usr/share/misc/mime.types"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
Do not forget to change *\<IPv4/6 address\>* to your server's address(es). If httpd should only listen on one protocol family, comment one of the two first *listen* options.
|
||||||
|
|
||||||
|
Create the /var/www/htdocs/local/ folder and write the content of your robots.txt in /var/www/htdocs/local/robots.txt.
|
||||||
|
Check the configuration with `httpd -n`, if it is OK enable and start httpd (as root):
|
||||||
|
```
|
||||||
|
rcctl enable httpd
|
||||||
|
rcctl start httpd
|
||||||
|
```
|
||||||
|
|
||||||
|
#### acme-client
|
||||||
|
acme-client is used to get SSL/TLS certificates from Let's Encrypt.
|
||||||
|
Insert the following configuration in /etc/acme-client.conf:
|
||||||
|
```
|
||||||
|
#
|
||||||
|
# $OpenBSD: acme-client.conf,v 1.4 2017/03/22 11:14:14 benno Exp $
|
||||||
|
#
|
||||||
|
|
||||||
|
authority letsencrypt-<domain name> {
|
||||||
|
#agreement url "https://letsencrypt.org/documents/LE-SA-v1.2-November-15-2017.pdf"
|
||||||
|
api url "https://acme-v01.api.letsencrypt.org/directory"
|
||||||
|
account key "/etc/acme/letsencrypt-privkey-<domain name>.pem"
|
||||||
|
}
|
||||||
|
|
||||||
|
domain <domain name> {
|
||||||
|
domain key "/etc/ssl/private/<domain name>.key"
|
||||||
|
domain certificate "/etc/ssl/<domain name>.crt"
|
||||||
|
domain full chain certificate "/etc/ssl/<domain name>.fullchain.pem"
|
||||||
|
sign with letsencrypt-<domain name>
|
||||||
|
challengedir "/var/www/acme/"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
Replace *\<domain name\>* by the domain name you'll use for your instance. As root, run `acme-client -n` to check the config, then `acme-client -ADv <domain name>` to create account and domain keys, and request a certificate for the first time.
|
||||||
|
Make acme-client run everyday by adding it in /etc/daily.local. As root, run the following command: `echo "acme-client <domain name>" >> /etc/daily.local`.
|
||||||
|
|
||||||
|
Relayd will look for certificates and keys based on the address it listens on (see next part), the easiest way to make them available to relayd is to create a link, as root run:
|
||||||
|
```
|
||||||
|
ln -s /etc/ssl/<domain name>.fullchain.pem /etc/ssl/<IP address>.crt
|
||||||
|
ln -s /etc/ssl/private/<domain name>.key /etc/ssl/private/<IP address>.key
|
||||||
|
```
|
||||||
|
This will have to be done for each IPv4 and IPv6 address relayd listens on.
|
||||||
|
|
||||||
|
#### relayd
|
||||||
|
relayd will be used as the reverse proxy sitting in front of pleroma.
|
||||||
|
Insert the following configuration in /etc/relayd.conf:
|
||||||
|
```
|
||||||
|
# $OpenBSD: relayd.conf,v 1.4 2018/03/23 09:55:06 claudio Exp $
|
||||||
|
|
||||||
|
ext_inet="<IPv4 address>"
|
||||||
|
ext_inet6="<IPv6 address>"
|
||||||
|
|
||||||
|
table <pleroma_server> { 127.0.0.1 }
|
||||||
|
table <httpd_server> { 127.0.0.1 }
|
||||||
|
|
||||||
|
http protocol plerup { # Protocol for upstream pleroma server
|
||||||
|
#tcp { nodelay, sack, socket buffer 65536, backlog 128 } # Uncomment and adjust as you see fit
|
||||||
|
tls ciphers "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305"
|
||||||
|
tls ecdhe secp384r1
|
||||||
|
|
||||||
|
# Forward some paths to the local server (as pleroma won't respond to them as you might want)
|
||||||
|
pass request quick path "/robots.txt" forward to <httpd_server>
|
||||||
|
|
||||||
|
# Append a bunch of headers
|
||||||
|
match request header append "X-Forwarded-For" value "$REMOTE_ADDR" # This two header and the next one are not strictly required by pleroma but adding them won't hurt
|
||||||
|
match request header append "X-Forwarded-By" value "$SERVER_ADDR:$SERVER_PORT"
|
||||||
|
|
||||||
|
match response header append "X-XSS-Protection" value "1; mode=block"
|
||||||
|
match response header append "X-Permitted-Cross-Domain-Policies" value "none"
|
||||||
|
match response header append "X-Frame-Options" value "DENY"
|
||||||
|
match response header append "X-Content-Type-Options" value "nosniff"
|
||||||
|
match response header append "Referrer-Policy" value "same-origin"
|
||||||
|
match response header append "X-Download-Options" value "noopen"
|
||||||
|
match response header append "Content-Security-Policy" value "default-src 'none'; base-uri 'self'; form-action 'self'; img-src 'self' data: https:; media-src 'self' https:; style-src 'self' 'unsafe-inline'; font-src 'self'; script-src 'self'; connect-src 'self' wss://CHANGEME.tld; upgrade-insecure-requests;" # Modify "CHANGEME.tld" and set your instance's domain here
|
||||||
|
match request header append "Connection" value "upgrade"
|
||||||
|
#match response header append "Strict-Transport-Security" value "max-age=31536000; includeSubDomains" # Uncomment this only after you get HTTPS working.
|
||||||
|
|
||||||
|
# If you do not want remote frontends to be able to access your Pleroma backend server, comment these lines
|
||||||
|
match response header append "Access-Control-Allow-Origin" value "*"
|
||||||
|
match response header append "Access-Control-Allow-Methods" value "POST, PUT, DELETE, GET, PATCH, OPTIONS"
|
||||||
|
match response header append "Access-Control-Allow-Headers" value "Authorization, Content-Type, Idempotency-Key"
|
||||||
|
match response header append "Access-Control-Expose-Headers" value "Link, X-RateLimit-Reset, X-RateLimit-Limit, X-RateLimit-Remaining, X-Request-Id"
|
||||||
|
# Stop commenting lines here
|
||||||
|
}
|
||||||
|
|
||||||
|
relay wwwtls {
|
||||||
|
listen on $ext_inet port https tls # Comment to disable listening on IPv4
|
||||||
|
listen on $ext_inet6 port https tls # Comment to disable listening on IPv6
|
||||||
|
|
||||||
|
protocol plerup
|
||||||
|
|
||||||
|
forward to <pleroma_server> port 4000 check http "/" code 200
|
||||||
|
forward to <httpd_server> port 80 check http "/robots.txt" code 200
|
||||||
|
}
|
||||||
|
```
|
||||||
|
Again, change *\<IPv4/6 address\>* to your server's address(es) and comment one of the two *listen* options if needed. Also change *wss://CHANGEME.tld* to *wss://\<your instance's domain name\>*.
|
||||||
|
Check the configuration with `relayd -n`, if it is OK enable and start relayd (as root):
|
||||||
|
```
|
||||||
|
rcctl enable relayd
|
||||||
|
rcctl start relayd
|
||||||
|
```
|
||||||
|
|
||||||
|
#### pf
|
||||||
|
Enabling and configuring pf is highly recommended.
|
||||||
|
In /etc/pf.conf, insert the following configuration:
|
||||||
|
```
|
||||||
|
# Macros
|
||||||
|
if="<network interface>"
|
||||||
|
authorized_ssh_clients="any"
|
||||||
|
|
||||||
|
# Skip traffic on loopback interface
|
||||||
|
set skip on lo
|
||||||
|
|
||||||
|
# Default behavior
|
||||||
|
set block-policy drop
|
||||||
|
block in log all
|
||||||
|
pass out quick
|
||||||
|
|
||||||
|
# Security features
|
||||||
|
match in all scrub (no-df random-id)
|
||||||
|
block in log from urpf-failed
|
||||||
|
|
||||||
|
# Rules
|
||||||
|
pass in quick on $if inet proto icmp to ($if) icmp-type { echoreq unreach paramprob trace } # ICMP
|
||||||
|
pass in quick on $if inet6 proto icmp6 to ($if) icmp6-type { echoreq unreach paramprob timex toobig } # ICMPv6
|
||||||
|
pass in quick on $if proto tcp to ($if) port { http https } # relayd/httpd
|
||||||
|
pass in quick on $if proto tcp from $authorized_ssh_clients to ($if) port ssh
|
||||||
|
```
|
||||||
|
Replace *\<network interface\>* by your server's network interface name (which you can get with ifconfig). Consider replacing the content of the authorized\_ssh\_clients macro by, for exemple, your home IP address, to avoid SSH connection attempts from bots.
|
||||||
|
|
||||||
|
Check pf's configuration by running `pfctl -nf /etc/pf.conf`, load it with `pfctl -f /etc/pf.conf` and enable pf at boot with `rcctl enable pf`.
|
||||||
|
|
||||||
|
#### Configure and start pleroma
|
||||||
|
Enter a shell as \_pleroma (as root `su _pleroma -`) and enter pleroma's installation directory (`cd ~/pleroma/`).
|
||||||
|
Then follow the main installation guide:
|
||||||
|
* run `mix deps.get`
|
||||||
|
* run `mix pleroma.instance gen` and enter your instance's information when asked
|
||||||
|
* copy config/generated\_config.exs to config/prod.secret.exs. The default values should be sufficient but you should edit it and check that everything seems OK.
|
||||||
|
* exit your current shell back to a root one and run `psql -U postgres -f /home/_pleroma/config/setup_db.psql` to setup the database.
|
||||||
|
* return to a \_pleroma shell into pleroma's installation directory (`su _pleroma -;cd ~/pleroma`) and run `MIX_ENV=prod mix ecto.migrate`
|
||||||
|
|
||||||
|
As \_pleroma in /home/\_pleroma/pleroma, you can now run `LC_ALL=en_US.UTF-8 MIX_ENV=prod mix phx.server` to start your instance.
|
||||||
|
In another SSH session/tmux window, check that it is working properly by running `ftp -MVo - http://127.0.0.1:4000/api/v1/instance`, you should get json output. Double-check that *uri*'s value is your instance's domain name.
|
||||||
|
|
||||||
|
##### Starting pleroma at boot
|
||||||
|
An rc script to automatically start pleroma at boot hasn't been written yet, it can be run in a tmux session (tmux is in base).
|
110
docs/installation/openbsd_fi.md
Normal file
110
docs/installation/openbsd_fi.md
Normal file
|
@ -0,0 +1,110 @@
|
||||||
|
# Pleroman asennus OpenBSD:llä
|
||||||
|
|
||||||
|
Tarvitset:
|
||||||
|
* Oman domainin
|
||||||
|
* OpenBSD 6.3 -serverin
|
||||||
|
* Auttavan ymmärryksen unix-järjestelmistä
|
||||||
|
|
||||||
|
Komennot, joiden edessä on '#', tulee ajaa käyttäjänä `root`. Tämä on
|
||||||
|
suositeltavaa tehdä komennon `doas` avulla, katso `doas (1)` ja `doas.conf (5)`.
|
||||||
|
Tästä eteenpäin oletuksena on, että domain "esimerkki.com" osoittaa
|
||||||
|
serverin IP-osoitteeseen.
|
||||||
|
|
||||||
|
Jos asennuksen kanssa on ongelmia, IRC-kanava #pleroma Freenodessa tai
|
||||||
|
Matrix-kanava #freenode_#pleroma:matrix.org ovat hyviä paikkoja löytää apua
|
||||||
|
(englanniksi), `/msg eal kukkuu` jos haluat välttämättä puhua härmää.
|
||||||
|
|
||||||
|
Asenna tarvittava ohjelmisto:
|
||||||
|
|
||||||
|
`# pkg_add git elixir gmake postgresql-server-10.3 postgresql-contrib-10.3`
|
||||||
|
|
||||||
|
Luo postgresql-tietokanta:
|
||||||
|
|
||||||
|
`# su - _postgresql`
|
||||||
|
|
||||||
|
`$ mkdir /var/postgresql/data`
|
||||||
|
|
||||||
|
`$ initdb -D /var/postgresql/data -E UTF8`
|
||||||
|
|
||||||
|
`$ createdb`
|
||||||
|
|
||||||
|
Käynnistä tietokanta ja aseta se käynnistymään automaattisesti.
|
||||||
|
|
||||||
|
`# rcctl start postgresql`
|
||||||
|
|
||||||
|
`# rcctl enable postgresql`
|
||||||
|
|
||||||
|
Luo käyttäjä pleromaa varten (kysyy muutaman kysymyksen):
|
||||||
|
|
||||||
|
`# adduser pleroma`
|
||||||
|
|
||||||
|
Vaihda pleroma-käyttäjään ja mene kotihakemistoosi:
|
||||||
|
|
||||||
|
`# su - pleroma`
|
||||||
|
|
||||||
|
Lataa pleroman lähdekoodi:
|
||||||
|
|
||||||
|
`$ git clone https://git.pleroma.social/pleroma/pleroma.git`
|
||||||
|
|
||||||
|
`$ cd pleroma`
|
||||||
|
|
||||||
|
Asenna tarvittavat elixir-kirjastot:
|
||||||
|
|
||||||
|
`$ mix deps.get`
|
||||||
|
|
||||||
|
`$ mix deps.compile`
|
||||||
|
|
||||||
|
Luo tarvittava konfiguraatio:
|
||||||
|
|
||||||
|
`$ mix generate_config`
|
||||||
|
|
||||||
|
`$ cp config/generated_config.exs config/prod.secret.exs`
|
||||||
|
|
||||||
|
Aja luodut tietokantakomennot:
|
||||||
|
|
||||||
|
`# su _postgres -c 'psql -f config/setup_db.psql'`
|
||||||
|
|
||||||
|
`$ MIX_ENV=prod mix ecto.migrate`
|
||||||
|
|
||||||
|
Käynnistä pleroma-prosessi:
|
||||||
|
|
||||||
|
`$ MIX_ENV=prod mix compile`
|
||||||
|
|
||||||
|
`$ MIX_ENV=prod mix phx.server`
|
||||||
|
|
||||||
|
Tässä vaiheessa on hyvä tarkistaa että asetukset ovat oikein. Avaa selaimella,
|
||||||
|
curlilla tai vastaavalla työkalulla `esimerkki.com:4000/api/v1/instance` ja katso
|
||||||
|
että kohta "uri" on "https://esimerkki.com".
|
||||||
|
|
||||||
|
Huom! Muista varmistaa että muuttuja MIX_ENV on "prod" mix-komentoja ajaessasi.
|
||||||
|
Mix lukee oikean konfiguraatiotiedoston sen mukaisesti.
|
||||||
|
|
||||||
|
Ohessa enimmäkseen toimivaksi todettu rc.d-skripti pleroman käynnistämiseen.
|
||||||
|
Kirjoita se tiedostoon /etc/rc.d/pleroma. Tämän jälkeen aja
|
||||||
|
`# chmod +x /etc/rc.d/pleroma`, ja voit käynnistää pleroman komennolla
|
||||||
|
`# /etc/rc.d/pleroma start`.
|
||||||
|
|
||||||
|
```
|
||||||
|
#!/bin/ksh
|
||||||
|
#/etc/rc.d/pleroma
|
||||||
|
|
||||||
|
daemon="cd /home/pleroma/pleroma;MIX_ENV=prod /usr/local/bin/elixir"
|
||||||
|
daemon_flags="--detached /usr/local/bin/mix phx.server"
|
||||||
|
daemon_user="pleroma"
|
||||||
|
rc_reload="NO"
|
||||||
|
rc_bg="YES"
|
||||||
|
|
||||||
|
pexp="beam"
|
||||||
|
|
||||||
|
. /etc/rc.d/rc.subr
|
||||||
|
|
||||||
|
rc_cmd $1
|
||||||
|
```
|
||||||
|
|
||||||
|
Tämän jälkeen tarvitset enää HTTP-serverin välittämään kutsut pleroma-prosessille.
|
||||||
|
Tiedostosta `install/pleroma.nginx` löytyy esimerkkikonfiguraatio, ja TLS-sertifikaatit
|
||||||
|
saat ilmaiseksi esimerkiksi [letsencryptiltä](https://certbot.eff.org/lets-encrypt/opbsd-nginx.html).
|
||||||
|
Nginx asentuu yksinkertaisesti komennolla `# pkg_add nginx`.
|
||||||
|
|
||||||
|
Kun olet valmis, avaa https://esimerkki.com selaimessasi. Luo käyttäjä ja seuraa kiinnostavia
|
||||||
|
tyyppejä muilla palvelimilla!
|
55
docs/introduction.md
Normal file
55
docs/introduction.md
Normal file
|
@ -0,0 +1,55 @@
|
||||||
|
# Introduction to Pleroma
|
||||||
|
## What is Pleroma?
|
||||||
|
Pleroma is a federated social networking platform, compatible with GNU social, Mastodon and other OStatus and ActivityPub implementations. It is free software licensed under the AGPLv3.
|
||||||
|
It actually consists of two components: a backend, named simply Pleroma, and a user-facing frontend, named Pleroma-FE. It also includes the Mastodon frontend, if that's your thing.
|
||||||
|
It's part of what we call the fediverse, a federated network of instances which speak common protocols and can communicate with each other.
|
||||||
|
One account on a instance is enough to talk to the entire fediverse!
|
||||||
|
|
||||||
|
## How can I use it?
|
||||||
|
|
||||||
|
Pleroma instances are already widely deployed, a list can be found here:
|
||||||
|
http://distsn.org/pleroma-instances.html
|
||||||
|
|
||||||
|
If you don't feel like joining an existing instance, but instead prefer to deploy your own instance, that's easy too!
|
||||||
|
Installation instructions can be found here:
|
||||||
|
[main Pleroma wiki](/)
|
||||||
|
|
||||||
|
## I got an account, now what?
|
||||||
|
Great! Now you can explore the fediverse!
|
||||||
|
- Open the login page for your Pleroma instance (for ex. https://pleroma.soykaf.com) and login with your username and password.
|
||||||
|
(If you don't have one yet, click on Register) :slightly_smiling_face:
|
||||||
|
|
||||||
|
At this point you will have two columns in front of you.
|
||||||
|
|
||||||
|
### Left column
|
||||||
|
- first block: here you can see your avatar, your nickname a bio, and statistics (Statuses, Following, Followers).
|
||||||
|
Under that you have a text form which allows you to post new statuses. The icon on the left is for uploading media files and attach them to your post. The number under the text form is a character counter, every instance can have a different character limit (the default is 5000).
|
||||||
|
If you want to mention someone, type @ + name of the person. A drop-down menu will help you in finding the right person. :slight_smile:
|
||||||
|
To post your status, simply press Submit.
|
||||||
|
|
||||||
|
- second block: Here you can switch between the different timelines:
|
||||||
|
- Timeline: all the people that you follow
|
||||||
|
- Mentions: all the statutes where you are mentioned
|
||||||
|
- Public Timeline: all the statutes from the local instance
|
||||||
|
- The Whole Known Network: everything, local and remote!
|
||||||
|
|
||||||
|
- third block: this is the Chat block, where you communicate with people on the same instance in realtime. It is local-only, for now, but we're planning to make it extendable to the entire fediverse! :sweat_smile:
|
||||||
|
|
||||||
|
- fourth block: This is the Notifications block, here you will get notified whenever somebody mentions you, follows you, repeats or favorites one of your statuses.
|
||||||
|
|
||||||
|
### Right column
|
||||||
|
This is where the interesting stuff happens! :slight_smile:
|
||||||
|
Depending on the timeline you will see different statuses, but each status has a standard structure:
|
||||||
|
- Icon + name + link to profile. An optional left-arrow if it's a reply to another status (hovering will reveal the replied-to status).
|
||||||
|
- A + button on the right allows you to Expand/Collapse an entire discussion thread. It also updates in realtime!
|
||||||
|
- A binocular icon allows you to open the status on the instance where it's originating from.
|
||||||
|
- The text of the status, including mentions. If you click on a mention, it will automatically open the profile page of that person.
|
||||||
|
- Four buttons (left to right): Reply, Repeat, Favorite, Delete.
|
||||||
|
|
||||||
|
## Mastodon interface
|
||||||
|
If the Pleroma interface isn't your thing, or you're just trying something new but you want to keep using the familiar Mastodon interface, we got that too! :smile:
|
||||||
|
Just add a "/web" after your instance url (for ex. https://pleroma.soycaf.com/web) and you'll end on the Mastodon web interface, but with a Pleroma backend! MAGIC! :fireworks:
|
||||||
|
For more information on the Mastodon interface, please look here:
|
||||||
|
https://github.com/tootsuite/documentation/blob/master/Using-Mastodon/User-guide.md
|
||||||
|
|
||||||
|
Remember, what you see is only the frontend part of Mastodon, the backend is still Pleroma.
|
|
@ -37,7 +37,7 @@ server {
|
||||||
listen [::]:443 ssl http2;
|
listen [::]:443 ssl http2;
|
||||||
ssl_session_timeout 5m;
|
ssl_session_timeout 5m;
|
||||||
|
|
||||||
ssl_trusted_certificate /etc/letsencrypt/live/example.tld/fullchain.pem;
|
ssl_trusted_certificate /etc/letsencrypt/live/example.tld/chain.pem;
|
||||||
ssl_certificate /etc/letsencrypt/live/example.tld/fullchain.pem;
|
ssl_certificate /etc/letsencrypt/live/example.tld/fullchain.pem;
|
||||||
ssl_certificate_key /etc/letsencrypt/live/example.tld/privkey.pem;
|
ssl_certificate_key /etc/letsencrypt/live/example.tld/privkey.pem;
|
||||||
|
|
||||||
|
|
21
installation/pleroma.supervisord
Normal file
21
installation/pleroma.supervisord
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
; Assumes pleroma is installed in /home/pleroma/pleroma and running as the pleroma user
|
||||||
|
; Also assumes mix is in /usr/bin, this might differ on BSDs or niche Linux distros
|
||||||
|
; Logs into /home/pleroma/logs
|
||||||
|
[program:pleroma]
|
||||||
|
command=/usr/bin/mix phx.server
|
||||||
|
directory=/home/pleroma/pleroma
|
||||||
|
autostart=true
|
||||||
|
autorestart=true
|
||||||
|
user=pleroma
|
||||||
|
environment =
|
||||||
|
MIX_ENV=prod,
|
||||||
|
HOME=/home/pleroma,
|
||||||
|
USER=pleroma,
|
||||||
|
PATH="/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin:/home/pleroma/bin:%(ENV_PATH)s",
|
||||||
|
PWD=/home/pleroma/pleroma
|
||||||
|
stdout_logfile=/home/pleroma/logs/stdout.log
|
||||||
|
stdout_logfile_maxbytes=50MB
|
||||||
|
stdout_logfile_backups=10
|
||||||
|
stderr_logfile=/home/pleroma/logs/stderr.log
|
||||||
|
stderr_logfile_maxbytes=50MB
|
||||||
|
stderr_logfile_backups=10
|
60
lib/healthcheck.ex
Normal file
60
lib/healthcheck.ex
Normal file
|
@ -0,0 +1,60 @@
|
||||||
|
defmodule Pleroma.Healthcheck do
|
||||||
|
@moduledoc """
|
||||||
|
Module collects metrics about app and assign healthy status.
|
||||||
|
"""
|
||||||
|
alias Pleroma.Healthcheck
|
||||||
|
alias Pleroma.Repo
|
||||||
|
|
||||||
|
defstruct pool_size: 0,
|
||||||
|
active: 0,
|
||||||
|
idle: 0,
|
||||||
|
memory_used: 0,
|
||||||
|
healthy: true
|
||||||
|
|
||||||
|
@type t :: %__MODULE__{
|
||||||
|
pool_size: non_neg_integer(),
|
||||||
|
active: non_neg_integer(),
|
||||||
|
idle: non_neg_integer(),
|
||||||
|
memory_used: number(),
|
||||||
|
healthy: boolean()
|
||||||
|
}
|
||||||
|
|
||||||
|
@spec system_info() :: t()
|
||||||
|
def system_info do
|
||||||
|
%Healthcheck{
|
||||||
|
memory_used: Float.round(:erlang.memory(:total) / 1024 / 1024, 2)
|
||||||
|
}
|
||||||
|
|> assign_db_info()
|
||||||
|
|> check_health()
|
||||||
|
end
|
||||||
|
|
||||||
|
defp assign_db_info(healthcheck) do
|
||||||
|
database = Application.get_env(:pleroma, Repo)[:database]
|
||||||
|
|
||||||
|
query =
|
||||||
|
"select state, count(pid) from pg_stat_activity where datname = '#{database}' group by state;"
|
||||||
|
|
||||||
|
result = Repo.query!(query)
|
||||||
|
pool_size = Application.get_env(:pleroma, Repo)[:pool_size]
|
||||||
|
|
||||||
|
db_info =
|
||||||
|
Enum.reduce(result.rows, %{active: 0, idle: 0}, fn [state, cnt], states ->
|
||||||
|
if state == "active" do
|
||||||
|
Map.put(states, :active, states.active + cnt)
|
||||||
|
else
|
||||||
|
Map.put(states, :idle, states.idle + cnt)
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
|> Map.put(:pool_size, pool_size)
|
||||||
|
|
||||||
|
Map.merge(healthcheck, db_info)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec check_health(Healthcheck.t()) :: Healthcheck.t()
|
||||||
|
def check_health(%{pool_size: pool_size, active: active} = check)
|
||||||
|
when active >= pool_size do
|
||||||
|
%{check | healthy: false}
|
||||||
|
end
|
||||||
|
|
||||||
|
def check_health(check), do: check
|
||||||
|
end
|
51
lib/mix/tasks/pleroma/database.ex
Normal file
51
lib/mix/tasks/pleroma/database.ex
Normal file
|
@ -0,0 +1,51 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Mix.Tasks.Pleroma.Database do
|
||||||
|
alias Mix.Tasks.Pleroma.Common
|
||||||
|
require Logger
|
||||||
|
use Mix.Task
|
||||||
|
|
||||||
|
@shortdoc "A collection of database related tasks"
|
||||||
|
@moduledoc """
|
||||||
|
A collection of database related tasks
|
||||||
|
|
||||||
|
## Replace embedded objects with their references
|
||||||
|
|
||||||
|
Replaces embedded objects with references to them in the `objects` table. Only needs to be ran once. The reason why this is not a migration is because it could significantly increase the database size after being ran, however after this `VACUUM FULL` will be able to reclaim about 20% (really depends on what is in the database, your mileage may vary) of the db size before the migration.
|
||||||
|
|
||||||
|
mix pleroma.database remove_embedded_objects
|
||||||
|
|
||||||
|
Options:
|
||||||
|
- `--vacuum` - run `VACUUM FULL` after the embedded objects are replaced with their references
|
||||||
|
"""
|
||||||
|
def run(["remove_embedded_objects" | args]) do
|
||||||
|
{options, [], []} =
|
||||||
|
OptionParser.parse(
|
||||||
|
args,
|
||||||
|
strict: [
|
||||||
|
vacuum: :boolean
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
Common.start_pleroma()
|
||||||
|
Logger.info("Removing embedded objects")
|
||||||
|
|
||||||
|
Pleroma.Repo.query!(
|
||||||
|
"update activities set data = jsonb_set(data, '{object}'::text[], data->'object'->'id') where data->'object'->>'id' is not null;",
|
||||||
|
[],
|
||||||
|
timeout: :infinity
|
||||||
|
)
|
||||||
|
|
||||||
|
if Keyword.get(options, :vacuum) do
|
||||||
|
Logger.info("Runnning VACUUM FULL")
|
||||||
|
|
||||||
|
Pleroma.Repo.query!(
|
||||||
|
"vacuum full;",
|
||||||
|
[],
|
||||||
|
timeout: :infinity
|
||||||
|
)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
293
lib/mix/tasks/pleroma/emoji.ex
Normal file
293
lib/mix/tasks/pleroma/emoji.ex
Normal file
|
@ -0,0 +1,293 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Mix.Tasks.Pleroma.Emoji do
|
||||||
|
use Mix.Task
|
||||||
|
|
||||||
|
@shortdoc "Manages emoji packs"
|
||||||
|
@moduledoc """
|
||||||
|
Manages emoji packs
|
||||||
|
|
||||||
|
## ls-packs
|
||||||
|
|
||||||
|
mix pleroma.emoji ls-packs [OPTION...]
|
||||||
|
|
||||||
|
Lists the emoji packs and metadata specified in the manifest.
|
||||||
|
|
||||||
|
### Options
|
||||||
|
|
||||||
|
- `-m, --manifest PATH/URL` - path to a custom manifest, it can
|
||||||
|
either be an URL starting with `http`, in that case the
|
||||||
|
manifest will be fetched from that address, or a local path
|
||||||
|
|
||||||
|
## get-packs
|
||||||
|
|
||||||
|
mix pleroma.emoji get-packs [OPTION...] PACKS
|
||||||
|
|
||||||
|
Fetches, verifies and installs the specified PACKS from the
|
||||||
|
manifest into the `STATIC-DIR/emoji/PACK-NAME`
|
||||||
|
|
||||||
|
### Options
|
||||||
|
|
||||||
|
- `-m, --manifest PATH/URL` - same as ls-packs
|
||||||
|
|
||||||
|
## gen-pack
|
||||||
|
|
||||||
|
mix pleroma.emoji gen-pack PACK-URL
|
||||||
|
|
||||||
|
Creates a new manifest entry and a file list from the specified
|
||||||
|
remote pack file. Currently, only .zip archives are recognized
|
||||||
|
as remote pack files and packs are therefore assumed to be zip
|
||||||
|
archives. This command is intended to run interactively and will
|
||||||
|
first ask you some basic questions about the pack, then download
|
||||||
|
the remote file and generate an SHA256 checksum for it, then
|
||||||
|
generate an emoji file list for you.
|
||||||
|
|
||||||
|
The manifest entry will either be written to a newly created
|
||||||
|
`index.json` file or appended to the existing one, *replacing*
|
||||||
|
the old pack with the same name if it was in the file previously.
|
||||||
|
|
||||||
|
The file list will be written to the file specified previously,
|
||||||
|
*replacing* that file. You _should_ check that the file list doesn't
|
||||||
|
contain anything you don't need in the pack, that is, anything that is
|
||||||
|
not an emoji (the whole pack is downloaded, but only emoji files
|
||||||
|
are extracted).
|
||||||
|
"""
|
||||||
|
|
||||||
|
@default_manifest Pleroma.Config.get!([:emoji, :default_manifest])
|
||||||
|
|
||||||
|
def run(["ls-packs" | args]) do
|
||||||
|
Application.ensure_all_started(:hackney)
|
||||||
|
|
||||||
|
{options, [], []} = parse_global_opts(args)
|
||||||
|
|
||||||
|
manifest =
|
||||||
|
fetch_manifest(if options[:manifest], do: options[:manifest], else: @default_manifest)
|
||||||
|
|
||||||
|
Enum.each(manifest, fn {name, info} ->
|
||||||
|
to_print = [
|
||||||
|
{"Name", name},
|
||||||
|
{"Homepage", info["homepage"]},
|
||||||
|
{"Description", info["description"]},
|
||||||
|
{"License", info["license"]},
|
||||||
|
{"Source", info["src"]}
|
||||||
|
]
|
||||||
|
|
||||||
|
for {param, value} <- to_print do
|
||||||
|
IO.puts(IO.ANSI.format([:bright, param, :normal, ": ", value]))
|
||||||
|
end
|
||||||
|
|
||||||
|
# A newline
|
||||||
|
IO.puts("")
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
def run(["get-packs" | args]) do
|
||||||
|
Application.ensure_all_started(:hackney)
|
||||||
|
|
||||||
|
{options, pack_names, []} = parse_global_opts(args)
|
||||||
|
|
||||||
|
manifest_url = if options[:manifest], do: options[:manifest], else: @default_manifest
|
||||||
|
|
||||||
|
manifest = fetch_manifest(manifest_url)
|
||||||
|
|
||||||
|
for pack_name <- pack_names do
|
||||||
|
if Map.has_key?(manifest, pack_name) do
|
||||||
|
pack = manifest[pack_name]
|
||||||
|
src_url = pack["src"]
|
||||||
|
|
||||||
|
IO.puts(
|
||||||
|
IO.ANSI.format([
|
||||||
|
"Downloading ",
|
||||||
|
:bright,
|
||||||
|
pack_name,
|
||||||
|
:normal,
|
||||||
|
" from ",
|
||||||
|
:underline,
|
||||||
|
src_url
|
||||||
|
])
|
||||||
|
)
|
||||||
|
|
||||||
|
binary_archive = Tesla.get!(src_url).body
|
||||||
|
archive_sha = :crypto.hash(:sha256, binary_archive) |> Base.encode16()
|
||||||
|
|
||||||
|
sha_status_text = ["SHA256 of ", :bright, pack_name, :normal, " source file is ", :bright]
|
||||||
|
|
||||||
|
if archive_sha == String.upcase(pack["src_sha256"]) do
|
||||||
|
IO.puts(IO.ANSI.format(sha_status_text ++ [:green, "OK"]))
|
||||||
|
else
|
||||||
|
IO.puts(IO.ANSI.format(sha_status_text ++ [:red, "BAD"]))
|
||||||
|
|
||||||
|
raise "Bad SHA256 for #{pack_name}"
|
||||||
|
end
|
||||||
|
|
||||||
|
# The url specified in files should be in the same directory
|
||||||
|
files_url = Path.join(Path.dirname(manifest_url), pack["files"])
|
||||||
|
|
||||||
|
IO.puts(
|
||||||
|
IO.ANSI.format([
|
||||||
|
"Fetching the file list for ",
|
||||||
|
:bright,
|
||||||
|
pack_name,
|
||||||
|
:normal,
|
||||||
|
" from ",
|
||||||
|
:underline,
|
||||||
|
files_url
|
||||||
|
])
|
||||||
|
)
|
||||||
|
|
||||||
|
files = Tesla.get!(files_url).body |> Poison.decode!()
|
||||||
|
|
||||||
|
IO.puts(IO.ANSI.format(["Unpacking ", :bright, pack_name]))
|
||||||
|
|
||||||
|
pack_path =
|
||||||
|
Path.join([
|
||||||
|
Pleroma.Config.get!([:instance, :static_dir]),
|
||||||
|
"emoji",
|
||||||
|
pack_name
|
||||||
|
])
|
||||||
|
|
||||||
|
files_to_unzip =
|
||||||
|
Enum.map(
|
||||||
|
files,
|
||||||
|
fn {_, f} -> to_charlist(f) end
|
||||||
|
)
|
||||||
|
|
||||||
|
{:ok, _} =
|
||||||
|
:zip.unzip(binary_archive,
|
||||||
|
cwd: pack_path,
|
||||||
|
file_list: files_to_unzip
|
||||||
|
)
|
||||||
|
|
||||||
|
IO.puts(IO.ANSI.format(["Writing emoji.txt for ", :bright, pack_name]))
|
||||||
|
|
||||||
|
emoji_txt_str =
|
||||||
|
Enum.map(
|
||||||
|
files,
|
||||||
|
fn {shortcode, path} ->
|
||||||
|
emojo_path = Path.join("/emoji/#{pack_name}", path)
|
||||||
|
"#{shortcode}, #{emojo_path}"
|
||||||
|
end
|
||||||
|
)
|
||||||
|
|> Enum.join("\n")
|
||||||
|
|
||||||
|
File.write!(Path.join(pack_path, "emoji.txt"), emoji_txt_str)
|
||||||
|
else
|
||||||
|
IO.puts(IO.ANSI.format([:bright, :red, "No pack named \"#{pack_name}\" found"]))
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def run(["gen-pack", src]) do
|
||||||
|
Application.ensure_all_started(:hackney)
|
||||||
|
|
||||||
|
proposed_name = Path.basename(src) |> Path.rootname()
|
||||||
|
name = String.trim(IO.gets("Pack name [#{proposed_name}]: "))
|
||||||
|
# If there's no name, use the default one
|
||||||
|
name = if String.length(name) > 0, do: name, else: proposed_name
|
||||||
|
|
||||||
|
license = String.trim(IO.gets("License: "))
|
||||||
|
homepage = String.trim(IO.gets("Homepage: "))
|
||||||
|
description = String.trim(IO.gets("Description: "))
|
||||||
|
|
||||||
|
proposed_files_name = "#{name}.json"
|
||||||
|
files_name = String.trim(IO.gets("Save file list to [#{proposed_files_name}]: "))
|
||||||
|
files_name = if String.length(files_name) > 0, do: files_name, else: proposed_files_name
|
||||||
|
|
||||||
|
default_exts = [".png", ".gif"]
|
||||||
|
default_exts_str = Enum.join(default_exts, " ")
|
||||||
|
|
||||||
|
exts =
|
||||||
|
String.trim(
|
||||||
|
IO.gets("Emoji file extensions (separated with spaces) [#{default_exts_str}]: ")
|
||||||
|
)
|
||||||
|
|
||||||
|
exts =
|
||||||
|
if String.length(exts) > 0 do
|
||||||
|
String.split(exts, " ")
|
||||||
|
|> Enum.filter(fn e -> e |> String.trim() |> String.length() > 0 end)
|
||||||
|
else
|
||||||
|
default_exts
|
||||||
|
end
|
||||||
|
|
||||||
|
IO.puts("Downloading the pack and generating SHA256")
|
||||||
|
|
||||||
|
binary_archive = Tesla.get!(src).body
|
||||||
|
archive_sha = :crypto.hash(:sha256, binary_archive) |> Base.encode16()
|
||||||
|
|
||||||
|
IO.puts("SHA256 is #{archive_sha}")
|
||||||
|
|
||||||
|
pack_json = %{
|
||||||
|
name => %{
|
||||||
|
license: license,
|
||||||
|
homepage: homepage,
|
||||||
|
description: description,
|
||||||
|
src: src,
|
||||||
|
src_sha256: archive_sha,
|
||||||
|
files: files_name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
tmp_pack_dir = Path.join(System.tmp_dir!(), "emoji-pack-#{name}")
|
||||||
|
|
||||||
|
{:ok, _} =
|
||||||
|
:zip.unzip(
|
||||||
|
binary_archive,
|
||||||
|
cwd: tmp_pack_dir
|
||||||
|
)
|
||||||
|
|
||||||
|
emoji_map = Pleroma.Emoji.make_shortcode_to_file_map(tmp_pack_dir, exts)
|
||||||
|
|
||||||
|
File.write!(files_name, Poison.encode!(emoji_map, pretty: true))
|
||||||
|
|
||||||
|
IO.puts("""
|
||||||
|
|
||||||
|
#{files_name} has been created and contains the list of all found emojis in the pack.
|
||||||
|
Please review the files in the remove those not needed.
|
||||||
|
""")
|
||||||
|
|
||||||
|
if File.exists?("index.json") do
|
||||||
|
existing_data = File.read!("index.json") |> Poison.decode!()
|
||||||
|
|
||||||
|
File.write!(
|
||||||
|
"index.json",
|
||||||
|
Poison.encode!(
|
||||||
|
Map.merge(
|
||||||
|
existing_data,
|
||||||
|
pack_json
|
||||||
|
),
|
||||||
|
pretty: true
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
IO.puts("index.json file has been update with the #{name} pack")
|
||||||
|
else
|
||||||
|
File.write!("index.json", Poison.encode!(pack_json, pretty: true))
|
||||||
|
|
||||||
|
IO.puts("index.json has been created with the #{name} pack")
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp fetch_manifest(from) do
|
||||||
|
Poison.decode!(
|
||||||
|
if String.starts_with?(from, "http") do
|
||||||
|
Tesla.get!(from).body
|
||||||
|
else
|
||||||
|
File.read!(from)
|
||||||
|
end
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp parse_global_opts(args) do
|
||||||
|
OptionParser.parse(
|
||||||
|
args,
|
||||||
|
strict: [
|
||||||
|
manifest: :string
|
||||||
|
],
|
||||||
|
aliases: [
|
||||||
|
m: :manifest
|
||||||
|
]
|
||||||
|
)
|
||||||
|
end
|
||||||
|
end
|
|
@ -24,10 +24,12 @@ defmodule Mix.Tasks.Pleroma.Instance do
|
||||||
- `--domain DOMAIN` - the domain of your instance
|
- `--domain DOMAIN` - the domain of your instance
|
||||||
- `--instance-name INSTANCE_NAME` - the name of your instance
|
- `--instance-name INSTANCE_NAME` - the name of your instance
|
||||||
- `--admin-email ADMIN_EMAIL` - the email address of the instance admin
|
- `--admin-email ADMIN_EMAIL` - the email address of the instance admin
|
||||||
|
- `--notify-email NOTIFY_EMAIL` - email address for notifications
|
||||||
- `--dbhost HOSTNAME` - the hostname of the PostgreSQL database to use
|
- `--dbhost HOSTNAME` - the hostname of the PostgreSQL database to use
|
||||||
- `--dbname DBNAME` - the name of the database to use
|
- `--dbname DBNAME` - the name of the database to use
|
||||||
- `--dbuser DBUSER` - the user (aka role) to use for the database connection
|
- `--dbuser DBUSER` - the user (aka role) to use for the database connection
|
||||||
- `--dbpass DBPASS` - the password to use for the database connection
|
- `--dbpass DBPASS` - the password to use for the database connection
|
||||||
|
- `--indexable Y/N` - Allow/disallow indexing site by search engines
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def run(["gen" | rest]) do
|
def run(["gen" | rest]) do
|
||||||
|
@ -41,10 +43,12 @@ def run(["gen" | rest]) do
|
||||||
domain: :string,
|
domain: :string,
|
||||||
instance_name: :string,
|
instance_name: :string,
|
||||||
admin_email: :string,
|
admin_email: :string,
|
||||||
|
notify_email: :string,
|
||||||
dbhost: :string,
|
dbhost: :string,
|
||||||
dbname: :string,
|
dbname: :string,
|
||||||
dbuser: :string,
|
dbuser: :string,
|
||||||
dbpass: :string
|
dbpass: :string,
|
||||||
|
indexable: :string
|
||||||
],
|
],
|
||||||
aliases: [
|
aliases: [
|
||||||
o: :output,
|
o: :output,
|
||||||
|
@ -61,7 +65,7 @@ def run(["gen" | rest]) do
|
||||||
will_overwrite = Enum.filter(paths, &File.exists?/1)
|
will_overwrite = Enum.filter(paths, &File.exists?/1)
|
||||||
proceed? = Enum.empty?(will_overwrite) or Keyword.get(options, :force, false)
|
proceed? = Enum.empty?(will_overwrite) or Keyword.get(options, :force, false)
|
||||||
|
|
||||||
unless not proceed? do
|
if proceed? do
|
||||||
[domain, port | _] =
|
[domain, port | _] =
|
||||||
String.split(
|
String.split(
|
||||||
Common.get_option(
|
Common.get_option(
|
||||||
|
@ -81,6 +85,22 @@ def run(["gen" | rest]) do
|
||||||
|
|
||||||
email = Common.get_option(options, :admin_email, "What is your admin email address?")
|
email = Common.get_option(options, :admin_email, "What is your admin email address?")
|
||||||
|
|
||||||
|
notify_email =
|
||||||
|
Common.get_option(
|
||||||
|
options,
|
||||||
|
:notify_email,
|
||||||
|
"What email address do you want to use for sending email notifications?",
|
||||||
|
email
|
||||||
|
)
|
||||||
|
|
||||||
|
indexable =
|
||||||
|
Common.get_option(
|
||||||
|
options,
|
||||||
|
:indexable,
|
||||||
|
"Do you want search engines to index your site? (y/n)",
|
||||||
|
"y"
|
||||||
|
) === "y"
|
||||||
|
|
||||||
dbhost =
|
dbhost =
|
||||||
Common.get_option(options, :dbhost, "What is the hostname of your database?", "localhost")
|
Common.get_option(options, :dbhost, "What is the hostname of your database?", "localhost")
|
||||||
|
|
||||||
|
@ -114,6 +134,7 @@ def run(["gen" | rest]) do
|
||||||
domain: domain,
|
domain: domain,
|
||||||
port: port,
|
port: port,
|
||||||
email: email,
|
email: email,
|
||||||
|
notify_email: notify_email,
|
||||||
name: name,
|
name: name,
|
||||||
dbhost: dbhost,
|
dbhost: dbhost,
|
||||||
dbname: dbname,
|
dbname: dbname,
|
||||||
|
@ -142,6 +163,8 @@ def run(["gen" | rest]) do
|
||||||
Mix.shell().info("Writing #{psql_path}.")
|
Mix.shell().info("Writing #{psql_path}.")
|
||||||
File.write(psql_path, result_psql)
|
File.write(psql_path, result_psql)
|
||||||
|
|
||||||
|
write_robots_txt(indexable)
|
||||||
|
|
||||||
Mix.shell().info(
|
Mix.shell().info(
|
||||||
"\n" <>
|
"\n" <>
|
||||||
"""
|
"""
|
||||||
|
@ -163,4 +186,28 @@ def run(["gen" | rest]) do
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp write_robots_txt(indexable) do
|
||||||
|
robots_txt =
|
||||||
|
EEx.eval_file(
|
||||||
|
Path.expand("robots_txt.eex", __DIR__),
|
||||||
|
indexable: indexable
|
||||||
|
)
|
||||||
|
|
||||||
|
static_dir = Pleroma.Config.get([:instance, :static_dir], "instance/static/")
|
||||||
|
|
||||||
|
unless File.exists?(static_dir) do
|
||||||
|
File.mkdir_p!(static_dir)
|
||||||
|
end
|
||||||
|
|
||||||
|
robots_txt_path = Path.join(static_dir, "robots.txt")
|
||||||
|
|
||||||
|
if File.exists?(robots_txt_path) do
|
||||||
|
File.cp!(robots_txt_path, "#{robots_txt_path}.bak")
|
||||||
|
Mix.shell().info("Backing up existing robots.txt to #{robots_txt_path}.bak")
|
||||||
|
end
|
||||||
|
|
||||||
|
File.write(robots_txt_path, robots_txt)
|
||||||
|
Mix.shell().info("Writing #{robots_txt_path}.")
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
2
lib/mix/tasks/pleroma/robots_txt.eex
Normal file
2
lib/mix/tasks/pleroma/robots_txt.eex
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
User-Agent: *
|
||||||
|
Disallow: <%= if indexable, do: "", else: "/" %>
|
lib/mix/tasks/pleroma/robotstxt.ex (new file, 32 lines)
@@ -0,0 +1,32 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Mix.Tasks.Pleroma.RobotsTxt do
+  use Mix.Task
+
+  @shortdoc "Generate robots.txt"
+  @moduledoc """
+  Generates robots.txt
+
+  ## Overwrite robots.txt to disallow all
+
+      mix pleroma.robots_txt disallow_all
+
+  This will write a robots.txt that will hide all paths on your instance
+  from search engines and other robots that obey robots.txt
+
+  """
+  def run(["disallow_all"]) do
+    static_dir = Pleroma.Config.get([:instance, :static_dir], "instance/static/")
+
+    if !File.exists?(static_dir) do
+      File.mkdir_p!(static_dir)
+    end
+
+    robots_txt_path = Path.join(static_dir, "robots.txt")
+    robots_txt_content = "User-Agent: *\nDisallow: /\n"
+
+    File.write!(robots_txt_path, robots_txt_content, [:write])
+  end
+end

@@ -13,6 +13,7 @@ config :pleroma, Pleroma.Web.Endpoint,
 config :pleroma, :instance,
   name: "<%= name %>",
   email: "<%= email %>",
+  notify_email: "<%= notify_email %>",
   limit: 5000,
   registrations_open: true,
   dedupe_media: false

@@ -75,4 +76,3 @@ config :web_push_encryption, :vapid_details,
 #   storage_url: "https://swift-endpoint.prodider.com/v1/AUTH_<tenant>/<container>",
 #   object_url: "https://cdn-endpoint.provider.com/<container>"
 #
-

@@ -6,8 +6,8 @@ defmodule Mix.Tasks.Pleroma.User do
   use Mix.Task
   import Ecto.Changeset
   alias Mix.Tasks.Pleroma.Common
-  alias Pleroma.Repo
   alias Pleroma.User
+  alias Pleroma.UserInviteToken

   @shortdoc "Manages Pleroma users"
   @moduledoc """

@@ -23,16 +23,32 @@ defmodule Mix.Tasks.Pleroma.User do
   - `--password PASSWORD` - the user's password
   - `--moderator`/`--no-moderator` - whether the user is a moderator
   - `--admin`/`--no-admin` - whether the user is an admin
   - `-y`, `--assume-yes`/`--no-assume-yes` - whether to assume yes to all questions

   ## Generate an invite link.

-      mix pleroma.user invite
+      mix pleroma.user invite [OPTION...]
+
+  Options:
+  - `--expires_at DATE` - last day on which token is active (e.g. "2019-04-05")
+  - `--max_use NUMBER` - maximum numbers of token uses
+
+  ## List generated invites
+
+      mix pleroma.user invites
+
+  ## Revoke invite
+
+      mix pleroma.user revoke_invite TOKEN OR TOKEN_ID
+
   ## Delete the user's account.

       mix pleroma.user rm NICKNAME
+
+  ## Delete the user's activities.
+
+      mix pleroma.user delete_activities NICKNAME

   ## Deactivate or activate the user's account.

       mix pleroma.user toggle_activated NICKNAME
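The invite-related tasks documented above can be exercised like this (illustrative invocations only; dates, counts, and the token placeholder are made up):

    mix pleroma.user invite --expires_at 2019-04-05 --max_use 5
    mix pleroma.user invites
    mix pleroma.user revoke_invite TOKEN_OR_TOKEN_ID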
@@ -146,7 +162,7 @@ def run(["new", nickname, email | rest]) do
   def run(["rm", nickname]) do
     Common.start_pleroma()

-    with %User{local: true} = user <- User.get_by_nickname(nickname) do
+    with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
       User.delete(user)
       Mix.shell().info("User #{nickname} deleted.")
     else

@@ -158,7 +174,7 @@ def run(["rm", nickname]) do
   def run(["toggle_activated", nickname]) do
     Common.start_pleroma()

-    with %User{} = user <- User.get_by_nickname(nickname) do
+    with %User{} = user <- User.get_cached_by_nickname(nickname) do
       {:ok, user} = User.deactivate(user, !user.info.deactivated)

       Mix.shell().info(

@@ -173,7 +189,7 @@ def run(["toggle_activated", nickname]) do
   def run(["reset_password", nickname]) do
     Common.start_pleroma()

-    with %User{local: true} = user <- User.get_by_nickname(nickname),
+    with %User{local: true} = user <- User.get_cached_by_nickname(nickname),
         {:ok, token} <- Pleroma.PasswordResetToken.create_token(user) do
       Mix.shell().info("Generated password reset token for #{user.nickname}")

@@ -195,14 +211,14 @@ def run(["reset_password", nickname]) do
   def run(["unsubscribe", nickname]) do
     Common.start_pleroma()

-    with %User{} = user <- User.get_by_nickname(nickname) do
+    with %User{} = user <- User.get_cached_by_nickname(nickname) do
       Mix.shell().info("Deactivating #{user.nickname}")
       User.deactivate(user)

       {:ok, friends} = User.get_friends(user)

       Enum.each(friends, fn friend ->
-        user = Repo.get(User, user.id)
+        user = User.get_cached_by_id(user.id)

         Mix.shell().info("Unsubscribing #{friend.nickname} from #{user.nickname}")
         User.unfollow(user, friend)

@@ -210,7 +226,7 @@ def run(["unsubscribe", nickname]) do
       :timer.sleep(500)

-      user = Repo.get(User, user.id)
+      user = User.get_cached_by_id(user.id)

       if Enum.empty?(user.following) do
         Mix.shell().info("Successfully unsubscribed all followers from #{user.nickname}")

@@ -234,7 +250,7 @@ def run(["set", nickname | rest]) do
        ]
      )

-    with %User{local: true} = user <- User.get_by_nickname(nickname) do
+    with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
      user =
        case Keyword.get(options, :moderator) do
          nil -> user

@@ -261,7 +277,7 @@ def run(["set", nickname | rest]) do
   def run(["tag", nickname | tags]) do
     Common.start_pleroma()

-    with %User{} = user <- User.get_by_nickname(nickname) do
+    with %User{} = user <- User.get_cached_by_nickname(nickname) do
       user = user |> User.tag(tags)

       Mix.shell().info("Tags of #{user.nickname}: #{inspect(tags)}")

@@ -274,7 +290,7 @@ def run(["tag", nickname | tags]) do
   def run(["untag", nickname | tags]) do
     Common.start_pleroma()

-    with %User{} = user <- User.get_by_nickname(nickname) do
+    with %User{} = user <- User.get_cached_by_nickname(nickname) do
       user = user |> User.untag(tags)

       Mix.shell().info("Tags of #{user.nickname}: #{inspect(tags)}")

@@ -284,23 +300,91 @@ def run(["untag", nickname | tags]) do
     end
   end

-  def run(["invite"]) do
+  def run(["invite" | rest]) do
+    {options, [], []} =
+      OptionParser.parse(rest,
+        strict: [
+          expires_at: :string,
+          max_use: :integer
+        ]
+      )
+
+    options =
+      options
+      |> Keyword.update(:expires_at, {:ok, nil}, fn
+        nil -> {:ok, nil}
+        val -> Date.from_iso8601(val)
+      end)
+      |> Enum.into(%{})
+
     Common.start_pleroma()

-    with {:ok, token} <- Pleroma.UserInviteToken.create_token() do
-      Mix.shell().info("Generated user invite token")
+    with {:ok, val} <- options[:expires_at],
+         options = Map.put(options, :expires_at, val),
+         {:ok, invite} <- UserInviteToken.create_invite(options) do
+      Mix.shell().info(
+        "Generated user invite token " <> String.replace(invite.invite_type, "_", " ")
+      )

       url =
         Pleroma.Web.Router.Helpers.redirect_url(
           Pleroma.Web.Endpoint,
           :registration_page,
-          token.token
+          invite.token
         )

       IO.puts(url)
+    else
+      error ->
+        Mix.shell().error("Could not create invite token: #{inspect(error)}")
+    end
+  end
+
+  def run(["invites"]) do
+    Common.start_pleroma()
+
+    Mix.shell().info("Invites list:")
+
+    UserInviteToken.list_invites()
+    |> Enum.each(fn invite ->
+      expire_info =
+        with expires_at when not is_nil(expires_at) <- invite.expires_at do
+          " | Expires at: #{Date.to_string(expires_at)}"
+        end
+
+      using_info =
+        with max_use when not is_nil(max_use) <- invite.max_use do
+          " | Max use: #{max_use} Left use: #{max_use - invite.uses}"
+        end
+
+      Mix.shell().info(
+        "ID: #{invite.id} | Token: #{invite.token} | Token type: #{invite.invite_type} | Used: #{
+          invite.used
+        }#{expire_info}#{using_info}"
+      )
+    end)
+  end
+
+  def run(["revoke_invite", token]) do
+    Common.start_pleroma()
+
+    with {:ok, invite} <- UserInviteToken.find_by_token(token),
+         {:ok, _} <- UserInviteToken.update_invite(invite, %{used: true}) do
+      Mix.shell().info("Invite for token #{token} was revoked.")
+    else
+      _ -> Mix.shell().error("No invite found with token #{token}")
+    end
+  end
+
+  def run(["delete_activities", nickname]) do
+    Common.start_pleroma()
+
+    with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
+      User.delete_user_activities(user)
+      Mix.shell().info("User #{nickname} statuses deleted.")
     else
       _ ->
-        Mix.shell().error("Could not create invite token.")
+        Mix.shell().error("No local user #{nickname}")
     end
   end

@@ -39,7 +39,7 @@ def used_changeset(struct) do

   def reset_password(token, data) do
     with %{used: false} = token <- Repo.get_by(PasswordResetToken, %{token: token}),
-         %User{} = user <- Repo.get(User, token.user_id),
+         %User{} = user <- User.get_cached_by_id(token.user_id),
          {:ok, _user} <- User.reset_password(user, data),
          {:ok, token} <- Repo.update(used_changeset(token)) do
       {:ok, token}

@@ -7,8 +7,10 @@ defmodule Pleroma.Activity do

   alias Pleroma.Activity
   alias Pleroma.Notification
+  alias Pleroma.Object
   alias Pleroma.Repo

+  import Ecto.Changeset
   import Ecto.Query

   @type t :: %__MODULE__{}

@@ -22,16 +24,53 @@ defmodule Pleroma.Activity do
     "Like" => "favourite"
   }

+  @mastodon_to_ap_notification_types for {k, v} <- @mastodon_notification_types,
+                                         into: %{},
+                                         do: {v, k}
+
   schema "activities" do
     field(:data, :map)
     field(:local, :boolean, default: true)
     field(:actor, :string)
-    field(:recipients, {:array, :string})
+    field(:recipients, {:array, :string}, default: [])
     has_many(:notifications, Notification, on_delete: :delete_all)

+    # Attention: this is a fake relation, don't try to preload it blindly and expect it to work!
+    # The foreign key is embedded in a jsonb field.
+    #
+    # To use it, you probably want to do an inner join and a preload:
+    #
+    # ```
+    # |> join(:inner, [activity], o in Object,
+    #      on: fragment("(?->>'id') = COALESCE((?)->'object'->> 'id', (?)->>'object')",
+    #        o.data, activity.data, activity.data))
+    # |> preload([activity, object], [object: object])
+    # ```
+    #
+    # As a convenience, Activity.with_preloaded_object() sets up an inner join and preload for the
+    # typical case.
+    has_one(:object, Object, on_delete: :nothing, foreign_key: :id)
+
     timestamps()
   end

+  def with_preloaded_object(query) do
+    query
+    |> join(
+      :inner,
+      [activity],
+      o in Object,
+      on:
+        fragment(
+          "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
+          o.data,
+          activity.data,
+          activity.data
+        )
+    )
+    |> preload([activity, object], object: object)
+  end
+
   def get_by_ap_id(ap_id) do
     Repo.one(
       from(
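As a side note outside the patch itself: the comment above describes the "fake" object relation, and `with_preloaded_object/1` is the convenience it mentions. A minimal usage sketch (the query pipeline and `some_activity_id` are illustrative, not taken from the patch):

```elixir
import Ecto.Query

# Build a query, let the helper add the inner join + preload on the JSON-embedded
# object id, then read the preloaded object off the result.
activity =
  Pleroma.Activity
  |> where([a], a.id == ^some_activity_id)
  |> Pleroma.Activity.with_preloaded_object()
  |> Pleroma.Repo.one()

activity.object.data["content"]
```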
@@ -41,10 +80,51 @@ def get_by_ap_id(ap_id) do
     )
   end

+  def change(struct, params \\ %{}) do
+    struct
+    |> cast(params, [:data])
+    |> validate_required([:data])
+    |> unique_constraint(:ap_id, name: :activities_unique_apid_index)
+  end
+
+  def get_by_ap_id_with_object(ap_id) do
+    Repo.one(
+      from(
+        activity in Activity,
+        where: fragment("(?)->>'id' = ?", activity.data, ^to_string(ap_id)),
+        left_join: o in Object,
+        on:
+          fragment(
+            "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
+            o.data,
+            activity.data,
+            activity.data
+          ),
+        preload: [object: o]
+      )
+    )
+  end
+
   def get_by_id(id) do
     Repo.get(Activity, id)
   end

+  def get_by_id_with_object(id) do
+    from(activity in Activity,
+      where: activity.id == ^id,
+      inner_join: o in Object,
+      on:
+        fragment(
+          "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
+          o.data,
+          activity.data,
+          activity.data
+        ),
+      preload: [object: o]
+    )
+    |> Repo.one()
+  end
+
   def by_object_ap_id(ap_id) do
     from(
       activity in Activity,

@@ -72,7 +152,7 @@ def create_by_object_ap_id(ap_ids) when is_list(ap_ids) do
     )
   end

-  def create_by_object_ap_id(ap_id) do
+  def create_by_object_ap_id(ap_id) when is_binary(ap_id) do
     from(
       activity in Activity,
       where:

@@ -86,6 +166,8 @@ def create_by_object_ap_id(ap_id) do
     )
   end

+  def create_by_object_ap_id(_), do: nil
+
   def get_all_create_by_object_ap_id(ap_id) do
     Repo.all(create_by_object_ap_id(ap_id))
   end

@@ -97,21 +179,60 @@ def get_create_by_object_ap_id(ap_id) when is_binary(ap_id) do

   def get_create_by_object_ap_id(_), do: nil

-  def normalize(obj) when is_map(obj), do: Activity.get_by_ap_id(obj["id"])
-  def normalize(ap_id) when is_binary(ap_id), do: Activity.get_by_ap_id(ap_id)
-  def normalize(_), do: nil
-
-  def get_in_reply_to_activity(%Activity{data: %{"object" => %{"inReplyTo" => ap_id}}}) do
-    get_create_by_object_ap_id(ap_id)
-  end
-
-  def get_in_reply_to_activity(_), do: nil
+  def create_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do
+    from(
+      activity in Activity,
+      where:
+        fragment(
+          "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
+          activity.data,
+          activity.data,
+          ^to_string(ap_id)
+        ),
+      where: fragment("(?)->>'type' = 'Create'", activity.data),
+      inner_join: o in Object,
+      on:
+        fragment(
+          "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
+          o.data,
+          activity.data,
+          activity.data
+        ),
+      preload: [object: o]
+    )
+  end
+
+  def create_by_object_ap_id_with_object(_), do: nil
+
+  def get_create_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do
+    ap_id
+    |> create_by_object_ap_id_with_object()
+    |> Repo.one()
+  end
+
+  def get_create_by_object_ap_id_with_object(_), do: nil
+
+  defp get_in_reply_to_activity_from_object(%Object{data: %{"inReplyTo" => ap_id}}) do
+    get_create_by_object_ap_id_with_object(ap_id)
+  end
+
+  defp get_in_reply_to_activity_from_object(_), do: nil
+
+  def get_in_reply_to_activity(%Activity{data: %{"object" => object}}) do
+    get_in_reply_to_activity_from_object(Object.normalize(object))
+  end
+
+  def normalize(obj) when is_map(obj), do: get_by_ap_id_with_object(obj["id"])
+  def normalize(ap_id) when is_binary(ap_id), do: get_by_ap_id_with_object(ap_id)
+  def normalize(_), do: nil

   def delete_by_ap_id(id) when is_binary(id) do
     by_object_ap_id(id)
-    |> Repo.delete_all(returning: true)
+    |> select([u], u)
+    |> Repo.delete_all()
     |> elem(1)
     |> Enum.find(fn
+      %{data: %{"type" => "Create", "object" => ap_id}} when is_binary(ap_id) -> ap_id == id
       %{data: %{"type" => "Create", "object" => %{"id" => ap_id}}} -> ap_id == id
       _ -> nil
     end)

@@ -126,6 +247,10 @@ def mastodon_notification_type(%Activity{data: %{"type" => unquote(ap_type)}}),

   def mastodon_notification_type(%Activity{}), do: nil

+  def from_mastodon_notification_type(type) do
+    Map.get(@mastodon_to_ap_notification_types, type)
+  end
+
   def all_by_actor_and_id(actor, status_ids \\ [])
   def all_by_actor_and_id(_actor, []), do: []

@@ -25,6 +25,7 @@ def start(_type, _args) do
     import Cachex.Spec

     Pleroma.Config.DeprecationWarnings.warn()
+    setup_instrumenters()

     # Define workers and child supervisors to be supervised
     children =

@@ -103,15 +104,15 @@ def start(_type, _args) do
        ],
        id: :cachex_idem
      ),
-      worker(Pleroma.FlakeId, [])
+      worker(Pleroma.FlakeId, []),
+      worker(Pleroma.ScheduledActivityWorker, [])
    ] ++
      hackney_pool_children() ++
      [
        worker(Pleroma.Web.Federator.RetryQueue, []),
        worker(Pleroma.Stats, []),
-        worker(Pleroma.Web.Push, []),
-        worker(Pleroma.Jobs, []),
-        worker(Task, [&Pleroma.Web.Federator.init/0], restart: :temporary)
+        worker(Task, [&Pleroma.Web.Push.init/0], restart: :temporary, id: :web_push_init),
+        worker(Task, [&Pleroma.Web.Federator.init/0], restart: :temporary, id: :federator_init)
      ] ++
      streamer_child() ++
      chat_child() ++

@@ -127,6 +128,24 @@ def start(_type, _args) do
     Supervisor.start_link(children, opts)
   end

+  defp setup_instrumenters do
+    require Prometheus.Registry
+
+    :ok =
+      :telemetry.attach(
+        "prometheus-ecto",
+        [:pleroma, :repo, :query],
+        &Pleroma.Repo.Instrumenter.handle_event/4,
+        %{}
+      )
+
+    Prometheus.Registry.register_collector(:prometheus_process_collector)
+    Pleroma.Web.Endpoint.MetricsExporter.setup()
+    Pleroma.Web.Endpoint.PipelineInstrumenter.setup()
+    Pleroma.Web.Endpoint.Instrumenter.setup()
+    Pleroma.Repo.Instrumenter.setup()
+  end
+
   def enabled_hackney_pools do
     [:media] ++
       if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do

lib/pleroma/bookmark.ex (new file, 60 lines)
@@ -0,0 +1,60 @@
+defmodule Pleroma.Bookmark do
+  use Ecto.Schema
+
+  import Ecto.Changeset
+  import Ecto.Query
+
+  alias Pleroma.Activity
+  alias Pleroma.Bookmark
+  alias Pleroma.FlakeId
+  alias Pleroma.Repo
+  alias Pleroma.User
+
+  @type t :: %__MODULE__{}
+
+  schema "bookmarks" do
+    belongs_to(:user, User, type: FlakeId)
+    belongs_to(:activity, Activity, type: FlakeId)
+
+    timestamps()
+  end
+
+  @spec create(FlakeId.t(), FlakeId.t()) :: {:ok, Bookmark.t()} | {:error, Changeset.t()}
+  def create(user_id, activity_id) do
+    attrs = %{
+      user_id: user_id,
+      activity_id: activity_id
+    }
+
+    %Bookmark{}
+    |> cast(attrs, [:user_id, :activity_id])
+    |> validate_required([:user_id, :activity_id])
+    |> unique_constraint(:activity_id, name: :bookmarks_user_id_activity_id_index)
+    |> Repo.insert()
+  end
+
+  @spec for_user_query(FlakeId.t()) :: Ecto.Query.t()
+  def for_user_query(user_id) do
+    Bookmark
+    |> where(user_id: ^user_id)
+    |> join(:inner, [b], activity in assoc(b, :activity))
+    |> preload([b, a], activity: a)
+  end
+
+  def get(user_id, activity_id) do
+    Bookmark
+    |> where(user_id: ^user_id)
+    |> where(activity_id: ^activity_id)
+    |> Repo.one()
+  end
+
+  @spec destroy(FlakeId.t(), FlakeId.t()) :: {:ok, Bookmark.t()} | {:error, Changeset.t()}
+  def destroy(user_id, activity_id) do
+    from(b in Bookmark,
+      where: b.user_id == ^user_id,
+      where: b.activity_id == ^activity_id
+    )
+    |> Repo.one()
+    |> Repo.delete()
+  end
+end
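A minimal usage sketch for the new bookmarks schema (the `user` and `activity` values are placeholders, not data from the patch):

```elixir
# Bookmark an activity for a user, look it up, then remove it again.
{:ok, _bookmark} = Pleroma.Bookmark.create(user.id, activity.id)
%Pleroma.Bookmark{} = Pleroma.Bookmark.get(user.id, activity.id)
{:ok, _deleted} = Pleroma.Bookmark.destroy(user.id, activity.id)
```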
@@ -57,4 +57,8 @@ def delete([parent_key | keys]) do
   def delete(key) do
     Application.delete_env(:pleroma, key)
   end
+
+  def oauth_consumer_strategies, do: get([:auth, :oauth_consumer_strategies], [])
+
+  def oauth_consumer_enabled?, do: oauth_consumer_strategies() != []
 end

@@ -2,7 +2,7 @@
 # Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.AdminEmail do
+defmodule Pleroma.Emails.AdminEmail do
   @moduledoc "Admin emails"

   import Swoosh.Email

@@ -11,7 +11,10 @@ defmodule Pleroma.AdminEmail do

   defp instance_config, do: Pleroma.Config.get(:instance)
   defp instance_name, do: instance_config()[:name]
-  defp instance_email, do: instance_config()[:email]
+
+  defp instance_notify_email do
+    Keyword.get(instance_config(), :notify_email, instance_config()[:email])
+  end

   defp user_url(user) do
     Helpers.o_status_url(Pleroma.Web.Endpoint, :feed_redirect, user.nickname)

@@ -29,9 +32,13 @@ def report(to, reporter, account, statuses, comment) do
     if length(statuses) > 0 do
       statuses_list_html =
         statuses
-        |> Enum.map(fn %{id: id} ->
-          status_url = Helpers.o_status_url(Pleroma.Web.Endpoint, :notice, id)
-          "<li><a href=\"#{status_url}\">#{status_url}</li>"
+        |> Enum.map(fn
+          %{id: id} ->
+            status_url = Helpers.o_status_url(Pleroma.Web.Endpoint, :notice, id)
+            "<li><a href=\"#{status_url}\">#{status_url}</li>"
+
+          id when is_binary(id) ->
+            "<li><a href=\"#{id}\">#{id}</li>"
         end)
         |> Enum.join("\n")

@@ -55,7 +62,7 @@ def report(to, reporter, account, statuses, comment) do

     new()
     |> to({to.name, to.email})
-    |> from({instance_name(), instance_email()})
+    |> from({instance_name(), instance_notify_email()})
     |> reply_to({reporter.name, reporter.email})
     |> subject("#{instance_name()} Report")
     |> html_body(html_body)

@@ -2,11 +2,11 @@
 # Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.Mailer do
+defmodule Pleroma.Emails.Mailer do
   use Swoosh.Mailer, otp_app: :pleroma

   def deliver_async(email, config \\ []) do
-    Pleroma.Jobs.enqueue(:mailer, __MODULE__, [:deliver_async, email, config])
+    PleromaJobQueue.enqueue(:mailer, __MODULE__, [:deliver_async, email, config])
   end

   def perform(:deliver_async, email, config), do: deliver(email, config)

@@ -2,7 +2,7 @@
 # Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.UserEmail do
+defmodule Pleroma.Emails.UserEmail do
   @moduledoc "User emails"

   import Swoosh.Email

@@ -15,7 +15,8 @@ defp instance_config, do: Pleroma.Config.get(:instance)
   defp instance_name, do: instance_config()[:name]

   defp sender do
-    {instance_name(), instance_config()[:email]}
+    email = Keyword.get(instance_config(), :notify_email, instance_config()[:email])
+    {instance_name(), email}
   end

   defp recipient(email, nil), do: email

@@ -6,15 +6,23 @@ defmodule Pleroma.Emoji do
   @moduledoc """
   The emojis are loaded from:

-  * the built-in Finmojis (if enabled in configuration),
+  * emoji packs in INSTANCE-DIR/emoji
   * the files: `config/emoji.txt` and `config/custom_emoji.txt`
-  * glob paths
+  * glob paths, nested folder is used as tag name for grouping e.g. priv/static/emoji/custom/nested_folder

   This GenServer stores in an ETS table the list of the loaded emojis, and also allows to reload the list at runtime.
   """
   use GenServer

+  require Logger
+
+  @type pattern :: Regex.t() | module() | String.t()
+  @type patterns :: pattern() | [pattern()]
+  @type group_patterns :: keyword(patterns())
+
   @ets __MODULE__.Ets
   @ets_options [:ordered_set, :protected, :named_table, {:read_concurrency, true}]
+  @groups Application.get_env(:pleroma, :emoji)[:groups]

   @doc false
   def start_link do
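To make the new pack handling described in the moduledoc concrete: each line of a pack's `emoji.txt` is parsed as comma-separated `shortcode,file[,tags...]` (see `load_from_file_stream/1` further down), and packs without an `emoji.txt` fall back to treating every `.png` in the pack directory as an emoji. A hypothetical pack under the instance static dir could look like:

    instance/static/emoji/blobs/emoji.txt
    instance/static/emoji/blobs/blobcat.png

    # emoji.txt
    blobcat,/emoji/blobs/blobcat.png,cats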
@@ -73,91 +81,94 @@ def code_change(_old_vsn, state, _extra) do
   end

   defp load do
+    emoji_dir_path =
+      Path.join(
+        Pleroma.Config.get!([:instance, :static_dir]),
+        "emoji"
+      )
+
+    case File.ls(emoji_dir_path) do
+      {:error, :enoent} ->
+        # The custom emoji directory doesn't exist,
+        # don't do anything
+        nil
+
+      {:error, e} ->
+        # There was some other error
+        Logger.error("Could not access the custom emoji directory #{emoji_dir_path}: #{e}")
+
+      {:ok, packs} ->
+        # Print the packs we've found
+        Logger.info("Found emoji packs: #{Enum.join(packs, ", ")}")
+
+        emojis =
+          Enum.flat_map(
+            packs,
+            fn pack -> load_pack(Path.join(emoji_dir_path, pack)) end
+          )
+
+        true = :ets.insert(@ets, emojis)
+    end
+
+    # Compat thing for old custom emoji handling & default emoji,
+    # it should run even if there are no emoji packs
+    shortcode_globs = Application.get_env(:pleroma, :emoji)[:shortcode_globs] || []
+
     emojis =
-      (load_finmoji(Keyword.get(Application.get_env(:pleroma, :instance), :finmoji_enabled)) ++
-         load_from_file("config/emoji.txt") ++
+      (load_from_file("config/emoji.txt") ++
          load_from_file("config/custom_emoji.txt") ++
-         load_from_globs(
-           Keyword.get(Application.get_env(:pleroma, :emoji, []), :shortcode_globs, [])
-         ))
+         load_from_globs(shortcode_globs))
       |> Enum.reject(fn value -> value == nil end)

     true = :ets.insert(@ets, emojis)

     :ok
   end

-  @finmoji [
-    "a_trusted_friend",
-    "alandislands",
-    "association",
-    "auroraborealis",
-    "baby_in_a_box",
-    "bear",
-    "black_gold",
-    "christmasparty",
-    "crosscountryskiing",
-    "cupofcoffee",
-    "education",
-    "fashionista_finns",
-    "finnishlove",
-    "flag",
-    "forest",
-    "four_seasons_of_bbq",
-    "girlpower",
-    "handshake",
-    "happiness",
-    "headbanger",
-    "icebreaker",
-    "iceman",
-    "joulutorttu",
-    "kaamos",
-    "kalsarikannit_f",
-    "kalsarikannit_m",
-    "karjalanpiirakka",
-    "kicksled",
-    "kokko",
-    "lavatanssit",
-    "losthopes_f",
-    "losthopes_m",
-    "mattinykanen",
-    "meanwhileinfinland",
-    "moominmamma",
-    "nordicfamily",
-    "out_of_office",
-    "peacemaker",
-    "perkele",
-    "pesapallo",
-    "polarbear",
-    "pusa_hispida_saimensis",
-    "reindeer",
-    "sami",
-    "sauna_f",
-    "sauna_m",
-    "sauna_whisk",
-    "sisu",
-    "stuck",
-    "suomimainittu",
-    "superfood",
-    "swan",
-    "the_cap",
-    "the_conductor",
-    "the_king",
-    "the_voice",
-    "theoriginalsanta",
-    "tomoffinland",
-    "torillatavataan",
-    "unbreakable",
-    "waiting",
-    "white_nights",
-    "woollysocks"
-  ]
-
-  defp load_finmoji(true) do
-    Enum.map(@finmoji, fn finmoji ->
-      {finmoji, "/finmoji/128px/#{finmoji}-128.png"}
-    end)
-  end
-
-  defp load_finmoji(_), do: []
+  defp load_pack(pack_dir) do
+    pack_name = Path.basename(pack_dir)
+
+    emoji_txt = Path.join(pack_dir, "emoji.txt")
+
+    if File.exists?(emoji_txt) do
+      load_from_file(emoji_txt)
+    else
+      Logger.info(
+        "No emoji.txt found for pack \"#{pack_name}\", assuming all .png files are emoji"
+      )
+
+      make_shortcode_to_file_map(pack_dir, [".png"])
+      |> Enum.map(fn {shortcode, rel_file} ->
+        filename = Path.join("/emoji/#{pack_name}", rel_file)
+
+        {shortcode, filename, [to_string(match_extra(@groups, filename))]}
+      end)
+    end
+  end
+
+  def make_shortcode_to_file_map(pack_dir, exts) do
+    find_all_emoji(pack_dir, exts)
+    |> Enum.map(&Path.relative_to(&1, pack_dir))
+    |> Enum.map(fn f -> {f |> Path.basename() |> Path.rootname(), f} end)
+    |> Enum.into(%{})
+  end
+
+  def find_all_emoji(dir, exts) do
+    Enum.reduce(
+      File.ls!(dir),
+      [],
+      fn f, acc ->
+        filepath = Path.join(dir, f)
+
+        if File.dir?(filepath) do
+          acc ++ find_all_emoji(filepath, exts)
+        else
+          acc ++ [filepath]
+        end
+      end
+    )
+    |> Enum.filter(fn f -> Path.extname(f) in exts end)
+  end

   defp load_from_file(file) do
     if File.exists?(file) do

@@ -172,8 +183,14 @@ defp load_from_file_stream(stream) do
     |> Stream.map(&String.trim/1)
     |> Stream.map(fn line ->
       case String.split(line, ~r/,\s*/) do
-        [name, file] -> {name, file}
-        _ -> nil
+        [name, file] ->
+          {name, file, [to_string(match_extra(@groups, file))]}
+
+        [name, file | tags] ->
+          {name, file, tags}
+
+        _ ->
+          nil
       end
     end)
     |> Enum.to_list()

@@ -190,9 +207,40 @@ defp load_from_globs(globs) do
     |> Enum.concat()

     Enum.map(paths, fn path ->
+      tag = match_extra(@groups, Path.join("/", Path.relative_to(path, static_path)))
       shortcode = Path.basename(path, Path.extname(path))
       external_path = Path.join("/", Path.relative_to(path, static_path))
-      {shortcode, external_path}
+      {shortcode, external_path, [to_string(tag)]}
     end)
   end
+
+  @doc """
+  Finds a matching group for the given emoji filename
+  """
+  @spec match_extra(group_patterns(), String.t()) :: atom() | nil
+  def match_extra(group_patterns, filename) do
+    match_group_patterns(group_patterns, fn pattern ->
+      case pattern do
+        %Regex{} = regex -> Regex.match?(regex, filename)
+        string when is_binary(string) -> filename == string
+      end
+    end)
+  end
+
+  defp match_group_patterns(group_patterns, matcher) do
+    Enum.find_value(group_patterns, fn {group, patterns} ->
+      patterns =
+        patterns
+        |> List.wrap()
+        |> Enum.map(fn pattern ->
+          if String.contains?(pattern, "*") do
+            ~r(#{String.replace(pattern, "*", ".*")})
+          else
+            pattern
+          end
+        end)
+
+      Enum.any?(patterns, matcher) && group
+    end)
+  end
 end
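The `@groups` patterns that `match_extra/2` consults are read from the `:emoji` application config, which this patch does not show; the shape below is therefore only an illustrative sketch (group names and globs invented): a keyword list mapping a group name to one exact path or a list of `*` globs.

```elixir
# Hypothetical config entry, consumed via Application.get_env(:pleroma, :emoji)[:groups].
# "*" is expanded to ".*" and compiled to a Regex by match_group_patterns/2.
config :pleroma, :emoji,
  groups: [
    Custom: ["/emoji/*.png", "/emoji/custom/*.png"],
    Finmoji: "/finmoji/128px/*-128.png"
  ]
```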
@@ -46,7 +46,7 @@ def from_string(<<_::integer-size(128)>> = flake), do: flake

   def from_string(string) when is_binary(string) and byte_size(string) < 18 do
     case Integer.parse(string) do
-      {id, _} -> <<0::integer-size(64), id::integer-size(64)>>
+      {id, ""} -> <<0::integer-size(64), id::integer-size(64)>>
       _ -> nil
     end
   end
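The tightened pattern above matters because `Integer.parse/1` happily returns a partial match; a quick illustration (values invented):

```elixir
Integer.parse("42")     #=> {42, ""}    - still converted to a flake
Integer.parse("42abc")  #=> {42, "abc"} - previously accepted, now falls through to nil
```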
@@ -8,20 +8,32 @@ defmodule Pleroma.Formatter do
   alias Pleroma.User
   alias Pleroma.Web.MediaProxy

+  @safe_mention_regex ~r/^(\s*(?<mentions>@.+?\s+)+)(?<rest>.*)/
+  @link_regex ~r"((?:http(s)?:\/\/)?[\w.-]+(?:\.[\w\.-]+)+[\w\-\._~%:/?#[\]@!\$&'\(\)\*\+,;=.]+)|[0-9a-z+\-\.]+:[0-9a-z$-_.+!*'(),]+"ui
   @markdown_characters_regex ~r/(`|\*|_|{|}|[|]|\(|\)|#|\+|-|\.|!)/
-  @link_regex ~r{((?:http(s)?:\/\/)?[\w.-]+(?:\.[\w\.-]+)+[\w\-\._~%:/?#[\]@!\$&'\(\)\*\+,;=.]+)|[0-9a-z+\-\.]+:[0-9a-z$-_.+!*'(),]+}ui
-  # credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength

   @auto_linker_config hashtag: true,
                       hashtag_handler: &Pleroma.Formatter.hashtag_handler/4,
                       mention: true,
                       mention_handler: &Pleroma.Formatter.mention_handler/4

+  def escape_mention_handler("@" <> nickname = mention, buffer, _, _) do
+    case User.get_cached_by_nickname(nickname) do
+      %User{} ->
+        # escape markdown characters with `\\`
+        # (we don't want something like @user__name to be parsed by markdown)
+        String.replace(mention, @markdown_characters_regex, "\\\\\\1")
+
+      _ ->
+        buffer
+    end
+  end
+
   def mention_handler("@" <> nickname, buffer, opts, acc) do
     case User.get_cached_by_nickname(nickname) do
       %User{id: id} = user ->
         ap_id = get_ap_id(user)
-        nickname_text = get_nickname_text(nickname, opts) |> maybe_escape(opts)
+        nickname_text = get_nickname_text(nickname, opts)

         link =
           "<span class='h-card'><a data-user='#{id}' class='u-url mention' href='#{ap_id}'>@<span>#{

@@ -45,15 +57,47 @@ def hashtag_handler("#" <> tag = tag_text, _buffer, _opts, acc) do

   @doc """
   Parses a text and replace plain text links with HTML. Returns a tuple with a result text, mentions, and hashtags.
+
+  If the 'safe_mention' option is given, only consecutive mentions at the start the post are actually mentioned.
   """
   @spec linkify(String.t(), keyword()) ::
           {String.t(), [{String.t(), User.t()}], [{String.t(), String.t()}]}
   def linkify(text, options \\ []) do
     options = options ++ @auto_linker_config
-    acc = %{mentions: MapSet.new(), tags: MapSet.new()}
-    {text, %{mentions: mentions, tags: tags}} = AutoLinker.link_map(text, acc, options)

-    {text, MapSet.to_list(mentions), MapSet.to_list(tags)}
+    if options[:safe_mention] && Regex.named_captures(@safe_mention_regex, text) do
+      %{"mentions" => mentions, "rest" => rest} = Regex.named_captures(@safe_mention_regex, text)
+      acc = %{mentions: MapSet.new(), tags: MapSet.new()}
+
+      {text_mentions, %{mentions: mentions}} = AutoLinker.link_map(mentions, acc, options)
+      {text_rest, %{tags: tags}} = AutoLinker.link_map(rest, acc, options)
+
+      {text_mentions <> text_rest, MapSet.to_list(mentions), MapSet.to_list(tags)}
+    else
+      acc = %{mentions: MapSet.new(), tags: MapSet.new()}
+      {text, %{mentions: mentions, tags: tags}} = AutoLinker.link_map(text, acc, options)
+
+      {text, MapSet.to_list(mentions), MapSet.to_list(tags)}
+    end
+  end
+
+  @doc """
+  Escapes a special characters in mention names.
+  """
+  def mentions_escape(text, options \\ []) do
+    options =
+      Keyword.merge(options,
+        mention: true,
+        url: false,
+        mention_handler: &Pleroma.Formatter.escape_mention_handler/4
+      )
+
+    if options[:safe_mention] && Regex.named_captures(@safe_mention_regex, text) do
+      %{"mentions" => mentions, "rest" => rest} = Regex.named_captures(@safe_mention_regex, text)
+      AutoLinker.link(mentions, options) <> AutoLinker.link(rest, options)
+    else
+      AutoLinker.link(text, options)
+    end
   end

   def emojify(text) do
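To illustrate the `safe_mention` behaviour documented above (a sketch with invented nicknames, not output from the patch): only the run of mentions at the very start of the text is looked up and turned into mention links; anything later in the text is only scanned for links and hashtags.

```elixir
# "@alice" and "@bob" are treated as mentions; "@charlie" in the body is left alone.
{_html, _mentions, _tags} =
  Pleroma.Formatter.linkify("@alice @bob hello #test, ignore @charlie", safe_mention: true)
```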
||||||
|
@ -63,9 +107,9 @@ def emojify(text) do
|
||||||
def emojify(text, nil), do: text
|
def emojify(text, nil), do: text
|
||||||
|
|
||||||
def emojify(text, emoji, strip \\ false) do
|
def emojify(text, emoji, strip \\ false) do
|
||||||
Enum.reduce(emoji, text, fn {emoji, file}, text ->
|
Enum.reduce(emoji, text, fn emoji_data, text ->
|
||||||
emoji = HTML.strip_tags(emoji)
|
emoji = HTML.strip_tags(elem(emoji_data, 0))
|
||||||
file = HTML.strip_tags(file)
|
file = HTML.strip_tags(elem(emoji_data, 1))
|
||||||
|
|
||||||
html =
|
html =
|
||||||
if not strip do
|
if not strip do
|
||||||
|
@ -87,7 +131,7 @@ def demojify(text) do
|
||||||
def demojify(text, nil), do: text
|
def demojify(text, nil), do: text
|
||||||
|
|
||||||
def get_emoji(text) when is_binary(text) do
|
def get_emoji(text) when is_binary(text) do
|
||||||
Enum.filter(Emoji.get_all(), fn {emoji, _} -> String.contains?(text, ":#{emoji}:") end)
|
Enum.filter(Emoji.get_all(), fn {emoji, _, _} -> String.contains?(text, ":#{emoji}:") end)
|
||||||
end
|
end
|
||||||
|
|
||||||
def get_emoji(_), do: []
|
def get_emoji(_), do: []
|
||||||
|
@ -126,10 +170,4 @@ defp get_ap_id(%User{ap_id: ap_id}), do: ap_id
|
||||||
|
|
||||||
defp get_nickname_text(nickname, %{mentions_format: :full}), do: User.full_nickname(nickname)
|
defp get_nickname_text(nickname, %{mentions_format: :full}), do: User.full_nickname(nickname)
|
||||||
defp get_nickname_text(nickname, _), do: User.local_nickname(nickname)
|
defp get_nickname_text(nickname, _), do: User.local_nickname(nickname)
|
||||||
|
|
||||||
defp maybe_escape(str, %{mentions_escape: true}) do
|
|
||||||
String.replace(str, @markdown_characters_regex, "\\\\\\1")
|
|
||||||
end
|
|
||||||
|
|
||||||
defp maybe_escape(str, _), do: str
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -38,7 +38,7 @@ def init([ip, port]) do
|
||||||
defmodule Pleroma.Gopher.Server.ProtocolHandler do
|
defmodule Pleroma.Gopher.Server.ProtocolHandler do
|
||||||
alias Pleroma.Activity
|
alias Pleroma.Activity
|
||||||
alias Pleroma.HTML
|
alias Pleroma.HTML
|
||||||
alias Pleroma.Repo
|
alias Pleroma.Object
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
alias Pleroma.Web.ActivityPub.ActivityPub
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||||
alias Pleroma.Web.ActivityPub.Visibility
|
alias Pleroma.Web.ActivityPub.Visibility
|
||||||
|
@ -66,7 +66,8 @@ def info(text) do
|
||||||
def link(name, selector, type \\ 1) do
|
def link(name, selector, type \\ 1) do
|
||||||
address = Pleroma.Web.Endpoint.host()
|
address = Pleroma.Web.Endpoint.host()
|
||||||
port = Pleroma.Config.get([:gopher, :port], 1234)
|
port = Pleroma.Config.get([:gopher, :port], 1234)
|
||||||
"#{type}#{name}\t#{selector}\t#{address}\t#{port}\r\n"
|
dstport = Pleroma.Config.get([:gopher, :dstport], port)
|
||||||
|
"#{type}#{name}\t#{selector}\t#{address}\t#{dstport}\r\n"
|
||||||
end
|
end
|
||||||
|
|
||||||
def render_activities(activities) do
|
def render_activities(activities) do
|
||||||
|
@ -75,14 +76,14 @@ def render_activities(activities) do
|
||||||
|> Enum.map(fn activity ->
|
|> Enum.map(fn activity ->
|
||||||
user = User.get_cached_by_ap_id(activity.data["actor"])
|
user = User.get_cached_by_ap_id(activity.data["actor"])
|
||||||
|
|
||||||
object = activity.data["object"]
|
object = Object.normalize(activity)
|
||||||
like_count = object["like_count"] || 0
|
like_count = object["like_count"] || 0
|
||||||
announcement_count = object["announcement_count"] || 0
|
announcement_count = object["announcement_count"] || 0
|
||||||
|
|
||||||
link("Post ##{activity.id} by #{user.nickname}", "/notices/#{activity.id}") <>
|
link("Post ##{activity.id} by #{user.nickname}", "/notices/#{activity.id}") <>
|
||||||
info("#{like_count} likes, #{announcement_count} repeats") <>
|
info("#{like_count} likes, #{announcement_count} repeats") <>
|
||||||
"i\tfake\t(NULL)\t0\r\n" <>
|
"i\tfake\t(NULL)\t0\r\n" <>
|
||||||
info(HTML.strip_tags(String.replace(activity.data["object"]["content"], "<br>", "\r")))
|
info(HTML.strip_tags(String.replace(object["content"], "<br>", "\r")))
|
||||||
end)
|
end)
|
||||||
|> Enum.join("i\tfake\t(NULL)\t0\r\n")
|
|> Enum.join("i\tfake\t(NULL)\t0\r\n")
|
||||||
end
|
end
|
||||||
|
@ -110,7 +111,7 @@ def response("/main/all") do
|
||||||
end
|
end
|
||||||
|
|
||||||
def response("/notices/" <> id) do
|
def response("/notices/" <> id) do
|
||||||
with %Activity{} = activity <- Repo.get(Activity, id),
|
with %Activity{} = activity <- Activity.get_by_id(id),
|
||||||
true <- Visibility.is_public?(activity) do
|
true <- Visibility.is_public?(activity) do
|
||||||
activities =
|
activities =
|
||||||
ActivityPub.fetch_activities_for_context(activity.data["context"])
|
ActivityPub.fetch_activities_for_context(activity.data["context"])
|
||||||
|
|
|
@ -28,27 +28,40 @@ def filter_tags(html, scrubber), do: Scrubber.scrub(html, scrubber)
|
||||||
def filter_tags(html), do: filter_tags(html, nil)
|
def filter_tags(html), do: filter_tags(html, nil)
|
||||||
def strip_tags(html), do: Scrubber.scrub(html, Scrubber.StripTags)
|
def strip_tags(html), do: Scrubber.scrub(html, Scrubber.StripTags)
|
||||||
|
|
||||||
def get_cached_scrubbed_html_for_object(content, scrubbers, object, module) do
|
def get_cached_scrubbed_html_for_activity(content, scrubbers, activity, key \\ "") do
|
||||||
key = "#{module}#{generate_scrubber_signature(scrubbers)}|#{object.id}"
|
key = "#{key}#{generate_scrubber_signature(scrubbers)}|#{activity.id}"
|
||||||
Cachex.fetch!(:scrubber_cache, key, fn _key -> ensure_scrubbed_html(content, scrubbers) end)
|
|
||||||
|
Cachex.fetch!(:scrubber_cache, key, fn _key ->
|
||||||
|
object = Pleroma.Object.normalize(activity)
|
||||||
|
ensure_scrubbed_html(content, scrubbers, object.data["fake"] || false)
|
||||||
|
end)
|
||||||
end
|
end
|
||||||
|
|
||||||
def get_cached_stripped_html_for_object(content, object, module) do
|
def get_cached_stripped_html_for_activity(content, activity, key) do
|
||||||
get_cached_scrubbed_html_for_object(
|
get_cached_scrubbed_html_for_activity(
|
||||||
content,
|
content,
|
||||||
HtmlSanitizeEx.Scrubber.StripTags,
|
HtmlSanitizeEx.Scrubber.StripTags,
|
||||||
object,
|
activity,
|
||||||
module
|
key
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
def ensure_scrubbed_html(
|
def ensure_scrubbed_html(
|
||||||
content,
|
content,
|
||||||
scrubbers
|
scrubbers,
|
||||||
|
false = _fake
|
||||||
) do
|
) do
|
||||||
{:commit, filter_tags(content, scrubbers)}
|
{:commit, filter_tags(content, scrubbers)}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def ensure_scrubbed_html(
|
||||||
|
content,
|
||||||
|
scrubbers,
|
||||||
|
true = _fake
|
||||||
|
) do
|
||||||
|
{:ignore, filter_tags(content, scrubbers)}
|
||||||
|
end
|
||||||
|
|
||||||
defp generate_scrubber_signature(scrubber) when is_atom(scrubber) do
|
defp generate_scrubber_signature(scrubber) when is_atom(scrubber) do
|
||||||
generate_scrubber_signature([scrubber])
|
generate_scrubber_signature([scrubber])
|
||||||
end
|
end
|
||||||
|
@ -93,14 +106,31 @@ defmodule Pleroma.HTML.Scrubber.TwitterText do
|
||||||
|
|
||||||
# links
|
# links
|
||||||
Meta.allow_tag_with_uri_attributes("a", ["href", "data-user", "data-tag"], @valid_schemes)
|
Meta.allow_tag_with_uri_attributes("a", ["href", "data-user", "data-tag"], @valid_schemes)
|
||||||
Meta.allow_tag_with_these_attributes("a", ["name", "title", "class"])
|
|
||||||
|
Meta.allow_tag_with_this_attribute_values("a", "class", [
|
||||||
|
"hashtag",
|
||||||
|
"u-url",
|
||||||
|
"mention",
|
||||||
|
"u-url mention",
|
||||||
|
"mention u-url"
|
||||||
|
])
|
||||||
|
|
||||||
|
Meta.allow_tag_with_this_attribute_values("a", "rel", [
|
||||||
|
"tag",
|
||||||
|
"nofollow",
|
||||||
|
"noopener",
|
||||||
|
"noreferrer"
|
||||||
|
])
|
||||||
|
|
||||||
|
Meta.allow_tag_with_these_attributes("a", ["name", "title"])
|
||||||
|
|
||||||
# paragraphs and linebreaks
|
# paragraphs and linebreaks
|
||||||
Meta.allow_tag_with_these_attributes("br", [])
|
Meta.allow_tag_with_these_attributes("br", [])
|
||||||
Meta.allow_tag_with_these_attributes("p", [])
|
Meta.allow_tag_with_these_attributes("p", [])
|
||||||
|
|
||||||
# microformats
|
# microformats
|
||||||
Meta.allow_tag_with_these_attributes("span", ["class"])
|
Meta.allow_tag_with_this_attribute_values("span", "class", ["h-card"])
|
||||||
|
Meta.allow_tag_with_these_attributes("span", [])
|
||||||
|
|
||||||
# allow inline images for custom emoji
|
# allow inline images for custom emoji
|
||||||
@allow_inline_images Keyword.get(@markup, :allow_inline_images)
|
@allow_inline_images Keyword.get(@markup, :allow_inline_images)
|
||||||
|
@ -135,7 +165,23 @@ defmodule Pleroma.HTML.Scrubber.Default do
|
||||||
Meta.strip_comments()
|
Meta.strip_comments()
|
||||||
|
|
||||||
Meta.allow_tag_with_uri_attributes("a", ["href", "data-user", "data-tag"], @valid_schemes)
|
Meta.allow_tag_with_uri_attributes("a", ["href", "data-user", "data-tag"], @valid_schemes)
|
||||||
Meta.allow_tag_with_these_attributes("a", ["name", "title", "class"])
|
|
||||||
|
Meta.allow_tag_with_this_attribute_values("a", "class", [
|
||||||
|
"hashtag",
|
||||||
|
"u-url",
|
||||||
|
"mention",
|
||||||
|
"u-url mention",
|
||||||
|
"mention u-url"
|
||||||
|
])
|
||||||
|
|
||||||
|
Meta.allow_tag_with_this_attribute_values("a", "rel", [
|
||||||
|
"tag",
|
||||||
|
"nofollow",
|
||||||
|
"noopener",
|
||||||
|
"noreferrer"
|
||||||
|
])
|
||||||
|
|
||||||
|
Meta.allow_tag_with_these_attributes("a", ["name", "title"])
|
||||||
|
|
||||||
Meta.allow_tag_with_these_attributes("abbr", ["title"])
|
Meta.allow_tag_with_these_attributes("abbr", ["title"])
|
||||||
|
|
||||||
|
@ -150,11 +196,13 @@ defmodule Pleroma.HTML.Scrubber.Default do
|
||||||
Meta.allow_tag_with_these_attributes("ol", [])
|
Meta.allow_tag_with_these_attributes("ol", [])
|
||||||
Meta.allow_tag_with_these_attributes("p", [])
|
Meta.allow_tag_with_these_attributes("p", [])
|
||||||
Meta.allow_tag_with_these_attributes("pre", [])
|
Meta.allow_tag_with_these_attributes("pre", [])
|
||||||
Meta.allow_tag_with_these_attributes("span", ["class"])
|
|
||||||
Meta.allow_tag_with_these_attributes("strong", [])
|
Meta.allow_tag_with_these_attributes("strong", [])
|
||||||
Meta.allow_tag_with_these_attributes("u", [])
|
Meta.allow_tag_with_these_attributes("u", [])
|
||||||
Meta.allow_tag_with_these_attributes("ul", [])
|
Meta.allow_tag_with_these_attributes("ul", [])
|
||||||
|
|
||||||
|
Meta.allow_tag_with_this_attribute_values("span", "class", ["h-card"])
|
||||||
|
Meta.allow_tag_with_these_attributes("span", [])
|
||||||
|
|
||||||
@allow_inline_images Keyword.get(@markup, :allow_inline_images)
|
@allow_inline_images Keyword.get(@markup, :allow_inline_images)
|
||||||
|
|
||||||
if @allow_inline_images do
|
if @allow_inline_images do
|
||||||
|
|
|
@ -12,7 +12,7 @@ defmodule Pleroma.Instances.Instance do
|
||||||
|
|
||||||
schema "instances" do
|
schema "instances" do
|
||||||
field(:host, :string)
|
field(:host, :string)
|
||||||
field(:unreachable_since, :naive_datetime)
|
field(:unreachable_since, :naive_datetime_usec)
|
||||||
|
|
||||||
timestamps()
|
timestamps()
|
||||||
end
|
end
|
||||||
|
|
|
@@ -1,152 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Jobs do
-  @moduledoc """
-  A basic job queue
-  """
-  use GenServer
-
-  require Logger
-
-  def init(args) do
-    {:ok, args}
-  end
-
-  def start_link do
-    queues =
-      Pleroma.Config.get(Pleroma.Jobs)
-      |> Enum.map(fn {name, _} -> create_queue(name) end)
-      |> Enum.into(%{})
-
-    state = %{
-      queues: queues,
-      refs: %{}
-    }
-
-    GenServer.start_link(__MODULE__, state, name: __MODULE__)
-  end
-
-  def create_queue(name) do
-    {name, {:sets.new(), []}}
-  end
-
-  @doc """
-  Enqueues a job.
-
-  Returns `:ok`.
-
-  ## Arguments
-
-  - `queue_name` - a queue name(must be specified in the config).
-  - `mod` - a worker module (must have `perform` function).
-  - `args` - a list of arguments for the `perform` function of the worker module.
-  - `priority` - a job priority (`0` by default).
-
-  ## Examples
-
-  Enqueue `Module.perform/0` with `priority=1`:
-
-      iex> Pleroma.Jobs.enqueue(:example_queue, Module, [])
-      :ok
-
-  Enqueue `Module.perform(:job_name)` with `priority=5`:
-
-      iex> Pleroma.Jobs.enqueue(:example_queue, Module, [:job_name], 5)
-      :ok
-
-  Enqueue `Module.perform(:another_job, data)` with `priority=1`:
-
-      iex> data = "foobar"
-      iex> Pleroma.Jobs.enqueue(:example_queue, Module, [:another_job, data])
-      :ok
-
-  Enqueue `Module.perform(:foobar_job, :foo, :bar, 42)` with `priority=1`:
-
-      iex> Pleroma.Jobs.enqueue(:example_queue, Module, [:foobar_job, :foo, :bar, 42])
-      :ok
-  """
-  def enqueue(queue_name, mod, args, priority \\ 1)
-
-  if Mix.env() == :test do
-    def enqueue(_queue_name, mod, args, _priority) do
-      apply(mod, :perform, args)
-    end
-  else
-    @spec enqueue(atom(), atom(), [any()], integer()) :: :ok
-    def enqueue(queue_name, mod, args, priority) do
-      GenServer.cast(__MODULE__, {:enqueue, queue_name, mod, args, priority})
-    end
-  end
-
-  def handle_cast({:enqueue, queue_name, mod, args, priority}, state) do
-    {running_jobs, queue} = state[:queues][queue_name]
-
-    queue = enqueue_sorted(queue, {mod, args}, priority)
-
-    state =
-      state
-      |> update_queue(queue_name, {running_jobs, queue})
-      |> maybe_start_job(queue_name, running_jobs, queue)
-
-    {:noreply, state}
-  end
-
-  def handle_info({:DOWN, ref, :process, _pid, _reason}, state) do
-    queue_name = state.refs[ref]
-
-    {running_jobs, queue} = state[:queues][queue_name]
-
-    running_jobs = :sets.del_element(ref, running_jobs)
-
-    state =
-      state
-      |> remove_ref(ref)
-      |> update_queue(queue_name, {running_jobs, queue})
-      |> maybe_start_job(queue_name, running_jobs, queue)
-
-    {:noreply, state}
-  end
-
-  def maybe_start_job(state, queue_name, running_jobs, queue) do
-    if :sets.size(running_jobs) < Pleroma.Config.get([__MODULE__, queue_name, :max_jobs]) &&
-         queue != [] do
-      {{mod, args}, queue} = queue_pop(queue)
-      {:ok, pid} = Task.start(fn -> apply(mod, :perform, args) end)
-      mref = Process.monitor(pid)
-
-      state
-      |> add_ref(queue_name, mref)
-      |> update_queue(queue_name, {:sets.add_element(mref, running_jobs), queue})
-    else
-      state
-    end
-  end
-
-  def enqueue_sorted(queue, element, priority) do
-    [%{item: element, priority: priority} | queue]
-    |> Enum.sort_by(fn %{priority: priority} -> priority end)
-  end
-
-  def queue_pop([%{item: element} | queue]) do
-    {element, queue}
-  end
-
-  defp add_ref(state, queue_name, ref) do
-    refs = Map.put(state[:refs], ref, queue_name)
-    Map.put(state, :refs, refs)
-  end
-
-  defp remove_ref(state, ref) do
-    refs = Map.delete(state[:refs], ref)
-    Map.put(state, :refs, refs)
-  end
-
-  defp update_queue(state, queue_name, data) do
-    queues = Map.put(state[:queues], queue_name, data)
-    Map.put(state, :queues, queues)
-  end
-end
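The in-repo queue above is removed; elsewhere in this same diff (scheduled_activity_worker.ex) jobs are enqueued through the external `PleromaJobQueue` instead. A minimal sketch of what a former call site looks like against it, with the queue and worker names as placeholders:

    # before (removed above):
    Pleroma.Jobs.enqueue(:example_queue, MyWorker, [:some_job, arg])

    # after, assuming the same queue is configured for PleromaJobQueue:
    PleromaJobQueue.enqueue(:example_queue, MyWorker, [:some_job, arg])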
@@ -80,7 +80,7 @@ def get_lists_from_activity(%Activity{actor: ap_id}) do

   # Get lists to which the account belongs.
   def get_lists_account_belongs(%User{} = owner, account_id) do
-    user = Repo.get(User, account_id)
+    user = User.get_cached_by_id(account_id)

     query =
       from(
@@ -7,12 +7,15 @@ defmodule Pleroma.Notification do

   alias Pleroma.Activity
   alias Pleroma.Notification
+  alias Pleroma.Object
+  alias Pleroma.Pagination
   alias Pleroma.Repo
   alias Pleroma.User
   alias Pleroma.Web.CommonAPI
   alias Pleroma.Web.CommonAPI.Utils

   import Ecto.Query
+  import Ecto.Changeset

   schema "notifications" do
     field(:seen, :boolean, default: false)
@@ -22,36 +25,30 @@ defmodule Pleroma.Notification do
     timestamps()
   end

-  # TODO: Make generic and unify (see activity_pub.ex)
-  defp restrict_max(query, %{"max_id" => max_id}) do
-    from(activity in query, where: activity.id < ^max_id)
+  def changeset(%Notification{} = notification, attrs) do
+    notification
+    |> cast(attrs, [:seen])
   end

-  defp restrict_max(query, _), do: query
-
-  defp restrict_since(query, %{"since_id" => since_id}) do
-    from(activity in query, where: activity.id > ^since_id)
+  def for_user_query(user) do
+    Notification
+    |> where(user_id: ^user.id)
+    |> join(:inner, [n], activity in assoc(n, :activity))
+    |> join(:left, [n, a], object in Object,
+      on:
+        fragment(
+          "(?->>'id') = COALESCE((? -> 'object'::text) ->> 'id'::text)",
+          object.data,
+          a.data
+        )
+    )
+    |> preload([n, a, o], activity: {a, object: o})
   end

-  defp restrict_since(query, _), do: query
-
   def for_user(user, opts \\ %{}) do
-    query =
-      from(
-        n in Notification,
-        where: n.user_id == ^user.id,
-        order_by: [desc: n.id],
-        join: activity in assoc(n, :activity),
-        preload: [activity: activity],
-        limit: 20
-      )
-
-    query =
-      query
-      |> restrict_since(opts)
-      |> restrict_max(opts)
-
-    Repo.all(query)
+    user
+    |> for_user_query()
+    |> Pagination.fetch_paginated(opts)
   end

   def set_read_up_to(%{id: user_id} = _user, id) do
@@ -68,6 +65,14 @@ def set_read_up_to(%{id: user_id} = _user, id) do
     Repo.update_all(query, [])
   end

+  def read_one(%User{} = user, notification_id) do
+    with {:ok, %Notification{} = notification} <- get(user, notification_id) do
+      notification
+      |> changeset(%{seen: true})
+      |> Repo.update()
+    end
+  end
+
   def get(%{id: user_id} = _user, id) do
     query =
       from(
@@ -93,6 +98,14 @@ def clear(user) do
     |> Repo.delete_all()
   end

+  def destroy_multiple(%{id: user_id} = _user, ids) do
+    from(n in Notification,
+      where: n.id in ^ids,
+      where: n.user_id == ^user_id
+    )
+    |> Repo.delete_all()
+  end
+
   def dismiss(%{id: user_id} = _user, id) do
     notification = Repo.get(Notification, id)

@@ -117,13 +130,7 @@ def create_notifications(_), do: {:ok, []}

   # TODO move to sql, too.
   def create_notification(%Activity{} = activity, %User{} = user) do
-    unless User.blocks?(user, %{ap_id: activity.data["actor"]}) or
-             CommonAPI.thread_muted?(user, activity) or user.ap_id == activity.data["actor"] or
-             (activity.data["type"] == "Follow" and
-                Enum.any?(Notification.for_user(user), fn notif ->
-                  notif.activity.data["type"] == "Follow" and
-                    notif.activity.data["actor"] == activity.data["actor"]
-                end)) do
+    unless skip?(activity, user) do
       notification = %Notification{user_id: user.id, activity: activity}
       {:ok, notification} = Repo.insert(notification)
       Pleroma.Web.Streamer.stream("user", notification)
@@ -143,10 +150,65 @@ def get_notified_from_activity(
     []
     |> Utils.maybe_notify_to_recipients(activity)
     |> Utils.maybe_notify_mentioned_recipients(activity)
+    |> Utils.maybe_notify_subscribers(activity)
     |> Enum.uniq()

     User.get_users_from_set(recipients, local_only)
   end

   def get_notified_from_activity(_, _local_only), do: []
+
+  def skip?(activity, user) do
+    [:self, :blocked, :local, :muted, :followers, :follows, :recently_followed]
+    |> Enum.any?(&skip?(&1, activity, user))
+  end
+
+  def skip?(:self, activity, user) do
+    activity.data["actor"] == user.ap_id
+  end
+
+  def skip?(:blocked, activity, user) do
+    actor = activity.data["actor"]
+    User.blocks?(user, %{ap_id: actor})
+  end
+
+  def skip?(:local, %{local: true}, %{info: %{notification_settings: %{"local" => false}}}),
+    do: true
+
+  def skip?(:local, %{local: false}, %{info: %{notification_settings: %{"remote" => false}}}),
+    do: true
+
+  def skip?(:muted, activity, user) do
+    actor = activity.data["actor"]
+
+    User.mutes?(user, %{ap_id: actor}) or CommonAPI.thread_muted?(user, activity)
+  end
+
+  def skip?(
+        :followers,
+        activity,
+        %{info: %{notification_settings: %{"followers" => false}}} = user
+      ) do
+    actor = activity.data["actor"]
+    follower = User.get_cached_by_ap_id(actor)
+    User.following?(follower, user)
+  end
+
+  def skip?(:follows, activity, %{info: %{notification_settings: %{"follows" => false}}} = user) do
+    actor = activity.data["actor"]
+    followed = User.get_cached_by_ap_id(actor)
+    User.following?(user, followed)
+  end
+
+  def skip?(:recently_followed, %{data: %{"type" => "Follow"}} = activity, user) do
+    actor = activity.data["actor"]
+
+    Notification.for_user(user)
+    |> Enum.any?(fn
+      %{activity: %{data: %{"type" => "Follow", "actor" => ^actor}}} -> true
+      _ -> false
+    end)
+  end
+
+  def skip?(_, _, _), do: false
 end
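With `for_user/2` now delegating to `Pagination.fetch_paginated/2`, Mastodon-style paging params are honoured instead of the old hard-coded limit of 20. A rough usage sketch; `user` and the id value are placeholders, and the string keys are what `Pagination.cast_params/1` expects:

    # At most 10 notifications older than the given id, newest first.
    Pleroma.Notification.for_user(user, %{"max_id" => "9gqgiqPZmMBkPfRgtx", "limit" => 10})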
@@ -7,6 +7,7 @@ defmodule Pleroma.Object do

   alias Pleroma.Activity
   alias Pleroma.Object
+  alias Pleroma.Object.Fetcher
   alias Pleroma.ObjectTombstone
   alias Pleroma.Repo
   alias Pleroma.User
@@ -14,6 +15,8 @@ defmodule Pleroma.Object do
   import Ecto.Query
   import Ecto.Changeset

+  require Logger
+
   schema "objects" do
     field(:data, :map)

@@ -38,9 +41,44 @@ def get_by_ap_id(ap_id) do
     Repo.one(from(object in Object, where: fragment("(?)->>'id' = ?", object.data, ^ap_id)))
   end

-  def normalize(%{"id" => ap_id}), do: normalize(ap_id)
-  def normalize(ap_id) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id)
-  def normalize(_), do: nil
+  def normalize(_, fetch_remote \\ true)
+
+  # If we pass an Activity to Object.normalize(), we can try to use the preloaded object.
+  # Use this whenever possible, especially when walking graphs in an O(N) loop!
+  def normalize(%Object{} = object, _), do: object
+  def normalize(%Activity{object: %Object{} = object}, _), do: object
+
+  # A hack for fake activities
+  def normalize(%Activity{data: %{"object" => %{"fake" => true} = data}}, _) do
+    %Object{id: "pleroma:fake_object_id", data: data}
+  end
+
+  # Catch and log Object.normalize() calls where the Activity's child object is not
+  # preloaded.
+  def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote) do
+    Logger.debug(
+      "Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object!"
+    )
+
+    Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}")
+
+    normalize(ap_id, fetch_remote)
+  end
+
+  def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote) do
+    Logger.debug(
+      "Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object!"
+    )
+
+    Logger.debug("Backtrace: #{inspect(Process.info(:erlang.self(), :current_stacktrace))}")
+
+    normalize(ap_id, fetch_remote)
+  end
+
+  # Old way, try fetching the object through cache.
+  def normalize(%{"id" => ap_id}, fetch_remote), do: normalize(ap_id, fetch_remote)
+  def normalize(ap_id, false) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id)
+  def normalize(ap_id, true) when is_binary(ap_id), do: Fetcher.fetch_object_from_id!(ap_id)
+  def normalize(_, _), do: nil

   # Owned objects can only be mutated by their owner
   def authorize_mutation(%Object{data: %{"actor" => actor}}, %User{ap_id: ap_id}),
@@ -104,4 +142,50 @@ def update_and_set_cache(changeset) do
       e -> e
     end
   end
+
+  def increase_replies_count(ap_id) do
+    Object
+    |> where([o], fragment("?->>'id' = ?::text", o.data, ^to_string(ap_id)))
+    |> update([o],
+      set: [
+        data:
+          fragment(
+            """
+            jsonb_set(?, '{repliesCount}',
+              (coalesce((?->>'repliesCount')::int, 0) + 1)::varchar::jsonb, true)
+            """,
+            o.data,
+            o.data
+          )
+      ]
+    )
+    |> Repo.update_all([])
+    |> case do
+      {1, [object]} -> set_cache(object)
+      _ -> {:error, "Not found"}
+    end
+  end
+
+  def decrease_replies_count(ap_id) do
+    Object
+    |> where([o], fragment("?->>'id' = ?::text", o.data, ^to_string(ap_id)))
+    |> update([o],
+      set: [
+        data:
+          fragment(
+            """
+            jsonb_set(?, '{repliesCount}',
+              (greatest(0, (?->>'repliesCount')::int - 1))::varchar::jsonb, true)
+            """,
+            o.data,
+            o.data
+          )
+      ]
+    )
+    |> Repo.update_all([])
+    |> case do
+      {1, [object]} -> set_cache(object)
+      _ -> {:error, "Not found"}
+    end
+  end
 end
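As the new comments note, `normalize/2` now prefers a preloaded object and only falls back to the cache or a remote fetch. A rough sketch of the two common call shapes; `activity` and the URL are placeholders:

    # Cheap: the object travels with the activity, no query and no fetch.
    %Pleroma.Object{} = Pleroma.Object.normalize(activity)

    # Cache-only lookup by AP id; pass true (the default) to allow a remote fetch.
    Pleroma.Object.normalize("https://remote.example/objects/1", false)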
61  lib/pleroma/object/containment.ex  Normal file
@@ -0,0 +1,61 @@
defmodule Pleroma.Object.Containment do
  @moduledoc """
  # Object Containment

  This module contains some useful functions for containing objects to specific
  origins and determining those origins.  They previously lived in the
  ActivityPub `Transmogrifier` module.

  Object containment is an important step in validating remote objects to prevent
  spoofing, therefore removal of object containment functions is NOT recommended.
  """
  def get_actor(%{"actor" => actor}) when is_binary(actor) do
    actor
  end

  def get_actor(%{"actor" => actor}) when is_list(actor) do
    if is_binary(Enum.at(actor, 0)) do
      Enum.at(actor, 0)
    else
      Enum.find(actor, fn %{"type" => type} -> type in ["Person", "Service", "Application"] end)
      |> Map.get("id")
    end
  end

  def get_actor(%{"actor" => %{"id" => id}}) when is_bitstring(id) do
    id
  end

  def get_actor(%{"actor" => nil, "attributedTo" => actor}) when not is_nil(actor) do
    get_actor(%{"actor" => actor})
  end

  @doc """
  Checks that an imported AP object's actor matches the domain it came from.
  """
  def contain_origin(_id, %{"actor" => nil}), do: :error

  def contain_origin(id, %{"actor" => _actor} = params) do
    id_uri = URI.parse(id)
    actor_uri = URI.parse(get_actor(params))

    if id_uri.host == actor_uri.host do
      :ok
    else
      :error
    end
  end

  def contain_origin_from_id(_id, %{"id" => nil}), do: :error

  def contain_origin_from_id(id, %{"id" => other_id} = _params) do
    id_uri = URI.parse(id)
    other_uri = URI.parse(other_id)

    if id_uri.host == other_uri.host do
      :ok
    else
      :error
    end
  end
end
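A quick illustration of the containment check (the hosts are made up): an object whose claimed `id` lives on a different host than the URL it was fetched from is rejected.

    iex> alias Pleroma.Object.Containment
    iex> Containment.contain_origin_from_id("https://a.example/objects/1", %{"id" => "https://a.example/objects/1"})
    :ok
    iex> Containment.contain_origin_from_id("https://a.example/objects/1", %{"id" => "https://b.example/objects/1"})
    :error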
75  lib/pleroma/object/fetcher.ex  Normal file
@@ -0,0 +1,75 @@
defmodule Pleroma.Object.Fetcher do
  alias Pleroma.Object
  alias Pleroma.Object.Containment
  alias Pleroma.Web.ActivityPub.Transmogrifier
  alias Pleroma.Web.OStatus

  require Logger

  @httpoison Application.get_env(:pleroma, :httpoison)

  # TODO:
  # This will create a Create activity, which we need internally at the moment.
  def fetch_object_from_id(id) do
    if object = Object.get_cached_by_ap_id(id) do
      {:ok, object}
    else
      Logger.info("Fetching #{id} via AP")

      with {:ok, data} <- fetch_and_contain_remote_object_from_id(id),
           nil <- Object.normalize(data, false),
           params <- %{
             "type" => "Create",
             "to" => data["to"],
             "cc" => data["cc"],
             "actor" => data["actor"] || data["attributedTo"],
             "object" => data
           },
           :ok <- Containment.contain_origin(id, params),
           {:ok, activity} <- Transmogrifier.handle_incoming(params) do
        {:ok, Object.normalize(activity, false)}
      else
        {:error, {:reject, nil}} ->
          {:reject, nil}

        object = %Object{} ->
          {:ok, object}

        _e ->
          Logger.info("Couldn't get object via AP, trying out OStatus fetching...")

          case OStatus.fetch_activity_from_url(id) do
            {:ok, [activity | _]} -> {:ok, Object.normalize(activity, false)}
            e -> e
          end
      end
    end
  end

  def fetch_object_from_id!(id) do
    with {:ok, object} <- fetch_object_from_id(id) do
      object
    else
      _e ->
        nil
    end
  end

  def fetch_and_contain_remote_object_from_id(id) do
    Logger.info("Fetching object #{id} via AP")

    with true <- String.starts_with?(id, "http"),
         {:ok, %{body: body, status: code}} when code in 200..299 <-
           @httpoison.get(
             id,
             [{:Accept, "application/activity+json"}]
           ),
         {:ok, data} <- Jason.decode(body),
         :ok <- Containment.contain_origin_from_id(id, data) do
      {:ok, data}
    else
      e ->
        {:error, e}
    end
  end
end
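Sketch of how the fetcher is meant to be called (the id is a placeholder): a cached object short-circuits, otherwise the JSON is fetched, contained to its origin, and run through `Transmogrifier.handle_incoming/1`; `fetch_object_from_id!/1` is the nil-on-failure variant used by `Object.normalize/2`.

    case Pleroma.Object.Fetcher.fetch_object_from_id("https://remote.example/objects/1") do
      {:ok, %Pleroma.Object{} = object} -> object
      {:reject, nil} -> :rejected
      error -> error
    end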
84  lib/pleroma/pagination.ex  Normal file
@@ -0,0 +1,84 @@
defmodule Pleroma.Pagination do
  @moduledoc """
  Implements Mastodon-compatible pagination.
  """

  import Ecto.Query
  import Ecto.Changeset

  alias Pleroma.Repo

  @default_limit 20

  def fetch_paginated(query, params) do
    options = cast_params(params)

    query
    |> paginate(options)
    |> Repo.all()
    |> enforce_order(options)
  end

  def paginate(query, options) do
    query
    |> restrict(:min_id, options)
    |> restrict(:since_id, options)
    |> restrict(:max_id, options)
    |> restrict(:order, options)
    |> restrict(:limit, options)
  end

  defp cast_params(params) do
    param_types = %{
      min_id: :string,
      since_id: :string,
      max_id: :string,
      limit: :integer
    }

    params =
      Enum.reduce(params, %{}, fn
        {key, _value}, acc when is_atom(key) -> Map.drop(acc, [key])
        {key, value}, acc -> Map.put(acc, key, value)
      end)

    changeset = cast({%{}, param_types}, params, Map.keys(param_types))
    changeset.changes
  end

  defp restrict(query, :min_id, %{min_id: min_id}) do
    where(query, [q], q.id > ^min_id)
  end

  defp restrict(query, :since_id, %{since_id: since_id}) do
    where(query, [q], q.id > ^since_id)
  end

  defp restrict(query, :max_id, %{max_id: max_id}) do
    where(query, [q], q.id < ^max_id)
  end

  defp restrict(query, :order, %{min_id: _}) do
    order_by(query, [u], fragment("? asc nulls last", u.id))
  end

  defp restrict(query, :order, _options) do
    order_by(query, [u], fragment("? desc nulls last", u.id))
  end

  defp restrict(query, :limit, options) do
    limit = Map.get(options, :limit, @default_limit)

    query
    |> limit(^limit)
  end

  defp restrict(query, _, _), do: query

  defp enforce_order(result, %{min_id: _}) do
    result
    |> Enum.reverse()
  end

  defp enforce_order(result, _), do: result
end
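A rough sketch of the intended use, mirroring `Notification.for_user/2` above (`user` and the id value are placeholders); string keys come straight from controller params and are cast by `cast_params/1`, unknown keys are dropped:

    user
    |> Pleroma.Notification.for_user_query()
    |> Pleroma.Pagination.fetch_paginated(%{"since_id" => "9gqgiqPZmMBkPfRgtx", "limit" => 20})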
@@ -21,7 +21,8 @@ def file_path(path) do
     end
   end

-  @only ~w(index.html static emoji packs sounds images instance favicon.png sw.js sw-pleroma.js)
+  @only ~w(index.html robots.txt static emoji packs sounds images instance favicon.png sw.js
+    sw-pleroma.js)

   def init(opts) do
     opts
@@ -24,6 +24,18 @@ def init(_opts) do
   end

   def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
+    conn =
+      case fetch_query_params(conn) do
+        %{query_params: %{"name" => name}} = conn ->
+          name = String.replace(name, "\"", "\\\"")
+
+          conn
+          |> put_resp_header("content-disposition", "filename=\"#{name}\"")
+
+        conn ->
+          conn
+      end
+
     config = Pleroma.Config.get([Pleroma.Upload])

     with uploader <- Keyword.fetch!(config, :uploader),
@@ -3,9 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Plugs.UserFetcherPlug do
-  alias Pleroma.Repo
   alias Pleroma.User

   import Plug.Conn

   def init(options) do
@@ -14,26 +12,10 @@ def init(options) do

   def call(conn, _options) do
     with %{auth_credentials: %{username: username}} <- conn.assigns,
-         {:ok, %User{} = user} <- user_fetcher(username) do
-      conn
-      |> assign(:auth_user, user)
+         %User{} = user <- User.get_by_nickname_or_email(username) do
+      assign(conn, :auth_user, user)
     else
       _ -> conn
     end
   end
-
-  defp user_fetcher(username_or_email) do
-    {
-      :ok,
-      cond do
-        # First, try logging in as if it was a name
-        user = Repo.get_by(User, %{nickname: username_or_email}) ->
-          user
-
-        # If we get nil, we try using it as an email
-        user = Repo.get_by(User, %{email: username_or_email}) ->
-          user
-      end
-    }
-  end
 end
57  lib/pleroma/registration.ex  Normal file
@@ -0,0 +1,57 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Registration do
  use Ecto.Schema

  import Ecto.Changeset

  alias Pleroma.Registration
  alias Pleroma.Repo
  alias Pleroma.User

  @primary_key {:id, Pleroma.FlakeId, autogenerate: true}

  schema "registrations" do
    belongs_to(:user, User, type: Pleroma.FlakeId)
    field(:provider, :string)
    field(:uid, :string)
    field(:info, :map, default: %{})

    timestamps()
  end

  def nickname(registration, default \\ nil),
    do: Map.get(registration.info, "nickname", default)

  def email(registration, default \\ nil),
    do: Map.get(registration.info, "email", default)

  def name(registration, default \\ nil),
    do: Map.get(registration.info, "name", default)

  def description(registration, default \\ nil),
    do: Map.get(registration.info, "description", default)

  def changeset(registration, params \\ %{}) do
    registration
    |> cast(params, [:user_id, :provider, :uid, :info])
    |> validate_required([:provider, :uid])
    |> foreign_key_constraint(:user_id)
    |> unique_constraint(:uid, name: :registrations_provider_uid_index)
  end

  def bind_to_user(registration, user) do
    registration
    |> changeset(%{user_id: (user && user.id) || nil})
    |> Repo.update()
  end

  def get_by_provider_uid(provider, uid) do
    Repo.get_by(Registration,
      provider: to_string(provider),
      uid: to_string(uid)
    )
  end
end
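Sketch of the external-registration lookup this schema enables; the provider atom and uid are placeholders:

    case Pleroma.Registration.get_by_provider_uid(:twitter, "12345") do
      %Pleroma.Registration{} = registration ->
        Pleroma.Registration.nickname(registration, "fallback_nick")

      nil ->
        :no_registration
    end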
@@ -3,7 +3,14 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Repo do
-  use Ecto.Repo, otp_app: :pleroma
+  use Ecto.Repo,
+    otp_app: :pleroma,
+    adapter: Ecto.Adapters.Postgres,
+    migration_timestamps: [type: :naive_datetime_usec]
+
+  defmodule Instrumenter do
+    use Prometheus.EctoInstrumenter
+  end

   @doc """
   Dynamically loads the repository url from the
@@ -311,7 +311,25 @@ defp build_resp_content_disposition_header(headers, opts) do
     end

     if attachment? do
-      disposition = "attachment; filename=" <> Keyword.get(opts, :attachment_name, "attachment")
+      name =
+        try do
+          {{"content-disposition", content_disposition_string}, _} =
+            List.keytake(headers, "content-disposition", 0)
+
+          [name | _] =
+            Regex.run(
+              ~r/filename="((?:[^"\\]|\\.)*)"/u,
+              content_disposition_string || "",
+              capture: :all_but_first
+            )
+
+          name
+        rescue
+          MatchError -> Keyword.get(opts, :attachment_name, "attachment")
+        end
+
+      disposition = "attachment; filename=\"#{name}\""
+
       List.keystore(headers, "content-disposition", 0, {"content-disposition", disposition})
     else
       headers
161  lib/pleroma/scheduled_activity.ex  Normal file
@@ -0,0 +1,161 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.ScheduledActivity do
  use Ecto.Schema

  alias Pleroma.Config
  alias Pleroma.Repo
  alias Pleroma.ScheduledActivity
  alias Pleroma.User
  alias Pleroma.Web.CommonAPI.Utils

  import Ecto.Query
  import Ecto.Changeset

  @min_offset :timer.minutes(5)

  schema "scheduled_activities" do
    belongs_to(:user, User, type: Pleroma.FlakeId)
    field(:scheduled_at, :naive_datetime)
    field(:params, :map)

    timestamps()
  end

  def changeset(%ScheduledActivity{} = scheduled_activity, attrs) do
    scheduled_activity
    |> cast(attrs, [:scheduled_at, :params])
    |> validate_required([:scheduled_at, :params])
    |> validate_scheduled_at()
    |> with_media_attachments()
  end

  defp with_media_attachments(
         %{changes: %{params: %{"media_ids" => media_ids} = params}} = changeset
       )
       when is_list(media_ids) do
    media_attachments = Utils.attachments_from_ids(%{"media_ids" => media_ids})

    params =
      params
      |> Map.put("media_attachments", media_attachments)
      |> Map.put("media_ids", media_ids)

    put_change(changeset, :params, params)
  end

  defp with_media_attachments(changeset), do: changeset

  def update_changeset(%ScheduledActivity{} = scheduled_activity, attrs) do
    scheduled_activity
    |> cast(attrs, [:scheduled_at])
    |> validate_required([:scheduled_at])
    |> validate_scheduled_at()
  end

  def validate_scheduled_at(changeset) do
    validate_change(changeset, :scheduled_at, fn _, scheduled_at ->
      cond do
        not far_enough?(scheduled_at) ->
          [scheduled_at: "must be at least 5 minutes from now"]

        exceeds_daily_user_limit?(changeset.data.user_id, scheduled_at) ->
          [scheduled_at: "daily limit exceeded"]

        exceeds_total_user_limit?(changeset.data.user_id) ->
          [scheduled_at: "total limit exceeded"]

        true ->
          []
      end
    end)
  end

  def exceeds_daily_user_limit?(user_id, scheduled_at) do
    ScheduledActivity
    |> where(user_id: ^user_id)
    |> where([sa], type(sa.scheduled_at, :date) == type(^scheduled_at, :date))
    |> select([sa], count(sa.id))
    |> Repo.one()
    |> Kernel.>=(Config.get([ScheduledActivity, :daily_user_limit]))
  end

  def exceeds_total_user_limit?(user_id) do
    ScheduledActivity
    |> where(user_id: ^user_id)
    |> select([sa], count(sa.id))
    |> Repo.one()
    |> Kernel.>=(Config.get([ScheduledActivity, :total_user_limit]))
  end

  def far_enough?(scheduled_at) when is_binary(scheduled_at) do
    with {:ok, scheduled_at} <- Ecto.Type.cast(:naive_datetime, scheduled_at) do
      far_enough?(scheduled_at)
    else
      _ -> false
    end
  end

  def far_enough?(scheduled_at) do
    now = NaiveDateTime.utc_now()
    diff = NaiveDateTime.diff(scheduled_at, now, :millisecond)
    diff > @min_offset
  end

  def new(%User{} = user, attrs) do
    %ScheduledActivity{user_id: user.id}
    |> changeset(attrs)
  end

  def create(%User{} = user, attrs) do
    user
    |> new(attrs)
    |> Repo.insert()
  end

  def get(%User{} = user, scheduled_activity_id) do
    ScheduledActivity
    |> where(user_id: ^user.id)
    |> where(id: ^scheduled_activity_id)
    |> Repo.one()
  end

  def update(%ScheduledActivity{} = scheduled_activity, attrs) do
    scheduled_activity
    |> update_changeset(attrs)
    |> Repo.update()
  end

  def delete(%ScheduledActivity{} = scheduled_activity) do
    scheduled_activity
    |> Repo.delete()
  end

  def delete(id) when is_binary(id) or is_integer(id) do
    ScheduledActivity
    |> where(id: ^id)
    |> select([sa], sa)
    |> Repo.delete_all()
    |> case do
      {1, [scheduled_activity]} -> {:ok, scheduled_activity}
      _ -> :error
    end
  end

  def for_user_query(%User{} = user) do
    ScheduledActivity
    |> where(user_id: ^user.id)
  end

  def due_activities(offset \\ 0) do
    naive_datetime =
      NaiveDateTime.utc_now()
      |> NaiveDateTime.add(offset, :millisecond)

    ScheduledActivity
    |> where([sa], sa.scheduled_at < ^naive_datetime)
    |> Repo.all()
  end
end
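The `@min_offset` guard means anything scheduled less than five minutes out is rejected by `validate_scheduled_at/1` with "must be at least 5 minutes from now". For illustration:

    iex> in_two_minutes = NaiveDateTime.add(NaiveDateTime.utc_now(), 120)
    iex> Pleroma.ScheduledActivity.far_enough?(in_two_minutes)
    false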
58  lib/pleroma/scheduled_activity_worker.ex  Normal file
@@ -0,0 +1,58 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.ScheduledActivityWorker do
  @moduledoc """
  Sends scheduled activities to the job queue.
  """

  alias Pleroma.Config
  alias Pleroma.ScheduledActivity
  alias Pleroma.User
  alias Pleroma.Web.CommonAPI
  use GenServer
  require Logger

  @schedule_interval :timer.minutes(1)

  def start_link do
    GenServer.start_link(__MODULE__, nil)
  end

  def init(_) do
    if Config.get([ScheduledActivity, :enabled]) do
      schedule_next()
      {:ok, nil}
    else
      :ignore
    end
  end

  def perform(:execute, scheduled_activity_id) do
    try do
      {:ok, scheduled_activity} = ScheduledActivity.delete(scheduled_activity_id)
      %User{} = user = User.get_cached_by_id(scheduled_activity.user_id)
      {:ok, _result} = CommonAPI.post(user, scheduled_activity.params)
    rescue
      error ->
        Logger.error(
          "#{__MODULE__} Couldn't create a status from the scheduled activity: #{inspect(error)}"
        )
    end
  end

  def handle_info(:perform, state) do
    ScheduledActivity.due_activities(@schedule_interval)
    |> Enum.each(fn scheduled_activity ->
      PleromaJobQueue.enqueue(:scheduled_activities, __MODULE__, [:execute, scheduled_activity.id])
    end)

    schedule_next()
    {:noreply, state}
  end

  defp schedule_next do
    Process.send_after(self(), :perform, @schedule_interval)
  end
end
@@ -70,7 +70,7 @@ def store(upload, opts \\ []) do
           %{
             "type" => "Link",
             "mediaType" => upload.content_type,
-            "href" => url_from_spec(opts.base_url, url_spec)
+            "href" => url_from_spec(upload, opts.base_url, url_spec)
           }
         ],
         "name" => Map.get(opts, :description) || upload.name
@@ -219,14 +219,18 @@ defp tempfile_for_image(data) do
     tmp_path
   end

-  defp url_from_spec(base_url, {:file, path}) do
+  defp url_from_spec(%__MODULE__{name: name}, base_url, {:file, path}) do
     path =
-      path
-      |> URI.encode(&char_unescaped?/1)
+      URI.encode(path, &char_unescaped?/1) <>
+        if Pleroma.Config.get([__MODULE__, :link_name], false) do
+          "?name=#{URI.encode(name, &char_unescaped?/1)}"
+        else
+          ""
+        end

     [base_url, "media", path]
     |> Path.join()
   end

-  defp url_from_spec(_base_url, {:url, url}), do: url
+  defp url_from_spec(_upload, _base_url, {:url, url}), do: url
 end
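The new first argument lets `url_from_spec/3` append the original filename when the `:link_name` option is set on `Pleroma.Upload`; combined with the UploadedMedia plug change above, the name then comes back as a content-disposition header. Illustrative config and resulting URL shape, with the host and filenames as placeholders:

    # config/config.exs
    config :pleroma, Pleroma.Upload, link_name: true

    # produces links of the form:
    # https://instance.example/media/ab12cd.png?name=cat.png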
@@ -13,10 +13,15 @@ def get_file(file) do
     bucket = Keyword.fetch!(config, :bucket)

     bucket_with_namespace =
-      if namespace = Keyword.get(config, :bucket_namespace) do
-        namespace <> ":" <> bucket
-      else
-        bucket
+      cond do
+        truncated_namespace = Keyword.get(config, :truncated_namespace) ->
+          truncated_namespace
+
+        namespace = Keyword.get(config, :bucket_namespace) ->
+          namespace <> ":" <> bucket
+
+        true ->
+          bucket
       end

     {:ok,
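Assuming this hunk is the S3 uploader (the file header is not visible in this view), the new `:truncated_namespace` key takes precedence over `:bucket_namespace`; a sketch of the relevant config, with placeholder values and an assumed module name:

    # Module name and values are illustrative; the keys are the ones read above.
    config :pleroma, Pleroma.Uploaders.S3,
      bucket: "my-bucket",
      bucket_namespace: "tenant",
      truncated_namespace: ""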
@@ -10,9 +10,11 @@ defmodule Pleroma.User do

   alias Comeonin.Pbkdf2
   alias Pleroma.Activity
+  alias Pleroma.Bookmark
   alias Pleroma.Formatter
   alias Pleroma.Notification
   alias Pleroma.Object
+  alias Pleroma.Registration
   alias Pleroma.Repo
   alias Pleroma.User
   alias Pleroma.Web
@@ -50,23 +52,21 @@ defmodule Pleroma.User do
     field(:local, :boolean, default: true)
     field(:follower_address, :string)
     field(:search_rank, :float, virtual: true)
+    field(:search_type, :integer, virtual: true)
     field(:tags, {:array, :string}, default: [])
-    field(:bookmarks, {:array, :string}, default: [])
-    field(:last_refreshed_at, :naive_datetime)
+    field(:last_refreshed_at, :naive_datetime_usec)
+    has_many(:bookmarks, Bookmark)
     has_many(:notifications, Notification)
+    has_many(:registrations, Registration)
     embeds_one(:info, Pleroma.User.Info)

     timestamps()
   end

-  def auth_active?(%User{local: false}), do: true
-
-  def auth_active?(%User{info: %User.Info{confirmation_pending: false}}), do: true
-
   def auth_active?(%User{info: %User.Info{confirmation_pending: true}}),
     do: !Pleroma.Config.get([:instance, :account_activation_required])

-  def auth_active?(_), do: false
+  def auth_active?(%User{}), do: true

   def visible_for?(user, for_user \\ nil)

@@ -82,17 +82,17 @@ def superuser?(%User{local: true, info: %User.Info{is_admin: true}}), do: true
   def superuser?(%User{local: true, info: %User.Info{is_moderator: true}}), do: true
   def superuser?(_), do: false

-  def avatar_url(user) do
+  def avatar_url(user, options \\ []) do
     case user.avatar do
       %{"url" => [%{"href" => href} | _]} -> href
-      _ -> "#{Web.base_url()}/images/avi.png"
+      _ -> !options[:no_default] && "#{Web.base_url()}/images/avi.png"
     end
   end

-  def banner_url(user) do
+  def banner_url(user, options \\ []) do
     case user.info.banner do
       %{"url" => [%{"href" => href} | _]} -> href
-      _ -> "#{Web.base_url()}/images/banner.png"
+      _ -> !options[:no_default] && "#{Web.base_url()}/images/banner.png"
     end
   end

@@ -104,9 +104,8 @@ def ap_id(%User{nickname: nickname}) do
     "#{Web.base_url()}/users/#{nickname}"
   end

-  def ap_followers(%User{} = user) do
-    "#{ap_id(user)}/followers"
-  end
+  def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
+  def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"

   def user_info(%User{} = user) do
     oneself = if user.local, do: 1, else: 0
@@ -220,7 +219,7 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do
     changeset =
       struct
       |> cast(params, [:bio, :email, :name, :nickname, :password, :password_confirmation])
-      |> validate_required([:email, :name, :nickname, :password, :password_confirmation])
+      |> validate_required([:name, :nickname, :password, :password_confirmation])
       |> validate_confirmation(:password)
       |> unique_constraint(:email)
       |> unique_constraint(:nickname)
@@ -231,6 +230,13 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do
       |> validate_length(:name, min: 1, max: 100)
       |> put_change(:info, info_change)

+    changeset =
+      if opts[:external] do
+        changeset
+      else
+        validate_required(changeset, [:email])
+      end
+
     if changeset.valid? do
       hashed = Pbkdf2.hashpwsalt(changeset.changes[:password])
       ap_id = User.ap_id(%User{nickname: changeset.changes[:nickname]})
@@ -264,6 +270,7 @@ defp autofollow_users(user) do
   def register(%Ecto.Changeset{} = changeset) do
     with {:ok, user} <- Repo.insert(changeset),
          {:ok, user} <- autofollow_users(user),
+         {:ok, user} <- set_cache(user),
          {:ok, _} <- Pleroma.User.WelcomeMessage.post_welcome_message_to_user(user),
          {:ok, _} <- try_send_confirmation_email(user) do
       {:ok, user}
@@ -274,8 +281,10 @@ def try_send_confirmation_email(%User{} = user) do
     if user.info.confirmation_pending &&
          Pleroma.Config.get([:instance, :account_activation_required]) do
       user
-      |> Pleroma.UserEmail.account_confirmation_email()
-      |> Pleroma.Mailer.deliver_async()
+      |> Pleroma.Emails.UserEmail.account_confirmation_email()
+      |> Pleroma.Emails.Mailer.deliver_async()
+
+      {:ok, :enqueued}
     else
       {:ok, :noop}
     end
@@ -335,10 +344,11 @@ def follow_all(follower, followeds) do
             ^followed_addresses
           )
         ]
-      ]
+      ],
+      select: u
     )

-    {1, [follower]} = Repo.update_all(q, [], returning: true)
+    {1, [follower]} = Repo.update_all(q, [])

     Enum.each(followeds, fn followed ->
       update_follower_count(followed)
@@ -368,10 +378,11 @@ def follow(%User{} = follower, %User{info: info} = followed) do
       q =
         from(u in User,
          where: u.id == ^follower.id,
-          update: [push: [following: ^ap_followers]]
+          update: [push: [following: ^ap_followers]],
+          select: u
        )

-      {1, [follower]} = Repo.update_all(q, [], returning: true)
+      {1, [follower]} = Repo.update_all(q, [])

      {:ok, _} = update_follower_count(followed)

@@ -386,10 +397,11 @@ def unfollow(%User{} = follower, %User{} = followed) do
       q =
         from(u in User,
          where: u.id == ^follower.id,
-          update: [pull: [following: ^ap_followers]]
+          update: [pull: [following: ^ap_followers]],
+          select: u
        )

-      {1, [follower]} = Repo.update_all(q, [], returning: true)
+      {1, [follower]} = Repo.update_all(q, [])

      {:ok, followed} = update_follower_count(followed)

@@ -443,10 +455,13 @@ def get_by_guessed_nickname(ap_id) do
     name = List.last(String.split(ap_id, "/"))
     nickname = "#{name}@#{domain}"

-    get_by_nickname(nickname)
+    get_cached_by_nickname(nickname)
   end

-  def set_cache(user) do
+  def set_cache({:ok, user}), do: set_cache(user)
+  def set_cache({:error, err}), do: {:error, err}
+
+  def set_cache(%User{} = user) do
     Cachex.put(:user_cache, "ap_id:#{user.ap_id}", user)
     Cachex.put(:user_cache, "nickname:#{user.nickname}", user)
     Cachex.put(:user_cache, "user_info:#{user.id}", user_info(user))
@@ -514,11 +529,10 @@ def get_by_nickname(nickname) do
     end
   end

+  def get_by_email(email), do: Repo.get_by(User, email: email)
+
   def get_by_nickname_or_email(nickname_or_email) do
-    case user = Repo.get_by(User, nickname: nickname_or_email) do
-      %User{} -> user
-      nil -> Repo.get_by(User, email: nickname_or_email)
-    end
+    get_by_nickname(nickname_or_email) || get_by_email(nickname_or_email)
   end

   def get_cached_user_info(user) do
@@ -543,6 +557,7 @@ def get_or_fetch_by_nickname(nickname) do
     with [_nick, _domain] <- String.split(nickname, "@"),
          {:ok, user} <- fetch_by_nickname(nickname) do
       if Pleroma.Config.get([:fetch_initial_posts, :enabled]) do
+        # TODO turn into job
         {:ok, _} = Task.start(__MODULE__, :fetch_initial_posts, [user])
       end

@@ -645,7 +660,7 @@ def get_follow_requests(%User{} = user) do
     users =
       user
       |> User.get_follow_requests_query()
-      |> join(:inner, [a], u in User, a.actor == u.ap_id)
+      |> join(:inner, [a], u in User, on: a.actor == u.ap_id)
       |> where([a, u], not fragment("? @> ?", u.following, ^[user.follower_address]))
       |> group_by([a, u], u.id)
       |> select([a, u], u)
@@ -667,7 +682,8 @@ def increase_note_count(%User{} = user) do
         )
       ]
     )
-    |> Repo.update_all([], returning: true)
+    |> select([u], u)
+    |> Repo.update_all([])
     |> case do
       {1, [user]} -> set_cache(user)
       _ -> {:error, user}
@@ -687,7 +703,8 @@ def decrease_note_count(%User{} = user) do
         )
       ]
     )
-    |> Repo.update_all([], returning: true)
+    |> select([u], u)
+    |> Repo.update_all([])
     |> case do
       {1, [user]} -> set_cache(user)
       _ -> {:error, user}
@@ -733,7 +750,8 @@ def update_follower_count(%User{} = user) do
         )
       ]
     )
-    |> Repo.update_all([], returning: true)
+    |> select([u], u)
+    |> Repo.update_all([])
     |> case do
       {1, [user]} -> set_cache(user)
       _ -> {:error, user}
@@ -774,90 +792,59 @@ def get_recipients_from_activity(%Activity{recipients: to}) do
     Repo.all(query)
   end

-  @spec search_for_admin(%{
-          local: boolean(),
-          page: number(),
-          page_size: number()
-        }) :: {:ok, [Pleroma.User.t()], number()}
-  def search_for_admin(%{query: nil, local: local, page: page, page_size: page_size}) do
-    query =
-      from(u in User, order_by: u.id)
-      |> maybe_local_user_query(local)
-
-    paginated_query =
-      query
-      |> paginate(page, page_size)
-
-    count =
-      query
-      |> Repo.aggregate(:count, :id)
-
-    {:ok, Repo.all(paginated_query), count}
-  end
-
-  @spec search_for_admin(%{
-          query: binary(),
-          admin: Pleroma.User.t(),
-          local: boolean(),
-          page: number(),
-          page_size: number()
-        }) :: {:ok, [Pleroma.User.t()], number()}
-  def search_for_admin(%{
-        query: term,
-        admin: admin,
-        local: local,
-        page: page,
-        page_size: page_size
-      }) do
-    term = String.trim_leading(term, "@")
-
-    local_paginated_query =
-      User
-      |> maybe_local_user_query(local)
-      |> paginate(page, page_size)
-
-    search_query = fts_search_subquery(term, local_paginated_query)
-
-    count =
-      term
-      |> fts_search_subquery()
-      |> maybe_local_user_query(local)
-      |> Repo.aggregate(:count, :id)
-
-    {:ok, do_search(search_query, admin), count}
-  end
-
   def search(query, resolve \\ false, for_user \\ nil) do
     # Strip the beginning @ off if there is a query
     query = String.trim_leading(query, "@")

     if resolve, do: get_or_fetch(query)

-    fts_results = do_search(fts_search_subquery(query), for_user)
-
-    {:ok, trigram_results} =
+    {:ok, results} =
       Repo.transaction(fn ->
         Ecto.Adapters.SQL.query(Repo, "select set_limit(0.25)", [])
-        do_search(trigram_search_subquery(query), for_user)
+        Repo.all(search_query(query, for_user))
       end)

-    Enum.uniq_by(fts_results ++ trigram_results, & &1.id)
+    results
   end

-  defp do_search(subquery, for_user, options \\ []) do
-    q =
-      from(
-        s in subquery(subquery),
-        order_by: [desc: s.search_rank],
-        limit: ^(options[:limit] || 20)
-      )
-
-    results =
-      q
-      |> Repo.all()
-      |> Enum.filter(&(&1.search_rank > 0))
-
-    boost_search_results(results, for_user)
+  def search_query(query, for_user) do
+    fts_subquery = fts_search_subquery(query)
+    trigram_subquery = trigram_search_subquery(query)
+    union_query = from(s in trigram_subquery, union_all: ^fts_subquery)
+    distinct_query = from(s in subquery(union_query), order_by: s.search_type, distinct: s.id)
+
+    from(s in subquery(boost_search_rank_query(distinct_query, for_user)),
+      order_by: [desc: s.search_rank],
+      limit: 20
+    )
+  end
+
+  defp boost_search_rank_query(query, nil), do: query
+
+  defp boost_search_rank_query(query, for_user) do
+    friends_ids = get_friends_ids(for_user)
+    followers_ids = get_followers_ids(for_user)
+
+    from(u in subquery(query),
+      select_merge: %{
+        search_rank:
+          fragment(
+            """
+            CASE WHEN (?) THEN (?) * 1.3
+            WHEN (?) THEN (?) * 1.2
+            WHEN (?) THEN (?) * 1.1
+            ELSE (?) END
+            """,
+            u.id in ^friends_ids and u.id in ^followers_ids,
+            u.search_rank,
+            u.id in ^friends_ids,
+            u.search_rank,
+            u.id in ^followers_ids,
+            u.search_rank,
+            u.search_rank
+          )
+      }
+    )
   end

   defp fts_search_subquery(term, query \\ User) do
@@ -872,6 +859,7 @@ defp fts_search_subquery(term, query \\ User) do
     from(
       u in query,
       select_merge: %{
+        search_type: ^0,
         search_rank:
           fragment(
             """
@@ -904,6 +892,8 @@ defp trigram_search_subquery(term) do
     from(
       u in User,
       select_merge: %{
+        # ^1 gives 'Postgrex expected a binary, got 1' for some weird reason
+        search_type: fragment("?", 1),
         search_rank:
           fragment(
             "similarity(?, trim(? || ' ' || coalesce(?, '')))",
@@ -916,33 +906,6 @@ defp trigram_search_subquery(term) do
           )
   end

-  defp boost_search_results(results, nil), do: results
-
-  defp boost_search_results(results, for_user) do
-    friends_ids = get_friends_ids(for_user)
-    followers_ids = get_followers_ids(for_user)
-
-    Enum.map(
-      results,
-      fn u ->
-        search_rank_coef =
-          cond do
-            u.id in friends_ids ->
-              1.2
-
-            u.id in followers_ids ->
-              1.1
-
-            true ->
-              1
-          end
-
-        Map.put(u, :search_rank, u.search_rank * search_rank_coef)
-      end
-    )
-    |> Enum.sort_by(&(-&1.search_rank))
-  end
-
   def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers) do
     Enum.map(
       blocked_identifiers,
blocked_identifiers,
|
||||||
|
@ -984,6 +947,38 @@ def unmute(muter, %{ap_id: ap_id}) do
|
||||||
update_and_set_cache(cng)
|
update_and_set_cache(cng)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def subscribe(subscriber, %{ap_id: ap_id}) do
|
||||||
|
deny_follow_blocked = Pleroma.Config.get([:user, :deny_follow_blocked])
|
||||||
|
|
||||||
|
with %User{} = subscribed <- get_cached_by_ap_id(ap_id) do
|
||||||
|
blocked = blocks?(subscribed, subscriber) and deny_follow_blocked
|
||||||
|
|
||||||
|
if blocked do
|
||||||
|
{:error, "Could not subscribe: #{subscribed.nickname} is blocking you"}
|
||||||
|
else
|
||||||
|
info_cng =
|
||||||
|
subscribed.info
|
||||||
|
|> User.Info.add_to_subscribers(subscriber.ap_id)
|
||||||
|
|
||||||
|
change(subscribed)
|
||||||
|
|> put_embed(:info, info_cng)
|
||||||
|
|> update_and_set_cache()
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def unsubscribe(unsubscriber, %{ap_id: ap_id}) do
|
||||||
|
with %User{} = user <- get_cached_by_ap_id(ap_id) do
|
||||||
|
info_cng =
|
||||||
|
user.info
|
||||||
|
|> User.Info.remove_from_subscribers(unsubscriber.ap_id)
|
||||||
|
|
||||||
|
change(user)
|
||||||
|
|> put_embed(:info, info_cng)
|
||||||
|
|> update_and_set_cache()
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
def block(blocker, %User{ap_id: ap_id} = blocked) do
|
def block(blocker, %User{ap_id: ap_id} = blocked) do
|
||||||
# sever any follow relationships to prevent leaks per activitypub (Pleroma issue #213)
|
# sever any follow relationships to prevent leaks per activitypub (Pleroma issue #213)
|
||||||
blocker =
|
blocker =
|
||||||
|
@ -994,10 +989,20 @@ def block(blocker, %User{ap_id: ap_id} = blocked) do
|
||||||
blocker
|
blocker
|
||||||
end
|
end
|
||||||
|
|
||||||
|
blocker =
|
||||||
|
if subscribed_to?(blocked, blocker) do
|
||||||
|
{:ok, blocker} = unsubscribe(blocked, blocker)
|
||||||
|
blocker
|
||||||
|
else
|
||||||
|
blocker
|
||||||
|
end
|
||||||
|
|
||||||
if following?(blocked, blocker) do
|
if following?(blocked, blocker) do
|
||||||
unfollow(blocked, blocker)
|
unfollow(blocked, blocker)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
{:ok, blocker} = update_follower_count(blocker)
|
||||||
|
|
||||||
info_cng =
|
info_cng =
|
||||||
blocker.info
|
blocker.info
|
||||||
|> User.Info.add_to_block(ap_id)
|
|> User.Info.add_to_block(ap_id)
|
||||||
|
@ -1011,7 +1016,7 @@ def block(blocker, %User{ap_id: ap_id} = blocked) do
|
||||||
|
|
||||||
# helper to handle the block given only an actor's AP id
|
# helper to handle the block given only an actor's AP id
|
||||||
def block(blocker, %{ap_id: ap_id}) do
|
def block(blocker, %{ap_id: ap_id}) do
|
||||||
block(blocker, User.get_by_ap_id(ap_id))
|
block(blocker, get_cached_by_ap_id(ap_id))
|
||||||
end
|
end
|
||||||
|
|
||||||
def unblock(blocker, %{ap_id: ap_id}) do
|
def unblock(blocker, %{ap_id: ap_id}) do
|
||||||
|
@ -1040,12 +1045,21 @@ def blocks?(user, %{ap_id: ap_id}) do
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def subscribed_to?(user, %{ap_id: ap_id}) do
|
||||||
|
with %User{} = target <- get_cached_by_ap_id(ap_id) do
|
||||||
|
Enum.member?(target.info.subscribers, user.ap_id)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
def muted_users(user),
|
def muted_users(user),
|
||||||
do: Repo.all(from(u in User, where: u.ap_id in ^user.info.mutes))
|
do: Repo.all(from(u in User, where: u.ap_id in ^user.info.mutes))
|
||||||
|
|
||||||
def blocked_users(user),
|
def blocked_users(user),
|
||||||
do: Repo.all(from(u in User, where: u.ap_id in ^user.info.blocks))
|
do: Repo.all(from(u in User, where: u.ap_id in ^user.info.blocks))
|
||||||
|
|
||||||
|
def subscribers(user),
|
||||||
|
do: Repo.all(from(u in User, where: u.ap_id in ^user.info.subscribers))
|
||||||
|
|
||||||
def block_domain(user, domain) do
|
def block_domain(user, domain) do
|
||||||
info_cng =
|
info_cng =
|
||||||
user.info
|
user.info
|
||||||
|
@ -1082,6 +1096,42 @@ def local_user_query(query \\ User) do
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def maybe_external_user_query(query, external) do
|
||||||
|
if external, do: external_user_query(query), else: query
|
||||||
|
end
|
||||||
|
|
||||||
|
def external_user_query(query \\ User) do
|
||||||
|
from(
|
||||||
|
u in query,
|
||||||
|
where: u.local == false,
|
||||||
|
where: not is_nil(u.nickname)
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
def maybe_active_user_query(query, active) do
|
||||||
|
if active, do: active_user_query(query), else: query
|
||||||
|
end
|
||||||
|
|
||||||
|
def active_user_query(query \\ User) do
|
||||||
|
from(
|
||||||
|
u in query,
|
||||||
|
where: fragment("not (?->'deactivated' @> 'true')", u.info),
|
||||||
|
where: not is_nil(u.nickname)
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
def maybe_deactivated_user_query(query, deactivated) do
|
||||||
|
if deactivated, do: deactivated_user_query(query), else: query
|
||||||
|
end
|
||||||
|
|
||||||
|
def deactivated_user_query(query \\ User) do
|
||||||
|
from(
|
||||||
|
u in query,
|
||||||
|
where: fragment("(?->'deactivated' @> 'true')", u.info),
|
||||||
|
where: not is_nil(u.nickname)
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
def active_local_user_query do
|
def active_local_user_query do
|
||||||
from(
|
from(
|
||||||
u in local_user_query(),
|
u in local_user_query(),
|
||||||
|
@ -1107,32 +1157,41 @@ def deactivate(%User{} = user, status \\ true) do
|
||||||
update_and_set_cache(cng)
|
update_and_set_cache(cng)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def update_notification_settings(%User{} = user, settings \\ %{}) do
|
||||||
|
info_changeset = User.Info.update_notification_settings(user.info, settings)
|
||||||
|
|
||||||
|
change(user)
|
||||||
|
|> put_embed(:info, info_changeset)
|
||||||
|
|> update_and_set_cache()
|
||||||
|
end
|
||||||
|
|
||||||
def delete(%User{} = user) do
|
def delete(%User{} = user) do
|
||||||
{:ok, user} = User.deactivate(user)
|
{:ok, user} = User.deactivate(user)
|
||||||
|
|
||||||
# Remove all relationships
|
# Remove all relationships
|
||||||
{:ok, followers} = User.get_followers(user)
|
{:ok, followers} = User.get_followers(user)
|
||||||
|
|
||||||
followers
|
Enum.each(followers, fn follower -> User.unfollow(follower, user) end)
|
||||||
|> Enum.each(fn follower -> User.unfollow(follower, user) end)
|
|
||||||
|
|
||||||
{:ok, friends} = User.get_friends(user)
|
{:ok, friends} = User.get_friends(user)
|
||||||
|
|
||||||
friends
|
Enum.each(friends, fn followed -> User.unfollow(user, followed) end)
|
||||||
|> Enum.each(fn followed -> User.unfollow(user, followed) end)
|
|
||||||
|
|
||||||
query = from(a in Activity, where: a.actor == ^user.ap_id)
|
delete_user_activities(user)
|
||||||
|
end
|
||||||
|
|
||||||
Repo.all(query)
|
def delete_user_activities(%User{ap_id: ap_id} = user) do
|
||||||
|> Enum.each(fn activity ->
|
Activity
|
||||||
case activity.data["type"] do
|
|> where(actor: ^ap_id)
|
||||||
"Create" ->
|
|> Activity.with_preloaded_object()
|
||||||
ActivityPub.delete(Object.normalize(activity.data["object"]))
|
|> Repo.all()
|
||||||
|
|> Enum.each(fn
|
||||||
|
%{data: %{"type" => "Create"}} = activity ->
|
||||||
|
activity |> Object.normalize() |> ActivityPub.delete()
|
||||||
|
|
||||||
# TODO: Do something with likes, follows, repeats.
|
# TODO: Do something with likes, follows, repeats.
|
||||||
_ ->
|
_ ->
|
||||||
"Doing nothing"
|
"Doing nothing"
|
||||||
end
|
|
||||||
end)
|
end)
|
||||||
|
|
||||||
{:ok, user}
|
{:ok, user}
|
||||||
|
@ -1162,13 +1221,16 @@ def fetch_by_ap_id(ap_id) do
|
||||||
end
|
end
|
||||||
|
|
||||||
def get_or_fetch_by_ap_id(ap_id) do
|
def get_or_fetch_by_ap_id(ap_id) do
|
||||||
user = get_by_ap_id(ap_id)
|
user = get_cached_by_ap_id(ap_id)
|
||||||
|
|
||||||
if !is_nil(user) and !User.needs_update?(user) do
|
if !is_nil(user) and !User.needs_update?(user) do
|
||||||
{:ok, user}
|
{:ok, user}
|
||||||
else
|
else
|
||||||
with %User{} = user <- fetch_by_ap_id(ap_id) do
|
# Whether to fetch initial posts for the user (if it's a new user & the fetching is enabled)
|
||||||
if Pleroma.Config.get([:fetch_initial_posts, :enabled]) do
|
should_fetch_initial = is_nil(user) and Pleroma.Config.get([:fetch_initial_posts, :enabled])
|
||||||
|
|
||||||
|
if should_fetch_initial do
|
||||||
|
with {:ok, %User{} = user} = fetch_by_ap_id(ap_id) do
|
||||||
{:ok, _} = Task.start(__MODULE__, :fetch_initial_posts, [user])
|
{:ok, _} = Task.start(__MODULE__, :fetch_initial_posts, [user])
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -1182,7 +1244,7 @@ def get_or_fetch_by_ap_id(ap_id) do
|
||||||
def get_or_create_instance_user do
|
def get_or_create_instance_user do
|
||||||
relay_uri = "#{Pleroma.Web.Endpoint.url()}/relay"
|
relay_uri = "#{Pleroma.Web.Endpoint.url()}/relay"
|
||||||
|
|
||||||
if user = get_by_ap_id(relay_uri) do
|
if user = get_cached_by_ap_id(relay_uri) do
|
||||||
user
|
user
|
||||||
else
|
else
|
||||||
changes =
|
changes =
|
||||||
|
@ -1229,13 +1291,11 @@ defp blank?(""), do: nil
|
||||||
defp blank?(n), do: n
|
defp blank?(n), do: n
|
||||||
|
|
||||||
def insert_or_update_user(data) do
|
def insert_or_update_user(data) do
|
||||||
data =
|
data
|
||||||
data
|
|> Map.put(:name, blank?(data[:name]) || data[:nickname])
|
||||||
|> Map.put(:name, blank?(data[:name]) || data[:nickname])
|
|> remote_user_creation()
|
||||||
|
|> Repo.insert(on_conflict: :replace_all, conflict_target: :nickname)
|
||||||
cs = User.remote_user_creation(data)
|
|> set_cache()
|
||||||
|
|
||||||
Repo.insert(cs, on_conflict: :replace_all, conflict_target: :nickname)
|
|
||||||
end
|
end
|
||||||
|
|
||||||
def ap_enabled?(%User{local: true}), do: true
|
def ap_enabled?(%User{local: true}), do: true
|
||||||
|
@ -1251,8 +1311,8 @@ def get_or_fetch(nickname), do: get_or_fetch_by_nickname(nickname)
|
||||||
# this is because we have synchronous follow APIs and need to simulate them
|
# this is because we have synchronous follow APIs and need to simulate them
|
||||||
# with an async handshake
|
# with an async handshake
|
||||||
def wait_and_refresh(_, %User{local: true} = a, %User{local: true} = b) do
|
def wait_and_refresh(_, %User{local: true} = a, %User{local: true} = b) do
|
||||||
with %User{} = a <- Repo.get(User, a.id),
|
with %User{} = a <- User.get_cached_by_id(a.id),
|
||||||
%User{} = b <- Repo.get(User, b.id) do
|
%User{} = b <- User.get_cached_by_id(b.id) do
|
||||||
{:ok, a, b}
|
{:ok, a, b}
|
||||||
else
|
else
|
||||||
_e ->
|
_e ->
|
||||||
|
@ -1262,8 +1322,8 @@ def wait_and_refresh(_, %User{local: true} = a, %User{local: true} = b) do
|
||||||
|
|
||||||
def wait_and_refresh(timeout, %User{} = a, %User{} = b) do
|
def wait_and_refresh(timeout, %User{} = a, %User{} = b) do
|
||||||
with :ok <- :timer.sleep(timeout),
|
with :ok <- :timer.sleep(timeout),
|
||||||
%User{} = a <- Repo.get(User, a.id),
|
%User{} = a <- User.get_cached_by_id(a.id),
|
||||||
%User{} = b <- Repo.get(User, b.id) do
|
%User{} = b <- User.get_cached_by_id(b.id) do
|
||||||
{:ok, a, b}
|
{:ok, a, b}
|
||||||
else
|
else
|
||||||
_e ->
|
_e ->
|
||||||
|
@ -1302,7 +1362,7 @@ def tag(user_identifiers, tags) when is_list(user_identifiers) do
|
||||||
end
|
end
|
||||||
|
|
||||||
def tag(nickname, tags) when is_binary(nickname),
|
def tag(nickname, tags) when is_binary(nickname),
|
||||||
do: tag(User.get_by_nickname(nickname), tags)
|
do: tag(get_by_nickname(nickname), tags)
|
||||||
|
|
||||||
def tag(%User{} = user, tags),
|
def tag(%User{} = user, tags),
|
||||||
do: update_tags(user, Enum.uniq((user.tags || []) ++ normalize_tags(tags)))
|
do: update_tags(user, Enum.uniq((user.tags || []) ++ normalize_tags(tags)))
|
||||||
|
@ -1314,7 +1374,7 @@ def untag(user_identifiers, tags) when is_list(user_identifiers) do
|
||||||
end
|
end
|
||||||
|
|
||||||
def untag(nickname, tags) when is_binary(nickname),
|
def untag(nickname, tags) when is_binary(nickname),
|
||||||
do: untag(User.get_by_nickname(nickname), tags)
|
do: untag(get_by_nickname(nickname), tags)
|
||||||
|
|
||||||
def untag(%User{} = user, tags),
|
def untag(%User{} = user, tags),
|
||||||
do: update_tags(user, (user.tags || []) -- normalize_tags(tags))
|
do: update_tags(user, (user.tags || []) -- normalize_tags(tags))
|
||||||
|
@ -1328,22 +1388,6 @@ defp update_tags(%User{} = user, new_tags) do
|
||||||
updated_user
|
updated_user
|
||||||
end
|
end
|
||||||
|
|
||||||
def bookmark(%User{} = user, status_id) do
|
|
||||||
bookmarks = Enum.uniq(user.bookmarks ++ [status_id])
|
|
||||||
update_bookmarks(user, bookmarks)
|
|
||||||
end
|
|
||||||
|
|
||||||
def unbookmark(%User{} = user, status_id) do
|
|
||||||
bookmarks = Enum.uniq(user.bookmarks -- [status_id])
|
|
||||||
update_bookmarks(user, bookmarks)
|
|
||||||
end
|
|
||||||
|
|
||||||
def update_bookmarks(%User{} = user, bookmarks) do
|
|
||||||
user
|
|
||||||
|> change(%{bookmarks: bookmarks})
|
|
||||||
|> update_and_set_cache
|
|
||||||
end
|
|
||||||
|
|
||||||
defp normalize_tags(tags) do
|
defp normalize_tags(tags) do
|
||||||
[tags]
|
[tags]
|
||||||
|> List.flatten()
|
|> List.flatten()
|
||||||
|
@ -1393,4 +1437,8 @@ defp paginate(query, page, page_size) do
|
||||||
offset: ^((page - 1) * page_size)
|
offset: ^((page - 1) * page_size)
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def showing_reblogs?(%User{} = user, %User{} = target) do
|
||||||
|
target.ap_id not in user.info.muted_reblogs
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@@ -21,6 +21,8 @@ defmodule Pleroma.User.Info do
     field(:blocks, {:array, :string}, default: [])
     field(:domain_blocks, {:array, :string}, default: [])
     field(:mutes, {:array, :string}, default: [])
+    field(:muted_reblogs, {:array, :string}, default: [])
+    field(:subscribers, {:array, :string}, default: [])
     field(:deactivated, :boolean, default: false)
     field(:no_rich_text, :boolean, default: false)
     field(:ap_enabled, :boolean, default: false)

@@ -36,9 +38,14 @@ defmodule Pleroma.User.Info do
     field(:salmon, :string, default: nil)
     field(:hide_followers, :boolean, default: false)
     field(:hide_follows, :boolean, default: false)
+    field(:hide_favorites, :boolean, default: true)
     field(:pinned_activities, {:array, :string}, default: [])
     field(:flavour, :string, default: nil)

+    field(:notification_settings, :map,
+      default: %{"remote" => true, "local" => true, "followers" => true, "follows" => true}
+    )
+
     # Found in the wild
     # ap_id -> Where is this used?
     # bio -> Where is this used?

@@ -56,6 +63,19 @@ def set_activation_status(info, deactivated) do
     |> validate_required([:deactivated])
   end

+  def update_notification_settings(info, settings) do
+    notification_settings =
+      info.notification_settings
+      |> Map.merge(settings)
+      |> Map.take(["remote", "local", "followers", "follows"])
+
+    params = %{notification_settings: notification_settings}
+
+    info
+    |> cast(params, [:notification_settings])
+    |> validate_required([:notification_settings])
+  end
+
   def add_to_note_count(info, number) do
     set_note_count(info, info.note_count + number)
   end

@@ -92,6 +112,14 @@ def set_blocks(info, blocks) do
     |> validate_required([:blocks])
   end

+  def set_subscribers(info, subscribers) do
+    params = %{subscribers: subscribers}
+
+    info
+    |> cast(params, [:subscribers])
+    |> validate_required([:subscribers])
+  end
+
   def add_to_mutes(info, muted) do
     set_mutes(info, Enum.uniq([muted | info.mutes]))
   end

@@ -108,6 +136,14 @@ def remove_from_block(info, blocked) do
     set_blocks(info, List.delete(info.blocks, blocked))
   end

+  def add_to_subscribers(info, subscribed) do
+    set_subscribers(info, Enum.uniq([subscribed | info.subscribers]))
+  end
+
+  def remove_from_subscribers(info, subscribed) do
+    set_subscribers(info, List.delete(info.subscribers, subscribed))
+  end
+
   def set_domain_blocks(info, domain_blocks) do
     params = %{domain_blocks: domain_blocks}

@@ -167,6 +203,7 @@ def profile_update(info, params) do
       :banner,
       :hide_follows,
       :hide_followers,
+      :hide_favorites,
       :background,
       :show_role
     ])

@@ -190,14 +227,6 @@ def confirmation_changeset(info, params) do
     cast(info, params, [:confirmation_pending, :confirmation_token])
   end

-  def mastodon_profile_update(info, params) do
-    info
-    |> cast(params, [
-      :locked,
-      :banner
-    ])
-  end
-
   def mastodon_settings_update(info, settings) do
     params = %{settings: settings}

@@ -259,4 +288,16 @@ def roles(%Info{is_moderator: is_moderator, is_admin: is_admin}) do
       moderator: is_moderator
     }
   end
+
+  def add_reblog_mute(info, ap_id) do
+    params = %{muted_reblogs: info.muted_reblogs ++ [ap_id]}
+
+    cast(info, params, [:muted_reblogs])
+  end
+
+  def remove_reblog_mute(info, ap_id) do
+    params = %{muted_reblogs: List.delete(info.muted_reblogs, ap_id)}
+
+    cast(info, params, [:muted_reblogs])
+  end
 end
@@ -6,40 +6,119 @@ defmodule Pleroma.UserInviteToken do
   use Ecto.Schema

   import Ecto.Changeset
+  import Ecto.Query
   alias Pleroma.Repo
   alias Pleroma.UserInviteToken

+  @type t :: %__MODULE__{}
+  @type token :: String.t()
+
   schema "user_invite_tokens" do
     field(:token, :string)
     field(:used, :boolean, default: false)
+    field(:max_use, :integer)
+    field(:expires_at, :date)
+    field(:uses, :integer, default: 0)
+    field(:invite_type, :string)

     timestamps()
   end

-  def create_token do
-    token = :crypto.strong_rand_bytes(32) |> Base.url_encode64()
-
-    token = %UserInviteToken{
-      used: false,
-      token: token
-    }
-
-    Repo.insert(token)
-  end
-
-  def used_changeset(struct) do
-    struct
-    |> cast(%{}, [])
-    |> put_change(:used, true)
-  end
-
-  def mark_as_used(token) do
-    with %{used: false} = token <- Repo.get_by(UserInviteToken, %{token: token}),
-         {:ok, token} <- Repo.update(used_changeset(token)) do
-      {:ok, token}
-    else
-      _e -> {:error, token}
-    end
-  end
+  @spec create_invite(map()) :: UserInviteToken.t()
+  def create_invite(params \\ %{}) do
+    %UserInviteToken{}
+    |> cast(params, [:max_use, :expires_at])
+    |> add_token()
+    |> assign_type()
+    |> Repo.insert()
+  end
+
+  defp add_token(changeset) do
+    token = :crypto.strong_rand_bytes(32) |> Base.url_encode64()
+    put_change(changeset, :token, token)
+  end
+
+  defp assign_type(%{changes: %{max_use: _max_use, expires_at: _expires_at}} = changeset) do
+    put_change(changeset, :invite_type, "reusable_date_limited")
+  end
+
+  defp assign_type(%{changes: %{expires_at: _expires_at}} = changeset) do
+    put_change(changeset, :invite_type, "date_limited")
+  end
+
+  defp assign_type(%{changes: %{max_use: _max_use}} = changeset) do
+    put_change(changeset, :invite_type, "reusable")
+  end
+
+  defp assign_type(changeset), do: put_change(changeset, :invite_type, "one_time")
+
+  @spec list_invites() :: [UserInviteToken.t()]
+  def list_invites do
+    query = from(u in UserInviteToken, order_by: u.id)
+    Repo.all(query)
+  end
+
+  @spec update_invite!(UserInviteToken.t(), map()) :: UserInviteToken.t() | no_return()
+  def update_invite!(invite, changes) do
+    change(invite, changes) |> Repo.update!()
+  end
+
+  @spec update_invite(UserInviteToken.t(), map()) ::
+          {:ok, UserInviteToken.t()} | {:error, Changeset.t()}
+  def update_invite(invite, changes) do
+    change(invite, changes) |> Repo.update()
+  end
+
+  @spec find_by_token!(token()) :: UserInviteToken.t() | no_return()
+  def find_by_token!(token), do: Repo.get_by!(UserInviteToken, token: token)
+
+  @spec find_by_token(token()) :: {:ok, UserInviteToken.t()} | nil
+  def find_by_token(token) do
+    with invite <- Repo.get_by(UserInviteToken, token: token) do
+      {:ok, invite}
+    end
+  end
+
+  @spec valid_invite?(UserInviteToken.t()) :: boolean()
+  def valid_invite?(%{invite_type: "one_time"} = invite) do
+    not invite.used
+  end
+
+  def valid_invite?(%{invite_type: "date_limited"} = invite) do
+    not_overdue_date?(invite) and not invite.used
+  end
+
+  def valid_invite?(%{invite_type: "reusable"} = invite) do
+    invite.uses < invite.max_use and not invite.used
+  end
+
+  def valid_invite?(%{invite_type: "reusable_date_limited"} = invite) do
+    not_overdue_date?(invite) and invite.uses < invite.max_use and not invite.used
+  end
+
+  defp not_overdue_date?(%{expires_at: expires_at}) do
+    Date.compare(Date.utc_today(), expires_at) in [:lt, :eq]
+  end
+
+  @spec update_usage!(UserInviteToken.t()) :: nil | UserInviteToken.t() | no_return()
+  def update_usage!(%{invite_type: "date_limited"}), do: nil
+
+  def update_usage!(%{invite_type: "one_time"} = invite),
+    do: update_invite!(invite, %{used: true})
+
+  def update_usage!(%{invite_type: invite_type} = invite)
+      when invite_type == "reusable" or invite_type == "reusable_date_limited" do
+    changes = %{
+      uses: invite.uses + 1
+    }
+
+    changes =
+      if changes.uses >= invite.max_use do
+        Map.put(changes, :used, true)
+      else
+        changes
+      end
+
+    update_invite!(invite, changes)
+  end
 end
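The reworked invite module above exposes its whole life cycle through `create_invite/1`, `valid_invite?/1` and `update_usage!/1`. A minimal usage sketch, assuming a compiled application with a configured `Pleroma.Repo` (the parameter values are illustrative only):

```elixir
alias Pleroma.UserInviteToken

# A reusable invite capped at 5 uses; omitting max_use and expires_at would
# produce a "one_time" invite instead (see assign_type/1 above).
{:ok, invite} = UserInviteToken.create_invite(%{max_use: 5})

if UserInviteToken.valid_invite?(invite) do
  # After a successful registration, bump the usage counter; once uses
  # reaches max_use the invite is also flagged as used.
  UserInviteToken.update_usage!(invite)
end
```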
@@ -7,13 +7,14 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
   alias Pleroma.Instances
   alias Pleroma.Notification
   alias Pleroma.Object
+  alias Pleroma.Object.Fetcher
+  alias Pleroma.Pagination
   alias Pleroma.Repo
   alias Pleroma.Upload
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.MRF
   alias Pleroma.Web.ActivityPub.Transmogrifier
   alias Pleroma.Web.Federator
-  alias Pleroma.Web.OStatus
   alias Pleroma.Web.WebFinger

   import Ecto.Query

@@ -89,15 +90,36 @@ def decrease_note_count_if_public(actor, object) do
     if is_public?(object), do: User.decrease_note_count(actor), else: {:ok, actor}
   end

-  def insert(map, local \\ true) when is_map(map) do
+  def increase_replies_count_if_reply(%{
+        "object" => %{"inReplyTo" => reply_ap_id} = object,
+        "type" => "Create"
+      }) do
+    if is_public?(object) do
+      Object.increase_replies_count(reply_ap_id)
+    end
+  end
+
+  def increase_replies_count_if_reply(_create_data), do: :noop
+
+  def decrease_replies_count_if_reply(%Object{
+        data: %{"inReplyTo" => reply_ap_id} = object
+      }) do
+    if is_public?(object) do
+      Object.decrease_replies_count(reply_ap_id)
+    end
+  end
+
+  def decrease_replies_count_if_reply(_object), do: :noop
+
+  def insert(map, local \\ true, fake \\ false) when is_map(map) do
     with nil <- Activity.normalize(map),
-         map <- lazy_put_activity_defaults(map),
+         map <- lazy_put_activity_defaults(map, fake),
          :ok <- check_actor_is_active(map["actor"]),
          {_, true} <- {:remote_limit_error, check_remote_limit(map)},
          {:ok, map} <- MRF.filter(map),
-         :ok <- insert_full_object(map) do
-      {recipients, _, _} = get_recipients(map)
+         {recipients, _, _} = get_recipients(map),
+         {:fake, false, map, recipients} <- {:fake, fake, map, recipients},
+         {:ok, map, object} <- insert_full_object(map) do
       {:ok, activity} =
         Repo.insert(%Activity{
           data: map,

@@ -106,6 +128,14 @@ def insert(map, local \\ true) when is_map(map) do
           recipients: recipients
         })

+      # Splice in the child object if we have one.
+      activity =
+        if !is_nil(object) do
+          Map.put(activity, :object, object)
+        else
+          activity
+        end
+
       Task.start(fn ->
         Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
       end)

@@ -114,8 +144,23 @@ def insert(map, local \\ true) when is_map(map) do
       stream_out(activity)
       {:ok, activity}
     else
-      %Activity{} = activity -> {:ok, activity}
-      error -> {:error, error}
+      %Activity{} = activity ->
+        {:ok, activity}
+
+      {:fake, true, map, recipients} ->
+        activity = %Activity{
+          data: map,
+          local: local,
+          actor: map["actor"],
+          recipients: recipients,
+          id: "pleroma:fakeid"
+        }
+
+        Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
+        {:ok, activity}
+
+      error ->
+        {:error, error}
     end
   end

@@ -134,12 +179,14 @@ def stream_out(activity) do
     end

     if activity.data["type"] in ["Create"] do
-      activity.data["object"]
+      object = Object.normalize(activity)
+
+      object.data
       |> Map.get("tag", [])
       |> Enum.filter(fn tag -> is_bitstring(tag) end)
       |> Enum.each(fn tag -> Pleroma.Web.Streamer.stream("hashtag:" <> tag, activity) end)

-      if activity.data["object"]["attachment"] != [] do
+      if object.data["attachment"] != [] do
         Pleroma.Web.Streamer.stream("public:media", activity)

         if activity.local do

@@ -151,14 +198,14 @@ def stream_out(activity) do
       if !Enum.member?(activity.data["cc"] || [], public) &&
            !Enum.member?(
              activity.data["to"],
-             User.get_by_ap_id(activity.data["actor"]).follower_address
+             User.get_cached_by_ap_id(activity.data["actor"]).follower_address
            ),
          do: Pleroma.Web.Streamer.stream("direct", activity)
     end
   end
 end

-  def create(%{to: to, actor: actor, context: context, object: object} = params) do
+  def create(%{to: to, actor: actor, context: context, object: object} = params, fake \\ false) do
     additional = params[:additional] || %{}
     # only accept false as false value
     local = !(params[:local] == false)

@@ -169,12 +216,17 @@ def create(%{to: to, actor: actor, context: context, object: object} = params) d
             %{to: to, actor: actor, published: published, context: context, object: object},
             additional
           ),
-         {:ok, activity} <- insert(create_data, local),
+         {:ok, activity} <- insert(create_data, local, fake),
+         {:fake, false, activity} <- {:fake, fake, activity},
+         _ <- increase_replies_count_if_reply(create_data),
          # Changing note count prior to enqueuing federation task in order to avoid
          # race conditions on updating user.info
          {:ok, _actor} <- increase_note_count_if_public(actor, activity),
          :ok <- maybe_federate(activity) do
       {:ok, activity}
+    else
+      {:fake, true, activity} ->
+        {:ok, activity}
     end
   end

@@ -321,6 +373,7 @@ def delete(%Object{data: %{"id" => id, "actor" => actor}} = object, local \\ tru
             "deleted_activity_id" => activity && activity.id
           },
          {:ok, activity} <- insert(data, local),
+         _ <- decrease_replies_count_if_reply(object),
          # Changing note count prior to enqueuing federation task in order to avoid
          # race conditions on updating user.info
          {:ok, _actor} <- decrease_note_count_if_public(user, object),

@@ -370,20 +423,38 @@ def flag(
           content: content
         } = params
       ) do
-    additional = params[:additional] || %{}
-
     # only accept false as false value
     local = !(params[:local] == false)
+    forward = !(params[:forward] == false)
+
+    additional = params[:additional] || %{}

-    %{
+    params = %{
       actor: actor,
       context: context,
       account: account,
       statuses: statuses,
       content: content
     }
-    |> make_flag_data(additional)
-    |> insert(local)
+
+    additional =
+      if forward do
+        Map.merge(additional, %{"to" => [], "cc" => [account.ap_id]})
+      else
+        Map.merge(additional, %{"to" => [], "cc" => []})
+      end
+
+    with flag_data <- make_flag_data(params, additional),
+         {:ok, activity} <- insert(flag_data, local),
+         :ok <- maybe_federate(activity) do
+      Enum.each(User.all_superusers(), fn superuser ->
+        superuser
+        |> Pleroma.Emails.AdminEmail.report(actor, account, statuses, content)
+        |> Pleroma.Emails.Mailer.deliver_async()
+      end)
+
+      {:ok, activity}
+    end
   end

   def fetch_activities_for_context(context, opts \\ %{}) do

@@ -412,6 +483,7 @@ def fetch_activities_for_context(context, opts \\ %{}) do
         ),
         order_by: [desc: :id]
       )
+      |> Activity.with_preloaded_object()

     Repo.all(query)
   end

@@ -421,7 +493,7 @@ def fetch_public_activities(opts \\ %{}) do

     q
     |> restrict_unlisted()
-    |> Repo.all()
+    |> Pagination.fetch_paginated(opts)
     |> Enum.reverse()
   end

@@ -500,37 +572,49 @@ defp restrict_since(query, %{"since_id" => since_id}) do

   defp restrict_since(query, _), do: query

+  defp restrict_tag_reject(_query, %{"tag_reject" => _tag_reject, "skip_preload" => true}) do
+    raise "Can't use the child object without preloading!"
+  end
+
   defp restrict_tag_reject(query, %{"tag_reject" => tag_reject})
        when is_list(tag_reject) and tag_reject != [] do
     from(
-      activity in query,
-      where: fragment(~s(\(not \(? #> '{"object","tag"}'\) \\?| ?\)), activity.data, ^tag_reject)
+      [_activity, object] in query,
+      where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject)
     )
   end

   defp restrict_tag_reject(query, _), do: query

+  defp restrict_tag_all(_query, %{"tag_all" => _tag_all, "skip_preload" => true}) do
+    raise "Can't use the child object without preloading!"
+  end
+
   defp restrict_tag_all(query, %{"tag_all" => tag_all})
        when is_list(tag_all) and tag_all != [] do
     from(
-      activity in query,
-      where: fragment(~s(\(? #> '{"object","tag"}'\) \\?& ?), activity.data, ^tag_all)
+      [_activity, object] in query,
+      where: fragment("(?)->'tag' \\?& (?)", object.data, ^tag_all)
    )
   end

   defp restrict_tag_all(query, _), do: query

+  defp restrict_tag(_query, %{"tag" => _tag, "skip_preload" => true}) do
+    raise "Can't use the child object without preloading!"
+  end
+
   defp restrict_tag(query, %{"tag" => tag}) when is_list(tag) do
     from(
-      activity in query,
-      where: fragment(~s(\(? #> '{"object","tag"}'\) \\?| ?), activity.data, ^tag)
+      [_activity, object] in query,
+      where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag)
     )
   end

   defp restrict_tag(query, %{"tag" => tag}) when is_binary(tag) do
     from(
-      activity in query,
-      where: fragment(~s(? <@ (? #> '{"object","tag"}'\)), ^tag, activity.data)
+      [_activity, object] in query,
+      where: fragment("(?)->'tag' \\? (?)", object.data, ^tag)
     )
   end

@@ -564,26 +648,12 @@ defp restrict_recipients(query, recipients, user) do
     )
   end

-  defp restrict_limit(query, %{"limit" => limit}) do
-    from(activity in query, limit: ^limit)
-  end
-
-  defp restrict_limit(query, _), do: query
-
   defp restrict_local(query, %{"local_only" => true}) do
     from(activity in query, where: activity.local == true)
   end

   defp restrict_local(query, _), do: query

-  defp restrict_max(query, %{"max_id" => ""}), do: query
-
-  defp restrict_max(query, %{"max_id" => max_id}) do
-    from(activity in query, where: activity.id < ^max_id)
-  end
-
-  defp restrict_max(query, _), do: query
-
   defp restrict_actor(query, %{"actor_id" => actor_id}) do
     from(activity in query, where: activity.actor == ^actor_id)
   end

@@ -609,10 +679,14 @@ defp restrict_favorited_by(query, %{"favorited_by" => ap_id}) do

   defp restrict_favorited_by(query, _), do: query

+  defp restrict_media(_query, %{"only_media" => _val, "skip_preload" => true}) do
+    raise "Can't use the child object without preloading!"
+  end
+
   defp restrict_media(query, %{"only_media" => val}) when val == "true" or val == "1" do
     from(
-      activity in query,
-      where: fragment(~s(not (? #> '{"object","attachment"}' = ?\)), activity.data, ^[])
+      [_activity, object] in query,
+      where: fragment("not (?)->'attachment' = (?)", object.data, ^[])
     )
   end

@@ -654,7 +728,14 @@ defp restrict_blocked(query, %{"blocking_user" => %User{info: info}}) do
     from(
       activity in query,
       where: fragment("not (? = ANY(?))", activity.actor, ^blocks),
-      where: fragment("not (?->'to' \\?| ?)", activity.data, ^blocks),
+      where: fragment("not (? && ?)", activity.recipients, ^blocks),
+      where:
+        fragment(
+          "not (?->>'type' = 'Announce' and ?->'to' \\?| ?)",
+          activity.data,
+          activity.data,
+          ^blocks
+        ),
       where: fragment("not (split_part(?, '/', 3) = ANY(?))", activity.actor, ^domain_blocks)
     )
   end

@@ -679,23 +760,41 @@ defp restrict_pinned(query, %{"pinned" => "true", "pinned_activity_ids" => ids})

   defp restrict_pinned(query, _), do: query

+  defp restrict_muted_reblogs(query, %{"muting_user" => %User{info: info}}) do
+    muted_reblogs = info.muted_reblogs || []
+
+    from(
+      activity in query,
+      where:
+        fragment(
+          "not ( ?->>'type' = 'Announce' and ? = ANY(?))",
+          activity.data,
+          activity.actor,
+          ^muted_reblogs
+        )
+    )
+  end
+
+  defp restrict_muted_reblogs(query, _), do: query
+
+  defp maybe_preload_objects(query, %{"skip_preload" => true}), do: query
+
+  defp maybe_preload_objects(query, _) do
+    query
+    |> Activity.with_preloaded_object()
+  end
+
   def fetch_activities_query(recipients, opts \\ %{}) do
-    base_query =
-      from(
-        activity in Activity,
-        limit: 20,
-        order_by: [fragment("? desc nulls last", activity.id)]
-      )
+    base_query = from(activity in Activity)

     base_query
+    |> maybe_preload_objects(opts)
     |> restrict_recipients(recipients, opts["user"])
     |> restrict_tag(opts)
     |> restrict_tag_reject(opts)
     |> restrict_tag_all(opts)
     |> restrict_since(opts)
     |> restrict_local(opts)
-    |> restrict_limit(opts)
-    |> restrict_max(opts)
     |> restrict_actor(opts)
     |> restrict_type(opts)
     |> restrict_favorited_by(opts)

@@ -706,18 +805,19 @@ def fetch_activities_query(recipients, opts \\ %{}) do
     |> restrict_replies(opts)
     |> restrict_reblogs(opts)
     |> restrict_pinned(opts)
+    |> restrict_muted_reblogs(opts)
   end

   def fetch_activities(recipients, opts \\ %{}) do
     fetch_activities_query(recipients, opts)
-    |> Repo.all()
+    |> Pagination.fetch_paginated(opts)
     |> Enum.reverse()
   end

   def fetch_activities_bounded(recipients_to, recipients_cc, opts \\ %{}) do
     fetch_activities_query([], opts)
     |> restrict_to_cc(recipients_to, recipients_cc)
-    |> Repo.all()
+    |> Pagination.fetch_paginated(opts)
     |> Enum.reverse()
   end

@@ -782,7 +882,7 @@ def user_data_from_user_object(data) do
   end

   def fetch_and_prepare_user_from_ap_id(ap_id) do
-    with {:ok, data} <- fetch_and_contain_remote_object_from_id(ap_id) do
+    with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id) do
       user_data_from_user_object(data)
     else
       e -> Logger.error("Could not decode user at fetch #{ap_id}, #{inspect(e)}")

@@ -790,7 +890,7 @@ def fetch_and_prepare_user_from_ap_id(ap_id) do
   end

   def make_user_from_ap_id(ap_id) do
-    if _user = User.get_by_ap_id(ap_id) do
+    if _user = User.get_cached_by_ap_id(ap_id) do
       Transmogrifier.upgrade_user_from_ap_id(ap_id)
     else
       with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id) do

@@ -892,60 +992,6 @@ def publish_one(%{inbox: inbox, json: json, actor: actor, id: id} = params) do
     end
   end

-  # TODO:
-  # This will create a Create activity, which we need internally at the moment.
-  def fetch_object_from_id(id) do
-    if object = Object.get_cached_by_ap_id(id) do
-      {:ok, object}
-    else
-      with {:ok, data} <- fetch_and_contain_remote_object_from_id(id),
-           nil <- Object.normalize(data),
-           params <- %{
-             "type" => "Create",
-             "to" => data["to"],
-             "cc" => data["cc"],
-             "actor" => data["actor"] || data["attributedTo"],
-             "object" => data
-           },
-           :ok <- Transmogrifier.contain_origin(id, params),
-           {:ok, activity} <- Transmogrifier.handle_incoming(params) do
-        {:ok, Object.normalize(activity.data["object"])}
-      else
-        {:error, {:reject, nil}} ->
-          {:reject, nil}
-
-        object = %Object{} ->
-          {:ok, object}
-
-        _e ->
-          Logger.info("Couldn't get object via AP, trying out OStatus fetching...")
-
-          case OStatus.fetch_activity_from_url(id) do
-            {:ok, [activity | _]} -> {:ok, Object.normalize(activity.data["object"])}
-            e -> e
-          end
-      end
-    end
-  end
-
-  def fetch_and_contain_remote_object_from_id(id) do
-    Logger.info("Fetching object #{id} via AP")
-
-    with true <- String.starts_with?(id, "http"),
-         {:ok, %{body: body, status: code}} when code in 200..299 <-
-           @httpoison.get(
-             id,
-             [{:Accept, "application/activity+json"}]
-           ),
-         {:ok, data} <- Jason.decode(body),
-         :ok <- Transmogrifier.contain_origin_from_id(id, data) do
-      {:ok, data}
-    else
-      e ->
-        {:error, e}
-    end
-  end
-
   # filter out broken threads
   def contain_broken_threads(%Activity{} = activity, %User{} = user) do
     entire_thread_visible_for_user?(activity, user)
@@ -7,6 +7,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do

   alias Pleroma.Activity
   alias Pleroma.Object
+  alias Pleroma.Object.Fetcher
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.ObjectView

@@ -153,9 +154,10 @@ def outbox(conn, %{"nickname" => nickname} = params) do
   end

   def inbox(%{assigns: %{valid_signature: true}} = conn, %{"nickname" => nickname} = params) do
-    with %User{} = user <- User.get_cached_by_nickname(nickname),
-         true <- Utils.recipient_in_message(user.ap_id, params),
-         params <- Utils.maybe_splice_recipient(user.ap_id, params) do
+    with %User{} = recipient <- User.get_cached_by_nickname(nickname),
+         %User{} = actor <- User.get_or_fetch_by_ap_id(params["actor"]),
+         true <- Utils.recipient_in_message(recipient, actor, params),
+         params <- Utils.maybe_splice_recipient(recipient.ap_id, params) do
       Federator.incoming_ap_doc(params)
       json(conn, "ok")
     end

@@ -172,7 +174,7 @@ def inbox(conn, %{"type" => "Create"} = params) do
       "Signature missing or not from author, relayed Create message, fetching object from source"
     )

-    ActivityPub.fetch_object_from_id(params["object"]["id"])
+    Fetcher.fetch_object_from_id(params["object"]["id"])

     json(conn, "ok")
   end
@@ -4,6 +4,10 @@
 defmodule Pleroma.Web.ActivityPub.MRF.KeywordPolicy do
   @behaviour Pleroma.Web.ActivityPub.MRF

+  defp string_matches?(string, _) when not is_binary(string) do
+    false
+  end
+
   defp string_matches?(string, pattern) when is_binary(pattern) do
     String.contains?(string, pattern)
   end

@@ -44,6 +48,20 @@ defp check_ftl_removal(
   end

   defp check_replace(%{"object" => %{"content" => content, "summary" => summary}} = message) do
+    content =
+      if is_binary(content) do
+        content
+      else
+        ""
+      end
+
+    summary =
+      if is_binary(summary) do
+        summary
+      else
+        ""
+      end
+
     {content, summary} =
       Enum.reduce(
         Pleroma.Config.get([:mrf_keyword, :replace]),

@@ -60,11 +78,6 @@ defp check_replace(%{"object" => %{"content" => content, "summary" => summary}}
       |> put_in(["object", "summary"], summary)}
   end

-  @impl true
-  def filter(%{"object" => %{"content" => nil}} = message) do
-    {:ok, message}
-  end
-
   @impl true
   def filter(%{"type" => "Create", "object" => %{"content" => _content}} = message) do
     with {:ok, message} <- check_reject(message),
@@ -41,7 +41,7 @@ def unfollow(target_instance) do

   def publish(%Activity{data: %{"type" => "Create"}} = activity) do
     with %User{} = user <- get_actor(),
-         %Object{} = object <- Object.normalize(activity.data["object"]["id"]) do
+         %Object{} = object <- Object.normalize(activity) do
       ActivityPub.announce(user, object, nil, true, false)
     else
       e -> Logger.error("error: #{inspect(e)}")
@@ -8,8 +8,10 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
   """
   alias Pleroma.Activity
   alias Pleroma.Object
+  alias Pleroma.Object.Containment
   alias Pleroma.Repo
   alias Pleroma.User
+  alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.ActivityPub.Visibility
@@ -18,56 +20,6 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
 
   require Logger
 
-  def get_actor(%{"actor" => actor}) when is_binary(actor) do
-    actor
-  end
-
-  def get_actor(%{"actor" => actor}) when is_list(actor) do
-    if is_binary(Enum.at(actor, 0)) do
-      Enum.at(actor, 0)
-    else
-      Enum.find(actor, fn %{"type" => type} -> type in ["Person", "Service", "Application"] end)
-      |> Map.get("id")
-    end
-  end
-
-  def get_actor(%{"actor" => %{"id" => id}}) when is_bitstring(id) do
-    id
-  end
-
-  def get_actor(%{"actor" => nil, "attributedTo" => actor}) when not is_nil(actor) do
-    get_actor(%{"actor" => actor})
-  end
-
-  @doc """
-  Checks that an imported AP object's actor matches the domain it came from.
-  """
-  def contain_origin(_id, %{"actor" => nil}), do: :error
-
-  def contain_origin(id, %{"actor" => _actor} = params) do
-    id_uri = URI.parse(id)
-    actor_uri = URI.parse(get_actor(params))
-
-    if id_uri.host == actor_uri.host do
-      :ok
-    else
-      :error
-    end
-  end
-
-  def contain_origin_from_id(_id, %{"id" => nil}), do: :error
-
-  def contain_origin_from_id(id, %{"id" => other_id} = _params) do
-    id_uri = URI.parse(id)
-    other_uri = URI.parse(other_id)
-
-    if id_uri.host == other_uri.host do
-      :ok
-    else
-      :error
-    end
-  end
-
   @doc """
   Modifies an incoming AP object (mastodon format) to our internal format.
   """
@@ -83,14 +35,34 @@ def fix_object(object) do
     |> fix_content_map
     |> fix_likes
     |> fix_addressing
+    |> fix_summary
+  end
+
+  def fix_summary(%{"summary" => nil} = object) do
+    object
+    |> Map.put("summary", "")
+  end
+
+  def fix_summary(%{"summary" => _} = object) do
+    # summary is present, nothing to do
+    object
+  end
+
+  def fix_summary(object) do
+    object
+    |> Map.put("summary", "")
   end
 
   def fix_addressing_list(map, field) do
-    if is_binary(map[field]) do
-      map
-      |> Map.put(field, [map[field]])
-    else
-      map
+    cond do
+      is_binary(map[field]) ->
+        Map.put(map, field, [map[field]])
+
+      is_nil(map[field]) ->
+        Map.put(map, field, [])
+
+      true ->
+        map
     end
   end
 
@@ -128,18 +100,47 @@ def fix_explicit_addressing(object) do
     |> fix_explicit_addressing(explicit_mentions)
   end
 
+  # if as:Public is addressed, then make sure the followers collection is also addressed
+  # so that the activities will be delivered to local users.
+  def fix_implicit_addressing(%{"to" => to, "cc" => cc} = object, followers_collection) do
+    recipients = to ++ cc
+
+    if followers_collection not in recipients do
+      cond do
+        "https://www.w3.org/ns/activitystreams#Public" in cc ->
+          to = to ++ [followers_collection]
+          Map.put(object, "to", to)
+
+        "https://www.w3.org/ns/activitystreams#Public" in to ->
+          cc = cc ++ [followers_collection]
+          Map.put(object, "cc", cc)
+
+        true ->
+          object
+      end
+    else
+      object
+    end
+  end
+
+  def fix_implicit_addressing(object, _), do: object
+
   def fix_addressing(object) do
+    %User{} = user = User.get_or_fetch_by_ap_id(object["actor"])
+    followers_collection = User.ap_followers(user)
+
     object
     |> fix_addressing_list("to")
     |> fix_addressing_list("cc")
     |> fix_addressing_list("bto")
     |> fix_addressing_list("bcc")
     |> fix_explicit_addressing
+    |> fix_implicit_addressing(followers_collection)
   end
 
   def fix_actor(%{"attributedTo" => actor} = object) do
     object
-    |> Map.put("actor", get_actor(%{"actor" => actor}))
+    |> Map.put("actor", Containment.get_actor(%{"actor" => actor}))
   end
 
   # Check for standardisation
@@ -174,14 +175,13 @@ def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object)
         ""
       end
 
-    case fetch_obj_helper(in_reply_to_id) do
+    case get_obj_helper(in_reply_to_id) do
       {:ok, replied_object} ->
-        with %Activity{} = activity <-
+        with %Activity{} = _activity <-
               Activity.get_create_by_object_ap_id(replied_object.data["id"]) do
          object
          |> Map.put("inReplyTo", replied_object.data["id"])
          |> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id)
-          |> Map.put("inReplyToStatusId", activity.id)
          |> Map.put("conversation", replied_object.data["context"] || object["conversation"])
          |> Map.put("context", replied_object.data["context"] || object["conversation"])
         else
@@ -355,6 +355,40 @@ defp get_follow_activity(follow_object, followed) do
     end
   end
 
+  # Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them
+  # with nil ID.
+  def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data) do
+    with context <- data["context"] || Utils.generate_context_id(),
+         content <- data["content"] || "",
+         %User{} = actor <- User.get_cached_by_ap_id(actor),
+
+         # Reduce the object list to find the reported user.
+         %User{} = account <-
+           Enum.reduce_while(objects, nil, fn ap_id, _ ->
+             with %User{} = user <- User.get_cached_by_ap_id(ap_id) do
+               {:halt, user}
+             else
+               _ -> {:cont, nil}
+             end
+           end),
+
+         # Remove the reported user from the object list.
+         statuses <- Enum.filter(objects, fn ap_id -> ap_id != account.ap_id end) do
+      params = %{
+        actor: actor,
+        context: context,
+        account: account,
+        statuses: statuses,
+        content: content,
+        additional: %{
+          "cc" => [account.ap_id]
+        }
+      }
+
+      ActivityPub.flag(params)
+    end
+  end
+
   # disallow objects with bogus IDs
   def handle_incoming(%{"id" => nil}), do: :error
   def handle_incoming(%{"id" => ""}), do: :error
@@ -366,7 +400,7 @@ def handle_incoming(%{"id" => id}) when not (is_binary(id) and length(id) > 8),
   # - emoji
   def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = object} = data)
       when objtype in ["Article", "Note", "Video", "Page"] do
-    actor = get_actor(data)
+    actor = Containment.get_actor(data)
 
     data =
       Map.put(data, "actor", actor)
@@ -404,27 +438,53 @@ def handle_incoming(
     with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
         {:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
         {:ok, activity} <- ActivityPub.follow(follower, followed, id, false) do
-      if not User.locked?(followed) do
+      with deny_follow_blocked <- Pleroma.Config.get([:user, :deny_follow_blocked]),
+           {:user_blocked, false} <-
+             {:user_blocked, User.blocks?(followed, follower) && deny_follow_blocked},
+           {:user_locked, false} <- {:user_locked, User.locked?(followed)},
+           {:follow, {:ok, follower}} <- {:follow, User.follow(follower, followed)} do
        ActivityPub.accept(%{
          to: [follower.ap_id],
          actor: followed,
          object: data,
          local: true
        })
-        User.follow(follower, followed)
+      else
+        {:user_blocked, true} ->
+          {:ok, _} = Utils.update_follow_state(activity, "reject")
+
+          ActivityPub.reject(%{
+            to: [follower.ap_id],
+            actor: followed,
+            object: data,
+            local: true
+          })
+
+        {:follow, {:error, _}} ->
+          {:ok, _} = Utils.update_follow_state(activity, "reject")
+
+          ActivityPub.reject(%{
+            to: [follower.ap_id],
+            actor: followed,
+            object: data,
+            local: true
+          })
+
+        {:user_locked, true} ->
+          :noop
       end
 
      {:ok, activity}
    else
-      _e -> :error
+      _e ->
+        :error
    end
  end

  def handle_incoming(
        %{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => _id} = data
      ) do
-    with actor <- get_actor(data),
+    with actor <- Containment.get_actor(data),
         {:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
         {:ok, follow_activity} <- get_follow_activity(follow_object, followed),
         {:ok, follow_activity} <- Utils.update_follow_state(follow_activity, "accept"),
@@ -450,7 +510,7 @@ def handle_incoming(
  def handle_incoming(
        %{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => _id} = data
      ) do
-    with actor <- get_actor(data),
+    with actor <- Containment.get_actor(data),
         {:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
         {:ok, follow_activity} <- get_follow_activity(follow_object, followed),
         {:ok, follow_activity} <- Utils.update_follow_state(follow_activity, "reject"),
@@ -474,9 +534,9 @@ def handle_incoming(
  def handle_incoming(
        %{"type" => "Like", "object" => object_id, "actor" => _actor, "id" => id} = data
      ) do
-    with actor <- get_actor(data),
+    with actor <- Containment.get_actor(data),
         {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
-         {:ok, object} <- get_obj_helper(object_id) || fetch_obj_helper(object_id),
+         {:ok, object} <- get_obj_helper(object_id),
         {:ok, activity, _object} <- ActivityPub.like(actor, object, id, false) do
      {:ok, activity}
    else
@@ -487,9 +547,9 @@ def handle_incoming(
  def handle_incoming(
        %{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data
      ) do
-    with actor <- get_actor(data),
+    with actor <- Containment.get_actor(data),
         {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
-         {:ok, object} <- get_obj_helper(object_id) || fetch_obj_helper(object_id),
+         {:ok, object} <- get_obj_helper(object_id),
         public <- Visibility.is_public?(data),
         {:ok, activity, _object} <- ActivityPub.announce(actor, object, id, false, public) do
      {:ok, activity}
@@ -503,7 +563,7 @@ def handle_incoming(
        data
      )
      when object_type in ["Person", "Application", "Service", "Organization"] do
-    with %User{ap_id: ^actor_id} = actor <- User.get_by_ap_id(object["id"]) do
+    with %User{ap_id: ^actor_id} = actor <- User.get_cached_by_ap_id(object["id"]) do
      {:ok, new_user_data} = ActivityPub.user_data_from_user_object(object)

      banner = new_user_data[:info]["banner"]
@@ -542,10 +602,10 @@ def handle_incoming(
      ) do
    object_id = Utils.get_ap_id(object_id)

-    with actor <- get_actor(data),
+    with actor <- Containment.get_actor(data),
         {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
-         {:ok, object} <- get_obj_helper(object_id) || fetch_obj_helper(object_id),
-         :ok <- contain_origin(actor.ap_id, object.data),
+         {:ok, object} <- get_obj_helper(object_id),
+         :ok <- Containment.contain_origin(actor.ap_id, object.data),
         {:ok, activity} <- ActivityPub.delete(object, false) do
      {:ok, activity}
    else
@@ -561,9 +621,9 @@ def handle_incoming(
        "id" => id
      } = data
      ) do
-    with actor <- get_actor(data),
+    with actor <- Containment.get_actor(data),
         {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
-         {:ok, object} <- get_obj_helper(object_id) || fetch_obj_helper(object_id),
+         {:ok, object} <- get_obj_helper(object_id),
         {:ok, activity, _} <- ActivityPub.unannounce(actor, object, id, false) do
      {:ok, activity}
    else
@@ -631,9 +691,9 @@ def handle_incoming(
        "id" => id
      } = data
      ) do
-    with actor <- get_actor(data),
+    with actor <- Containment.get_actor(data),
         {:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
-         {:ok, object} <- get_obj_helper(object_id) || fetch_obj_helper(object_id),
+         {:ok, object} <- get_obj_helper(object_id),
         {:ok, activity, _, _} <- ActivityPub.unlike(actor, object, id, false) do
      {:ok, activity}
    else
@@ -643,9 +703,6 @@ def handle_incoming(
 
   def handle_incoming(_), do: :error
 
-  def fetch_obj_helper(id) when is_bitstring(id), do: ActivityPub.fetch_object_from_id(id)
-  def fetch_obj_helper(obj) when is_map(obj), do: ActivityPub.fetch_object_from_id(obj["id"])
-
   def get_obj_helper(id) do
     if object = Object.normalize(id), do: {:ok, object}, else: nil
   end
@@ -682,9 +739,9 @@ def prepare_object(object) do
   # internal -> Mastodon
   # """
 
-  def prepare_outgoing(%{"type" => "Create", "object" => object} = data) do
+  def prepare_outgoing(%{"type" => "Create", "object" => object_id} = data) do
     object =
-      object
+      Object.normalize(object_id).data
       |> prepare_object
 
     data =
@@ -745,7 +802,7 @@ def prepare_outgoing(%{"type" => _type} = data) do
 
   def maybe_fix_object_url(data) do
     if is_binary(data["object"]) and not String.starts_with?(data["object"], "http") do
-      case fetch_obj_helper(data["object"]) do
+      case get_obj_helper(data["object"]) do
         {:ok, relative_object} ->
           if relative_object.data["external_url"] do
             _data =
@@ -887,8 +944,9 @@ defp strip_internal_tags(%{"tag" => tags} = object) do
 
   defp strip_internal_tags(object), do: object
 
-  defp user_upgrade_task(user) do
-    old_follower_address = User.ap_followers(user)
+  def perform(:user_upgrade, user) do
+    # we pass a fake user so that the followers collection is stripped away
+    old_follower_address = User.ap_followers(%User{nickname: user.nickname})
 
     q =
       from(
@@ -931,28 +989,18 @@ defp user_upgrade_task(user) do
     Repo.update_all(q, [])
   end
 
-  def upgrade_user_from_ap_id(ap_id, async \\ true) do
-    with %User{local: false} = user <- User.get_by_ap_id(ap_id),
-         {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id) do
-      already_ap = User.ap_enabled?(user)
-
-      {:ok, user} =
-        User.upgrade_changeset(user, data)
-        |> Repo.update()
-
-      if !already_ap do
-        # This could potentially take a long time, do it in the background
-        if async do
-          Task.start(fn ->
-            user_upgrade_task(user)
-          end)
-        else
-          user_upgrade_task(user)
-        end
+  def upgrade_user_from_ap_id(ap_id) do
+    with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
+         {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
+         already_ap <- User.ap_enabled?(user),
+         {:ok, user} <- user |> User.upgrade_changeset(data) |> User.update_and_set_cache() do
+      unless already_ap do
+        PleromaJobQueue.enqueue(:transmogrifier, __MODULE__, [:user_upgrade, user])
       end
 
       {:ok, user}
     else
+      %User{} = user -> {:ok, user}
       e -> e
     end
   end
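Note (illustrative sketch, not part of the commit): the new cond-based fix_addressing_list/2 above normalizes an addressing field to a list, so the later addressing fixes can always append to it.

    alias Pleroma.Web.ActivityPub.Transmogrifier

    # a single recipient string is wrapped in a list (example URL only)
    Transmogrifier.fix_addressing_list(%{"to" => "https://example.com/users/alice"}, "to")
    #=> %{"to" => ["https://example.com/users/alice"]}

    # a missing field becomes an empty list instead of staying nil
    Transmogrifier.fix_addressing_list(%{}, "cc")
    #=> %{"cc" => []}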
@@ -52,7 +52,7 @@ defp recipient_in_collection(ap_id, coll) when is_binary(coll), do: ap_id == col
   defp recipient_in_collection(ap_id, coll) when is_list(coll), do: ap_id in coll
   defp recipient_in_collection(_, _), do: false
 
-  def recipient_in_message(ap_id, params) do
+  def recipient_in_message(%User{ap_id: ap_id} = recipient, %User{} = actor, params) do
     cond do
       recipient_in_collection(ap_id, params["to"]) ->
         true
@@ -71,6 +71,11 @@ def recipient_in_message(ap_id, params) do
       !params["to"] && !params["cc"] && !params["bto"] && !params["bcc"] ->
         true
 
+      # if the message is sent from somebody the user is following, then assume it
+      # is addressed to the recipient
+      User.following?(recipient, actor) ->
+        true
+
       true ->
         false
     end
@@ -99,7 +104,10 @@ def make_json_ld_header do
     %{
       "@context" => [
         "https://www.w3.org/ns/activitystreams",
-        "#{Web.base_url()}/schemas/litepub-0.1.jsonld"
+        "#{Web.base_url()}/schemas/litepub-0.1.jsonld",
+        %{
+          "@language" => "und"
+        }
       ]
     }
   end
@@ -175,18 +183,26 @@ def maybe_federate(_), do: :ok
   Adds an id and a published data if they aren't there,
   also adds it to an included object
   """
-  def lazy_put_activity_defaults(map) do
-    %{data: %{"id" => context}, id: context_id} = create_context(map["context"])
-
+  def lazy_put_activity_defaults(map, fake \\ false) do
     map =
-      map
-      |> Map.put_new_lazy("id", &generate_activity_id/0)
-      |> Map.put_new_lazy("published", &make_date/0)
-      |> Map.put_new("context", context)
-      |> Map.put_new("context_id", context_id)
+      unless fake do
+        %{data: %{"id" => context}, id: context_id} = create_context(map["context"])
+
+        map
+        |> Map.put_new_lazy("id", &generate_activity_id/0)
+        |> Map.put_new_lazy("published", &make_date/0)
+        |> Map.put_new("context", context)
+        |> Map.put_new("context_id", context_id)
+      else
+        map
+        |> Map.put_new("id", "pleroma:fakeid")
+        |> Map.put_new_lazy("published", &make_date/0)
+        |> Map.put_new("context", "pleroma:fakecontext")
+        |> Map.put_new("context_id", -1)
+      end
 
     if is_map(map["object"]) do
-      object = lazy_put_object_defaults(map["object"], map)
+      object = lazy_put_object_defaults(map["object"], map, fake)
       %{map | "object" => object}
     else
       map
@@ -196,7 +212,18 @@ def lazy_put_activity_defaults(map) do
   @doc """
   Adds an id and published date if they aren't there.
   """
-  def lazy_put_object_defaults(map, activity \\ %{}) do
+  def lazy_put_object_defaults(map, activity \\ %{}, fake)
+
+  def lazy_put_object_defaults(map, activity, true = _fake) do
+    map
+    |> Map.put_new_lazy("published", &make_date/0)
+    |> Map.put_new("id", "pleroma:fake_object_id")
+    |> Map.put_new("context", activity["context"])
+    |> Map.put_new("fake", true)
+    |> Map.put_new("context_id", activity["context_id"])
+  end
+
+  def lazy_put_object_defaults(map, activity, _fake) do
     map
     |> Map.put_new_lazy("id", &generate_object_id/0)
     |> Map.put_new_lazy("published", &make_date/0)
@@ -207,14 +234,18 @@ def lazy_put_object_defaults(map, activity \\ %{}) do
   @doc """
   Inserts a full object if it is contained in an activity.
   """
-  def insert_full_object(%{"object" => %{"type" => type} = object_data})
+  def insert_full_object(%{"object" => %{"type" => type} = object_data} = map)
       when is_map(object_data) and type in @supported_object_types do
-    with {:ok, _} <- Object.create(object_data) do
-      :ok
+    with {:ok, object} <- Object.create(object_data) do
+      map =
+        map
+        |> Map.put("object", object.data["id"])
+
+      {:ok, map, object}
     end
   end
 
-  def insert_full_object(_), do: :ok
+  def insert_full_object(map), do: {:ok, map, nil}
 
   def update_object_in_activities(%{data: %{"id" => id}} = object) do
     # TODO
@@ -354,7 +385,7 @@ def update_follow_state(
       [state, actor, object]
     )
 
-    activity = Repo.get(Activity, activity.id)
+    activity = Activity.get_by_id(activity.id)
     {:ok, activity}
   rescue
     e ->
@@ -404,13 +435,15 @@ def fetch_latest_follow(%User{ap_id: follower_id}, %User{ap_id: followed_id}) do
            activity.data
         ),
        where: activity.actor == ^follower_id,
+        # this is to use the index
        where:
          fragment(
-            "? @> ?",
+            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
-            ^%{object: followed_id}
+            activity.data,
+            ^followed_id
          ),
-        order_by: [desc: :id],
+        order_by: [fragment("? desc nulls last", activity.id)],
        limit: 1
      )
 
@@ -567,13 +600,15 @@ def fetch_latest_block(%User{ap_id: blocker_id}, %User{ap_id: blocked_id}) do
            activity.data
         ),
        where: activity.actor == ^blocker_id,
+        # this is to use the index
        where:
          fragment(
-            "? @> ?",
+            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
            activity.data,
-            ^%{object: blocked_id}
+            activity.data,
+            ^blocked_id
          ),
-        order_by: [desc: :id],
+        order_by: [fragment("? desc nulls last", activity.id)],
        limit: 1
      )
 
@@ -621,7 +656,13 @@ def make_create_data(params, additional) do
   #### Flag-related helpers
 
   def make_flag_data(params, additional) do
-    status_ap_ids = Enum.map(params.statuses || [], & &1.data["id"])
+    status_ap_ids =
+      Enum.map(params.statuses || [], fn
+        %Activity{} = act -> act.data["id"]
+        act when is_map(act) -> act["id"]
+        act when is_binary(act) -> act
+      end)
+
     object = [params.account.ap_id] ++ status_ap_ids
 
     %{
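Note (illustrative sketch, not part of the commit): with the new `fake` flag, lazy_put_activity_defaults/2 above fills placeholder values instead of minting real IDs and contexts, presumably for activities that are never persisted.

    alias Pleroma.Web.ActivityPub.Utils

    Utils.lazy_put_activity_defaults(%{"type" => "Create", "object" => %{}}, true)
    #=> map containing "id" => "pleroma:fakeid", "context" => "pleroma:fakecontext",
    #   "context_id" => -1, and an object with "id" => "pleroma:fake_object_id", "fake" => true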
@@ -17,7 +17,7 @@ def render("object.json", %{object: %Object{} = object}) do
 
   def render("object.json", %{object: %Activity{data: %{"type" => "Create"}} = activity}) do
     base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header()
-    object = Object.normalize(activity.data["object"])
+    object = Object.normalize(activity)
 
     additional =
       Transmogrifier.prepare_object(activity.data)
@@ -28,7 +28,7 @@ def render("object.json", %{object: %Activity{data: %{"type" => "Create"}} = act
 
   def render("object.json", %{object: %Activity{} = activity}) do
     base = Pleroma.Web.ActivityPub.Utils.make_json_ld_header()
-    object = Object.normalize(activity.data["object"])
+    object = Object.normalize(activity)
 
     additional =
       Transmogrifier.prepare_object(activity.data)

@@ -87,16 +87,10 @@ def render("user.json", %{user: user}) do
         "publicKeyPem" => public_key
       },
       "endpoints" => endpoints,
-      "icon" => %{
-        "type" => "Image",
-        "url" => User.avatar_url(user)
-      },
-      "image" => %{
-        "type" => "Image",
-        "url" => User.banner_url(user)
-      },
       "tag" => user.info.source_data["tag"] || []
     }
+    |> Map.merge(maybe_make_image(&User.avatar_url/2, "icon", user))
+    |> Map.merge(maybe_make_image(&User.banner_url/2, "image", user))
     |> Map.merge(Utils.make_json_ld_header())
   end
 
@@ -294,4 +288,17 @@ def collection(collection, iri, page, show_items \\ true, total \\ nil) do
       map
     end
   end
+
+  defp maybe_make_image(func, key, user) do
+    if image = func.(user, no_default: true) do
+      %{
+        key => %{
+          "type" => "Image",
+          "url" => image
+        }
+      }
+    else
+      %{}
+    end
+  end
 end
@@ -41,16 +41,21 @@ def visible_for_user?(activity, user) do
   # guard
   def entire_thread_visible_for_user?(nil, _user), do: false
 
-  # child
-  def entire_thread_visible_for_user?(
-        %Activity{data: %{"object" => %{"inReplyTo" => parent_id}}} = tail,
-        user
-      )
-      when is_binary(parent_id) do
-    parent = Activity.get_in_reply_to_activity(tail)
-    visible_for_user?(tail, user) && entire_thread_visible_for_user?(parent, user)
-  end
-
-  # root
-  def entire_thread_visible_for_user?(tail, user), do: visible_for_user?(tail, user)
+  # XXX: Probably even more inefficient than the previous implementation intended to be a placeholder untill https://git.pleroma.social/pleroma/pleroma/merge_requests/971 is in develop
+  # credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength
+  def entire_thread_visible_for_user?(
+        %Activity{} = tail,
+        # %Activity{data: %{"object" => %{"inReplyTo" => parent_id}}} = tail,
+        user
+      ) do
+    case Object.normalize(tail) do
+      %{data: %{"inReplyTo" => parent_id}} when is_binary(parent_id) ->
+        parent = Activity.get_in_reply_to_activity(tail)
+        visible_for_user?(tail, user) && entire_thread_visible_for_user?(parent, user)
+
+      _ ->
+        visible_for_user?(tail, user)
+    end
+  end
 end
@@ -3,27 +3,49 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 
 defmodule Pleroma.Web.AdminAPI.AdminAPIController do
-  @users_page_size 50
-
   use Pleroma.Web, :controller
   alias Pleroma.User
+  alias Pleroma.UserInviteToken
   alias Pleroma.Web.ActivityPub.Relay
   alias Pleroma.Web.AdminAPI.AccountView
+  alias Pleroma.Web.AdminAPI.Search
+
   import Pleroma.Web.ControllerHelper, only: [json_response: 3]
 
   require Logger
 
+  @users_page_size 50
+
   action_fallback(:errors)
 
   def user_delete(conn, %{"nickname" => nickname}) do
-    User.get_by_nickname(nickname)
+    User.get_cached_by_nickname(nickname)
     |> User.delete()
 
     conn
     |> json(nickname)
   end
 
+  def user_follow(conn, %{"follower" => follower_nick, "followed" => followed_nick}) do
+    with %User{} = follower <- User.get_cached_by_nickname(follower_nick),
+         %User{} = followed <- User.get_cached_by_nickname(followed_nick) do
+      User.follow(follower, followed)
+    end
+
+    conn
+    |> json("ok")
+  end
+
+  def user_unfollow(conn, %{"follower" => follower_nick, "followed" => followed_nick}) do
+    with %User{} = follower <- User.get_cached_by_nickname(follower_nick),
+         %User{} = followed <- User.get_cached_by_nickname(followed_nick) do
+      User.unfollow(follower, followed)
+    end
+
+    conn
+    |> json("ok")
+  end
+
   def user_create(
         conn,
         %{"nickname" => nickname, "email" => email, "password" => password}
@@ -44,8 +66,17 @@ def user_create(
     |> json(user.nickname)
   end
 
+  def user_show(conn, %{"nickname" => nickname}) do
+    with %User{} = user <- User.get_cached_by_nickname(nickname) do
+      conn
+      |> json(AccountView.render("show.json", %{user: user}))
+    else
+      _ -> {:error, :not_found}
+    end
+  end
+
   def user_toggle_activation(conn, %{"nickname" => nickname}) do
-    user = User.get_by_nickname(nickname)
+    user = User.get_cached_by_nickname(nickname)
 
     {:ok, updated_user} = User.deactivate(user, !user.info.deactivated)
 
@@ -63,17 +94,17 @@ def untag_users(conn, %{"nicknames" => nicknames, "tags" => tags}) do
       do: json_response(conn, :no_content, "")
   end
 
-  def list_users(%{assigns: %{user: admin}} = conn, params) do
+  def list_users(conn, params) do
     {page, page_size} = page_params(params)
+    filters = maybe_parse_filters(params["filters"])
 
-    with {:ok, users, count} <-
-           User.search_for_admin(%{
-             query: params["query"],
-             admin: admin,
-             local: params["local_only"] == "true",
-             page: page,
-             page_size: page_size
-           }),
+    search_params = %{
+      query: params["query"],
+      page: page,
+      page_size: page_size
+    }
+
+    with {:ok, users, count} <- Search.user(Map.merge(search_params, filters)),
         do:
           conn
          |> json(
@@ -85,9 +116,22 @@ def list_users(%{assigns: %{user: admin}} = conn, params) do
          )
  end

+  @filters ~w(local external active deactivated)
+
+  defp maybe_parse_filters(filters) when is_nil(filters) or filters == "", do: %{}
+
+  @spec maybe_parse_filters(String.t()) :: %{required(String.t()) => true} | %{}
+  defp maybe_parse_filters(filters) do
+    filters
+    |> String.split(",")
+    |> Enum.filter(&Enum.member?(@filters, &1))
+    |> Enum.map(&String.to_atom(&1))
+    |> Enum.into(%{}, &{&1, true})
+  end
+
   def right_add(conn, %{"permission_group" => permission_group, "nickname" => nickname})
       when permission_group in ["moderator", "admin"] do
-    user = User.get_by_nickname(nickname)
+    user = User.get_cached_by_nickname(nickname)
 
     info =
       %{}
@@ -112,7 +156,7 @@ def right_add(conn, _) do
   end
 
   def right_get(conn, %{"nickname" => nickname}) do
-    user = User.get_by_nickname(nickname)
+    user = User.get_cached_by_nickname(nickname)
 
     conn
     |> json(%{
@@ -134,7 +178,7 @@ def right_delete(
       |> put_status(403)
       |> json(%{error: "You can't revoke your own admin status."})
     else
-      user = User.get_by_nickname(nickname)
+      user = User.get_cached_by_nickname(nickname)
 
       info =
         %{}
@@ -160,7 +204,7 @@ def right_delete(conn, _) do
 
   def set_activation_status(conn, %{"nickname" => nickname, "status" => status}) do
     with {:ok, status} <- Ecto.Type.cast(:boolean, status),
-         %User{} = user <- User.get_by_nickname(nickname),
+         %User{} = user <- User.get_cached_by_nickname(nickname),
         {:ok, _} <- User.deactivate(user, !status),
        do: json_response(conn, :no_content, "")
  end
@@ -192,29 +236,58 @@ def email_invite(%{assigns: %{user: user}} = conn, %{"email" => email} = params)
    with true <-
           Pleroma.Config.get([:instance, :invites_enabled]) &&
            !Pleroma.Config.get([:instance, :registrations_open]),
-         {:ok, invite_token} <- Pleroma.UserInviteToken.create_token(),
+         {:ok, invite_token} <- UserInviteToken.create_invite(),
         email <-
-           Pleroma.UserEmail.user_invitation_email(user, invite_token, email, params["name"]),
-         {:ok, _} <- Pleroma.Mailer.deliver(email) do
+           Pleroma.Emails.UserEmail.user_invitation_email(
+             user,
+             invite_token,
+             email,
+             params["name"]
+           ),
+         {:ok, _} <- Pleroma.Emails.Mailer.deliver(email) do
      json_response(conn, :no_content, "")
    end
  end

  @doc "Get a account registeration invite token (base64 string)"
-  def get_invite_token(conn, _params) do
-    {:ok, token} = Pleroma.UserInviteToken.create_token()
+  def get_invite_token(conn, params) do
+    options = params["invite"] || %{}
+    {:ok, invite} = UserInviteToken.create_invite(options)
+
+    conn
+    |> json(invite.token)
+  end
+
+  @doc "Get list of created invites"
+  def invites(conn, _params) do
+    invites = UserInviteToken.list_invites()
+
+    conn
+    |> json(AccountView.render("invites.json", %{invites: invites}))
+  end
+
+  @doc "Revokes invite by token"
+  def revoke_invite(conn, %{"token" => token}) do
+    invite = UserInviteToken.find_by_token!(token)
+    {:ok, updated_invite} = UserInviteToken.update_invite(invite, %{used: true})
+
+    conn
+    |> json(AccountView.render("invite.json", %{invite: updated_invite}))
+  end
+
+  @doc "Get a password reset token (base64 string) for given nickname"
+  def get_password_reset(conn, %{"nickname" => nickname}) do
+    (%User{local: true} = user) = User.get_cached_by_nickname(nickname)
+    {:ok, token} = Pleroma.PasswordResetToken.create_token(user)
 
     conn
     |> json(token.token)
   end
 
-  @doc "Get a password reset token (base64 string) for given nickname"
-  def get_password_reset(conn, %{"nickname" => nickname}) do
-    (%User{local: true} = user) = User.get_by_nickname(nickname)
-    {:ok, token} = Pleroma.PasswordResetToken.create_token(user)
-
+  def errors(conn, {:error, :not_found}) do
     conn
-    |> json(token.token)
+    |> put_status(404)
+    |> json("Not found")
   end
 
   def errors(conn, {:param_cast, _}) do
lib/pleroma/web/admin_api/search.ex
Normal file
54
lib/pleroma/web/admin_api/search.ex
Normal file
|
@ -0,0 +1,54 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Web.AdminAPI.Search do
|
||||||
|
import Ecto.Query
|
||||||
|
|
||||||
|
alias Pleroma.Repo
|
||||||
|
alias Pleroma.User
|
||||||
|
|
||||||
|
@page_size 50
|
||||||
|
|
||||||
|
def user(%{query: term} = params) when is_nil(term) or term == "" do
|
||||||
|
query = maybe_filtered_query(params)
|
||||||
|
|
||||||
|
paginated_query =
|
||||||
|
maybe_filtered_query(params)
|
||||||
|
|> paginate(params[:page] || 1, params[:page_size] || @page_size)
|
||||||
|
|
||||||
|
count = query |> Repo.aggregate(:count, :id)
|
||||||
|
|
||||||
|
results = Repo.all(paginated_query)
|
||||||
|
|
||||||
|
{:ok, results, count}
|
||||||
|
end
|
||||||
|
|
||||||
|
def user(%{query: term} = params) when is_binary(term) do
|
||||||
|
search_query = from(u in maybe_filtered_query(params), where: ilike(u.nickname, ^"%#{term}%"))
|
||||||
|
|
||||||
|
count = search_query |> Repo.aggregate(:count, :id)
|
||||||
|
|
||||||
|
results =
|
||||||
|
search_query
|
||||||
|
|> paginate(params[:page] || 1, params[:page_size] || @page_size)
|
||||||
|
|> Repo.all()
|
||||||
|
|
||||||
|
{:ok, results, count}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp maybe_filtered_query(params) do
|
||||||
|
from(u in User, order_by: u.nickname)
|
||||||
|
|> User.maybe_local_user_query(params[:local])
|
||||||
|
|> User.maybe_external_user_query(params[:external])
|
||||||
|
|> User.maybe_active_user_query(params[:active])
|
||||||
|
|> User.maybe_deactivated_user_query(params[:deactivated])
|
||||||
|
end
|
||||||
|
|
||||||
|
defp paginate(query, page, page_size) do
|
||||||
|
from(u in query,
|
||||||
|
limit: ^page_size,
|
||||||
|
offset: ^((page - 1) * page_size)
|
||||||
|
)
|
||||||
|
end
|
||||||
|
end
|
|
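Note (illustrative sketch, not part of the commit): the new Search module is what list_users/2 now calls; a typical invocation with a merged filter map would look like:

    alias Pleroma.Web.AdminAPI.Search

    # "lain" is example data; returns {:ok, [%Pleroma.User{}, ...], total_count}
    Search.user(%{query: "lain", page: 1, page_size: 50, local: true})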
@@ -26,4 +26,22 @@ def render("show.json", %{user: user}) do
       "tags" => user.tags || []
     }
   end
+
+  def render("invite.json", %{invite: invite}) do
+    %{
+      "id" => invite.id,
+      "token" => invite.token,
+      "used" => invite.used,
+      "expires_at" => invite.expires_at,
+      "uses" => invite.uses,
+      "max_use" => invite.max_use,
+      "invite_type" => invite.invite_type
+    }
+  end
+
+  def render("invites.json", %{invites: invites}) do
+    %{
+      invites: render_many(invites, AccountView, "invite.json", as: :invite)
+    }
+  end
 end
@@ -3,6 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 
 defmodule Pleroma.Web.Auth.Authenticator do
+  alias Pleroma.Registration
   alias Pleroma.User
 
   def implementation do
@@ -15,11 +16,30 @@ def implementation do
   @callback get_user(Plug.Conn.t()) :: {:ok, User.t()} | {:error, any()}
   def get_user(plug), do: implementation().get_user(plug)
 
+  @callback create_from_registration(Plug.Conn.t(), Registration.t()) ::
+              {:ok, User.t()} | {:error, any()}
+  def create_from_registration(plug, registration),
+    do: implementation().create_from_registration(plug, registration)
+
+  @callback get_registration(Plug.Conn.t()) ::
+              {:ok, Registration.t()} | {:error, any()}
+  def get_registration(plug), do: implementation().get_registration(plug)
+
   @callback handle_error(Plug.Conn.t(), any()) :: any()
-  def handle_error(plug, error), do: implementation().handle_error(plug, error)
+  def handle_error(plug, error),
+    do: implementation().handle_error(plug, error)
 
   @callback auth_template() :: String.t() | nil
   def auth_template do
-    implementation().auth_template() || Pleroma.Config.get(:auth_template, "show.html")
+    # Note: `config :pleroma, :auth_template, "..."` support is deprecated
+    implementation().auth_template() ||
+      Pleroma.Config.get([:auth, :auth_template], Pleroma.Config.get(:auth_template)) ||
+      "show.html"
+  end
+
+  @callback oauth_consumer_template() :: String.t() | nil
+  def oauth_consumer_template do
+    implementation().oauth_consumer_template() ||
+      Pleroma.Config.get([:auth, :oauth_consumer_template], "consumer.html")
   end
 end
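Note (illustrative sketch, not part of the commit): given the deprecation noted in the hunk above, the templates would now be configured under the :auth key, roughly:

    # hypothetical config entries matching the keys read above
    config :pleroma, :auth,
      auth_template: "show.html",
      oauth_consumer_template: "consumer.html"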
Some files were not shown because too many files have changed in this diff.