Compare commits

...

69 commits

Author SHA1 Message Date
ac1352c916 Merge branch 'develop' into fedi-absturztau-be 2022-11-20 08:28:23 +01:00
6453297e9c Merge pull request 'Drop XSS auditor' (#292) from r3g_5z/akkoma:drop-xss-auditor into develop
Reviewed-on: AkkomaGang/akkoma#292
2022-11-20 04:00:25 +00:00
r3g_5z
f90552f62e
Drop XSS auditor
It's deprecated by all modern browsers, removed in some, and is known
to create XSS vulnerabilities in itself.

Signed-off-by: r3g_5z <june@terezi.dev>
2022-11-19 20:40:20 -05:00
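In practice, dropping the auditor means sending `X-XSS-Protection: 0` instead of `1; mode=block`, which is what the nginx and relayd config diffs further down in this compare do. A minimal Plug-style sketch of the same change (module name is illustrative, not the actual Akkoma security plug):

```elixir
defmodule MyApp.Plugs.DisableXSSAuditor do
  # Illustrative only: "0" tells browsers that still honour this legacy header
  # to keep their XSS auditor switched off.
  import Plug.Conn

  def init(opts), do: opts

  def call(conn, _opts), do: put_resp_header(conn, "x-xss-protection", "0")
end
```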
fb5f846e8c Add languages to cheatsheet 2022-11-18 11:22:30 +00:00
14c1a4220b docs: Update list of clients (#284)
In addition to making the page refer to Akkoma instead of Pleroma, I've
also removed clients that were not updated in a year or more and updated
links to websites and the contact links of authors.

Also removed language that suggested these clients are in any way
"officially supported".

Co-authored-by: Francis Dinh <normandy@biribiri.dev>
Reviewed-on: AkkomaGang/akkoma#284
Co-authored-by: Norm <normandy@biribiri.dev>
Co-committed-by: Norm <normandy@biribiri.dev>
2022-11-18 11:19:37 +00:00
ab44b82af0 Merge pull request 'Update copyright info' (#285) from norm/akkoma:copyright-stuff into develop
Reviewed-on: AkkomaGang/akkoma#285
2022-11-18 11:17:24 +00:00
e1e0d5d759 microblogpub federation fixes (#288)
Co-authored-by: FloatingGhost <hannah@coffee-and-dreams.uk>
Reviewed-on: AkkomaGang/akkoma#288
2022-11-18 11:14:35 +00:00
e45b242d88
Update copyright info
- Bump years to 2022 where appropriate
- Add copyright for Akkoma authors
- Remove references to deleted images
2022-11-17 22:48:33 -05:00
9deae8c533 Merge pull request 'docs: Update links to list of akkoma instances' (#278) from norm/akkoma:update-akkoma-list-urls into develop
Reviewed-on: AkkomaGang/akkoma#278
2022-11-16 10:16:27 +00:00
d4ca1217d3 Be very specific about the double-quotes in strings 2022-11-16 10:13:41 +00:00
Haelwenn (lanodan) Monnier
3e0a5851e5 Set instance reachable on fetch 2022-11-15 17:23:47 +00:00
7a833aff90
docs: Update links to list of akkoma instances
The old links were for Pleroma instances and one of them isn't even active anymore.
2022-11-15 07:51:19 -05:00
2a1f17e3ed and i yoink (#275)
Co-authored-by: Mark Felder <feld@feld.me>
Co-authored-by: FloatingGhost <hannah@coffee-and-dreams.uk>
Reviewed-on: AkkomaGang/akkoma#275
2022-11-14 15:07:26 +00:00
893bfde66f Remove references to soykaf
Fixes #271
2022-11-14 00:01:31 +00:00
c1127e321b Add configurable timeout per oban job (#273)
Heavily inspired by https://git.pleroma.social/pleroma/pleroma/-/merge_requests/3777

Co-authored-by: FloatingGhost <hannah@coffee-and-dreams.uk>
Reviewed-on: AkkomaGang/akkoma#273
2022-11-13 23:55:51 +00:00
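Judging from the `timeout:` keyword added to the Oban configuration in the config diff later in this compare, per-queue job timeouts should be overridable from site config. A hedged sketch, assuming the usual `config :pleroma, Oban` location and millisecond values:

```elixir
# config/prod.secret.exs (sketch): raise timeouts for slow federation peers.
# Queue names mirror those in config/config.exs; values are placeholders.
config :pleroma, Oban,
  timeout: [
    federator_incoming: :timer.seconds(30),
    federator_outgoing: :timer.seconds(30),
    remote_fetcher: :timer.seconds(20)
  ]
```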
7d4c4aa16e Merge pull request 'change default redirectRootNoLogin to /main/public' (#272) from nocebo/akkoma:nocebo-default-public-tl into develop
Reviewed-on: AkkomaGang/akkoma#272
2022-11-13 22:45:22 +00:00
35cddd7cf7 change default redirectRootNoLogin to /main/public
close #268
2022-11-13 08:43:12 +00:00
19272be0ce Merge pull request 'Chores for 2022.11' (#266) from 2022-11-stable into develop
Reviewed-on: AkkomaGang/akkoma#266
2022-11-12 15:16:51 +00:00
89dbc7177b Chores for 2022.11 2022-11-11 16:12:04 +00:00
634463ff64 fix requirements 2022-11-11 16:07:07 +00:00
ac0c00cdee Add media sources to connect-src if media proxy is enabled 2022-11-10 17:26:51 +00:00
50458a17dc Merge branch 'develop' of akkoma.dev:AkkomaGang/akkoma into develop 2022-11-10 11:54:35 +00:00
bab1ab5b6c strip \r and \n from content-disposition filenames 2022-11-10 11:54:12 +00:00
dcc36df8cf add manual deploy for docs 2022-11-10 10:55:57 +00:00
77ed5fc674 Merge pull request 'Fix typo in README' (#262) from eloy/akkoma:develop into develop
Reviewed-on: AkkomaGang/akkoma#262
2022-11-10 10:52:59 +00:00
f8b4e360a0 Fix typo in README 2022-11-10 10:00:39 +01:00
539c6d6666 update requirements.txt 2022-11-10 03:40:36 +00:00
dc2e9845bb Merge remote-tracking branch 'origin/translations' into develop 2022-11-10 03:38:38 +00:00
66eb844bd2 Update documentation builder 2022-11-10 03:38:10 +00:00
c5b6cb746f add requested_by changelog entry 2022-11-10 03:17:00 +00:00
Weblate
f2c6749b57 Update translation files
Updated by "Squash Git commits" hook in Weblate.

Translation: Pleroma fe/Akkoma Backend (Errors)
Translate-URL: http://translate.akkoma.dev/projects/akkoma/akkoma-backend-errors/
2022-11-10 03:16:35 +00:00
Weblate
f7c1e15d08 Translated using Weblate (Spanish)
Currently translated at 21.6% (23 of 106 strings)

Co-authored-by: mint <they@mint.lgbt>
Translate-URL: http://translate.akkoma.dev/projects/akkoma/akkoma-backend-errors/es/
Translation: Pleroma fe/Akkoma Backend (Errors)
2022-11-10 03:16:35 +00:00
cc6a076202 Include requested_by in relationship (#260)
Co-authored-by: FloatingGhost <hannah@coffee-and-dreams.uk>
Reviewed-on: AkkomaGang/akkoma#260
2022-11-10 03:16:32 +00:00
53fbe26c80 reference "stable" in all URLs 2022-11-09 13:22:44 +00:00
0681a26dbb Remove unused pattern 2022-11-08 13:54:43 +00:00
4e8ab0deeb fix count of poll voters 2022-11-08 13:50:04 +00:00
2e895b6c02 make metadata check a debug log 2022-11-08 11:03:43 +00:00
479aacb1b6 Add fallback for reports that don't have attached activities 2022-11-08 11:01:47 +00:00
a0b8e3c842 Don't mess with the cache on metadata update 2022-11-08 10:39:01 +00:00
7bbaa8f8e0 automatically trim leading *. prefixes on domain blocks 2022-11-07 22:33:18 +00:00
c0eecb55bf Update finch 2022-11-07 13:32:34 +00:00
e0032e4799 Add rollbacks for associated_object_id 2022-11-07 00:08:20 +00:00
48309c141e Add "differences" in readme 2022-11-06 23:57:43 +00:00
31ad09010e Fix regex usage in MRF (#254)
fixes #235
fixes #228

Co-authored-by: FloatingGhost <hannah@coffee-and-dreams.uk>
Reviewed-on: AkkomaGang/akkoma#254
2022-11-06 23:50:32 +00:00
5123b3a5dd Add enabled check on /translation/languages 2022-11-06 22:55:26 +00:00
bbedbaaf5c Changelog 2022-11-06 22:50:11 +00:00
b7e8ce2350 Scrape instance nodeinfo (#251)
Co-authored-by: FloatingGhost <hannah@coffee-and-dreams.uk>
Reviewed-on: AkkomaGang/akkoma#251
2022-11-06 22:49:39 +00:00
ccdf55acff Fix instance name in email test 2022-11-04 18:42:12 +00:00
cc6d760814 Merge pull request 'Fix typo in CSP Report-To header name' (#250) from tcit/akkoma:fix-typo-in-csp-report-to-header-name into develop
Reviewed-on: AkkomaGang/akkoma#250
2022-11-04 18:41:26 +00:00
4d0a51221a
Fix typo in CSP Report-To header name
The header name was Report-To, not Reply-To.

In any case, that's now being changed to the Reporting-Endpoints HTTP
Response Header.
https://w3c.github.io/reporting/#header
https://github.com/w3c/reporting/issues/177

CanIUse says the Report-To header is still supported by current Chrome
and friends.
https://caniuse.com/mdn-http_headers_report-to

It doesn't have any data for the Reporting-Endpoints HTTP header, but
this article says Chrome 96 supports it.
https://web.dev/reporting-api/

(Even though that came out a year ago, it's not compatible with
Network Error Logging, which still uses the Report-To version of the
API.)

Signed-off-by: Thomas Citharel <tcit@tcit.fr>
2022-11-04 15:02:13 +01:00
7cfce562a9 Add default favicon
Fixes pleroma-fe#185
2022-11-02 22:38:02 +00:00
346f72b471 Merge pull request 'Change default instance name to "Akkoma"' (#248) from norm/akkoma:default-instance-name into develop
Reviewed-on: AkkomaGang/akkoma#248
2022-11-02 01:29:42 +00:00
9682ec4c5f Change default instance name to "Akkoma"
This was left at "Pleroma" for some reason.
2022-11-01 20:52:17 +00:00
9038da01cc Merge pull request 'Push.Impl: support edits' (#244) from norm/akkoma:push-support-edits into develop
Reviewed-on: AkkomaGang/akkoma#244
2022-11-01 15:14:08 +00:00
e44e147b54 Merge pull request 'fix flaky test_user_relationship_test.exs:81' (#240) from ilja/akkoma:fix_flaky_test_user_relationship_test.exs_81 into develop
Reviewed-on: AkkomaGang/akkoma#240
2022-11-01 14:44:23 +00:00
d5bbc3eeb2 Merge pull request 'fix flaky test filter_controller_test.exs:200' (#239) from ilja/akkoma:fix_flaky_filter_controller_test.exs_200 into develop
Reviewed-on: AkkomaGang/akkoma#239
2022-11-01 14:42:43 +00:00
479542c692 Merge pull request 'fix flaky participation_test.exs' (#238) from ilja/akkoma:fix_erratic_participation_test into develop
Reviewed-on: AkkomaGang/akkoma#238
2022-11-01 14:37:06 +00:00
be5044f785 fix_flaky_transfer_task_test.exs (#237)
There were async calls happening, so they weren't always finished when the assert ran.

I also fixed some bugs in the erratic tests that were introduced when removing :shout. :shout is a key that requires a restart, and it was changed in the test to use :rate_limit (which also requires a restart). But there was a syntax bug that didn't get caught because the test was tagged as erratic and therefore didn't fail. That is fixed here too.

During compilation, we had a warning `:logger is used by the current application but the current application does not depend on :logger`, which is now fixed as well (see the commit message for the complete stacktrace).

Co-authored-by: Ilja <ilja@ilja.space>
Reviewed-on: AkkomaGang/akkoma#237
Co-authored-by: ilja <akkoma.dev@ilja.space>
Co-committed-by: ilja <akkoma.dev@ilja.space>
2022-11-01 14:31:29 +00:00
f1dfd76b98 Fix rate_limiter_test.exs test "it restricts based on config values" (#233)
Fixes one of the 'erratic' tests

It used a timer to sleep.
But time also passes while doing other things, so depending on hardware, the timings could be off.
I slightly changed the tests so we still test what we functionally want.
Instead of waiting until the cache expires, there is now a function to expire the cache entry, and the test uses that.

That means we're no longer testing whether the cache really expires after a certain amount of time,
but that's the responsibility of the dependency imo, so it shouldn't be a problem.

I also changed `Pleroma.Web.Endpoint, :http, :ip` in the tests to `127.0.0.1`.
Previously it was set to 8.8.8.8, but I see no reason for that and, while I assume no calls
are made to it, it may come across as weird or suspicious to people.

Co-authored-by: Ilja <ilja@ilja.space>
Reviewed-on: AkkomaGang/akkoma#233
Co-authored-by: ilja <akkoma.dev@ilja.space>
Co-committed-by: ilja <akkoma.dev@ilja.space>
2022-11-01 14:25:54 +00:00
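A rough illustration of the approach described above, with the sleep replaced by explicit expiry (the cache name and helper are hypothetical, not the actual test code):

```elixir
# Before (flaky): real time has to pass, so slow CI machines can miss the window.
#   Process.sleep(limiter_expiry_ms + 10)

# After (sketch): expire the bucket directly, then assert the limiter resets.
defmodule RateLimiterTestHelper do
  @cache :rate_limiter_cache  # hypothetical Cachex cache name

  # Cachex.del/2 removes the entry immediately, simulating TTL expiry
  # without depending on wall-clock time.
  def expire_bucket(key), do: {:ok, _} = Cachex.del(@cache, key)
end
```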
1bb8b76311 Fix tests in ldap registration 2022-11-01 14:21:35 +00:00
cbc693f832 Fix LDAP user registration (#229)
Simple fix for LDAP user registration. I'm not sure what changed but I managed to get Akkoma running in a debug session and figured out it was missing a match for an extra value at the end. I don't know Elixir all that well so I'm not sure if this was the correct way to do it... but it works. :)

Reviewed-on: AkkomaGang/akkoma#229
Co-authored-by: nullobsi <me@nullob.si>
Co-committed-by: nullobsi <me@nullob.si>
2022-11-01 14:17:55 +00:00
d782140e2b Reword stop gifs 2022-10-29 22:08:18 +01:00
4d9ca8909d Add StopGifs to description 2022-10-29 21:57:50 +01:00
marcin mikołajczak
6486211064
Push.Impl: support edits
Signed-off-by: marcin mikołajczak <git@mkljczk.pl>
2022-10-28 01:20:19 -04:00
ilja
3562eaeedc fix flaky test_user_relationship_test.exs:81
The problem was twofold. On the one hand, the function didn't actually return what was in the DB.
On the other hand, the test was flaky because it used NaiveDateTime.utc_now(), so the test could fail or pass depending on a difference of microseconds.

Both are fixed now.
2022-10-23 13:31:01 +02:00
Ilja
a59d310982 fix flaky test filter_controller_test.exs:200 2022-10-23 13:07:02 +02:00
ilja
e6ceea3553 fix flaky participation_test.exs
The test checked whether the updated_at after marking as "read" was equal to the updated_at at insertion, but that seems wrong.
Firstly, if a record is updated, you expect the updated_at to also update.
Secondly, the insert and update happen almost at the same time, so it's flaky regardless.

Here I make sure it has a much older updated_at during insert so we can clearly see the effect after the update.
I also check that the updated_at is actually updated, because that is both the expected and the current behaviour.
2022-10-23 12:33:31 +02:00
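A sketch of the technique described above, using standard Ecto/NaiveDateTime calls; the `Participation` struct and `mark_as_read/1` are taken from the description and may not match the real module layout:

```elixir
# Insert the participation with an updated_at well in the past, so a later
# update is unambiguously newer even when insert and update run back to back.
old_time =
  NaiveDateTime.utc_now()
  |> NaiveDateTime.add(-3600)
  |> NaiveDateTime.truncate(:second)

{:ok, participation} =
  %Participation{user_id: user.id, conversation_id: conversation.id, read: false}
  |> Ecto.Changeset.change(updated_at: old_time)
  |> Repo.insert()

{:ok, updated} = Participation.mark_as_read(participation)

# updated_at must move forward once the record is marked as read.
assert NaiveDateTime.compare(updated.updated_at, old_time) == :gt
```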
16a31872fe document local_bubble 2022-10-21 10:23:07 +01:00
7bb6df2d5b Remove unused DATA arg 2022-10-20 13:26:00 +01:00
82 changed files with 1830 additions and 547 deletions

1
.gitignore vendored
View file

@ -1,5 +1,6 @@
# App artifacts
docs/site
*.zip
*.sw*
secret
/_build

View file

@ -4,15 +4,26 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## Unreleased
## 2022.11
## Added
- Officially supported docker release
- Ability to remove followers unilaterally without a block
- Scraping of nodeinfo from remote instances to display instance info
- `requested_by` in relationships when the user has requested to follow you
## Changes
- Follows no longer override domain blocks, a domain block is final
- Deletes are now the lowest priority to publish and will be handled after creates
- Domain blocks are now subdomain-matches by default
## Fixed
- Registrations via ldap are now compatible with the latest OTP24
## Update notes
- If you use LDAP and run from source, please update your elixir/erlang
to the latest. The changes in OTP24.3 are breaking.
- You can now remove the leading `*.` from domain blocks, but you do not have to.
## 2022.10

19
COPYING
View file

@ -1,12 +1,15 @@
Unless otherwise stated this repository is copyright © 2017-2021
Pleroma Authors <https://pleroma.social/>, and is distributed under
The GNU Affero General Public License Version 3, you should have received a
copy of the license file as AGPL-3.
Unless otherwise stated this repository is
Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
Copyright © 2022 Akkoma Authors <https://akkoma.social/>
and is distributed under The GNU Affero General Public License Version 3, you
should have received a copy of the license file as AGPL-3.
---
Files inside docs directory are copyright © 2021 Pleroma Authors
<https://pleroma.social/>, and are distributed under the Creative Commons
Files inside docs directory are
Copyright © 2021-2022 Pleroma Authors <https://pleroma.social/>
Copyright © 2022 Akkoma Authors <https://akkoma.social/>
and are distributed under the Creative Commons
Attribution 4.0 International license, you should have received
a copy of the license file as CC-BY-4.0.
@ -16,9 +19,7 @@ The following files are copyright © 2019 shitposter.club, and are distributed
under the Creative Commons Attribution-ShareAlike 4.0 International license,
you should have received a copy of the license file as CC-BY-SA-4.0.
priv/static/images/pleroma-fox-tan.png
priv/static/images/pleroma-fox-tan-smol.png
priv/static/images/pleroma-tan.png
---
@ -35,8 +36,6 @@ The following files are copyright © 2017-2020 Pleroma Authors
Attribution-ShareAlike 4.0 International license, you should have received
a copy of the license file as CC-BY-SA-4.0.
priv/static/images/avi.png
priv/static/images/banner.png
priv/static/instance/thumbnail.jpeg
---

View file

@ -3,7 +3,6 @@ FROM hexpm/elixir:1.13.4-erlang-24.3.4.5-alpine-3.15.6
ENV MIX_ENV=prod
ARG HOME=/opt/akkoma
ARG DATA=/var/lib/akkoma
LABEL org.opencontainers.image.title="akkoma" \
org.opencontainers.image.description="Akkoma for Docker" \

View file

@ -8,12 +8,33 @@
This is a fork of Pleroma, which is a microblogging server software that can federate (= exchange messages with) other servers that support ActivityPub. What that means is that you can host a server for yourself or your friends and stay in control of your online identity, but still exchange messages with people on larger servers. Akkoma will federate with all servers that implement ActivityPub, like Friendica, GNU Social, Hubzilla, Mastodon, Misskey, Peertube, and Pixelfed.
Akkoma is written in Elixir and uses PostgresSQL for data storage.
Akkoma is written in Elixir and uses PostgreSQL for data storage.
For clients it supports the [Mastodon client API](https://docs.joinmastodon.org/api/guidelines/) with Pleroma extensions (see the API section on <https://docs.akkoma.dev/stable/>).
- [Client Applications for Akkoma](https://docs.akkoma.dev/stable/clients/)
## Differences with Pleroma
Akkoma is a faster-paced fork, it has a varied and potentially experimental feature set tailored specifically to the corner of the fediverse inhabited by the project
creator and contributors.
This should not be considered a one-for-one match with pleroma; it is more opinionated in many ways, and has a smaller community (which is good or
bad depending on your view)
For example, Akkoma has:
- Custom Emoji reactions (compatible with misskey)
- Misskey-flavoured markdown support
- Elasticsearch and Meilisearch support for search
- Mastodon frontend (Glitch-Soc and Fedibird flavours) support
- Automatic post translation via DeepL or LibreTranslate
- A multitude of heavy modifications to the Pleroma Frontend (Pleroma-FE)
- The "bubble" concept, in which instance administrators can choose closely-related instances to make a "community of communities", so to say
And takes a more opinionated stance on issues like Domain blocks, which are enforced far more on Akkoma.
Take a look at the Changelog if you want a full list of recent changes, everything since 3.0 has been Akkoma.
## Installation
### OTP releases (Recommended)
@ -25,15 +46,13 @@ If your platform is not supported, or you just want to be able to edit the sourc
- [Alpine Linux](https://docs.akkoma.dev/stable/installation/alpine_linux_en/)
- [Arch Linux](https://docs.akkoma.dev/stable/installation/arch_linux_en/)
- [Debian-based](https://docs.akkoma.dev/stable/installation/debian_based_en/)
- [Debian-based (jp)](https://docs.akkoma.dev/stable/installation/debian_based_jp/)
- [FreeBSD](https://docs.akkoma.dev/stable/installation/freebsd_en/)
- [Gentoo Linux](https://docs.akkoma.dev/stable/installation/gentoo_en/)
- [NetBSD](https://docs.akkoma.dev/stable/installation/netbsd_en/)
- [OpenBSD](https://docs.akkoma.dev/stable/installation/openbsd_en/)
- [OpenBSD (fi)](https://docs.akkoma.dev/stable/installation/openbsd_fi/)
### Docker
While we dont provide docker files, other people have written very good ones. Take a look at <https://github.com/angristan/docker-pleroma> or <https://glitch.sh/sn0w/pleroma-docker>.
Docker installation is supported via [this setup](https://docs.akkoma.dev/stable/installation/docker_en/)
### Compilation Troubleshooting
If you ever encounter compilation issues during the updating of Akkoma, you can try these commands and see if they fix things:
@ -45,3 +64,4 @@ If you ever encounter compilation issues during the updating of Akkoma, you can
## Documentation
- https://docs.akkoma.dev/stable
- https://docs.akkoma.dev/develop

View file

@ -185,7 +185,7 @@
adapter: []
config :pleroma, :instance,
name: "Pleroma",
name: "Akkoma",
email: "example@example.com",
notify_email: "noreply@example.com",
description: "Akkoma: The cooler fediverse server",
@ -317,7 +317,7 @@
nsfwCensorImage: "",
postContentType: "text/plain",
redirectRootLogin: "/main/friends",
redirectRootNoLogin: "/main/all",
redirectRootNoLogin: "/main/public",
scopeCopy: true,
sidebarRight: false,
showFeaturesPanel: true,
@ -567,7 +567,8 @@
attachments_cleanup: 1,
new_users_digest: 1,
mute_expire: 5,
search_indexing: 10
search_indexing: 10,
nodeinfo_fetcher: 1
],
plugins: [
Oban.Plugins.Pruner,
@ -583,6 +584,27 @@
federator_incoming: 5,
federator_outgoing: 5,
search_indexing: 2
],
timeout: [
activity_expiration: :timer.seconds(5),
token_expiration: :timer.seconds(5),
filter_expiration: :timer.seconds(5),
backup: :timer.seconds(900),
federator_incoming: :timer.seconds(10),
federator_outgoing: :timer.seconds(10),
ingestion_queue: :timer.seconds(5),
web_push: :timer.seconds(5),
mailer: :timer.seconds(5),
transmogrifier: :timer.seconds(5),
scheduled_activities: :timer.seconds(5),
poll_notifications: :timer.seconds(5),
background: :timer.seconds(5),
remote_fetcher: :timer.seconds(10),
attachments_cleanup: :timer.seconds(900),
new_users_digest: :timer.seconds(10),
mute_expire: :timer.seconds(5),
search_indexing: :timer.seconds(5),
nodeinfo_fetcher: :timer.seconds(10)
]
config :pleroma, Pleroma.Formatter,
@ -806,7 +828,8 @@
config :web_push_encryption, http_client: Pleroma.HTTP.WebPush
config :pleroma, :instances_favicons, enabled: false
config :pleroma, :instances_favicons, enabled: true
config :pleroma, :instances_nodeinfo, enabled: true
config :floki, :html_parser, Floki.HTMLParser.FastHtml

View file

@ -1389,6 +1389,12 @@
label: "Render misskey markdown",
type: :boolean,
description: "Whether to render Misskey-flavoured markdown"
},
%{
key: :stopGifs,
label: "Stop Gifs",
type: :boolean,
description: "Whether to pause animated images until they're hovered on"
}
]
},
@ -1973,6 +1979,32 @@
federator_incoming: 5,
federator_outgoing: 5
]
},
%{
key: :timeout,
type: {:keyword, :integer},
description: "Timeout for jobs, per `Oban` queue, in ms",
suggestions: [
activity_expiration: :timer.seconds(5),
token_expiration: :timer.seconds(5),
filter_expiration: :timer.seconds(5),
backup: :timer.seconds(900),
federator_incoming: :timer.seconds(10),
federator_outgoing: :timer.seconds(10),
ingestion_queue: :timer.seconds(5),
web_push: :timer.seconds(5),
mailer: :timer.seconds(5),
transmogrifier: :timer.seconds(5),
scheduled_activities: :timer.seconds(5),
poll_notifications: :timer.seconds(5),
background: :timer.seconds(5),
remote_fetcher: :timer.seconds(10),
attachments_cleanup: :timer.seconds(900),
new_users_digest: :timer.seconds(10),
mute_expire: :timer.seconds(5),
search_indexing: :timer.seconds(5),
nodeinfo_fetcher: :timer.seconds(10)
]
}
]
},
@ -3041,6 +3073,19 @@
}
]
},
%{
group: :pleroma,
key: :instances_nodeinfo,
type: :group,
description: "Control favicons for instances",
children: [
%{
key: :enabled,
type: :boolean,
description: "Allow/disallow getting instance nodeinfo"
}
]
},
%{
group: :ex_aws,
key: :s3,

View file

@ -139,6 +139,8 @@
# Reduce recompilation time
# https://dashbit.co/blog/speeding-up-re-compilation-of-elixir-projects
config :phoenix, :plug_init_mode, :runtime
config :pleroma, :instances_favicons, enabled: false
config :pleroma, :instances_nodeinfo, enabled: false
if File.exists?("./config/test.secret.exs") do
import_config "test.secret.exs"

View file

@ -1,9 +1,14 @@
all: install
pipenv run mkdocs build
branch := $(shell git rev-parse --abbrev-ref HEAD)
install:
pipenv install
clean:
rm -rf site
serve:
pipenv run python3 -m http.server -d site
zip:
zip -r docs.zip site/*
deploy:
cd site && rclone copy . scaleway:akkoma-docs/$(branch)

95
docs/Pipfile.lock generated
View file

@ -14,6 +14,22 @@
]
},
"default": {
"certifi": {
"hashes": [
"sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14",
"sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"
],
"markers": "python_version >= '3.6'",
"version": "==2022.9.24"
},
"charset-normalizer": {
"hashes": [
"sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845",
"sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"
],
"markers": "python_version >= '3.6'",
"version": "==2.1.1"
},
"click": {
"hashes": [
"sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e",
@ -29,13 +45,13 @@
],
"version": "==2.1.0"
},
"importlib-metadata": {
"idna": {
"hashes": [
"sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670",
"sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"
"sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4",
"sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"
],
"markers": "python_version >= '3.7'",
"version": "==4.12.0"
"markers": "python_version >= '3.5'",
"version": "==3.4"
},
"jinja2": {
"hashes": [
@ -55,10 +71,10 @@
},
"markdown-include": {
"hashes": [
"sha256:6f5d680e36f7780c7f0f61dca53ca581bd50d1b56137ddcd6353efafa0c3e4a2"
"sha256:a06183b7c7225e73112737acdc6fe0ac0686c39457234eeb5ede23881fed001d"
],
"index": "pypi",
"version": "==0.6.0"
"version": "==0.7.0"
},
"markupsafe": {
"hashes": [
@ -116,27 +132,27 @@
},
"mkdocs": {
"hashes": [
"sha256:26bd2b03d739ac57a3e6eed0b7bcc86168703b719c27b99ad6ca91dc439aacde",
"sha256:b504405b04da38795fec9b2e5e28f6aa3a73bb0960cb6d5d27ead28952bd35ea"
"sha256:8947af423a6d0facf41ea1195b8e1e8c85ad94ac95ae307fe11232e0424b11c5",
"sha256:c8856a832c1e56702577023cd64cc5f84948280c1c0fcc6af4cd39006ea6aa8c"
],
"markers": "python_version >= '3.6'",
"version": "==1.3.0"
"markers": "python_version >= '3.7'",
"version": "==1.4.2"
},
"mkdocs-material": {
"hashes": [
"sha256:263f2721f3abe533b61f7c8bed435a0462620912742c919821ac2d698b4bfe67",
"sha256:dc82b667d2a83f0de581b46a6d0949732ab77e7638b87ea35b770b33bc02e75a"
"sha256:143ea55843b3747b640e1110824d91e8a4c670352380e166e64959f9abe98862",
"sha256:45eeabb23d2caba8fa3b85c91d9ec8e8b22add716e9bba8faf16d56af8aa5622"
],
"index": "pypi",
"version": "==8.3.9"
"version": "==8.5.9"
},
"mkdocs-material-extensions": {
"hashes": [
"sha256:a82b70e533ce060b2a5d9eb2bc2e1be201cf61f901f93704b4acf6e3d5983a44",
"sha256:bfd24dfdef7b41c312ede42648f9eb83476ea168ec163b613f9abd12bbfddba2"
"sha256:96ca979dae66d65c2099eefe189b49d5ac62f76afb59c38e069ffc7cf3c131ec",
"sha256:bcc2e5fc70c0ec50e59703ee6e639d87c7e664c0c441c014ea84461a90f1e902"
],
"markers": "python_version >= '3.6'",
"version": "==1.0.3"
"markers": "python_version >= '3.7'",
"version": "==1.1"
},
"packaging": {
"hashes": [
@ -148,19 +164,19 @@
},
"pygments": {
"hashes": [
"sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb",
"sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"
"sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1",
"sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"
],
"markers": "python_version >= '3.6'",
"version": "==2.12.0"
"version": "==2.13.0"
},
"pymdown-extensions": {
"hashes": [
"sha256:3ef2d998c0d5fa7eb09291926d90d69391283561cf6306f85cd588a5eb5befa0",
"sha256:ec141c0f4983755349f0c8710416348d1a13753976c028186ed14f190c8061c4"
"sha256:1bd4a173095ef8c433b831af1f3cb13c10883be0c100ae613560668e594651f7",
"sha256:8e62688a8b1128acd42fa823f3d429d22f4284b5e6dd4d3cd56721559a5a211b"
],
"markers": "python_version >= '3.7'",
"version": "==9.5"
"version": "==9.8"
},
"pyparsing": {
"hashes": [
@ -180,6 +196,7 @@
},
"pyyaml": {
"hashes": [
"sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf",
"sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293",
"sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b",
"sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57",
@ -191,26 +208,32 @@
"sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287",
"sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513",
"sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0",
"sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782",
"sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0",
"sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92",
"sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f",
"sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2",
"sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc",
"sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1",
"sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c",
"sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86",
"sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4",
"sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c",
"sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34",
"sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b",
"sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d",
"sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c",
"sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb",
"sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7",
"sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737",
"sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3",
"sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d",
"sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358",
"sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53",
"sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78",
"sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803",
"sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a",
"sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f",
"sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174",
"sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"
],
@ -225,6 +248,14 @@
"markers": "python_version >= '3.6'",
"version": "==0.1"
},
"requests": {
"hashes": [
"sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983",
"sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"
],
"markers": "python_version >= '3.7' and python_version < '4'",
"version": "==2.28.1"
},
"six": {
"hashes": [
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
@ -233,6 +264,14 @@
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.16.0"
},
"urllib3": {
"hashes": [
"sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e",
"sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4'",
"version": "==1.26.12"
},
"watchdog": {
"hashes": [
"sha256:083171652584e1b8829581f965b9b7723ca5f9a2cd7e20271edf264cfd7c1412",
@ -263,14 +302,6 @@
],
"markers": "python_version >= '3.6'",
"version": "==2.1.9"
},
"zipp": {
"hashes": [
"sha256:56bf8aadb83c24db6c4b577e13de374ccfb67da2078beba1d037c17980bf43ad",
"sha256:c4f6e5bbf48e74f7a38e7cc5b0480ff42b0ae5178957d564d18932525d5cf099"
],
"markers": "python_version >= '3.7'",
"version": "==3.8.0"
}
},
"develop": {}

View file

@ -1,21 +1,8 @@
# Pleroma Clients
Note: Additional clients may be working but theses are officially supporting Pleroma.
Feel free to contact us to be added to this list!
# Akkoma Clients
Note: Additional clients may work, but these are known to work with Akkoma.
Apps listed here might not support all of Akkoma's features.
## Desktop
### Roma for Desktop
- Homepage: <https://www.pleroma.com/#desktopApp>
- Source Code: <https://github.com/roma-apps/roma-desktop>
- Platforms: Windows, Mac, Linux
- Features: MastoAPI, Streaming Ready
### Social
- Source Code: <https://gitlab.gnome.org/World/Social>
- Contact: [@brainblasted@social.libre.fi](https://social.libre.fi/users/brainblasted)
- Platforms: Linux (GNOME)
- Note(2019-01-28): Not at a pre-alpha stage yet
- Features: MastoAPI
### Whalebird
- Homepage: <https://whalebird.social/>
- Source Code: <https://github.com/h3poteto/whalebird-desktop>
@ -30,28 +17,16 @@ Feel free to contact us to be added to this list!
- Platforms: Android
- Features: MastoAPI, ActivityPub (Client-to-Server)
### Amaroq
- Homepage: <https://itunes.apple.com/us/app/amaroq-for-mastodon/id1214116200>
- Source Code: <https://github.com/ReticentJohn/Amaroq>
- Contact: [@eurasierboy@mastodon.social](https://mastodon.social/users/eurasierboy)
- Platforms: iOS
- Features: MastoAPI, No Streaming
### Fedilab
- Homepage: <https://fedilab.app/>
- Source Code: <https://framagit.org/tom79/fedilab/>
- Contact: [@fedilab@framapiaf.org](https://framapiaf.org/users/fedilab)
- Source Code: <https://codeberg.org/tom79/Fedilab>
- Contact: [@apps@toot.felilab.app](https://toot.fedilab.app/@apps)
- Platforms: Android
- Features: MastoAPI, Streaming Ready, Moderation, Text Formatting
### Kyclos
- Source Code: <https://git.pleroma.social/pleroma/harbour-kyclos>
- Platforms: SailfishOS
- Features: MastoAPI, No Streaming
### Husky
- Source code: <https://git.mentality.rip/FWGS/Husky>
- Contact: [@Husky@enigmatic.observer](https://enigmatic.observer/users/Husky)
- Source code: <https://git.sr.ht/~captainepoch/husky>
- Contact: [@captainepoch@stereophonic.space](https://stereophonic.space/captainepoch)
- Platforms: Android
- Features: MastoAPI, No Streaming, Emoji Reactions, Text Formatting, FE Stickers
@ -68,32 +43,7 @@ Feel free to contact us to be added to this list!
- Platforms: Android
- Features: MastoAPI, No Streaming
### Twidere
- Homepage: <https://twidere.mariotaku.org/>
- Source Code: <https://github.com/TwidereProject/Twidere-Android/>
- Contact: <me@mariotaku.org>
- Platform: Android
- Features: MastoAPI, No Streaming
### Indigenous
- Homepage: <https://indigenous.realize.be/>
- Source Code: <https://github.com/swentel/indigenous-android/>
- Contact: [@swentel@realize.be](https://realize.be)
- Platforms: Android
- Features: MastoAPI, No Streaming
## Alternative Web Interfaces
### Brutaldon
- Homepage: <https://jfm.carcosa.net/projects/software/brutaldon/>
- Source Code: <https://git.carcosa.net/jmcbray/brutaldon>
- Contact: [@gcupc@glitch.social](https://glitch.social/users/gcupc)
- Features: MastoAPI, No Streaming
### Halcyon
- Source Code: <https://notabug.org/halcyon-suite/halcyon>
- Contact: [@halcyon@social.csswg.org](https://social.csswg.org/users/halcyon)
- Features: MastoAPI, Streaming Ready
### Pinafore
- Homepage: <https://pinafore.social/>
- Source Code: <https://github.com/nolanlawson/pinafore>

View file

@ -59,6 +59,8 @@ To add configuration to your config file, you can copy it from the base config.
* `cleanup_attachments`: Remove attachments along with statuses. Does not affect duplicate files and attachments without status. Enabling this will increase load to database when deleting statuses on larger instances.
* `show_reactions`: Let favourites and emoji reactions be viewed through the API (default: `true`).
* `password_reset_token_validity`: The time after which reset tokens aren't accepted anymore, in seconds (default: one day).
* `local_bubble`: Array of domains representing instances closely related to yours. Used to populate the `bubble` timeline. e.g `["example.com"]`, (default: `[]`)
* `languages`: List of language codes used by the instance. This is used to try and set a default language from the frontend. It will try to find the first match between the languages set here and the user's browser languages, and will default to the first language in this setting if there is no match. (default `["en"]`)
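As a hedged example (values are placeholders), these two settings would be set together under the `:instance` config group:

```elixir
# config/prod.secret.exs - example values only
config :pleroma, :instance,
  local_bubble: ["friends.example.com", "art.example.org"],
  languages: ["en", "ja"]
```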
## :database
* `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).

View file

@ -23,7 +23,7 @@ This sets the `secure` flag on Akkomas session cookie. This makes sure, that
This will send additional HTTP security headers to the clients, including:
* `X-XSS-Protection: "1; mode=block"`
* `X-XSS-Protection: "0"`
* `X-Permitted-Cross-Domain-Policies: "none"`
* `X-Frame-Options: "DENY"`
* `X-Content-Type-Options: "nosniff"`

View file

@ -155,7 +155,7 @@ server {
location / {
add_header X-XSS-Protection "1; mode=block";
add_header X-XSS-Protection "0";
add_header X-Permitted-Cross-Domain-Policies none;
add_header X-Frame-Options DENY;
add_header X-Content-Type-Options nosniff;

View file

@ -99,7 +99,7 @@ server {
location / {
add_header X-XSS-Protection "1; mode=block";
add_header X-XSS-Protection "0";
add_header X-Permitted-Cross-Domain-Policies none;
add_header X-Frame-Options DENY;
add_header X-Content-Type-Options nosniff;

View file

@ -195,7 +195,7 @@ Additional parameters can be added to the JSON body/Form data:
- `preview`: boolean, if set to `true` the post won't be actually posted, but the status entity would still be rendered back. This could be useful for previewing rich text/custom emoji, for example.
- `content_type`: string, contain the MIME type of the status, it is transformed into HTML by the backend. You can get the list of the supported MIME types with the nodeinfo endpoint.
- `to`: A list of nicknames (like `lain@soykaf.club` or `lain` on the local server) that will be used to determine who is going to be addressed by this post. Using this will disable the implicit addressing by mentioned names in the `status` body, only the people in the `to` list will be addressed. The normal rules for post visibility are not affected by this and will still apply.
- `to`: A list of nicknames (like `admin@otp.akkoma.dev` or `admin` on the local server) that will be used to determine who is going to be addressed by this post. Using this will disable the implicit addressing by mentioned names in the `status` body, only the people in the `to` list will be addressed. The normal rules for post visibility are not affected by this and will still apply.
- `visibility`: string, besides standard MastoAPI values (`direct`, `private`, `unlisted`, `local` or `public`) it can be used to address a List by setting it to `list:LIST_ID`.
- `expires_in`: The number of seconds the posted activity should expire in. When a posted activity expires it will be deleted from the server, and a delete request for it will be federated. This needs to be longer than an hour.
- `in_reply_to_conversation_id`: Will reply to a given conversation, addressing only the people who are part of the recipient set of that conversation. Sets the visibility to `direct`.
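Pulling the parameters above together, a hypothetical request body for `POST /api/v1/statuses` might look like this (a sketch of the documented fields, not an exhaustive example):

```elixir
# Hypothetical JSON body for POST /api/v1/statuses using the parameters above.
body =
  Jason.encode!(%{
    status: "hello @admin",
    content_type: "text/markdown",
    # Only the listed recipients are addressed; mentions in the text no longer add recipients.
    to: ["admin"],
    visibility: "private",
    # Activity is deleted (and a Delete federated) after two hours; must exceed one hour.
    expires_in: 7200
  })
```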

View file

@ -40,5 +40,5 @@ The following is a config example to use with [Grafana](https://grafana.com)
metrics_path: /api/pleroma/app_metrics
scheme: https
static_configs:
- targets: ['pleroma.soykaf.com']
- targets: ['otp.akkoma.dev']
```

View file

@ -23,20 +23,20 @@ will be posted via [@akkoma@ihba](https://ihatebeinga.live/users/akkoma)
## How can I use it?
Akkoma instances are already widely deployed, a list can be found at <https://the-federation.info/pleroma> and <https://fediverse.network/pleroma>.
Akkoma instances are already widely deployed, a list can be found at <https://the-federation.info/akkoma> and <https://akkoma.fediverse.observer/list>.
If you don't feel like joining an existing instance, but instead prefer to deploy your own instance, that's easy too!
Installation instructions can be found in the installation section of these docs.
## I got an account, now what?
Great! Now you can explore the fediverse! Open the login page for your Akkoma instance (e.g. <https://pleroma.soykaf.com>) and login with your username and password. (If you don't have an account yet, click on Register)
Great! Now you can explore the fediverse! Open the login page for your Akkoma instance (e.g. <https://otp.akkoma.dev>) and login with your username and password. (If you don't have an account yet, click on Register)
### Pleroma-FE
The default front-end used by Akkoma is Pleroma-FE. You can find more information on what it is and how to use it in the [Introduction to Pleroma-FE](https://docs-fe.akkoma.dev/stable/).
### Mastodon interface
If the Pleroma-FE interface isn't your thing, or you're just trying something new but you want to keep using the familiar Mastodon interface, we got that too!
Just add a "/web" after your instance url (e.g. <https://pleroma.soykaf.com/web>) and you'll end on the Mastodon web interface, but with a Akkoma backend! MAGIC!
Just add a "/web" after your instance url (e.g. <https://otp.akkoma.dev/web>) and you'll end on the Mastodon web interface, but with a Akkoma backend! MAGIC!
The Mastodon interface is from the Glitch-soc fork. For more information on the Mastodon interface you can check the [Mastodon](https://docs.joinmastodon.org/) and [Glitch-soc](https://glitch-soc.github.io/docs/) documentation.
Remember, what you see is only the frontend part of Mastodon, the backend is still Akkoma.

View file

@ -87,7 +87,7 @@ export FLAVOUR="amd64-musl"
# Clone the release build into a temporary directory and unpack it
# Replace `stable` with `unstable` if you want to run the unstable branch
su akkoma -s $SHELL -lc "
curl 'https://akkoma-updates.s3-website.fr-par.scw.cloud/develop/akkoma-$FLAVOUR.zip' -o /tmp/akkoma.zip
curl 'https://akkoma-updates.s3-website.fr-par.scw.cloud/stable/akkoma-$FLAVOUR.zip' -o /tmp/akkoma.zip
unzip /tmp/akkoma.zip -d /tmp/
"

View file

@ -34,6 +34,15 @@ git pull -r
# to run "git merge stable" instead (or develop if you want)
```
### WARNING - Migrating from Pleroma Develop
If you are on pleroma develop, and have updated since 2022-08, you may have issues with database migrations.
Please roll back the given migrations:
```bash
MIX_ENV=prod mix ecto.rollback --migrations-path priv/repo/optional_migrations/pleroma_develop_rollbacks -n3
```
Then compile, migrate and restart as usual.
## From OTP

View file

@ -160,7 +160,7 @@ http protocol plerup { # Protocol for upstream akkoma server
match request header append "X-Forwarded-For" value "$REMOTE_ADDR" # This two header and the next one are not strictly required by akkoma but adding them won't hurt
match request header append "X-Forwarded-By" value "$SERVER_ADDR:$SERVER_PORT"
match response header append "X-XSS-Protection" value "1; mode=block"
match response header append "X-XSS-Protection" value "0"
match response header append "X-Permitted-Cross-Domain-Policies" value "none"
match response header append "X-Frame-Options" value "DENY"
match response header append "X-Content-Type-Options" value "nosniff"

View file

@ -123,7 +123,7 @@ export FLAVOUR="amd64-musl"
# Clone the release build into a temporary directory and unpack it
su akkoma -s $SHELL -lc "
curl 'https://akkoma-updates.s3-website.fr-par.scw.cloud/develop/akkoma-$FLAVOUR.zip' -o /tmp/akkoma.zip
curl 'https://akkoma-updates.s3-website.fr-par.scw.cloud/stable/akkoma-$FLAVOUR.zip' -o /tmp/akkoma.zip
unzip /tmp/akkoma.zip -d /tmp/
"

View file

@ -4,7 +4,7 @@ All stable OTP releases are cryptographically signed, to allow
you to verify the integrity if you choose to.
Releases are signed with [Signify](https://man.openbsd.org/signify.1),
with [the public key in the main repository](https://akkoma.dev/AkkomaGang/akkoma/src/branch/develop/SIGNING_KEY.pub)
with [the public key in the main repository](https://akkoma.dev/AkkomaGang/akkoma/src/branch/stable/SIGNING_KEY.pub)
Release URLs will always be of the form

View file

@ -1,22 +1,26 @@
certifi==2022.9.24
charset-normalizer==2.1.1
click==8.1.3
ghp-import==2.1.0
idna==3.4
importlib-metadata==4.12.0
Jinja2==3.1.2
Markdown==3.3.7
markdown-include==0.6.0
markdown-include==0.7.0
MarkupSafe==2.1.1
mergedeep==1.3.4
mkdocs==1.3.0
mkdocs-bootswatch==1.1
mkdocs-material==8.1.8
mkdocs-material-extensions==1.0.3
mkdocs==1.4.2
mkdocs-material==8.5.9
mkdocs-material-extensions==1.1
packaging==21.3
Pygments==2.11.2
pymdown-extensions==9.1
Pygments==2.13.0
pymdown-extensions==9.8
pyparsing==3.0.9
python-dateutil==2.8.2
PyYAML==6.0
pyyaml_env_tag==0.1
requests==2.28.1
six==1.16.0
urllib3==1.26.12
watchdog==2.1.9
zipp==3.8.0

View file

@ -156,7 +156,8 @@ defp cachex_children do
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
build_cachex("failed_proxy_url", limit: 2500),
build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000),
build_cachex("translations", default_ttl: :timer.hours(24 * 30), limit: 2500)
build_cachex("translations", default_ttl: :timer.hours(24 * 30), limit: 2500),
build_cachex("instances", default_ttl: :timer.hours(24), ttl_interval: 1000, limit: 2500)
]
end

View file

@ -99,6 +99,7 @@ defp proxy_type(_), do: {:error, :unknown}
| {:error, atom()}
| nil
def parse_proxy(nil), do: nil
def parse_proxy(""), do: nil
def parse_proxy(proxy) when is_binary(proxy) do
with %URI{} = uri <- URI.parse(proxy),

View file

@ -5,6 +5,8 @@
defmodule Pleroma.Instances.Instance do
@moduledoc "Instance."
@cachex Pleroma.Config.get([:cachex, :provider], Cachex)
alias Pleroma.Instances
alias Pleroma.Instances.Instance
alias Pleroma.Repo
@ -22,7 +24,8 @@ defmodule Pleroma.Instances.Instance do
field(:host, :string)
field(:unreachable_since, :naive_datetime_usec)
field(:favicon, :string)
field(:favicon_updated_at, :naive_datetime)
field(:metadata_updated_at, :naive_datetime)
field(:nodeinfo, :map, default: %{})
timestamps()
end
@ -31,7 +34,7 @@ defmodule Pleroma.Instances.Instance do
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:host, :unreachable_since, :favicon, :favicon_updated_at])
|> cast(params, [:host, :unreachable_since, :favicon, :nodeinfo, :metadata_updated_at])
|> validate_required([:host])
|> unique_constraint(:host)
end
@ -138,63 +141,138 @@ defp parse_datetime(datetime) when is_binary(datetime) do
defp parse_datetime(datetime), do: datetime
def get_or_update_favicon(%URI{host: host} = instance_uri) do
existing_record = Repo.get_by(Instance, %{host: host})
def needs_update(nil), do: true
def needs_update(%Instance{metadata_updated_at: nil}), do: true
def needs_update(%Instance{metadata_updated_at: metadata_updated_at}) do
now = NaiveDateTime.utc_now()
NaiveDateTime.diff(now, metadata_updated_at) > 86_400
end
if existing_record && existing_record.favicon_updated_at &&
NaiveDateTime.diff(now, existing_record.favicon_updated_at) < 86_400 do
existing_record.favicon
def local do
%Instance{
host: Pleroma.Web.Endpoint.host(),
favicon: Pleroma.Web.Endpoint.url() <> "/favicon.png",
nodeinfo: Pleroma.Web.Nodeinfo.NodeinfoController.raw_nodeinfo()
}
end
def update_metadata(%URI{host: host} = uri) do
Logger.debug("Checking metadata for #{host}")
existing_record = Repo.get_by(Instance, %{host: host})
if reachable?(host) do
do_update_metadata(uri, existing_record)
else
favicon = scrape_favicon(instance_uri)
{:discard, :unreachable}
end
end
defp do_update_metadata(%URI{host: host} = uri, existing_record) do
if existing_record do
if needs_update(existing_record) do
Logger.info("Updating metadata for #{host}")
favicon = scrape_favicon(uri)
nodeinfo = scrape_nodeinfo(uri)
if existing_record do
existing_record
|> changeset(%{favicon: favicon, favicon_updated_at: now})
|> changeset(%{
host: host,
favicon: favicon,
nodeinfo: nodeinfo,
metadata_updated_at: NaiveDateTime.utc_now()
})
|> Repo.update()
else
%Instance{}
|> changeset(%{host: host, favicon: favicon, favicon_updated_at: now})
|> Repo.insert()
{:discard, "Does not require update"}
end
else
favicon = scrape_favicon(uri)
nodeinfo = scrape_nodeinfo(uri)
favicon
Logger.info("Creating metadata for #{host}")
%Instance{}
|> changeset(%{
host: host,
favicon: favicon,
nodeinfo: nodeinfo,
metadata_updated_at: NaiveDateTime.utc_now()
})
|> Repo.insert()
end
rescue
e ->
Logger.warn("Instance.get_or_update_favicon(\"#{host}\") error: #{inspect(e)}")
end
def get_favicon(%URI{host: host}) do
existing_record = Repo.get_by(Instance, %{host: host})
if existing_record do
existing_record.favicon
else
nil
end
end
defp scrape_nodeinfo(%URI{} = instance_uri) do
with true <- Pleroma.Config.get([:instances_nodeinfo, :enabled]),
{_, true} <- {:reachable, reachable?(instance_uri.host)},
{:ok, %Tesla.Env{status: 200, body: body}} <-
Tesla.get(
"https://#{instance_uri.host}/.well-known/nodeinfo",
headers: [{"Accept", "application/json"}]
),
{:ok, json} <- Jason.decode(body),
{:ok, %{"links" => links}} <- {:ok, json},
{:ok, %{"href" => href}} <-
{:ok,
Enum.find(links, &(&1["rel"] == "http://nodeinfo.diaspora.software/ns/schema/2.0"))},
{:ok, %Tesla.Env{body: data}} <-
Pleroma.HTTP.get(href, [{"accept", "application/json"}], []),
{:length, true} <- {:length, String.length(data) < 50_000},
{:ok, nodeinfo} <- Jason.decode(data) do
nodeinfo
else
{:reachable, false} ->
Logger.debug(
"Instance.scrape_nodeinfo(\"#{to_string(instance_uri)}\") ignored unreachable host"
)
nil
{:length, false} ->
Logger.debug(
"Instance.scrape_nodeinfo(\"#{to_string(instance_uri)}\") ignored too long body"
)
nil
_ ->
nil
end
end
defp scrape_favicon(%URI{} = instance_uri) do
try do
with {_, true} <- {:reachable, reachable?(instance_uri.host)},
{:ok, %Tesla.Env{body: html}} <-
Pleroma.HTTP.get(to_string(instance_uri), [{"accept", "text/html"}], []),
{_, [favicon_rel | _]} when is_binary(favicon_rel) <-
{:parse,
html |> Floki.parse_document!() |> Floki.attribute("link[rel=icon]", "href")},
{_, favicon} when is_binary(favicon) <-
{:merge, URI.merge(instance_uri, favicon_rel) |> to_string()} do
favicon
else
{:reachable, false} ->
Logger.debug(
"Instance.scrape_favicon(\"#{to_string(instance_uri)}\") ignored unreachable host"
)
nil
_ ->
nil
end
rescue
e ->
Logger.warn(
"Instance.scrape_favicon(\"#{to_string(instance_uri)}\") error: #{inspect(e)}"
with true <- Pleroma.Config.get([:instances_favicons, :enabled]),
{_, true} <- {:reachable, reachable?(instance_uri.host)},
{:ok, %Tesla.Env{body: html}} <-
Pleroma.HTTP.get(to_string(instance_uri), [{"accept", "text/html"}], []),
{_, [favicon_rel | _]} when is_binary(favicon_rel) <-
{:parse, html |> Floki.parse_document!() |> Floki.attribute("link[rel=icon]", "href")},
{_, favicon} when is_binary(favicon) <-
{:merge, URI.merge(instance_uri, favicon_rel) |> to_string()},
{:length, true} <- {:length, String.length(favicon) < 255} do
favicon
else
{:reachable, false} ->
Logger.debug(
"Instance.scrape_favicon(\"#{to_string(instance_uri)}\") ignored unreachable host"
)
nil
_ ->
nil
end
end
@ -217,4 +295,25 @@ def perform(:delete_instance, host) when is_binary(host) do
end)
|> Stream.run()
end
def get_by_url(url_or_host) do
url = host(url_or_host)
Repo.get_by(Instance, host: url)
end
def get_cached_by_url(url_or_host) do
url = host(url_or_host)
if url == Pleroma.Web.Endpoint.host() do
{:ok, local()}
else
@cachex.fetch!(:instances_cache, "instances:#{url}", fn _ ->
with %Instance{} = instance <- get_by_url(url) do
{:commit, {:ok, instance}}
else
_ -> {:ignore, nil}
end
end)
end
end
end

View file

@ -4,6 +4,7 @@
defmodule Pleroma.Object.Fetcher do
alias Pleroma.HTTP
alias Pleroma.Instances
alias Pleroma.Maps
alias Pleroma.Object
alias Pleroma.Object.Containment
@ -234,6 +235,10 @@ def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do
{:ok, body} <- get_object(id),
{:ok, data} <- safe_json_decode(body),
:ok <- Containment.contain_origin_from_id(id, data) do
unless Instances.reachable?(id) do
Instances.set_reachable(id)
end
{:ok, data}
else
{:scheme, _} ->

View file

@ -1910,7 +1910,8 @@ def get_or_fetch_by_ap_id(ap_id) do
{%User{} = user, _} ->
{:ok, user}
_ ->
e ->
Logger.error("Could not fetch user, #{inspect(e)}")
{:error, :not_found}
end
end

View file

@ -67,8 +67,9 @@ def create(relationship_type, %User{} = source, %User{} = target) do
target_id: target.id
})
|> Repo.insert(
on_conflict: {:replace_all_except, [:id]},
conflict_target: [:source_id, :relationship_type, :target_id]
on_conflict: {:replace_all_except, [:id, :inserted_at]},
conflict_target: [:source_id, :relationship_type, :target_id],
returning: true
)
end

View file

@ -1530,6 +1530,18 @@ defp object_to_user_data(data, additional) do
# we request WebFinger here
nickname = additional[:nickname_from_acct] || generate_nickname(data)
# also_known_as must be a URL
also_known_as =
data
|> Map.get("alsoKnownAs", [])
|> Enum.filter(fn url ->
case URI.parse(url) do
%URI{scheme: "http"} -> true
%URI{scheme: "https"} -> true
_ -> false
end
end)
%{
ap_id: data["id"],
uri: get_actor_url(data["url"]),
@ -1547,7 +1559,7 @@ defp object_to_user_data(data, additional) do
featured_address: featured_address,
bio: data["summary"] || "",
actor_type: actor_type,
also_known_as: Map.get(data, "alsoKnownAs", []),
also_known_as: also_known_as,
public_key: public_key,
inbox: data["inbox"],
shared_inbox: shared_inbox,

View file

@ -149,9 +149,20 @@ defp get_policies(policy) when is_atom(policy), do: [policy]
defp get_policies(policies) when is_list(policies), do: policies
defp get_policies(_), do: []
# Matches the following:
# - https://baddomain.net
# - https://extra.baddomain.net/
# Does NOT match the following:
# - https://maybebaddomain.net/
def subdomain_regex("*." <> domain), do: subdomain_regex(domain)
def subdomain_regex(domain) do
~r/^(.+\.)?#{Regex.escape(domain)}$/i
end
@spec subdomains_regex([String.t()]) :: [Regex.t()]
def subdomains_regex(domains) when is_list(domains) do
for domain <- domains, do: ~r(^#{String.replace(domain, "*.", "(.*\\.)*")}$)i
Enum.map(domains, &subdomain_regex/1)
end
@spec subdomain_match?([Regex.t()], String.t()) :: boolean()
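To illustrate how the anchored pattern added above behaves (a self-contained sketch; note the regex matches bare host strings):

```elixir
# Same construction as subdomain_regex/1 above, inlined for clarity.
regex = ~r/^(.+\.)?#{Regex.escape("baddomain.net")}$/i

Regex.match?(regex, "baddomain.net")        #=> true  (exact domain)
Regex.match?(regex, "extra.baddomain.net")  #=> true  (any subdomain)
Regex.match?(regex, "maybebaddomain.net")   #=> false (suffix-only lookalike)
```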

View file

@ -192,6 +192,7 @@ def handle(%{data: %{"type" => "Like"}} = object, meta) do
# - Increase the user note count
# - Increase the reply count
# - Increase replies count
# - Ask for scraping of nodeinfo
# - Set up ActivityExpiration
# - Set up notifications
# - Index incoming posts for search (if needed)
@ -209,6 +210,10 @@ def handle(%{data: %{"type" => "Create"}} = activity, meta) do
reply_depth = (meta[:depth] || 0) + 1
Pleroma.Workers.NodeInfoFetcherWorker.enqueue("process", %{
"source_url" => activity.data["actor"]
})
# FIXME: Force inReplyTo to replies
if Pleroma.Web.Federator.allowed_thread_distance?(reply_depth) and
object.data["replies"] != nil do
@ -234,7 +239,9 @@ def handle(%{data: %{"type" => "Create"}} = activity, meta) do
{:ok, activity, meta}
else
e -> Repo.rollback(e)
e ->
Logger.error(inspect(e))
Repo.rollback(e)
end
end

View file

@ -21,10 +21,12 @@ defmodule Pleroma.Web.AkkomaAPI.TranslationController do
@doc "GET /api/v1/akkoma/translation/languages"
def languages(conn, _params) do
with {:ok, source_languages, dest_languages} <- get_languages() do
with {:enabled, true} <- {:enabled, Pleroma.Config.get([:translator, :enabled])},
{:ok, source_languages, dest_languages} <- get_languages() do
conn
|> json(%{source: source_languages, target: dest_languages})
else
{:enabled, false} -> json(conn, %{})
e -> IO.inspect(e)
end
end

View file

@ -102,7 +102,7 @@ defp register_user(connection, base, uid, name) do
{:scope, :eldap.wholeSubtree()},
{:timeout, @search_timeout}
]) do
{:ok, {:eldap_search_result, [{:eldap_entry, _, attributes}], _}} ->
{:ok, {:eldap_search_result, [{:eldap_entry, _, attributes}], _, _}} ->
params = %{
name: name,
nickname: name,

View file

@ -48,7 +48,9 @@ def publish(%{id: "pleroma:fakeid"} = activity) do
@impl true
def publish(%{data: %{"object" => object}} = activity) when is_binary(object) do
PublisherWorker.enqueue("publish", %{"activity_id" => activity.id, "object_data" => nil})
PublisherWorker.enqueue("publish", %{"activity_id" => activity.id, "object_data" => nil},
priority: publish_priority(activity)
)
end
@impl true
@ -63,7 +65,7 @@ def publish(%{data: %{"object" => object}} = activity) when is_map(object) or is
)
end
defp publish_priority(%{type: "Delete"}), do: 3
defp publish_priority(%{data: %{"type" => "Delete"}}), do: 3
defp publish_priority(_), do: 0
# Job Worker Callbacks

View file

@ -94,12 +94,12 @@ def render(
followed_by =
if following_relationships do
case FollowingRelationship.find(following_relationships, target, reading_user) do
%{state: :follow_accept} -> true
_ -> false
end
target_to_user_following_relation =
FollowingRelationship.find(following_relationships, target, reading_user)
User.get_follow_state(target, reading_user, target_to_user_following_relation)
else
User.following?(target, reading_user)
User.get_follow_state(target, reading_user)
end
subscribing =
@ -115,7 +115,7 @@ def render(
%{
id: to_string(target.id),
following: follow_state == :follow_accept,
followed_by: followed_by,
followed_by: followed_by == :follow_accept,
blocking:
UserRelationship.exists?(
user_relationships,
@ -151,6 +151,7 @@ def render(
subscribing: subscribing,
notifying: subscribing,
requested: follow_state == :follow_pending,
requested_by: followed_by == :follow_pending,
domain_blocking: User.blocks_domain?(reading_user, target),
showing_reblogs:
not UserRelationship.exists?(
@ -186,6 +187,16 @@ def render("relationships.json", %{user: user, targets: targets} = opts) do
render_many(targets, AccountView, "relationship.json", render_opts)
end
def render("instance.json", %{instance: %Pleroma.Instances.Instance{} = instance}) do
%{
name: instance.host,
favicon: instance.favicon |> MediaProxy.url(),
nodeinfo: instance.nodeinfo
}
end
def render("instance.json", _), do: nil
defp do_render("show.json", %{user: user} = opts) do
user = User.sanitize_html(user, User.html_filter_policy(opts[:for]))
display_name = user.name || user.nickname
@ -230,16 +241,20 @@ defp do_render("show.json", %{user: user} = opts) do
%{}
end
favicon =
if Pleroma.Config.get([:instances_favicons, :enabled]) do
user
|> Map.get(:ap_id, "")
|> URI.parse()
|> URI.merge("/")
|> Pleroma.Instances.Instance.get_or_update_favicon()
|> MediaProxy.url()
instance =
with {:ok, instance} <- Pleroma.Instances.Instance.get_cached_by_url(user.ap_id) do
instance
else
_ ->
nil
end
favicon =
if is_nil(instance) do
nil
else
instance.favicon
|> MediaProxy.url()
end
%{
@ -271,7 +286,9 @@ defp do_render("show.json", %{user: user} = opts) do
}
},
last_status_at: user.last_status_at,
akkoma: %{
instance: render("instance.json", %{instance: instance})
},
# Pleroma extensions
# Note: it's insecure to output :email but fully-qualified nickname may serve as safe stub
fqn: User.full_nickname(user),
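For reference, a minimal sketch of the new akkoma.instance block as it appears in rendered account JSON (values illustrative, shape matching the AccountViewTest expectations further down):

    akkoma: %{
      instance: %{
        name: "example.com",
        favicon: nil,
        nodeinfo: %{"version" => "2.1"}
      }
    }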

View file

@ -68,7 +68,7 @@ defp options_and_votes_count(options) do
end)
end
defp voters_count(%{data: %{"voters" => [_ | _] = voters}}) do
defp voters_count(%{data: %{"voters" => voters}}) when is_list(voters) do
length(voters)
end

View file

@ -209,212 +209,214 @@ def render(
end
def render("show.json", %{activity: %{data: %{"object" => _object}} = activity} = opts) do
object = Object.normalize(activity, fetch: false)
with %Object{} = object <- Object.normalize(activity, fetch: false) do
user = CommonAPI.get_user(activity.data["actor"])
user_follower_address = user.follower_address
user = CommonAPI.get_user(activity.data["actor"])
user_follower_address = user.follower_address
like_count = object.data["like_count"] || 0
announcement_count = object.data["announcement_count"] || 0
like_count = object.data["like_count"] || 0
announcement_count = object.data["announcement_count"] || 0
hashtags = Object.hashtags(object)
sensitive = object.data["sensitive"] || Enum.member?(hashtags, "nsfw")
hashtags = Object.hashtags(object)
sensitive = object.data["sensitive"] || Enum.member?(hashtags, "nsfw")
tags = Object.tags(object)
tags = Object.tags(object)
tag_mentions =
tags
|> Enum.filter(fn tag -> is_map(tag) and tag["type"] == "Mention" end)
|> Enum.map(fn tag -> tag["href"] end)
tag_mentions =
tags
|> Enum.filter(fn tag -> is_map(tag) and tag["type"] == "Mention" end)
|> Enum.map(fn tag -> tag["href"] end)
mentions =
(object.data["to"] ++ tag_mentions)
|> Enum.uniq()
|> Enum.map(fn
Pleroma.Constants.as_public() -> nil
^user_follower_address -> nil
ap_id -> User.get_cached_by_ap_id(ap_id)
end)
|> Enum.filter(& &1)
|> Enum.map(fn user -> AccountView.render("mention.json", %{user: user}) end)
mentions =
(object.data["to"] ++ tag_mentions)
|> Enum.uniq()
|> Enum.map(fn
Pleroma.Constants.as_public() -> nil
^user_follower_address -> nil
ap_id -> User.get_cached_by_ap_id(ap_id)
end)
|> Enum.filter(& &1)
|> Enum.map(fn user -> AccountView.render("mention.json", %{user: user}) end)
favorited = opts[:for] && opts[:for].ap_id in (object.data["likes"] || [])
favorited = opts[:for] && opts[:for].ap_id in (object.data["likes"] || [])
bookmarked = Activity.get_bookmark(activity, opts[:for]) != nil
bookmarked = Activity.get_bookmark(activity, opts[:for]) != nil
client_posted_this_activity = opts[:for] && user.id == opts[:for].id
client_posted_this_activity = opts[:for] && user.id == opts[:for].id
expires_at =
with true <- client_posted_this_activity,
%Oban.Job{scheduled_at: scheduled_at} <-
Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity.id) do
scheduled_at
else
_ -> nil
end
expires_at =
with true <- client_posted_this_activity,
%Oban.Job{scheduled_at: scheduled_at} <-
Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity.id) do
scheduled_at
else
_ -> nil
end
thread_muted? =
cond do
is_nil(opts[:for]) -> false
is_boolean(activity.thread_muted?) -> activity.thread_muted?
true -> CommonAPI.thread_muted?(opts[:for], activity)
end
thread_muted? =
cond do
is_nil(opts[:for]) -> false
is_boolean(activity.thread_muted?) -> activity.thread_muted?
true -> CommonAPI.thread_muted?(opts[:for], activity)
end
attachment_data = object.data["attachment"] || []
attachments = render_many(attachment_data, StatusView, "attachment.json", as: :attachment)
attachment_data = object.data["attachment"] || []
attachments = render_many(attachment_data, StatusView, "attachment.json", as: :attachment)
created_at = Utils.to_masto_date(object.data["published"])
created_at = Utils.to_masto_date(object.data["published"])
edited_at =
with %{"updated" => updated} <- object.data,
date <- Utils.to_masto_date(updated),
true <- date != "" do
date
else
_ ->
nil
end
edited_at =
with %{"updated" => updated} <- object.data,
date <- Utils.to_masto_date(updated),
true <- date != "" do
date
else
_ ->
nil
end
reply_to = get_reply_to(activity, opts)
reply_to = get_reply_to(activity, opts)
reply_to_user = reply_to && CommonAPI.get_user(reply_to.data["actor"])
reply_to_user = reply_to && CommonAPI.get_user(reply_to.data["actor"])
history_len =
1 +
(Object.Updater.history_for(object.data)
|> Map.get("orderedItems")
|> length())
history_len =
1 +
(Object.Updater.history_for(object.data)
|> Map.get("orderedItems")
|> length())
# See render("history.json", ...) for more details
# Here the implicit index of the current content is 0
chrono_order = history_len - 1
# See render("history.json", ...) for more details
# Here the implicit index of the current content is 0
chrono_order = history_len - 1
content =
object
|> render_content()
content =
object
|> render_content()
content_html =
content
|> Activity.HTML.get_cached_scrubbed_html_for_activity(
User.html_filter_policy(opts[:for]),
activity,
"mastoapi:content:#{chrono_order}"
)
content_plaintext =
content
|> Activity.HTML.get_cached_stripped_html_for_activity(
activity,
"mastoapi:content:#{chrono_order}"
)
summary = object.data["summary"] || ""
card = render("card.json", Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity))
url =
if user.local do
Pleroma.Web.Router.Helpers.o_status_url(Pleroma.Web.Endpoint, :notice, activity)
else
object.data["url"] || object.data["external_url"] || object.data["id"]
end
direct_conversation_id =
with {_, nil} <- {:direct_conversation_id, opts[:direct_conversation_id]},
{_, true} <- {:include_id, opts[:with_direct_conversation_id]},
{_, %User{} = for_user} <- {:for_user, opts[:for]} do
Activity.direct_conversation_id(activity, for_user)
else
{:direct_conversation_id, participation_id} when is_integer(participation_id) ->
participation_id
_e ->
nil
end
emoji_reactions =
object.data
|> Map.get("reactions", [])
|> EmojiReactionController.filter_allowed_users(
opts[:for],
Map.get(opts, :with_muted, false)
)
|> Stream.map(fn {emoji, users, url} ->
build_emoji_map(emoji, users, url, opts[:for])
end)
|> Enum.to_list()
# Status muted state (would do 1 request per status unless user mutes are preloaded)
muted =
thread_muted? ||
UserRelationship.exists?(
get_in(opts, [:relationships, :user_relationships]),
:mute,
opts[:for],
user,
fn for_user, user -> User.mutes?(for_user, user) end
content_html =
content
|> Activity.HTML.get_cached_scrubbed_html_for_activity(
User.html_filter_policy(opts[:for]),
activity,
"mastoapi:content:#{chrono_order}"
)
{pinned?, pinned_at} = pin_data(object, user)
content_plaintext =
content
|> Activity.HTML.get_cached_stripped_html_for_activity(
activity,
"mastoapi:content:#{chrono_order}"
)
quote = Activity.get_quoted_activity_from_object(object)
summary = object.data["summary"] || ""
%{
id: to_string(activity.id),
uri: object.data["id"],
url: url,
account:
AccountView.render("show.json", %{
user: user,
for: opts[:for]
}),
in_reply_to_id: reply_to && to_string(reply_to.id),
in_reply_to_account_id: reply_to_user && to_string(reply_to_user.id),
reblog: nil,
card: card,
content: content_html,
text: opts[:with_source] && get_source_text(object.data["source"]),
created_at: created_at,
edited_at: edited_at,
reblogs_count: announcement_count,
replies_count: object.data["repliesCount"] || 0,
favourites_count: like_count,
reblogged: reblogged?(activity, opts[:for]),
favourited: present?(favorited),
bookmarked: present?(bookmarked),
muted: muted,
pinned: pinned?,
sensitive: sensitive,
spoiler_text: summary,
visibility: get_visibility(object),
media_attachments: attachments,
poll: render(PollView, "show.json", object: object, for: opts[:for]),
mentions: mentions,
tags: build_tags(tags),
application: build_application(object.data["generator"]),
language: nil,
emojis: build_emojis(object.data["emoji"]),
quote_id: if(quote, do: quote.id, else: nil),
quote: maybe_render_quote(quote, opts),
emoji_reactions: emoji_reactions,
pleroma: %{
local: activity.local,
conversation_id: get_context_id(activity),
context: object.data["context"],
in_reply_to_account_acct: reply_to_user && reply_to_user.nickname,
content: %{"text/plain" => content_plaintext},
spoiler_text: %{"text/plain" => summary},
expires_at: expires_at,
direct_conversation_id: direct_conversation_id,
thread_muted: thread_muted?,
card = render("card.json", Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity))
url =
if user.local do
Pleroma.Web.Router.Helpers.o_status_url(Pleroma.Web.Endpoint, :notice, activity)
else
object.data["url"] || object.data["external_url"] || object.data["id"]
end
direct_conversation_id =
with {_, nil} <- {:direct_conversation_id, opts[:direct_conversation_id]},
{_, true} <- {:include_id, opts[:with_direct_conversation_id]},
{_, %User{} = for_user} <- {:for_user, opts[:for]} do
Activity.direct_conversation_id(activity, for_user)
else
{:direct_conversation_id, participation_id} when is_integer(participation_id) ->
participation_id
_e ->
nil
end
emoji_reactions =
object.data
|> Map.get("reactions", [])
|> EmojiReactionController.filter_allowed_users(
opts[:for],
Map.get(opts, :with_muted, false)
)
|> Stream.map(fn {emoji, users, url} ->
build_emoji_map(emoji, users, url, opts[:for])
end)
|> Enum.to_list()
# Status muted state (would do 1 request per status unless user mutes are preloaded)
muted =
thread_muted? ||
UserRelationship.exists?(
get_in(opts, [:relationships, :user_relationships]),
:mute,
opts[:for],
user,
fn for_user, user -> User.mutes?(for_user, user) end
)
{pinned?, pinned_at} = pin_data(object, user)
quote = Activity.get_quoted_activity_from_object(object)
%{
id: to_string(activity.id),
uri: object.data["id"],
url: url,
account:
AccountView.render("show.json", %{
user: user,
for: opts[:for]
}),
in_reply_to_id: reply_to && to_string(reply_to.id),
in_reply_to_account_id: reply_to_user && to_string(reply_to_user.id),
reblog: nil,
card: card,
content: content_html,
text: opts[:with_source] && get_source_text(object.data["source"]),
created_at: created_at,
edited_at: edited_at,
reblogs_count: announcement_count,
replies_count: object.data["repliesCount"] || 0,
favourites_count: like_count,
reblogged: reblogged?(activity, opts[:for]),
favourited: present?(favorited),
bookmarked: present?(bookmarked),
muted: muted,
pinned: pinned?,
sensitive: sensitive,
spoiler_text: summary,
visibility: get_visibility(object),
media_attachments: attachments,
poll: render(PollView, "show.json", object: object, for: opts[:for]),
mentions: mentions,
tags: build_tags(tags),
application: build_application(object.data["generator"]),
language: nil,
emojis: build_emojis(object.data["emoji"]),
quote_id: if(quote, do: quote.id, else: nil),
quote: maybe_render_quote(quote, opts),
emoji_reactions: emoji_reactions,
parent_visible: visible_for_user?(reply_to, opts[:for]),
pinned_at: pinned_at
},
akkoma: %{
source: object.data["source"]
pleroma: %{
local: activity.local,
conversation_id: get_context_id(activity),
context: object.data["context"],
in_reply_to_account_acct: reply_to_user && reply_to_user.nickname,
content: %{"text/plain" => content_plaintext},
spoiler_text: %{"text/plain" => summary},
expires_at: expires_at,
direct_conversation_id: direct_conversation_id,
thread_muted: thread_muted?,
emoji_reactions: emoji_reactions,
parent_visible: visible_for_user?(reply_to, opts[:for]),
pinned_at: pinned_at
},
akkoma: %{
source: object.data["source"]
}
}
}
else
nil -> nil
end
end
def render("show.json", _) do

View file

@ -42,7 +42,7 @@ def headers do
custom_http_frontend_headers = custom_http_frontend_headers()
headers = [
{"x-xss-protection", "1; mode=block"},
{"x-xss-protection", "0"},
{"x-permitted-cross-domain-policies", "none"},
{"x-frame-options", "DENY"},
{"x-content-type-options", "nosniff"},
@ -68,7 +68,7 @@ def headers do
]
}
[{"reply-to", Jason.encode!(report_group)} | headers]
[{"report-to", Jason.encode!(report_group)} | headers]
else
headers
end
@ -104,13 +104,12 @@ defp csp_string do
{[img_src, " https:"], [media_src, " https:"]}
end
connect_src = ["connect-src 'self' blob: ", static_url, ?\s, websocket_url]
connect_src =
if Config.get(:env) == :dev do
[connect_src, " http://localhost:3035/"]
if Config.get([:media_proxy, :enabled]) do
sources = build_csp_multimedia_source_list()
["connect-src 'self' blob: ", static_url, ?\s, websocket_url, ?\s, sources]
else
connect_src
["connect-src 'self' blob: ", static_url, ?\s, websocket_url]
end
script_src =
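For illustration, with the media proxy enabled the assembled directive would come out roughly as (hostnames made up):

    connect-src 'self' blob: https://static.example.tld wss://example.tld https://media.example.tld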

View file

@ -35,7 +35,7 @@ def call(%{request_path: <<"/", @path, "/", file::binary>>} = conn, opts) do
conn =
case fetch_query_params(conn) do
%{query_params: %{"name" => name}} = conn ->
name = String.replace(name, "\"", "\\\"")
name = escape_header_value(name)
put_resp_header(conn, "content-disposition", "filename=\"#{name}\"")
@ -98,4 +98,11 @@ defp get_media(conn, unknown, _) do
|> send_resp(:internal_server_error, dgettext("errors", "Internal Error"))
|> halt()
end
defp escape_header_value(value) do
value
|> String.replace("\"", "\\\"")
|> String.replace("\\r", "")
|> String.replace("\\n", "")
end
end

View file

@ -16,7 +16,7 @@ defmodule Pleroma.Web.Push.Impl do
require Logger
import Ecto.Query
@types ["Create", "Follow", "Announce", "Like", "Move", "EmojiReact"]
@types ["Create", "Follow", "Announce", "Like", "Move", "EmojiReact", "Update"]
@doc "Performs sending notifications for user subscriptions"
@spec perform(Notification.t()) :: list(any) | :error | {:error, :unknown_type}
@ -167,6 +167,15 @@ def format_body(
end
end
def format_body(
%{activity: %{data: %{"type" => "Update"}}},
actor,
_object,
_mastodon_type
) do
"@#{actor.nickname} edited a status"
end
def format_title(activity, mastodon_type \\ nil)
def format_title(%{activity: %{data: %{"directMessage" => true}}}, _mastodon_type) do
@ -180,6 +189,7 @@ def format_title(%{type: type}, mastodon_type) do
"follow_request" -> "New Follow Request"
"reblog" -> "New Repeat"
"favourite" -> "New Favorite"
"update" -> "New Update"
"pleroma:emoji_reaction" -> "New Reaction"
type -> "New #{String.capitalize(type || "event")}"
end
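A minimal sketch of what the new clauses produce, assuming an actor nicknamed "alice" (not part of the diff):

    actor = %Pleroma.User{nickname: "alice"}

    Pleroma.Web.Push.Impl.format_body(
      %{activity: %{data: %{"type" => "Update"}}},
      actor,
      nil,
      nil
    )
    #=> "@alice edited a status"

    Pleroma.Web.Push.Impl.format_title(%{type: "update"})
    #=> "New Update"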

View file

@ -14,6 +14,11 @@ def process(backup, admin_user_id \\ nil) do
|> Oban.insert()
end
@impl Oban.Worker
def timeout(_job) do
Pleroma.Config.get([:workers, :timeout, :backup], :timer.minutes(1))
end
def schedule_deletion(backup) do
days = Pleroma.Config.get([Backup, :purge_after_days])
time = 60 * 60 * 24 * days
@ -30,6 +35,7 @@ def delete(backup) do
|> Oban.insert()
end
@impl true
def perform(%Job{
args: %{"op" => "process", "backup_id" => backup_id, "admin_user_id" => admin_user_id}
}) do

View file

@ -0,0 +1,18 @@
defmodule Pleroma.Workers.NodeInfoFetcherWorker do
use Pleroma.Workers.WorkerHelper, queue: "nodeinfo_fetcher"
alias Oban.Job
alias Pleroma.Instances.Instance
@impl Oban.Worker
def perform(%Job{
args: %{"op" => "process", "source_url" => domain}
}) do
uri =
domain
|> URI.parse()
|> URI.merge("/")
Instance.update_metadata(uri)
end
end
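The enqueue path is exercised in the SideEffects test further down; a hypothetical call site would look like:

    Pleroma.Workers.NodeInfoFetcherWorker.enqueue("process", %{
      "source_url" => activity.data["actor"]
    })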

View file

@ -27,6 +27,11 @@ def enqueue(args) do
end
end
@impl Oban.Worker
def timeout(_job) do
Pleroma.Config.get([:workers, :timeout, :activity_expiration], :timer.minutes(1))
end
@impl true
def perform(%Oban.Job{args: %{"activity_id" => id}}) do
with %Activity{} = activity <- find_activity(id),

View file

@ -24,6 +24,11 @@ def enqueue(args) do
|> Oban.insert()
end
@impl Oban.Worker
def timeout(_job) do
Pleroma.Config.get([:workers, :timeout, :filter_expiration], :timer.minutes(1))
end
@impl true
def perform(%Job{args: %{"filter_id" => id}}) do
Pleroma.Filter

View file

@ -19,6 +19,11 @@ def enqueue(args) do
|> Oban.insert()
end
@impl Oban.Worker
def timeout(_job) do
Pleroma.Config.get([:workers, :timeout, :token_expiration], :timer.minutes(1))
end
@impl true
def perform(%Oban.Job{args: %{"token_id" => id, "mod" => module}}) do
module

View file

@ -43,6 +43,12 @@ def enqueue(op, params, worker_args \\ []) do
|> apply(:new, [params, worker_args])
|> Oban.insert()
end
@impl Oban.Worker
def timeout(_job) do
queue_atom = String.to_atom(unquote(queue))
Config.get([:workers, :timeout, queue_atom], :timer.minutes(1))
end
end
end
end
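The lookup keys above suggest per-queue timeouts are tunable from config; a hypothetical snippet (key names inferred from the Config.get calls, every queue still defaults to one minute):

    config :pleroma, :workers,
      timeout: [
        backup: :timer.minutes(5),
        nodeinfo_fetcher: :timer.seconds(30)
      ]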

View file

@ -4,7 +4,7 @@ defmodule Pleroma.Mixfile do
def project do
[
app: :pleroma,
version: version("3.3.1"),
version: version("3.4.0"),
elixir: "~> 1.12",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
@ -139,7 +139,7 @@ defp deps do
{:castore, "~> 0.1"},
{:cowlib, "~> 2.9", override: true},
{:gun, "~> 2.0.0-rc.1", override: true},
{:finch, "~> 0.10.0"},
{:finch, "~> 0.13.0"},
{:jason, "~> 1.2"},
{:mogrify, "~> 0.9.1"},
{:ex_aws, "~> 2.1.6"},

View file

@ -7,7 +7,7 @@
"cachex": {:hex, :cachex, "3.4.0", "868b2959ea4aeb328c6b60ff66c8d5123c083466ad3c33d3d8b5f142e13101fb", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "370123b1ab4fba4d2965fb18f87fd758325709787c8c5fce35b3fe80645ccbe5"},
"calendar": {:hex, :calendar, "1.0.0", "f52073a708528482ec33d0a171954ca610fe2bd28f1e871f247dc7f1565fa807", [:mix], [{:tzdata, "~> 0.5.20 or ~> 0.1.201603 or ~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "990e9581920c82912a5ee50e62ff5ef96da6b15949a2ee4734f935fdef0f0a6f"},
"captcha": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git", "e0f16822d578866e186a0974d65ad58cddc1e2ab", [ref: "e0f16822d578866e186a0974d65ad58cddc1e2ab"]},
"castore": {:hex, :castore, "0.1.17", "ba672681de4e51ed8ec1f74ed624d104c0db72742ea1a5e74edbc770c815182f", [:mix], [], "hexpm", "d9844227ed52d26e7519224525cb6868650c272d4a3d327ce3ca5570c12163f9"},
"castore": {:hex, :castore, "0.1.18", "deb5b9ab02400561b6f5708f3e7660fc35ca2d51bfc6a940d2f513f89c2975fc", [:mix], [], "hexpm", "61bbaf6452b782ef80b33cdb45701afbcf0a918a45ebe7e73f1130d661e66a06"},
"certifi": {:hex, :certifi, "2.9.0", "6f2a475689dd47f19fb74334859d460a2dc4e3252a3324bd2111b8f0429e7e21", [:rebar3], [], "hexpm", "266da46bdb06d6c6d35fde799bcb28d36d985d424ad7c08b5bb48f5b5cdd4641"},
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
"comeonin": {:hex, :comeonin, "5.3.3", "2c564dac95a35650e9b6acfe6d2952083d8a08e4a89b93a481acb552b325892e", [:mix], [], "hexpm", "3e38c9c2cb080828116597ca8807bb482618a315bfafd98c90bc22a821cc84df"},
@ -43,14 +43,14 @@
"fast_html": {:hex, :fast_html, "2.0.5", "c61760340606c1077ff1f196f17834056cb1dd3d5cb92a9f2cabf28bc6221c3c", [:make, :mix], [{:elixir_make, "~> 0.4", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}], "hexpm", "605f4f4829443c14127694ebabb681778712ceecb4470ec32aa31012330e6506"},
"fast_sanitize": {:hex, :fast_sanitize, "0.2.3", "67b93dfb34e302bef49fec3aaab74951e0f0602fd9fa99085987af05bd91c7a5", [:mix], [{:fast_html, "~> 2.0", [hex: :fast_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.8", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "e8ad286d10d0386e15d67d0ee125245ebcfbc7d7290b08712ba9013c8c5e56e2"},
"file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
"finch": {:hex, :finch, "0.10.2", "9ad27d68270d879f73f26604bb2e573d40f29bf0e907064a9a337f90a16a0312", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "dd8b11b282072cec2ef30852283949c248bd5d2820c88d8acc89402b81db7550"},
"finch": {:hex, :finch, "0.13.0", "c881e5460ec563bf02d4f4584079e62201db676ed4c0ef3e59189331c4eddf7b", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "49957dcde10dcdc042a123a507a9c5ec5a803f53646d451db2f7dea696fba6cc"},
"flake_id": {:hex, :flake_id, "0.1.0", "7716b086d2e405d09b647121a166498a0d93d1a623bead243e1f74216079ccb3", [:mix], [{:base62, "~> 1.2", [hex: :base62, repo: "hexpm", optional: false]}, {:ecto, ">= 2.0.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm", "31fc8090fde1acd267c07c36ea7365b8604055f897d3a53dd967658c691bd827"},
"floki": {:hex, :floki, "0.33.1", "f20f1eb471e726342b45ccb68edb9486729e7df94da403936ea94a794f072781", [:mix], [{:html_entities, "~> 0.5.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm", "461035fd125f13fdf30f243c85a0b1e50afbec876cbf1ceefe6fddd2e6d712c6"},
"gen_smtp": {:hex, :gen_smtp, "0.15.0", "9f51960c17769b26833b50df0b96123605a8024738b62db747fece14eb2fbfcc", [:rebar3], [], "hexpm", "29bd14a88030980849c7ed2447b8db6d6c9278a28b11a44cafe41b791205440f"},
"gettext": {:git, "https://github.com/tusooa/gettext.git", "72fb2496b6c5280ed911bdc3756890e7f38a4808", [ref: "72fb2496b6c5280ed911bdc3756890e7f38a4808"]},
"gun": {:hex, :gun, "2.0.0-rc.2", "7c489a32dedccb77b6e82d1f3c5a7dadfbfa004ec14e322cdb5e579c438632d2", [:make, :rebar3], [{:cowlib, "2.11.0", [hex: :cowlib, repo: "hexpm", optional: false]}], "hexpm", "6b9d1eae146410d727140dbf8b404b9631302ecc2066d1d12f22097ad7d254fc"},
"hackney": {:hex, :hackney, "1.18.1", "f48bf88f521f2a229fc7bae88cf4f85adc9cd9bcf23b5dc8eb6a1788c662c4f6", [:rebar3], [{:certifi, "~>2.9.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~>6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~>1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.3.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~>1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "a4ecdaff44297e9b5894ae499e9a070ea1888c84afdd1fd9b7b2bc384950128e"},
"hpax": {:hex, :hpax, "0.1.1", "2396c313683ada39e98c20a75a82911592b47e5c24391363343bde74f82396ca", [:mix], [], "hexpm", "0ae7d5a0b04a8a60caf7a39fcf3ec476f35cc2cc16c05abea730d3ce6ac6c826"},
"hpax": {:hex, :hpax, "0.1.2", "09a75600d9d8bbd064cdd741f21fc06fc1f4cf3d0fcc335e5aa19be1a7235c84", [:mix], [], "hexpm", "2c87843d5a23f5f16748ebe77969880e29809580efdaccd615cd3bed628a8c13"},
"html_entities": {:hex, :html_entities, "0.5.2", "9e47e70598da7de2a9ff6af8758399251db6dbb7eebe2b013f2bbd2515895c3c", [:mix], [], "hexpm", "c53ba390403485615623b9531e97696f076ed415e8d8058b1dbaa28181f4fdcc"},
"http_signatures": {:hex, :http_signatures, "0.1.1", "ca7ebc1b61542b163644c8c3b1f0e0f41037d35f2395940d3c6c7deceab41fd8", [:mix], [], "hexpm", "cc3b8a007322cc7b624c0c15eec49ee58ac977254ff529a3c482f681465942a3"},
"httpoison": {:hex, :httpoison, "1.8.1", "df030d96de89dad2e9983f92b0c506a642d4b1f4a819c96ff77d12796189c63e", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "35156a6d678d6d516b9229e208942c405cf21232edd632327ecfaf4fd03e79e0"},

View file

@ -3,16 +3,16 @@ msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2020-09-09 09:49+0000\n"
"PO-Revision-Date: 2020-09-11 21:26+0000\n"
"Last-Translator: tarteka <info@tarteka.net>\n"
"Language-Team: Spanish <https://translate.pleroma.social/projects/pleroma/"
"pleroma/es/>\n"
"PO-Revision-Date: 2022-08-19 09:25+0000\n"
"Last-Translator: mint <they@mint.lgbt>\n"
"Language-Team: Spanish <http://translate.akkoma.dev/projects/akkoma/"
"akkoma-backend-errors/es/>\n"
"Language: es\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=n != 1;\n"
"X-Generator: Weblate 4.0.4\n"
"X-Generator: Weblate 4.13.1\n"
## This file is a PO Template file.
##
@ -66,8 +66,8 @@ msgstr[1] "debe tener %{count} caracteres"
msgid "should have %{count} item(s)"
msgid_plural "should have %{count} item(s)"
msgstr[0] ""
msgstr[1] ""
msgstr[0] "debería tener %{count} item"
msgstr[1] "debería tener %{count} items"
msgid "should be at least %{count} character(s)"
msgid_plural "should be at least %{count} character(s)"

View file

@ -0,0 +1,163 @@
msgid ""
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2022-08-16 10:49+0000\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: Automatically generated\n"
"Language-Team: none\n"
"Language: nl\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"X-Generator: Translate Toolkit 3.7.1\n"
## This file is a PO Template file.
##
## `msgid`s here are often extracted from source code.
## Add new translations manually only if they're dynamic
## translations that can't be statically extracted.
##
## Run `mix gettext.extract` to bring this file up to
## date. Leave `msgstr`s empty as changing them here has no
## effect: edit them in PO (`.po`) files instead.
msgid "eperm"
msgstr ""
msgid "eacces"
msgstr ""
msgid "eagain"
msgstr ""
msgid "ebadf"
msgstr ""
msgid "ebadmsg"
msgstr ""
msgid "ebusy"
msgstr ""
msgid "edeadlk"
msgstr ""
msgid "edeadlock"
msgstr ""
msgid "edquot"
msgstr ""
msgid "eexist"
msgstr ""
msgid "efault"
msgstr ""
msgid "efbig"
msgstr ""
msgid "eftype"
msgstr ""
msgid "eintr"
msgstr ""
msgid "einval"
msgstr ""
msgid "eio"
msgstr ""
msgid "eisdir"
msgstr ""
msgid "eloop"
msgstr ""
msgid "emfile"
msgstr ""
msgid "emlink"
msgstr ""
msgid "emultihop"
msgstr ""
msgid "enametoolong"
msgstr ""
msgid "enfile"
msgstr ""
msgid "enobufs"
msgstr ""
msgid "enodev"
msgstr ""
msgid "enolck"
msgstr ""
msgid "enolink"
msgstr ""
msgid "enoent"
msgstr ""
msgid "enomem"
msgstr ""
msgid "enospc"
msgstr ""
msgid "enosr"
msgstr ""
msgid "enostr"
msgstr ""
msgid "enosys"
msgstr ""
msgid "enotblk"
msgstr ""
msgid "enotdir"
msgstr ""
msgid "enotsup"
msgstr ""
msgid "enxio"
msgstr ""
msgid "eopnotsupp"
msgstr ""
msgid "eoverflow"
msgstr ""
msgid "epipe"
msgstr ""
msgid "erange"
msgstr ""
msgid "erofs"
msgstr ""
msgid "espipe"
msgstr ""
msgid "esrch"
msgstr ""
msgid "estale"
msgstr ""
msgid "etxtbsy"
msgstr ""
msgid "exdev"
msgstr ""

View file

@ -0,0 +1,17 @@
defmodule Pleroma.Repo.Migrations.AddNodeinfo do
use Ecto.Migration
def up do
alter table(:instances) do
add_if_not_exists(:nodeinfo, :map, default: %{})
add_if_not_exists(:metadata_updated_at, :naive_datetime)
end
end
def down do
alter table(:instances) do
remove_if_exists(:nodeinfo, :map)
remove_if_exists(:metadata_updated_at, :naive_datetime)
end
end
end

View file

@ -0,0 +1,37 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Repo.Migrations.AddAssociatedObjectIdFunction do
use Ecto.Migration
def up do
statement = """
CREATE OR REPLACE FUNCTION associated_object_id(data jsonb) RETURNS varchar AS $$
DECLARE
object_data jsonb;
BEGIN
IF jsonb_typeof(data->'object') = 'array' THEN
object_data := data->'object'->0;
ELSE
object_data := data->'object';
END IF;
IF jsonb_typeof(object_data->'id') = 'string' THEN
RETURN object_data->>'id';
ELSIF jsonb_typeof(object_data) = 'string' THEN
RETURN object_data#>>'{}';
ELSE
RETURN NULL;
END IF;
END;
$$ LANGUAGE plpgsql IMMUTABLE;
"""
execute(statement)
end
def down do
execute("DROP FUNCTION IF EXISTS associated_object_id(data jsonb)")
end
end
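A sketch of how the new helper could be used from Ecto (hypothetical query, not part of this changeset; the thread_visibility migration below calls the same function directly in SQL):

    import Ecto.Query

    object_ap_id = "https://example.com/objects/1"

    Pleroma.Repo.all(
      from(a in Pleroma.Activity,
        where: fragment("associated_object_id(?) = ?", a.data, ^object_ap_id)
      )
    )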

View file

@ -0,0 +1,37 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Repo.Migrations.SwitchToAssociatedObjectIdIndex do
use Ecto.Migration
@disable_ddl_transaction true
@disable_migration_lock true
def up do
drop_if_exists(
index(:activities, ["(coalesce(data->'object'->>'id', data->>'object'))"],
name: :activities_create_objects_index
)
)
create(
index(:activities, ["associated_object_id(data)"],
name: :activities_create_objects_index,
concurrently: true
)
)
end
def down do
drop_if_exists(
index(:activities, ["associated_object_id(data)"], name: :activities_create_objects_index)
)
create(
index(:activities, ["(coalesce(data->'object'->>'id', data->>'object'))"],
name: :activities_create_objects_index,
concurrently: true
)
)
end
end

View file

@ -0,0 +1,156 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Repo.Migrations.ChangeThreadVisibilityToUseNewObjectIdIndex do
use Ecto.Migration
def up do
execute(update_thread_visibility())
end
def down do
execute(restore_thread_visibility())
end
def update_thread_visibility do
"""
CREATE OR REPLACE FUNCTION thread_visibility(actor varchar, activity_id varchar, local_public varchar default '') RETURNS boolean AS $$
DECLARE
public varchar := 'https://www.w3.org/ns/activitystreams#Public';
child objects%ROWTYPE;
activity activities%ROWTYPE;
author_fa varchar;
valid_recipients varchar[];
actor_user_following varchar[];
BEGIN
--- Fetch actor following
SELECT array_agg(following.follower_address) INTO actor_user_following FROM following_relationships
JOIN users ON users.id = following_relationships.follower_id
JOIN users AS following ON following.id = following_relationships.following_id
WHERE users.ap_id = actor;
--- Fetch our initial activity.
SELECT * INTO activity FROM activities WHERE activities.data->>'id' = activity_id;
LOOP
--- Ensure that we have an activity before continuing.
--- If we don't, the thread is not satisfiable.
IF activity IS NULL THEN
RETURN false;
END IF;
--- We only care about Create activities.
IF activity.data->>'type' != 'Create' THEN
RETURN true;
END IF;
--- Normalize the child object into child.
SELECT * INTO child FROM objects
INNER JOIN activities ON associated_object_id(activities.data) = objects.data->>'id'
WHERE associated_object_id(activity.data) = objects.data->>'id';
--- Fetch the author's AS2 following collection.
SELECT COALESCE(users.follower_address, '') INTO author_fa FROM users WHERE users.ap_id = activity.actor;
--- Prepare valid recipients array.
valid_recipients := ARRAY[actor, public];
--- If we specified local public, add it.
IF local_public <> '' THEN
valid_recipients := valid_recipients || local_public;
END IF;
IF ARRAY[author_fa] && actor_user_following THEN
valid_recipients := valid_recipients || author_fa;
END IF;
--- Check visibility.
IF NOT valid_recipients && activity.recipients THEN
--- activity not visible, break out of the loop
RETURN false;
END IF;
--- If there's a parent, load it and do this all over again.
IF (child.data->'inReplyTo' IS NOT NULL) AND (child.data->'inReplyTo' != 'null'::jsonb) THEN
SELECT * INTO activity FROM activities
INNER JOIN objects ON associated_object_id(activities.data) = objects.data->>'id'
WHERE child.data->>'inReplyTo' = objects.data->>'id';
ELSE
RETURN true;
END IF;
END LOOP;
END;
$$ LANGUAGE plpgsql IMMUTABLE;
"""
end
# priv/repo/migrations/20220509180452_change_thread_visibility_to_be_local_only_aware.exs
def restore_thread_visibility do
"""
CREATE OR REPLACE FUNCTION thread_visibility(actor varchar, activity_id varchar, local_public varchar default '') RETURNS boolean AS $$
DECLARE
public varchar := 'https://www.w3.org/ns/activitystreams#Public';
child objects%ROWTYPE;
activity activities%ROWTYPE;
author_fa varchar;
valid_recipients varchar[];
actor_user_following varchar[];
BEGIN
--- Fetch actor following
SELECT array_agg(following.follower_address) INTO actor_user_following FROM following_relationships
JOIN users ON users.id = following_relationships.follower_id
JOIN users AS following ON following.id = following_relationships.following_id
WHERE users.ap_id = actor;
--- Fetch our initial activity.
SELECT * INTO activity FROM activities WHERE activities.data->>'id' = activity_id;
LOOP
--- Ensure that we have an activity before continuing.
--- If we don't, the thread is not satisfiable.
IF activity IS NULL THEN
RETURN false;
END IF;
--- We only care about Create activities.
IF activity.data->>'type' != 'Create' THEN
RETURN true;
END IF;
--- Normalize the child object into child.
SELECT * INTO child FROM objects
INNER JOIN activities ON COALESCE(activities.data->'object'->>'id', activities.data->>'object') = objects.data->>'id'
WHERE COALESCE(activity.data->'object'->>'id', activity.data->>'object') = objects.data->>'id';
--- Fetch the author's AS2 following collection.
SELECT COALESCE(users.follower_address, '') INTO author_fa FROM users WHERE users.ap_id = activity.actor;
--- Prepare valid recipients array.
valid_recipients := ARRAY[actor, public];
--- If we specified local public, add it.
IF local_public <> '' THEN
valid_recipients := valid_recipients || local_public;
END IF;
IF ARRAY[author_fa] && actor_user_following THEN
valid_recipients := valid_recipients || author_fa;
END IF;
--- Check visibility.
IF NOT valid_recipients && activity.recipients THEN
--- activity not visible, break out of the loop
RETURN false;
END IF;
--- If there's a parent, load it and do this all over again.
IF (child.data->'inReplyTo' IS NOT NULL) AND (child.data->'inReplyTo' != 'null'::jsonb) THEN
SELECT * INTO activity FROM activities
INNER JOIN objects ON COALESCE(activities.data->'object'->>'id', activities.data->>'object') = objects.data->>'id'
WHERE child.data->>'inReplyTo' = objects.data->>'id';
ELSE
RETURN true;
END IF;
END LOOP;
END;
$$ LANGUAGE plpgsql IMMUTABLE;
"""
end
end

BIN  priv/static/favicon.png (new file, 7 KiB; binary not shown)

View file

@ -61,6 +61,12 @@ def handle_cast(:refresh, _state) do
{:noreply, @init_state}
end
# Don't actually restart during tests.
# We just check if the correct call has been done.
# If we actually restart, we get errors during the tests like
# (RuntimeError) could not lookup Ecto repo Pleroma.Repo because it was not started or
# it does not exist
# See tests in Pleroma.Config.TransferTaskTest
def handle_cast({:restart, :test, _}, state) do
Logger.debug("pleroma manually restarted")
{:noreply, Map.put(state, :need_reboot, false)}
@ -74,6 +80,12 @@ def handle_cast({:restart, _, delay}, state) do
def handle_cast({:after_boot, _}, %{after_boot: true} = state), do: {:noreply, state}
# Don't actually restart during tests.
# We just check if the correct call has been done.
# If we actually restart, we get errors during the tests like
# (RuntimeError) could not lookup Ecto repo Pleroma.Repo because it was not started or
# it does not exist
# See tests in Pleroma.Config.TransferTaskTest
def handle_cast({:after_boot, :test}, state) do
Logger.debug("pleroma restarted after boot")
state = %{state | after_boot: true, rebooted: true}

View file

@ -13,7 +13,8 @@ def project do
def application do
[
mod: {Restarter, []}
mod: {Restarter, []},
extra_applications: [:logger]
]
end

View file

@ -0,0 +1,57 @@
{
"@context": [
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
{
"Hashtag": "as:Hashtag",
"sensitive": "as:sensitive",
"manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
"alsoKnownAs": {
"@id": "as:alsoKnownAs",
"@type": "@id"
},
"movedTo": {
"@id": "as:movedTo",
"@type": "@id"
},
"toot": "http://joinmastodon.org/ns#",
"featured": {
"@id": "toot:featured",
"@type": "@id"
},
"Emoji": "toot:Emoji",
"blurhash": "toot:blurhash",
"votersCount": "toot:votersCount",
"schema": "http://schema.org#",
"PropertyValue": "schema:PropertyValue",
"value": "schema:value",
"ostatus": "http://ostatus.org#",
"conversation": "ostatus:conversation"
}
],
"type": "Person",
"id": "https://mbp.example.com",
"following": "https://mbp.example.com/following",
"followers": "https://mbp.example.com/followers",
"featured": "https://mbp.example.com/featured",
"inbox": "https://mbp.example.com/inbox",
"outbox": "https://mbp.example.com/outbox",
"preferredUsername": "MBP",
"name": "MBP",
"summary": "wowee",
"endpoints": {
"sharedInbox": "https://mbp.example.com/inbox"
},
"url": "https://mbp.example.com/",
"manuallyApprovesFollowers": false,
"attachment": [],
"icon": {
"mediaType": "image/jpeg",
"type": "Image",
"url": "https://beta.4201337.xyz/static/denise.jpg"
},
"tag": [],
"alsoKnownAs": [
"example@elsewhere.com"
]
}

View file

@ -119,44 +119,87 @@ test "transfer config values with full subkey update" do
describe "pleroma restart" do
setup do
on_exit(fn -> Restarter.Pleroma.refresh() end)
on_exit(fn ->
Restarter.Pleroma.refresh()
# Restarter.Pleroma.refresh/0 is an asynchronous call.
# A GenServer will first finish the previous call before starting a new one.
# Here we do a synchronous call.
# That way we are sure that the previous call has finished before we continue.
# See https://stackoverflow.com/questions/51361856/how-to-use-task-await-with-genserver
Restarter.Pleroma.rebooted?()
end)
end
@tag :erratic
test "don't restart if no reboot time settings were changed" do
clear_config(:emoji)
insert(:config, key: :emoji, value: [groups: [a: 1, b: 2]])
refute String.contains?(
capture_log(fn -> TransferTask.start_link([]) end),
capture_log(fn ->
TransferTask.start_link([])
# TransferTask.start_link/1 is an asynchronous call.
# A GenServer will first finish the previous call before starting a new one.
# Here we do a synchronous call.
# That way we are sure that the previous call has finished before we continue.
Restarter.Pleroma.rebooted?()
end),
"pleroma restarted"
)
end
@tag :erratic
test "on reboot time key" do
clear_config([:pleroma, :rate_limit])
insert(:config, key: {:pleroma, :rate_limit}, value: [enabled: false])
assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted"
clear_config(:rate_limit)
insert(:config, key: :rate_limit, value: [enabled: false])
# Note that we don't actually restart Pleroma.
# See module Restarter.Pleroma
assert capture_log(fn ->
TransferTask.start_link([])
# TransferTask.start_link/1 is an asynchronous call.
# A GenServer will first finish the previous call before starting a new one.
# Here we do a synchronous call.
# That way we are sure that the previous call has finished before we continue.
Restarter.Pleroma.rebooted?()
end) =~ "pleroma restarted"
end
@tag :erratic
test "on reboot time subkey" do
clear_config(Pleroma.Captcha)
insert(:config, key: Pleroma.Captcha, value: [seconds_valid: 60])
assert capture_log(fn -> TransferTask.start_link([]) end) =~ "pleroma restarted"
# Note that we don't actually restart Pleroma.
# See module Restarter.Pleroma
assert capture_log(fn ->
TransferTask.start_link([])
# TransferTask.start_link/1 is an asynchronous call.
# A GenServer will first finish the previous call before starting a new one.
# Here we do a synchronous call.
# That way we are sure that the previous call has finished before we continue.
Restarter.Pleroma.rebooted?()
end) =~ "pleroma restarted"
end
@tag :erratic
test "don't restart pleroma on reboot time key and subkey if there is false flag" do
clear_config([:pleroma, :rate_limit])
clear_config(:rate_limit)
clear_config(Pleroma.Captcha)
insert(:config, key: {:pleroma, :rate_limit}, value: [enabled: false])
insert(:config, key: :rate_limit, value: [enabled: false])
insert(:config, key: Pleroma.Captcha, value: [seconds_valid: 60])
refute String.contains?(
capture_log(fn -> TransferTask.load_and_update_env([], false) end),
capture_log(fn ->
TransferTask.load_and_update_env([], false)
# TransferTask.start_link/1 is an asynchronous call.
# A GenServer will first finish the previous call before starting a new one.
# Here we do a synchronous call.
# That way we are sure that the previous call has finished before we continue.
Restarter.Pleroma.rebooted?()
end),
"pleroma restarted"
)
end

View file

@ -122,11 +122,11 @@ test "recreating an existing participations sets it to unread" do
end
test "it marks a participation as read" do
participation = insert(:participation, %{read: false})
participation = insert(:participation, %{updated_at: ~N[2017-07-17 17:09:58], read: false})
{:ok, updated_participation} = Participation.mark_as_read(participation)
assert updated_participation.read
assert updated_participation.updated_at == participation.updated_at
assert :gt = NaiveDateTime.compare(updated_participation.updated_at, participation.updated_at)
end
test "it marks a participation as unread" do

View file

@ -27,7 +27,7 @@ test "build user invitation email" do
token = %Pleroma.UserInviteToken{token: "test-token"}
email = UserEmail.user_invitation_email(user, token, "test@test.com", "Jonh")
assert email.from == {config[:name], config[:notify_email]}
assert email.subject == "Invitation to Pleroma"
assert email.subject == "Invitation to Akkoma"
assert email.to == [{"Jonh", "test@test.com"}]
assert email.html_body =~

View file

@ -9,12 +9,16 @@ defmodule Pleroma.Instances.InstanceTest do
alias Pleroma.Tests.ObanHelpers
alias Pleroma.Web.CommonAPI
use Pleroma.DataCase
use Pleroma.DataCase, async: true
import ExUnit.CaptureLog
import Pleroma.Factory
setup_all do: clear_config([:instance, :federation_reachability_timeout_days], 1)
setup_all do
clear_config([:instance, :federation_reachability_timeout_days], 1)
clear_config([:instances_nodeinfo, :enabled], true)
clear_config([:instances_favicons, :enabled], true)
end
describe "set_reachable/1" do
test "clears `unreachable_since` of existing matching Instance record having non-nil `unreachable_since`" do
@ -102,62 +106,220 @@ test "does NOT modify `unreachable_since` value of existing record in case it's
end
end
describe "get_or_update_favicon/1" do
test "Scrapes favicon URLs" do
Tesla.Mock.mock(fn %{url: "https://favicon.example.org/"} ->
%Tesla.Env{
status: 200,
body: ~s[<html><head><link rel="icon" href="/favicon.png"></head></html>]
}
describe "update_metadata/1" do
test "Scrapes favicon URLs and nodeinfo" do
Tesla.Mock.mock(fn
%{url: "https://favicon.example.org/"} ->
%Tesla.Env{
status: 200,
body: ~s[<html><head><link rel="icon" href="/favicon.png"></head></html>]
}
%{url: "https://favicon.example.org/.well-known/nodeinfo"} ->
%Tesla.Env{
status: 200,
body:
Jason.encode!(%{
links: [
%{
rel: "http://nodeinfo.diaspora.software/ns/schema/2.0",
href: "https://favicon.example.org/nodeinfo/2.0"
}
]
})
}
%{url: "https://favicon.example.org/nodeinfo/2.0"} ->
%Tesla.Env{
status: 200,
body: Jason.encode!(%{version: "2.0", software: %{name: "Akkoma"}})
}
end)
assert "https://favicon.example.org/favicon.png" ==
Instance.get_or_update_favicon(URI.parse("https://favicon.example.org/"))
assert {:ok, %Instance{host: "favicon.example.org"}} =
Instance.update_metadata(URI.parse("https://favicon.example.org/"))
{:ok, instance} = Instance.get_cached_by_url("https://favicon.example.org/")
assert instance.favicon == "https://favicon.example.org/favicon.png"
assert instance.nodeinfo == %{"version" => "2.0", "software" => %{"name" => "Akkoma"}}
end
test "Returns nil on too long favicon URLs" do
test "Does not retain favicons that are too long" do
long_favicon_url =
"https://Lorem.ipsum.dolor.sit.amet/consecteturadipiscingelit/Praesentpharetrapurusutaliquamtempus/Mauriseulaoreetarcu/atfacilisisorci/Nullamporttitor/nequesedfeugiatmollis/dolormagnaefficiturlorem/nonpretiumsapienorcieurisus/Nullamveleratsem/Maecenassedaccumsanexnam/favicon.png"
Tesla.Mock.mock(fn %{url: "https://long-favicon.example.org/"} ->
%Tesla.Env{
status: 200,
body:
~s[<html><head><link rel="icon" href="] <> long_favicon_url <> ~s["></head></html>]
}
Tesla.Mock.mock(fn
%{url: "https://long-favicon.example.org/"} ->
%Tesla.Env{
status: 200,
body:
~s[<html><head><link rel="icon" href="] <> long_favicon_url <> ~s["></head></html>]
}
%{url: "https://long-favicon.example.org/.well-known/nodeinfo"} ->
%Tesla.Env{
status: 200,
body:
Jason.encode!(%{
links: [
%{
rel: "http://nodeinfo.diaspora.software/ns/schema/2.0",
href: "https://long-favicon.example.org/nodeinfo/2.0"
}
]
})
}
%{url: "https://long-favicon.example.org/nodeinfo/2.0"} ->
%Tesla.Env{
status: 200,
body: Jason.encode!(%{version: "2.0", software: %{name: "Akkoma"}})
}
end)
assert capture_log(fn ->
assert nil ==
Instance.get_or_update_favicon(
URI.parse("https://long-favicon.example.org/")
)
end) =~
"Instance.get_or_update_favicon(\"long-favicon.example.org\") error: %Postgrex.Error{"
assert {:ok, %Instance{host: "long-favicon.example.org"}} =
Instance.update_metadata(URI.parse("https://long-favicon.example.org/"))
{:ok, instance} = Instance.get_cached_by_url("https://long-favicon.example.org/")
assert instance.favicon == nil
end
test "Handles not getting a favicon URL properly" do
Tesla.Mock.mock(fn %{url: "https://no-favicon.example.org/"} ->
%Tesla.Env{
status: 200,
body: ~s[<html><head><h1>I wil look down and whisper "GNO.."</h1></head></html>]
}
Tesla.Mock.mock(fn
%{url: "https://no-favicon.example.org/"} ->
%Tesla.Env{
status: 200,
body: ~s[<html><head><h1>I wil look down and whisper "GNO.."</h1></head></html>]
}
%{url: "https://no-favicon.example.org/.well-known/nodeinfo"} ->
%Tesla.Env{
status: 200,
body:
Jason.encode!(%{
links: [
%{
rel: "http://nodeinfo.diaspora.software/ns/schema/2.0",
href: "https://no-favicon.example.org/nodeinfo/2.0"
}
]
})
}
%{url: "https://no-favicon.example.org/nodeinfo/2.0"} ->
%Tesla.Env{
status: 200,
body: Jason.encode!(%{version: "2.0", software: %{name: "Akkoma"}})
}
end)
refute capture_log(fn ->
assert nil ==
Instance.get_or_update_favicon(
URI.parse("https://no-favicon.example.org/")
)
end) =~ "Instance.scrape_favicon(\"https://no-favicon.example.org/\") error: "
assert {:ok, %Instance{host: "no-favicon.example.org"}} =
Instance.update_metadata(URI.parse("https://no-favicon.example.org/"))
end) =~ "Instance.update_metadata(\"https://no-favicon.example.org/\") error: "
end
test "Doesn't scrapes unreachable instances" do
test "Doesn't scrape unreachable instances" do
instance = insert(:instance, unreachable_since: Instances.reachability_datetime_threshold())
url = "https://" <> instance.host
assert capture_log(fn -> assert nil == Instance.get_or_update_favicon(URI.parse(url)) end) =~
"Instance.scrape_favicon(\"#{url}\") ignored unreachable host"
assert {:discard, :unreachable} == Instance.update_metadata(URI.parse(url))
end
test "doesn't continue scraping nodeinfo if we can't find a link" do
Tesla.Mock.mock(fn
%{url: "https://bad-nodeinfo.example.org/"} ->
%Tesla.Env{
status: 200,
body: ~s[<html><head><h1>I wil look down and whisper "GNO.."</h1></head></html>]
}
%{url: "https://bad-nodeinfo.example.org/.well-known/nodeinfo"} ->
%Tesla.Env{
status: 200,
body: "oepsie woepsie de nodeinfo is kapotie uwu"
}
end)
assert {:ok, %Instance{host: "bad-nodeinfo.example.org"}} =
Instance.update_metadata(URI.parse("https://bad-nodeinfo.example.org/"))
{:ok, instance} = Instance.get_cached_by_url("https://bad-nodeinfo.example.org/")
assert instance.nodeinfo == nil
end
test "doesn't store bad json in the nodeinfo" do
Tesla.Mock.mock(fn
%{url: "https://bad-nodeinfo.example.org/"} ->
%Tesla.Env{
status: 200,
body: ~s[<html><head><h1>I wil look down and whisper "GNO.."</h1></head></html>]
}
%{url: "https://bad-nodeinfo.example.org/.well-known/nodeinfo"} ->
%Tesla.Env{
status: 200,
body:
Jason.encode!(%{
links: [
%{
rel: "http://nodeinfo.diaspora.software/ns/schema/2.0",
href: "https://bad-nodeinfo.example.org/nodeinfo/2.0"
}
]
})
}
%{url: "https://bad-nodeinfo.example.org/nodeinfo/2.0"} ->
%Tesla.Env{
status: 200,
body: "oepsie woepsie de json might be bad uwu"
}
end)
assert {:ok, %Instance{host: "bad-nodeinfo.example.org"}} =
Instance.update_metadata(URI.parse("https://bad-nodeinfo.example.org/"))
{:ok, instance} = Instance.get_cached_by_url("https://bad-nodeinfo.example.org/")
assert instance.nodeinfo == nil
end
test "doesn't store incredibly long json nodeinfo" do
too_long = String.duplicate("a", 50_000)
Tesla.Mock.mock(fn
%{url: "https://bad-nodeinfo.example.org/"} ->
%Tesla.Env{
status: 200,
body: ~s[<html><head><h1>I wil look down and whisper "GNO.."</h1></head></html>]
}
%{url: "https://bad-nodeinfo.example.org/.well-known/nodeinfo"} ->
%Tesla.Env{
status: 200,
body:
Jason.encode!(%{
links: [
%{
rel: "http://nodeinfo.diaspora.software/ns/schema/2.0",
href: "https://bad-nodeinfo.example.org/nodeinfo/2.0"
}
]
})
}
%{url: "https://bad-nodeinfo.example.org/nodeinfo/2.0"} ->
%Tesla.Env{
status: 200,
body: Jason.encode!(%{version: "2.0", software: %{name: too_long}})
}
end)
assert {:ok, %Instance{host: "bad-nodeinfo.example.org"}} =
Instance.update_metadata(URI.parse("https://bad-nodeinfo.example.org/"))
{:ok, instance} = Instance.get_cached_by_url("https://bad-nodeinfo.example.org/")
assert instance.nodeinfo == nil
end
end

View file

@ -6,6 +6,7 @@ defmodule Pleroma.Object.FetcherTest do
use Pleroma.DataCase
alias Pleroma.Activity
alias Pleroma.Instances
alias Pleroma.Object
alias Pleroma.Object.Fetcher
@ -159,6 +160,17 @@ test "it does not fetch a spoofed object uploaded on an instance as an attachmen
"https://patch.cx/media/03ca3c8b4ac3ddd08bf0f84be7885f2f88de0f709112131a22d83650819e36c2.json"
)
end
test "it resets instance reachability on successful fetch" do
id = "http://mastodon.example.org/@admin/99541947525187367"
Instances.set_consistently_unreachable(id)
refute Instances.reachable?(id)
{:ok, _object} =
Fetcher.fetch_object_from_id("http://mastodon.example.org/@admin/99541947525187367")
assert Instances.reachable?(id)
end
end
describe "implementation quirks" do

View file

@ -5,8 +5,9 @@
defmodule Pleroma.UserRelationshipTest do
alias Pleroma.UserRelationship
use Pleroma.DataCase, async: true
use Pleroma.DataCase, async: false
import Mock
import Pleroma.Factory
describe "*_exists?/2" do
@ -79,7 +80,12 @@ test "creates user relationship record if it doesn't exist", %{users: [user1, us
end
test "if record already exists, returns it", %{users: [user1, user2]} do
user_block = UserRelationship.create_block(user1, user2)
user_block =
with_mock NaiveDateTime, [:passthrough], utc_now: fn -> ~N[2017-03-17 17:09:58] end do
{:ok, %{inserted_at: ~N[2017-03-17 17:09:58]}} =
UserRelationship.create_block(user1, user2)
end
assert user_block == UserRelationship.create_block(user1, user2)
end
end

View file

@ -444,17 +444,20 @@ test "it sends a welcome message if it is set" do
end
setup do:
clear_config(:mrf_simple,
media_removal: [],
media_nsfw: [],
federated_timeline_removal: [],
report_removal: [],
reject: [],
followers_only: [],
accept: [],
avatar_removal: [],
banner_removal: [],
reject_deletes: []
clear_config(
[:mrf_simple],
%{
media_removal: [],
media_nsfw: [],
federated_timeline_removal: [],
report_removal: [],
reject: [],
followers_only: [],
accept: [],
avatar_removal: [],
banner_removal: [],
reject_deletes: []
}
)
setup do:
@ -965,6 +968,25 @@ test "it returns the old user if stale, but unfetchable" do
assert user.last_refreshed_at == orig_user.last_refreshed_at
end
test "it doesn't fail on invalid alsoKnownAs entries" do
Tesla.Mock.mock(fn
%{url: "https://mbp.example.com/"} ->
%Tesla.Env{
status: 200,
body:
"test/fixtures/microblogpub/user_with_invalid_also_known_as.json"
|> File.read!(),
headers: [{"content-type", "application/activity+json"}]
}
_ ->
%Tesla.Env{status: 404}
end)
assert {:ok, %User{also_known_as: []}} =
User.get_or_fetch_by_ap_id("https://mbp.example.com/")
end
end
test "returns an ap_id for a user" do
@ -1324,7 +1346,7 @@ test "does not block domain with same end if wildcard added" do
collateral_user =
insert(:user, %{ap_id: "https://another-awful-and-rude-instance.com/user/bully"})
{:ok, user} = User.block_domain(user, "*.awful-and-rude-instance.com")
{:ok, user} = User.block_domain(user, "awful-and-rude-instance.com")
refute User.blocks?(user, collateral_user)
end
@ -1342,7 +1364,7 @@ test "blocks domain with wildcard for subdomain" do
user_domain = insert(:user, %{ap_id: "https://awful-and-rude-instance.com/user/bully"})
{:ok, user} = User.block_domain(user, "*.awful-and-rude-instance.com")
{:ok, user} = User.block_domain(user, "awful-and-rude-instance.com")
assert User.blocks?(user, user_from_subdomain)
assert User.blocks?(user, user_with_two_subdomains)

View file

@ -9,8 +9,8 @@ defmodule Pleroma.Web.ActivityPub.MRFTest do
test "subdomains_regex/1" do
assert MRF.subdomains_regex(["unsafe.tld", "*.unsafe.tld"]) == [
~r/^unsafe.tld$/i,
~r/^(.*\.)*unsafe.tld$/i
~r/^(.+\.)?unsafe\.tld$/i,
~r/^(.+\.)?unsafe\.tld$/i
]
end
@ -18,7 +18,7 @@ test "subdomains_regex/1" do
test "common domains" do
regexes = MRF.subdomains_regex(["unsafe.tld", "unsafe2.tld"])
assert regexes == [~r/^unsafe.tld$/i, ~r/^unsafe2.tld$/i]
assert regexes == [~r/^(.+\.)?unsafe\.tld$/i, ~r/^(.+\.)?unsafe2\.tld$/i]
assert MRF.subdomain_match?(regexes, "unsafe.tld")
assert MRF.subdomain_match?(regexes, "unsafe2.tld")
@ -27,9 +27,9 @@ test "common domains" do
end
test "wildcard domains with one subdomain" do
regexes = MRF.subdomains_regex(["*.unsafe.tld"])
regexes = MRF.subdomains_regex(["unsafe.tld"])
assert regexes == [~r/^(.*\.)*unsafe.tld$/i]
assert regexes == [~r/^(.+\.)?unsafe\.tld$/i]
assert MRF.subdomain_match?(regexes, "unsafe.tld")
assert MRF.subdomain_match?(regexes, "sub.unsafe.tld")
@ -38,9 +38,9 @@ test "wildcard domains with one subdomain" do
end
test "wildcard domains with two subdomains" do
regexes = MRF.subdomains_regex(["*.unsafe.tld"])
regexes = MRF.subdomains_regex(["unsafe.tld"])
assert regexes == [~r/^(.*\.)*unsafe.tld$/i]
assert regexes == [~r/^(.+\.)?unsafe\.tld$/i]
assert MRF.subdomain_match?(regexes, "unsafe.tld")
assert MRF.subdomain_match?(regexes, "sub.sub.unsafe.tld")
@ -51,7 +51,7 @@ test "wildcard domains with two subdomains" do
test "matches are case-insensitive" do
regexes = MRF.subdomains_regex(["UnSafe.TLD", "UnSAFE2.Tld"])
assert regexes == [~r/^UnSafe.TLD$/i, ~r/^UnSAFE2.Tld$/i]
assert regexes == [~r/^(.+\.)?UnSafe\.TLD$/i, ~r/^(.+\.)?UnSAFE2\.Tld$/i]
assert MRF.subdomain_match?(regexes, "UNSAFE.TLD")
assert MRF.subdomain_match?(regexes, "UNSAFE2.TLD")

View file

@ -21,6 +21,35 @@ defmodule Pleroma.Web.ActivityPub.SideEffectsTest do
import Mock
import Pleroma.Factory
describe "handle" do
test "it queues a fetch of instance information" do
author = insert(:user, local: false, ap_id: "https://wowee.example.com/users/1")
recipient = insert(:user, local: true)
{:ok, note_data, _meta} =
Builder.note(%Pleroma.Web.CommonAPI.ActivityDraft{
user: author,
to: [recipient.ap_id],
mentions: [recipient],
content_html: "hey",
extra: %{"id" => "https://wowee.example.com/notes/1"}
})
{:ok, create_activity_data, _meta} =
Builder.create(author, note_data["id"], [recipient.ap_id])
{:ok, create_activity, _meta} = ActivityPub.persist(create_activity_data, local: false)
{:ok, _create_activity, _meta} =
SideEffects.handle(create_activity, local: false, object_data: note_data)
assert_enqueued(
worker: Pleroma.Workers.NodeInfoFetcherWorker,
args: %{"op" => "process", "source_url" => "https://wowee.example.com/users/1"}
)
end
end
describe "handle_after_transaction" do
test "it streams out notifications and streams" do
author = insert(:user, local: true)

View file

@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.AdminAPI.ReportViewTest do
use Pleroma.DataCase, async: true
use Pleroma.DataCase, async: false
import Pleroma.Factory
@ -45,7 +45,7 @@ test "renders a report" do
ReportView.render("show.json", Report.extract_report_info(activity))
|> Map.delete(:created_at)
assert result == expected
assert Jason.encode!(result) == Jason.encode!(expected)
end
test "includes reported statuses" do

View file

@ -3,9 +3,10 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.MastodonAPI.FilterControllerTest do
use Pleroma.Web.ConnCase, async: true
use Pleroma.Web.ConnCase, async: false
use Oban.Testing, repo: Pleroma.Repo
import Mock
import Pleroma.Factory
alias Pleroma.Filter
@ -53,25 +54,20 @@ test "a filter with expires_in", %{conn: conn, user: user} do
in_seconds = 600
response =
conn
|> put_req_header("content-type", "application/json")
|> post("/api/v1/filters", %{
"phrase" => "knights",
context: ["home"],
expires_in: in_seconds
})
|> json_response_and_validate_schema(200)
with_mock NaiveDateTime, [:passthrough], utc_now: fn -> ~N[2017-03-17 17:09:58] end do
conn
|> put_req_header("content-type", "application/json")
|> post("/api/v1/filters", %{
"phrase" => "knights",
context: ["home"],
expires_in: in_seconds
})
|> json_response_and_validate_schema(200)
end
assert response["irreversible"] == false
expires_at =
NaiveDateTime.utc_now()
|> NaiveDateTime.add(in_seconds)
assert NaiveDateTime.diff(
NaiveDateTime.from_iso8601!(response["expires_at"]),
expires_at
) < 5
assert response["expires_at"] == "2017-03-17T17:19:58.000Z"
filter = Filter.get(response["id"], user)
@@ -183,26 +179,21 @@ test "with adding expires_at", %{conn: conn, user: user} do
in_seconds = 600
response =
conn
|> put_req_header("content-type", "application/json")
|> put("/api/v1/filters/#{filter.filter_id}", %{
phrase: "nii",
context: ["public"],
expires_in: in_seconds,
irreversible: true
})
|> json_response_and_validate_schema(200)
with_mock NaiveDateTime, [:passthrough], utc_now: fn -> ~N[2017-03-17 17:09:58] end do
conn
|> put_req_header("content-type", "application/json")
|> put("/api/v1/filters/#{filter.filter_id}", %{
phrase: "nii",
context: ["public"],
expires_in: in_seconds,
irreversible: true
})
|> json_response_and_validate_schema(200)
end
assert response["irreversible"] == true
expected_time =
NaiveDateTime.utc_now()
|> NaiveDateTime.add(in_seconds)
assert NaiveDateTime.diff(
NaiveDateTime.from_iso8601!(response["expires_at"]),
expected_time
) < 5
assert response["expires_at"] == "2017-03-17T17:19:58.000Z"
filter = Filter.get(response["id"], user)

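A short sketch of the time-freezing pattern used in the rewritten assertions above (it relies on import Mock, which the test module already pulls in): pinning NaiveDateTime.utc_now/0 with a passthrough mock lets the test check an exact expires_at value instead of tolerating several seconds of drift.

with_mock NaiveDateTime, [:passthrough], utc_now: fn -> ~N[2017-03-17 17:09:58] end do
  # everything run in this block sees the frozen clock;
  # all other NaiveDateTime functions pass through unchanged
  assert NaiveDateTime.utc_now() == ~N[2017-03-17 17:09:58]
end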
@@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
use Pleroma.DataCase
use Pleroma.DataCase, async: false
alias Pleroma.User
alias Pleroma.UserRelationship
@@ -12,6 +12,7 @@ defmodule Pleroma.Web.MastodonAPI.AccountViewTest do
import Pleroma.Factory
import Tesla.Mock
import Mock
setup do
mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
@@ -25,6 +26,7 @@ test "Represent a user account" do
user =
insert(:user, %{
ap_id: "https://example.com/users/chikichikibanban",
follower_count: 3,
note_count: 5,
background: background_image,
@@ -38,6 +40,8 @@ test "Represent a user account" do
also_known_as: ["https://shitposter.zone/users/shp"]
})
insert(:instance, %{host: "example.com", nodeinfo: %{version: "2.1"}})
expected = %{
id: to_string(user.id),
username: "shp",
@@ -50,6 +54,15 @@ test "Represent a user account" do
statuses_count: 5,
note: "<span>valid html</span>. a<br/>b<br/>c<br/>d<br/>f &#39;&amp;&lt;&gt;&quot;",
url: user.ap_id,
akkoma: %{
instance: %{
name: "example.com",
nodeinfo: %{
"version" => "2.1"
},
favicon: nil
}
},
avatar: "http://localhost:4001/images/avi.png",
avatar_static: "http://localhost:4001/images/avi.png",
header: "http://localhost:4001/images/banner.png",
@@ -98,9 +111,57 @@ test "Represent a user account" do
assert expected == AccountView.render("show.json", %{user: user, skip_visibility_check: true})
end
describe "nodeinfo" do
setup do
[
user: insert(:user, ap_id: "https://somewhere.example.com/users/chikichikibanban"),
instance:
insert(:instance, %{
host: "somewhere.example.com",
favicon: "https://example.com/favicon.ico"
})
]
end
test "is embedded in the account view", %{user: user} do
assert %{
akkoma: %{
instance: %{
name: "somewhere.example.com",
nodeinfo: %{
"version" => "2.0"
},
favicon: "https://example.com/favicon.ico"
}
}
} = AccountView.render("show.json", %{user: user, skip_visibility_check: true})
end
test "uses local nodeinfo for local users" do
user = insert(:user)
assert %{
akkoma: %{
instance: %{
name: "localhost",
nodeinfo: %{
software: %{
name: "akkoma"
}
}
}
}
} = AccountView.render("show.json", %{user: user, skip_visibility_check: true})
end
end
describe "favicon" do
setup do
[user: insert(:user)]
[
user: insert(:user, ap_id: "https://example.com/users/chikichikibanban"),
instance:
insert(:instance, %{host: "example.com", favicon: "https://example.com/favicon.ico"})
]
end
test "is parsed when :instance_favicons is enabled", %{user: user} do
@@ -108,13 +169,14 @@ test "is parsed when :instance_favicons is enabled", %{user: user} do
assert %{
pleroma: %{
favicon:
"https://shitposter.club/plugins/Qvitter/img/gnusocial-favicons/favicon-16x16.png"
favicon: "https://example.com/favicon.ico"
}
} = AccountView.render("show.json", %{user: user, skip_visibility_check: true})
end
test "is nil when :instances_favicons is disabled", %{user: user} do
test "is nil when we have no instance", %{user: user} do
user = %{user | ap_id: "https://wowee.example.com/users/2"}
assert %{pleroma: %{favicon: nil}} =
AccountView.render("show.json", %{user: user, skip_visibility_check: true})
end
@@ -176,11 +238,18 @@ test "Represent a Service(bot) account" do
},
fqn: "shp@shitposter.club",
last_status_at: nil,
akkoma: %{
instance: %{
name: "localhost",
favicon: "http://localhost:4001/favicon.png",
nodeinfo: %{version: "2.0"}
}
},
pleroma: %{
ap_id: user.ap_id,
also_known_as: [],
background_image: nil,
favicon: nil,
favicon: "http://localhost:4001/favicon.png",
is_confirmed: true,
tags: [],
is_admin: false,
@@ -196,7 +265,13 @@ test "Represent a Service(bot) account" do
}
}
assert expected == AccountView.render("show.json", %{user: user, skip_visibility_check: true})
with_mock(
Pleroma.Web.Nodeinfo.NodeinfoController,
raw_nodeinfo: fn -> %{version: "2.0"} end
) do
assert expected ==
AccountView.render("show.json", %{user: user, skip_visibility_check: true})
end
end
test "Represent a Funkwhale channel" do
@@ -272,6 +347,7 @@ defp test_relationship_rendering(user, other_user, expected_result) do
subscribing: false,
notifying: false,
requested: false,
requested_by: false,
domain_blocking: false,
showing_reblogs: true,
endorsed: false,
@@ -357,6 +433,24 @@ test "represent a relationship for the user with a pending follow request" do
end
end
test "represent a relationship for a user with an inbound pending follow request" do
follower = insert(:user)
followed = insert(:user, is_locked: true)
{:ok, follower, followed, _} = CommonAPI.follow(follower, followed)
follower = User.get_cached_by_id(follower.id)
followed = User.get_cached_by_id(followed.id)
expected =
Map.merge(
@blank_response,
%{requested_by: true, followed_by: false, id: to_string(follower.id)}
)
test_relationship_rendering(followed, follower, expected)
end
test "returns the settings store if the requesting user is the represented user and it's requested specifically" do
user = insert(:user, pleroma_settings_store: %{fe: "test"})
@@ -578,6 +672,8 @@ test "uses mediaproxy urls when it's enabled (regardless of media preview proxy
emoji: %{"joker_smile" => "https://evil.website/society.png"}
)
insert(:instance, %{host: "localhost", favicon: "https://evil.website/favicon.png"})
with media_preview_enabled <- [false, true] do
clear_config([:media_preview_proxy, :enabled], media_preview_enabled)
@@ -586,6 +682,9 @@ test "uses mediaproxy urls when it's enabled (regardless of media preview proxy
{key, url} when key in [:avatar, :avatar_static, :header, :header_static] ->
String.starts_with?(url, Pleroma.Web.Endpoint.url())
{:akkoma, %{instance: %{favicon: favicon_url}}} ->
String.starts_with?(favicon_url, Pleroma.Web.Endpoint.url())
{:emojis, emojis} ->
Enum.all?(emojis, fn %{url: url, static_url: static_url} ->
String.starts_with?(url, Pleroma.Web.Endpoint.url()) &&
@@ -598,4 +697,10 @@ test "uses mediaproxy urls when it's enabled (regardless of media preview proxy
|> assert()
end
end
test "returns nil in the instance field when no instance is held locally" do
user = insert(:user, ap_id: "https://example.com/users/1")
view = AccountView.render("show.json", %{user: user, skip_visibility_check: true})
assert view[:akkoma][:instance] == nil
end
end

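A sketch pieced together from the assertions above (the remote user and its ap_id are illustrative): account views now embed whatever instance record is held locally under the :akkoma key, and return nil for the instance when no record exists.

insert(:instance, %{host: "example.com", nodeinfo: %{version: "2.1"}})
user = insert(:user, local: false, ap_id: "https://example.com/users/someone")
view = AccountView.render("show.json", %{user: user, skip_visibility_check: true})
view[:akkoma][:instance]
# => %{name: "example.com", nodeinfo: %{"version" => "2.1"}, favicon: nil}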
@@ -71,7 +71,7 @@ test "creates a new user after successful LDAP authorization" do
equalityMatch: fn _type, _value -> :ok end,
wholeSubtree: fn -> :ok end,
search: fn _connection, _options ->
{:ok, {:eldap_search_result, [{:eldap_entry, '', []}], []}}
{:ok, {:eldap_search_result, [{:eldap_entry, '', []}], [], []}}
end,
close: fn _connection ->
send(self(), :close_connection)

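A hedged note on the extra empty list in the mocked result above (this is an assumption about the :eldap dependency, not something stated in the diff): recent Erlang/OTP releases return a four-element :eldap_search_result tuple, so the stub is widened to keep the authenticator's pattern match working.

# shape only; the variable names are illustrative
{:eldap_search_result, entries, referrals, controls} = search_result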
@@ -59,9 +59,9 @@ test "it sends `report-to` & `report-uri` CSP response headers", %{conn: conn} do
assert csp =~ ~r|report-uri https://endpoint.com;report-to csp-endpoint;|
[reply_to] = Conn.get_resp_header(conn, "reply-to")
[report_to] = Conn.get_resp_header(conn, "report-to")
assert reply_to ==
assert report_to ==
"{\"endpoints\":[{\"url\":\"https://endpoint.com\"}],\"group\":\"csp-endpoint\",\"max-age\":10886400}"
end
@@ -100,12 +100,14 @@ test "media_proxy with base_url", %{conn: conn} do
url = "https://example.com"
clear_config([:media_proxy, :base_url], url)
assert_media_img_src(conn, url)
assert_connect_src(conn, url)
end
test "upload with base url", %{conn: conn} do
url = "https://example2.com"
clear_config([Pleroma.Upload, :base_url], url)
assert_media_img_src(conn, url)
assert_connect_src(conn, url)
end
test "with S3 public endpoint", %{conn: conn} do
@@ -138,6 +140,12 @@ defp assert_media_img_src(conn, url) do
assert csp =~ "img-src 'self' data: blob: #{url};"
end
defp assert_connect_src(conn, url) do
conn = get(conn, "/api/v1/instance")
[csp] = Conn.get_resp_header(conn, "content-security-policy")
assert csp =~ ~r/connect-src 'self' blob: [^;]+ #{url}/
end
test "it does not send CSP headers when disabled", %{conn: conn} do
clear_config([:http_security, :enabled], false)

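An illustrative fragment of what the new assert_connect_src/2 helper checks (the exact header value depends on endpoint configuration; this is a sketch, not the literal output): when a media proxy or upload base URL is configured, that origin must now appear in the connect-src directive as well as in img-src.

# e.g. part of the content-security-policy response header:
# connect-src 'self' blob: ... https://example.com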
@@ -48,38 +48,42 @@ test "it is enabled if remote_ip_found flag doesn't exist" do
refute RateLimiter.disabled?(build_conn())
end
@tag :erratic
test "it restricts based on config values" do
limiter_name = :test_plug_opts
scale = 80
limit = 5
clear_config([Pleroma.Web.Endpoint, :http, :ip], {8, 8, 8, 8})
clear_config([Pleroma.Web.Endpoint, :http, :ip], {127, 0, 0, 1})
clear_config([:rate_limit, limiter_name], {scale, limit})
plug_opts = RateLimiter.init(name: limiter_name)
conn = build_conn(:get, "/")
for i <- 1..5 do
conn = RateLimiter.call(conn, plug_opts)
assert {^i, _} = RateLimiter.inspect_bucket(conn, limiter_name, plug_opts)
Process.sleep(10)
for _ <- 1..5 do
conn_limited = RateLimiter.call(conn, plug_opts)
refute conn_limited.status == Conn.Status.code(:too_many_requests)
refute conn_limited.resp_body
refute conn_limited.halted
end
conn = RateLimiter.call(conn, plug_opts)
assert %{"error" => "Throttled"} = ConnTest.json_response(conn, :too_many_requests)
assert conn.halted
conn_limited = RateLimiter.call(conn, plug_opts)
assert %{"error" => "Throttled"} = ConnTest.json_response(conn_limited, :too_many_requests)
assert conn_limited.halted
Process.sleep(50)
expire_ttl(conn, limiter_name)
conn = build_conn(:get, "/")
for _ <- 1..5 do
conn_limited = RateLimiter.call(conn, plug_opts)
conn = RateLimiter.call(conn, plug_opts)
assert {1, 4} = RateLimiter.inspect_bucket(conn, limiter_name, plug_opts)
refute conn_limited.status == Conn.Status.code(:too_many_requests)
refute conn_limited.resp_body
refute conn_limited.halted
end
refute conn.status == Conn.Status.code(:too_many_requests)
refute conn.resp_body
refute conn.halted
conn_limited = RateLimiter.call(conn, plug_opts)
assert %{"error" => "Throttled"} = ConnTest.json_response(conn_limited, :too_many_requests)
assert conn_limited.halted
end
describe "options" do
@@ -263,4 +267,12 @@ test "doesn't crash due to a race condition when multiple requests are made at t
refute {:err, :not_found} == RateLimiter.inspect_bucket(conn, limiter_name, opts)
end
def expire_ttl(%{remote_ip: remote_ip} = _conn, bucket_name_root) do
bucket_name = "anon:#{bucket_name_root}" |> String.to_atom()
key_name = "ip::#{remote_ip |> Tuple.to_list() |> Enum.join(".")}"
{:ok, bucket_value} = Cachex.get(bucket_name, key_name)
Cachex.put(bucket_name, key_name, bucket_value, ttl: -1)
end
end

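A brief sketch of how the new expire_ttl/2 helper fits the test flow above: instead of sleeping past the rate-limit window, the test force-expires the caller's anonymous bucket in Cachex and can immediately issue requests against a fresh window.

# after exhausting the limit:
expire_ttl(conn, :test_plug_opts)
conn = build_conn(:get, "/")
# the next RateLimiter.call/2 starts counting from an empty bucket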
@@ -40,4 +40,15 @@ test "sends Content-Disposition header when name param is set", %{
&(&1 == {"content-disposition", "filename=\"\\\"cofe\\\".gif\""})
)
end
test "removes control characters from the Content-Disposition header", %{
attachment_url: attachment_url
} do
conn = get(build_conn(), attachment_url <> "?name=\"cofe\".gif\\r\\n")
assert Enum.any?(
conn.resp_headers,
&(&1 == {"content-disposition", "filename=\"\\\"cofe\\\".gif\""})
)
end
end

@@ -200,6 +200,21 @@ test "renders title and body for pleroma:emoji_reaction activity" do
"New Reaction"
end
test "renders title and body for update activity" do
user = insert(:user)
{:ok, activity} = CommonAPI.post(user, %{status: "lorem ipsum"})
{:ok, activity} = CommonAPI.update(user, activity, %{status: "edited status"})
object = Object.normalize(activity, fetch: false)
assert Impl.format_body(%{activity: activity, type: "update"}, user, object) ==
"@#{user.nickname} edited a status"
assert Impl.format_title(%{activity: activity, type: "update"}) ==
"New Update"
end
test "renders title for create activity with direct visibility" do
user = insert(:user, nickname: "Bob")

@@ -3,7 +3,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.StreamerTest do
use Pleroma.DataCase
use Pleroma.DataCase, async: false
import Pleroma.Factory

@@ -0,0 +1,52 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Workers.PublisherWorkerTest do
use Pleroma.DataCase, async: true
use Oban.Testing, repo: Pleroma.Repo
import Pleroma.Factory
alias Pleroma.Object
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Builder
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.Federator
describe "Oban job priority:" do
setup do
user = insert(:user)
{:ok, post} = CommonAPI.post(user, %{status: "Regrettable post"})
object = Object.normalize(post, fetch: false)
{:ok, delete_data, _meta} = Builder.delete(user, object.data["id"])
{:ok, delete, _meta} = ActivityPub.persist(delete_data, local: true)
%{
post: post,
delete: delete
}
end
test "Deletions are lower priority", %{delete: delete} do
assert {:ok, %Oban.Job{priority: 3}} = Federator.publish(delete)
end
test "Creates are normal priority", %{post: post} do
assert {:ok, %Oban.Job{priority: 0}} = Federator.publish(post)
end
end
describe "Oban job timeout" do
test "should have a timeout" do
clear_config([:workers, :timeout, :federator_outgoing], :timer.minutes(2))
assert Pleroma.Workers.PublisherWorker.timeout(nil) == :timer.minutes(2)
end
test "should use a default timeout if none specified" do
clear_config([:workers, :timeout, :federator_outgoing])
assert Pleroma.Workers.PublisherWorker.timeout(nil) == :timer.seconds(10)
end
end
end

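The timeout tests above exercise an Oban timeout/1 callback on the worker. A hypothetical sketch of what such a callback could look like, given only the configuration keys used in the tests (this is an assumption for illustration, not the actual implementation):

def timeout(_job) do
  Pleroma.Config.get([:workers, :timeout, :federator_outgoing], :timer.seconds(10))
end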
@@ -56,4 +56,9 @@ test "error if actiivity was not found" do
assert {:error, :activity_not_found} =
perform_job(Pleroma.Workers.PurgeExpiredActivity, %{activity_id: "some_if"})
end
test "has a timeout" do
clear_config([:workers, :timeout, :activity_expiration], 50)
assert Pleroma.Workers.PurgeExpiredActivity.timeout(%Oban.Job{}) == 50
end
end

@@ -49,4 +49,9 @@ test "error message for non-existent scheduled activity" do
ScheduledActivityWorker.perform(%Oban.Job{args: %{"activity_id" => 42}})
end) =~ "Couldn't find scheduled activity: 42"
end
test "has a timeout" do
clear_config([:workers, :timeout, :scheduled_activities], :timer.minutes(5))
assert ScheduledActivityWorker.timeout(nil) == :timer.minutes(5)
end
end

@@ -36,6 +36,15 @@ def conversation_factory do
}
end
def instance_factory(attrs \\ %{}) do
%Pleroma.Instances.Instance{
host: attrs[:domain] || "example.com",
nodeinfo: %{version: "2.0", openRegistrations: true},
unreachable_since: nil
}
|> Map.merge(attrs)
end
def user_factory(attrs \\ %{}) do
pem = Enum.random(@rsa_keys)
@@ -522,13 +531,6 @@ def oauth_app_factory do
}
end
def instance_factory do
%Pleroma.Instances.Instance{
host: "domain.com",
unreachable_since: nil
}
end
def oauth_token_factory(attrs \\ %{}) do
scopes = Map.get(attrs, :scopes, ["read"])
oauth_app = Map.get_lazy(attrs, :app, fn -> insert(:oauth_app, scopes: scopes) end)