forked from AkkomaGang/akkoma
Merge develop
commit 76c3e290fc
91 changed files with 2854 additions and 1120 deletions

.gitignore (vendored, 5 changes)
@@ -38,7 +38,12 @@ erl_crash.dump
 
 # Prevent committing docs files
 /priv/static/doc/*
+docs/generated_config.md
 
 # Code test coverage
 /cover
 /Elixir.*.coverdata
+
+.idea
+pleroma.iml
+
@@ -30,6 +30,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - AdminAPI: Add "godmode" while fetching user statuses (i.e. admin can see private statuses)
 - Improve digest email template
 - Pagination: (optional) return `total` alongside with `items` when paginating
+- Replaced [pleroma_job_queue](https://git.pleroma.social/pleroma/pleroma_job_queue) and `Pleroma.Web.Federator.RetryQueue` with [Oban](https://github.com/sorentwo/oban) (see [`docs/config.md`](docs/config.md) on migrating customized worker / retry settings)
+- Introduced [quantum](https://github.com/quantum-elixir/quantum-core) job scheduler
 
 ### Fixed
 - Following from Osada
@@ -51,6 +51,24 @@
   telemetry_event: [Pleroma.Repo.Instrumenter],
   migration_lock: nil
 
+scheduled_jobs =
+  with digest_config <- Application.get_env(:pleroma, :email_notifications)[:digest],
+       true <- digest_config[:active] do
+    [{digest_config[:schedule], {Pleroma.Daemons.DigestEmailDaemon, :perform, []}}]
+  else
+    _ -> []
+  end
+
+scheduled_jobs =
+  scheduled_jobs ++
+    [{"0 */6 * * * *", {Pleroma.Web.Websub, :refresh_subscriptions, []}}]
+
+config :pleroma, Pleroma.Scheduler,
+  global: true,
+  overlap: true,
+  timezone: :utc,
+  jobs: scheduled_jobs
+
 config :pleroma, Pleroma.Captcha,
   enabled: false,
   seconds_valid: 60,
@@ -258,7 +276,7 @@
   max_account_fields: 10,
   max_remote_account_fields: 20,
   account_field_name_length: 512,
-  account_field_value_length: 512,
+  account_field_value_length: 2048,
   external_user_synchronization: true
 
 config :pleroma, :markup,
@@ -313,6 +331,10 @@
   follow_handshake_timeout: 500,
   sign_object_fetches: true
 
+config :pleroma, :streamer,
+  workers: 3,
+  overflow_workers: 2
+
 config :pleroma, :user, deny_follow_blocked: true
 
 config :pleroma, :mrf_normalize_markup, scrub_policy: Pleroma.HTML.Scrubber.Default
@@ -451,13 +473,11 @@
     "web"
   ]
 
-config :pleroma, Pleroma.Web.Federator.RetryQueue,
-  enabled: false,
-  max_jobs: 20,
-  initial_timeout: 30,
-  max_retries: 5
-
-config :pleroma_job_queue, :queues,
+config :pleroma, Oban,
+  repo: Pleroma.Repo,
+  verbose: false,
+  prune: {:maxlen, 1500},
+  queues: [
   activity_expiration: 10,
   federator_incoming: 50,
   federator_outgoing: 50,
@@ -466,6 +486,13 @@
   transmogrifier: 20,
   scheduled_activities: 10,
   background: 5
+  ]
+
+config :pleroma, :workers,
+  retries: [
+    federator_incoming: 5,
+    federator_outgoing: 5
+  ]
 
 config :pleroma, :fetch_initial_posts,
   enabled: false,
@@ -878,9 +878,9 @@
       %{
        key: :account_field_value_length,
        type: :integer,
-       description: "An account field value maximum length (default: 512)",
+       description: "An account field value maximum length (default: 2048)",
        suggestions: [
-         512
+         2048
        ]
      },
      %{
@@ -1778,44 +1778,73 @@
     group: :pleroma_job_queue,
     key: :queues,
     type: :group,
-    description: "Pleroma Job Queue configuration: a list of queues with maximum concurrent jobs",
+    description: "[Deprecated] Replaced with `Oban`/`:queues` (keeping the same format)",
+    children: []
+  },
+  %{
+    group: :pleroma,
+    key: Pleroma.Web.Federator.RetryQueue,
+    type: :group,
+    description: "[Deprecated] See `Oban` and `:workers` sections for configuration notes",
     children: [
       %{
-        key: :federator_outgoing,
+        key: :max_retries,
         type: :integer,
-        description: "Outgoing federation queue",
-        suggestions: [50]
+        description: "[Deprecated] Replaced as `Oban`/`:queues`/`:outgoing_federation` value",
+        suggestions: []
+      }
+    ]
   },
   %{
-    key: :federator_incoming,
-    type: :integer,
-    description: "Incoming federation queue",
-    suggestions: [50]
+    group: :pleroma,
+    key: Oban,
+    type: :group,
+    description: """
+    [Oban](https://github.com/sorentwo/oban) asynchronous job processor configuration.
+
+    Note: if you are running PostgreSQL in [`silent_mode`](https://postgresqlco.nf/en/doc/param/silent_mode?version=9.1),
+    it's advised to set [`log_destination`](https://postgresqlco.nf/en/doc/param/log_destination?version=9.1) to `syslog`,
+    otherwise `postmaster.log` file may grow because of "you don't own a lock of type ShareLock" warnings
+    (see https://github.com/sorentwo/oban/issues/52).
+    """,
+    children: [
+      %{
+        key: :repo,
+        type: :module,
+        description: "Application's Ecto repo",
+        suggestions: [Pleroma.Repo]
       },
       %{
-        key: :mailer,
-        type: :integer,
-        description: "Email sender queue, see Pleroma.Emails.Mailer",
-        suggestions: [10]
+        key: :verbose,
+        type: :boolean,
+        description: "Logs verbose mode",
+        suggestions: [false, true]
       },
       %{
-        key: :web_push,
-        type: :integer,
-        description: "Web push notifications queue",
-        suggestions: [50]
+        key: :prune,
+        type: [:atom, :tuple],
+        description:
+          "Non-retryable jobs [pruning settings](https://github.com/sorentwo/oban#pruning)",
+        suggestions: [:disabled, {:maxlen, 1500}, {:maxage, 60 * 60}]
       },
       %{
-        key: :transmogrifier,
-        type: :integer,
-        description: "Transmogrifier queue",
-        suggestions: [20]
-      },
-      %{
-        key: :scheduled_activities,
-        type: :integer,
-        description: "Scheduled activities queue, see Pleroma.ScheduledActivities",
-        suggestions: [10]
-      },
+        key: :queues,
+        type: :keyword,
+        description:
+          "Background jobs queues (keys: queues, values: max numbers of concurrent jobs)",
+        suggestions: [
+          [
+            activity_expiration: 10,
+            background: 5,
+            federator_incoming: 50,
+            federator_outgoing: 50,
+            mailer: 10,
+            scheduled_activities: 10,
+            transmogrifier: 20,
+            web_push: 50
+          ]
+        ],
+        children: [
           %{
             key: :activity_expiration,
             type: :integer,
@@ -1827,38 +1856,63 @@
             type: :integer,
             description: "Background queue",
             suggestions: [5]
+          },
+          %{
+            key: :federator_incoming,
+            type: :integer,
+            description: "Incoming federation queue",
+            suggestions: [50]
+          },
+          %{
+            key: :federator_outgoing,
+            type: :integer,
+            description: "Outgoing federation queue",
+            suggestions: [50]
+          },
+          %{
+            key: :mailer,
+            type: :integer,
+            description: "Email sender queue, see Pleroma.Emails.Mailer",
+            suggestions: [10]
+          },
+          %{
+            key: :scheduled_activities,
+            type: :integer,
+            description: "Scheduled activities queue, see Pleroma.ScheduledActivities",
+            suggestions: [10]
+          },
+          %{
+            key: :transmogrifier,
+            type: :integer,
+            description: "Transmogrifier queue",
+            suggestions: [20]
+          },
+          %{
+            key: :web_push,
+            type: :integer,
+            description: "Web push notifications queue",
+            suggestions: [50]
+          }
+        ]
       }
     ]
   },
   %{
     group: :pleroma,
-    key: Pleroma.Web.Federator.RetryQueue,
+    key: :workers,
     type: :group,
-    description: "",
+    description: "Includes custom worker options not interpretable directly by `Oban`",
     children: [
       %{
-        key: :enabled,
-        type: :boolean,
-        description: "If set to true, failed federation jobs will be retried",
-        suggestions: [true, false]
-      },
-      %{
-        key: :max_jobs,
-        type: :integer,
-        description: "The maximum amount of parallel federation jobs running at the same time",
-        suggestions: [20]
-      },
-      %{
-        key: :initial_timeout,
-        type: :integer,
-        description: "The initial timeout in seconds",
-        suggestions: [30]
-      },
-      %{
-        key: :max_retries,
-        type: :integer,
-        description: "The maximum number of times a federation job is retried",
-        suggestions: [5]
+        key: :retries,
+        type: :keyword,
+        description: "Max retry attempts for failed jobs, per `Oban` queue",
+        suggestions: [
+          [
+            federator_incoming: 5,
+            federator_outgoing: 5
+          ]
+        ]
       }
     ]
   },
@@ -61,7 +61,11 @@
 
 config :web_push_encryption, :http_client, Pleroma.Web.WebPushHttpClientMock
 
-config :pleroma_job_queue, disabled: true
+config :pleroma, Oban,
+  queues: false,
+  prune: :disabled
+
+config :pleroma, Pleroma.Scheduler, jobs: []
 
 config :pleroma, Pleroma.ScheduledActivity,
   daily_user_limit: 2,
docs/config.md (756 changes)
@@ -1 +1,755 @@
-This file is a placeholder, please run mix pleroma.docs to generate it.

The new contents of docs/config.md:

# Configuration

This file describes the configuration; it is recommended to edit the relevant *.secret.exs file instead of the others found in the ``config`` directory.
If you run Pleroma with ``MIX_ENV=prod`` the file is ``prod.secret.exs``, otherwise it is ``dev.secret.exs``.

## Pleroma.Upload
* `uploader`: Select which `Pleroma.Uploaders` to use
* `filters`: List of `Pleroma.Upload.Filter` to use.
* `link_name`: When enabled Pleroma will add a `name` parameter to the url of the upload, for example `https://instance.tld/media/corndog.png?name=corndog.png`. This is needed to provide the correct filename in Content-Disposition headers when using filters like `Pleroma.Upload.Filter.Dedupe`
* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host.
* `proxy_remote`: If you're using a remote uploader, Pleroma will proxy media requests instead of redirecting to it.
* `proxy_opts`: Proxy options, see `Pleroma.ReverseProxy` documentation.

Note: `strip_exif` has been replaced by `Pleroma.Upload.Filter.Mogrify`.
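
For instance, a local uploader with deduplicated, anonymized filenames could be configured roughly like this (an illustrative sketch, not defaults; the media domain is a placeholder):

```elixir
config :pleroma, Pleroma.Upload,
  uploader: Pleroma.Uploaders.Local,
  filters: [Pleroma.Upload.Filter.Dedupe, Pleroma.Upload.Filter.AnonymizeFilename],
  link_name: true,
  # placeholder domain — point this at your own media host
  base_url: "https://media.example.tld/media/"
```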

## Pleroma.Uploaders.Local
* `uploads`: Which directory to store the user-uploads in, relative to pleroma’s working directory

## Pleroma.Uploaders.S3
* `bucket`: S3 bucket name
* `bucket_namespace`: S3 bucket namespace
* `public_endpoint`: S3 endpoint that the user finally accesses (e.g. "https://s3.dualstack.ap-northeast-1.amazonaws.com")
* `truncated_namespace`: If you use an S3-compatible service such as Digital Ocean Spaces or a CDN, set the folder name or "" etc.
  For example, when using the CDN to S3 virtual host format, set "".
  In that case, set the CNAME of the CDN in `public_endpoint`.

## Pleroma.Upload.Filter.Mogrify

* `args`: List of actions for the `mogrify` command like `"strip"` or `["strip", "auto-orient", {"implode", "1"}]`.

## Pleroma.Upload.Filter.Dedupe

No specific configuration.

## Pleroma.Upload.Filter.AnonymizeFilename

This filter replaces the filename (not the path) of an upload. For complete obfuscation, add
`Pleroma.Upload.Filter.Dedupe` before AnonymizeFilename.

* `text`: Text to replace filenames in links. If empty, `{random}.extension` will be used. You can get the original filename extension by using `{extension}`, for example `custom-file-name.{extension}`.

## Pleroma.Emails.Mailer
* `adapter`: one of the mail adapters listed in the [Swoosh readme](https://github.com/swoosh/swoosh#adapters), or `Swoosh.Adapters.Local` for an in-memory mailbox.
* `api_key` / `password` and / or other adapter-specific settings, per the above documentation.
* `enabled`: Enables or disables sending emails. Default: `false`.

An example for the Sendgrid adapter:

```elixir
config :pleroma, Pleroma.Emails.Mailer,
  adapter: Swoosh.Adapters.Sendgrid,
  api_key: "YOUR_API_KEY"
```

An example for the SMTP adapter:

```elixir
config :pleroma, Pleroma.Emails.Mailer,
  adapter: Swoosh.Adapters.SMTP,
  relay: "smtp.gmail.com",
  username: "YOUR_USERNAME@gmail.com",
  password: "YOUR_SMTP_PASSWORD",
  port: 465,
  ssl: true,
  tls: :always,
  auth: :always
```

## :uri_schemes
* `valid_schemes`: List of the scheme parts that are considered valid for a URL

## :instance
* `name`: The instance’s name
* `email`: Email used to reach an Administrator/Moderator of the instance
* `notify_email`: Email used for notifications.
* `description`: The instance’s description, can be seen in nodeinfo and ``/api/v1/instance``
* `limit`: Posts character limit (CW/Subject included in the counter)
* `remote_limit`: Hard character limit beyond which remote posts will be dropped.
* `upload_limit`: File size limit of uploads (except for avatar, background, banner)
* `avatar_upload_limit`: File size limit of user’s profile avatars
* `background_upload_limit`: File size limit of user’s profile backgrounds
* `banner_upload_limit`: File size limit of user’s profile banners
* `poll_limits`: A map with poll limits for **local** polls
  * `max_options`: Maximum number of options
  * `max_option_chars`: Maximum number of characters per option
  * `min_expiration`: Minimum expiration time (in seconds)
  * `max_expiration`: Maximum expiration time (in seconds)
* `registrations_open`: Enable registrations for anyone, invitations can be enabled when false.
* `invites_enabled`: Enable user invitations for admins (depends on `registrations_open: false`).
* `account_activation_required`: Require users to confirm their emails before signing in.
* `federating`: Enable federation with other instances
* `federation_incoming_replies_max_depth`: Max. depth of reply-to activities fetching on incoming federation, to prevent out-of-memory situations while fetching very long threads. If set to `nil`, threads of any depth will be fetched. Lower this value if you experience out-of-memory crashes.
* `federation_reachability_timeout_days`: Timeout (in days) of each external federation target being unreachable prior to pausing federating to it.
* `allow_relay`: Enable Pleroma’s Relay, which makes it possible to follow a whole instance
* `rewrite_policy`: Message Rewrite Policy, either one or a list. Here are the ones available by default:
  * `Pleroma.Web.ActivityPub.MRF.NoOpPolicy`: Doesn’t modify activities (default)
  * `Pleroma.Web.ActivityPub.MRF.DropPolicy`: Drops all activities. It generally doesn’t make sense to use in production
  * `Pleroma.Web.ActivityPub.MRF.SimplePolicy`: Restrict the visibility of activities from certain instances (See ``:mrf_simple`` section)
  * `Pleroma.Web.ActivityPub.MRF.TagPolicy`: Applies policies to individual users based on tags, which can be set using pleroma-fe/admin-fe/any other app that supports the Pleroma Admin API. For example it allows marking posts from individual users nsfw (sensitive)
  * `Pleroma.Web.ActivityPub.MRF.SubchainPolicy`: Selectively runs other MRF policies when messages match (see ``:mrf_subchain`` section)
  * `Pleroma.Web.ActivityPub.MRF.RejectNonPublic`: Drops posts with non-public visibility settings (See ``:mrf_rejectnonpublic`` section)
  * `Pleroma.Web.ActivityPub.MRF.EnsureRePrepended`: Rewrites posts to ensure that replies to posts with subjects do not have an identical subject and instead begin with re:.
  * `Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy`: Rejects posts from likely spambots by rejecting posts from new users that contain links.
  * `Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy`: Crawls attachments using their MediaProxy URLs so that the MediaProxy cache is primed.
  * `Pleroma.Web.ActivityPub.MRF.MentionPolicy`: Drops posts mentioning configurable users. (see `:mrf_mention` section)
  * `Pleroma.Web.ActivityPub.MRF.VocabularyPolicy`: Restricts activities to a configured set of vocabulary. (see `:mrf_vocabulary` section)
* `public`: Makes the client API authenticated-only except for user profiles. Useful for disabling the Local Timeline and The Whole Known Network.
* `quarantined_instances`: List of ActivityPub instances where private (DMs, followers-only) activities will not be sent.
* `managed_config`: Whether the config for pleroma-fe is configured in this config or in ``static/config.json``
* `allowed_post_formats`: MIME-type list of formats allowed to be posted (transformed into HTML)
* `mrf_transparency`: Make the content of your Message Rewrite Facility settings public (via nodeinfo).
* `mrf_transparency_exclusions`: Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value.
* `scope_copy`: Copy the scope (private/unlisted/public) in replies to posts by default.
* `subject_line_behavior`: Allows changing the default behaviour of subject lines in replies. Valid values:
  * "email": Copy and prepend re:, as in email.
  * "masto": Copy verbatim, as in Mastodon.
  * "noop": Don't copy the subject.
* `always_show_subject_input`: When set to false, auto-hide the subject field when it's empty.
* `extended_nickname_format`: Set to `true` to use the extended local nickname format (allows underscores/dashes). This will break federation with older software for these nicknames.
* `max_pinned_statuses`: The maximum number of pinned statuses. `0` will disable the feature.
* `autofollowed_nicknames`: Set to nicknames of (local) users that every new user should automatically follow.
* `no_attachment_links`: Set to true to disable automatically adding attachment link text to statuses
* `welcome_message`: A message that will be sent to newly registered users as a direct message.
* `welcome_user_nickname`: The nickname of the local user that sends the welcome message.
* `max_report_comment_size`: The maximum size of the report comment (Default: `1000`)
* `safe_dm_mentions`: If set to true, only mentions at the beginning of a post will be used to address people in direct messages. This is to prevent accidental mentioning of people when talking about them (e.g. "@friend hey i really don't like @enemy"). Default: `false`.
* `healthcheck`: If set to true, system data will be shown on ``/api/pleroma/healthcheck``.
* `remote_post_retention_days`: The default amount of days to retain remote posts when pruning the database.
* `user_bio_length`: A user bio maximum length (default: `5000`)
* `user_name_length`: A user name maximum length (default: `100`)
* `skip_thread_containment`: Skip filtering out broken threads. The default is `false`.
* `limit_to_local_content`: Limit unauthenticated users to searching for local statuses and users only. Possible values: `:unauthenticated`, `:all` and `false`. The default is `:unauthenticated`.
* `dynamic_configuration`: Allow transferring configuration to the DB with subsequent customization from the Admin API.
* `max_account_fields`: The maximum number of custom fields in the user profile (default: `10`)
* `max_remote_account_fields`: The maximum number of custom fields in the remote user profile (default: `20`)
* `account_field_name_length`: An account field name maximum length (default: `512`)
* `account_field_value_length`: An account field value maximum length (default: `2048`)
* `external_user_synchronization`: Enable following/followers counters synchronization for external users.
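
A small illustrative excerpt combining a few of the options above (the values are placeholders, not recommendations):

```elixir
config :pleroma, :instance,
  name: "Example Pleroma",      # shown in nodeinfo and the UI
  email: "admin@example.tld",   # admin contact
  limit: 5_000,                 # post character limit
  registrations_open: false,
  invites_enabled: true
```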

## :logger
* `backends`: `:console` is used to send logs to stdout, `{ExSyslogger, :ex_syslogger}` to log to syslog, and `Quack.Logger` to log to Slack

An example to enable ONLY ExSyslogger (f/ex in ``prod.secret.exs``) with info and debug suppressed:
```elixir
config :logger,
  backends: [{ExSyslogger, :ex_syslogger}]

config :logger, :ex_syslogger,
  level: :warn
```

Another example, keeping console output and adding the pid to syslog output:
```elixir
config :logger,
  backends: [:console, {ExSyslogger, :ex_syslogger}]

config :logger, :ex_syslogger,
  level: :warn,
  option: [:pid, :ndelay]
```

See: [logger’s documentation](https://hexdocs.pm/logger/Logger.html) and [ex_syslogger’s documentation](https://hexdocs.pm/ex_syslogger/)

An example of logging info to local syslog, but warn to a Slack channel:
```elixir
config :logger,
  backends: [ {ExSyslogger, :ex_syslogger}, Quack.Logger ],
  level: :info

config :logger, :ex_syslogger,
  level: :info,
  ident: "pleroma",
  format: "$metadata[$level] $message"

config :quack,
  level: :warn,
  meta: [:all],
  webhook_url: "https://hooks.slack.com/services/YOUR-API-KEY-HERE"
```

See the [Quack Github](https://github.com/azohra/quack) for more details

## :frontend_configurations

This can be used to configure a keyword list that keeps the configuration data for any kind of frontend. By default, settings for `pleroma_fe` and `masto_fe` are configured.

Frontends can access these settings at `/api/pleroma/frontend_configurations`

To add your own configuration for PleromaFE, use it like this:

```elixir
config :pleroma, :frontend_configurations,
  pleroma_fe: %{
    theme: "pleroma-dark",
    # ... see /priv/static/static/config.json for the available keys.
  },
  masto_fe: %{
    showInstanceSpecificPanel: true
  }
```

These settings **need to be complete**, they will override the defaults.

NOTE: for versions < 1.0, you need to set [`:fe`](#fe) to false, as shown a few lines below.

## :fe
__THIS IS DEPRECATED__

If you are using this method, please change it to the [`frontend_configurations`](#frontend_configurations) method.
Please **set this option to false** in your config like this:

```elixir
config :pleroma, :fe, false
```

This section is used to configure Pleroma-FE, unless ``:managed_config`` in ``:instance`` is set to false.

* `theme`: Which theme to use, they are defined in ``styles.json``
* `logo`: URL of the logo, defaults to Pleroma’s logo
* `logo_mask`: Whether to use only the logo's shape as a mask (true) or as a regular image (false)
* `logo_margin`: What margin to use around the logo
* `background`: URL of the background, unless viewing a user profile with a background that is set
* `redirect_root_no_login`: relative URL which indicates where to redirect when a user isn’t logged in.
* `redirect_root_login`: relative URL which indicates where to redirect when a user is logged in.
* `show_instance_panel`: Whether to show the instance’s specific panel.
* `scope_options_enabled`: Enable setting a notice visibility and subject/CW when posting
* `formatting_options_enabled`: Enable setting a formatting different than plain-text (i.e. HTML, Markdown) when posting, relates to ``:instance, allowed_post_formats``
* `collapse_message_with_subjects`: When a message has a subject (aka Content Warning), collapse it by default
* `hide_post_stats`: Hide notice statistics (repeats, favorites, …)
* `hide_user_stats`: Hide profile statistics (posts, posts per day, followers, followings, …)

## :assets

This section configures assets to be used with various frontends. Currently the only option
relates to mascots on the mastodon frontend

* `mascots`: KeywordList of mascots, each element __MUST__ contain both a `url` and a
  `mime_type` key.
* `default_mascot`: An element from `mascots` - This will be used as the default mascot
  on MastoFE (default: `:pleroma_fox_tan`)

## :mrf_simple
* `media_removal`: List of instances to remove media from
* `media_nsfw`: List of instances whose media will be marked NSFW (sensitive)
* `federated_timeline_removal`: List of instances to remove from the Federated (aka The Whole Known Network) Timeline
* `reject`: List of instances to reject any activities from
* `accept`: List of instances to accept any activities from
* `report_removal`: List of instances to reject reports from
* `avatar_removal`: List of instances to strip avatars from
* `banner_removal`: List of instances to strip banners from
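
An illustrative sketch (the instance names are placeholders):

```elixir
config :pleroma, :mrf_simple,
  media_nsfw: ["nsfw.example.org"],                  # mark media from this instance as sensitive
  federated_timeline_removal: ["spam.example.org"],  # keep off the federated timeline
  reject: ["blocked.example.org"]                    # reject all activities
```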

## :mrf_subchain
This policy processes messages through an alternate pipeline when a given message matches certain criteria.
All criteria are configured as a map of regular expressions to lists of policy modules.

* `match_actor`: Matches a series of regular expressions against the actor field.

Example:

```
config :pleroma, :mrf_subchain,
  match_actor: %{
    ~r/https:\/\/example.com/s => [Pleroma.Web.ActivityPub.MRF.DropPolicy]
  }
```

## :mrf_rejectnonpublic
* `allow_followersonly`: whether to allow followers-only posts
* `allow_direct`: whether to allow direct messages

## :mrf_hellthread
* `delist_threshold`: Number of mentioned users after which the message gets delisted (the message can still be seen, but it will not show up in public timelines and mentioned users won't get notifications about it). Set to 0 to disable.
* `reject_threshold`: Number of mentioned users after which the message gets rejected. Set to 0 to disable.

## :mrf_keyword
* `reject`: A list of patterns which result in the message being rejected, each pattern can be a string or a [regular expression](https://hexdocs.pm/elixir/Regex.html)
* `federated_timeline_removal`: A list of patterns which result in the message being removed from federated timelines (a.k.a unlisted), each pattern can be a string or a [regular expression](https://hexdocs.pm/elixir/Regex.html)
* `replace`: A list of tuples containing `{pattern, replacement}`, `pattern` can be a string or a [regular expression](https://hexdocs.pm/elixir/Regex.html)
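
For example (the patterns and replacements below are made up for illustration):

```elixir
config :pleroma, :mrf_keyword,
  reject: ["buy followers", ~r/casino/i],   # drop matching messages entirely
  federated_timeline_removal: [~r/nsfw/i],  # unlist matching messages
  replace: [{"badword", "***"}]             # rewrite matching text
```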

## :mrf_mention
* `actors`: A list of actors; any post mentioning them will be dropped.

## :mrf_vocabulary
* `accept`: A list of ActivityStreams terms to accept. If empty, all supported messages are accepted.
* `reject`: A list of ActivityStreams terms to reject. If empty, no messages are rejected.

## :media_proxy
* `enabled`: Enables proxying of remote media to the instance’s proxy
* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host/CDN fronts.
* `proxy_opts`: All options defined in `Pleroma.ReverseProxy` documentation, defaults to `[max_body_length: (25*1_048_576)]`.
* `whitelist`: List of domains to bypass the mediaproxy
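
A sketch of a typical proxy setup (the cache and media domains are placeholders):

```elixir
config :pleroma, :media_proxy,
  enabled: true,
  base_url: "https://cache.example.tld",
  proxy_opts: [max_body_length: 25 * 1_048_576],
  whitelist: ["media.example.tld"]   # serve this domain directly, bypassing the proxy
```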

## :gopher
* `enabled`: Enables the gopher interface
* `ip`: IP address to bind to
* `port`: Port to bind to
* `dstport`: Port advertised in urls (optional, defaults to `port`)

## Pleroma.Web.Endpoint
`Phoenix` endpoint configuration, all configuration options can be viewed [here](https://hexdocs.pm/phoenix/Phoenix.Endpoint.html#module-dynamic-configuration), only common options are listed here
* `http` - a list containing http protocol configuration, all configuration options can be viewed [here](https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html#module-options), only common options are listed here. For deployment using docker, you need to set this to `[ip: {0,0,0,0}, port: 4000]` to make pleroma accessible from other containers (such as your nginx server).
  - `ip` - a tuple consisting of 4 integers
  - `port`
* `url` - a list containing the configuration for generating urls, accepts
  - `host` - the host without the scheme and a port (e.g `example.com`, not `https://example.com:2020`)
  - `scheme` - e.g `http`, `https`
  - `port`
  - `path`
* `extra_cookie_attrs` - a list of `Key=Value` strings to be added as non-standard cookie attributes. Defaults to `["SameSite=Lax"]`. See the [SameSite article](https://www.owasp.org/index.php/SameSite) on OWASP for more info.

**Important note**: if you modify anything inside these lists, the default `config.exs` values will be overwritten, which may result in breakage. To make sure this does not happen, please copy the default value for the list from `config.exs` and modify/add only what you need.

Example:
```elixir
config :pleroma, Pleroma.Web.Endpoint,
  url: [host: "example.com", port: 2020, scheme: "https"],
  http: [
    # start copied from config.exs
    dispatch: [
      {:_,
       [
         {"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
         {"/websocket", Phoenix.Endpoint.CowboyWebSocket,
          {Phoenix.Transports.WebSocket,
           {Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, websocket_config}}},
         {:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}}
       ]}
    # end copied from config.exs
    ],
    port: 8080,
    ip: {127, 0, 0, 1}
  ]
```

This will make Pleroma listen on `127.0.0.1` port `8080` and generate urls starting with `https://example.com:2020`

## :activitypub
* ``unfollow_blocked``: Whether blocks result in people getting unfollowed
* ``outgoing_blocks``: Whether to federate blocks to other instances
* ``deny_follow_blocked``: Whether to disallow following an account that has blocked the user in question
* ``sign_object_fetches``: Sign object fetches with HTTP signatures

## :http_security
* ``enabled``: Whether the managed content security policy is enabled
* ``sts``: Whether to additionally send a `Strict-Transport-Security` header
* ``sts_max_age``: The maximum age for the `Strict-Transport-Security` header if sent
* ``ct_max_age``: The maximum age for the `Expect-CT` header if sent
* ``referrer_policy``: The referrer policy to use, either `"same-origin"` or `"no-referrer"`
* ``report_uri``: Adds the specified url to `report-uri` and `report-to` group in CSP header.
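
An illustrative example (the report URL is a placeholder):

```elixir
config :pleroma, :http_security,
  enabled: true,
  sts: true,
  sts_max_age: 31_536_000,   # one year, in seconds
  referrer_policy: "same-origin",
  report_uri: "https://example.tld/csp-report"
```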

## :mrf_user_allowlist

The keys in this section are the domain names that the policy should apply to.
Each key should be assigned a list of users that should be allowed through by
their ActivityPub ID.

An example:

```elixir
config :pleroma, :mrf_user_allowlist,
  "example.org": ["https://example.org/users/admin"]
```

## :web_push_encryption, :vapid_details

Web Push Notifications configuration. You can use the mix task `mix web_push.gen.keypair` to generate it.

* ``subject``: a mailto link for the administrative contact. It’s best if this email is not a personal email address, but rather a group email so that if a person leaves an organization, is unavailable for an extended period, or otherwise can’t respond, someone else on the list can.
* ``public_key``: VAPID public key
* ``private_key``: VAPID private key

## Pleroma.Captcha
* `enabled`: Whether the captcha should be shown on registration
* `method`: The method/service to use for captcha
* `seconds_valid`: The time in seconds for which the captcha is valid

### Pleroma.Captcha.Kocaptcha
Kocaptcha is a very simple captcha service with a single API endpoint,
the source code is here: https://github.com/koto-bank/kocaptcha. The default endpoint
`https://captcha.kotobank.ch` is hosted by the developer.

* `endpoint`: the kocaptcha endpoint to use
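
A sketch of switching the captcha method to Kocaptcha, assuming `method` accepts the captcha module name:

```elixir
config :pleroma, Pleroma.Captcha,
  enabled: true,
  method: Pleroma.Captcha.Kocaptcha,
  seconds_valid: 60

config :pleroma, Pleroma.Captcha.Kocaptcha,
  endpoint: "https://captcha.kotobank.ch"   # the developer-hosted default endpoint
```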

## :admin_token

Allows setting a token that can be used to authenticate with the admin API without using an actual user, by giving it as the 'admin_token' parameter. Example:

```elixir
config :pleroma, :admin_token, "somerandomtoken"
```

You can then do

```sh
curl "http://localhost:4000/api/pleroma/admin/invite_token?admin_token=somerandomtoken"
```

## Oban

[Oban](https://github.com/sorentwo/oban) asynchronous job processor configuration.

Configuration options described in the [Oban readme](https://github.com/sorentwo/oban#usage):
* `repo` - app's Ecto repo (`Pleroma.Repo`)
* `verbose` - logs verbosity
* `prune` - non-retryable jobs [pruning settings](https://github.com/sorentwo/oban#pruning) (`:disabled` / `{:maxlen, value}` / `{:maxage, value}`)
* `queues` - job queues (see below)

Pleroma has the following queues:

* `activity_expiration` - Activity expiration
* `federator_outgoing` - Outgoing federation
* `federator_incoming` - Incoming federation
* `mailer` - Email sender, see [`Pleroma.Emails.Mailer`](#pleromaemailsmailer)
* `transmogrifier` - Transmogrifier
* `web_push` - Web push notifications
* `scheduled_activities` - Scheduled activities, see [`Pleroma.ScheduledActivity`](#pleromascheduledactivity)

Example:

```elixir
config :pleroma, Oban,
  repo: Pleroma.Repo,
  verbose: false,
  prune: {:maxlen, 1500},
  queues: [
    federator_incoming: 50,
    federator_outgoing: 50
  ]
```

This config contains two queues: `federator_incoming` and `federator_outgoing`. Both have the number of max concurrent jobs set to `50`.

### Migrating `pleroma_job_queue` settings

`config :pleroma_job_queue, :queues` is replaced by `config :pleroma, Oban, :queues` and uses the same format (keys are queues' names, values are max concurrent jobs numbers).

### Note on running with PostgreSQL in silent mode

If you are running PostgreSQL in [`silent_mode`](https://postgresqlco.nf/en/doc/param/silent_mode?version=9.1), it's advised to set [`log_destination`](https://postgresqlco.nf/en/doc/param/log_destination?version=9.1) to `syslog`,
otherwise the `postmaster.log` file may grow because of "you don't own a lock of type ShareLock" warnings (see https://github.com/sorentwo/oban/issues/52).

## :workers

Includes custom worker options not interpretable directly by `Oban`.

* `retries` — keyword lists where keys are `Oban` queues (see above) and values are numbers of max attempts for failed jobs.

Example:

```elixir
config :pleroma, :workers,
  retries: [
    federator_incoming: 5,
    federator_outgoing: 5
  ]
```

### Migrating `Pleroma.Web.Federator.RetryQueue` settings

* `max_retries` is replaced with `config :pleroma, :workers, retries: [federator_outgoing: 5]`
* `enabled: false` corresponds to `config :pleroma, :workers, retries: [federator_outgoing: 1]`
* deprecated options: `max_jobs`, `initial_timeout`

## Pleroma.Web.Metadata
* `providers`: a list of metadata providers to enable. Providers available:
  * Pleroma.Web.Metadata.Providers.OpenGraph
  * Pleroma.Web.Metadata.Providers.TwitterCard
  * Pleroma.Web.Metadata.Providers.RelMe - add links from user bio with rel=me into the `<header>` as `<link rel=me>`
* `unfurl_nsfw`: If set to `true` nsfw attachments will be shown in previews
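
Putting the options together (an illustrative sketch):

```elixir
config :pleroma, Pleroma.Web.Metadata,
  providers: [
    Pleroma.Web.Metadata.Providers.OpenGraph,
    Pleroma.Web.Metadata.Providers.TwitterCard
  ],
  unfurl_nsfw: false
```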

## :rich_media
* `enabled`: if enabled the instance will parse metadata from attached links to generate link previews
* `ignore_hosts`: list of hosts which will be ignored by the metadata parser. For example `["accounts.google.com", "xss.website"]`, defaults to `[]`.
* `ignore_tld`: list of TLDs (top-level domains) for which metadata will not be parsed. Default: `["local", "localdomain", "lan"]`
* `parsers`: list of Rich Media parsers

## :fetch_initial_posts
* `enabled`: if enabled, when a new user is federated with, fetch some of their latest posts
* `pages`: the amount of pages to fetch

## :hackney_pools

Advanced. Tweaks Hackney (http client) connection pools.

There are three pools used:

* `:federation` for the federation jobs.
  You may want this pool's max_connections to be at least equal to the number of federator jobs + retry queue jobs.
* `:media` for rich media, media proxy
* `:upload` for uploaded media (if using a remote uploader and `proxy_remote: true`)

For each pool, the options are:

* `max_connections` - how many connections a pool can hold
* `timeout` - retention duration for connections
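
An illustrative sketch (pool sizes are examples, not tuned recommendations):

```elixir
config :pleroma, :hackney_pools,
  federation: [max_connections: 50, timeout: 150_000],
  media: [max_connections: 50, timeout: 150_000],
  upload: [max_connections: 25, timeout: 300_000]
```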

## :auto_linker

Configuration for the `auto_linker` library:

* `class: "auto-linker"` - specify the class to be added to the generated link. false to clear
* `rel: "noopener noreferrer"` - override the rel attribute. false to clear
* `new_window: true` - set to false to remove the `target='_blank'` attribute
* `scheme: false` - Set to true to link urls with a scheme, e.g. `http://google.com`
* `truncate: false` - Set to a number to truncate urls longer than the number. Truncated urls will end in `..`
* `strip_prefix: true` - Strip the scheme prefix
* `extra: false` - link urls with rarely used schemes (magnet, ipfs, irc, etc.)

Example:

```elixir
config :auto_linker,
  opts: [
    scheme: true,
    extra: true,
    class: false,
    strip_prefix: false,
    new_window: false,
    rel: false
  ]
```

## Pleroma.Scheduler

Configuration for the [Quantum](https://github.com/quantum-elixir/quantum-core) jobs scheduler.

See the [Quantum readme](https://github.com/quantum-elixir/quantum-core#usage) for the list of supported options.

Example:

```elixir
config :pleroma, Pleroma.Scheduler,
  global: true,
  overlap: true,
  timezone: :utc,
  jobs: [{"0 */6 * * * *", {Pleroma.Web.Websub, :refresh_subscriptions, []}}]
```

The above example defines a single job which invokes `Pleroma.Web.Websub.refresh_subscriptions()` every 6 hours ("0 */6 * * * *", [crontab format](https://en.wikipedia.org/wiki/Cron)).

## Pleroma.ScheduledActivity

* `daily_user_limit`: the number of scheduled activities a user is allowed to create in a single day (Default: `25`)
* `total_user_limit`: the number of scheduled activities a user is allowed to create in total (Default: `300`)
* `enabled`: whether scheduled activities are sent to the job queue to be executed
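
For example, keeping the documented defaults explicit:

```elixir
config :pleroma, Pleroma.ScheduledActivity,
  daily_user_limit: 25,
  total_user_limit: 300,
  enabled: true
```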

## Pleroma.ActivityExpiration

* `enabled`: whether expired activities will be sent to the job queue to be deleted

## Pleroma.Web.Auth.Authenticator

* `Pleroma.Web.Auth.PleromaAuthenticator`: default database authenticator
* `Pleroma.Web.Auth.LDAPAuthenticator`: LDAP authentication

## :ldap

Use LDAP for user authentication. When a user logs in to the Pleroma
instance, the name and password will be verified by trying to authenticate
(bind) to an LDAP server. If a user exists in the LDAP directory but there
is no account with the same name yet on the Pleroma instance then a new
Pleroma account will be created with the same name as the LDAP user name.

* `enabled`: enables LDAP authentication
* `host`: LDAP server hostname
* `port`: LDAP port, e.g. 389 or 636
* `ssl`: true to use SSL, usually implies port 636
* `sslopts`: additional SSL options
* `tls`: true to start TLS, usually implies port 389
* `tlsopts`: additional TLS options
* `base`: LDAP base, e.g. "dc=example,dc=com"
* `uid`: LDAP attribute name to authenticate the user, e.g. when "cn", the filter will be "cn=username,base"

## BBS / SSH access

To enable a simple command line interface accessible over ssh, add a setting like this to your configuration file:

```exs
app_dir = File.cwd!
priv_dir = Path.join([app_dir, "priv/ssh_keys"])

config :esshd,
  enabled: true,
  priv_dir: priv_dir,
  handler: "Pleroma.BBS.Handler",
  port: 10_022,
  password_authenticator: "Pleroma.BBS.Authenticator"
```

Feel free to adjust the priv_dir and port number. Then you will have to create the directory for the keys (in the example `priv/ssh_keys`) and create the host keys with `ssh-keygen -m PEM -N "" -b 2048 -t rsa -f ssh_host_rsa_key`. After restarting, you should be able to connect to your Pleroma instance with `ssh username@server -p $PORT`

## :auth

* `Pleroma.Web.Auth.PleromaAuthenticator`: default database authenticator
* `Pleroma.Web.Auth.LDAPAuthenticator`: LDAP authentication

Authentication / authorization settings.

* `auth_template`: authentication form template. By default it's `show.html` which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/show.html.eex`.
* `oauth_consumer_template`: OAuth consumer mode authentication form template. By default it's `consumer.html` which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/consumer.html.eex`.
* `oauth_consumer_strategies`: the list of enabled OAuth consumer strategies; by default it's set by the `OAUTH_CONSUMER_STRATEGIES` environment variable. Each entry in this space-delimited string should be of format `<strategy>` or `<strategy>:<dependency>` (e.g. `twitter` or `keycloak:ueberauth_keycloak_strategy` in case the dependency is named differently than `ueberauth_<strategy>`).

## :email_notifications

Email notifications settings.

- digest - emails of "what you've missed" for users who have been
  inactive for a while.
  - active: globally enable or disable digest emails
  - schedule: When to send digest email, in [crontab format](https://en.wikipedia.org/wiki/Cron).
    "0 0 * * 0" is the default, meaning "once a week at midnight on Sunday morning"
  - interval: Minimum interval between digest emails to one user
  - inactivity_threshold: Minimum user inactivity threshold

## Pleroma.Emails.UserEmail

- `:logo` - a path to a custom logo. Set it to `nil` to use the default Pleroma logo.
- `:styling` - a map with color settings for email templates.

## OAuth consumer mode

OAuth consumer mode allows sign in / sign up via external OAuth providers (e.g. Twitter, Facebook, Google, Microsoft, etc.).
Implementation is based on Ueberauth; see the list of [available strategies](https://github.com/ueberauth/ueberauth/wiki/List-of-Strategies).

Note: each strategy is shipped as a separate dependency; in order to get the strategies, run `OAUTH_CONSUMER_STRATEGIES="..." mix deps.get`,
e.g. `OAUTH_CONSUMER_STRATEGIES="twitter facebook google microsoft" mix deps.get`.
The server should also be started with `OAUTH_CONSUMER_STRATEGIES="..." mix phx.server` in case you enable any strategies.

Note: each strategy requires separate setup (on the external provider side and the Pleroma side). Below are the guidelines on setting up the most popular strategies.

Note: make sure that `"SameSite=Lax"` is set in `extra_cookie_attrs` when you have this feature enabled. OAuth consumer mode will not work with `"SameSite=Strict"`

* For Twitter, [register an app](https://developer.twitter.com/en/apps), configure callback URL to https://<your_host>/oauth/twitter/callback

* For Facebook, [register an app](https://developers.facebook.com/apps), configure callback URL to https://<your_host>/oauth/facebook/callback, enable Facebook Login service at https://developers.facebook.com/apps/<app_id>/fb-login/settings/

* For Google, [register an app](https://console.developers.google.com), configure callback URL to https://<your_host>/oauth/google/callback

* For Microsoft, [register an app](https://portal.azure.com), configure callback URL to https://<your_host>/oauth/microsoft/callback

Once the app is configured on the external OAuth provider side, add the app's credentials and strategy-specific settings (if any — e.g. see Microsoft below) to `config/prod.secret.exs`,
per the strategy's documentation (e.g. [ueberauth_twitter](https://github.com/ueberauth/ueberauth_twitter)). Example config based on environment variables:

```elixir
# Twitter
config :ueberauth, Ueberauth.Strategy.Twitter.OAuth,
  consumer_key: System.get_env("TWITTER_CONSUMER_KEY"),
  consumer_secret: System.get_env("TWITTER_CONSUMER_SECRET")

# Facebook
config :ueberauth, Ueberauth.Strategy.Facebook.OAuth,
  client_id: System.get_env("FACEBOOK_APP_ID"),
  client_secret: System.get_env("FACEBOOK_APP_SECRET"),
  redirect_uri: System.get_env("FACEBOOK_REDIRECT_URI")

# Google
config :ueberauth, Ueberauth.Strategy.Google.OAuth,
  client_id: System.get_env("GOOGLE_CLIENT_ID"),
  client_secret: System.get_env("GOOGLE_CLIENT_SECRET"),
  redirect_uri: System.get_env("GOOGLE_REDIRECT_URI")

# Microsoft
config :ueberauth, Ueberauth.Strategy.Microsoft.OAuth,
  client_id: System.get_env("MICROSOFT_CLIENT_ID"),
  client_secret: System.get_env("MICROSOFT_CLIENT_SECRET")

config :ueberauth, Ueberauth,
  providers: [
    microsoft: {Ueberauth.Strategy.Microsoft, [callback_params: []]}
  ]

# Keycloak
# Note: make sure to add the `keycloak:ueberauth_keycloak_strategy` entry to the `OAUTH_CONSUMER_STRATEGIES` environment variable
keycloak_url = "https://publicly-reachable-keycloak-instance.org:8080"

config :ueberauth, Ueberauth.Strategy.Keycloak.OAuth,
  client_id: System.get_env("KEYCLOAK_CLIENT_ID"),
  client_secret: System.get_env("KEYCLOAK_CLIENT_SECRET"),
  site: keycloak_url,
  authorize_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/auth",
  token_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/token",
  userinfo_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/userinfo",
  token_method: :post

config :ueberauth, Ueberauth,
  providers: [
    keycloak: {Ueberauth.Strategy.Keycloak, [uid_field: :email]}
  ]
```

## OAuth 2.0 provider - :oauth2

Configure OAuth 2 provider capabilities:

* `token_expires_in` - The lifetime in seconds of the access token.
* `issue_new_refresh_token` - Whether to keep the old refresh token or generate a new one when obtaining an access token.
* `clean_expired_tokens` - Enable a background job to clean expired oauth tokens. Defaults to `false`.
* `clean_expired_tokens_interval` - Interval to run the job to clean expired tokens. Defaults to `86_400_000` (24 hours).

## :emoji
* `shortcode_globs`: Location of custom emoji files. `*` can be used as a wildcard. Example `["/emoji/custom/**/*.png"]`
* `pack_extensions`: A list of file extensions for emojis, when no emoji.txt for a pack is present. Example `[".png", ".gif"]`
* `groups`: Emojis are ordered in groups (tags). This is an array of key-value pairs where the key is the group name and the value the location or array of locations. `*` can be used as a wildcard. Example `[Custom: ["/emoji/*.png", "/emoji/custom/*.png"]]`
* `default_manifest`: Location of the JSON manifest. This manifest contains information about the emoji packs you can download. Currently only one manifest can be added (no arrays).
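
A sketch combining the examples above (the manifest URL is a placeholder):

```elixir
config :pleroma, :emoji,
  shortcode_globs: ["/emoji/custom/**/*.png"],
  pack_extensions: [".png", ".gif"],
  groups: [Custom: ["/emoji/*.png", "/emoji/custom/*.png"]],
  default_manifest: "https://example.tld/emoji-index.json"
```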

## Database options

### RUM indexing for full text search
* `rum_enabled`: If RUM indexes should be used. Defaults to `false`.

RUM indexes are an alternative indexing scheme that is not included in PostgreSQL by default. While they may eventually be mainlined, for now they have to be installed as a PostgreSQL extension from https://github.com/postgrespro/rum.

Their advantage over the standard GIN indexes is that they allow efficient ordering of search results by timestamp, which makes search queries a lot faster on larger servers, by one or two orders of magnitude. They take up around 3 times as much space as GIN indexes.

To enable them, both the `rum_enabled` flag has to be set and the following special migration has to be run:

`mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`

This will probably take a long time.

## :rate_limit

This is an advanced feature and disabled by default.

A keyword list of rate limiters where a key is a limiter name and value is the limiter configuration. The basic configuration is a tuple where:

* The first element: `scale` (Integer). The time scale in milliseconds.
* The second element: `limit` (Integer). How many requests to limit in the time scale provided.

It is also possible to have different limits for unauthenticated and authenticated users: the keyword value must be a list of two tuples where the first one is a config for unauthenticated users and the second one is for authenticated.

See [`Pleroma.Plugs.RateLimiter`](Pleroma.Plugs.RateLimiter.html) documentation for examples.

Supported rate limiters:

* `:search` for the search requests (account & status search etc.)
* `:app_account_creation` for registering user accounts from the same IP address
* `:relations_actions` for actions on relations with all users (follow, unfollow)
* `:relation_id_action` for actions on relation with a specific user (follow, unfollow)
* `:statuses_actions` for create / delete / fav / unfav / reblog / unreblog actions on any statuses
* `:status_id_action` for fav / unfav or reblog / unreblog actions on the same status by the same user
|
||||||
|
|
||||||
|
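For illustration, a sketch with made-up scale/limit values showing both forms described above (a single tuple, and a two-tuple list with the unauthenticated config first):

```
config :pleroma, :rate_limit,
  search: {1000, 10},
  status_id_action: [{10_000, 3}, {10_000, 7}]
```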
## :web_cache_ttl

The expiration time for the web responses cache. Values should be in milliseconds or `nil` to disable expiration.

Available caches:

* `:activity_pub` - activity pub routes (except question activities). Defaults to `nil` (no expiration).
* `:activity_pub_question` - activity pub routes (question activities). Defaults to `30_000` (30 seconds).
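A sketch restating the defaults listed above as a single config entry:

```
config :pleroma, :web_cache_ttl,
  activity_pub: nil,
  activity_pub_question: 30_000
```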
63
lib/pleroma/activity/ir/topics.ex
Normal file

@@ -0,0 +1,63 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Activity.Ir.Topics do
  alias Pleroma.Object
  alias Pleroma.Web.ActivityPub.Visibility

  def get_activity_topics(activity) do
    activity
    |> Object.normalize()
    |> generate_topics(activity)
    |> List.flatten()
  end

  defp generate_topics(%{data: %{"type" => "Answer"}}, _) do
    []
  end

  defp generate_topics(object, activity) do
    ["user", "list"] ++ visibility_tags(object, activity)
  end

  defp visibility_tags(object, activity) do
    case Visibility.get_visibility(activity) do
      "public" ->
        if activity.local do
          ["public", "public:local"]
        else
          ["public"]
        end
        |> item_creation_tags(object, activity)

      "direct" ->
        ["direct"]

      _ ->
        []
    end
  end

  defp item_creation_tags(tags, %{data: %{"type" => "Create"}} = object, activity) do
    tags ++ hashtags_to_topics(object) ++ attachment_topics(object, activity)
  end

  defp item_creation_tags(tags, _, _) do
    tags
  end

  defp hashtags_to_topics(%{data: %{"tag" => tags}}) do
    tags
    |> Enum.filter(&is_bitstring(&1))
    |> Enum.map(fn tag -> "hashtag:" <> tag end)
  end

  defp hashtags_to_topics(_), do: []

  defp attachment_topics(%{data: %{"attachment" => []}}, _act), do: []

  defp attachment_topics(_object, %{local: true}), do: ["public:media", "public:local:media"]

  defp attachment_topics(_object, _act), do: ["public:media"]
end
@@ -31,18 +31,19 @@ def start(_type, _args) do
    children =
      [
        Pleroma.Repo,
+       Pleroma.Scheduler,
        Pleroma.Config.TransferTask,
        Pleroma.Emoji,
        Pleroma.Captcha,
        Pleroma.FlakeId,
-       Pleroma.ScheduledActivityWorker,
-       Pleroma.ActivityExpirationWorker
+       Pleroma.Daemons.ScheduledActivityDaemon,
+       Pleroma.Daemons.ActivityExpirationDaemon
      ] ++
        cachex_children() ++
        hackney_pool_children() ++
        [
-         Pleroma.Web.Federator.RetryQueue,
          Pleroma.Stats,
+         {Oban, Pleroma.Config.get(Oban)},
          %{
            id: :web_push_init,
            start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},

@@ -70,9 +71,7 @@ def start(_type, _args) do
    # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: Pleroma.Supervisor]
-   result = Supervisor.start_link(children, opts)
-   :ok = after_supervisor_start()
-   result
+   Supervisor.start_link(children, opts)
  end

  defp setup_instrumenters do

@@ -142,7 +141,7 @@ defp oauth_cleanup_enabled?,
  defp streamer_child(:test), do: []

  defp streamer_child(_) do
-   [Pleroma.Web.Streamer]
+   [Pleroma.Web.Streamer.supervisor()]
  end

  defp oauth_cleanup_child(true),

@@ -164,17 +163,4 @@ defp hackney_pool_children do
      :hackney_pool.child_spec(pool, options)
    end
  end
-
- defp after_supervisor_start do
-   with digest_config <- Application.get_env(:pleroma, :email_notifications)[:digest],
-        true <- digest_config[:active] do
-     PleromaJobQueue.schedule(
-       digest_config[:schedule],
-       :digest_emails,
-       Pleroma.DigestEmailWorker
-     )
-   end
-
-   :ok
- end
end
@@ -2,13 +2,14 @@
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.ActivityExpirationWorker do
+defmodule Pleroma.Daemons.ActivityExpirationDaemon do
  alias Pleroma.Activity
  alias Pleroma.ActivityExpiration
  alias Pleroma.Config
  alias Pleroma.Repo
  alias Pleroma.User
  alias Pleroma.Web.CommonAPI

  require Logger
  use GenServer
  import Ecto.Query

@@ -49,7 +50,10 @@ def perform(:execute, expiration_id) do
  def handle_info(:perform, state) do
    ActivityExpiration.due_expirations(@schedule_interval)
    |> Enum.each(fn expiration ->
-     PleromaJobQueue.enqueue(:activity_expiration, __MODULE__, [:execute, expiration.id])
+     Pleroma.Workers.ActivityExpirationWorker.enqueue(
+       "activity_expiration",
+       %{"activity_expiration_id" => expiration.id}
+     )
    end)

    schedule_next()
@@ -2,10 +2,11 @@
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.DigestEmailWorker do
-  import Ecto.Query
+defmodule Pleroma.Daemons.DigestEmailDaemon do
+  alias Pleroma.Repo
+  alias Pleroma.Workers.DigestEmailsWorker

-  @queue_name :digest_emails
+  import Ecto.Query

  def perform do
    config = Pleroma.Config.get([:email_notifications, :digest])

@@ -20,8 +21,10 @@ def perform do
      where: u.last_digest_emailed_at < datetime_add(^now, ^negative_interval, "day"),
      select: u
    )
-   |> Pleroma.Repo.all()
-   |> Enum.each(&PleromaJobQueue.enqueue(@queue_name, __MODULE__, [&1]))
+   |> Repo.all()
+   |> Enum.each(fn user ->
+     DigestEmailsWorker.enqueue("digest_email", %{"user_id" => user.id})
+   end)
  end

  @doc """
@@ -2,7 +2,7 @@
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.ScheduledActivityWorker do
+defmodule Pleroma.Daemons.ScheduledActivityDaemon do
  @moduledoc """
  Sends scheduled activities to the job queue.
  """

@@ -11,6 +11,7 @@ defmodule Pleroma.ScheduledActivityWorker do
  alias Pleroma.ScheduledActivity
  alias Pleroma.User
  alias Pleroma.Web.CommonAPI

  use GenServer
  require Logger

@@ -45,7 +46,10 @@ def perform(:execute, scheduled_activity_id) do
  def handle_info(:perform, state) do
    ScheduledActivity.due_activities(@schedule_interval)
    |> Enum.each(fn scheduled_activity ->
-     PleromaJobQueue.enqueue(:scheduled_activities, __MODULE__, [:execute, scheduled_activity.id])
+     Pleroma.Workers.ScheduledActivityWorker.enqueue(
+       "execute",
+       %{"activity_id" => scheduled_activity.id}
+     )
    end)

    schedule_next()
51
lib/pleroma/delivery.ex
Normal file

@@ -0,0 +1,51 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Delivery do
  use Ecto.Schema

  alias Pleroma.Delivery
  alias Pleroma.FlakeId
  alias Pleroma.Object
  alias Pleroma.Repo
  alias Pleroma.User
  alias Pleroma.User

  import Ecto.Changeset
  import Ecto.Query

  schema "deliveries" do
    belongs_to(:user, User, type: FlakeId)
    belongs_to(:object, Object)
  end

  def changeset(delivery, params \\ %{}) do
    delivery
    |> cast(params, [:user_id, :object_id])
    |> validate_required([:user_id, :object_id])
    |> foreign_key_constraint(:object_id)
    |> foreign_key_constraint(:user_id)
    |> unique_constraint(:user_id, name: :deliveries_user_id_object_id_index)
  end

  def create(object_id, user_id) do
    %Delivery{}
    |> changeset(%{user_id: user_id, object_id: object_id})
    |> Repo.insert(on_conflict: :nothing)
  end

  def get(object_id, user_id) do
    from(d in Delivery, where: d.user_id == ^user_id and d.object_id == ^object_id)
    |> Repo.one()
  end

  # A hack because user delete activities have a fake id for whatever reason
  # TODO: Get rid of this
  def delete_all_by_object_id("pleroma:fake_object_id"), do: {0, []}

  def delete_all_by_object_id(object_id) do
    from(d in Delivery, where: d.object_id == ^object_id)
    |> Repo.delete_all()
  end
end
@@ -3,9 +3,9 @@ defmodule Pleroma.Docs.Markdown do

  @spec process(keyword()) :: {:ok, String.t()}
  def process(descriptions) do
-   config_path = "docs/config.md"
+   config_path = "docs/generated_config.md"
    {:ok, file} = File.open(config_path, [:utf8, :write])
-   IO.write(file, "# Configuration\n")
+   IO.write(file, "# Generated configuration\n")
    IO.write(file, "Date of generation: #{Date.utc_today()}\n\n")

    IO.write(
@@ -9,6 +9,7 @@ defmodule Pleroma.Emails.Mailer do
  The module contains functions to delivery email using Swoosh.Mailer.
  """

+ alias Pleroma.Workers.MailerWorker
  alias Swoosh.DeliveryError

  @otp_app :pleroma

@@ -19,7 +20,12 @@ def enabled?, do: Pleroma.Config.get([__MODULE__, :enabled])

  @doc "add email to queue"
  def deliver_async(email, config \\ []) do
-   PleromaJobQueue.enqueue(:mailer, __MODULE__, [:deliver_async, email, config])
+   encoded_email =
+     email
+     |> :erlang.term_to_binary()
+     |> Base.encode64()
+
+   MailerWorker.enqueue("email", %{"encoded_email" => encoded_email, "config" => config})
  end

  @doc "callback to perform send email from queue"
@@ -90,7 +90,7 @@ def set_reachable(_), do: {:error, nil}
  def set_unreachable(url_or_host, unreachable_since \\ nil)

  def set_unreachable(url_or_host, unreachable_since) when is_binary(url_or_host) do
-   unreachable_since = unreachable_since || DateTime.utc_now()
+   unreachable_since = parse_datetime(unreachable_since) || NaiveDateTime.utc_now()
    host = host(url_or_host)
    existing_record = Repo.get_by(Instance, %{host: host})

@@ -114,4 +114,10 @@ def set_unreachable(url_or_host, unreachable_since) when is_binary(url_or_host)
  end

  def set_unreachable(_, _), do: {:error, nil}
+
+ defp parse_datetime(datetime) when is_binary(datetime) do
+   NaiveDateTime.from_iso8601(datetime)
+ end
+
+ defp parse_datetime(datetime), do: datetime
end
@@ -210,8 +210,10 @@ def create_notification(%Activity{} = activity, %User{} = user) do
    unless skip?(activity, user) do
      notification = %Notification{user_id: user.id, activity: activity}
      {:ok, notification} = Repo.insert(notification)
-     Streamer.stream("user", notification)
-     Streamer.stream("user:notification", notification)
+
+     ["user", "user:notification"]
+     |> Streamer.stream(notification)
+
      Push.send(notification)
      notification
    end
@@ -20,6 +20,7 @@ defmodule Pleroma.Plugs.Cache do

  - `ttl`: An expiration time (time-to-live). This value should be in milliseconds or `nil` to disable expiration. Defaults to `nil`.
  - `query_params`: Take URL query string into account (`true`), ignore it (`false`) or limit to specific params only (list). Defaults to `true`.
+ - `tracking_fun`: A function that is called on successfull responses, no matter if the request is cached or not. It should accept a conn as the first argument and the value assigned to `tracking_fun_data` as the second.

  Additionally, you can overwrite the TTL inside a controller action by assigning `cache_ttl` to the connection struct:

@@ -56,6 +57,11 @@ def call(%{method: "GET"} = conn, opts) do
      {:ok, nil} ->
        cache_resp(conn, opts)

+     {:ok, {content_type, body, tracking_fun_data}} ->
+       conn = opts.tracking_fun.(conn, tracking_fun_data)
+
+       send_cached(conn, {content_type, body})
+
      {:ok, record} ->
        send_cached(conn, record)

@@ -88,9 +94,17 @@ defp cache_resp(conn, opts) do
        ttl = Map.get(conn.assigns, :cache_ttl, opts.ttl)
        key = cache_key(conn, opts)
        content_type = content_type(conn)
-       record = {content_type, body}

-       Cachex.put(:web_resp_cache, key, record, ttl: ttl)
+       conn =
+         unless opts[:tracking_fun] do
+           Cachex.put(:web_resp_cache, key, {content_type, body}, ttl: ttl)
+           conn
+         else
+           tracking_fun_data = Map.get(conn.assigns, :tracking_fun_data, nil)
+           Cachex.put(:web_resp_cache, key, {content_type, body, tracking_fun_data}, ttl: ttl)
+
+           opts.tracking_fun.(conn, tracking_fun_data)
+         end
+
        put_resp_header(conn, "x-cache", "MISS from Pleroma")

@@ -15,7 +15,8 @@ def call(%{assigns: %{valid_signature: true}} = conn, _opts) do
  end

  def call(conn, _opts) do
-   [signature | _] = get_req_header(conn, "signature")
+   headers = get_req_header(conn, "signature")
+   signature = Enum.at(headers, 0)

    if signature do
      # set (request-target) header to the appropriate value
7
lib/pleroma/scheduler.ex
Normal file

@@ -0,0 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Scheduler do
  use Quantum.Scheduler, otp_app: :pleroma
end
@@ -11,6 +11,7 @@ defmodule Pleroma.User do
  alias Comeonin.Pbkdf2
  alias Ecto.Multi
  alias Pleroma.Activity
+ alias Pleroma.Delivery
  alias Pleroma.Keys
  alias Pleroma.Notification
  alias Pleroma.Object

@@ -27,6 +28,7 @@ defmodule Pleroma.User do
  alias Pleroma.Web.OStatus
  alias Pleroma.Web.RelMe
  alias Pleroma.Web.Websub
+ alias Pleroma.Workers.BackgroundWorker

  require Logger

@@ -61,6 +63,7 @@ defmodule Pleroma.User do
    field(:last_digest_emailed_at, :naive_datetime)
    has_many(:notifications, Notification)
    has_many(:registrations, Registration)
+   has_many(:deliveries, Delivery)
    embeds_one(:info, User.Info)

    timestamps()

@@ -174,11 +177,25 @@ def following_count(%User{} = user) do
    |> Repo.aggregate(:count, :id)
  end

+ defp truncate_if_exists(params, key, max_length) do
+   if Map.has_key?(params, key) and is_binary(params[key]) do
+     {value, _chopped} = String.split_at(params[key], max_length)
+     Map.put(params, key, value)
+   else
+     params
+   end
+ end
+
  def remote_user_creation(params) do
    bio_limit = Pleroma.Config.get([:instance, :user_bio_length], 5000)
    name_limit = Pleroma.Config.get([:instance, :user_name_length], 100)

-   params = Map.put(params, :info, params[:info] || %{})
+   params =
+     params
+     |> Map.put(:info, params[:info] || %{})
+     |> truncate_if_exists(:name, name_limit)
+     |> truncate_if_exists(:bio, bio_limit)

    info_cng = User.Info.remote_user_creation(%User.Info{}, params[:info])

    changes =

@@ -633,8 +650,9 @@ def get_or_fetch_by_nickname(nickname) do
  end

  @doc "Fetch some posts when the user has just been federated with"
- def fetch_initial_posts(user),
-   do: PleromaJobQueue.enqueue(:background, __MODULE__, [:fetch_initial_posts, user])
+ def fetch_initial_posts(user) do
+   BackgroundWorker.enqueue("fetch_initial_posts", %{"user_id" => user.id})
+ end

  @spec get_followers_query(User.t(), pos_integer() | nil) :: Ecto.Query.t()
  def get_followers_query(%User{} = user, nil) do

@@ -1064,7 +1082,7 @@ def unblock_domain(user, domain) do
  end

  def deactivate_async(user, status \\ true) do
-   PleromaJobQueue.enqueue(:background, __MODULE__, [:deactivate_async, user, status])
+   BackgroundWorker.enqueue("deactivate_user", %{"user_id" => user.id, "status" => status})
  end

  def deactivate(%User{} = user, status \\ true) do

@@ -1092,9 +1110,9 @@ def update_notification_settings(%User{} = user, settings \\ %{}) do
    |> update_and_set_cache()
  end

- @spec delete(User.t()) :: :ok
- def delete(%User{} = user),
-   do: PleromaJobQueue.enqueue(:background, __MODULE__, [:delete, user])
+ def delete(%User{} = user) do
+   BackgroundWorker.enqueue("delete_user", %{"user_id" => user.id})
+ end

  @spec perform(atom(), User.t()) :: {:ok, User.t()}
  def perform(:delete, %User{} = user) do

@@ -1201,21 +1219,20 @@ def external_users(opts \\ []) do
    Repo.all(query)
  end

- def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers),
-   do:
-     PleromaJobQueue.enqueue(:background, __MODULE__, [
-       :blocks_import,
-       blocker,
-       blocked_identifiers
-     ])
+ def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers) do
+   BackgroundWorker.enqueue("blocks_import", %{
+     "blocker_id" => blocker.id,
+     "blocked_identifiers" => blocked_identifiers
+   })
+ end

- def follow_import(%User{} = follower, followed_identifiers) when is_list(followed_identifiers),
-   do:
-     PleromaJobQueue.enqueue(:background, __MODULE__, [
-       :follow_import,
-       follower,
-       followed_identifiers
-     ])
+ def follow_import(%User{} = follower, followed_identifiers)
+     when is_list(followed_identifiers) do
+   BackgroundWorker.enqueue("follow_import", %{
+     "follower_id" => follower.id,
+     "followed_identifiers" => followed_identifiers
+   })
+ end

  def delete_user_activities(%User{ap_id: ap_id} = user) do
    ap_id

@@ -1625,6 +1642,18 @@ def is_internal_user?(%User{nickname: nil}), do: true
  def is_internal_user?(%User{local: true, nickname: "internal." <> _}), do: true
  def is_internal_user?(_), do: false

+ # A hack because user delete activities have a fake id for whatever reason
+ # TODO: Get rid of this
+ def get_delivered_users_by_object_id("pleroma:fake_object_id"), do: []
+
+ def get_delivered_users_by_object_id(object_id) do
+   from(u in User,
+     inner_join: delivery in assoc(u, :deliveries),
+     where: delivery.object_id == ^object_id
+   )
+   |> Repo.all()
+ end
+
  def change_email(user, email) do
    user
    |> cast(%{email: email}, [:email])
@@ -242,6 +242,13 @@ def set_keys(info, keys) do
  end

  def remote_user_creation(info, params) do
+   params =
+     if Map.has_key?(params, :fields) do
+       Map.put(params, :fields, Enum.map(params[:fields], &truncate_field/1))
+     else
+       params
+     end
+
    info
    |> cast(params, [
      :ap_enabled,

@@ -326,6 +333,16 @@ defp valid_field?(%{"name" => name, "value" => value}) do

  defp valid_field?(_), do: false

+ defp truncate_field(%{"name" => name, "value" => value}) do
+   {name, _chopped} =
+     String.split_at(name, Pleroma.Config.get([:instance, :account_field_name_length], 255))
+
+   {value, _chopped} =
+     String.split_at(value, Pleroma.Config.get([:instance, :account_field_value_length], 255))
+
+   %{"name" => name, "value" => value}
+ end
+
  @spec confirmation_changeset(Info.t(), keyword()) :: Changeset.t()
  def confirmation_changeset(info, opts) do
    need_confirmation? = Keyword.get(opts, :need_confirmation)
@@ -4,6 +4,7 @@

defmodule Pleroma.Web.ActivityPub.ActivityPub do
  alias Pleroma.Activity
+ alias Pleroma.Activity.Ir.Topics
  alias Pleroma.Config
  alias Pleroma.Conversation
  alias Pleroma.Notification

@@ -17,7 +18,9 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.MRF
  alias Pleroma.Web.ActivityPub.Transmogrifier
+ alias Pleroma.Web.Streamer
  alias Pleroma.Web.WebFinger
+ alias Pleroma.Workers.BackgroundWorker

  import Ecto.Query
  import Pleroma.Web.ActivityPub.Utils

@@ -146,7 +149,7 @@ def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when
        activity
      end

-   PleromaJobQueue.enqueue(:background, Pleroma.Web.RichMedia.Helpers, [:fetch, activity])
+   BackgroundWorker.enqueue("fetch_data_for_activity", %{"activity_id" => activity.id})

    Notification.create_notifications(activity)
    SubscriptionNotification.create_notifications(activity)

@@ -188,9 +191,7 @@ def stream_out_participations(participations) do
    participations
    |> Repo.preload(:user)

-   Enum.each(participations, fn participation ->
-     Pleroma.Web.Streamer.stream("participation", participation)
-   end)
+   Streamer.stream("participation", participations)
  end

  def stream_out_participations(%Object{data: %{"context" => context}}, user) do

@@ -209,41 +210,15 @@ def stream_out_participations(%Object{data: %{"context" => context}}, user) do

  def stream_out_participations(_, _), do: :noop

- def stream_out(activity) do
-   if activity.data["type"] in ["Create", "Announce", "Delete"] do
-     object = Object.normalize(activity)
-     # Do not stream out poll replies
-     unless object.data["type"] == "Answer" do
-       Pleroma.Web.Streamer.stream("user", activity)
-       Pleroma.Web.Streamer.stream("list", activity)
-
-       if get_visibility(activity) == "public" do
-         Pleroma.Web.Streamer.stream("public", activity)
-
-         if activity.local do
-           Pleroma.Web.Streamer.stream("public:local", activity)
-         end
-
-         if activity.data["type"] in ["Create"] do
-           object.data
-           |> Map.get("tag", [])
-           |> Enum.filter(fn tag -> is_bitstring(tag) end)
-           |> Enum.each(fn tag -> Pleroma.Web.Streamer.stream("hashtag:" <> tag, activity) end)
-
-           if object.data["attachment"] != [] do
-             Pleroma.Web.Streamer.stream("public:media", activity)
-
-             if activity.local do
-               Pleroma.Web.Streamer.stream("public:local:media", activity)
-             end
-           end
-         end
-       else
-         if get_visibility(activity) == "direct",
-           do: Pleroma.Web.Streamer.stream("direct", activity)
-       end
-     end
-   end
+ def stream_out(%Activity{data: %{"type" => data_type}} = activity)
+     when data_type in ["Create", "Announce", "Delete"] do
+   activity
+   |> Topics.get_activity_topics()
+   |> Streamer.stream(activity)
+ end
+
+ def stream_out(_activity) do
+   :noop
  end

  def create(%{to: to, actor: actor, context: context, object: object} = params, fake \\ false) do
@@ -6,6 +6,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do
  use Pleroma.Web, :controller

  alias Pleroma.Activity
+ alias Pleroma.Delivery
  alias Pleroma.Object
  alias Pleroma.Object.Fetcher
  alias Pleroma.User

@@ -23,7 +24,12 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubController do

  action_fallback(:errors)

- plug(Pleroma.Plugs.Cache, [query_params: false] when action in [:activity, :object])
+ plug(
+   Pleroma.Plugs.Cache,
+   [query_params: false, tracking_fun: &__MODULE__.track_object_fetch/2]
+   when action in [:activity, :object]
+ )
+
  plug(Pleroma.Web.FederatingPlug when action in [:inbox, :relay])
  plug(:set_requester_reachable when action in [:inbox])
  plug(:relay_active? when action in [:relay])

@@ -54,6 +60,7 @@ def object(conn, %{"uuid" => uuid}) do
         %Object{} = object <- Object.get_cached_by_ap_id(ap_id),
         {_, true} <- {:public?, Visibility.is_public?(object)} do
      conn
+     |> assign(:tracking_fun_data, object.id)
      |> set_cache_ttl_for(object)
      |> put_resp_content_type("application/activity+json")
      |> put_view(ObjectView)

@@ -64,6 +71,16 @@ def object(conn, %{"uuid" => uuid}) do
    end
  end

+ def track_object_fetch(conn, nil), do: conn
+
+ def track_object_fetch(conn, object_id) do
+   with %{assigns: %{user: %User{id: user_id}}} <- conn do
+     Delivery.create(object_id, user_id)
+   end
+
+   conn
+ end
+
  def object_likes(conn, %{"uuid" => uuid, "page" => page}) do
    with ap_id <- o_status_url(conn, :object, uuid),
         %Object{} = object <- Object.get_cached_by_ap_id(ap_id),

@@ -99,6 +116,7 @@ def activity(conn, %{"uuid" => uuid}) do
         %Activity{} = activity <- Activity.normalize(ap_id),
         {_, true} <- {:public?, Visibility.is_public?(activity)} do
      conn
+     |> maybe_set_tracking_data(activity)
      |> set_cache_ttl_for(activity)
      |> put_resp_content_type("application/activity+json")
      |> put_view(ObjectView)

@@ -109,6 +127,13 @@ def activity(conn, %{"uuid" => uuid}) do
    end
  end

+ defp maybe_set_tracking_data(conn, %Activity{data: %{"type" => "Create"}} = activity) do
+   object_id = Object.normalize(activity).id
+   assign(conn, :tracking_fun_data, object_id)
+ end
+
+ defp maybe_set_tracking_data(conn, _activity), do: conn
+
  defp set_cache_ttl_for(conn, %Activity{object: object}) do
    set_cache_ttl_for(conn, object)
  end
@@ -8,6 +8,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do

  alias Pleroma.HTTP
  alias Pleroma.Web.MediaProxy
+ alias Pleroma.Workers.BackgroundWorker

  require Logger

@@ -30,7 +31,7 @@ def perform(:preload, %{"object" => %{"attachment" => attachments}} = _message)
    url
    |> Enum.each(fn
      %{"href" => href} ->
-       PleromaJobQueue.enqueue(:background, __MODULE__, [:prefetch, href])
+       BackgroundWorker.enqueue("media_proxy_prefetch", %{"url" => href})

      x ->
        Logger.debug("Unhandled attachment URL object #{inspect(x)}")

@@ -46,7 +47,7 @@ def filter(
        %{"type" => "Create", "object" => %{"attachment" => attachments} = _object} = message
      )
      when is_list(attachments) and length(attachments) > 0 do
-   PleromaJobQueue.enqueue(:background, __MODULE__, [:preload, message])
+   BackgroundWorker.enqueue("media_proxy_preload", %{"message" => message})

    {:ok, message}
  end
@@ -5,8 +5,10 @@
defmodule Pleroma.Web.ActivityPub.Publisher do
  alias Pleroma.Activity
  alias Pleroma.Config
+ alias Pleroma.Delivery
  alias Pleroma.HTTP
  alias Pleroma.Instances
+ alias Pleroma.Object
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.Relay
  alias Pleroma.Web.ActivityPub.Transmogrifier

@@ -84,6 +86,15 @@ def publish_one(%{inbox: inbox, json: json, actor: %User{} = actor, id: id} = pa
    end
  end

+ def publish_one(%{actor_id: actor_id} = params) do
+   actor = User.get_cached_by_id(actor_id)
+
+   params
+   |> Map.delete(:actor_id)
+   |> Map.put(:actor, actor)
+   |> publish_one()
+ end
+
  defp should_federate?(inbox, public) do
    if public do
      true

@@ -107,7 +118,18 @@ defp recipients(actor, activity) do
        {:ok, []}
      end

-   Pleroma.Web.Salmon.remote_users(actor, activity) ++ followers
+   fetchers =
+     with %Activity{data: %{"type" => "Delete"}} <- activity,
+          %Object{id: object_id} <- Object.normalize(activity),
+          fetchers <- User.get_delivered_users_by_object_id(object_id),
+          _ <- Delivery.delete_all_by_object_id(object_id) do
+       fetchers
+     else
+       _ ->
+         []
+     end
+
+   Pleroma.Web.Salmon.remote_users(actor, activity) ++ followers ++ fetchers
  end

  defp get_cc_ap_ids(ap_id, recipients) do

@@ -159,7 +181,8 @@ def determine_inbox(
  Publishes an activity with BCC to all relevant peers.
  """

- def publish(actor, %{data: %{"bcc" => bcc}} = activity) when is_list(bcc) and bcc != [] do
+ def publish(%User{} = actor, %{data: %{"bcc" => bcc}} = activity)
+     when is_list(bcc) and bcc != [] do
    public = is_public?(activity)
    {:ok, data} = Transmogrifier.prepare_outgoing(activity.data)

@@ -186,7 +209,7 @@ def publish(actor, %{data: %{"bcc" => bcc}} = activity) when is_list(bcc) and bc
      Pleroma.Web.Federator.Publisher.enqueue_one(__MODULE__, %{
        inbox: inbox,
        json: json,
-       actor: actor,
+       actor_id: actor.id,
        id: activity.data["id"],
        unreachable_since: unreachable_since
      })

@@ -221,7 +244,7 @@ def publish(%User{} = actor, %Activity{} = activity) do
        %{
          inbox: inbox,
          json: json,
-         actor: actor,
+         actor_id: actor.id,
          id: activity.data["id"],
          unreachable_since: unreachable_since
        }
@@ -15,6 +15,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Web.ActivityPub.Visibility
  alias Pleroma.Web.Federator
+ alias Pleroma.Workers.TransmogrifierWorker

  import Ecto.Query

@@ -1051,7 +1052,7 @@ def upgrade_user_from_ap_id(ap_id) do
         already_ap <- User.ap_enabled?(user),
         {:ok, user} <- user |> User.upgrade_changeset(data) |> User.update_and_set_cache() do
      unless already_ap do
-       PleromaJobQueue.enqueue(:transmogrifier, __MODULE__, [:user_upgrade, user])
+       TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
      end

      {:ok, user}
@@ -167,14 +167,7 @@ def create_context(context) do
  @spec maybe_federate(any()) :: :ok
  def maybe_federate(%Activity{local: true} = activity) do
    if Pleroma.Config.get!([:instance, :federating]) do
-     priority =
-       case activity.data["type"] do
-         "Delete" -> 10
-         "Create" -> 1
-         _ -> 5
-       end
-
-     Pleroma.Web.Federator.publish(activity, priority)
+     Pleroma.Web.Federator.publish(activity)
    end

    :ok
@@ -10,16 +10,17 @@ defmodule Pleroma.Web.Federator do
  alias Pleroma.Web.ActivityPub.Transmogrifier
  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Web.Federator.Publisher
- alias Pleroma.Web.Federator.RetryQueue
  alias Pleroma.Web.OStatus
  alias Pleroma.Web.Websub
+ alias Pleroma.Workers.PublisherWorker
+ alias Pleroma.Workers.ReceiverWorker
+ alias Pleroma.Workers.SubscriberWorker

  require Logger

  def init do
-   # 1 minute
-   Process.sleep(1000 * 60)
-   refresh_subscriptions()
+   # To do: consider removing this call in favor of scheduled execution (`quantum`-based)
+   refresh_subscriptions(schedule_in: 60)
  end

  @doc "Addresses [memory leaks on recursive replies fetching](https://git.pleroma.social/pleroma/pleroma/issues/161)"

@@ -37,50 +38,38 @@ def allowed_incoming_reply_depth?(depth) do
  # Client API

  def incoming_doc(doc) do
-   PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_doc, doc])
+   ReceiverWorker.enqueue("incoming_doc", %{"body" => doc})
  end

  def incoming_ap_doc(params) do
-   PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_ap_doc, params])
+   ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params})
  end

- def publish(activity, priority \\ 1) do
-   PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish, activity], priority)
+ def publish(%{id: "pleroma:fakeid"} = activity) do
+   perform(:publish, activity)
+ end
+
+ def publish(activity) do
+   PublisherWorker.enqueue("publish", %{"activity_id" => activity.id})
  end

  def verify_websub(websub) do
-   PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:verify_websub, websub])
+   SubscriberWorker.enqueue("verify_websub", %{"websub_id" => websub.id})
  end

- def request_subscription(sub) do
-   PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:request_subscription, sub])
+ def request_subscription(websub) do
+   SubscriberWorker.enqueue("request_subscription", %{"websub_id" => websub.id})
  end

- def refresh_subscriptions do
-   PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:refresh_subscriptions])
+ def refresh_subscriptions(worker_args \\ []) do
+   SubscriberWorker.enqueue("refresh_subscriptions", %{}, worker_args ++ [max_attempts: 1])
  end

  # Job Worker Callbacks

- def perform(:refresh_subscriptions) do
-   Logger.debug("Federator running refresh subscriptions")
-   Websub.refresh_subscriptions()
-
-   spawn(fn ->
-     # 6 hours
-     Process.sleep(1000 * 60 * 60 * 6)
-     refresh_subscriptions()
-   end)
- end
-
- def perform(:request_subscription, websub) do
-   Logger.debug("Refreshing #{websub.topic}")
-
-   with {:ok, websub} <- Websub.request_subscription(websub) do
-     Logger.debug("Successfully refreshed #{websub.topic}")
-   else
-     _e -> Logger.debug("Couldn't refresh #{websub.topic}")
-   end
- end
+ @spec perform(atom(), module(), any()) :: {:ok, any()} | {:error, any()}
+ def perform(:publish_one, module, params) do
+   apply(module, :publish_one, [params])
+ end

  def perform(:publish, activity) do

@@ -92,14 +81,6 @@ def perform(:publish, activity) do
    end
  end

- def perform(:verify_websub, websub) do
-   Logger.debug(fn ->
-     "Running WebSub verification for #{websub.id} (#{websub.topic}, #{websub.callback})"
-   end)
-
-   Websub.verify(websub)
- end
-
  def perform(:incoming_doc, doc) do
    Logger.info("Got document, trying to parse")
    OStatus.handle_incoming(doc)

@@ -130,22 +111,27 @@ def perform(:incoming_ap_doc, params) do
    end
  end

- def perform(
-       :publish_single_websub,
-       %{xml: _xml, topic: _topic, callback: _callback, secret: _secret} = params
-     ) do
-   case Websub.publish_one(params) do
-     {:ok, _} ->
-       :ok
-
-     {:error, _} ->
-       RetryQueue.enqueue(params, Websub)
-   end
- end
+ def perform(:request_subscription, websub) do
+   Logger.debug("Refreshing #{websub.topic}")
+
+   with {:ok, websub} <- Websub.request_subscription(websub) do
+     Logger.debug("Successfully refreshed #{websub.topic}")
+   else
+     _e -> Logger.debug("Couldn't refresh #{websub.topic}")
+   end
+ end

- def perform(type, _) do
-   Logger.debug(fn -> "Unknown task: #{type}" end)
-   {:error, "Don't know what to do with this"}
- end
+ def perform(:verify_websub, websub) do
+   Logger.debug(fn ->
+     "Running WebSub verification for #{websub.id} (#{websub.topic}, #{websub.callback})"
+   end)
+
+   Websub.verify(websub)
+ end
+
+ def perform(:refresh_subscriptions) do
+   Logger.debug("Federator running refresh subscriptions")
+   Websub.refresh_subscriptions()
+ end

  def ap_enabled_actor(id) do
@@ -6,7 +6,7 @@ defmodule Pleroma.Web.Federator.Publisher do
  alias Pleroma.Activity
  alias Pleroma.Config
  alias Pleroma.User
- alias Pleroma.Web.Federator.RetryQueue
+ alias Pleroma.Workers.PublisherWorker

  require Logger

@@ -30,23 +30,11 @@ defmodule Pleroma.Web.Federator.Publisher do
  Enqueue publishing a single activity.
  """
  @spec enqueue_one(module(), Map.t()) :: :ok
- def enqueue_one(module, %{} = params),
-   do: PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish_one, module, params])
-
- @spec perform(atom(), module(), any()) :: {:ok, any()} | {:error, any()}
- def perform(:publish_one, module, params) do
-   case apply(module, :publish_one, [params]) do
-     {:ok, _} ->
-       :ok
-
-     {:error, _e} ->
-       RetryQueue.enqueue(params, module)
-   end
- end
-
- def perform(type, _, _) do
-   Logger.debug("Unknown task: #{type}")
-   {:error, "Don't know what to do with this"}
- end
+ def enqueue_one(module, %{} = params) do
+   PublisherWorker.enqueue(
+     "publish_one",
+     %{"module" => to_string(module), "params" => params}
+   )
+ end

  @doc """
@@ -1,239 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.Federator.RetryQueue do
  use GenServer

  require Logger

  def init(args) do
    queue_table = :ets.new(:pleroma_retry_queue, [:bag, :protected])

    {:ok, %{args | queue_table: queue_table, running_jobs: :sets.new()}}
  end

  def start_link(_) do
    enabled =
      if Pleroma.Config.get(:env) == :test,
        do: true,
        else: Pleroma.Config.get([__MODULE__, :enabled], false)

    if enabled do
      Logger.info("Starting retry queue")

      linkres =
        GenServer.start_link(
          __MODULE__,
          %{delivered: 0, dropped: 0, queue_table: nil, running_jobs: nil},
          name: __MODULE__
        )

      maybe_kickoff_timer()
      linkres
    else
      Logger.info("Retry queue disabled")
      :ignore
    end
  end

  def enqueue(data, transport, retries \\ 0) do
    GenServer.cast(__MODULE__, {:maybe_enqueue, data, transport, retries + 1})
  end

  def get_stats do
    GenServer.call(__MODULE__, :get_stats)
  end

  def reset_stats do
    GenServer.call(__MODULE__, :reset_stats)
  end

  def get_retry_params(retries) do
    if retries > Pleroma.Config.get([__MODULE__, :max_retries]) do
      {:drop, "Max retries reached"}
    else
      {:retry, growth_function(retries)}
    end
  end

  def get_retry_timer_interval do
    Pleroma.Config.get([:retry_queue, :interval], 1000)
  end

  defp ets_count_expires(table, current_time) do
    :ets.select_count(
      table,
      [
        {
          {:"$1", :"$2"},
          [{:"=<", :"$1", {:const, current_time}}],
          [true]
        }
      ]
    )
  end

  defp ets_pop_n_expired(table, current_time, desired) do
    {popped, _continuation} =
      :ets.select(
        table,
        [
          {
            {:"$1", :"$2"},
            [{:"=<", :"$1", {:const, current_time}}],
            [:"$_"]
          }
        ],
        desired
      )

    popped
    |> Enum.each(fn e ->
      :ets.delete_object(table, e)
    end)

    popped
  end

  def maybe_start_job(running_jobs, queue_table) do
    # we don't want to hit the ets or the DateTime more times than we have to
    # could optimize slightly further by not using the count, and instead grabbing
    # up to N objects early...
    current_time = DateTime.to_unix(DateTime.utc_now())
    n_running_jobs = :sets.size(running_jobs)

    if n_running_jobs < Pleroma.Config.get([__MODULE__, :max_jobs]) do
      n_ready_jobs = ets_count_expires(queue_table, current_time)

      if n_ready_jobs > 0 do
        # figure out how many we could start
        available_job_slots = Pleroma.Config.get([__MODULE__, :max_jobs]) - n_running_jobs
        start_n_jobs(running_jobs, queue_table, current_time, available_job_slots)
      else
        running_jobs
      end
    else
      running_jobs
    end
  end

  defp start_n_jobs(running_jobs, _queue_table, _current_time, 0) do
    running_jobs
  end

  defp start_n_jobs(running_jobs, queue_table, current_time, available_job_slots)
       when available_job_slots > 0 do
    candidates = ets_pop_n_expired(queue_table, current_time, available_job_slots)

    candidates
    |> List.foldl(running_jobs, fn {_, e}, rj ->
      {:ok, pid} = Task.start(fn -> worker(e) end)
      mref = Process.monitor(pid)
      :sets.add_element(mref, rj)
    end)
  end

  def worker({:send, data, transport, retries}) do
    case transport.publish_one(data) do
      {:ok, _} ->
        GenServer.cast(__MODULE__, :inc_delivered)
        :delivered

      {:error, _reason} ->
        enqueue(data, transport, retries)
        :retry
    end
  end

  def handle_call(:get_stats, _from, %{delivered: delivery_count, dropped: drop_count} = state) do
    {:reply, %{delivered: delivery_count, dropped: drop_count}, state}
  end

  def handle_call(:reset_stats, _from, %{delivered: delivery_count, dropped: drop_count} = state) do
    {:reply, %{delivered: delivery_count, dropped: drop_count},
     %{state | delivered: 0, dropped: 0}}
  end

  def handle_cast(:reset_stats, state) do
    {:noreply, %{state | delivered: 0, dropped: 0}}
  end

  def handle_cast(
        {:maybe_enqueue, data, transport, retries},
        %{dropped: drop_count, queue_table: queue_table, running_jobs: running_jobs} = state
      ) do
    case get_retry_params(retries) do
      {:retry, timeout} ->
        :ets.insert(queue_table, {timeout, {:send, data, transport, retries}})
        running_jobs = maybe_start_job(running_jobs, queue_table)
        {:noreply, %{state | running_jobs: running_jobs}}

      {:drop, message} ->
        Logger.debug(message)
        {:noreply, %{state | dropped: drop_count + 1}}
    end
  end

  def handle_cast(:kickoff_timer, state) do
    retry_interval = get_retry_timer_interval()
    Process.send_after(__MODULE__, :retry_timer_run, retry_interval)
    {:noreply, state}
  end

  def handle_cast(:inc_delivered, %{delivered: delivery_count} = state) do
    {:noreply, %{state | delivered: delivery_count + 1}}
  end

  def handle_cast(:inc_dropped, %{dropped: drop_count} = state) do
    {:noreply, %{state | dropped: drop_count + 1}}
  end

  def handle_info({:send, data, transport, retries}, %{delivered: delivery_count} = state) do
    case transport.publish_one(data) do
      {:ok, _} ->
        {:noreply, %{state | delivered: delivery_count + 1}}

      {:error, _reason} ->
        enqueue(data, transport, retries)
        {:noreply, state}
    end
  end

  def handle_info(
        :retry_timer_run,
        %{queue_table: queue_table, running_jobs: running_jobs} = state
      ) do
    maybe_kickoff_timer()
    running_jobs = maybe_start_job(running_jobs, queue_table)
    {:noreply, %{state | running_jobs: running_jobs}}
  end

  def handle_info({:DOWN, ref, :process, _pid, _reason}, state) do
    %{running_jobs: running_jobs, queue_table: queue_table} = state
    running_jobs = :sets.del_element(ref, running_jobs)
    running_jobs = maybe_start_job(running_jobs, queue_table)
    {:noreply, %{state | running_jobs: running_jobs}}
  end

  def handle_info(unknown, state) do
    Logger.debug("RetryQueue: don't know what to do with #{inspect(unknown)}, ignoring")
    {:noreply, state}
  end

  if Pleroma.Config.get(:env) == :test do
    defp growth_function(_retries) do
      _shutit = Pleroma.Config.get([__MODULE__, :initial_timeout])
      DateTime.to_unix(DateTime.utc_now()) - 1
    end
  else
    defp growth_function(retries) do
      round(Pleroma.Config.get([__MODULE__, :initial_timeout]) * :math.pow(retries, 3)) +
        DateTime.to_unix(DateTime.utc_now())
    end
  end

  defp maybe_kickoff_timer do
    GenServer.cast(__MODULE__, :kickoff_timer)
  end
end
@ -8,6 +8,7 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
   alias Pleroma.Repo
   alias Pleroma.User
   alias Pleroma.Web.OAuth.Token
+  alias Pleroma.Web.Streamer

   @behaviour :cowboy_websocket

@ -24,7 +25,7 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
   ]
   @anonymous_streams ["public", "public:local", "hashtag"]

-  # Handled by periodic keepalive in Pleroma.Web.Streamer.
+  # Handled by periodic keepalive in Pleroma.Web.Streamer.Ping.
   @timeout :infinity

   def init(%{qs: qs} = req, state) do

@ -65,7 +66,7 @@ def websocket_info(:subscribe, state) do
       }, topic #{state.topic}"
     )

-    Pleroma.Web.Streamer.add_socket(state.topic, streamer_socket(state))
+    Streamer.add_socket(state.topic, streamer_socket(state))
     {:ok, state}
   end

@ -80,7 +81,7 @@ def terminate(reason, _req, state) do
       }, topic #{state.topic || "?"}: #{inspect(reason)}"
     )

-    Pleroma.Web.Streamer.remove_socket(state.topic, streamer_socket(state))
+    Streamer.remove_socket(state.topic, streamer_socket(state))
     :ok
   end
@ -17,6 +17,7 @@ defmodule Pleroma.Web.OAuth.Token.CleanWorker do
   )

   alias Pleroma.Web.OAuth.Token
+  alias Pleroma.Workers.BackgroundWorker

   def start_link(_), do: GenServer.start_link(__MODULE__, %{})

@ -27,9 +28,11 @@ def init(_) do

   @doc false
   def handle_info(:perform, state) do
-    Token.delete_expired_tokens()
+    BackgroundWorker.enqueue("clean_expired_tokens", %{})

     Process.send_after(self(), :perform, @interval)
     {:noreply, state}
   end
+
+  def perform(:clean), do: Token.delete_expired_tokens()
 end
@ -3,7 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Web.Push do
-  alias Pleroma.Web.Push.Impl
+  alias Pleroma.Workers.WebPusherWorker

   require Logger

@ -31,6 +31,7 @@ def enabled do
     end
   end

-  def send(notification),
-    do: PleromaJobQueue.enqueue(:web_push, Impl, [notification])
+  def send(notification) do
+    WebPusherWorker.enqueue("web_push", %{"notification_id" => notification.id})
+  end
 end
@ -81,6 +81,7 @@ defp parse_url(url) do
     {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: @hackney_options)

     html
+    |> parse_html
     |> maybe_parse()
     |> Map.put(:url, url)
     |> clean_parsed_data()

@ -91,6 +92,8 @@ defp parse_url(url) do
     end
   end

+  defp parse_html(html), do: Floki.parse(html)
+
   defp maybe_parse(html) do
     Enum.reduce_while(parsers(), %{}, fn parser, acc ->
       case parser.parse(html, acc) do

@ -100,7 +103,8 @@ defp maybe_parse(html) do
     end)
   end

-  defp check_parsed_data(%{title: title} = data) when is_binary(title) and byte_size(title) > 0 do
+  defp check_parsed_data(%{title: title} = data)
+       when is_binary(title) and byte_size(title) > 0 do
     {:ok, data}
   end
@ -135,6 +135,7 @@ defmodule Pleroma.Web.Router do

   pipeline :http_signature do
     plug(Pleroma.Web.Plugs.HTTPSignaturePlug)
+    plug(Pleroma.Web.Plugs.MappedSignatureToIdentityPlug)
   end

   scope "/api/pleroma", Pleroma.Web.TwitterAPI do

@ -542,6 +543,7 @@ defmodule Pleroma.Web.Router do

   scope "/", Pleroma.Web do
     pipe_through(:ostatus)
+    pipe_through(:http_signature)

     get("/objects/:uuid", OStatus.OStatusController, :object)
     get("/activities/:uuid", OStatus.OStatusController, :activity)
@ -170,6 +170,15 @@ def publish_one(%{recipient: url, feed: feed} = params) when is_binary(url) do
     end
   end

+  def publish_one(%{recipient_id: recipient_id} = params) do
+    recipient = User.get_cached_by_id(recipient_id)
+
+    params
+    |> Map.delete(:recipient_id)
+    |> Map.put(:recipient, recipient)
+    |> publish_one()
+  end
+
   def publish_one(_), do: :noop

   @supported_activities [

@ -218,7 +227,7 @@ def publish(%{info: %{keys: keys}} = user, %{data: %{"type" => type}} = activity
       Logger.debug(fn -> "Sending Salmon to #{remote_user.ap_id}" end)

       Publisher.enqueue_one(__MODULE__, %{
-        recipient: remote_user,
+        recipient_id: remote_user.id,
         feed: feed,
         unreachable_since: reachable_urls_metadata[remote_user.info.salmon]
       })
@ -1,326 +0,0 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.Streamer do
  use GenServer
  require Logger
  alias Pleroma.Activity
  alias Pleroma.Config
  alias Pleroma.Conversation.Participation
  alias Pleroma.Notification
  alias Pleroma.Object
  alias Pleroma.SubscriptionNotification
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Visibility
  alias Pleroma.Web.CommonAPI
  alias Pleroma.Web.MastodonAPI.NotificationView

  @keepalive_interval :timer.seconds(30)

  def start_link(_) do
    GenServer.start_link(__MODULE__, %{}, name: __MODULE__)
  end

  def add_socket(topic, socket) do
    GenServer.cast(__MODULE__, %{action: :add, socket: socket, topic: topic})
  end

  def remove_socket(topic, socket) do
    GenServer.cast(__MODULE__, %{action: :remove, socket: socket, topic: topic})
  end

  def stream(topic, item) do
    GenServer.cast(__MODULE__, %{action: :stream, topic: topic, item: item})
  end

  def init(args) do
    Process.send_after(self(), %{action: :ping}, @keepalive_interval)

    {:ok, args}
  end

  def handle_info(%{action: :ping}, topics) do
    topics
    |> Map.values()
    |> List.flatten()
    |> Enum.each(fn socket ->
      Logger.debug("Sending keepalive ping")
      send(socket.transport_pid, {:text, ""})
    end)

    Process.send_after(self(), %{action: :ping}, @keepalive_interval)

    {:noreply, topics}
  end

  def handle_cast(%{action: :stream, topic: "direct", item: item}, topics) do
    recipient_topics =
      User.get_recipients_from_activity(item)
      |> Enum.map(fn %{id: id} -> "direct:#{id}" end)

    Enum.each(recipient_topics || [], fn user_topic ->
      Logger.debug("Trying to push direct message to #{user_topic}\n\n")
      push_to_socket(topics, user_topic, item)
    end)

    {:noreply, topics}
  end

  def handle_cast(%{action: :stream, topic: "participation", item: participation}, topics) do
    user_topic = "direct:#{participation.user_id}"
    Logger.debug("Trying to push a conversation participation to #{user_topic}\n\n")

    push_to_socket(topics, user_topic, participation)

    {:noreply, topics}
  end

  def handle_cast(%{action: :stream, topic: "list", item: item}, topics) do
    # filter the recipient list if the activity is not public, see #270.
    recipient_lists =
      case Visibility.is_public?(item) do
        true ->
          Pleroma.List.get_lists_from_activity(item)

        _ ->
          Pleroma.List.get_lists_from_activity(item)
          |> Enum.filter(fn list ->
            owner = User.get_cached_by_id(list.user_id)

            Visibility.visible_for_user?(item, owner)
          end)
      end

    recipient_topics =
      recipient_lists
      |> Enum.map(fn %{id: id} -> "list:#{id}" end)

    Enum.each(recipient_topics || [], fn list_topic ->
      Logger.debug("Trying to push message to #{list_topic}\n\n")
      push_to_socket(topics, list_topic, item)
    end)

    {:noreply, topics}
  end

  def handle_cast(
        %{action: :stream, topic: topic, item: %Notification{} = item},
        topics
      )
      when topic in ["user", "user:notification"] do
    topics
    |> Map.get("#{topic}:#{item.user_id}", [])
    |> Enum.each(fn socket ->
      with %User{} = user <- User.get_cached_by_ap_id(socket.assigns[:user].ap_id),
           true <- should_send?(user, item) do
        send(
          socket.transport_pid,
          {:text, represent_notification(socket.assigns[:user], item)}
        )
      end
    end)

    {:noreply, topics}
  end

  def handle_cast(%{action: :stream, topic: "user", item: item}, topics) do
    Logger.debug("Trying to push to users")

    recipient_topics =
      User.get_recipients_from_activity(item)
      |> Enum.map(fn %{id: id} -> "user:#{id}" end)

    Enum.each(recipient_topics, fn topic ->
      push_to_socket(topics, topic, item)
    end)

    {:noreply, topics}
  end

  def handle_cast(%{action: :stream, topic: topic, item: item}, topics) do
    Logger.debug("Trying to push to #{topic}")
    Logger.debug("Pushing item to #{topic}")
    push_to_socket(topics, topic, item)
    {:noreply, topics}
  end

  def handle_cast(%{action: :add, topic: topic, socket: socket}, sockets) do
    topic = internal_topic(topic, socket)
    sockets_for_topic = sockets[topic] || []
    sockets_for_topic = Enum.uniq([socket | sockets_for_topic])
    sockets = Map.put(sockets, topic, sockets_for_topic)
    Logger.debug("Got new conn for #{topic}")
    {:noreply, sockets}
  end

  def handle_cast(%{action: :remove, topic: topic, socket: socket}, sockets) do
    topic = internal_topic(topic, socket)
    sockets_for_topic = sockets[topic] || []
    sockets_for_topic = List.delete(sockets_for_topic, socket)
    sockets = Map.put(sockets, topic, sockets_for_topic)
    Logger.debug("Removed conn for #{topic}")
    {:noreply, sockets}
  end

  def handle_cast(m, state) do
    Logger.info("Unknown: #{inspect(m)}, #{inspect(state)}")
    {:noreply, state}
  end

  defp represent_update(%Activity{} = activity, %User{} = user) do
    %{
      event: "update",
      payload:
        Pleroma.Web.MastodonAPI.StatusView.render(
          "status.json",
          activity: activity,
          for: user
        )
        |> Jason.encode!()
    }
    |> Jason.encode!()
  end

  defp represent_update(%Activity{} = activity) do
    %{
      event: "update",
      payload:
        Pleroma.Web.MastodonAPI.StatusView.render(
          "status.json",
          activity: activity
        )
        |> Jason.encode!()
    }
    |> Jason.encode!()
  end

  def represent_conversation(%Participation{} = participation) do
    %{
      event: "conversation",
      payload:
        Pleroma.Web.MastodonAPI.ConversationView.render("participation.json", %{
          participation: participation,
          for: participation.user
        })
        |> Jason.encode!()
    }
    |> Jason.encode!()
  end

  @spec represent_notification(User.t(), Notification.t() | %SubscriptionNotification{}) ::
          binary()
  defp represent_notification(%User{} = user, notify) do
    event =
      case notify do
        %Notification{} -> "notification"
        %SubscriptionNotification{} -> "subscription_norification"
      end

    %{
      event: event,
      payload:
        NotificationView.render(
          "show.json",
          %{notification: notify, for: user}
        )
        |> Jason.encode!()
    }
    |> Jason.encode!()
  end

  defp should_send?(%User{} = user, %Activity{} = item) do
    blocks = user.info.blocks || []
    mutes = user.info.mutes || []
    reblog_mutes = user.info.muted_reblogs || []
    domain_blocks = Pleroma.Web.ActivityPub.MRF.subdomains_regex(user.info.domain_blocks)

    with parent when not is_nil(parent) <- Object.normalize(item),
         true <- Enum.all?([blocks, mutes, reblog_mutes], &(item.actor not in &1)),
         true <- Enum.all?([blocks, mutes], &(parent.data["actor"] not in &1)),
         %{host: item_host} <- URI.parse(item.actor),
         %{host: parent_host} <- URI.parse(parent.data["actor"]),
         false <- Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, item_host),
         false <- Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, parent_host),
         true <- thread_containment(item, user),
         false <- CommonAPI.thread_muted?(user, item) do
      true
    else
      _ -> false
    end
  end

  defp should_send?(%User{} = user, %Notification{activity: activity}) do
    should_send?(user, activity)
  end

  def push_to_socket(topics, topic, %Activity{data: %{"type" => "Announce"}} = item) do
    Enum.each(topics[topic] || [], fn socket ->
      # Get the current user so we have up-to-date blocks etc.
      if socket.assigns[:user] do
        user = User.get_cached_by_ap_id(socket.assigns[:user].ap_id)

        if should_send?(user, item) do
          send(socket.transport_pid, {:text, represent_update(item, user)})
        end
      else
        send(socket.transport_pid, {:text, represent_update(item)})
      end
    end)
  end

  def push_to_socket(topics, topic, %Participation{} = participation) do
    Enum.each(topics[topic] || [], fn socket ->
      send(socket.transport_pid, {:text, represent_conversation(participation)})
    end)
  end

  def push_to_socket(topics, topic, %Activity{
        data: %{"type" => "Delete", "deleted_activity_id" => deleted_activity_id}
      }) do
    Enum.each(topics[topic] || [], fn socket ->
      send(
        socket.transport_pid,
        {:text, %{event: "delete", payload: to_string(deleted_activity_id)} |> Jason.encode!()}
      )
    end)
  end

  def push_to_socket(_topics, _topic, %Activity{data: %{"type" => "Delete"}}), do: :noop

  def push_to_socket(topics, topic, item) do
    Enum.each(topics[topic] || [], fn socket ->
      # Get the current user so we have up-to-date blocks etc.
      if socket.assigns[:user] do
        user = User.get_cached_by_ap_id(socket.assigns[:user].ap_id)
        blocks = user.info.blocks || []
        mutes = user.info.mutes || []

        with true <- Enum.all?([blocks, mutes], &(item.actor not in &1)),
             true <- thread_containment(item, user) do
          send(socket.transport_pid, {:text, represent_update(item, user)})
        end
      else
        send(socket.transport_pid, {:text, represent_update(item)})
      end
    end)
  end

  defp internal_topic(topic, socket) when topic in ~w[user user:notification direct] do
    "#{topic}:#{socket.assigns[:user].id}"
  end

  defp internal_topic(topic, _), do: topic

  @spec thread_containment(Activity.t(), User.t()) :: boolean()
  defp thread_containment(_activity, %User{info: %{skip_thread_containment: true}}), do: true

  defp thread_containment(activity, user) do
    if Config.get([:instance, :skip_thread_containment]) do
      true
    else
      ActivityPub.contain_activity(activity, user)
    end
  end
end
lib/pleroma/web/streamer/ping.ex (new file)
@ -0,0 +1,33 @@
defmodule Pleroma.Web.Streamer.Ping do
  use GenServer
  require Logger

  alias Pleroma.Web.Streamer.State
  alias Pleroma.Web.Streamer.StreamerSocket

  @keepalive_interval :timer.seconds(30)

  def start_link(opts) do
    ping_interval = Keyword.get(opts, :ping_interval, @keepalive_interval)
    GenServer.start_link(__MODULE__, %{ping_interval: ping_interval}, name: __MODULE__)
  end

  def init(%{ping_interval: ping_interval} = args) do
    Process.send_after(self(), :ping, ping_interval)
    {:ok, args}
  end

  def handle_info(:ping, %{ping_interval: ping_interval} = state) do
    State.get_sockets()
    |> Map.values()
    |> List.flatten()
    |> Enum.each(fn %StreamerSocket{transport_pid: transport_pid} ->
      Logger.debug("Sending keepalive ping")
      send(transport_pid, {:text, ""})
    end)

    Process.send_after(self(), :ping, ping_interval)

    {:noreply, state}
  end
end
lib/pleroma/web/streamer/state.ex (new file)
@ -0,0 +1,68 @@
defmodule Pleroma.Web.Streamer.State do
  use GenServer
  require Logger

  alias Pleroma.Web.Streamer.StreamerSocket

  def start_link(_) do
    GenServer.start_link(__MODULE__, %{sockets: %{}}, name: __MODULE__)
  end

  def add_socket(topic, socket) do
    GenServer.call(__MODULE__, {:add, socket, topic})
  end

  def remove_socket(topic, socket) do
    GenServer.call(__MODULE__, {:remove, socket, topic})
  end

  def get_sockets do
    %{sockets: stream_sockets} = GenServer.call(__MODULE__, :get_state)
    stream_sockets
  end

  def init(init_arg) do
    {:ok, init_arg}
  end

  def handle_call(:get_state, _from, state) do
    {:reply, state, state}
  end

  def handle_call({:add, socket, topic}, _from, %{sockets: sockets} = state) do
    internal_topic = internal_topic(topic, socket)
    stream_socket = StreamerSocket.from_socket(socket)

    sockets_for_topic =
      sockets
      |> Map.get(internal_topic, [])
      |> List.insert_at(0, stream_socket)
      |> Enum.uniq()

    state = put_in(state, [:sockets, internal_topic], sockets_for_topic)
    Logger.debug("Got new conn for #{topic}")
    {:reply, state, state}
  end

  def handle_call({:remove, socket, topic}, _from, %{sockets: sockets} = state) do
    internal_topic = internal_topic(topic, socket)
    stream_socket = StreamerSocket.from_socket(socket)

    sockets_for_topic =
      sockets
      |> Map.get(internal_topic, [])
      |> List.delete(stream_socket)

    state = Kernel.put_in(state, [:sockets, internal_topic], sockets_for_topic)
    {:reply, state, state}
  end

  defp internal_topic(topic, socket)
       when topic in ~w[user user:notification direct] do
    "#{topic}:#{socket.assigns[:user].id}"
  end

  defp internal_topic(topic, _) do
    topic
  end
end
lib/pleroma/web/streamer/streamer.ex (new file)
@ -0,0 +1,55 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.Streamer do
  alias Pleroma.Web.Streamer.State
  alias Pleroma.Web.Streamer.Worker

  @timeout 60_000
  @mix_env Mix.env()

  def add_socket(topic, socket) do
    State.add_socket(topic, socket)
  end

  def remove_socket(topic, socket) do
    State.remove_socket(topic, socket)
  end

  def get_sockets do
    State.get_sockets()
  end

  def stream(topics, items) do
    if should_send?() do
      Task.async(fn ->
        :poolboy.transaction(
          :streamer_worker,
          &Worker.stream(&1, topics, items),
          @timeout
        )
      end)
    end
  end

  def supervisor, do: Pleroma.Web.Streamer.Supervisor

  defp should_send? do
    handle_should_send(@mix_env)
  end

  defp handle_should_send(:test) do
    case Process.whereis(:streamer_worker) do
      nil ->
        false

      pid ->
        Process.alive?(pid)
    end
  end

  defp handle_should_send(_) do
    true
  end
end
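The new facade keeps the old public API (add_socket/2, remove_socket/2, stream/2) while delegating fan-out to the poolboy-backed Worker. A minimal usage sketch, not part of the diff; the activity variable is illustrative and a running Pleroma.Web.Streamer.Supervisor is assumed:

# Sketch: push one item to a public timeline topic through the pool.
# Streamer.stream/2 checks out a :streamer_worker and calls Worker.stream/3,
# which lands in Worker.handle_call({:stream, "public", activity}, ...).
Pleroma.Web.Streamer.stream("public", activity)

# A list of topics also works, since Worker.handle_call/3 has a clause
# for is_list(topics):
Pleroma.Web.Streamer.stream(["public", "public:local"], activity)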
lib/pleroma/web/streamer/streamer_socket.ex (new file)
@ -0,0 +1,31 @@
defmodule Pleroma.Web.Streamer.StreamerSocket do
  defstruct transport_pid: nil, user: nil

  alias Pleroma.User
  alias Pleroma.Web.Streamer.StreamerSocket

  def from_socket(%{
        transport_pid: transport_pid,
        assigns: %{user: nil}
      }) do
    %StreamerSocket{
      transport_pid: transport_pid
    }
  end

  def from_socket(%{
        transport_pid: transport_pid,
        assigns: %{user: %User{} = user}
      }) do
    %StreamerSocket{
      transport_pid: transport_pid,
      user: user
    }
  end

  def from_socket(%{transport_pid: transport_pid}) do
    %StreamerSocket{
      transport_pid: transport_pid
    }
  end
end
lib/pleroma/web/streamer/supervisor.ex (new file)
@ -0,0 +1,33 @@
defmodule Pleroma.Web.Streamer.Supervisor do
  use Supervisor

  def start_link(opts) do
    Supervisor.start_link(__MODULE__, opts, name: __MODULE__)
  end

  def init(args) do
    children = [
      {Pleroma.Web.Streamer.State, args},
      {Pleroma.Web.Streamer.Ping, args},
      :poolboy.child_spec(:streamer_worker, poolboy_config())
    ]

    opts = [strategy: :one_for_one, name: Pleroma.Web.Streamer.Supervisor]
    Supervisor.init(children, opts)
  end

  defp poolboy_config do
    opts =
      Pleroma.Config.get(:streamer,
        workers: 3,
        overflow_workers: 2
      )

    [
      {:name, {:local, :streamer_worker}},
      {:worker_module, Pleroma.Web.Streamer.Worker},
      {:size, opts[:workers]},
      {:max_overflow, opts[:overflow_workers]}
    ]
  end
end
lib/pleroma/web/streamer/worker.ex (new file)
@ -0,0 +1,220 @@
defmodule Pleroma.Web.Streamer.Worker do
  use GenServer

  require Logger

  alias Pleroma.Activity
  alias Pleroma.Config
  alias Pleroma.Conversation.Participation
  alias Pleroma.Notification
  alias Pleroma.Object
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Visibility
  alias Pleroma.Web.CommonAPI
  alias Pleroma.Web.Streamer.State
  alias Pleroma.Web.Streamer.StreamerSocket
  alias Pleroma.Web.StreamerView

  def start_link(_) do
    GenServer.start_link(__MODULE__, %{}, [])
  end

  def init(init_arg) do
    {:ok, init_arg}
  end

  def stream(pid, topics, items) do
    GenServer.call(pid, {:stream, topics, items})
  end

  def handle_call({:stream, topics, item}, _from, state) when is_list(topics) do
    Enum.each(topics, fn t ->
      do_stream(%{topic: t, item: item})
    end)

    {:reply, state, state}
  end

  def handle_call({:stream, topic, items}, _from, state) when is_list(items) do
    Enum.each(items, fn i ->
      do_stream(%{topic: topic, item: i})
    end)

    {:reply, state, state}
  end

  def handle_call({:stream, topic, item}, _from, state) do
    do_stream(%{topic: topic, item: item})

    {:reply, state, state}
  end

  defp do_stream(%{topic: "direct", item: item}) do
    recipient_topics =
      User.get_recipients_from_activity(item)
      |> Enum.map(fn %{id: id} -> "direct:#{id}" end)

    Enum.each(recipient_topics, fn user_topic ->
      Logger.debug("Trying to push direct message to #{user_topic}\n\n")
      push_to_socket(State.get_sockets(), user_topic, item)
    end)
  end

  defp do_stream(%{topic: "participation", item: participation}) do
    user_topic = "direct:#{participation.user_id}"
    Logger.debug("Trying to push a conversation participation to #{user_topic}\n\n")

    push_to_socket(State.get_sockets(), user_topic, participation)
  end

  defp do_stream(%{topic: "list", item: item}) do
    # filter the recipient list if the activity is not public, see #270.
    recipient_lists =
      case Visibility.is_public?(item) do
        true ->
          Pleroma.List.get_lists_from_activity(item)

        _ ->
          Pleroma.List.get_lists_from_activity(item)
          |> Enum.filter(fn list ->
            owner = User.get_cached_by_id(list.user_id)

            Visibility.visible_for_user?(item, owner)
          end)
      end

    recipient_topics =
      recipient_lists
      |> Enum.map(fn %{id: id} -> "list:#{id}" end)

    Enum.each(recipient_topics, fn list_topic ->
      Logger.debug("Trying to push message to #{list_topic}\n\n")
      push_to_socket(State.get_sockets(), list_topic, item)
    end)
  end

  defp do_stream(%{topic: topic, item: %Notification{} = item})
       when topic in ["user", "user:notification"] do
    State.get_sockets()
    |> Map.get("#{topic}:#{item.user_id}", [])
    |> Enum.each(fn %StreamerSocket{transport_pid: transport_pid, user: socket_user} ->
      with %User{} = user <- User.get_cached_by_ap_id(socket_user.ap_id),
           true <- should_send?(user, item) do
        send(transport_pid, {:text, StreamerView.render("notification.json", socket_user, item)})
      end
    end)
  end

  defp do_stream(%{topic: "user", item: item}) do
    Logger.debug("Trying to push to users")

    recipient_topics =
      User.get_recipients_from_activity(item)
      |> Enum.map(fn %{id: id} -> "user:#{id}" end)

    Enum.each(recipient_topics, fn topic ->
      push_to_socket(State.get_sockets(), topic, item)
    end)
  end

  defp do_stream(%{topic: topic, item: item}) do
    Logger.debug("Trying to push to #{topic}")
    Logger.debug("Pushing item to #{topic}")
    push_to_socket(State.get_sockets(), topic, item)
  end

  defp should_send?(%User{} = user, %Activity{} = item) do
    blocks = user.info.blocks || []
    mutes = user.info.mutes || []
    reblog_mutes = user.info.muted_reblogs || []
    domain_blocks = Pleroma.Web.ActivityPub.MRF.subdomains_regex(user.info.domain_blocks)

    with parent when not is_nil(parent) <- Object.normalize(item),
         true <- Enum.all?([blocks, mutes, reblog_mutes], &(item.actor not in &1)),
         true <- Enum.all?([blocks, mutes], &(parent.data["actor"] not in &1)),
         %{host: item_host} <- URI.parse(item.actor),
         %{host: parent_host} <- URI.parse(parent.data["actor"]),
         false <- Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, item_host),
         false <- Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, parent_host),
         true <- thread_containment(item, user),
         false <- CommonAPI.thread_muted?(user, item) do
      true
    else
      _ -> false
    end
  end

  defp should_send?(%User{} = user, %Notification{activity: activity}) do
    should_send?(user, activity)
  end

  def push_to_socket(topics, topic, %Activity{data: %{"type" => "Announce"}} = item) do
    Enum.each(topics[topic] || [], fn %StreamerSocket{
                                        transport_pid: transport_pid,
                                        user: socket_user
                                      } ->
      # Get the current user so we have up-to-date blocks etc.
      if socket_user do
        user = User.get_cached_by_ap_id(socket_user.ap_id)

        if should_send?(user, item) do
          send(transport_pid, {:text, StreamerView.render("update.json", item, user)})
        end
      else
        send(transport_pid, {:text, StreamerView.render("update.json", item)})
      end
    end)
  end

  def push_to_socket(topics, topic, %Participation{} = participation) do
    Enum.each(topics[topic] || [], fn %StreamerSocket{transport_pid: transport_pid} ->
      send(transport_pid, {:text, StreamerView.render("conversation.json", participation)})
    end)
  end

  def push_to_socket(topics, topic, %Activity{
        data: %{"type" => "Delete", "deleted_activity_id" => deleted_activity_id}
      }) do
    Enum.each(topics[topic] || [], fn %StreamerSocket{transport_pid: transport_pid} ->
      send(
        transport_pid,
        {:text, %{event: "delete", payload: to_string(deleted_activity_id)} |> Jason.encode!()}
      )
    end)
  end

  def push_to_socket(_topics, _topic, %Activity{data: %{"type" => "Delete"}}), do: :noop

  def push_to_socket(topics, topic, item) do
    Enum.each(topics[topic] || [], fn %StreamerSocket{
                                        transport_pid: transport_pid,
                                        user: socket_user
                                      } ->
      # Get the current user so we have up-to-date blocks etc.
      if socket_user do
        user = User.get_cached_by_ap_id(socket_user.ap_id)
        blocks = user.info.blocks || []
        mutes = user.info.mutes || []

        with true <- Enum.all?([blocks, mutes], &(item.actor not in &1)),
             true <- thread_containment(item, user) do
          send(transport_pid, {:text, StreamerView.render("update.json", item, user)})
        end
      else
        send(transport_pid, {:text, StreamerView.render("update.json", item)})
      end
    end)
  end

  @spec thread_containment(Activity.t(), User.t()) :: boolean()
  defp thread_containment(_activity, %User{info: %{skip_thread_containment: true}}), do: true

  defp thread_containment(activity, user) do
    if Config.get([:instance, :skip_thread_containment]) do
      true
    else
      ActivityPub.contain_activity(activity, user)
    end
  end
end
@ -265,12 +265,7 @@ def follow_import(%{assigns: %{user: follower}} = conn, %{"list" => list}) do
             String.split(line, ",") |> List.first()
           end)
           |> List.delete("Account address") do
-      PleromaJobQueue.enqueue(:background, User, [
-        :follow_import,
-        follower,
-        followed_identifiers
-      ])
-
+      User.follow_import(follower, followed_identifiers)
       json(conn, "job started")
     end
   end

@ -281,12 +276,7 @@ def blocks_import(conn, %{"list" => %Plug.Upload{} = listfile}) do

   def blocks_import(%{assigns: %{user: blocker}} = conn, %{"list" => list}) do
     with blocked_identifiers <- String.split(list) do
-      PleromaJobQueue.enqueue(:background, User, [
-        :blocks_import,
-        blocker,
-        blocked_identifiers
-      ])
-
+      User.blocks_import(blocker, blocked_identifiers)
       json(conn, "job started")
     end
   end
lib/pleroma/web/views/streamer_view.ex (new file)
@ -0,0 +1,66 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Web.StreamerView do
  use Pleroma.Web, :view

  alias Pleroma.Activity
  alias Pleroma.Conversation.Participation
  alias Pleroma.Notification
  alias Pleroma.User
  alias Pleroma.Web.MastodonAPI.NotificationView

  def render("update.json", %Activity{} = activity, %User{} = user) do
    %{
      event: "update",
      payload:
        Pleroma.Web.MastodonAPI.StatusView.render(
          "status.json",
          activity: activity,
          for: user
        )
        |> Jason.encode!()
    }
    |> Jason.encode!()
  end

  def render("notification.json", %User{} = user, %Notification{} = notify) do
    %{
      event: "notification",
      payload:
        NotificationView.render(
          "show.json",
          %{notification: notify, for: user}
        )
        |> Jason.encode!()
    }
    |> Jason.encode!()
  end

  def render("update.json", %Activity{} = activity) do
    %{
      event: "update",
      payload:
        Pleroma.Web.MastodonAPI.StatusView.render(
          "status.json",
          activity: activity
        )
        |> Jason.encode!()
    }
    |> Jason.encode!()
  end

  def render("conversation.json", %Participation{} = participation) do
    %{
      event: "conversation",
      payload:
        Pleroma.Web.MastodonAPI.ConversationView.render("participation.json", %{
          participation: participation,
          for: participation.user
        })
        |> Jason.encode!()
    }
    |> Jason.encode!()
  end
end
lib/pleroma/workers/activity_expiration_worker.ex (new file)
@ -0,0 +1,18 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.ActivityExpirationWorker do
  use Pleroma.Workers.WorkerHelper, queue: "activity_expiration"

  @impl Oban.Worker
  def perform(
        %{
          "op" => "activity_expiration",
          "activity_expiration_id" => activity_expiration_id
        },
        _job
      ) do
    Pleroma.Daemons.ActivityExpirationDaemon.perform(:execute, activity_expiration_id)
  end
end
lib/pleroma/workers/background_worker.ex (new file)
@ -0,0 +1,69 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.BackgroundWorker do
  alias Pleroma.Activity
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy
  alias Pleroma.Web.OAuth.Token.CleanWorker

  use Pleroma.Workers.WorkerHelper, queue: "background"

  @impl Oban.Worker
  def perform(%{"op" => "fetch_initial_posts", "user_id" => user_id}, _job) do
    user = User.get_cached_by_id(user_id)
    User.perform(:fetch_initial_posts, user)
  end

  def perform(%{"op" => "deactivate_user", "user_id" => user_id, "status" => status}, _job) do
    user = User.get_cached_by_id(user_id)
    User.perform(:deactivate_async, user, status)
  end

  def perform(%{"op" => "delete_user", "user_id" => user_id}, _job) do
    user = User.get_cached_by_id(user_id)
    User.perform(:delete, user)
  end

  def perform(
        %{
          "op" => "blocks_import",
          "blocker_id" => blocker_id,
          "blocked_identifiers" => blocked_identifiers
        },
        _job
      ) do
    blocker = User.get_cached_by_id(blocker_id)
    User.perform(:blocks_import, blocker, blocked_identifiers)
  end

  def perform(
        %{
          "op" => "follow_import",
          "follower_id" => follower_id,
          "followed_identifiers" => followed_identifiers
        },
        _job
      ) do
    follower = User.get_cached_by_id(follower_id)
    User.perform(:follow_import, follower, followed_identifiers)
  end

  def perform(%{"op" => "clean_expired_tokens"}, _job) do
    CleanWorker.perform(:clean)
  end

  def perform(%{"op" => "media_proxy_preload", "message" => message}, _job) do
    MediaProxyWarmingPolicy.perform(:preload, message)
  end

  def perform(%{"op" => "media_proxy_prefetch", "url" => url}, _job) do
    MediaProxyWarmingPolicy.perform(:prefetch, url)
  end

  def perform(%{"op" => "fetch_data_for_activity", "activity_id" => activity_id}, _job) do
    activity = Activity.get_by_id(activity_id)
    Pleroma.Web.RichMedia.Helpers.perform(:fetch, activity)
  end
end
lib/pleroma/workers/digest_emails_worker.ex (new file)
@ -0,0 +1,16 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.DigestEmailsWorker do
  alias Pleroma.User

  use Pleroma.Workers.WorkerHelper, queue: "digest_emails"

  @impl Oban.Worker
  def perform(%{"op" => "digest_email", "user_id" => user_id}, _job) do
    user_id
    |> User.get_cached_by_id()
    |> Pleroma.Daemons.DigestEmailDaemon.perform()
  end
end
lib/pleroma/workers/mailer_worker.ex (new file)
@ -0,0 +1,15 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.MailerWorker do
  use Pleroma.Workers.WorkerHelper, queue: "mailer"

  @impl Oban.Worker
  def perform(%{"op" => "email", "encoded_email" => encoded_email, "config" => config}, _job) do
    encoded_email
    |> Base.decode64!()
    |> :erlang.binary_to_term()
    |> Pleroma.Emails.Mailer.deliver(config)
  end
end
lib/pleroma/workers/publisher_worker.ex (new file)
@ -0,0 +1,25 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.PublisherWorker do
  alias Pleroma.Activity
  alias Pleroma.Web.Federator

  use Pleroma.Workers.WorkerHelper, queue: "federator_outgoing"

  def backoff(attempt) when is_integer(attempt) do
    Pleroma.Workers.WorkerHelper.sidekiq_backoff(attempt, 5)
  end

  @impl Oban.Worker
  def perform(%{"op" => "publish", "activity_id" => activity_id}, _job) do
    activity = Activity.get_by_id(activity_id)
    Federator.perform(:publish, activity)
  end

  def perform(%{"op" => "publish_one", "module" => module_name, "params" => params}, _job) do
    params = Map.new(params, fn {k, v} -> {String.to_atom(k), v} end)
    Federator.perform(:publish_one, String.to_atom(module_name), params)
  end
end
lib/pleroma/workers/receiver_worker.ex (new file)
@ -0,0 +1,18 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.ReceiverWorker do
  alias Pleroma.Web.Federator

  use Pleroma.Workers.WorkerHelper, queue: "federator_incoming"

  @impl Oban.Worker
  def perform(%{"op" => "incoming_doc", "body" => doc}, _job) do
    Federator.perform(:incoming_doc, doc)
  end

  def perform(%{"op" => "incoming_ap_doc", "params" => params}, _job) do
    Federator.perform(:incoming_ap_doc, params)
  end
end
lib/pleroma/workers/scheduled_activity_worker.ex (new file)
@ -0,0 +1,12 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.ScheduledActivityWorker do
  use Pleroma.Workers.WorkerHelper, queue: "scheduled_activities"

  @impl Oban.Worker
  def perform(%{"op" => "execute", "activity_id" => activity_id}, _job) do
    Pleroma.Daemons.ScheduledActivityDaemon.perform(:execute, activity_id)
  end
end
lib/pleroma/workers/subscriber_worker.ex (new file)
@ -0,0 +1,26 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.SubscriberWorker do
  alias Pleroma.Repo
  alias Pleroma.Web.Federator
  alias Pleroma.Web.Websub

  use Pleroma.Workers.WorkerHelper, queue: "federator_outgoing"

  @impl Oban.Worker
  def perform(%{"op" => "refresh_subscriptions"}, _job) do
    Federator.perform(:refresh_subscriptions)
  end

  def perform(%{"op" => "request_subscription", "websub_id" => websub_id}, _job) do
    websub = Repo.get(Websub.WebsubClientSubscription, websub_id)
    Federator.perform(:request_subscription, websub)
  end

  def perform(%{"op" => "verify_websub", "websub_id" => websub_id}, _job) do
    websub = Repo.get(Websub.WebsubServerSubscription, websub_id)
    Federator.perform(:verify_websub, websub)
  end
end
lib/pleroma/workers/transmogrifier_worker.ex (new file)
@ -0,0 +1,15 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.TransmogrifierWorker do
  alias Pleroma.User

  use Pleroma.Workers.WorkerHelper, queue: "transmogrifier"

  @impl Oban.Worker
  def perform(%{"op" => "user_upgrade", "user_id" => user_id}, _job) do
    user = User.get_cached_by_id(user_id)
    Pleroma.Web.ActivityPub.Transmogrifier.perform(:user_upgrade, user)
  end
end
lib/pleroma/workers/web_pusher_worker.ex (new file)
@ -0,0 +1,16 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.WebPusherWorker do
  alias Pleroma.Notification
  alias Pleroma.Repo

  use Pleroma.Workers.WorkerHelper, queue: "web_push"

  @impl Oban.Worker
  def perform(%{"op" => "web_push", "notification_id" => notification_id}, _job) do
    notification = Repo.get(Notification, notification_id)
    Pleroma.Web.Push.Impl.perform(notification)
  end
end
lib/pleroma/workers/worker_helper.ex (new file)
@ -0,0 +1,46 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.WorkerHelper do
  alias Pleroma.Config
  alias Pleroma.Workers.WorkerHelper

  def worker_args(queue) do
    case Config.get([:workers, :retries, queue]) do
      nil -> []
      max_attempts -> [max_attempts: max_attempts]
    end
  end

  def sidekiq_backoff(attempt, pow \\ 4, base_backoff \\ 15) do
    backoff =
      :math.pow(attempt, pow) +
        base_backoff +
        :rand.uniform(2 * base_backoff) * attempt

    trunc(backoff)
  end

  defmacro __using__(opts) do
    caller_module = __CALLER__.module
    queue = Keyword.fetch!(opts, :queue)

    quote do
      # Note: `max_attempts` is intended to be overridden in `new/2` call
      use Oban.Worker,
        queue: unquote(queue),
        max_attempts: 1

      def enqueue(op, params, worker_args \\ []) do
        params = Map.merge(%{"op" => op}, params)
        queue_atom = String.to_atom(unquote(queue))
        worker_args = worker_args ++ WorkerHelper.worker_args(queue_atom)

        unquote(caller_module)
        |> apply(:new, [params, worker_args])
        |> Pleroma.Repo.insert()
      end
    end
  end
end
mix.exs

@ -101,6 +101,8 @@ defp deps do
       {:phoenix_ecto, "~> 4.0"},
       {:ecto_sql, "~> 3.1"},
       {:postgrex, ">= 0.13.5"},
+      {:oban, "~> 0.7"},
+      {:quantum, "~> 2.3"},
       {:gettext, "~> 0.15"},
       {:comeonin, "~> 4.1.1"},
       {:pbkdf2_elixir, "~> 0.12.3"},

@ -131,7 +133,7 @@ defp deps do
       {:phoenix_swoosh, "~> 0.2"},
       {:gen_smtp, "~> 0.13"},
       {:websocket_client, git: "https://github.com/jeremyong/websocket_client.git", only: :test},
-      {:floki, "~> 0.20.0"},
+      {:floki, "~> 0.23.0"},
       {:ex_syslogger, github: "slashmili/ex_syslogger", tag: "1.4.0"},
       {:timex, "~> 3.5"},
       {:ueberauth, "~> 0.4"},

@ -141,8 +143,8 @@ defp deps do
       {:http_signatures,
        git: "https://git.pleroma.social/pleroma/http_signatures.git",
        ref: "293d77bb6f4a67ac8bde1428735c3b42f22cbb30"},
-      {:pleroma_job_queue, "~> 0.3"},
       {:telemetry, "~> 0.3"},
+      {:poolboy, "~> 1.5"},
       {:prometheus_ex, "~> 3.0"},
       {:prometheus_plugs, "~> 1.1"},
       {:prometheus_phoenix, "~> 1.3"},

@ -172,8 +174,7 @@ defp aliases do
       "ecto.rollback": ["pleroma.ecto.rollback"],
       "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
       "ecto.reset": ["ecto.drop", "ecto.setup"],
-      test: ["ecto.create --quiet", "ecto.migrate", "test"],
-      docs: ["pleroma.docs", "docs"]
+      test: ["ecto.create --quiet", "ecto.migrate", "test"]
     ]
   end
mix.lock (12 lines changed)

@@ -17,7 +17,7 @@
   "credo": {:hex, :credo, "0.9.3", "76fa3e9e497ab282e0cf64b98a624aa11da702854c52c82db1bf24e54ab7c97a", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:poison, ">= 0.0.0", [hex: :poison, repo: "hexpm", optional: false]}], "hexpm"},
   "crontab": {:hex, :crontab, "1.1.7", "b9219f0bdc8678b94143655a8f229716c5810c0636a4489f98c0956137e53985", [:mix], [{:ecto, "~> 1.0 or ~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"},
   "crypt": {:git, "https://github.com/msantos/crypt", "1f2b58927ab57e72910191a7ebaeff984382a1d3", [ref: "1f2b58927ab57e72910191a7ebaeff984382a1d3"]},
-  "db_connection": {:hex, :db_connection, "2.0.6", "bde2f85d047969c5b5800cb8f4b3ed6316c8cb11487afedac4aa5f93fd39abfa", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm"},
+  "db_connection": {:hex, :db_connection, "2.1.1", "a51e8a2ee54ef2ae6ec41a668c85787ed40cb8944928c191280fe34c15b76ae5", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm"},
   "decimal": {:hex, :decimal, "1.8.0", "ca462e0d885f09a1c5a342dbd7c1dcf27ea63548c65a65e67334f4b61803822e", [:mix], [], "hexpm"},
   "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm"},
   "earmark": {:hex, :earmark, "1.3.6", "ce1d0675e10a5bb46b007549362bd3f5f08908843957687d8484fe7f37466b19", [:mix], [], "hexpm"},
@@ -34,8 +34,10 @@
   "ex_rated": {:hex, :ex_rated, "1.3.3", "30ecbdabe91f7eaa9d37fa4e81c85ba420f371babeb9d1910adbcd79ec798d27", [:mix], [{:ex2ms, "~> 1.5", [hex: :ex2ms, repo: "hexpm", optional: false]}], "hexpm"},
   "ex_syslogger": {:git, "https://github.com/slashmili/ex_syslogger.git", "f3963399047af17e038897c69e20d552e6899e1d", [tag: "1.4.0"]},
   "excoveralls": {:hex, :excoveralls, "0.11.1", "dd677fbdd49114fdbdbf445540ec735808250d56b011077798316505064edb2c", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"},
-  "floki": {:hex, :floki, "0.20.4", "be42ac911fece24b4c72f3b5846774b6e61b83fe685c2fc9d62093277fb3bc86", [:mix], [{:html_entities, "~> 0.4.0", [hex: :html_entities, repo: "hexpm", optional: false]}, {:mochiweb, "~> 2.15", [hex: :mochiweb, repo: "hexpm", optional: false]}], "hexpm"},
+  "floki": {:hex, :floki, "0.23.0", "956ab6dba828c96e732454809fb0bd8d43ce0979b75f34de6322e73d4c917829", [:mix], [{:html_entities, "~> 0.4.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm"},
   "gen_smtp": {:hex, :gen_smtp, "0.14.0", "39846a03522456077c6429b4badfd1d55e5e7d0fdfb65e935b7c5e38549d9202", [:rebar3], [], "hexpm"},
+  "gen_stage": {:hex, :gen_stage, "0.14.2", "6a2a578a510c5bfca8a45e6b27552f613b41cf584b58210f017088d3d17d0b14", [:mix], [], "hexpm"},
+  "gen_state_machine": {:hex, :gen_state_machine, "2.0.5", "9ac15ec6e66acac994cc442dcc2c6f9796cf380ec4b08267223014be1c728a95", [:mix], [], "hexpm"},
   "gettext": {:hex, :gettext, "0.17.0", "abe21542c831887a2b16f4c94556db9c421ab301aee417b7c4fbde7fbdbe01ec", [:mix], [], "hexpm"},
   "hackney": {:hex, :hackney, "1.15.1", "9f8f471c844b8ce395f7b6d8398139e26ddca9ebc171a8b91342ee15a19963f4", [:rebar3], [{:certifi, "2.5.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.4", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm"},
   "html_entities": {:hex, :html_entities, "0.4.0", "f2fee876858cf6aaa9db608820a3209e45a087c5177332799592142b50e89a6b", [:mix], [], "hexpm"},
@@ -46,6 +48,7 @@
   "jason": {:hex, :jason, "1.1.2", "b03dedea67a99223a2eaf9f1264ce37154564de899fd3d8b9a21b1a6fd64afe7", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm"},
   "joken": {:hex, :joken, "2.0.1", "ec9ab31bf660f343380da033b3316855197c8d4c6ef597fa3fcb451b326beb14", [:mix], [{:jose, "~> 1.9", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm"},
   "jose": {:hex, :jose, "1.9.0", "4167c5f6d06ffaebffd15cdb8da61a108445ef5e85ab8f5a7ad926fdf3ada154", [:mix, :rebar3], [{:base64url, "~> 0.0.1", [hex: :base64url, repo: "hexpm", optional: false]}], "hexpm"},
+  "libring": {:hex, :libring, "1.4.0", "41246ba2f3fbc76b3971f6bce83119dfec1eee17e977a48d8a9cfaaf58c2a8d6", [:mix], [], "hexpm"},
   "makeup": {:hex, :makeup, "1.0.0", "671df94cf5a594b739ce03b0d0316aa64312cee2574b6a44becb83cd90fb05dc", [:mix], [{:nimble_parsec, "~> 0.5.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"},
   "makeup_elixir": {:hex, :makeup_elixir, "0.14.0", "cf8b7c66ad1cff4c14679698d532f0b5d45a3968ffbcbfd590339cb57742f1ae", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm"},
   "meck": {:hex, :meck, "0.8.13", "ffedb39f99b0b99703b8601c6f17c7f76313ee12de6b646e671e3188401f7866", [:rebar3], [], "hexpm"},
@@ -57,6 +60,7 @@
   "mogrify": {:hex, :mogrify, "0.6.1", "de1b527514f2d95a7bbe9642eb556061afb337e220cf97adbf3a4e6438ed70af", [:mix], [], "hexpm"},
   "mox": {:hex, :mox, "0.5.1", "f86bb36026aac1e6f924a4b6d024b05e9adbed5c63e8daa069bd66fb3292165b", [:mix], [], "hexpm"},
   "nimble_parsec": {:hex, :nimble_parsec, "0.5.1", "c90796ecee0289dbb5ad16d3ad06f957b0cd1199769641c961cfe0b97db190e0", [:mix], [], "hexpm"},
+  "oban": {:hex, :oban, "0.7.1", "171bdd1b69c1a4a839f8c768f5e962fc22d1de1513d459fb6b8e0cbd34817a9a", [:mix], [{:ecto_sql, "~> 3.1", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.14", [hex: :postgrex, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
   "parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm"},
   "pbkdf2_elixir": {:hex, :pbkdf2_elixir, "0.12.3", "6706a148809a29c306062862c803406e88f048277f6e85b68faf73291e820b84", [:mix], [], "hexpm"},
   "phoenix": {:hex, :phoenix, "1.4.9", "746d098e10741c334d88143d3c94cab1756435f94387a63441792e66ec0ee974", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 1.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.8.1 or ~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 1.0 or ~> 2.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
@@ -64,12 +68,12 @@
   "phoenix_html": {:hex, :phoenix_html, "2.13.1", "fa8f034b5328e2dfa0e4131b5569379003f34bc1fafdaa84985b0b9d2f12e68b", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
   "phoenix_pubsub": {:hex, :phoenix_pubsub, "1.1.2", "496c303bdf1b2e98a9d26e89af5bba3ab487ba3a3735f74bf1f4064d2a845a3e", [:mix], [], "hexpm"},
   "phoenix_swoosh": {:hex, :phoenix_swoosh, "0.2.0", "a7e0b32077cd6d2323ae15198839b05d9caddfa20663fd85787479e81f89520e", [:mix], [{:phoenix, "~> 1.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.2", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:swoosh, "~> 0.1", [hex: :swoosh, repo: "hexpm", optional: false]}], "hexpm"},
-  "pleroma_job_queue": {:hex, :pleroma_job_queue, "0.3.0", "b84538d621f0c3d6fcc1cff9d5648d3faaf873b8b21b94e6503428a07a48ec47", [:mix], [{:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}], "hexpm"},
   "plug": {:hex, :plug, "1.8.2", "0bcce1daa420f189a6491f3940cc77ea7fb1919761175c9c3b59800d897440fc", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm"},
   "plug_cowboy": {:hex, :plug_cowboy, "2.1.0", "b75768153c3a8a9e8039d4b25bb9b14efbc58e9c4a6e6a270abff1cd30cbe320", [:mix], [{:cowboy, "~> 2.5", [hex: :cowboy, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
   "plug_crypto": {:hex, :plug_crypto, "1.0.0", "18e49317d3fa343f24620ed22795ec29d4a5e602d52d1513ccea0b07d8ea7d4d", [:mix], [], "hexpm"},
   "plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
   "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm"},
+  "poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm"},
   "postgrex": {:hex, :postgrex, "0.14.3", "5754dee2fdf6e9e508cbf49ab138df964278700b764177e8f3871e658b345a1e", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.0", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"},
   "prometheus": {:hex, :prometheus, "4.4.1", "1e96073b3ed7788053768fea779cbc896ddc3bdd9ba60687f2ad50b252ac87d6", [:mix, :rebar3], [], "hexpm"},
   "prometheus_ecto": {:hex, :prometheus_ecto, "1.4.1", "6c768ea9654de871e5b32fab2eac348467b3021604ebebbcbd8bcbe806a65ed5", [:mix], [{:ecto, "~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
@@ -77,9 +81,11 @@
   "prometheus_phoenix": {:hex, :prometheus_phoenix, "1.3.0", "c4b527e0b3a9ef1af26bdcfbfad3998f37795b9185d475ca610fe4388fdd3bb5", [:mix], [{:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.3 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
   "prometheus_plugs": {:hex, :prometheus_plugs, "1.1.5", "25933d48f8af3a5941dd7b621c889749894d8a1082a6ff7c67cc99dec26377c5", [:mix], [{:accept, "~> 0.1", [hex: :accept, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}, {:prometheus_process_collector, "~> 1.1", [hex: :prometheus_process_collector, repo: "hexpm", optional: true]}], "hexpm"},
   "quack": {:hex, :quack, "0.1.1", "cca7b4da1a233757fdb44b3334fce80c94785b3ad5a602053b7a002b5a8967bf", [:mix], [{:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: false]}, {:tesla, "~> 1.2.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm"},
+  "quantum": {:hex, :quantum, "2.3.4", "72a0e8855e2adc101459eac8454787cb74ab4169de6ca50f670e72142d4960e9", [:mix], [{:calendar, "~> 0.17", [hex: :calendar, repo: "hexpm", optional: true]}, {:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}, {:gen_stage, "~> 0.12", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:swarm, "~> 3.3", [hex: :swarm, repo: "hexpm", optional: false]}, {:timex, "~> 3.1", [hex: :timex, repo: "hexpm", optional: true]}], "hexpm"},
   "ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"},
   "recon": {:git, "https://github.com/ferd/recon.git", "75d70c7c08926d2f24f1ee6de14ee50fe8a52763", [tag: "2.4.0"]},
   "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.4", "f0eafff810d2041e93f915ef59899c923f4568f4585904d010387ed74988e77b", [:make, :mix, :rebar3], [], "hexpm"},
+  "swarm": {:hex, :swarm, "3.4.0", "64f8b30055d74640d2186c66354b33b999438692a91be275bb89cdc7e401f448", [:mix], [{:gen_state_machine, "~> 2.0", [hex: :gen_state_machine, repo: "hexpm", optional: false]}, {:libring, "~> 1.0", [hex: :libring, repo: "hexpm", optional: false]}], "hexpm"},
   "sweet_xml": {:hex, :sweet_xml, "0.6.6", "fc3e91ec5dd7c787b6195757fbcf0abc670cee1e4172687b45183032221b66b8", [:mix], [], "hexpm"},
   "swoosh": {:hex, :swoosh, "0.23.2", "7dda95ff0bf54a2298328d6899c74dae1223777b43563ccebebb4b5d2b61df38", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}], "hexpm"},
   "syslog": {:git, "https://github.com/Vagabond/erlang-syslog.git", "4a6c6f2c996483e86c1320e9553f91d337bcb6aa", [tag: "1.0.5"]},
priv/repo/migrations/…_add_oban_jobs_table.exs (new file, 6 lines)

@@ -0,0 +1,6 @@
+defmodule Pleroma.Repo.Migrations.AddObanJobsTable do
+  use Ecto.Migration
+
+  defdelegate up, to: Oban.Migrations
+  defdelegate down, to: Oban.Migrations
+end
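This migration only delegates to Oban.Migrations, which creates the oban_jobs table the workers insert into. For it to do anything at runtime, Oban presumably also has to be configured against the repo and started in the supervision tree; a minimal sketch, with queue names and limits that are assumptions rather than values taken from this diff:

    # config/config.exs (sketch; queue names and limits are illustrative)
    config :pleroma, Oban,
      repo: Pleroma.Repo,
      queues: [federator_incoming: 50, federator_outgoing: 50, mailer: 10]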
priv/repo/migrations/20190912065617_create_deliveries.exs (new file, 12 lines)

@@ -0,0 +1,12 @@
+defmodule Pleroma.Repo.Migrations.CreateDeliveries do
+  use Ecto.Migration
+
+  def change do
+    create_if_not_exists table(:deliveries) do
+      add(:object_id, references(:objects, type: :id), null: false)
+      add(:user_id, references(:users, type: :uuid, on_delete: :delete_all), null: false)
+    end
+    create_if_not_exists index(:deliveries, :object_id, name: :deliveries_object_id)
+    create_if_not_exists(unique_index(:deliveries, [:user_id, :object_id]))
+  end
+end
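The deliveries table backs the `Pleroma.Delivery` tracking exercised by the ActivityPub controller tests near the end of this diff (`Delivery.get(object.id, user.id)`). Below is a rough sketch of a schema that could sit on top of it; the field definitions and the query are assumptions for illustration, only `get/2` is actually called in this diff.

    # Sketch of an assumed schema over the deliveries table (not the real Pleroma.Delivery).
    defmodule Delivery do
      use Ecto.Schema
      import Ecto.Query, only: [from: 2]

      schema "deliveries" do
        belongs_to(:object, Pleroma.Object)
        belongs_to(:user, Pleroma.User, type: :binary_id)
      end

      # Assumed lookup used by the delivery-tracking tests.
      def get(object_id, user_id) do
        from(d in __MODULE__, where: d.object_id == ^object_id and d.user_id == ^user_id)
        |> Pleroma.Repo.one()
      end
    end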
test/activity/ir/topics_test.exs (new file, 141 lines)

@@ -0,0 +1,141 @@
+defmodule Pleroma.Activity.Ir.TopicsTest do
+  use Pleroma.DataCase
+
+  alias Pleroma.Activity
+  alias Pleroma.Activity.Ir.Topics
+  alias Pleroma.Object
+
+  require Pleroma.Constants
+
+  describe "poll answer" do
+    test "produce no topics" do
+      activity = %Activity{object: %Object{data: %{"type" => "Answer"}}}
+
+      assert [] == Topics.get_activity_topics(activity)
+    end
+  end
+
+  describe "non poll answer" do
+    test "always add user and list topics" do
+      activity = %Activity{object: %Object{data: %{"type" => "FooBar"}}}
+      topics = Topics.get_activity_topics(activity)
+
+      assert Enum.member?(topics, "user")
+      assert Enum.member?(topics, "list")
+    end
+  end
+
+  describe "public visibility" do
+    setup do
+      activity = %Activity{
+        object: %Object{data: %{"type" => "Note"}},
+        data: %{"to" => [Pleroma.Constants.as_public()]}
+      }
+
+      {:ok, activity: activity}
+    end
+
+    test "produces public topic", %{activity: activity} do
+      topics = Topics.get_activity_topics(activity)
+
+      assert Enum.member?(topics, "public")
+    end
+
+    test "local action produces public:local topic", %{activity: activity} do
+      activity = %{activity | local: true}
+      topics = Topics.get_activity_topics(activity)
+
+      assert Enum.member?(topics, "public:local")
+    end
+
+    test "non-local action does not produce public:local topic", %{activity: activity} do
+      activity = %{activity | local: false}
+      topics = Topics.get_activity_topics(activity)
+
+      refute Enum.member?(topics, "public:local")
+    end
+  end
+
+  describe "public visibility create events" do
+    setup do
+      activity = %Activity{
+        object: %Object{data: %{"type" => "Create", "attachment" => []}},
+        data: %{"to" => [Pleroma.Constants.as_public()]}
+      }
+
+      {:ok, activity: activity}
+    end
+
+    test "with no attachments doesn't produce public:media topics", %{activity: activity} do
+      topics = Topics.get_activity_topics(activity)
+
+      refute Enum.member?(topics, "public:media")
+      refute Enum.member?(topics, "public:local:media")
+    end
+
+    test "converts tags to hash tags", %{activity: %{object: %{data: data} = object} = activity} do
+      tagged_data = Map.put(data, "tag", ["foo", "bar"])
+      activity = %{activity | object: %{object | data: tagged_data}}
+
+      topics = Topics.get_activity_topics(activity)
+
+      assert Enum.member?(topics, "hashtag:foo")
+      assert Enum.member?(topics, "hashtag:bar")
+    end
+
+    test "only converts strings to hash tags", %{
+      activity: %{object: %{data: data} = object} = activity
+    } do
+      tagged_data = Map.put(data, "tag", [2])
+      activity = %{activity | object: %{object | data: tagged_data}}
+
+      topics = Topics.get_activity_topics(activity)
+
+      refute Enum.member?(topics, "hashtag:2")
+    end
+  end
+
+  describe "public visibility create events with attachments" do
+    setup do
+      activity = %Activity{
+        object: %Object{data: %{"type" => "Create", "attachment" => ["foo"]}},
+        data: %{"to" => [Pleroma.Constants.as_public()]}
+      }
+
+      {:ok, activity: activity}
+    end
+
+    test "produce public:media topics", %{activity: activity} do
+      topics = Topics.get_activity_topics(activity)
+
+      assert Enum.member?(topics, "public:media")
+    end
+
+    test "local produces public:local:media topics", %{activity: activity} do
+      topics = Topics.get_activity_topics(activity)
+
+      assert Enum.member?(topics, "public:local:media")
+    end
+
+    test "non-local doesn't produce public:local:media topics", %{activity: activity} do
+      activity = %{activity | local: false}
+
+      topics = Topics.get_activity_topics(activity)
+
+      refute Enum.member?(topics, "public:local:media")
+    end
+  end
+
+  describe "non-public visibility" do
+    test "produces direct topic" do
+      activity = %Activity{object: %Object{data: %{"type" => "Note"}}, data: %{"to" => []}}
+      topics = Topics.get_activity_topics(activity)
+
+      assert Enum.member?(topics, "direct")
+      refute Enum.member?(topics, "public")
+      refute Enum.member?(topics, "public:local")
+      refute Enum.member?(topics, "public:media")
+      refute Enum.member?(topics, "public:local:media")
+    end
+  end
+end
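For context, these topic strings presumably drive the streaming fan-out, roughly like the sketch below; this is an assumption for illustration, only `Streamer.stream/2` itself appears elsewhere in this diff.

    # Sketch: push an activity to every topic it maps to (illustrative only).
    activity
    |> Pleroma.Activity.Ir.Topics.get_activity_topics()
    |> Enum.each(fn topic -> Pleroma.Web.Streamer.stream(topic, activity) end)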
@@ -7,6 +7,7 @@ defmodule Pleroma.ActivityTest do
   alias Pleroma.Activity
   alias Pleroma.Bookmark
   alias Pleroma.Object
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.ThreadMute
   import Pleroma.Factory

@@ -125,7 +126,8 @@ test "when association is not loaded" do
       }

       {:ok, local_activity} = Pleroma.Web.CommonAPI.post(user, %{"status" => "find me!"})
-      {:ok, remote_activity} = Pleroma.Web.Federator.incoming_ap_doc(params)
+      {:ok, job} = Pleroma.Web.Federator.incoming_ap_doc(params)
+      {:ok, remote_activity} = ObanHelpers.perform(job)
       %{local_activity: local_activity, remote_activity: remote_activity, user: user}
     end
@@ -22,6 +22,8 @@ test "it goes through old direct conversations" do
     {:ok, _activity} =
       CommonAPI.post(user, %{"visibility" => "direct", "status" => "hey @#{other_user.nickname}"})

+    Pleroma.Tests.ObanHelpers.perform_all()
+
     Repo.delete_all(Conversation)
     Repo.delete_all(Conversation.Participation)
@@ -10,7 +10,7 @@ defmodule Pleroma.ActivityExpirationWorkerTest do
   test "deletes an activity" do
     activity = insert(:note_activity)
     expiration = insert(:expiration_in_the_past, %{activity_id: activity.id})
-    Pleroma.ActivityExpirationWorker.perform(:execute, expiration.id)
+    Pleroma.Daemons.ActivityExpirationDaemon.perform(:execute, expiration.id)

     refute Repo.get(Activity, activity.id)
   end
@@ -2,11 +2,12 @@
 # Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.DigestEmailWorkerTest do
+defmodule Pleroma.DigestEmailDaemonTest do
   use Pleroma.DataCase
   import Pleroma.Factory

-  alias Pleroma.DigestEmailWorker
+  alias Pleroma.Daemons.DigestEmailDaemon
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.CommonAPI

@@ -22,7 +23,10 @@ test "it sends digest emails" do
     User.switch_email_notifications(user2, "digest", true)
     CommonAPI.post(user, %{"status" => "hey @#{user2.nickname}!"})

-    DigestEmailWorker.perform()
+    DigestEmailDaemon.perform()
+    ObanHelpers.perform_all()
+    # Performing job(s) enqueued at previous step
+    ObanHelpers.perform_all()

     assert_received {:email, email}
     assert email.to == [{user2.name, user2.email}]
@@ -2,7 +2,7 @@
 # Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.ScheduledActivityWorkerTest do
+defmodule Pleroma.ScheduledActivityDaemonTest do
   use Pleroma.DataCase
   alias Pleroma.ScheduledActivity
   import Pleroma.Factory
@@ -10,7 +10,7 @@ defmodule Pleroma.ScheduledActivityWorkerTest do
   test "creates a status from the scheduled activity" do
     user = insert(:user)
     scheduled_activity = insert(:scheduled_activity, user: user, params: %{status: "hi"})
-    Pleroma.ScheduledActivityWorker.perform(:execute, scheduled_activity.id)
+    Pleroma.Daemons.ScheduledActivityDaemon.perform(:execute, scheduled_activity.id)

     refute Repo.get(ScheduledActivity, scheduled_activity.id)
     activity = Repo.all(Pleroma.Activity) |> Enum.find(&(&1.actor == user.ap_id))
@@ -11,7 +11,6 @@ defmodule Pleroma.Integration.MastodonWebsocketTest do
   alias Pleroma.Integration.WebsocketClient
   alias Pleroma.Web.CommonAPI
   alias Pleroma.Web.OAuth
-  alias Pleroma.Web.Streamer

   @path Pleroma.Web.Endpoint.url()
         |> URI.parse()
@@ -19,16 +18,6 @@ defmodule Pleroma.Integration.MastodonWebsocketTest do
         |> Map.put(:path, "/api/v1/streaming")
         |> URI.to_string()

-  setup do
-    GenServer.start(Streamer, %{}, name: Streamer)
-
-    on_exit(fn ->
-      if pid = Process.whereis(Streamer) do
-        Process.exit(pid, :kill)
-      end
-    end)
-  end
-
   def start_socket(qs \\ nil, headers \\ []) do
     path =
       case qs do
@@ -53,12 +42,14 @@ test "requires authentication and a valid token for protected streams" do
     end)
   end

+  @tag needs_streamer: true
   test "allows public streams without authentication" do
     assert {:ok, _} = start_socket("?stream=public")
     assert {:ok, _} = start_socket("?stream=public:local")
     assert {:ok, _} = start_socket("?stream=hashtag&tag=lain")
   end

+  @tag needs_streamer: true
   test "receives well formatted events" do
     user = insert(:user)
     {:ok, _} = start_socket("?stream=public")
@@ -103,6 +94,7 @@ test "accepts valid tokens", state do
       assert {:ok, _} = start_socket("?stream=user&access_token=#{state.token.token}")
     end

+    @tag needs_streamer: true
    test "accepts the 'user' stream", %{token: token} = _state do
      assert {:ok, _} = start_socket("?stream=user&access_token=#{token.token}")

@@ -111,6 +103,7 @@ test "accepts the 'user' stream", %{token: token} = _state do
      end) =~ ":badarg"
    end

+    @tag needs_streamer: true
    test "accepts the 'user:notification' stream", %{token: token} = _state do
      assert {:ok, _} = start_socket("?stream=user:notification&access_token=#{token.token}")

@@ -119,6 +112,7 @@ test "accepts the 'user:notification' stream", %{token: token} = _state do
      end) =~ ":badarg"
    end

+    @tag needs_streamer: true
    test "accepts valid token on Sec-WebSocket-Protocol header", %{token: token} do
      assert {:ok, _} = start_socket("?stream=user", [{"Sec-WebSocket-Protocol", token.token}])

@@ -8,6 +8,7 @@ defmodule Pleroma.NotificationTest do
   import Pleroma.Factory

   alias Pleroma.Notification
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.Transmogrifier
   alias Pleroma.Web.CommonAPI
@@ -68,16 +69,7 @@ test "does not create a notification for subscribed users if status is a reply"
   end

   describe "create_notification" do
-    setup do
-      GenServer.start(Streamer, %{}, name: Streamer)
-
-      on_exit(fn ->
-        if pid = Process.whereis(Streamer) do
-          Process.exit(pid, :kill)
-        end
-      end)
-    end
-
+    @tag needs_streamer: true
     test "it creates a notification for user and send to the 'user' and the 'user:notification' stream" do
       user = insert(:user)
       task = Task.async(fn -> assert_receive {:text, _}, 4_000 end)
@@ -590,7 +582,8 @@ test "notifications are deleted if a local user is deleted" do
     refute Enum.empty?(Notification.for_user(other_user))

-    User.delete(user)
+    {:ok, job} = User.delete(user)
+    ObanHelpers.perform(job)

     assert Enum.empty?(Notification.for_user(other_user))
   end
@@ -635,6 +628,7 @@ test "notifications are deleted if a remote user is deleted" do
     }

     {:ok, _delete_activity} = Transmogrifier.handle_incoming(delete_user_message)
+    ObanHelpers.perform_all()

     assert Enum.empty?(Notification.for_user(local_user))
   end
@@ -40,6 +40,10 @@ defmodule Pleroma.Web.ConnCase do
       Ecto.Adapters.SQL.Sandbox.mode(Pleroma.Repo, {:shared, self()})
     end

+    if tags[:needs_streamer] do
+      start_supervised(Pleroma.Web.Streamer.supervisor())
+    end
+
     {:ok, conn: Phoenix.ConnTest.build_conn()}
   end
 end

@@ -39,6 +39,10 @@ defmodule Pleroma.DataCase do
       Ecto.Adapters.SQL.Sandbox.mode(Pleroma.Repo, {:shared, self()})
     end

+    if tags[:needs_streamer] do
+      start_supervised(Pleroma.Web.Streamer.supervisor())
+    end
+
     :ok
   end
 end
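With this change, both test case templates start the streamer's supervision tree on demand: `start_supervised/1` ties its lifetime to the individual test, so tagged tests no longer start and kill a global Streamer GenServer by hand. Opting in follows the same pattern as the websocket and notification tests above:

    # Example of opting a test into the streamer processes (pattern used above).
    @tag needs_streamer: true
    test "streams the event to subscribers" do
      # ... exercise Pleroma.Web.Streamer here ...
    end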
test/support/oban_helpers.ex (new file, 42 lines)

@@ -0,0 +1,42 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Tests.ObanHelpers do
+  @moduledoc """
+  Oban test helpers.
+  """
+
+  alias Pleroma.Repo
+
+  def perform_all do
+    Oban.Job
+    |> Repo.all()
+    |> perform()
+  end
+
+  def perform(%Oban.Job{} = job) do
+    res = apply(String.to_existing_atom("Elixir." <> job.worker), :perform, [job.args, job])
+    Repo.delete(job)
+    res
+  end
+
+  def perform(jobs) when is_list(jobs) do
+    for job <- jobs, do: perform(job)
+  end
+
+  def member?(%{} = job_args, jobs) when is_list(jobs) do
+    Enum.any?(jobs, fn job ->
+      member?(job_args, job.args)
+    end)
+  end
+
+  def member?(%{} = test_attrs, %{} = attrs) do
+    Enum.all?(
+      test_attrs,
+      fn {k, _v} -> member?(test_attrs[k], attrs[k]) end
+    )
+  end
+
+  def member?(x, y), do: x == y
+end
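In the test changes that follow, this helper is used to flush enqueued jobs synchronously instead of sleeping. A condensed usage sketch, with the worker modules and args taken from assertions later in this diff:

    # Run everything currently sitting in the oban_jobs table:
    Pleroma.Tests.ObanHelpers.perform_all()

    # Or, together with `use Oban.Testing, repo: Pleroma.Repo`, run only one worker's jobs:
    ObanHelpers.perform(all_enqueued(worker: Pleroma.Workers.ReceiverWorker))

    # Or assert on enqueued args without running them:
    assert ObanHelpers.member?(
             %{"op" => "publish_one"},
             all_enqueued(worker: Pleroma.Workers.PublisherWorker)
           )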
@@ -4,6 +4,7 @@ defmodule Mix.Tasks.Pleroma.DigestTest do
   import Pleroma.Factory
   import Swoosh.TestAssertions

+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.Web.CommonAPI

   setup_all do
@@ -39,6 +40,8 @@ test "Sends digest to the given user" do

       :ok = Mix.Tasks.Pleroma.Digest.run(["test", user2.nickname, yesterday_date])

+      ObanHelpers.perform_all()
+
       assert_receive {:mix_shell, :info, [message]}
       assert message =~ "Digest email have been sent"
@@ -7,14 +7,16 @@ defmodule Pleroma.UserTest do
   alias Pleroma.Builders.UserBuilder
   alias Pleroma.Object
   alias Pleroma.Repo
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.CommonAPI

   use Pleroma.DataCase
+  use Oban.Testing, repo: Pleroma.Repo

-  import Pleroma.Factory
   import Mock
+  import Pleroma.Factory

   setup_all do
     Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
@@ -570,22 +572,6 @@ test "it has required fields" do
         refute cs.valid?
       end)
     end
-
-    test "it restricts some sizes" do
-      bio_limit = Pleroma.Config.get([:instance, :user_bio_length], 5000)
-      name_limit = Pleroma.Config.get([:instance, :user_name_length], 100)
-
-      [bio: bio_limit, name: name_limit]
-      |> Enum.each(fn {field, size} ->
-        string = String.pad_leading(".", size)
-        cs = User.remote_user_creation(Map.put(@valid_remote, field, string))
-        assert cs.valid?
-
-        string = String.pad_leading(".", size + 1)
-        cs = User.remote_user_creation(Map.put(@valid_remote, field, string))
-        refute cs.valid?
-      end)
-    end
   end

   describe "followers and friends" do
@@ -725,7 +711,9 @@ test "it imports user followings from list" do
         user3.nickname
       ]

-      result = User.follow_import(user1, identifiers)
+      {:ok, job} = User.follow_import(user1, identifiers)
+      result = ObanHelpers.perform(job)
+
       assert is_list(result)
       assert result == [user2, user3]
     end
@@ -936,7 +924,9 @@ test "it imports user blocks from list" do
         user3.nickname
       ]

-      result = User.blocks_import(user1, identifiers)
+      {:ok, job} = User.blocks_import(user1, identifiers)
+      result = ObanHelpers.perform(job)
+
       assert is_list(result)
       assert result == [user2, user3]
     end
@@ -1053,7 +1043,9 @@ test ".delete_user_activities deletes all create activities", %{user: user} do
     test "it deletes deactivated user" do
       {:ok, user} = insert(:user, info: %{deactivated: true}) |> User.set_cache()

-      assert {:ok, _} = User.delete(user)
+      {:ok, job} = User.delete(user)
+      {:ok, _user} = ObanHelpers.perform(job)
+
       refute User.get_by_id(user.id)
     end
@@ -1071,7 +1063,8 @@ test "it deletes a user, all follow relationships and all activities", %{user: u
       {:ok, like_two, _} = CommonAPI.favorite(activity.id, follower)
       {:ok, repeat, _} = CommonAPI.repeat(activity_two.id, user)

-      {:ok, _} = User.delete(user)
+      {:ok, job} = User.delete(user)
+      {:ok, _user} = ObanHelpers.perform(job)

       follower = User.get_cached_by_id(follower.id)

@@ -1103,12 +1096,18 @@ test "it deletes a user, all follow relationships and all activities", %{user: u
       {:ok, follower} = User.get_or_fetch_by_ap_id("http://mastodon.example.org/users/admin")
       {:ok, _} = User.follow(follower, user)

-      {:ok, _user} = User.delete(user)
+      {:ok, job} = User.delete(user)
+      {:ok, _user} = ObanHelpers.perform(job)

-      assert called(
-               Pleroma.Web.ActivityPub.Publisher.publish_one(%{
-                 inbox: "http://mastodon.example.org/inbox"
-               })
-             )
+      assert ObanHelpers.member?(
+               %{
+                 "op" => "publish_one",
+                 "params" => %{
+                   "inbox" => "http://mastodon.example.org/inbox",
+                   "id" => "pleroma:fakeid"
+                 }
+               },
+               all_enqueued(worker: Pleroma.Workers.PublisherWorker)
+             )
     end
   end
@@ -1117,13 +1116,62 @@ test "get_public_key_for_ap_id fetches a user that's not in the db" do
     assert {:ok, _key} = User.get_public_key_for_ap_id("http://mastodon.example.org/users/admin")
   end

-  test "insert or update a user from given data" do
-    user = insert(:user, %{nickname: "nick@name.de"})
-    data = %{ap_id: user.ap_id <> "xxx", name: user.name, nickname: user.nickname}
-
-    assert {:ok, %User{}} = User.insert_or_update_user(data)
-  end
+  describe "insert or update a user from given data" do
+    test "with normal data" do
+      user = insert(:user, %{nickname: "nick@name.de"})
+      data = %{ap_id: user.ap_id <> "xxx", name: user.name, nickname: user.nickname}
+
+      assert {:ok, %User{}} = User.insert_or_update_user(data)
+    end
+
+    test "with overly long fields" do
+      current_max_length = Pleroma.Config.get([:instance, :account_field_value_length], 255)
+      user = insert(:user, nickname: "nickname@supergood.domain")
+
+      data = %{
+        ap_id: user.ap_id,
+        name: user.name,
+        nickname: user.nickname,
+        info: %{
+          fields: [
+            %{"name" => "myfield", "value" => String.duplicate("h", current_max_length + 1)}
+          ]
+        }
+      }
+
+      assert {:ok, %User{}} = User.insert_or_update_user(data)
+    end
+
+    test "with an overly long bio" do
+      current_max_length = Pleroma.Config.get([:instance, :user_bio_length], 5000)
+      user = insert(:user, nickname: "nickname@supergood.domain")
+
+      data = %{
+        ap_id: user.ap_id,
+        name: user.name,
+        nickname: user.nickname,
+        bio: String.duplicate("h", current_max_length + 1),
+        info: %{}
+      }
+
+      assert {:ok, %User{}} = User.insert_or_update_user(data)
+    end
+
+    test "with an overly long display name" do
+      current_max_length = Pleroma.Config.get([:instance, :user_name_length], 100)
+      user = insert(:user, nickname: "nickname@supergood.domain")
+
+      data = %{
+        ap_id: user.ap_id,
+        name: String.duplicate("h", current_max_length + 1),
+        nickname: user.nickname,
+        info: %{}
+      }
+
+      assert {:ok, %User{}} = User.insert_or_update_user(data)
+    end
+  end

   describe "per-user rich-text filtering" do
     test "html_filter_policy returns default policies, when rich-text is enabled" do
       user = insert(:user)
@@ -1153,7 +1201,8 @@ test "invalidate_cache works" do
     test "User.delete() plugs any possible zombie objects" do
       user = insert(:user)

-      {:ok, _} = User.delete(user)
+      {:ok, job} = User.delete(user)
+      {:ok, _} = ObanHelpers.perform(job)

       {:ok, cached_user} = Cachex.get(:user_cache, "ap_id:#{user.ap_id}")

@ -4,16 +4,21 @@
|
||||||
|
|
||||||
defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
|
||||||
use Pleroma.Web.ConnCase
|
use Pleroma.Web.ConnCase
|
||||||
|
use Oban.Testing, repo: Pleroma.Repo
|
||||||
|
|
||||||
import Pleroma.Factory
|
import Pleroma.Factory
|
||||||
alias Pleroma.Activity
|
alias Pleroma.Activity
|
||||||
|
alias Pleroma.Delivery
|
||||||
alias Pleroma.Instances
|
alias Pleroma.Instances
|
||||||
alias Pleroma.Object
|
alias Pleroma.Object
|
||||||
|
alias Pleroma.Tests.ObanHelpers
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
alias Pleroma.Web.ActivityPub.ObjectView
|
alias Pleroma.Web.ActivityPub.ObjectView
|
||||||
alias Pleroma.Web.ActivityPub.Relay
|
alias Pleroma.Web.ActivityPub.Relay
|
||||||
alias Pleroma.Web.ActivityPub.UserView
|
alias Pleroma.Web.ActivityPub.UserView
|
||||||
alias Pleroma.Web.ActivityPub.Utils
|
alias Pleroma.Web.ActivityPub.Utils
|
||||||
alias Pleroma.Web.CommonAPI
|
alias Pleroma.Web.CommonAPI
|
||||||
|
alias Pleroma.Workers.ReceiverWorker
|
   setup_all do
     Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)

@@ -365,7 +370,8 @@ test "it inserts an incoming activity into the database", %{conn: conn} do
         |> post("/inbox", data)

       assert "ok" == json_response(conn, 200)
-      :timer.sleep(500)
+
+      ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
       assert Activity.get_by_ap_id(data["id"])
     end

@@ -407,7 +413,7 @@ test "it inserts an incoming activity into the database", %{conn: conn, data: data} do
         |> post("/users/#{user.nickname}/inbox", data)

       assert "ok" == json_response(conn, 200)
-      :timer.sleep(500)
+      ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
       assert Activity.get_by_ap_id(data["id"])
     end

@@ -436,7 +442,7 @@ test "it accepts messages from actors that are followed by the user", %{
         |> post("/users/#{recipient.nickname}/inbox", data)

       assert "ok" == json_response(conn, 200)
-      :timer.sleep(500)
+      ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
       assert Activity.get_by_ap_id(data["id"])
     end

@@ -526,6 +532,8 @@ test "it removes all follower collections but actor's", %{conn: conn} do
         |> post("/users/#{recipient.nickname}/inbox", data)
         |> json_response(200)

+      ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
+
       activity = Activity.get_by_ap_id(data["id"])

       assert activity.id

@@ -601,6 +609,7 @@ test "it inserts an incoming create activity into the database", %{conn: conn} do
         |> post("/users/#{user.nickname}/outbox", data)

       result = json_response(conn, 201)

       assert Activity.get_by_ap_id(result["id"])
     end

@@ -885,4 +894,86 @@ test "it works for more than 10 users", %{conn: conn} do
       assert result["totalItems"] == 15
     end
   end
+
+  describe "delivery tracking" do
+    test "it tracks a signed object fetch", %{conn: conn} do
+      user = insert(:user, local: false)
+      activity = insert(:note_activity)
+      object = Object.normalize(activity)
+
+      object_path = String.trim_leading(object.data["id"], Pleroma.Web.Endpoint.url())
+
+      conn
+      |> put_req_header("accept", "application/activity+json")
+      |> assign(:user, user)
+      |> get(object_path)
+      |> json_response(200)
+
+      assert Delivery.get(object.id, user.id)
+    end
+
+    test "it tracks a signed activity fetch", %{conn: conn} do
+      user = insert(:user, local: false)
+      activity = insert(:note_activity)
+      object = Object.normalize(activity)
+
+      activity_path = String.trim_leading(activity.data["id"], Pleroma.Web.Endpoint.url())
+
+      conn
+      |> put_req_header("accept", "application/activity+json")
+      |> assign(:user, user)
+      |> get(activity_path)
+      |> json_response(200)
+
+      assert Delivery.get(object.id, user.id)
+    end
+
+    test "it tracks a signed object fetch when the json is cached", %{conn: conn} do
+      user = insert(:user, local: false)
+      other_user = insert(:user, local: false)
+      activity = insert(:note_activity)
+      object = Object.normalize(activity)
+
+      object_path = String.trim_leading(object.data["id"], Pleroma.Web.Endpoint.url())
+
+      conn
+      |> put_req_header("accept", "application/activity+json")
+      |> assign(:user, user)
+      |> get(object_path)
+      |> json_response(200)
+
+      build_conn()
+      |> put_req_header("accept", "application/activity+json")
+      |> assign(:user, other_user)
+      |> get(object_path)
+      |> json_response(200)
+
+      assert Delivery.get(object.id, user.id)
+      assert Delivery.get(object.id, other_user.id)
+    end
+
+    test "it tracks a signed activity fetch when the json is cached", %{conn: conn} do
+      user = insert(:user, local: false)
+      other_user = insert(:user, local: false)
+      activity = insert(:note_activity)
+      object = Object.normalize(activity)
+
+      activity_path = String.trim_leading(activity.data["id"], Pleroma.Web.Endpoint.url())
+
+      conn
+      |> put_req_header("accept", "application/activity+json")
+      |> assign(:user, user)
+      |> get(activity_path)
+      |> json_response(200)
+
+      build_conn()
+      |> put_req_header("accept", "application/activity+json")
+      |> assign(:user, other_user)
+      |> get(activity_path)
+      |> json_response(200)
+
+      assert Delivery.get(object.id, user.id)
+      assert Delivery.get(object.id, other_user.id)
+    end
+  end
 end

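The hunks above all make the same move: instead of sleeping and hoping the asynchronous federation job has already run, the test drains the Oban queue explicitly. A minimal sketch of that pattern, not taken verbatim from the diff; it assumes a ConnCase-style test module that has `use Oban.Testing, repo: Pleroma.Repo`, a setup providing `conn` and an incoming `data` document, and the aliases shown:

alias Pleroma.Activity
alias Pleroma.Tests.ObanHelpers
alias Pleroma.Workers.ReceiverWorker

test "processes an inbox delivery without sleeping", %{conn: conn, data: data} do
  conn
  |> put_req_header("content-type", "application/activity+json")
  |> post("/inbox", data)
  |> json_response(200)

  # Run whatever the request enqueued, instead of :timer.sleep/1.
  ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))

  assert Activity.get_by_ap_id(data["id"])
end
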
@@ -38,9 +38,7 @@ test "it streams them out" do
         stream: fn _, _ -> nil end do
         ActivityPub.stream_out_participations(conversation.participations)

-        Enum.each(participations, fn participation ->
-          assert called(Pleroma.Web.Streamer.stream("participation", participation))
-        end)
+        assert called(Pleroma.Web.Streamer.stream("participation", participations))
       end
     end
   end

@@ -686,7 +684,7 @@ test "returns reblogs for users for whom reblogs have not been muted" do
       user = insert(:user)

       {:ok, like_activity, _object} = ActivityPub.like(user, object_activity)
-      assert called(Pleroma.Web.Federator.publish(like_activity, 5))
+      assert called(Pleroma.Web.Federator.publish(like_activity))
     end

     test "returns exist activity if object already liked" do

@@ -747,7 +745,7 @@ test "adds a like activity to the db" do
       {:ok, unlike_activity, _, object} = ActivityPub.unlike(user, object)
       assert object.data["like_count"] == 0

-      assert called(Pleroma.Web.Federator.publish(unlike_activity, 5))
+      assert called(Pleroma.Web.Federator.publish(unlike_activity))
     end

     test "unliking a previously liked object" do

@@ -6,6 +6,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicyTest do
   use Pleroma.DataCase

   alias Pleroma.HTTP
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy

   import Mock

@@ -24,6 +25,11 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicyTest do
   test "it prefetches media proxy URIs" do
     with_mock HTTP, get: fn _, _, _ -> {:ok, []} end do
       MediaProxyWarmingPolicy.filter(@message)

+      ObanHelpers.perform_all()
+
+      # Performing jobs which has been just enqueued
+      ObanHelpers.perform_all()
+
       assert called(HTTP.get(:_, :_, :_))
     end
   end

@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Web.ActivityPub.PublisherTest do
-  use Pleroma.DataCase
+  use Pleroma.Web.ConnCase

   import ExUnit.CaptureLog
   import Pleroma.Factory

@@ -12,7 +12,9 @@ defmodule Pleroma.Web.ActivityPub.PublisherTest do

   alias Pleroma.Activity
   alias Pleroma.Instances
+  alias Pleroma.Object
   alias Pleroma.Web.ActivityPub.Publisher
+  alias Pleroma.Web.CommonAPI

   @as_public "https://www.w3.org/ns/activitystreams#Public"

@@ -263,10 +265,74 @@ test "it returns inbox for messages involving single recipients in total" do
       assert called(
                Pleroma.Web.Federator.Publisher.enqueue_one(Publisher, %{
                  inbox: "https://domain.com/users/nick1/inbox",
-                 actor: actor,
+                 actor_id: actor.id,
                  id: note_activity.data["id"]
                })
             )
     end
+
+    test_with_mock "publishes a delete activity to peers who signed fetch requests to the create acitvity/object.",
+                   Pleroma.Web.Federator.Publisher,
+                   [:passthrough],
+                   [] do
+      fetcher =
+        insert(:user,
+          local: false,
+          info: %{
+            ap_enabled: true,
+            source_data: %{"inbox" => "https://domain.com/users/nick1/inbox"}
+          }
+        )
+
+      another_fetcher =
+        insert(:user,
+          local: false,
+          info: %{
+            ap_enabled: true,
+            source_data: %{"inbox" => "https://domain2.com/users/nick1/inbox"}
+          }
+        )
+
+      actor = insert(:user)
+
+      note_activity = insert(:note_activity, user: actor)
+      object = Object.normalize(note_activity)
+
+      activity_path = String.trim_leading(note_activity.data["id"], Pleroma.Web.Endpoint.url())
+      object_path = String.trim_leading(object.data["id"], Pleroma.Web.Endpoint.url())
+
+      build_conn()
+      |> put_req_header("accept", "application/activity+json")
+      |> assign(:user, fetcher)
+      |> get(object_path)
+      |> json_response(200)
+
+      build_conn()
+      |> put_req_header("accept", "application/activity+json")
+      |> assign(:user, another_fetcher)
+      |> get(activity_path)
+      |> json_response(200)
+
+      {:ok, delete} = CommonAPI.delete(note_activity.id, actor)
+
+      res = Publisher.publish(actor, delete)
+      assert res == :ok
+
+      assert called(
+               Pleroma.Web.Federator.Publisher.enqueue_one(Publisher, %{
+                 inbox: "https://domain.com/users/nick1/inbox",
+                 actor_id: actor.id,
+                 id: delete.data["id"]
+               })
+             )
+
+      assert called(
+               Pleroma.Web.Federator.Publisher.enqueue_one(Publisher, %{
+                 inbox: "https://domain2.com/users/nick1/inbox",
+                 actor_id: actor.id,
+                 id: delete.data["id"]
+               })
+             )
+    end
   end
 end

@@ -99,7 +99,7 @@ test "returns error when object is unknown" do
       assert activity.data["type"] == "Announce"
       assert activity.data["actor"] == service_actor.ap_id
       assert activity.data["object"] == obj.data["id"]
-      assert called(Pleroma.Web.Federator.publish(activity, 5))
+      assert called(Pleroma.Web.Federator.publish(activity))
     end

     test_with_mock "returns announce activity and not publish to federate",

@@ -113,7 +113,7 @@ test "returns error when object is unknown" do
       assert activity.data["type"] == "Announce"
       assert activity.data["actor"] == service_actor.ap_id
       assert activity.data["object"] == obj.data["id"]
-      refute called(Pleroma.Web.Federator.publish(activity, 5))
+      refute called(Pleroma.Web.Federator.publish(activity))
     end
   end
 end

@@ -8,6 +8,7 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do
   alias Pleroma.Object
   alias Pleroma.Object.Fetcher
   alias Pleroma.Repo
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.Transmogrifier

@@ -648,6 +649,7 @@ test "it works for incoming user deletes" do
         |> Poison.decode!()

       {:ok, _} = Transmogrifier.handle_incoming(data)
+      ObanHelpers.perform_all()

       refute User.get_cached_by_ap_id(ap_id)
     end

@@ -1210,6 +1212,8 @@ test "it upgrades a user to activitypub" do
       assert user.info.note_count == 1

       {:ok, user} = Transmogrifier.upgrade_user_from_ap_id("https://niu.moe/users/rye")
+      ObanHelpers.perform_all()
+
       assert user.info.ap_enabled
       assert user.info.note_count == 1
       assert user.follower_address == "https://niu.moe/users/rye/followers"

@@ -2096,7 +2096,7 @@ test "queues key as atom", %{conn: conn} do
         post(conn, "/api/pleroma/admin/config", %{
           configs: [
             %{
-              "group" => "pleroma_job_queue",
+              "group" => "oban",
              "key" => ":queues",
              "value" => [
                %{"tuple" => [":federator_incoming", 50]},

@@ -2114,7 +2114,7 @@ test "queues key as atom", %{conn: conn} do
       assert json_response(conn, 200) == %{
                "configs" => [
                  %{
-                   "group" => "pleroma_job_queue",
+                   "group" => "oban",
                    "key" => ":queues",
                    "value" => [
                      %{"tuple" => [":federator_incoming", 50]},

@@ -4,9 +4,14 @@
 defmodule Pleroma.Web.FederatorTest do
   alias Pleroma.Instances
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.Web.CommonAPI
   alias Pleroma.Web.Federator
+  alias Pleroma.Workers.PublisherWorker

   use Pleroma.DataCase
+  use Oban.Testing, repo: Pleroma.Repo

   import Pleroma.Factory
   import Mock

@@ -24,15 +29,6 @@ defmodule Pleroma.Web.FederatorTest do
   clear_config([:instance, :rewrite_policy])
   clear_config([:mrf_keyword])

-  describe "Publisher.perform" do
-    test "call `perform` with unknown task" do
-      assert {
-               :error,
-               "Don't know what to do with this"
-             } = Pleroma.Web.Federator.Publisher.perform("test", :ok, :ok)
-    end
-  end
-
   describe "Publish an activity" do
     setup do
       user = insert(:user)

@@ -53,6 +49,7 @@ test "with relays active, it publishes to the relay", %{
     } do
       with_mocks([relay_mock]) do
         Federator.publish(activity)
+        ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
       end

       assert_received :relay_publish

@@ -66,6 +63,7 @@ test "with relays deactivated, it does not publish to the relay", %{

       with_mocks([relay_mock]) do
         Federator.publish(activity)
+        ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
       end

       refute_received :relay_publish

@@ -73,10 +71,7 @@ test "with relays deactivated, it does not publish to the relay", %{
   end

   describe "Targets reachability filtering in `publish`" do
-    test_with_mock "it federates only to reachable instances via AP",
-                   Pleroma.Web.ActivityPub.Publisher,
-                   [:passthrough],
-                   [] do
+    test "it federates only to reachable instances via AP" do
       user = insert(:user)

       {inbox1, inbox2} =

@@ -104,20 +99,20 @@ test "with relays deactivated, it does not publish to the relay", %{
       {:ok, _activity} =
         CommonAPI.post(user, %{"status" => "HI @nick1@domain.com, @nick2@domain2.com!"})

-      assert called(
-               Pleroma.Web.ActivityPub.Publisher.publish_one(%{
-                 inbox: inbox1,
-                 unreachable_since: dt
-               })
-             )
-
-      refute called(Pleroma.Web.ActivityPub.Publisher.publish_one(%{inbox: inbox2}))
+      expected_dt = NaiveDateTime.to_iso8601(dt)
+
+      ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
+
+      assert ObanHelpers.member?(
+               %{
+                 "op" => "publish_one",
+                 "params" => %{"inbox" => inbox1, "unreachable_since" => expected_dt}
+               },
+               all_enqueued(worker: PublisherWorker)
+             )
     end

-    test_with_mock "it federates only to reachable instances via Websub",
-                   Pleroma.Web.Websub,
-                   [:passthrough],
-                   [] do
+    test "it federates only to reachable instances via Websub" do
       user = insert(:user)
       websub_topic = Pleroma.Web.OStatus.feed_path(user)

@@ -142,23 +137,27 @@ test "with relays deactivated, it does not publish to the relay", %{

       {:ok, _activity} = CommonAPI.post(user, %{"status" => "HI"})

-      assert called(
-               Pleroma.Web.Websub.publish_one(%{
-                 callback: sub2.callback,
-                 unreachable_since: dt
-               })
-             )
-
-      refute called(Pleroma.Web.Websub.publish_one(%{callback: sub1.callback}))
+      expected_callback = sub2.callback
+      expected_dt = NaiveDateTime.to_iso8601(dt)
+
+      ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
+
+      assert ObanHelpers.member?(
+               %{
+                 "op" => "publish_one",
+                 "params" => %{
+                   "callback" => expected_callback,
+                   "unreachable_since" => expected_dt
+                 }
+               },
+               all_enqueued(worker: PublisherWorker)
+             )
     end

-    test_with_mock "it federates only to reachable instances via Salmon",
-                   Pleroma.Web.Salmon,
-                   [:passthrough],
-                   [] do
+    test "it federates only to reachable instances via Salmon" do
       user = insert(:user)

-      remote_user1 =
+      _remote_user1 =
         insert(:user, %{
           local: false,
           nickname: "nick1@domain.com",

@@ -174,6 +173,8 @@ test "with relays deactivated, it does not publish to the relay", %{
           info: %{salmon: "https://domain2.com/salmon"}
         })

+      remote_user2_id = remote_user2.id
+
       dt = NaiveDateTime.utc_now()
       Instances.set_unreachable(remote_user2.ap_id, dt)

@@ -182,14 +183,20 @@ test "with relays deactivated, it does not publish to the relay", %{
       {:ok, _activity} =
         CommonAPI.post(user, %{"status" => "HI @nick1@domain.com, @nick2@domain2.com!"})

-      assert called(
-               Pleroma.Web.Salmon.publish_one(%{
-                 recipient: remote_user2,
-                 unreachable_since: dt
-               })
-             )
-
-      refute called(Pleroma.Web.Salmon.publish_one(%{recipient: remote_user1}))
+      expected_dt = NaiveDateTime.to_iso8601(dt)
+
+      ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
+
+      assert ObanHelpers.member?(
+               %{
+                 "op" => "publish_one",
+                 "params" => %{
+                   "recipient_id" => remote_user2_id,
+                   "unreachable_since" => expected_dt
+                 }
+               },
+               all_enqueued(worker: PublisherWorker)
+             )
     end
   end

@@ -209,7 +216,8 @@ test "successfully processes incoming AP docs with correct origin" do
         "to" => ["https://www.w3.org/ns/activitystreams#Public"]
       }

-      {:ok, _activity} = Federator.incoming_ap_doc(params)
+      assert {:ok, job} = Federator.incoming_ap_doc(params)
+      assert {:ok, _activity} = ObanHelpers.perform(job)
     end

     test "rejects incoming AP docs with incorrect origin" do

@@ -227,7 +235,8 @@ test "rejects incoming AP docs with incorrect origin" do
         "to" => ["https://www.w3.org/ns/activitystreams#Public"]
       }

-      :error = Federator.incoming_ap_doc(params)
+      assert {:ok, job} = Federator.incoming_ap_doc(params)
+      assert :error = ObanHelpers.perform(job)
     end

     test "it does not crash if MRF rejects the post" do

@@ -242,7 +251,8 @@ test "it does not crash if MRF rejects the post" do
         File.read!("test/fixtures/mastodon-post-activity.json")
         |> Poison.decode!()

-      assert Federator.incoming_ap_doc(params) == :error
+      assert {:ok, job} = Federator.incoming_ap_doc(params)
+      assert :error = ObanHelpers.perform(job)
     end
   end
 end

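A detail that runs through the Federator, Websub and Salmon hunks above: Oban stores job arguments as JSON, so the rewritten assertions compare against the serialized shape of those arguments. Structs are passed by id (`recipient` becomes `"recipient_id"`), map keys become strings, and a `NaiveDateTime` round-trips as an ISO 8601 string. A short illustrative fragment, assuming the same `use Oban.Testing, repo: Pleroma.Repo` setup and Pleroma's `ObanHelpers` helper as the tests above (`dt` and `inbox1` stand in for values those tests build):

alias Pleroma.Tests.ObanHelpers
alias Pleroma.Workers.PublisherWorker

# Compare against the serialized argument shape, not the original Elixir terms.
expected_dt = NaiveDateTime.to_iso8601(dt)

assert ObanHelpers.member?(
         %{
           "op" => "publish_one",
           "params" => %{"inbox" => inbox1, "unreachable_since" => expected_dt}
         },
         all_enqueued(worker: PublisherWorker)
       )
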
@@ -16,7 +16,8 @@ defmodule Pleroma.Instances.InstanceTest do

   describe "set_reachable/1" do
     test "clears `unreachable_since` of existing matching Instance record having non-nil `unreachable_since`" do
-      instance = insert(:instance, unreachable_since: NaiveDateTime.utc_now())
+      unreachable_since = NaiveDateTime.to_iso8601(NaiveDateTime.utc_now())
+      instance = insert(:instance, unreachable_since: unreachable_since)

       assert {:ok, instance} = Instance.set_reachable(instance.host)
       refute instance.unreachable_since

@@ -14,6 +14,7 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIControllerTest do
   alias Pleroma.Repo
   alias Pleroma.ScheduledActivity
   alias Pleroma.SubscriptionNotification
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.CommonAPI

@@ -752,7 +753,7 @@ test "get statuses by IDs", %{conn: conn} do
     query_string = "ids[]=#{id1}&ids[]=#{id2}"
     conn = get(conn, "/api/v1/statuses/?#{query_string}")

-    assert [%{"id" => ^id1}, %{"id" => ^id2}] = json_response(conn, :ok)
+    assert [%{"id" => ^id1}, %{"id" => ^id2}] = Enum.sort_by(json_response(conn, :ok), & &1["id"])
   end

   describe "deleting a status" do

@@ -4089,6 +4090,7 @@ test "it creates a PasswordResetToken record for user", %{user: user} do
     end

     test "it sends an email to user", %{user: user} do
+      ObanHelpers.perform_all()
       token_record = Repo.get_by(Pleroma.PasswordResetToken, user_id: user.id)

       email = Pleroma.Emails.UserEmail.password_reset_email(user, token_record.token)

@@ -4149,6 +4151,8 @@ test "resend account confirmation email", %{conn: conn, user: user} do
       |> post("/api/v1/pleroma/accounts/confirmation_resend?email=#{user.email}")
       |> json_response(:no_content)

+      ObanHelpers.perform_all()
+
       email = Pleroma.Emails.UserEmail.account_confirmation_email(user)
       notify_email = Config.get([:instance, :notify_email])
       instance_name = Config.get([:instance, :name])

@@ -1,48 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule MockActivityPub do
-  def publish_one({ret, waiter}) do
-    send(waiter, :complete)
-    {ret, "success"}
-  end
-end
-
-defmodule Pleroma.Web.Federator.RetryQueueTest do
-  use Pleroma.DataCase
-  alias Pleroma.Web.Federator.RetryQueue
-
-  @small_retry_count 0
-  @hopeless_retry_count 10
-
-  setup do
-    RetryQueue.reset_stats()
-  end
-
-  test "RetryQueue responds to stats request" do
-    assert %{delivered: 0, dropped: 0} == RetryQueue.get_stats()
-  end
-
-  test "failed posts are retried" do
-    {:retry, _timeout} = RetryQueue.get_retry_params(@small_retry_count)
-
-    wait_task =
-      Task.async(fn ->
-        receive do
-          :complete -> :ok
-        end
-      end)
-
-    RetryQueue.enqueue({:ok, wait_task.pid}, MockActivityPub, @small_retry_count)
-    Task.await(wait_task)
-    assert %{delivered: 1, dropped: 0} == RetryQueue.get_stats()
-  end
-
-  test "posts that have been tried too many times are dropped" do
-    {:drop, _timeout} = RetryQueue.get_retry_params(@hopeless_retry_count)
-
-    RetryQueue.enqueue({:ok, nil}, MockActivityPub, @hopeless_retry_count)
-    assert %{delivered: 0, dropped: 1} == RetryQueue.get_stats()
-  end
-end

@@ -96,6 +96,6 @@ test "it gets a magic key" do
     Salmon.publish(user, activity)

-    assert called(Publisher.enqueue_one(Salmon, %{recipient: mentioned_user}))
+    assert called(Publisher.enqueue_one(Salmon, %{recipient_id: mentioned_user.id}))
   end
 end

36 test/web/streamer/ping_test.exs Normal file
@@ -0,0 +1,36 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.PingTest do
+  use Pleroma.DataCase
+
+  import Pleroma.Factory
+  alias Pleroma.Web.Streamer
+
+  setup do
+    start_supervised({Streamer.supervisor(), [ping_interval: 30]})
+
+    :ok
+  end
+
+  describe "sockets" do
+    setup do
+      user = insert(:user)
+      {:ok, %{user: user}}
+    end
+
+    test "it sends pings", %{user: user} do
+      task =
+        Task.async(fn ->
+          assert_receive {:text, received_event}, 40
+          assert_receive {:text, received_event}, 40
+          assert_receive {:text, received_event}, 40
+        end)
+
+      Streamer.add_socket("public", %{transport_pid: task.pid, assigns: %{user: user}})
+
+      Task.await(task)
+    end
+  end
+end

54 test/web/streamer/state_test.exs Normal file
@@ -0,0 +1,54 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.StateTest do
+  use Pleroma.DataCase
+
+  import Pleroma.Factory
+  alias Pleroma.Web.Streamer
+  alias Pleroma.Web.Streamer.StreamerSocket
+
+  @moduletag needs_streamer: true
+
+  describe "sockets" do
+    setup do
+      user = insert(:user)
+      user2 = insert(:user)
+      {:ok, %{user: user, user2: user2}}
+    end
+
+    test "it can add a socket", %{user: user} do
+      Streamer.add_socket("public", %{transport_pid: 1, assigns: %{user: user}})
+
+      assert(%{"public" => [%StreamerSocket{transport_pid: 1}]} = Streamer.get_sockets())
+    end
+
+    test "it can add multiple sockets per user", %{user: user} do
+      Streamer.add_socket("public", %{transport_pid: 1, assigns: %{user: user}})
+      Streamer.add_socket("public", %{transport_pid: 2, assigns: %{user: user}})
+
+      assert(
+        %{
+          "public" => [
+            %StreamerSocket{transport_pid: 2},
+            %StreamerSocket{transport_pid: 1}
+          ]
+        } = Streamer.get_sockets()
+      )
+    end
+
+    test "it will not add a duplicate socket", %{user: user} do
+      Streamer.add_socket("activity", %{transport_pid: 1, assigns: %{user: user}})
+      Streamer.add_socket("activity", %{transport_pid: 1, assigns: %{user: user}})
+
+      assert(
+        %{
+          "activity" => [
+            %StreamerSocket{transport_pid: 1}
+          ]
+        } = Streamer.get_sockets()
+      )
+    end
+  end
+end

|
@ -5,24 +5,20 @@
|
||||||
defmodule Pleroma.Web.StreamerTest do
|
defmodule Pleroma.Web.StreamerTest do
|
||||||
use Pleroma.DataCase
|
use Pleroma.DataCase
|
||||||
|
|
||||||
|
import Pleroma.Factory
|
||||||
|
|
||||||
alias Pleroma.List
|
alias Pleroma.List
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
alias Pleroma.Web.CommonAPI
|
alias Pleroma.Web.CommonAPI
|
||||||
alias Pleroma.Web.Streamer
|
alias Pleroma.Web.Streamer
|
||||||
import Pleroma.Factory
|
alias Pleroma.Web.Streamer.StreamerSocket
|
||||||
|
alias Pleroma.Web.Streamer.Worker
|
||||||
|
|
||||||
|
@moduletag needs_streamer: true
|
||||||
clear_config_all([:instance, :skip_thread_containment])
|
clear_config_all([:instance, :skip_thread_containment])
|
||||||
|
|
||||||
describe "user streams" do
|
describe "user streams" do
|
||||||
setup do
|
setup do
|
||||||
GenServer.start(Streamer, %{}, name: Streamer)
|
|
||||||
|
|
||||||
on_exit(fn ->
|
|
||||||
if pid = Process.whereis(Streamer) do
|
|
||||||
Process.exit(pid, :kill)
|
|
||||||
end
|
|
||||||
end)
|
|
||||||
|
|
||||||
user = insert(:user)
|
user = insert(:user)
|
||||||
notify = insert(:notification, user: user, activity: build(:note_activity))
|
notify = insert(:notification, user: user, activity: build(:note_activity))
|
||||||
{:ok, %{user: user, notify: notify}}
|
{:ok, %{user: user, notify: notify}}
|
||||||
|
@ -125,12 +121,10 @@ test "it sends to public" do
|
||||||
assert_receive {:text, _}, 4_000
|
assert_receive {:text, _}, 4_000
|
||||||
end)
|
end)
|
||||||
|
|
||||||
fake_socket = %{
|
fake_socket = %StreamerSocket{
|
||||||
transport_pid: task.pid,
|
transport_pid: task.pid,
|
||||||
assigns: %{
|
|
||||||
user: user
|
user: user
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
{:ok, activity} = CommonAPI.post(other_user, %{"status" => "Test"})
|
{:ok, activity} = CommonAPI.post(other_user, %{"status" => "Test"})
|
||||||
|
|
||||||
|
@ -138,7 +132,7 @@ test "it sends to public" do
|
||||||
"public" => [fake_socket]
|
"public" => [fake_socket]
|
||||||
}
|
}
|
||||||
|
|
||||||
Streamer.push_to_socket(topics, "public", activity)
|
Worker.push_to_socket(topics, "public", activity)
|
||||||
|
|
||||||
Task.await(task)
|
Task.await(task)
|
||||||
|
|
||||||
|
@ -155,12 +149,10 @@ test "it sends to public" do
|
||||||
assert received_event == expected_event
|
assert received_event == expected_event
|
||||||
end)
|
end)
|
||||||
|
|
||||||
fake_socket = %{
|
fake_socket = %StreamerSocket{
|
||||||
transport_pid: task.pid,
|
transport_pid: task.pid,
|
||||||
assigns: %{
|
|
||||||
user: user
|
user: user
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
{:ok, activity} = CommonAPI.delete(activity.id, other_user)
|
{:ok, activity} = CommonAPI.delete(activity.id, other_user)
|
||||||
|
|
||||||
|
@ -168,7 +160,7 @@ test "it sends to public" do
|
||||||
"public" => [fake_socket]
|
"public" => [fake_socket]
|
||||||
}
|
}
|
||||||
|
|
||||||
Streamer.push_to_socket(topics, "public", activity)
|
Worker.push_to_socket(topics, "public", activity)
|
||||||
|
|
||||||
Task.await(task)
|
Task.await(task)
|
||||||
end
|
end
|
||||||
|
@ -189,9 +181,9 @@ test "it doesn't send to user if recipients invalid and thread containment is en
|
||||||
)
|
)
|
||||||
|
|
||||||
task = Task.async(fn -> refute_receive {:text, _}, 1_000 end)
|
task = Task.async(fn -> refute_receive {:text, _}, 1_000 end)
|
||||||
fake_socket = %{transport_pid: task.pid, assigns: %{user: user}}
|
fake_socket = %StreamerSocket{transport_pid: task.pid, user: user}
|
||||||
topics = %{"public" => [fake_socket]}
|
topics = %{"public" => [fake_socket]}
|
||||||
Streamer.push_to_socket(topics, "public", activity)
|
Worker.push_to_socket(topics, "public", activity)
|
||||||
|
|
||||||
Task.await(task)
|
Task.await(task)
|
||||||
end
|
end
|
||||||
|
@ -211,9 +203,9 @@ test "it sends message if recipients invalid and thread containment is disabled"
|
||||||
)
|
)
|
||||||
|
|
||||||
task = Task.async(fn -> assert_receive {:text, _}, 1_000 end)
|
task = Task.async(fn -> assert_receive {:text, _}, 1_000 end)
|
||||||
fake_socket = %{transport_pid: task.pid, assigns: %{user: user}}
|
fake_socket = %StreamerSocket{transport_pid: task.pid, user: user}
|
||||||
topics = %{"public" => [fake_socket]}
|
topics = %{"public" => [fake_socket]}
|
||||||
Streamer.push_to_socket(topics, "public", activity)
|
Worker.push_to_socket(topics, "public", activity)
|
||||||
|
|
||||||
Task.await(task)
|
Task.await(task)
|
||||||
end
|
end
|
||||||
|
@ -233,9 +225,9 @@ test "it sends message if recipients invalid and thread containment is enabled b
|
||||||
)
|
)
|
||||||
|
|
||||||
task = Task.async(fn -> assert_receive {:text, _}, 1_000 end)
|
task = Task.async(fn -> assert_receive {:text, _}, 1_000 end)
|
||||||
fake_socket = %{transport_pid: task.pid, assigns: %{user: user}}
|
fake_socket = %StreamerSocket{transport_pid: task.pid, user: user}
|
||||||
topics = %{"public" => [fake_socket]}
|
topics = %{"public" => [fake_socket]}
|
||||||
Streamer.push_to_socket(topics, "public", activity)
|
Worker.push_to_socket(topics, "public", activity)
|
||||||
|
|
||||||
Task.await(task)
|
Task.await(task)
|
||||||
end
|
end
|
||||||
|
@ -251,12 +243,10 @@ test "it doesn't send to blocked users" do
|
||||||
refute_receive {:text, _}, 1_000
|
refute_receive {:text, _}, 1_000
|
||||||
end)
|
end)
|
||||||
|
|
||||||
fake_socket = %{
|
fake_socket = %StreamerSocket{
|
||||||
transport_pid: task.pid,
|
transport_pid: task.pid,
|
||||||
assigns: %{
|
|
||||||
user: user
|
user: user
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
{:ok, activity} = CommonAPI.post(blocked_user, %{"status" => "Test"})
|
{:ok, activity} = CommonAPI.post(blocked_user, %{"status" => "Test"})
|
||||||
|
|
||||||
|
@ -264,7 +254,7 @@ test "it doesn't send to blocked users" do
|
||||||
"public" => [fake_socket]
|
"public" => [fake_socket]
|
||||||
}
|
}
|
||||||
|
|
||||||
Streamer.push_to_socket(topics, "public", activity)
|
Worker.push_to_socket(topics, "public", activity)
|
||||||
|
|
||||||
Task.await(task)
|
Task.await(task)
|
||||||
end
|
end
|
||||||
|
@ -284,12 +274,10 @@ test "it doesn't send unwanted DMs to list" do
|
||||||
refute_receive {:text, _}, 1_000
|
refute_receive {:text, _}, 1_000
|
||||||
end)
|
end)
|
||||||
|
|
||||||
fake_socket = %{
|
fake_socket = %StreamerSocket{
|
||||||
transport_pid: task.pid,
|
transport_pid: task.pid,
|
||||||
assigns: %{
|
|
||||||
user: user_a
|
user: user_a
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
{:ok, activity} =
|
{:ok, activity} =
|
||||||
CommonAPI.post(user_b, %{
|
CommonAPI.post(user_b, %{
|
||||||
|
@ -301,7 +289,7 @@ test "it doesn't send unwanted DMs to list" do
|
||||||
"list:#{list.id}" => [fake_socket]
|
"list:#{list.id}" => [fake_socket]
|
||||||
}
|
}
|
||||||
|
|
||||||
Streamer.handle_cast(%{action: :stream, topic: "list", item: activity}, topics)
|
Worker.handle_call({:stream, "list", activity}, self(), topics)
|
||||||
|
|
||||||
Task.await(task)
|
Task.await(task)
|
||||||
end
|
end
|
||||||
|
@ -318,12 +306,10 @@ test "it doesn't send unwanted private posts to list" do
|
||||||
refute_receive {:text, _}, 1_000
|
refute_receive {:text, _}, 1_000
|
||||||
end)
|
end)
|
||||||
|
|
||||||
fake_socket = %{
|
fake_socket = %StreamerSocket{
|
||||||
transport_pid: task.pid,
|
transport_pid: task.pid,
|
||||||
assigns: %{
|
|
||||||
user: user_a
|
user: user_a
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
{:ok, activity} =
|
{:ok, activity} =
|
||||||
CommonAPI.post(user_b, %{
|
CommonAPI.post(user_b, %{
|
||||||
|
@ -335,12 +321,12 @@ test "it doesn't send unwanted private posts to list" do
|
||||||
"list:#{list.id}" => [fake_socket]
|
"list:#{list.id}" => [fake_socket]
|
||||||
}
|
}
|
||||||
|
|
||||||
Streamer.handle_cast(%{action: :stream, topic: "list", item: activity}, topics)
|
Worker.handle_call({:stream, "list", activity}, self(), topics)
|
||||||
|
|
||||||
Task.await(task)
|
Task.await(task)
|
||||||
end
|
end
|
||||||
|
|
||||||
test "it send wanted private posts to list" do
|
test "it sends wanted private posts to list" do
|
||||||
user_a = insert(:user)
|
user_a = insert(:user)
|
||||||
user_b = insert(:user)
|
user_b = insert(:user)
|
||||||
|
|
||||||
|
@ -354,12 +340,10 @@ test "it send wanted private posts to list" do
|
||||||
assert_receive {:text, _}, 1_000
|
assert_receive {:text, _}, 1_000
|
||||||
end)
|
end)
|
||||||
|
|
||||||
fake_socket = %{
|
fake_socket = %StreamerSocket{
|
||||||
transport_pid: task.pid,
|
transport_pid: task.pid,
|
||||||
assigns: %{
|
|
||||||
user: user_a
|
user: user_a
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
{:ok, activity} =
|
{:ok, activity} =
|
||||||
CommonAPI.post(user_b, %{
|
CommonAPI.post(user_b, %{
|
||||||
|
@ -367,11 +351,12 @@ test "it send wanted private posts to list" do
|
||||||
"visibility" => "private"
|
"visibility" => "private"
|
||||||
})
|
})
|
||||||
|
|
||||||
topics = %{
|
Streamer.add_socket(
|
||||||
"list:#{list.id}" => [fake_socket]
|
"list:#{list.id}",
|
||||||
}
|
fake_socket
|
||||||
|
)
|
||||||
|
|
||||||
Streamer.handle_cast(%{action: :stream, topic: "list", item: activity}, topics)
|
Worker.handle_call({:stream, "list", activity}, self(), %{})
|
||||||
|
|
||||||
Task.await(task)
|
Task.await(task)
|
||||||
end
|
end
|
||||||
|
@ -387,12 +372,10 @@ test "it doesn't send muted reblogs" do
|
||||||
refute_receive {:text, _}, 1_000
|
refute_receive {:text, _}, 1_000
|
||||||
end)
|
end)
|
||||||
|
|
||||||
fake_socket = %{
|
fake_socket = %StreamerSocket{
|
||||||
transport_pid: task.pid,
|
transport_pid: task.pid,
|
||||||
assigns: %{
|
|
||||||
user: user1
|
user: user1
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
{:ok, create_activity} = CommonAPI.post(user3, %{"status" => "I'm kawen"})
|
{:ok, create_activity} = CommonAPI.post(user3, %{"status" => "I'm kawen"})
|
||||||
{:ok, announce_activity, _} = CommonAPI.repeat(create_activity.id, user2)
|
{:ok, announce_activity, _} = CommonAPI.repeat(create_activity.id, user2)
|
||||||
|
@ -401,7 +384,7 @@ test "it doesn't send muted reblogs" do
|
||||||
"public" => [fake_socket]
|
"public" => [fake_socket]
|
||||||
}
|
}
|
||||||
|
|
||||||
Streamer.push_to_socket(topics, "public", announce_activity)
|
Worker.push_to_socket(topics, "public", announce_activity)
|
||||||
|
|
||||||
Task.await(task)
|
Task.await(task)
|
||||||
end
|
end
|
||||||
|
@ -417,6 +400,8 @@ test "it doesn't send posts from muted threads" do
|
||||||
|
|
||||||
task = Task.async(fn -> refute_receive {:text, _}, 4_000 end)
|
task = Task.async(fn -> refute_receive {:text, _}, 4_000 end)
|
||||||
|
|
||||||
|
Process.sleep(4000)
|
||||||
|
|
||||||
Streamer.add_socket(
|
Streamer.add_socket(
|
||||||
"user",
|
"user",
|
||||||
%{transport_pid: task.pid, assigns: %{user: user2}}
|
%{transport_pid: task.pid, assigns: %{user: user2}}
|
||||||
|
@ -428,14 +413,6 @@ test "it doesn't send posts from muted threads" do
|
||||||
|
|
||||||
describe "direct streams" do
|
describe "direct streams" do
|
||||||
setup do
|
setup do
|
||||||
GenServer.start(Streamer, %{}, name: Streamer)
|
|
||||||
|
|
||||||
on_exit(fn ->
|
|
||||||
if pid = Process.whereis(Streamer) do
|
|
||||||
Process.exit(pid, :kill)
|
|
||||||
end
|
|
||||||
end)
|
|
||||||
|
|
||||||
:ok
|
:ok
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -480,6 +457,8 @@ test "it doesn't send conversation update to the 'direct' streamj when the last
|
||||||
refute_receive {:text, _}, 4_000
|
refute_receive {:text, _}, 4_000
|
||||||
end)
|
end)
|
||||||
|
|
||||||
|
Process.sleep(1000)
|
||||||
|
|
||||||
Streamer.add_socket(
|
Streamer.add_socket(
|
||||||
"direct",
|
"direct",
|
||||||
%{transport_pid: task.pid, assigns: %{user: user}}
|
%{transport_pid: task.pid, assigns: %{user: user}}
|
||||||
|
@ -521,6 +500,8 @@ test "it sends conversation update to the 'direct' stream when a message is dele
|
||||||
assert last_status["id"] == to_string(create_activity.id)
|
assert last_status["id"] == to_string(create_activity.id)
|
||||||
end)
|
end)
|
||||||
|
|
||||||
|
Process.sleep(1000)
|
||||||
|
|
||||||
Streamer.add_socket(
|
Streamer.add_socket(
|
||||||
"direct",
|
"direct",
|
||||||
%{transport_pid: task.pid, assigns: %{user: user}}
|
%{transport_pid: task.pid, assigns: %{user: user}}
|
|
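The streamer hunks above drop the per-describe GenServer.start/on_exit boilerplate in favour of a `needs_streamer: true` module tag; presumably the shared test case starts the streamer supervision tree when it sees that tag. A rough, hypothetical sketch of such a hook (the actual hook is not part of these hunks; it reuses the `Streamer.supervisor/0` helper seen in the new ping test):

# Hypothetical shared-setup snippet, e.g. in a DataCase/ConnCase setup block.
setup tags do
  if tags[:needs_streamer] do
    start_supervised!(Pleroma.Web.Streamer.supervisor())
  end

  :ok
end
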
@@ -5,6 +5,7 @@
 defmodule Pleroma.Web.TwitterAPI.TwitterAPITest do
   use Pleroma.DataCase
   alias Pleroma.Repo
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.UserInviteToken
   alias Pleroma.Web.MastodonAPI.AccountView

@@ -68,6 +69,7 @@ test "it sends confirmation email if :account_activation_required is specified i
     }

     {:ok, user} = TwitterAPI.register_user(data)
+    ObanHelpers.perform_all()

     assert user.info.confirmation_pending

@@ -4,8 +4,10 @@

 defmodule Pleroma.Web.TwitterAPI.UtilControllerTest do
   use Pleroma.Web.ConnCase
+  use Oban.Testing, repo: Pleroma.Repo

   alias Pleroma.Repo
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.CommonAPI
   import ExUnit.CaptureLog

@@ -43,8 +45,7 @@ test "it imports follow lists from file", %{conn: conn} do
         {File, [],
          read!: fn "follow_list.txt" ->
            "Account address,Show boosts\n#{user2.ap_id},true"
-         end},
-        {PleromaJobQueue, [:passthrough], []}
+         end}
       ]) do
         response =
           conn

@@ -52,15 +53,16 @@ test "it imports follow lists from file", %{conn: conn} do
           |> post("/api/pleroma/follow_import", %{"list" => %Plug.Upload{path: "follow_list.txt"}})
           |> json_response(:ok)

-        assert called(
-                 PleromaJobQueue.enqueue(
-                   :background,
-                   User,
-                   [:follow_import, user1, [user2.ap_id]]
-                 )
-               )
-
         assert response == "job started"
+
+        assert ObanHelpers.member?(
+                 %{
+                   "op" => "follow_import",
+                   "follower_id" => user1.id,
+                   "followed_identifiers" => [user2.ap_id]
+                 },
+                 all_enqueued(worker: Pleroma.Workers.BackgroundWorker)
+               )
       end
     end

@@ -119,8 +121,7 @@ test "it imports blocks users from file", %{conn: conn} do
       user3 = insert(:user)

       with_mocks([
-        {File, [], read!: fn "blocks_list.txt" -> "#{user2.ap_id} #{user3.ap_id}" end},
-        {PleromaJobQueue, [:passthrough], []}
+        {File, [], read!: fn "blocks_list.txt" -> "#{user2.ap_id} #{user3.ap_id}" end}
       ]) do
         response =
           conn

@@ -128,15 +129,16 @@ test "it imports blocks users from file", %{conn: conn} do
           |> post("/api/pleroma/blocks_import", %{"list" => %Plug.Upload{path: "blocks_list.txt"}})
           |> json_response(:ok)

-        assert called(
-                 PleromaJobQueue.enqueue(
-                   :background,
-                   User,
-                   [:blocks_import, user1, [user2.ap_id, user3.ap_id]]
-                 )
-               )
-
         assert response == "job started"
+
+        assert ObanHelpers.member?(
+                 %{
+                   "op" => "blocks_import",
+                   "blocker_id" => user1.id,
+                   "blocked_identifiers" => [user2.ap_id, user3.ap_id]
+                 },
+                 all_enqueued(worker: Pleroma.Workers.BackgroundWorker)
+               )
       end
     end
   end

@@ -560,6 +562,7 @@ test "it returns HTTP 200", %{conn: conn} do
       |> json_response(:ok)

     assert response == %{"status" => "success"}
+    ObanHelpers.perform_all()

     user = User.get_cached_by_id(user.id)

@@ -4,11 +4,14 @@

 defmodule Pleroma.Web.WebsubTest do
   use Pleroma.DataCase
+  use Oban.Testing, repo: Pleroma.Repo

+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.Web.Router.Helpers
   alias Pleroma.Web.Websub
   alias Pleroma.Web.Websub.WebsubClientSubscription
   alias Pleroma.Web.Websub.WebsubServerSubscription
+  alias Pleroma.Workers.SubscriberWorker

   import Pleroma.Factory
   import Tesla.Mock

@@ -224,6 +227,7 @@ test "it renews subscriptions that have less than a day of time left" do
       })

     _refresh = Websub.refresh_subscriptions()
+    ObanHelpers.perform(all_enqueued(worker: SubscriberWorker))

     assert still_good == Repo.get(WebsubClientSubscription, still_good.id)
     refute needs_refresh == Repo.get(WebsubClientSubscription, needs_refresh.id)