Merge branch 'develop' of git.pleroma.social:pleroma/pleroma into update/admin-fe-20200211
This commit is contained in: commit f9eb35d48e
68 changed files with 669 additions and 444 deletions
@@ -37,6 +37,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 <details>
 <summary>API Changes</summary>

+- **Breaking** EmojiReactions: Change endpoints and responses to align with Mastodon
 - **Breaking** Admin API: `PATCH /api/pleroma/admin/users/:nickname/force_password_reset` is now `PATCH /api/pleroma/admin/users/force_password_reset` (accepts `nicknames` array in the request body)
 - **Breaking:** Admin API: Return link alongside with token on password reset
 - **Breaking:** Admin API: `PUT /api/pleroma/admin/reports/:id` is now `PATCH /api/pleroma/admin/reports`, see admin_api.md for details
@@ -51,20 +51,6 @@
 telemetry_event: [Pleroma.Repo.Instrumenter],
 migration_lock: nil

-scheduled_jobs =
-with digest_config <- Application.get_env(:pleroma, :email_notifications)[:digest],
-true <- digest_config[:active] do
-[{digest_config[:schedule], {Pleroma.Daemons.DigestEmailDaemon, :perform, []}}]
-else
-_ -> []
-end
-
-config :pleroma, Pleroma.Scheduler,
-global: true,
-overlap: true,
-timezone: :utc,
-jobs: scheduled_jobs
-
 config :pleroma, Pleroma.Captcha,
 enabled: true,
 seconds_valid: 300,
@@ -495,6 +481,12 @@
 scheduled_activities: 10,
 background: 5,
 attachments_cleanup: 5
+],
+crontab: [
+{"0 0 * * *", Pleroma.Workers.Cron.ClearOauthTokenWorker},
+{"0 * * * *", Pleroma.Workers.Cron.StatsWorker},
+{"* * * * *", Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker},
+{"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker}
 ]

 config :pleroma, :workers,
@@ -578,7 +570,6 @@
 config :pleroma, :email_notifications,
 digest: %{
 active: false,
-schedule: "0 0 * * 0",
 interval: 7,
 inactivity_threshold: 7
 }
@@ -586,8 +577,7 @@
 config :pleroma, :oauth2,
 token_expires_in: 600,
 issue_new_refresh_token: true,
-clean_expired_tokens: false,
-clean_expired_tokens_interval: 86_400_000
+clean_expired_tokens: false

 config :pleroma, :database, rum_enabled: false

@@ -622,7 +612,8 @@

 config :pleroma, configurable_from_database: false

-config :swarm, node_blacklist: [~r/myhtml_.*$/]
+config :floki, :html_parser, Floki.HTMLParser.FastHtml

 # Import environment specific config. This must remain at the bottom
 # of this file so it overrides the configuration defined above.
 import_config "#{Mix.env()}.exs"
@@ -2519,13 +2519,6 @@
 key: :clean_expired_tokens,
 type: :boolean,
 description: "Enable a background job to clean expired oauth tokens. Default: `false`."
-},
-%{
-key: :clean_expired_tokens_interval,
-type: :integer,
-description:
-"Interval to run the job to clean expired tokens. Default: 86_400_000 (24 hours).",
-suggestions: [86_400_000]
 }
 ]
 },
@@ -66,11 +66,8 @@

 config :pleroma, Oban,
 queues: false,
-prune: :disabled
+prune: :disabled,
+crontab: false
-
-config :pleroma, Pleroma.Scheduler,
-jobs: [],
-global: false

 config :pleroma, Pleroma.ScheduledActivity,
 daily_user_limit: 2,
@@ -29,7 +29,7 @@ Has these additional fields under the `pleroma` object:
 - `spoiler_text`: a map consisting of alternate representations of the `spoiler_text` property with the key being it's mimetype. Currently the only alternate representation supported is `text/plain`
 - `expires_at`: a datetime (iso8601) that states when the post will expire (be deleted automatically), or empty if the post won't expire
 - `thread_muted`: true if the thread the post belongs to is muted
-- `emoji_reactions`: A list with emoji / reaction maps. The format is `{emoji: "☕", count: 1, reacted: true}`. Contains no information about the reacting users, for that use the `emoji_reactions_by` endpoint.
+- `emoji_reactions`: A list with emoji / reaction maps. The format is `{name: "☕", count: 1, me: true}`. Contains no information about the reacting users, for that use the `/statuses/:id/reactions` endpoint.

 ## Attachments

@@ -88,6 +88,9 @@ Behavior has changed:

 - `/api/v1/accounts/search`: Does not require authentication

+## Search (global)
+
+Unlisted posts are available in search results, they are considered to be public posts that shouldn't be shown in local/federated timeline.

 ## Notifications
@@ -432,21 +432,21 @@ The status posting endpoint takes an additional parameter, `in_reply_to_conversa

 Emoji reactions work a lot like favourites do. They make it possible to react to a post with a single emoji character.

-## `POST /api/v1/pleroma/statuses/:id/react_with_emoji`
+## `PUT /api/v1/pleroma/statuses/:id/reactions/:emoji`
 ### React to a post with a unicode emoji
-* Method: `POST`
+* Method: `PUT`
 * Authentication: required
 * Params: `emoji`: A single character unicode emoji
 * Response: JSON, the status.

-## `POST /api/v1/pleroma/statuses/:id/unreact_with_emoji`
+## `DELETE /api/v1/pleroma/statuses/:id/reactions/:emoji`
 ### Remove a reaction to a post with a unicode emoji
-* Method: `POST`
+* Method: `DELETE`
 * Authentication: required
 * Params: `emoji`: A single character unicode emoji
 * Response: JSON, the status.

-## `GET /api/v1/pleroma/statuses/:id/emoji_reactions_by`
+## `GET /api/v1/pleroma/statuses/:id/reactions`
 ### Get an object of emoji to account mappings with accounts that reacted to the post
 * Method: `GET`
 * Authentication: optional
@@ -455,7 +455,7 @@ Emoji reactions work a lot like favourites do. They make it possible to react to
 * Example Response:
 ```json
 [
-{"emoji": "😀", "count": 2, "reacted": true, "accounts": [{"id" => "xyz.."...}, {"id" => "zyx..."}]},
-{"emoji": "☕", "count": 1, "reacted": false, "accounts": [{"id" => "abc..."}]}
+{"name": "😀", "count": 2, "me": true, "accounts": [{"id" => "xyz.."...}, {"id" => "zyx..."}]},
+{"name": "☕", "count": 1, "me": false, "accounts": [{"id" => "abc..."}]}
 ]
 ```
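As a usage sketch (not part of the diff), the renamed reaction endpoints can be exercised from Elixir with Tesla, which the project already depends on; the module name, base URL, access token and status id below are placeholders:

```elixir
# Hypothetical helper for trying the renamed endpoints; the module name,
# base URL, token and status id are placeholders, not part of this commit.
defmodule ReactionClient do
  def client(base_url, token) do
    Tesla.client([
      {Tesla.Middleware.BaseUrl, base_url},
      {Tesla.Middleware.Headers, [{"authorization", "Bearer " <> token}]},
      Tesla.Middleware.JSON
    ])
  end

  # PUT /api/v1/pleroma/statuses/:id/reactions/:emoji — add a reaction
  def react(client, status_id, emoji),
    do: Tesla.put(client, "/api/v1/pleroma/statuses/#{status_id}/reactions/#{URI.encode(emoji)}", %{})

  # DELETE /api/v1/pleroma/statuses/:id/reactions/:emoji — remove a reaction
  def unreact(client, status_id, emoji),
    do: Tesla.delete(client, "/api/v1/pleroma/statuses/#{status_id}/reactions/#{URI.encode(emoji)}")

  # GET /api/v1/pleroma/statuses/:id/reactions — list of {name, count, me, accounts} maps
  def reactions(client, status_id),
    do: Tesla.get(client, "/api/v1/pleroma/statuses/#{status_id}/reactions")
end
```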
@@ -1,17 +1,35 @@
-# Backup/Restore your instance
+# Backup/Restore/Move/Remove your instance

 ## Backup

 1. Stop the Pleroma service.
 2. Go to the working directory of Pleroma (default is `/opt/pleroma`)
-3. Run `sudo -Hu postgres pg_dump -d <pleroma_db> --format=custom -f </path/to/backup_location/pleroma.pgdump>`
+3. Run `sudo -Hu postgres pg_dump -d <pleroma_db> --format=custom -f </path/to/backup_location/pleroma.pgdump>` (make sure the postgres user has write access to the destination file)
 4. Copy `pleroma.pgdump`, `config/prod.secret.exs` and the `uploads` folder to your backup destination. If you have other modifications, copy those changes too.
 5. Restart the Pleroma service.

-## Restore
+## Restore/Move

-1. Stop the Pleroma service.
-2. Go to the working directory of Pleroma (default is `/opt/pleroma`)
-3. Copy the above mentioned files back to their original position.
-4. Run `sudo -Hu postgres pg_restore -d <pleroma_db> -v -1 </path/to/backup_location/pleroma.pgdump>`
-5. Restart the Pleroma service.
+1. Optionally reinstall Pleroma (either on the same server or on another server if you want to move servers). Try to use the same database name.
+2. Stop the Pleroma service.
+3. Go to the working directory of Pleroma (default is `/opt/pleroma`)
+4. Copy the above mentioned files back to their original position.
+5. Drop the existing database and recreate an empty one `sudo -Hu postgres psql -c 'DROP DATABASE <pleroma_db>;';` `sudo -Hu postgres psql -c 'CREATE DATABASE <pleroma_db>;';`
+6. Run `sudo -Hu postgres pg_restore -d <pleroma_db> -v -1 </path/to/backup_location/pleroma.pgdump>`
+7. If you installed a newer Pleroma version, you should run `mix ecto.migrate`[^1]. This task performs database migrations, if there were any.
+8. Restart the Pleroma service.
+
+[^1]: Prefix with `MIX_ENV=prod` to run it using the production config file.
+
+## Remove
+
+1. Optionally you can remove the users of your instance. This will trigger delete requests for their accounts and posts. Note that this is 'best effort' and doesn't mean that all traces of your instance will be gone from the fediverse.
+* You can do this from the admin-FE where you can select all local users and delete the accounts using the *Moderate multiple users* dropdown.
+* You can also list local users and delete them individualy using the CLI tasks for [Managing users](./CLI_tasks/user.md).
+2. Stop the Pleroma service `systemctl stop pleroma`
+3. Disable pleroma from systemd `systemctl disable pleroma`
+4. Remove the files and folders you created during installation (see installation guide). This includes the pleroma, nginx and systemd files and folders.
+5. Reload nginx now that the configuration is removed `systemctl reload nginx`
+6. Remove the database and database user `sudo -Hu postgres psql -c 'DROP DATABASE <pleroma_db>;';` `sudo -Hu postgres psql -c 'DROP USER <pleroma_db>;';`
+7. Remove the system user `userdel pleroma`
+8. Remove the dependencies that you don't need anymore (see installation guide). Make sure you don't remove packages that are still needed for other software that you have running!
@@ -37,6 +37,11 @@ Feel free to contact us to be added to this list!
 - Platforms: Android
 - Features: Streaming Ready, Moderation, Text Formatting

+### Kyclos
+- Source Code: <https://git.pleroma.social/pleroma/harbour-kyclos>
+- Platforms: SailfishOS
+- Features: No Streaming
+
 ### Nekonium
 - Homepage: [F-Droid Repository](https://repo.gdgd.jp.net/), [Google Play](https://play.google.com/store/apps/details?id=com.apps.nekonium), [Amazon](https://www.amazon.co.jp/dp/B076FXPRBC/)
 - Source: <https://gogs.gdgd.jp.net/lin/nekonium>
@@ -513,6 +513,7 @@ Configuration options described in [Oban readme](https://github.com/sorentwo/oba
 * `verbose` - logs verbosity
 * `prune` - non-retryable jobs [pruning settings](https://github.com/sorentwo/oban#pruning) (`:disabled` / `{:maxlen, value}` / `{:maxage, value}`)
 * `queues` - job queues (see below)
+* `crontab` - periodic jobs, see [`Oban.Cron`](#obancron)

 Pleroma has the following queues:

@@ -524,6 +525,12 @@ Pleroma has the following queues:
 * `web_push` - Web push notifications
 * `scheduled_activities` - Scheduled activities, see [`Pleroma.ScheduledActivity`](#pleromascheduledactivity)

+#### Oban.Cron
+
+Pleroma has these periodic job workers:
+
+`Pleroma.Workers.Cron.ClearOauthTokenWorker` - a job worker to cleanup expired oauth tokens.
+
 Example:

 ```elixir
@@ -534,6 +541,9 @@ config :pleroma, Oban,
 queues: [
 federator_incoming: 50,
 federator_outgoing: 50
+],
+crontab: [
+{"0 0 * * *", Pleroma.Workers.Cron.ClearOauthTokenWorker}
 ]
 ```

@@ -816,8 +826,7 @@ Configure OAuth 2 provider capabilities:

 * `token_expires_in` - The lifetime in seconds of the access token.
 * `issue_new_refresh_token` - Keeps old refresh token or generate new refresh token when to obtain an access token.
-* `clean_expired_tokens` - Enable a background job to clean expired oauth tokens. Defaults to `false`.
-* `clean_expired_tokens_interval` - Interval to run the job to clean expired tokens. Defaults to `86_400_000` (24 hours).
+* `clean_expired_tokens` - Enable a background job to clean expired oauth tokens. Defaults to `false`. Interval settings sets in configuration periodic jobs [`Oban.Cron`](#obancron)

 ## Link parsing
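For instance operators, a hedged sketch of tuning the new periodic jobs: overriding `:crontab` in an instance's own config (the hourly schedule and the file it lives in are illustrative, not part of this commit). The override replaces the default list, so every worker that should keep running has to be restated:

```elixir
use Mix.Config

# Illustrative override, e.g. in prod.secret.exs: run the oauth token cleanup hourly.
# The :crontab value is replaced wholesale, so the other periodic workers are restated here.
config :pleroma, Oban,
  crontab: [
    {"0 * * * *", Pleroma.Workers.Cron.ClearOauthTokenWorker},
    {"0 * * * *", Pleroma.Workers.Cron.StatsWorker},
    {"* * * * *", Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker},
    {"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker}
  ]

# The cleanup job is a no-op unless it is enabled here.
config :pleroma, :oauth2, clean_expired_tokens: true
```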
@@ -42,12 +42,9 @@ def start(_type, _args) do
 children =
 [
 Pleroma.Repo,
-Pleroma.Scheduler,
 Pleroma.Config.TransferTask,
 Pleroma.Emoji,
 Pleroma.Captcha,
-Pleroma.Daemons.ScheduledActivityDaemon,
-Pleroma.Daemons.ActivityExpirationDaemon,
 Pleroma.Plugs.RateLimiter.Supervisor
 ] ++
 cachex_children() ++
@@ -58,7 +55,6 @@ def start(_type, _args) do
 {Oban, Pleroma.Config.get(Oban)}
 ] ++
 task_children(@env) ++
-oauth_cleanup_child(oauth_cleanup_enabled?()) ++
 streamer_child(@env) ++
 chat_child(@env, chat_enabled?()) ++
 [
@@ -160,20 +156,12 @@ defp build_cachex(type, opts),

 defp chat_enabled?, do: Pleroma.Config.get([:chat, :enabled])

-defp oauth_cleanup_enabled?,
-do: Pleroma.Config.get([:oauth2, :clean_expired_tokens], false)
-
 defp streamer_child(:test), do: []

 defp streamer_child(_) do
 [Pleroma.Web.Streamer.supervisor()]
 end

-defp oauth_cleanup_child(true),
-do: [Pleroma.Web.OAuth.Token.CleanWorker]
-
-defp oauth_cleanup_child(_), do: []
-
 defp chat_child(_env, true) do
 [Pleroma.Web.ChatChannel.ChatChannelState]
 end
@@ -1,66 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Daemons.ActivityExpirationDaemon do
-alias Pleroma.Activity
-alias Pleroma.ActivityExpiration
-alias Pleroma.Config
-alias Pleroma.Repo
-alias Pleroma.User
-alias Pleroma.Web.CommonAPI
-
-require Logger
-use GenServer
-import Ecto.Query
-
-@schedule_interval :timer.minutes(1)
-
-def start_link(_) do
-GenServer.start_link(__MODULE__, nil)
-end
-
-@impl true
-def init(_) do
-if Config.get([ActivityExpiration, :enabled]) do
-schedule_next()
-{:ok, nil}
-else
-:ignore
-end
-end
-
-def perform(:execute, expiration_id) do
-try do
-expiration =
-ActivityExpiration
-|> where([e], e.id == ^expiration_id)
-|> Repo.one!()
-
-activity = Activity.get_by_id_with_object(expiration.activity_id)
-user = User.get_by_ap_id(activity.object.data["actor"])
-CommonAPI.delete(activity.id, user)
-rescue
-error ->
-Logger.error("#{__MODULE__} Couldn't delete expired activity: #{inspect(error)}")
-end
-end
-
-@impl true
-def handle_info(:perform, state) do
-ActivityExpiration.due_expirations(@schedule_interval)
-|> Enum.each(fn expiration ->
-Pleroma.Workers.ActivityExpirationWorker.enqueue(
-"activity_expiration",
-%{"activity_expiration_id" => expiration.id}
-)
-end)
-
-schedule_next()
-{:noreply, state}
-end
-
-defp schedule_next do
-Process.send_after(self(), :perform, @schedule_interval)
-end
-end
@@ -1,42 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Daemons.DigestEmailDaemon do
-alias Pleroma.Repo
-alias Pleroma.Workers.DigestEmailsWorker
-
-import Ecto.Query
-
-def perform do
-config = Pleroma.Config.get([:email_notifications, :digest])
-negative_interval = -Map.fetch!(config, :interval)
-inactivity_threshold = Map.fetch!(config, :inactivity_threshold)
-inactive_users_query = Pleroma.User.list_inactive_users_query(inactivity_threshold)
-
-now = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)
-
-from(u in inactive_users_query,
-where: fragment(~s(? ->'digest' @> 'true'), u.email_notifications),
-where: u.last_digest_emailed_at < datetime_add(^now, ^negative_interval, "day"),
-select: u
-)
-|> Repo.all()
-|> Enum.each(fn user ->
-DigestEmailsWorker.enqueue("digest_email", %{"user_id" => user.id})
-end)
-end
-
-@doc """
-Send digest email to the given user.
-Updates `last_digest_emailed_at` field for the user and returns the updated user.
-"""
-@spec perform(Pleroma.User.t()) :: Pleroma.User.t()
-def perform(user) do
-with %Swoosh.Email{} = email <- Pleroma.Emails.UserEmail.digest_email(user) do
-Pleroma.Emails.Mailer.deliver_async(email)
-end
-
-Pleroma.User.touch_last_digest_emailed_at(user)
-end
-end
@@ -1,62 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Daemons.ScheduledActivityDaemon do
-@moduledoc """
-Sends scheduled activities to the job queue.
-"""
-
-alias Pleroma.Config
-alias Pleroma.ScheduledActivity
-alias Pleroma.User
-alias Pleroma.Web.CommonAPI
-
-use GenServer
-require Logger
-
-@schedule_interval :timer.minutes(1)
-
-def start_link(_) do
-GenServer.start_link(__MODULE__, nil)
-end
-
-def init(_) do
-if Config.get([ScheduledActivity, :enabled]) do
-schedule_next()
-{:ok, nil}
-else
-:ignore
-end
-end
-
-def perform(:execute, scheduled_activity_id) do
-try do
-{:ok, scheduled_activity} = ScheduledActivity.delete(scheduled_activity_id)
-%User{} = user = User.get_cached_by_id(scheduled_activity.user_id)
-{:ok, _result} = CommonAPI.post(user, scheduled_activity.params)
-rescue
-error ->
-Logger.error(
-"#{__MODULE__} Couldn't create a status from the scheduled activity: #{inspect(error)}"
-)
-end
-end
-
-def handle_info(:perform, state) do
-ScheduledActivity.due_activities(@schedule_interval)
-|> Enum.each(fn scheduled_activity ->
-Pleroma.Workers.ScheduledActivityWorker.enqueue(
-"execute",
-%{"activity_id" => scheduled_activity.id}
-)
-end)
-
-schedule_next()
-{:noreply, state}
-end
-
-defp schedule_next do
-Process.send_after(self(), :perform, @schedule_interval)
-end
-end
@@ -108,6 +108,7 @@ def extract_first_external_url(object, content) do
 Cachex.fetch!(:scrubber_cache, key, fn _key ->
 result =
 content
+|> Floki.parse_fragment!()
 |> Floki.filter_out("a.mention,a.hashtag,a[rel~=\"tag\"]")
 |> Floki.attribute("a", "href")
 |> Enum.at(0)
@@ -5,15 +5,19 @@
 defmodule Pleroma.ScheduledActivity do
 use Ecto.Schema

+alias Ecto.Multi
 alias Pleroma.Config
 alias Pleroma.Repo
 alias Pleroma.ScheduledActivity
 alias Pleroma.User
 alias Pleroma.Web.CommonAPI.Utils
+alias Pleroma.Workers.ScheduledActivityWorker

 import Ecto.Query
 import Ecto.Changeset

+@type t :: %__MODULE__{}
+
 @min_offset :timer.minutes(5)

 schema "scheduled_activities" do
@@ -105,16 +109,32 @@ def far_enough?(scheduled_at) do
 end

 def new(%User{} = user, attrs) do
-%ScheduledActivity{user_id: user.id}
-|> changeset(attrs)
+changeset(%ScheduledActivity{user_id: user.id}, attrs)
 end

+@doc """
+Creates ScheduledActivity and add to queue to perform at scheduled_at date
+"""
+@spec create(User.t(), map()) :: {:ok, ScheduledActivity.t()} | {:error, Ecto.Changeset.t()}
 def create(%User{} = user, attrs) do
-user
-|> new(attrs)
-|> Repo.insert()
+Multi.new()
+|> Multi.insert(:scheduled_activity, new(user, attrs))
+|> maybe_add_jobs(Config.get([ScheduledActivity, :enabled]))
+|> Repo.transaction()
+|> transaction_response
 end

+defp maybe_add_jobs(multi, true) do
+multi
+|> Multi.run(:scheduled_activity_job, fn _repo, %{scheduled_activity: activity} ->
+%{activity_id: activity.id}
+|> ScheduledActivityWorker.new(scheduled_at: activity.scheduled_at)
+|> Oban.insert()
+end)
+end
+
+defp maybe_add_jobs(multi, _), do: multi
+
 def get(%User{} = user, scheduled_activity_id) do
 ScheduledActivity
 |> where(user_id: ^user.id)
@@ -122,25 +142,43 @@ def get(%User{} = user, scheduled_activity_id) do
 |> Repo.one()
 end

-def update(%ScheduledActivity{} = scheduled_activity, attrs) do
-scheduled_activity
-|> update_changeset(attrs)
-|> Repo.update()
+@spec update(ScheduledActivity.t(), map()) ::
+{:ok, ScheduledActivity.t()} | {:error, Ecto.Changeset.t()}
+def update(%ScheduledActivity{id: id} = scheduled_activity, attrs) do
+with {:error, %Ecto.Changeset{valid?: true} = changeset} <-
+{:error, update_changeset(scheduled_activity, attrs)} do
+Multi.new()
+|> Multi.update(:scheduled_activity, changeset)
+|> Multi.update_all(:scheduled_job, job_query(id),
+set: [scheduled_at: get_field(changeset, :scheduled_at)]
+)
+|> Repo.transaction()
+|> transaction_response
+end
 end

-def delete(%ScheduledActivity{} = scheduled_activity) do
-scheduled_activity
-|> Repo.delete()
+@doc "Deletes a ScheduledActivity and linked jobs."
+@spec delete(ScheduledActivity.t() | binary() | integer) ::
+{:ok, ScheduledActivity.t()} | {:error, Ecto.Changeset.t()}
+def delete(%ScheduledActivity{id: id} = scheduled_activity) do
+Multi.new()
+|> Multi.delete(:scheduled_activity, scheduled_activity, stale_error_field: :id)
+|> Multi.delete_all(:jobs, job_query(id))
+|> Repo.transaction()
+|> transaction_response
 end

 def delete(id) when is_binary(id) or is_integer(id) do
-ScheduledActivity
-|> where(id: ^id)
-|> select([sa], sa)
-|> Repo.delete_all()
-|> case do
-{1, [scheduled_activity]} -> {:ok, scheduled_activity}
-_ -> :error
+delete(%__MODULE__{id: id})
+end
+
+defp transaction_response(result) do
+case result do
+{:ok, %{scheduled_activity: scheduled_activity}} ->
+{:ok, scheduled_activity}
+
+{:error, _, changeset, _} ->
+{:error, changeset}
 end
 end

@@ -158,4 +196,11 @@ def due_activities(offset \\ 0) do
 |> where([sa], sa.scheduled_at < ^naive_datetime)
 |> Repo.all()
 end
+
+def job_query(scheduled_activity_id) do
+from(j in Oban.Job,
+where: j.queue == "scheduled_activities",
+where: fragment("args ->> 'activity_id' = ?::text", ^to_string(scheduled_activity_id))
+)
+end
 end
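The net effect of the changes above: instead of a polling daemon, the job is inserted in the same database transaction as the row and Oban runs it at `scheduled_at`. A minimal, self-contained sketch of that pattern with hypothetical `MyApp` names (worker, schema changeset and repo are all placeholders):

```elixir
# Sketch only; MyApp.* names and the changeset are hypothetical.
defmodule MyApp.ReminderWorker do
  use Oban.Worker, queue: "scheduled_activities"

  @impl Oban.Worker
  def perform(%{"reminder_id" => id}, _job) do
    # Runs once, at the time the job was scheduled for.
    IO.puts("reminder #{id} is due")
    :ok
  end
end

defmodule MyApp.Reminders do
  alias Ecto.Multi

  # Insert the record and its job atomically; if either step fails, nothing is persisted.
  def create(changeset, scheduled_at) do
    Multi.new()
    |> Multi.insert(:reminder, changeset)
    |> Multi.run(:job, fn _repo, %{reminder: reminder} ->
      %{"reminder_id" => reminder.id}
      |> MyApp.ReminderWorker.new(scheduled_at: scheduled_at)
      |> Oban.insert()
    end)
    |> MyApp.Repo.transaction()
  end
end
```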
@@ -1,7 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Scheduler do
-use Quantum.Scheduler, otp_app: :pleroma
-end
@@ -9,22 +9,43 @@ defmodule Pleroma.Stats do

 use GenServer

-@interval 1000 * 60 * 60
+@init_state %{
+peers: [],
+stats: %{
+domain_count: 0,
+status_count: 0,
+user_count: 0
+}
+}

 def start_link(_) do
-GenServer.start_link(__MODULE__, initial_data(), name: __MODULE__)
+GenServer.start_link(
+__MODULE__,
+@init_state,
+name: __MODULE__
+)
 end

+@doc "Performs update stats"
 def force_update do
 GenServer.call(__MODULE__, :force_update)
 end

+@doc "Performs collect stats"
+def do_collect do
+GenServer.cast(__MODULE__, :run_update)
+end
+
+@doc "Returns stats data"
+@spec get_stats() :: %{domain_count: integer(), status_count: integer(), user_count: integer()}
 def get_stats do
 %{stats: stats} = GenServer.call(__MODULE__, :get_state)

 stats
 end

+@doc "Returns list peers"
+@spec get_peers() :: list(String.t())
 def get_peers do
 %{peers: peers} = GenServer.call(__MODULE__, :get_state)

@@ -32,7 +53,6 @@ def get_peers do
 end

 def init(args) do
-Process.send(self(), :run_update, [])
 {:ok, args}
 end

@@ -45,17 +65,12 @@ def handle_call(:get_state, _from, state) do
 {:reply, state, state}
 end

-def handle_info(:run_update, _state) do
+def handle_cast(:run_update, _state) do
 new_stats = get_stat_data()

-Process.send_after(self(), :run_update, @interval)
 {:noreply, new_stats}
 end

-defp initial_data do
-%{peers: [], stats: %{}}
-end
-
 defp get_stat_data do
 peers =
 from(
@@ -74,7 +89,11 @@ defp get_stat_data do

 %{
 peers: peers,
-stats: %{domain_count: domain_count, status_count: status_count, user_count: user_count}
+stats: %{
+domain_count: domain_count,
+status_count: status_count,
+user_count: user_count
+}
 }
 end
 end
@@ -17,6 +17,7 @@ defp old_user?(%User{} = u) do
 # does the post contain links?
 defp contains_links?(%{"content" => content} = _object) do
 content
+|> Floki.parse_fragment!()
 |> Floki.filter_out("a.mention,a.hashtag,a[rel~=\"tag\"],a.zrl")
 |> Floki.attribute("a", "href")
 |> length() > 0
@@ -124,15 +124,18 @@ def create(
 ) do
 params = Map.put(params, "in_reply_to_status_id", params["in_reply_to_id"])

-if ScheduledActivity.far_enough?(scheduled_at) do
-with {:ok, scheduled_activity} <-
-ScheduledActivity.create(user, %{"params" => params, "scheduled_at" => scheduled_at}) do
+with {:far_enough, true} <- {:far_enough, ScheduledActivity.far_enough?(scheduled_at)},
+attrs <- %{"params" => params, "scheduled_at" => scheduled_at},
+{:ok, scheduled_activity} <- ScheduledActivity.create(user, attrs) do
 conn
 |> put_view(ScheduledActivityView)
 |> render("show.json", scheduled_activity: scheduled_activity)
-end
 else
-create(conn, Map.drop(params, ["scheduled_at"]))
+{:far_enough, _} ->
+create(conn, Map.drop(params, ["scheduled_at"]))
+
+error ->
+error
 end
 end

@@ -242,9 +242,9 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
 with %{data: %{"reactions" => emoji_reactions}} <- object do
 Enum.map(emoji_reactions, fn [emoji, users] ->
 %{
-emoji: emoji,
+name: emoji,
 count: length(users),
-reacted: !!(opts[:for] && opts[:for].ap_id in users)
+me: !!(opts[:for] && opts[:for].ap_id in users)
 }
 end)
 else
@@ -8,8 +8,10 @@ defmodule Pleroma.Web.Metadata.Providers.RelMe do

 @impl Provider
 def build_tags(%{user: user}) do
-(Floki.attribute(user.bio, "link[rel~=me]", "href") ++
-Floki.attribute(user.bio, "a[rel~=me]", "href"))
+bio_tree = Floki.parse_fragment!(user.bio)
+
+(Floki.attribute(bio_tree, "link[rel~=me]", "href") ++
+Floki.attribute(bio_tree, "a[rel~=me]", "href"))
 |> Enum.map(fn link ->
 {:link, [rel: "me", href: link], []}
 end)
@@ -1,34 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.OAuth.Token.CleanWorker do
-@moduledoc """
-The module represents functions to clean an expired oauth tokens.
-"""
-use GenServer
-
-@ten_seconds 10_000
-@one_day 86_400_000
-
-alias Pleroma.Web.OAuth.Token
-alias Pleroma.Workers.BackgroundWorker
-
-def start_link(_), do: GenServer.start_link(__MODULE__, %{})
-
-def init(_) do
-Process.send_after(self(), :perform, @ten_seconds)
-{:ok, nil}
-end
-
-@doc false
-def handle_info(:perform, state) do
-BackgroundWorker.enqueue("clean_expired_tokens", %{})
-interval = Pleroma.Config.get([:oauth2, :clean_expired_tokens_interval], @one_day)
-
-Process.send_after(self(), :perform, interval)
-{:noreply, state}
-end
-
-def perform(:clean), do: Token.delete_expired_tokens()
-end
@@ -53,10 +53,10 @@ def emoji_reactions_by(%{assigns: %{user: user}} = conn, %{"id" => activity_id})
 |> Enum.filter(& &1)

 %{
-emoji: emoji,
+name: emoji,
 count: length(users),
 accounts: AccountView.render("index.json", %{users: users, for: user, as: :user}),
-reacted: !!(user && user.ap_id in user_ap_ids)
+me: !!(user && user.ap_id in user_ap_ids)
 }
 end)

@@ -27,9 +27,10 @@ def parse(_), do: {:error, "No URL provided"}
 defp parse_url(url) do
 with {:ok, %Tesla.Env{body: html, status: status}} when status in 200..299 <-
 Pleroma.HTTP.get(url, [], adapter: @hackney_options),
+{:ok, html_tree} <- Floki.parse_document(html),
 data <-
-Floki.attribute(html, "link[rel~=me]", "href") ++
-Floki.attribute(html, "a[rel~=me]", "href") do
+Floki.attribute(html_tree, "link[rel~=me]", "href") ++
+Floki.attribute(html_tree, "a[rel~=me]", "href") do
 {:ok, data}
 end
 rescue
@@ -81,18 +81,18 @@ defp parse_url(url) do
 {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: @hackney_options)

 html
-|> parse_html
+|> parse_html()
 |> maybe_parse()
 |> Map.put(:url, url)
 |> clean_parsed_data()
 |> check_parsed_data()
 rescue
 e ->
-{:error, "Parsing error: #{inspect(e)}"}
+{:error, "Parsing error: #{inspect(e)} #{inspect(__STACKTRACE__)}"}
 end
 end

-defp parse_html(html), do: Floki.parse(html)
+defp parse_html(html), do: Floki.parse_document!(html)

 defp maybe_parse(html) do
 Enum.reduce_while(parsers(), %{}, fn parser, acc ->
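These call sites track Floki's move away from `Floki.parse/1` toward explicit document and fragment parsing (Floki 0.25 in this commit). A minimal sketch of the new entry points; the HTML strings are made-up examples:

```elixir
# Whole documents vs. fragments are now parsed explicitly (example HTML is made up).
doc = Floki.parse_document!("<html><body><a rel=\"me\" href=\"https://example.com\">me</a></body></html>")
Floki.attribute(doc, "a[rel~=me]", "href")
#=> ["https://example.com"]

fragment = Floki.parse_fragment!("<p>hi <a class=\"mention\" href=\"#\">@you</a></p>")
Floki.filter_out(fragment, "a.mention") |> Floki.attribute("a", "href")
#=> []
```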
@@ -271,7 +271,7 @@ defmodule Pleroma.Web.Router do
 scope "/api/v1/pleroma", Pleroma.Web.PleromaAPI do
 pipe_through(:api)

-get("/statuses/:id/emoji_reactions_by", PleromaAPIController, :emoji_reactions_by)
+get("/statuses/:id/reactions", PleromaAPIController, :emoji_reactions_by)
 end

 scope "/api/v1/pleroma", Pleroma.Web.PleromaAPI do
@@ -287,8 +287,8 @@ defmodule Pleroma.Web.Router do
 pipe_through(:authenticated_api)

 patch("/conversations/:id", PleromaAPIController, :update_conversation)
-post("/statuses/:id/react_with_emoji", PleromaAPIController, :react_with_emoji)
-post("/statuses/:id/unreact_with_emoji", PleromaAPIController, :unreact_with_emoji)
+put("/statuses/:id/reactions/:emoji", PleromaAPIController, :react_with_emoji)
+delete("/statuses/:id/reactions/:emoji", PleromaAPIController, :unreact_with_emoji)
 post("/notifications/read", PleromaAPIController, :read_notification)

 patch("/accounts/update_avatar", AccountController, :update_avatar)
@@ -1,18 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Workers.ActivityExpirationWorker do
-use Pleroma.Workers.WorkerHelper, queue: "activity_expiration"
-
-@impl Oban.Worker
-def perform(
-%{
-"op" => "activity_expiration",
-"activity_expiration_id" => activity_expiration_id
-},
-_job
-) do
-Pleroma.Daemons.ActivityExpirationDaemon.perform(:execute, activity_expiration_id)
-end
-end
@@ -6,7 +6,6 @@ defmodule Pleroma.Workers.BackgroundWorker do
 alias Pleroma.Activity
 alias Pleroma.User
 alias Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy
-alias Pleroma.Web.OAuth.Token.CleanWorker

 use Pleroma.Workers.WorkerHelper, queue: "background"

@@ -55,10 +54,6 @@ def perform(
 User.perform(:follow_import, follower, followed_identifiers)
 end

-def perform(%{"op" => "clean_expired_tokens"}, _job) do
-CleanWorker.perform(:clean)
-end
-
 def perform(%{"op" => "media_proxy_preload", "message" => message}, _job) do
 MediaProxyWarmingPolicy.perform(:preload, message)
 end
lib/pleroma/workers/cron/clear_oauth_token_worker.ex (new file, 21 lines)
@@ -0,0 +1,21 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.Cron.ClearOauthTokenWorker do
+@moduledoc """
+The worker to cleanup expired oAuth tokens.
+"""
+
+use Oban.Worker, queue: "background"
+
+alias Pleroma.Config
+alias Pleroma.Web.OAuth.Token
+
+@impl Oban.Worker
+def perform(_opts, _job) do
+if Config.get([:oauth2, :clean_expired_tokens], false) do
+Token.delete_expired_tokens()
+end
+end
+end
lib/pleroma/workers/cron/digest_emails_worker.ex (new file, 58 lines)
@@ -0,0 +1,58 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.Cron.DigestEmailsWorker do
+@moduledoc """
+The worker to send digest emails.
+"""
+
+use Oban.Worker, queue: "digest_emails"
+
+alias Pleroma.Config
+alias Pleroma.Emails
+alias Pleroma.Repo
+alias Pleroma.User
+
+import Ecto.Query
+
+require Logger
+
+@impl Oban.Worker
+def perform(_opts, _job) do
+config = Config.get([:email_notifications, :digest])
+
+if config[:active] do
+negative_interval = -Map.fetch!(config, :interval)
+inactivity_threshold = Map.fetch!(config, :inactivity_threshold)
+inactive_users_query = User.list_inactive_users_query(inactivity_threshold)
+
+now = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)
+
+from(u in inactive_users_query,
+where: fragment(~s(? ->'digest' @> 'true'), u.email_notifications),
+where: u.last_digest_emailed_at < datetime_add(^now, ^negative_interval, "day"),
+select: u
+)
+|> Repo.all()
+|> send_emails
+end
+end
+
+def send_emails(users) do
+Enum.each(users, &send_email/1)
+end
+
+@doc """
+Send digest email to the given user.
+Updates `last_digest_emailed_at` field for the user and returns the updated user.
+"""
+@spec send_email(User.t()) :: User.t()
+def send_email(user) do
+with %Swoosh.Email{} = email <- Emails.UserEmail.digest_email(user) do
+Emails.Mailer.deliver_async(email)
+end
+
+User.touch_last_digest_emailed_at(user)
+end
+end
lib/pleroma/workers/cron/purge_expired_activities_worker.ex (new file, 46 lines)
@@ -0,0 +1,46 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker do
+@moduledoc """
+The worker to purge expired activities.
+"""
+
+use Oban.Worker, queue: "activity_expiration"
+
+alias Pleroma.Activity
+alias Pleroma.ActivityExpiration
+alias Pleroma.Config
+alias Pleroma.User
+alias Pleroma.Web.CommonAPI
+
+require Logger
+
+@interval :timer.minutes(1)
+
+@impl Oban.Worker
+def perform(_opts, _job) do
+if Config.get([ActivityExpiration, :enabled]) do
+Enum.each(ActivityExpiration.due_expirations(@interval), &delete_activity/1)
+end
+end
+
+def delete_activity(%ActivityExpiration{activity_id: activity_id}) do
+with {:activity, %Activity{} = activity} <-
+{:activity, Activity.get_by_id_with_object(activity_id)},
+{:user, %User{} = user} <- {:user, User.get_by_ap_id(activity.object.data["actor"])} do
+CommonAPI.delete(activity.id, user)
+else
+{:activity, _} ->
+Logger.error(
+"#{__MODULE__} Couldn't delete expired activity: not found activity ##{activity_id}"
+)
+
+{:user, _} ->
+Logger.error(
+"#{__MODULE__} Couldn't delete expired activity: not found actorof ##{activity_id}"
+)
+end
+end
+end
lib/pleroma/workers/cron/stats_worker.ex (new file, 16 lines)
@@ -0,0 +1,16 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.Cron.StatsWorker do
+@moduledoc """
+The worker to update peers statistics.
+"""
+
+use Oban.Worker, queue: "background"
+
+@impl Oban.Worker
+def perform(_opts, _job) do
+Pleroma.Stats.do_collect()
+end
+end
@@ -1,16 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Workers.DigestEmailsWorker do
-alias Pleroma.User
-
-use Pleroma.Workers.WorkerHelper, queue: "digest_emails"
-
-@impl Oban.Worker
-def perform(%{"op" => "digest_email", "user_id" => user_id}, _job) do
-user_id
-|> User.get_cached_by_id()
-|> Pleroma.Daemons.DigestEmailDaemon.perform()
-end
-end
|
@ -3,10 +3,42 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.Workers.ScheduledActivityWorker do
|
defmodule Pleroma.Workers.ScheduledActivityWorker do
|
||||||
|
@moduledoc """
|
||||||
|
The worker to post scheduled activity.
|
||||||
|
"""
|
||||||
|
|
||||||
use Pleroma.Workers.WorkerHelper, queue: "scheduled_activities"
|
use Pleroma.Workers.WorkerHelper, queue: "scheduled_activities"
|
||||||
|
|
||||||
|
alias Pleroma.Config
|
||||||
|
alias Pleroma.ScheduledActivity
|
||||||
|
alias Pleroma.User
|
||||||
|
alias Pleroma.Web.CommonAPI
|
||||||
|
|
||||||
|
require Logger
|
||||||
|
|
||||||
@impl Oban.Worker
|
@impl Oban.Worker
|
||||||
def perform(%{"op" => "execute", "activity_id" => activity_id}, _job) do
|
def perform(%{"activity_id" => activity_id}, _job) do
|
||||||
Pleroma.Daemons.ScheduledActivityDaemon.perform(:execute, activity_id)
|
if Config.get([ScheduledActivity, :enabled]) do
|
||||||
|
case Pleroma.Repo.get(ScheduledActivity, activity_id) do
|
||||||
|
%ScheduledActivity{} = scheduled_activity ->
|
||||||
|
post_activity(scheduled_activity)
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
Logger.error("#{__MODULE__} Couldn't find scheduled activity: #{activity_id}")
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp post_activity(%ScheduledActivity{user_id: user_id, params: params} = scheduled_activity) do
|
||||||
|
with {:delete, {:ok, _}} <- {:delete, ScheduledActivity.delete(scheduled_activity)},
|
||||||
|
{:user, %User{} = user} <- {:user, User.get_cached_by_id(user_id)},
|
||||||
|
{:post, {:ok, _}} <- {:post, CommonAPI.post(user, params)} do
|
||||||
|
:ok
|
||||||
|
else
|
||||||
|
error ->
|
||||||
|
Logger.error(
|
||||||
|
"#{__MODULE__} Couldn't create a status from the scheduled activity: #{inspect(error)}"
|
||||||
|
)
|
||||||
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
mix.exs
@@ -63,7 +63,7 @@ def copy_nginx_config(%{path: target_path} = release) do
 def application do
 [
 mod: {Pleroma.Application, []},
-extra_applications: [:logger, :runtime_tools, :comeonin, :quack, :fast_sanitize, :swarm],
+extra_applications: [:logger, :runtime_tools, :comeonin, :quack, :fast_sanitize],
 included_applications: [:ex_syslogger]
 ]
 end
@@ -108,8 +108,7 @@ defp deps do
 {:ecto_enum, "~> 1.4"},
 {:ecto_sql, "~> 3.3.2"},
 {:postgrex, ">= 0.13.5"},
-{:oban, "~> 0.12.0"},
-{:quantum, "~> 2.3"},
+{:oban, "~> 0.12.1"},
 {:gettext, "~> 0.15"},
 {:comeonin, "~> 4.1.1"},
 {:pbkdf2_elixir, "~> 0.12.3"},
@@ -140,7 +139,7 @@ defp deps do
 {:phoenix_swoosh, "~> 0.2"},
 {:gen_smtp, "~> 0.13"},
 {:websocket_client, git: "https://github.com/jeremyong/websocket_client.git", only: :test},
-{:floki, "~> 0.23.0"},
+{:floki, "~> 0.25"},
 {:ex_syslogger, github: "slashmili/ex_syslogger", tag: "1.4.0"},
 {:timex, "~> 3.5"},
 {:ueberauth, "~> 0.4"},
@@ -163,7 +162,7 @@ defp deps do
 {:esshd, "~> 0.1.0", runtime: Application.get_env(:esshd, :enabled, false)},
 {:ex_const, "~> 0.2"},
 {:plug_static_index_html, "~> 1.0.0"},
-{:excoveralls, "~> 0.11.1", only: :test},
+{:excoveralls, "~> 0.12.1", only: :test},
 {:flake_id, "~> 0.1.0"},
 {:remote_ip,
 git: "https://git.pleroma.social/pleroma/remote_ip.git",
mix.lock

@ -36,17 +36,17 @@
 "ex_doc": {:hex, :ex_doc, "0.21.2", "caca5bc28ed7b3bdc0b662f8afe2bee1eedb5c3cf7b322feeeb7c6ebbde089d6", [:mix], [{:earmark, "~> 1.3.3 or ~> 1.4", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"},
 "ex_machina": {:hex, :ex_machina, "2.3.0", "92a5ad0a8b10ea6314b876a99c8c9e3f25f4dde71a2a835845b136b9adaf199a", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm"},
 "ex_syslogger": {:git, "https://github.com/slashmili/ex_syslogger.git", "f3963399047af17e038897c69e20d552e6899e1d", [tag: "1.4.0"]},
-"excoveralls": {:hex, :excoveralls, "0.11.2", "0c6f2c8db7683b0caa9d490fb8125709c54580b4255ffa7ad35f3264b075a643", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"},
+"excoveralls": {:hex, :excoveralls, "0.12.1", "a553c59f6850d0aff3770e4729515762ba7c8e41eedde03208182a8dc9d0ce07", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"},
-"fast_html": {:hex, :fast_html, "1.0.1", "5bc7df4dc4607ec2c314c16414e4111d79a209956c4f5df96602d194c61197f9", [:make, :mix], [], "hexpm"},
+"fast_html": {:hex, :fast_html, "1.0.2", "b2a32022741699421e90762ce904cacb4faf12c10129acc3674262dd7fa5d2b6", [:make, :mix], [], "hexpm"},
-"fast_sanitize": {:hex, :fast_sanitize, "0.1.6", "60a5ae96879956dea409a91a77f5dd2994c24cc10f80eefd8f9892ee4c0c7b25", [:mix], [{:fast_html, "~> 1.0", [hex: :fast_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.8", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
+"fast_sanitize": {:hex, :fast_sanitize, "0.1.7", "2a7cd8734c88a2de6de55022104f8a3b87f1fdbe8bbf131d9049764b53d50d0d", [:mix], [{:fast_html, "~> 1.0", [hex: :fast_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.8", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
 "flake_id": {:hex, :flake_id, "0.1.0", "7716b086d2e405d09b647121a166498a0d93d1a623bead243e1f74216079ccb3", [:mix], [{:base62, "~> 1.2", [hex: :base62, repo: "hexpm", optional: false]}, {:ecto, ">= 2.0.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"},
-"floki": {:hex, :floki, "0.23.1", "e100306ce7d8841d70a559748e5091542e2cfc67ffb3ade92b89a8435034dab1", [:mix], [{:html_entities, "~> 0.5.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm"},
+"floki": {:hex, :floki, "0.25.0", "b1c9ddf5f32a3a90b43b76f3386ca054325dc2478af020e87b5111c19f2284ac", [:mix], [{:html_entities, "~> 0.5.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm"},
 "gen_smtp": {:hex, :gen_smtp, "0.15.0", "9f51960c17769b26833b50df0b96123605a8024738b62db747fece14eb2fbfcc", [:rebar3], [], "hexpm"},
 "gen_stage": {:hex, :gen_stage, "0.14.3", "d0c66f1c87faa301c1a85a809a3ee9097a4264b2edf7644bf5c123237ef732bf", [:mix], [], "hexpm"},
 "gen_state_machine": {:hex, :gen_state_machine, "2.0.5", "9ac15ec6e66acac994cc442dcc2c6f9796cf380ec4b08267223014be1c728a95", [:mix], [], "hexpm"},
 "gettext": {:hex, :gettext, "0.17.1", "8baab33482df4907b3eae22f719da492cee3981a26e649b9c2be1c0192616962", [:mix], [], "hexpm"},
 "hackney": {:hex, :hackney, "1.15.2", "07e33c794f8f8964ee86cebec1a8ed88db5070e52e904b8f12209773c1036085", [:rebar3], [{:certifi, "2.5.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.5", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm"},
-"html_entities": {:hex, :html_entities, "0.5.0", "40f5c5b9cbe23073b48a4e69c67b6c11974f623a76165e2b92d098c0e88ccb1d", [:mix], [], "hexpm"},
+"html_entities": {:hex, :html_entities, "0.5.1", "1c9715058b42c35a2ab65edc5b36d0ea66dd083767bef6e3edb57870ef556549", [:mix], [], "hexpm"},
 "html_sanitize_ex": {:hex, :html_sanitize_ex, "1.3.0", "f005ad692b717691203f940c686208aa3d8ffd9dd4bb3699240096a51fa9564e", [:mix], [{:mochiweb, "~> 2.15", [hex: :mochiweb, repo: "hexpm", optional: false]}], "hexpm"},
 "http_signatures": {:git, "https://git.pleroma.social/pleroma/http_signatures.git", "293d77bb6f4a67ac8bde1428735c3b42f22cbb30", [ref: "293d77bb6f4a67ac8bde1428735c3b42f22cbb30"]},
 "httpoison": {:hex, :httpoison, "1.6.1", "2ce5bf6e535cd0ab02e905ba8c276580bab80052c5c549f53ddea52d72e81f33", [:mix], [{:hackney, "~> 1.15 and >= 1.15.2", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"},

@ -90,12 +90,10 @@
 "prometheus_phoenix": {:hex, :prometheus_phoenix, "1.3.0", "c4b527e0b3a9ef1af26bdcfbfad3998f37795b9185d475ca610fe4388fdd3bb5", [:mix], [{:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.3 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
 "prometheus_plugs": {:hex, :prometheus_plugs, "1.1.5", "25933d48f8af3a5941dd7b621c889749894d8a1082a6ff7c67cc99dec26377c5", [:mix], [{:accept, "~> 0.1", [hex: :accept, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}, {:prometheus_process_collector, "~> 1.1", [hex: :prometheus_process_collector, repo: "hexpm", optional: true]}], "hexpm"},
 "quack": {:hex, :quack, "0.1.1", "cca7b4da1a233757fdb44b3334fce80c94785b3ad5a602053b7a002b5a8967bf", [:mix], [{:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: false]}, {:tesla, "~> 1.2.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm"},
-"quantum": {:hex, :quantum, "2.3.4", "72a0e8855e2adc101459eac8454787cb74ab4169de6ca50f670e72142d4960e9", [:mix], [{:calendar, "~> 0.17", [hex: :calendar, repo: "hexpm", optional: true]}, {:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}, {:gen_stage, "~> 0.12", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:swarm, "~> 3.3", [hex: :swarm, repo: "hexpm", optional: false]}, {:timex, "~> 3.1", [hex: :timex, repo: "hexpm", optional: true]}], "hexpm"},
 "ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"},
 "recon": {:git, "https://github.com/ferd/recon.git", "75d70c7c08926d2f24f1ee6de14ee50fe8a52763", [tag: "2.4.0"]},
 "remote_ip": {:git, "https://git.pleroma.social/pleroma/remote_ip.git", "825dc00aaba5a1b7c4202a532b696b595dd3bcb3", [ref: "825dc00aaba5a1b7c4202a532b696b595dd3bcb3"]},
 "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.5", "6eaf7ad16cb568bb01753dbbd7a95ff8b91c7979482b95f38443fe2c8852a79b", [:make, :mix, :rebar3], [], "hexpm"},
-"swarm": {:hex, :swarm, "3.4.0", "64f8b30055d74640d2186c66354b33b999438692a91be275bb89cdc7e401f448", [:mix], [{:gen_state_machine, "~> 2.0", [hex: :gen_state_machine, repo: "hexpm", optional: false]}, {:libring, "~> 1.0", [hex: :libring, repo: "hexpm", optional: false]}], "hexpm"},
 "sweet_xml": {:hex, :sweet_xml, "0.6.6", "fc3e91ec5dd7c787b6195757fbcf0abc670cee1e4172687b45183032221b66b8", [:mix], [], "hexpm"},
 "swoosh": {:hex, :swoosh, "0.23.5", "bfd9404bbf5069b1be2ffd317923ce57e58b332e25dbca2a35dedd7820dfee5a", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}], "hexpm"},
 "syslog": {:git, "https://github.com/Vagabond/erlang-syslog.git", "4a6c6f2c996483e86c1320e9553f91d337bcb6aa", [tag: "1.0.5"]},
@ -1 +1 @@
-<!DOCTYPE html><html lang=en><head><meta charset=utf-8><meta name=viewport content="width=device-width,initial-scale=1,user-scalable=no"><title>Pleroma</title><!--server-generated-meta--><link rel=icon type=image/png href=/favicon.png><link href=/static/css/vendors~app.b2603a50868c68a1c192.css rel=stylesheet><link href=/static/css/app.ae04505b31bb0ee2765e.css rel=stylesheet><link href=/static/fontello.1581007281335.css rel=stylesheet></head><body class=hidden><noscript>To use Pleroma, please enable JavaScript.</noscript><div id=app></div><script type=text/javascript src=/static/js/vendors~app.c26cf2fc57e9c1975e8d.js></script><script type=text/javascript src=/static/js/app.0aac253187b2af873849.js></script></body></html>
+<!DOCTYPE html><html lang=en><head><meta charset=utf-8><meta name=viewport content="width=device-width,initial-scale=1,user-scalable=no"><title>Pleroma</title><!--server-generated-meta--><link rel=icon type=image/png href=/favicon.png><link href=/static/css/vendors~app.b2603a50868c68a1c192.css rel=stylesheet><link href=/static/css/app.ae04505b31bb0ee2765e.css rel=stylesheet><link href=/static/fontello.1581425930672.css rel=stylesheet></head><body class=hidden><noscript>To use Pleroma, please enable JavaScript.</noscript><div id=app></div><script type=text/javascript src=/static/js/vendors~app.52ac194cbc427f97f06e.js></script><script type=text/javascript src=/static/js/app.f8af8a9b83e330e80903.js></script></body></html>
@ -76,6 +76,8 @@
 <glyph glyph-name="arrow-curved" unicode="" d="M799 302l0-56 112 0-223-223-224 223 112 0 0 56q0 116-81 197t-197 82-198-82-82-197q0 162 115 276t276 114 276-114 114-276z" horiz-adv-x="928" />
+<glyph glyph-name="link" unicode="" d="M813 178q0 23-16 38l-116 116q-16 16-38 16-24 0-40-18 1-1 10-10t12-12 9-11 7-14 2-15q0-23-16-38t-38-16q-8 0-15 2t-14 7-11 9-12 12-10 10q-19-17-19-40 0-23 16-38l115-116q15-15 38-15 22 0 38 15l82 81q16 16 16 37z m-393 394q0 22-15 38l-115 115q-16 16-38 16-22 0-38-15l-82-82q-16-15-16-37 0-22 16-38l116-116q15-15 38-15 23 0 40 17-2 2-11 11t-12 12-8 10-7 14-2 16q0 22 15 38t38 15q9 0 16-2t14-7 11-8 12-12 10-11q18 17 18 41z m500-394q0-66-48-113l-82-81q-46-47-113-47-68 0-114 48l-115 115q-46 47-46 114 0 68 49 116l-49 49q-48-49-116-49-67 0-114 47l-116 116q-47 47-47 114t47 113l82 82q47 46 114 46 67 0 114-47l115-116q46-46 46-113 0-69-49-117l49-49q48 49 116 49 67 0 114-47l116-116q47-47 47-114z" horiz-adv-x="928.6" />
 <glyph glyph-name="spin3" unicode="" d="M494 857c-266 0-483-210-494-472-1-19 13-20 13-20l84 0c16 0 19 10 19 18 10 199 176 358 378 358 107 0 205-45 273-118l-58-57c-11-12-11-27 5-31l247-50c21-5 46 11 37 44l-58 227c-2 9-16 22-29 13l-65-60c-89 91-214 148-352 148z m409-508c-16 0-19-10-19-18-10-199-176-358-377-358-108 0-205 45-274 118l59 57c10 12 10 27-5 31l-248 50c-21 5-46-11-37-44l58-227c2-9 16-22 30-13l64 60c89-91 214-148 353-148 265 0 482 210 493 473 1 18-13 19-13 19l-84 0z" horiz-adv-x="1000" />
 <glyph glyph-name="spin4" unicode="" d="M498 857c-114 0-228-39-320-116l0 0c173 140 428 130 588-31 134-134 164-332 89-495-10-29-5-50 12-68 21-20 61-23 84 0 3 3 12 15 15 24 71 180 33 393-112 539-99 98-228 147-356 147z m-409-274c-14 0-29-5-39-16-3-3-13-15-15-24-71-180-34-393 112-539 185-185 479-195 676-31l0 0c-173-140-428-130-589 31-134 134-163 333-89 495 11 29 6 50-12 68-11 11-27 17-44 16z" horiz-adv-x="1001" />
(Icon font SVG changed; before: 24 KiB, after: 25 KiB.)

New binary files (not shown):
priv/static/static/font/fontello.1581425930672.woff
priv/static/static/font/fontello.1581425930672.woff2
priv/static/static/fontello.1581425930672.css (vendored)
@ -339,6 +339,12 @@
       "css": "arrow-curved",
       "code": 59426,
       "src": "iconic"
+    },
+    {
+      "uid": "0ddd3e8201ccc7d41f7b7c9d27eca6c1",
+      "css": "link",
+      "code": 59427,
+      "src": "fontawesome"
     }
   ]
 }
New binary files (not shown):
priv/static/static/js/app.f8af8a9b83e330e80903.js
priv/static/static/js/app.f8af8a9b83e330e80903.js.map
priv/static/static/js/vendors~app.52ac194cbc427f97f06e.js.map
@ -7,6 +7,8 @@ defmodule Pleroma.ActivityExpirationTest do
   alias Pleroma.ActivityExpiration
   import Pleroma.Factory

+  clear_config([ActivityExpiration, :enabled])
+
   test "finds activities due to be deleted only" do
     activity = insert(:note_activity)
     expiration_due = insert(:expiration_in_the_past, %{activity_id: activity.id})

@ -24,4 +26,27 @@ test "denies expirations that don't live long enough" do
     now = NaiveDateTime.utc_now()
     assert {:error, _} = ActivityExpiration.create(activity, now)
   end
+
+  test "deletes an expiration activity" do
+    Pleroma.Config.put([ActivityExpiration, :enabled], true)
+    activity = insert(:note_activity)
+
+    naive_datetime =
+      NaiveDateTime.add(
+        NaiveDateTime.utc_now(),
+        -:timer.minutes(2),
+        :millisecond
+      )
+
+    expiration =
+      insert(
+        :expiration_in_the_past,
+        %{activity_id: activity.id, scheduled_at: naive_datetime}
+      )
+
+    Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker.perform(:ops, :pid)
+
+    refute Pleroma.Repo.get(Pleroma.Activity, activity.id)
+    refute Pleroma.Repo.get(Pleroma.ActivityExpiration, expiration.id)
+  end
 end
@ -1,17 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.ActivityExpirationWorkerTest do
-  use Pleroma.DataCase
-  alias Pleroma.Activity
-  import Pleroma.Factory
-
-  test "deletes an activity" do
-    activity = insert(:note_activity)
-    expiration = insert(:expiration_in_the_past, %{activity_id: activity.id})
-    Pleroma.Daemons.ActivityExpirationDaemon.perform(:execute, expiration.id)
-
-    refute Repo.get(Activity, activity.id)
-  end
-end
@ -1,19 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.ScheduledActivityDaemonTest do
-  use Pleroma.DataCase
-  alias Pleroma.ScheduledActivity
-  import Pleroma.Factory
-
-  test "creates a status from the scheduled activity" do
-    user = insert(:user)
-    scheduled_activity = insert(:scheduled_activity, user: user, params: %{status: "hi"})
-    Pleroma.Daemons.ScheduledActivityDaemon.perform(:execute, scheduled_activity.id)
-
-    refute Repo.get(ScheduledActivity, scheduled_activity.id)
-    activity = Repo.all(Pleroma.Activity) |> Enum.find(&(&1.actor == user.ap_id))
-    assert Pleroma.Object.normalize(activity).data["content"] == "hi"
-  end
-end
@ -8,11 +8,51 @@ defmodule Pleroma.ScheduledActivityTest do
   alias Pleroma.ScheduledActivity
   import Pleroma.Factory

+  clear_config([ScheduledActivity, :enabled])
+
   setup context do
     DataCase.ensure_local_uploader(context)
   end

   describe "creation" do
+    test "scheduled activities with jobs when ScheduledActivity enabled" do
+      Pleroma.Config.put([ScheduledActivity, :enabled], true)
+      user = insert(:user)
+
+      today =
+        NaiveDateTime.utc_now()
+        |> NaiveDateTime.add(:timer.minutes(6), :millisecond)
+        |> NaiveDateTime.to_iso8601()
+
+      attrs = %{params: %{}, scheduled_at: today}
+      {:ok, sa1} = ScheduledActivity.create(user, attrs)
+      {:ok, sa2} = ScheduledActivity.create(user, attrs)
+
+      jobs =
+        Repo.all(from(j in Oban.Job, where: j.queue == "scheduled_activities", select: j.args))
+
+      assert jobs == [%{"activity_id" => sa1.id}, %{"activity_id" => sa2.id}]
+    end
+
+    test "scheduled activities without jobs when ScheduledActivity disabled" do
+      Pleroma.Config.put([ScheduledActivity, :enabled], false)
+      user = insert(:user)
+
+      today =
+        NaiveDateTime.utc_now()
+        |> NaiveDateTime.add(:timer.minutes(6), :millisecond)
+        |> NaiveDateTime.to_iso8601()
+
+      attrs = %{params: %{}, scheduled_at: today}
+      {:ok, _sa1} = ScheduledActivity.create(user, attrs)
+      {:ok, _sa2} = ScheduledActivity.create(user, attrs)
+
+      jobs =
+        Repo.all(from(j in Oban.Job, where: j.queue == "scheduled_activities", select: j.args))
+
+      assert jobs == []
+    end
+
     test "when daily user limit is exceeded" do
       user = insert(:user)

@ -24,6 +64,7 @@ test "when daily user limit is exceeded" do
       attrs = %{params: %{}, scheduled_at: today}
       {:ok, _} = ScheduledActivity.create(user, attrs)
       {:ok, _} = ScheduledActivity.create(user, attrs)
+
       {:error, changeset} = ScheduledActivity.create(user, attrs)
       assert changeset.errors == [scheduled_at: {"daily limit exceeded", []}]
     end
@ -54,6 +54,12 @@ defmacro __using__(_opts) do
       clear_config_all: 2
     ]

+  def to_datetime(naive_datetime) do
+    naive_datetime
+    |> DateTime.from_naive!("Etc/UTC")
+    |> DateTime.truncate(:second)
+  end
+
   def collect_ids(collection) do
     collection
     |> Enum.map(& &1.id)
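The to_datetime/1 helper added to DataCase above is there so tests can compare the NaiveDateTime values they build against Oban.Job.scheduled_at, which Oban stores as a DateTime. A standalone sketch of the same conversion (the sample timestamp is illustrative):

# Convert a NaiveDateTime to a UTC DateTime truncated to whole seconds,
# mirroring what the new DataCase helper does.
naive = ~N[2020-02-11 12:00:00.123456]

utc =
  naive
  |> DateTime.from_naive!("Etc/UTC")
  |> DateTime.truncate(:second)

# utc == ~U[2020-02-11 12:00:00Z]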
@ -9,6 +9,9 @@ defmodule Pleroma.Web.MastodonAPI.ScheduledActivityControllerTest do
   alias Pleroma.ScheduledActivity

   import Pleroma.Factory
+  import Ecto.Query
+
+  clear_config([ScheduledActivity, :enabled])

   test "shows scheduled activities" do
     %{user: user, conn: conn} = oauth_access(["read:statuses"])

@ -52,11 +55,26 @@ test "shows a scheduled activity" do
   end

   test "updates a scheduled activity" do
+    Pleroma.Config.put([ScheduledActivity, :enabled], true)
     %{user: user, conn: conn} = oauth_access(["write:statuses"])
-    scheduled_activity = insert(:scheduled_activity, user: user)
-
-    new_scheduled_at =
-      NaiveDateTime.add(NaiveDateTime.utc_now(), :timer.minutes(120), :millisecond)
+
+    scheduled_at = Timex.shift(NaiveDateTime.utc_now(), minutes: 60)
+
+    {:ok, scheduled_activity} =
+      ScheduledActivity.create(
+        user,
+        %{
+          scheduled_at: scheduled_at,
+          params: build(:note).data
+        }
+      )
+
+    job = Repo.one(from(j in Oban.Job, where: j.queue == "scheduled_activities"))
+
+    assert job.args == %{"activity_id" => scheduled_activity.id}
+    assert DateTime.truncate(job.scheduled_at, :second) == to_datetime(scheduled_at)
+
+    new_scheduled_at = Timex.shift(NaiveDateTime.utc_now(), minutes: 120)

     res_conn =
       put(conn, "/api/v1/scheduled_statuses/#{scheduled_activity.id}", %{

@ -65,6 +83,9 @@ test "updates a scheduled activity" do
     assert %{"scheduled_at" => expected_scheduled_at} = json_response(res_conn, 200)
     assert expected_scheduled_at == Pleroma.Web.CommonAPI.Utils.to_masto_date(new_scheduled_at)
+    job = refresh_record(job)
+
+    assert DateTime.truncate(job.scheduled_at, :second) == to_datetime(new_scheduled_at)

     res_conn = put(conn, "/api/v1/scheduled_statuses/404", %{scheduled_at: new_scheduled_at})

@ -72,8 +93,22 @@ test "updates a scheduled activity" do
   end

   test "deletes a scheduled activity" do
+    Pleroma.Config.put([ScheduledActivity, :enabled], true)
     %{user: user, conn: conn} = oauth_access(["write:statuses"])
-    scheduled_activity = insert(:scheduled_activity, user: user)
+    scheduled_at = Timex.shift(NaiveDateTime.utc_now(), minutes: 60)
+
+    {:ok, scheduled_activity} =
+      ScheduledActivity.create(
+        user,
+        %{
+          scheduled_at: scheduled_at,
+          params: build(:note).data
+        }
+      )
+
+    job = Repo.one(from(j in Oban.Job, where: j.queue == "scheduled_activities"))
+
+    assert job.args == %{"activity_id" => scheduled_activity.id}

     res_conn =
       conn

@ -81,7 +116,8 @@ test "deletes a scheduled activity" do
       |> delete("/api/v1/scheduled_statuses/#{scheduled_activity.id}")

     assert %{} = json_response(res_conn, 200)
-    assert nil == Repo.get(ScheduledActivity, scheduled_activity.id)
+    refute Repo.get(ScheduledActivity, scheduled_activity.id)
+    refute Repo.get(Oban.Job, job.id)

     res_conn =
       conn
@ -37,15 +37,15 @@ test "has an emoji reaction list" do
     status = StatusView.render("show.json", activity: activity)

     assert status[:pleroma][:emoji_reactions] == [
-             %{emoji: "☕", count: 2, reacted: false},
+             %{name: "☕", count: 2, me: false},
-             %{emoji: "🍵", count: 1, reacted: false}
+             %{name: "🍵", count: 1, me: false}
           ]

     status = StatusView.render("show.json", activity: activity, for: user)

     assert status[:pleroma][:emoji_reactions] == [
-             %{emoji: "☕", count: 2, reacted: true},
+             %{name: "☕", count: 2, me: true},
-             %{emoji: "🍵", count: 1, reacted: false}
+             %{name: "🍵", count: 1, me: false}
           ]
   end
@ -6,6 +6,7 @@ defmodule Pleroma.Web.NodeInfoTest do
   use Pleroma.Web.ConnCase

   import Pleroma.Factory
+  clear_config([:mrf_simple])

   test "GET /.well-known/nodeinfo", %{conn: conn} do
     links =
@ -14,7 +14,7 @@ defmodule Pleroma.Web.PleromaAPI.PleromaAPIControllerTest do

   import Pleroma.Factory

-  test "POST /api/v1/pleroma/statuses/:id/react_with_emoji", %{conn: conn} do
+  test "PUT /api/v1/pleroma/statuses/:id/reactions/:emoji", %{conn: conn} do
     user = insert(:user)
     other_user = insert(:user)

@ -24,18 +24,19 @@ test "POST /api/v1/pleroma/statuses/:id/react_with_emoji", %{conn: conn} do
       conn
       |> assign(:user, other_user)
       |> assign(:token, insert(:oauth_token, user: other_user, scopes: ["write:statuses"]))
-      |> post("/api/v1/pleroma/statuses/#{activity.id}/react_with_emoji", %{"emoji" => "☕"})
+      |> put("/api/v1/pleroma/statuses/#{activity.id}/reactions/☕")
       |> json_response(200)

+    # We return the status, but this our implementation detail.
     assert %{"id" => id} = result
     assert to_string(activity.id) == id

     assert result["pleroma"]["emoji_reactions"] == [
-             %{"emoji" => "☕", "count" => 1, "reacted" => true}
+             %{"name" => "☕", "count" => 1, "me" => true}
           ]
   end

-  test "POST /api/v1/pleroma/statuses/:id/unreact_with_emoji", %{conn: conn} do
+  test "DELETE /api/v1/pleroma/statuses/:id/reactions/:emoji", %{conn: conn} do
     user = insert(:user)
     other_user = insert(:user)

@ -46,7 +47,7 @@ test "POST /api/v1/pleroma/statuses/:id/unreact_with_emoji", %{conn: conn} do
       conn
       |> assign(:user, other_user)
       |> assign(:token, insert(:oauth_token, user: other_user, scopes: ["write:statuses"]))
-      |> post("/api/v1/pleroma/statuses/#{activity.id}/unreact_with_emoji", %{"emoji" => "☕"})
+      |> delete("/api/v1/pleroma/statuses/#{activity.id}/reactions/☕")

     assert %{"id" => id} = json_response(result, 200)
     assert to_string(activity.id) == id

@ -56,7 +57,7 @@ test "POST /api/v1/pleroma/statuses/:id/unreact_with_emoji", %{conn: conn} do
     assert object.data["reaction_count"] == 0
   end

-  test "GET /api/v1/pleroma/statuses/:id/emoji_reactions_by", %{conn: conn} do
+  test "GET /api/v1/pleroma/statuses/:id/reactions", %{conn: conn} do
     user = insert(:user)
     other_user = insert(:user)
     doomed_user = insert(:user)

@ -65,7 +66,7 @@ test "GET /api/v1/pleroma/statuses/:id/emoji_reactions_by", %{conn: conn} do
     result =
       conn
-      |> get("/api/v1/pleroma/statuses/#{activity.id}/emoji_reactions_by")
+      |> get("/api/v1/pleroma/statuses/#{activity.id}/reactions")
       |> json_response(200)

     assert result == []

@ -77,11 +78,10 @@ test "GET /api/v1/pleroma/statuses/:id/emoji_reactions_by", %{conn: conn} do
     result =
       conn
-      |> get("/api/v1/pleroma/statuses/#{activity.id}/emoji_reactions_by")
+      |> get("/api/v1/pleroma/statuses/#{activity.id}/reactions")
       |> json_response(200)

-    [%{"emoji" => "🎅", "count" => 1, "accounts" => [represented_user], "reacted" => false}] =
-      result
+    [%{"name" => "🎅", "count" => 1, "accounts" => [represented_user], "me" => false}] = result

     assert represented_user["id"] == other_user.id

@ -89,10 +89,10 @@ test "GET /api/v1/pleroma/statuses/:id/emoji_reactions_by", %{conn: conn} do
       conn
       |> assign(:user, other_user)
       |> assign(:token, insert(:oauth_token, user: other_user, scopes: ["read:statuses"]))
-      |> get("/api/v1/pleroma/statuses/#{activity.id}/emoji_reactions_by")
+      |> get("/api/v1/pleroma/statuses/#{activity.id}/reactions")
       |> json_response(200)

-    assert [%{"emoji" => "🎅", "count" => 1, "accounts" => [_represented_user], "reacted" => true}] =
+    assert [%{"name" => "🎅", "count" => 1, "accounts" => [_represented_user], "me" => true}] =
             result
   end
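The controller tests above cover the Mastodon-aligned endpoints that replace the old react_with_emoji/unreact_with_emoji routes: PUT and DELETE /api/v1/pleroma/statuses/:id/reactions/:emoji and GET /api/v1/pleroma/statuses/:id/reactions, with reaction entries keyed by "name" and "me" instead of "emoji" and "reacted". A hedged client-side sketch using HTTPoison (the instance URL, status id and token are placeholder values, not part of this commit):

# Illustrative only; assumes an OAuth token with the write:statuses / read:statuses scopes.
base = "https://pleroma.example"
status_id = "some-status-id"
headers = [{"authorization", "Bearer placeholder-token"}]
emoji = URI.encode("☕")

# Add a reaction
{:ok, _} =
  HTTPoison.put("#{base}/api/v1/pleroma/statuses/#{status_id}/reactions/#{emoji}", "", headers)

# List reactions; each entry now looks like %{"name" => "☕", "count" => 1, "me" => true}
{:ok, %HTTPoison.Response{body: body}} =
  HTTPoison.get("#{base}/api/v1/pleroma/statuses/#{status_id}/reactions", headers)

IO.inspect(Jason.decode!(body), label: "reactions")

# Remove the reaction again
{:ok, _} =
  HTTPoison.delete("#{base}/api/v1/pleroma/statuses/#{status_id}/reactions/#{emoji}", headers)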
@ -7,11 +7,14 @@ defmodule Pleroma.Web.RichMedia.Parsers.TwitterCardTest do
   alias Pleroma.Web.RichMedia.Parsers.TwitterCard

   test "returns error when html not contains twitter card" do
-    assert TwitterCard.parse("", %{}) == {:error, "No twitter card metadata found"}
+    assert TwitterCard.parse([{"html", [], [{"head", [], []}, {"body", [], []}]}], %{}) ==
+             {:error, "No twitter card metadata found"}
   end

   test "parses twitter card with only name attributes" do
-    html = File.read!("test/fixtures/nypd-facial-recognition-children-teenagers3.html")
+    html =
+      File.read!("test/fixtures/nypd-facial-recognition-children-teenagers3.html")
+      |> Floki.parse_document!()

     assert TwitterCard.parse(html, %{}) ==
              {:ok,

@ -26,7 +29,9 @@ test "parses twitter card with only name attributes" do
   end

   test "parses twitter card with only property attributes" do
-    html = File.read!("test/fixtures/nypd-facial-recognition-children-teenagers2.html")
+    html =
+      File.read!("test/fixtures/nypd-facial-recognition-children-teenagers2.html")
+      |> Floki.parse_document!()

     assert TwitterCard.parse(html, %{}) ==
              {:ok,

@ -45,7 +50,9 @@ test "parses twitter card with only property attributes" do
   end

   test "parses twitter card with name & property attributes" do
-    html = File.read!("test/fixtures/nypd-facial-recognition-children-teenagers.html")
+    html =
+      File.read!("test/fixtures/nypd-facial-recognition-children-teenagers.html")
+      |> Floki.parse_document!()

     assert TwitterCard.parse(html, %{}) ==
              {:ok,

@ -73,7 +80,8 @@ test "respect only first title tag on the page" do
         "YTQ5MF9EQVIgZXhodW1hdGlvbiBvZiBNYXJnYXJldCBDb3JiaW4gZ3JhdmUgMTkyNi5qcGciXSxbInAiLCJjb252ZXJ0IiwiIl0sWyJwIiwiY29udmVydCIsIi1xdWFsaXR5IDgxIC1hdXRvLW9" <>
         "yaWVudCJdLFsicCIsInRodW1iIiwiNjAweD4iXV0/DAR%20exhumation%20of%20Margaret%20Corbin%20grave%201926.jpg"

-    html = File.read!("test/fixtures/margaret-corbin-grave-west-point.html")
+    html =
+      File.read!("test/fixtures/margaret-corbin-grave-west-point.html") |> Floki.parse_document!()

     assert TwitterCard.parse(html, %{}) ==
              {:ok,

@ -87,7 +95,9 @@ test "respect only first title tag on the page" do
   end

   test "takes first founded title in html head if there is html markup error" do
-    html = File.read!("test/fixtures/nypd-facial-recognition-children-teenagers4.html")
+    html =
+      File.read!("test/fixtures/nypd-facial-recognition-children-teenagers4.html")
+      |> Floki.parse_document!()

     assert TwitterCard.parse(html, %{}) ==
              {:ok,
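The fixture changes above all follow from the floki upgrade to ~> 0.25: TwitterCard.parse/2 now receives an already-parsed Floki document instead of a raw HTML string, so callers run Floki.parse_document!/1 themselves. A minimal sketch of that pipeline (the fixture path is one of the files used in these tests):

# Read a fixture, parse it into a Floki document, then extract the Twitter card metadata.
html_tree =
  "test/fixtures/nypd-facial-recognition-children-teenagers.html"
  |> File.read!()
  |> Floki.parse_document!()

{:ok, metadata} = Pleroma.Web.RichMedia.Parsers.TwitterCard.parse(html_tree, %{})
IO.inspect(metadata, label: "twitter card")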
test/workers/cron/clear_oauth_token_worker_test.exs (new file)

@ -0,0 +1,22 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.Cron.ClearOauthTokenWorkerTest do
+  use Pleroma.DataCase
+
+  import Pleroma.Factory
+  alias Pleroma.Workers.Cron.ClearOauthTokenWorker
+
+  clear_config([:oauth2, :clean_expired_tokens])
+
+  test "deletes expired tokens" do
+    insert(:oauth_token,
+      valid_until: NaiveDateTime.add(NaiveDateTime.utc_now(), -60 * 10)
+    )
+
+    Pleroma.Config.put([:oauth2, :clean_expired_tokens], true)
+    ClearOauthTokenWorker.perform(:opts, :job)
+    assert Pleroma.Repo.all(Pleroma.Web.OAuth.Token) == []
+  end
+end
@ -2,16 +2,24 @@
 # Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.DigestEmailDaemonTest do
+defmodule Pleroma.Workers.Cron.DigestEmailsWorkerTest do
   use Pleroma.DataCase

   import Pleroma.Factory

-  alias Pleroma.Daemons.DigestEmailDaemon
   alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.CommonAPI

+  clear_config([:email_notifications, :digest])
+
   test "it sends digest emails" do
+    Pleroma.Config.put([:email_notifications, :digest], %{
+      active: true,
+      inactivity_threshold: 7,
+      interval: 7
+    })
+
     user = insert(:user)

     date =

@ -23,8 +31,7 @@ test "it sends digest emails" do
     {:ok, _} = User.switch_email_notifications(user2, "digest", true)
     CommonAPI.post(user, %{"status" => "hey @#{user2.nickname}!"})

-    DigestEmailDaemon.perform()
+    Pleroma.Workers.Cron.DigestEmailsWorker.perform(:opts, :pid)
-    ObanHelpers.perform_all()
     # Performing job(s) enqueued at previous step
     ObanHelpers.perform_all()

test/workers/cron/purge_expired_activities_worker_test.exs (new file)

@ -0,0 +1,56 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.Cron.PurgeExpiredActivitiesWorkerTest do
+  use Pleroma.DataCase
+
+  alias Pleroma.ActivityExpiration
+  alias Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker
+
+  import Pleroma.Factory
+  import ExUnit.CaptureLog
+
+  clear_config([ActivityExpiration, :enabled])
+
+  test "deletes an expiration activity" do
+    Pleroma.Config.put([ActivityExpiration, :enabled], true)
+    activity = insert(:note_activity)
+
+    naive_datetime =
+      NaiveDateTime.add(
+        NaiveDateTime.utc_now(),
+        -:timer.minutes(2),
+        :millisecond
+      )
+
+    expiration =
+      insert(
+        :expiration_in_the_past,
+        %{activity_id: activity.id, scheduled_at: naive_datetime}
+      )
+
+    Pleroma.Workers.Cron.PurgeExpiredActivitiesWorker.perform(:ops, :pid)
+
+    refute Pleroma.Repo.get(Pleroma.Activity, activity.id)
+    refute Pleroma.Repo.get(Pleroma.ActivityExpiration, expiration.id)
+  end
+
+  describe "delete_activity/1" do
+    test "adds log message if activity isn't find" do
+      assert capture_log([level: :error], fn ->
+               PurgeExpiredActivitiesWorker.delete_activity(%ActivityExpiration{
+                 activity_id: "test-activity"
+               })
+             end) =~ "Couldn't delete expired activity: not found activity"
+    end
+
+    test "adds log message if actor isn't find" do
+      assert capture_log([level: :error], fn ->
+               PurgeExpiredActivitiesWorker.delete_activity(%ActivityExpiration{
+                 activity_id: "test-activity"
+               })
+             end) =~ "Couldn't delete expired activity: not found activity"
+    end
+  end
+end

test/workers/scheduled_activity_worker_test.exs (new file)

@ -0,0 +1,52 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.ScheduledActivityWorkerTest do
+  use Pleroma.DataCase
+
+  alias Pleroma.ScheduledActivity
+  alias Pleroma.Workers.ScheduledActivityWorker
+
+  import Pleroma.Factory
+  import ExUnit.CaptureLog
+
+  clear_config([ScheduledActivity, :enabled])
+
+  test "creates a status from the scheduled activity" do
+    Pleroma.Config.put([ScheduledActivity, :enabled], true)
+    user = insert(:user)
+
+    naive_datetime =
+      NaiveDateTime.add(
+        NaiveDateTime.utc_now(),
+        -:timer.minutes(2),
+        :millisecond
+      )
+
+    scheduled_activity =
+      insert(
+        :scheduled_activity,
+        scheduled_at: naive_datetime,
+        user: user,
+        params: %{status: "hi"}
+      )
+
+    ScheduledActivityWorker.perform(
+      %{"activity_id" => scheduled_activity.id},
+      :pid
+    )
+
+    refute Repo.get(ScheduledActivity, scheduled_activity.id)
+    activity = Repo.all(Pleroma.Activity) |> Enum.find(&(&1.actor == user.ap_id))
+    assert Pleroma.Object.normalize(activity).data["content"] == "hi"
+  end
+
+  test "adds log message if ScheduledActivity isn't find" do
+    Pleroma.Config.put([ScheduledActivity, :enabled], true)
+
+    assert capture_log([level: :error], fn ->
+             ScheduledActivityWorker.perform(%{"activity_id" => 42}, :pid)
+           end) =~ "Couldn't find scheduled activity"
+  end
+end
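The new worker tests above call perform/2 directly, e.g. ClearOauthTokenWorker.perform(:opts, :job), which is the args-plus-job contract Oban 0.12 expects from its workers. The worker modules themselves are not part of this excerpt; the following is only a rough sketch, assuming they follow the standard use Oban.Worker pattern, with a hypothetical module name, queue and body:

defmodule MyApp.Workers.Cron.ExampleCleanupWorker do
  # Hypothetical example; the real modules in this commit live under Pleroma.Workers.Cron.*.
  use Oban.Worker, queue: "background"

  def perform(_args, _job) do
    # Place the periodic cleanup work here; returning :ok marks the job as completed.
    :ok
  end
end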