forked from AkkomaGang/akkoma

Commit e885b49e35: Merge remote-tracking branch 'pleroma/develop' into cycles-router

174 changed files with 5125 additions and 1278 deletions
@@ -8,7 +8,9 @@ variables: &global_variables
   MIX_ENV: test

 cache: &global_cache_policy
-  key: ${CI_COMMIT_REF_SLUG}
+  key:
+    files:
+      - mix.lock
   paths:
     - deps
     - _build

@@ -22,16 +24,20 @@ stages:
   - docker

 before_script:
+  - rm -rf _build/*/lib/pleroma
   - apt-get update && apt-get install -y cmake
   - mix local.hex --force
   - mix local.rebar --force
+  - mix deps.get
   - apt-get -qq update
   - apt-get install -y libmagic-dev

+after_script:
+  - rm -rf _build/*/lib/pleroma
+
 build:
   stage: build
   script:
-  - mix deps.get
   - mix compile --force

 spec-build:

@@ -52,7 +58,6 @@ benchmark:
       alias: postgres
   command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
   script:
-  - mix deps.get
   - mix ecto.create
   - mix ecto.migrate
   - mix pleroma.load_testing

@@ -70,7 +75,6 @@ unit-testing:
   command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"]
   script:
   - apt-get update && apt-get install -y libimage-exiftool-perl ffmpeg
-  - mix deps.get
   - mix ecto.create
   - mix ecto.migrate
   - mix coveralls --preload-modules

@@ -104,7 +108,6 @@ unit-testing-rum:
     RUM_ENABLED: "true"
   script:
   - apt-get update && apt-get install -y libimage-exiftool-perl ffmpeg
-  - mix deps.get
   - mix ecto.create
   - mix ecto.migrate
   - "mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/"

@@ -120,7 +123,6 @@ analysis:
   stage: test
   cache: *testing_cache_policy
   script:
-  - mix deps.get
   - mix credo --strict --only=warnings,todo,fixme,consistency,readability

 docs-deploy:
CHANGELOG.md (33 changes)

@@ -4,6 +4,35 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

+## Unreleased
+
+### Changed
+
+- The `application` metadata returned with statuses is no longer hardcoded. Apps that want to display these details will now have valid data for new posts after this change.
+- HTTPSecurityPlug now sends a response header to opt out of Google's FLoC (Federated Learning of Cohorts) targeted advertising.
+
+### Added
+
+- MRF (`FollowBotPolicy`): New MRF Policy which makes a designated local Bot account attempt to follow all users in public Notes received by your instance. Users who require approving follower requests or have #nobot in their profile are excluded.
+- Return OAuth token `id` (primary key) in POST `/oauth/token`.
+- `AnalyzeMetadata` upload filter for extracting attachment dimensions and generating blurhashes.
+- Attachment dimensions and blurhashes are federated when available.
+
+### Fixed
+- Don't crash so hard when email settings are invalid.
+- Checking activated Upload Filters for required commands.
+
+## Unreleased (Patch)
+
+### Fixed
+
+- Try to save exported ConfigDB settings (migrate_from_db) in the system temp directory if default location is not writable.
+- Uploading custom instance thumbnail via AdminAPI/AdminFE generated invalid URL to the image
+- Applying ConcurrentLimiter settings via AdminAPI
+- User login failures if their `notification_settings` were in a NULL state.
+- Mix task `pleroma.user delete_activities` query transaction timeout is now :infinity
+- Fixed some Markdown issues, including trailing slash in links.
+
 ## [2.3.0] - 2020-03-01

 ### Security

@@ -18,6 +47,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).

 - **Breaking**: Changed `mix pleroma.user toggle_confirmed` to `mix pleroma.user confirm`
 - **Breaking**: Changed `mix pleroma.user toggle_activated` to `mix pleroma.user activate/deactivate`
+- **Breaking:** NSFW hashtag is no longer added on sensitive posts
 - Polls now always return a `voters_count`, even if they are single-choice.
 - Admin Emails: The ap id is used as the user link in emails now.
 - Improved registration workflow for email confirmation and account approval modes.

@@ -44,6 +74,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Pleroma API: Reroute `/api/pleroma/*` to `/api/v1/pleroma/*`

 </details>
+- Improved hashtag timeline performance (requires a background migration).

 ### Added

@@ -67,6 +98,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 <details>
 <summary>API Changes</summary>
 - Admin API: (`GET /api/pleroma/admin/users`) filter users by `unconfirmed` status and `actor_type`.
+- Admin API: OpenAPI spec for the user-related operations
 - Pleroma API: `GET /api/v2/pleroma/chats` added. It is exactly like `GET /api/v1/pleroma/chats` except supports pagination.
 - Pleroma API: Add `idempotency_key` to the chat message entity that can be used for optimistic message sending.
 - Pleroma API: (`GET /api/v1/pleroma/federation_status`) Add a way to get a list of unreachable instances.

@@ -498,7 +530,6 @@ switched to a new configuration mechanism, however it was not officially removed
 - Static-FE: Fix remote posts not being sanitized

 ### Fixed
-=======
 - Rate limiter crashes when there is no explicitly specified ip in the config
 - 500 errors when no `Accept` header is present if Static-FE is enabled
 - Instance panel not being updated immediately due to wrong `Cache-Control` headers
@@ -391,6 +391,11 @@
   federated_timeline_removal: [],
   replace: []

+config :pleroma, :mrf_hashtag,
+  sensitive: ["nsfw"],
+  reject: [],
+  federated_timeline_removal: []
+
 config :pleroma, :mrf_subchain, match_actor: %{}

 config :pleroma, :mrf_activity_expiration, days: 365

@@ -404,6 +409,8 @@
   threshold: 604_800,
   actions: [:delist, :strip_followers]

+config :pleroma, :mrf_follow_bot, follower_nickname: nil
+
 config :pleroma, :rich_media,
   enabled: true,
   ignore_hosts: [],

@@ -654,6 +661,10 @@

 config :pleroma, :database, rum_enabled: false

+config :pleroma, :features, improved_hashtag_timeline: :auto
+
+config :pleroma, :populate_hashtags_table, fault_rate_allowance: 0.01
+
 config :pleroma, :env, Mix.env()

 config :http_signatures,
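For reference, a hedged sketch of how an admin might override these new defaults in their own instance config. Only the keys come from the hunks above; the concrete values, the `sleep_interval_ms` tuning, and the `prod.secret.exs` placement are illustrative assumptions.

```elixir
# Illustrative overrides in an instance's own secret config (e.g. prod.secret.exs).
import Config

# Force the improved hashtag timeline on instead of leaving it on :auto.
config :pleroma, :features, improved_hashtag_timeline: :enabled

# Tolerate up to 5% failed objects during the hashtag background migration,
# and pause briefly between chunks to reduce load.
config :pleroma, :populate_hashtags_table,
  fault_rate_allowance: 0.05,
  sleep_interval_ms: 10

# Designate the local bot account used by FollowBotPolicy.
config :pleroma, :mrf_follow_bot, follower_nickname: "followbot"
```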
@@ -459,6 +459,42 @@
       }
     ]
   },
+  %{
+    group: :pleroma,
+    key: :features,
+    type: :group,
+    description: "Customizable features",
+    children: [
+      %{
+        key: :improved_hashtag_timeline,
+        type: {:dropdown, :atom},
+        description:
+          "Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).",
+        suggestions: [:auto, :enabled, :disabled]
+      }
+    ]
+  },
+  %{
+    group: :pleroma,
+    key: :populate_hashtags_table,
+    type: :group,
+    description: "`populate_hashtags_table` background migration settings",
+    children: [
+      %{
+        key: :fault_rate_allowance,
+        type: :float,
+        description:
+          "Max accepted rate of objects that failed in the migration. Any value from 0.0 which tolerates no errors to 1.0 which will enable the feature even if hashtags transfer failed for all records.",
+        suggestions: [0.01]
+      },
+      %{
+        key: :sleep_interval_ms,
+        type: :integer,
+        description:
+          "Sleep interval between each chunk of processed records in order to decrease the load on the system (defaults to 0 and should be keep default on most instances)."
+      }
+    ]
+  },
   %{
     group: :pleroma,
     key: :instance,

@@ -2906,6 +2942,23 @@
       }
     ]
   },
+  %{
+    group: :pleroma,
+    key: :mrf_follow_bot,
+    tab: :mrf,
+    related_policy: "Pleroma.Web.ActivityPub.MRF.FollowBotPolicy",
+    label: "MRF FollowBot Policy",
+    type: :group,
+    description: "Automatically follows newly discovered accounts.",
+    children: [
+      %{
+        key: :follower_nickname,
+        type: :string,
+        description: "The name of the bot account to use for following newly discovered users.",
+        suggestions: ["followbot"]
+      }
+    ]
+  },
   %{
     group: :pleroma,
     key: :modules,
@@ -63,7 +63,12 @@

 # Finally import the config/prod.secret.exs
 # which should be versioned separately.
+if File.exists?("./config/prod.secret.exs") do
   import_config "prod.secret.exs"
+else
+  "`config/prod.secret.exs` not found. You may want to create one by running `mix pleroma.instance gen`"
+  |> IO.warn([])
+end

 if File.exists?("./config/prod.exported_from_db.secret.exs"),
   do: import_config("prod.exported_from_db.secret.exs")
@@ -32,16 +32,20 @@
     config :pleroma, configurable_from_database: false
     ```

-To delete transferred settings from database optional flag `-d` can be used. `<env>` is `prod` by default.
+Options:
+
+- `<path>` - where to save migrated config. E.g. `--path=/tmp`. If file saved into non standart folder, you must manually copy file into directory where Pleroma can read it. For OTP install path will be `PLEROMA_CONFIG_PATH` or `/etc/pleroma`. For installation from source - `config` directory in the pleroma folder.
+- `<env>` - environment, for which is migrated config. By default is `prod`.
+- To delete transferred settings from database optional flag `-d` can be used

 === "OTP"
     ```sh
-    ./bin/pleroma_ctl config migrate_from_db [--env=<env>] [-d]
+    ./bin/pleroma_ctl config migrate_from_db [--env=<env>] [-d] [--path=<path>]
     ```

 === "From Source"
     ```sh
-    mix pleroma.config migrate_from_db [--env=<env>] [-d]
+    mix pleroma.config migrate_from_db [--env=<env>] [-d] [--path=<path>]
     ```

 ## Dump all of the config settings defined in the database
@@ -65,6 +65,13 @@ To add configuration to your config file, you can copy it from the base config.
 * `show_reactions`: Let favourites and emoji reactions be viewed through the API (default: `true`).
 * `password_reset_token_validity`: The time after which reset tokens aren't accepted anymore, in seconds (default: one day).

+## :database
+* `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).
+
+## Background migrations
+* `populate_hashtags_table/sleep_interval_ms`: Sleep interval between each chunk of processed records in order to decrease the load on the system (defaults to 0 and should be keep default on most instances).
+* `populate_hashtags_table/fault_rate_allowance`: Max rate of failed objects to actually processed objects in order to enable the feature (any value from 0.0 which tolerates no errors to 1.0 which will enable the feature even if hashtags transfer failed for all records).
+
 ## Welcome
 * `direct_message`: - welcome message sent as a direct message.
 * `enabled`: Enables the send a direct message to a newly registered user. Defaults to `false`.

@@ -117,6 +124,7 @@ To add configuration to your config file, you can copy it from the base config.
 * `Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy`: Rejects or delists posts based on their age when received. (See [`:mrf_object_age`](#mrf_object_age)).
 * `Pleroma.Web.ActivityPub.MRF.ActivityExpirationPolicy`: Sets a default expiration on all posts made by users of the local instance. Requires `Pleroma.Workers.PurgeExpiredActivity` to be enabled for processing the scheduled delections.
 * `Pleroma.Web.ActivityPub.MRF.ForceBotUnlistedPolicy`: Makes all bot posts to disappear from public timelines.
+* `Pleroma.Web.ActivityPub.MRF.FollowBotPolicy`: Automatically follows newly discovered users from the specified bot account. Local accounts, locked accounts, and users with "#nobot" in their bio are respected and excluded from being followed.
 * `transparency`: Make the content of your Message Rewrite Facility settings public (via nodeinfo).
 * `transparency_exclusions`: Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value.

@@ -203,6 +211,21 @@ config :pleroma, :mrf_user_allowlist, %{

 * `days`: Default global expiration time for all local Create activities (in days)

+#### :mrf_hashtag
+
+* `sensitive`: List of hashtags to mark activities as sensitive (default: `nsfw`)
+* `federated_timeline_removal`: List of hashtags to remove activities from the federated timeline (aka TWNK)
+* `reject`: List of hashtags to reject activities from
+
+Notes:
+- The hashtags in the configuration do not have a leading `#`.
+- This MRF Policy is always enabled, if you want to disable it you have to set empty lists
+
+#### :mrf_follow_bot
+
+* `follower_nickname`: The name of the bot account to use for following newly discovered users. Using `followbot` or similar is strongly suggested.
+
+
 ### :activitypub
 * `unfollow_blocked`: Whether blocks result in people getting unfollowed
 * `outgoing_blocks`: Whether to federate blocks to other instances
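A rough sketch of how the two new sections above fit together in an instance's config. The policy module and option keys are the ones documented in the diff; the concrete values and the standard `:mrf, policies:` activation list are assumptions made for the example.

```elixir
# Illustrative only: enable FollowBotPolicy and tune the always-on hashtag MRF.
config :pleroma, :mrf,
  policies: [Pleroma.Web.ActivityPub.MRF.FollowBotPolicy]

config :pleroma, :mrf_follow_bot, follower_nickname: "followbot"

config :pleroma, :mrf_hashtag,
  sensitive: ["nsfw"],
  reject: ["spam"],
  federated_timeline_removal: []
```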
@@ -38,6 +38,7 @@ Has these additional fields under the `pleroma` object:
 - `thread_muted`: true if the thread the post belongs to is muted
 - `emoji_reactions`: A list with emoji / reaction maps. The format is `{name: "☕", count: 1, me: true}`. Contains no information about the reacting users, for that use the `/statuses/:id/reactions` endpoint.
 - `parent_visible`: If the parent of this post is visible to the user or not.
+- `pinned_at`: a datetime (iso8601) when status was pinned, `null` otherwise.

 ## Scheduled statuses

@@ -255,9 +256,29 @@ This information is returned in the `/api/v1/accounts/verify_credentials` endpoint

 *Pleroma supports refreshing tokens.*

-`POST /oauth/token`
+### POST `/oauth/token`

-Post here request with `grant_type=refresh_token` to obtain new access token. Returns an access token.
+You can obtain access tokens for a user in a few additional ways.
+
+#### Refreshing a token
+
+To obtain a new access token from a refresh token, pass `grant_type=refresh_token` with the following extra parameters:
+
+- `refresh_token`: The refresh token.
+
+#### Getting a token with a password
+
+To obtain a token from a user's password, pass `grant_type=password` with the following extra parameters:
+
+- `username`: Username to authenticate.
+- `password`: The user's password.
+
+#### Response body
+
+Additional fields are returned in the response:
+
+- `id`: The primary key of this token in Pleroma's database.
+- `me` (user tokens only): The ActivityPub ID of the user who owns the token.

 ## Account Registration

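A minimal sketch of the password grant described above, using only OTP's built-in `:httpc` client. The instance URL, app credentials (`client_id`/`client_secret` are the usual OAuth app credentials, not something introduced by this diff), and user credentials are placeholders.

```elixir
# Hedged example: exchange a username/password for a token at POST /oauth/token.
# Everything marked "..." is a placeholder; the response JSON should contain the
# extra `id` and `me` fields documented above.
{:ok, _} = Application.ensure_all_started(:inets)
{:ok, _} = Application.ensure_all_started(:ssl)

body =
  URI.encode_query(%{
    "grant_type" => "password",
    "client_id" => "...",
    "client_secret" => "...",
    "username" => "joeuser",
    "password" => "..."
  })

{:ok, {{_, 200, _}, _headers, json}} =
  :httpc.request(
    :post,
    {'https://pleroma.example/oauth/token', [], 'application/x-www-form-urlencoded', body},
    [],
    []
  )

IO.puts(json)
```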
@@ -20,7 +20,7 @@ The default front-end used by Pleroma is Pleroma-FE. You can find more information

 ### Mastodon interface
 If the Pleroma interface isn't your thing, or you're just trying something new but you want to keep using the familiar Mastodon interface, we got that too!
-Just add a "/web" after your instance url (e.g. <https://pleroma.soycaf.com/web>) and you'll end on the Mastodon web interface, but with a Pleroma backend! MAGIC!
+Just add a "/web" after your instance url (e.g. <https://pleroma.soykaf.com/web>) and you'll end on the Mastodon web interface, but with a Pleroma backend! MAGIC!
 The Mastodon interface is from the Glitch-soc fork. For more information on the Mastodon interface you can check the [Mastodon](https://docs.joinmastodon.org/) and [Glitch-soc](https://glitch-soc.github.io/docs/) documentation.

 Remember, what you see is only the frontend part of Mastodon, the backend is still Pleroma.
@@ -117,7 +117,7 @@ cd /opt/pleroma
 sudo -Hu pleroma mix deps.get
 ```

-* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen`
+* Generate the configuration: `sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen`
   * Answer with `yes` if it asks you to install `rebar3`.
   * This may take some time, because parts of pleroma get compiled first.
   * After that it will ask you a few questions about your instance and generates a configuration file in `config/generated_config.exs`.

@@ -92,7 +92,7 @@ cd /opt/pleroma
 sudo -Hu pleroma mix deps.get
 ```

-* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen`
+* Generate the configuration: `sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen`
   * Answer with `yes` if it asks you to install `rebar3`.
   * This may take some time, because parts of pleroma get compiled first.
   * After that it will ask you a few questions about your instance and generates a configuration file in `config/generated_config.exs`.

@@ -90,7 +90,7 @@ cd /opt/pleroma
 sudo -Hu pleroma mix deps.get
 ```

-* Generate the configuration: `sudo -Hu pleroma mix pleroma.instance gen`
+* Generate the configuration: `sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen`
   * Answer with `yes` if it asks you to install `rebar3`.
   * This may take some time, because parts of pleroma get compiled first.
   * After that it will ask you a few questions about your instance and generates a configuration file in `config/generated_config.exs`.
@@ -89,7 +89,7 @@ sudo -Hu pleroma mix deps.get

 * Generate the configuration:
 ```
-sudo -Hu pleroma mix pleroma.instance gen
+sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen
 ```
 * If it asks whether to install rebar3, answer yes.
 * Parts of pleroma are compiled at this step, so it will take some time.

@@ -103,7 +103,7 @@ sudo -Hu pleroma mv config/{generated_config.exs,prod.secret.exs}

 * The earlier command has already created the file `config/setup_db.psql`. Use it to create the database:
 ```
-sudo -Hu pleroma mix pleroma.instance gen
+sudo -Hu pleroma MIX_ENV=prod mix pleroma.instance gen
 ```

 * Then run the database migrations:
@@ -54,7 +54,7 @@ Configure Pleroma. Note that you need a domain name at this point:
 ```
 $ cd /home/pleroma/pleroma
 $ mix deps.get # Enter "y" when asked to install Hex
-$ mix pleroma.instance gen # You will be asked a few questions here.
+$ MIX_ENV=prod mix pleroma.instance gen # You will be asked a few questions here.
 $ cp config/generated_config.exs config/prod.secret.exs
 ```

@@ -135,7 +135,7 @@ pleroma$ mix deps.get
 * Generate the configuration:

 ```shell
-pleroma$ mix pleroma.instance gen
+pleroma$ MIX_ENV=prod mix pleroma.instance gen
 ```

 * Answer with `yes` if it asks you to install `rebar3`.

@@ -71,7 +71,7 @@ Configure Pleroma. Note that you need a domain name at this point:
 ```
 $ cd /home/pleroma/pleroma
 $ mix deps.get
-$ mix pleroma.instance gen # You will be asked a few questions here.
+$ MIX_ENV=prod mix pleroma.instance gen # You will be asked a few questions here.
 ```

 Since Postgres is configured, we can now initialize the database. There should

@@ -239,7 +239,7 @@ Enter a shell as \_pleroma (as root `su _pleroma -`) and enter pleroma's install
 Then follow the main installation guide:

   * run `mix deps.get`
-  * run `mix pleroma.instance gen` and enter your instance's information when asked
+  * run `MIX_ENV=prod mix pleroma.instance gen` and enter your instance's information when asked
   * copy config/generated\_config.exs to config/prod.secret.exs. The default values should be sufficient but you should edit it and check that everything seems OK.
   * exit your current shell back to a root one and run `psql -U postgres -f /home/_pleroma/pleroma/config/setup_db.psql` to setup the database.
   * return to a \_pleroma shell into pleroma's installation directory (`su _pleroma -;cd ~/pleroma`) and run `MIX_ENV=prod mix ecto.migrate`
@@ -290,7 +290,7 @@ nginx -t

 ## Create your first user and set as admin
 ```sh
-cd /opt/pleroma/bin
+cd /opt/pleroma
 su pleroma -s $SHELL -lc "./bin/pleroma_ctl user new joeuser joeuser@sld.tld --admin"
 ```
 This will create an account withe the username of 'joeuser' with the email address of joeuser@sld.tld, and set that user's account as an admin. This will result in a link that you can paste into the browser, which logs you in and enables you to set the password.
@@ -27,7 +27,7 @@ def run(["migrate_from_db" | options]) do

     {opts, _} =
       OptionParser.parse!(options,
-        strict: [env: :string, delete: :boolean],
+        strict: [env: :string, delete: :boolean, path: :string],
         aliases: [d: :delete]
       )

@@ -259,18 +259,43 @@ defp create(group, settings) do
   defp migrate_from_db(opts) do
     env = opts[:env] || Pleroma.Config.get(:env)

+    filename = "#{env}.exported_from_db.secret.exs"
+
     config_path =
-      if Pleroma.Config.get(:release) do
+      cond do
+        opts[:path] ->
+          opts[:path]
+
+        Pleroma.Config.get(:release) ->
           :config_path
           |> Pleroma.Config.get()
           |> Path.dirname()
-      else
+
+        true ->
           "config"
       end
-      |> Path.join("#{env}.exported_from_db.secret.exs")
+      |> Path.join(filename)

-    file = File.open!(config_path, [:write, :utf8])
+    with {:ok, file} <- File.open(config_path, [:write, :utf8]) do
+      write_config(file, config_path, opts)
+      shell_info("Database configuration settings have been exported to #{config_path}")
+    else
+      _ ->
+        shell_error("Impossible to save settings to this directory #{Path.dirname(config_path)}")
+        tmp_config_path = Path.join(System.tmp_dir!(), filename)
+        file = File.open!(tmp_config_path)
+
+        shell_info(
+          "Saving database configuration settings to #{tmp_config_path}. Copy it to the #{
+            Path.dirname(config_path)
+          } manually."
+        )
+
+        write_config(file, tmp_config_path, opts)
+    end
+  end

+  defp write_config(file, path, opts) do
     IO.write(file, config_header())

     ConfigDB
@@ -278,11 +303,7 @@ defp migrate_from_db(opts) do
     |> Enum.each(&write_and_delete(&1, file, opts[:delete]))

     :ok = File.close(file)
-    System.cmd("mix", ["format", config_path])
-
-    shell_info(
-      "Database configuration settings have been exported to config/#{env}.exported_from_db.secret.exs"
-    )
+    System.cmd("mix", ["format", path])
   end

   if Code.ensure_loaded?(Config.Reader) do
@@ -8,10 +8,13 @@ defmodule Mix.Tasks.Pleroma.Database do
   alias Pleroma.Object
   alias Pleroma.Repo
   alias Pleroma.User
+
   require Logger
   require Pleroma.Constants
+
   import Ecto.Query
   import Mix.Pleroma
+
   use Mix.Task

   @shortdoc "A collection of database related tasks"
@@ -214,4 +217,32 @@ def run(["set_text_search_config", tsconfig]) do
       shell_info('Done.')
     end
   end
+
+  # Rolls back a specific migration (leaving subsequent migrations applied).
+  # WARNING: imposes a risk of unrecoverable data loss — proceed at your own responsibility.
+  # Based on https://stackoverflow.com/a/53825840
+  def run(["rollback", version]) do
+    prompt = "SEVERE WARNING: this operation may result in unrecoverable data loss. Continue?"
+
+    if shell_prompt(prompt, "n") in ~w(Yn Y y) do
+      {_, result, _} =
+        Ecto.Migrator.with_repo(Pleroma.Repo, fn repo ->
+          version = String.to_integer(version)
+          re = ~r/^#{version}_.*\.exs/
+          path = Ecto.Migrator.migrations_path(repo)
+
+          with {_, "" <> file} <- {:find, Enum.find(File.ls!(path), &String.match?(&1, re))},
+               {_, [{mod, _} | _]} <- {:compile, Code.compile_file(Path.join(path, file))},
+               {_, :ok} <- {:rollback, Ecto.Migrator.down(repo, version, mod)} do
+            {:ok, "Reversed migration: #{file}"}
+          else
+            {:find, _} -> {:error, "No migration found with version prefix: #{version}"}
+            {:compile, e} -> {:error, "Problem compiling migration module: #{inspect(e)}"}
+            {:rollback, e} -> {:error, "Problem reversing migration: #{inspect(e)}"}
+          end
+        end)
+
+      shell_info(inspect(result))
+    end
+  end
 end
@@ -113,6 +113,7 @@ def with_preloaded_bookmark(query, %User{} = user) do
     from([a] in query,
       left_join: b in Bookmark,
       on: b.user_id == ^user.id and b.activity_id == a.id,
+      as: :bookmark,
       preload: [bookmark: b]
     )
   end

@@ -123,6 +124,7 @@ def with_preloaded_report_notes(query) do
     from([a] in query,
       left_join: r in ReportNote,
       on: a.id == r.activity_id,
+      as: :report_note,
       preload: [report_notes: r]
     )
   end
@@ -182,38 +184,46 @@ def get_by_ap_id_with_object(ap_id) do
     |> Repo.one()
   end

-  @spec get_by_id(String.t()) :: Activity.t() | nil
-  def get_by_id(id) do
-    case FlakeId.flake_id?(id) do
-      true ->
-        Activity
-        |> where([a], a.id == ^id)
-        |> restrict_deactivated_users()
-        |> Repo.one()
-
-      _ ->
-        nil
-    end
-  end
-
-  def get_by_id_with_user_actor(id) do
-    case FlakeId.flake_id?(id) do
-      true ->
-        Activity
-        |> where([a], a.id == ^id)
-        |> with_preloaded_user_actor()
-        |> Repo.one()
-
-      _ ->
-        nil
-    end
-  end
-
-  def get_by_id_with_object(id) do
-    Activity
-    |> where(id: ^id)
-    |> with_preloaded_object()
-    |> Repo.one()
-  end
+  @doc """
+  Gets activity by ID, doesn't load activities from deactivated actors by default.
+  """
+  @spec get_by_id(String.t(), keyword()) :: t() | nil
+  def get_by_id(id, opts \\ [filter: [:restrict_deactivated]]), do: get_by_id_with_opts(id, opts)
+
+  @spec get_by_id_with_user_actor(String.t()) :: t() | nil
+  def get_by_id_with_user_actor(id), do: get_by_id_with_opts(id, preload: [:user_actor])
+
+  @spec get_by_id_with_object(String.t()) :: t() | nil
+  def get_by_id_with_object(id), do: get_by_id_with_opts(id, preload: [:object])
+
+  defp get_by_id_with_opts(id, opts) do
+    if FlakeId.flake_id?(id) do
+      query = Queries.by_id(id)
+
+      with_filters_query =
+        if is_list(opts[:filter]) do
+          Enum.reduce(opts[:filter], query, fn
+            {:type, type}, acc -> Queries.by_type(acc, type)
+            :restrict_deactivated, acc -> restrict_deactivated_users(acc)
+            _, acc -> acc
+          end)
+        else
+          query
+        end
+
+      with_preloads_query =
+        if is_list(opts[:preload]) do
+          Enum.reduce(opts[:preload], with_filters_query, fn
+            :user_actor, acc -> with_preloaded_user_actor(acc)
+            :object, acc -> with_preloaded_object(acc)
+            _, acc -> acc
+          end)
+        else
+          with_filters_query
+        end
+
+      Repo.one(with_preloads_query)
+    end
+  end

   def all_by_ids_with_object(ids) do
@@ -267,6 +277,11 @@ def get_create_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do

   def get_create_by_object_ap_id_with_object(_), do: nil

+  @spec create_by_id_with_object(String.t()) :: t() | nil
+  def create_by_id_with_object(id) do
+    get_by_id_with_opts(id, preload: [:object], filter: [type: "Create"])
+  end
+
   defp get_in_reply_to_activity_from_object(%Object{data: %{"inReplyTo" => ap_id}}) do
     get_create_by_object_ap_id_with_object(ap_id)
   end
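A short hedged sketch of how callers use the reworked, options-based lookup API above; the ID value is a placeholder.

```elixir
alias Pleroma.Activity

id = "..."  # placeholder FlakeId string

# Default behaviour: activities from deactivated actors are filtered out.
Activity.get_by_id(id)

# Opt out of that filter and preload the object in one call.
Activity.get_by_id(id, filter: [], preload: [:object])

# Only match Create activities, with the object preloaded.
Activity.create_by_id_with_object(id)
```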
@@ -366,12 +381,6 @@ def direct_conversation_id(activity, for_user) do
     end
   end

-  @spec pinned_by_actor?(Activity.t()) :: boolean()
-  def pinned_by_actor?(%Activity{} = activity) do
-    actor = user_actor(activity)
-    activity.id in actor.pinned_activities
-  end
-
   @spec get_by_object_ap_id_with_object(String.t()) :: t() | nil
   def get_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do
     ap_id
@@ -382,4 +391,13 @@ def get_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do
   end

   def get_by_object_ap_id_with_object(_), do: nil
+
+  @spec add_by_params_query(String.t(), String.t(), String.t()) :: Ecto.Query.t()
+  def add_by_params_query(object_id, actor, target) do
+    object_id
+    |> Queries.by_object_id()
+    |> Queries.by_type("Add")
+    |> Queries.by_actor(actor)
+    |> where([a], fragment("?->>'target' = ?", a.data, ^target))
+  end
 end
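A hedged sketch of how the new query helper above might be consumed. The AP IDs are placeholders, and the "pinned post" framing is an assumption about its intended caller rather than something stated in this diff.

```elixir
# Find the `Add` activity that added a given object to a user's featured collection.
alias Pleroma.Activity
alias Pleroma.Repo

object_ap_id = "https://example.com/objects/..."
actor_ap_id = "https://example.com/users/..."
target_ap_id = "https://example.com/users/.../collections/featured"

object_ap_id
|> Activity.add_by_params_query(actor_ap_id, target_ap_id)
|> Repo.one()
```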
@@ -48,14 +48,12 @@ defp item_creation_tags(tags, _, _) do
     tags
   end

-  defp hashtags_to_topics(%{data: %{"tag" => tags}}) do
-    tags
-    |> Enum.filter(&is_bitstring(&1))
-    |> Enum.map(fn tag -> "hashtag:" <> tag end)
+  defp hashtags_to_topics(object) do
+    object
+    |> Object.hashtags()
+    |> Enum.map(fn hashtag -> "hashtag:" <> hashtag end)
   end

-  defp hashtags_to_topics(_), do: []
-
   defp remote_topics(%{local: true}), do: []

   defp remote_topics(%{actor: actor}) when is_binary(actor),
@@ -14,6 +14,11 @@ defmodule Pleroma.Activity.Queries do
   alias Pleroma.Activity
   alias Pleroma.User

+  @spec by_id(query(), String.t()) :: query()
+  def by_id(query \\ Activity, id) do
+    from(a in query, where: a.id == ^id)
+  end
+
   @spec by_ap_id(query, String.t()) :: query
   def by_ap_id(query \\ Activity, ap_id) do
     from(
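Because `by_id/2` composes with the other helpers in this module, a small hedged sketch of such a composition; the ID is a placeholder.

```elixir
alias Pleroma.Activity.Queries
alias Pleroma.Repo

activity_id = "..."  # placeholder FlakeId

activity_id
|> Queries.by_id()
|> Queries.by_type("Create")
|> Repo.one()
```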
@@ -103,9 +103,7 @@ def start(_type, _args) do
         task_children(@mix_env) ++
         dont_run_in_test(@mix_env) ++
         chat_child(chat_enabled?()) ++
-        [
-          Pleroma.Gopher.Server
-        ]
+        [Pleroma.Gopher.Server]

     # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
     # for other strategies and supported options
@@ -230,6 +228,12 @@ defp dont_run_in_test(_) do
          keys: :duplicate,
          partitions: System.schedulers_online()
        ]}
+    ] ++ background_migrators()
+  end
+
+  defp background_migrators do
+    [
+      Pleroma.Migrators.HashtagsTableMigrator
     ]
   end

@@ -34,15 +34,16 @@ defp handle_result({:error, message}), do: raise(VerifyError, message: message)
   defp check_welcome_message_config!(:ok) do
     if Pleroma.Config.get([:welcome, :email, :enabled], false) and
          not Pleroma.Emails.Mailer.enabled?() do
-      Logger.error("""
-      To send welcome email do you need to enable mail.
-      \nconfig :pleroma, Pleroma.Emails.Mailer, enabled: true
-      """)
-
-      {:error, "The mail disabled."}
-    else
-      :ok
+      Logger.warn("""
+      To send welcome emails, you need to enable the mailer.
+      Welcome emails will NOT be sent with the current config.
+
+      Enable the mailer:
+        config :pleroma, Pleroma.Emails.Mailer, enabled: true
+      """)
     end
+
+    :ok
   end

   defp check_welcome_message_config!(result), do: result
@@ -51,18 +52,21 @@ defp check_welcome_message_config!(result), do: result
   #
   def check_confirmation_accounts!(:ok) do
     if Pleroma.Config.get([:instance, :account_activation_required]) &&
-         not Pleroma.Config.get([Pleroma.Emails.Mailer, :enabled]) do
-      Logger.error(
-        "Account activation enabled, but no Mailer settings enabled.\n" <>
-          "Please set config :pleroma, :instance, account_activation_required: false\n" <>
-          "Otherwise setup and enable Mailer."
-      )
-
-      {:error,
-       "Account activation enabled, but Mailer is disabled. Cannot send confirmation emails."}
-    else
-      :ok
+         not Pleroma.Emails.Mailer.enabled?() do
+      Logger.warn("""
+      Account activation is required, but the mailer is disabled.
+      Users will NOT be able to confirm their accounts with this config.
+      Either disable account activation or enable the mailer.
+
+      Disable account activation:
+        config :pleroma, :instance, account_activation_required: false
+
+      Enable the mailer:
+        config :pleroma, Pleroma.Emails.Mailer, enabled: true
+      """)
     end
+
+    :ok
   end

   def check_confirmation_accounts!(result), do: result
@@ -160,9 +164,11 @@ defp do_check_rum!(setting, migrate) do

   defp check_system_commands!(:ok) do
     filter_commands_statuses = [
-      check_filter(Pleroma.Upload.Filters.Exiftool, "exiftool"),
-      check_filter(Pleroma.Upload.Filters.Mogrify, "mogrify"),
-      check_filter(Pleroma.Upload.Filters.Mogrifun, "mogrify")
+      check_filter(Pleroma.Upload.Filter.Exiftool, "exiftool"),
+      check_filter(Pleroma.Upload.Filter.Mogrify, "mogrify"),
+      check_filter(Pleroma.Upload.Filter.Mogrifun, "mogrify"),
+      check_filter(Pleroma.Upload.Filter.AnalyzeMetadata, "mogrify"),
+      check_filter(Pleroma.Upload.Filter.AnalyzeMetadata, "convert")
     ]

     preview_proxy_commands_status =
@@ -99,4 +99,8 @@ def restrict_unauthenticated_access?(resource, kind) do
   def oauth_consumer_strategies, do: get([:auth, :oauth_consumer_strategies], [])

   def oauth_consumer_enabled?, do: oauth_consumer_strategies() != []
+
+  def feature_enabled?(feature_name) do
+    get([:features, feature_name]) not in [nil, false, :disabled, :auto]
+  end
 end
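A hedged usage sketch for the new helper, using the `:improved_hashtag_timeline` flag added to the defaults earlier in this diff.

```elixir
# The flag counts as enabled only when explicitly set (for example by the admin,
# or when HashtagsTableMigrator finishes), since :auto and :disabled are treated as off.
if Pleroma.Config.feature_enabled?(:improved_hashtag_timeline) do
  # query the dedicated `hashtags` table
  :hashtags_table
else
  # fall back to object-embedded hashtags
  :embedded_hashtags
end
```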
@@ -1,6 +1,6 @@
 defmodule Pleroma.Config.ReleaseRuntimeProvider do
   @moduledoc """
-  Imports `runtime.exs` and `{env}.exported_from_db.secret.exs` for elixir releases.
+  Imports runtime config and `{env}.exported_from_db.secret.exs` for releases.
   """
   @behaviour Config.Provider

@@ -8,10 +8,11 @@ defmodule Pleroma.Config.ReleaseRuntimeProvider do
   def init(opts), do: opts

   @impl true
-  def load(config, _opts) do
+  def load(config, opts) do
     with_defaults = Config.Reader.merge(config, Pleroma.Config.Holder.release_defaults())

-    config_path = System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs"
+    config_path =
+      opts[:config_path] || System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs"

     with_runtime_config =
       if File.exists?(config_path) do
@@ -24,7 +25,7 @@ def load(config, _opts) do
         warning = [
           IO.ANSI.red(),
           IO.ANSI.bright(),
-          "!!! #{config_path} not found! Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file",
+          "!!! Config path is not declared! Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file",
           IO.ANSI.reset()
         ]

@@ -33,13 +34,14 @@ def load(config, _opts) do
       end

     exported_config_path =
+      opts[:exported_config_path] ||
         config_path
         |> Path.dirname()
-        |> Path.join("prod.exported_from_db.secret.exs")
+        |> Path.join("#{Pleroma.Config.get(:env)}.exported_from_db.secret.exs")

     with_exported =
       if File.exists?(exported_config_path) do
-        exported_config = Config.Reader.read!(with_runtime_config)
+        exported_config = Config.Reader.read!(exported_config_path)
         Config.Reader.merge(with_runtime_config, exported_config)
       else
         with_runtime_config
@@ -387,6 +387,6 @@ defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do
   @spec module_name?(String.t()) :: boolean()
   def module_name?(string) do
     Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or
-      string in ["Oban", "Ueberauth", "ExSyslogger"]
+      string in ["Oban", "Ueberauth", "ExSyslogger", "ConcurrentLimiter"]
   end
 end
lib/pleroma/data_migration.ex (new file, 45 lines)

@@ -0,0 +1,45 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.DataMigration do
+  use Ecto.Schema
+
+  alias Pleroma.DataMigration
+  alias Pleroma.DataMigration.State
+  alias Pleroma.Repo
+
+  import Ecto.Changeset
+  import Ecto.Query
+
+  schema "data_migrations" do
+    field(:name, :string)
+    field(:state, State, default: :pending)
+    field(:feature_lock, :boolean, default: false)
+    field(:params, :map, default: %{})
+    field(:data, :map, default: %{})
+
+    timestamps()
+  end
+
+  def changeset(data_migration, params \\ %{}) do
+    data_migration
+    |> cast(params, [:name, :state, :feature_lock, :params, :data])
+    |> validate_required([:name])
+    |> unique_constraint(:name)
+  end
+
+  def update_one_by_id(id, params \\ %{}) do
+    with {1, _} <-
+           from(dm in DataMigration, where: dm.id == ^id)
+           |> Repo.update_all(set: params) do
+      :ok
+    end
+  end
+
+  def get_by_name(name) do
+    Repo.get_by(DataMigration, name: name)
+  end
+
+  def populate_hashtags_table, do: get_by_name("populate_hashtags_table")
+end
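A small hedged sketch of how the new schema's helpers can be combined, for example by code tracking the hashtag migration; the `feature_lock` update is only an illustration.

```elixir
# Fetch the row tracking the hashtag migration and flip its feature lock.
migration = Pleroma.DataMigration.populate_hashtags_table()

if migration do
  :ok = Pleroma.DataMigration.update_one_by_id(migration.id, feature_lock: true)
end
```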
@@ -9,7 +9,6 @@ defmodule Pleroma.Delivery do
   alias Pleroma.Object
   alias Pleroma.Repo
   alias Pleroma.User
-  alias Pleroma.User

   import Ecto.Changeset
   import Ecto.Query
@@ -1,256 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-#
-# This file is derived from Earmark, under the following copyright:
-# Copyright © 2014 Dave Thomas, The Pragmatic Programmers
-# SPDX-License-Identifier: Apache-2.0
-# Upstream: https://github.com/pragdave/earmark/blob/master/lib/earmark/html_renderer.ex
-defmodule Pleroma.EarmarkRenderer do
-  @moduledoc false
-
-  alias Earmark.Block
-  alias Earmark.Context
-  alias Earmark.HtmlRenderer
-  alias Earmark.Options
-
-  import Earmark.Inline, only: [convert: 3]
-  import Earmark.Helpers.HtmlHelpers
-  import Earmark.Message, only: [add_messages_from: 2, get_messages: 1, set_messages: 2]
-  import Earmark.Context, only: [append: 2, set_value: 2]
-  import Earmark.Options, only: [get_mapper: 1]
-
-  @doc false
-  def render(blocks, %Context{options: %Options{}} = context) do
-    messages = get_messages(context)
-
-    {contexts, html} =
-      get_mapper(context.options).(
-        blocks,
-        &render_block(&1, put_in(context.options.messages, []))
-      )
-      |> Enum.unzip()
-
-    all_messages =
-      contexts
-      |> Enum.reduce(messages, fn ctx, messages1 -> messages1 ++ get_messages(ctx) end)
-
-    {put_in(context.options.messages, all_messages), html |> IO.iodata_to_binary()}
-  end
-
-  #############
-  # Paragraph #
-  #############
-  defp render_block(%Block.Para{lnb: lnb, lines: lines, attrs: attrs}, context) do
-    lines = convert(lines, lnb, context)
-    add_attrs(lines, "<p>#{lines.value}</p>", attrs, [], lnb)
-  end
-
-  ########
-  # Html #
-  ########
-  defp render_block(%Block.Html{html: html}, context) do
-    {context, html}
-  end
-
-  defp render_block(%Block.HtmlComment{lines: lines}, context) do
-    {context, lines}
-  end
-
-  defp render_block(%Block.HtmlOneline{html: html}, context) do
-    {context, html}
-  end
-
-  #########
-  # Ruler #
-  #########
-  defp render_block(%Block.Ruler{lnb: lnb, attrs: attrs}, context) do
-    add_attrs(context, "<hr />", attrs, [], lnb)
-  end
-
-  ###########
-  # Heading #
-  ###########
-  defp render_block(
-         %Block.Heading{lnb: lnb, level: level, content: content, attrs: attrs},
-         context
-       ) do
-    converted = convert(content, lnb, context)
-    html = "<h#{level}>#{converted.value}</h#{level}>"
-    add_attrs(converted, html, attrs, [], lnb)
-  end
-
-  ##############
-  # Blockquote #
-  ##############
-
-  defp render_block(%Block.BlockQuote{lnb: lnb, blocks: blocks, attrs: attrs}, context) do
-    {context1, body} = render(blocks, context)
-    html = "<blockquote>#{body}</blockquote>"
-    add_attrs(context1, html, attrs, [], lnb)
-  end
-
-  #########
-  # Table #
-  #########
-
-  defp render_block(
-         %Block.Table{lnb: lnb, header: header, rows: rows, alignments: aligns, attrs: attrs},
-         context
-       ) do
-    {context1, html} = add_attrs(context, "<table>", attrs, [], lnb)
-    context2 = set_value(context1, html)
-
-    context3 =
-      if header do
-        append(add_trs(append(context2, "<thead>"), [header], "th", aligns, lnb), "</thead>")
-      else
-        # Maybe an error, needed append(context, html)
-        context2
-      end
-
-    context4 = append(add_trs(append(context3, "<tbody>"), rows, "td", aligns, lnb), "</tbody>")
-
-    {context4, [context4.value, "</table>"]}
-  end
-
-  ########
-  # Code #
-  ########
-
-  defp render_block(
-         %Block.Code{lnb: lnb, language: language, attrs: attrs} = block,
-         %Context{options: options} = context
-       ) do
-    class =
-      if language, do: ~s{ class="#{code_classes(language, options.code_class_prefix)}"}, else: ""
-
-    tag = ~s[<pre><code#{class}>]
-    lines = options.render_code.(block)
-    html = ~s[#{tag}#{lines}</code></pre>]
-    add_attrs(context, html, attrs, [], lnb)
-  end
-
-  #########
-  # Lists #
-  #########
-
-  defp render_block(
-         %Block.List{lnb: lnb, type: type, blocks: items, attrs: attrs, start: start},
-         context
-       ) do
-    {context1, content} = render(items, context)
-    html = "<#{type}#{start}>#{content}</#{type}>"
-    add_attrs(context1, html, attrs, [], lnb)
-  end
-
-  # format a single paragraph list item, and remove the para tags
-  defp render_block(
-         %Block.ListItem{lnb: lnb, blocks: blocks, spaced: false, attrs: attrs},
-         context
-       )
-       when length(blocks) == 1 do
-    {context1, content} = render(blocks, context)
-    content = Regex.replace(~r{</?p>}, content, "")
-    html = "<li>#{content}</li>"
-    add_attrs(context1, html, attrs, [], lnb)
-  end
-
-  # format a spaced list item
-  defp render_block(%Block.ListItem{lnb: lnb, blocks: blocks, attrs: attrs}, context) do
-    {context1, content} = render(blocks, context)
-    html = "<li>#{content}</li>"
-    add_attrs(context1, html, attrs, [], lnb)
-  end
-
-  ##################
-  # Footnote Block #
-  ##################
-
-  defp render_block(%Block.FnList{blocks: footnotes}, context) do
-    items =
Enum.map(footnotes, fn note ->
|
|
||||||
blocks = append_footnote_link(note)
|
|
||||||
%Block.ListItem{attrs: "#fn:#{note.number}", type: :ol, blocks: blocks}
|
|
||||||
end)
|
|
||||||
|
|
||||||
{context1, html} = render_block(%Block.List{type: :ol, blocks: items}, context)
|
|
||||||
{context1, Enum.join([~s[<div class="footnotes">], "<hr />", html, "</div>"])}
|
|
||||||
end
|
|
||||||
|
|
||||||
#######################################
|
|
||||||
# Isolated IALs are rendered as paras #
|
|
||||||
#######################################
|
|
||||||
|
|
||||||
defp render_block(%Block.Ial{verbatim: verbatim}, context) do
|
|
||||||
{context, "<p>{:#{verbatim}}</p>"}
|
|
||||||
end
|
|
||||||
|
|
||||||
####################
|
|
||||||
# IDDef is ignored #
|
|
||||||
####################
|
|
||||||
|
|
||||||
defp render_block(%Block.IdDef{}, context), do: {context, ""}
|
|
||||||
|
|
||||||
#####################################
|
|
||||||
# And here are the inline renderers #
|
|
||||||
#####################################
|
|
||||||
|
|
||||||
defdelegate br, to: HtmlRenderer
|
|
||||||
defdelegate codespan(text), to: HtmlRenderer
|
|
||||||
defdelegate em(text), to: HtmlRenderer
|
|
||||||
defdelegate strong(text), to: HtmlRenderer
|
|
||||||
defdelegate strikethrough(text), to: HtmlRenderer
|
|
||||||
|
|
||||||
defdelegate link(url, text), to: HtmlRenderer
|
|
||||||
defdelegate link(url, text, title), to: HtmlRenderer
|
|
||||||
|
|
||||||
defdelegate image(path, alt, title), to: HtmlRenderer
|
|
||||||
|
|
||||||
defdelegate footnote_link(ref, backref, number), to: HtmlRenderer
|
|
||||||
|
|
||||||
# Table rows
|
|
||||||
defp add_trs(context, rows, tag, aligns, lnb) do
|
|
||||||
numbered_rows =
|
|
||||||
rows
|
|
||||||
|> Enum.zip(Stream.iterate(lnb, &(&1 + 1)))
|
|
||||||
|
|
||||||
numbered_rows
|
|
||||||
|> Enum.reduce(context, fn {row, lnb}, ctx ->
|
|
||||||
append(add_tds(append(ctx, "<tr>"), row, tag, aligns, lnb), "</tr>")
|
|
||||||
end)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp add_tds(context, row, tag, aligns, lnb) do
|
|
||||||
Enum.reduce(1..length(row), context, add_td_fn(row, tag, aligns, lnb))
|
|
||||||
end
|
|
||||||
|
|
||||||
defp add_td_fn(row, tag, aligns, lnb) do
|
|
||||||
fn n, ctx ->
|
|
||||||
style =
|
|
||||||
case Enum.at(aligns, n - 1, :default) do
|
|
||||||
:default -> ""
|
|
||||||
align -> " style=\"text-align: #{align}\""
|
|
||||||
end
|
|
||||||
|
|
||||||
col = Enum.at(row, n - 1)
|
|
||||||
converted = convert(col, lnb, set_messages(ctx, []))
|
|
||||||
append(add_messages_from(ctx, converted), "<#{tag}#{style}>#{converted.value}</#{tag}>")
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
###############################
|
|
||||||
# Append Footnote Return Link #
|
|
||||||
###############################
|
|
||||||
|
|
||||||
defdelegate append_footnote_link(note), to: HtmlRenderer
|
|
||||||
defdelegate append_footnote_link(note, fnlink), to: HtmlRenderer
|
|
||||||
|
|
||||||
defdelegate render_code(lines), to: HtmlRenderer
|
|
||||||
|
|
||||||
defp code_classes(language, prefix) do
|
|
||||||
["" | String.split(prefix || "")]
|
|
||||||
|> Enum.map(fn pfx -> "#{pfx}#{language}" end)
|
|
||||||
|> Enum.join(" ")
|
|
||||||
end
|
|
||||||
end
|
|
|
@@ -17,3 +17,11 @@
   follow_accept: 2,
   follow_reject: 3
 )
+
+defenum(Pleroma.DataMigration.State,
+  pending: 1,
+  running: 2,
+  complete: 3,
+  failed: 4,
+  manual: 5
+)
@@ -121,6 +121,10 @@ def mentions_escape(text, options \\ []) do
     end
   end
 
+  def markdown_to_html(text) do
+    Earmark.as_html!(text, %Earmark.Options{compact_output: true})
+  end
+
   def html_escape({text, mentions, hashtags}, type) do
     {html_escape(text, type), mentions, hashtags}
   end
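For reference, a rough sketch of what the new markdown_to_html/1 helper produces; the exact HTML and whitespace are Earmark's output under compact_output and are only illustrative here:

    # IEx sketch (output shape approximate, not a guaranteed rendering)
    iex> Pleroma.Formatter.markdown_to_html("**hello** #pleroma")
    "<p><strong>hello</strong> #pleroma</p>"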
lib/pleroma/hashtag.ex (new file, 106 lines)
@@ -0,0 +1,106 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Hashtag do
  use Ecto.Schema

  import Ecto.Changeset
  import Ecto.Query

  alias Ecto.Multi
  alias Pleroma.Hashtag
  alias Pleroma.Object
  alias Pleroma.Repo

  schema "hashtags" do
    field(:name, :string)

    many_to_many(:objects, Object, join_through: "hashtags_objects", on_replace: :delete)

    timestamps()
  end

  def normalize_name(name) do
    name
    |> String.downcase()
    |> String.trim()
  end

  def get_or_create_by_name(name) do
    changeset = changeset(%Hashtag{}, %{name: name})

    Repo.insert(
      changeset,
      on_conflict: [set: [name: get_field(changeset, :name)]],
      conflict_target: :name,
      returning: true
    )
  end

  def get_or_create_by_names(names) when is_list(names) do
    names = Enum.map(names, &normalize_name/1)
    timestamp = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)

    structs =
      Enum.map(names, fn name ->
        %Hashtag{}
        |> changeset(%{name: name})
        |> Map.get(:changes)
        |> Map.merge(%{inserted_at: timestamp, updated_at: timestamp})
      end)

    try do
      with {:ok, %{query_op: hashtags}} <-
             Multi.new()
             |> Multi.insert_all(:insert_all_op, Hashtag, structs,
               on_conflict: :nothing,
               conflict_target: :name
             )
             |> Multi.run(:query_op, fn _repo, _changes ->
               {:ok, Repo.all(from(ht in Hashtag, where: ht.name in ^names))}
             end)
             |> Repo.transaction() do
        {:ok, hashtags}
      else
        {:error, _name, value, _changes_so_far} -> {:error, value}
      end
    rescue
      e -> {:error, e}
    end
  end

  def changeset(%Hashtag{} = struct, params) do
    struct
    |> cast(params, [:name])
    |> update_change(:name, &normalize_name/1)
    |> validate_required([:name])
    |> unique_constraint(:name)
  end

  def unlink(%Object{id: object_id}) do
    with {_, hashtag_ids} <-
           from(hto in "hashtags_objects",
             where: hto.object_id == ^object_id,
             select: hto.hashtag_id
           )
           |> Repo.delete_all(),
         {:ok, unreferenced_count} <- delete_unreferenced(hashtag_ids) do
      {:ok, length(hashtag_ids), unreferenced_count}
    end
  end

  @delete_unreferenced_query """
  DELETE FROM hashtags WHERE id IN
    (SELECT hashtags.id FROM hashtags
      LEFT OUTER JOIN hashtags_objects
        ON hashtags_objects.hashtag_id = hashtags.id
      WHERE hashtags_objects.hashtag_id IS NULL AND hashtags.id = ANY($1));
  """

  def delete_unreferenced(ids) do
    with {:ok, %{num_rows: deleted_count}} <- Repo.query(@delete_unreferenced_query, [ids]) do
      {:ok, deleted_count}
    end
  end
end
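A minimal IEx sketch of how the new Pleroma.Hashtag helpers behave, assuming a configured database; names are normalized before the upsert:

    iex> Pleroma.Hashtag.normalize_name("  PlEroma ")
    "pleroma"

    # Upserts by unique name and returns the persisted records (order not guaranteed).
    iex> {:ok, hashtags} = Pleroma.Hashtag.get_or_create_by_names(["Pleroma", "elixir"])
    iex> hashtags |> Enum.map(& &1.name) |> Enum.sort()
    ["elixir", "pleroma"]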
lib/pleroma/migrators/hashtags_table_migrator.ex (new file, 208 lines)
@@ -0,0 +1,208 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Migrators.HashtagsTableMigrator do
  defmodule State do
    use Pleroma.Migrators.Support.BaseMigratorState

    @impl Pleroma.Migrators.Support.BaseMigratorState
    defdelegate data_migration(), to: Pleroma.DataMigration, as: :populate_hashtags_table
  end

  use Pleroma.Migrators.Support.BaseMigrator

  alias Pleroma.Hashtag
  alias Pleroma.Migrators.Support.BaseMigrator
  alias Pleroma.Object

  @impl BaseMigrator
  def feature_config_path, do: [:features, :improved_hashtag_timeline]

  @impl BaseMigrator
  def fault_rate_allowance, do: Config.get([:populate_hashtags_table, :fault_rate_allowance], 0)

  @impl BaseMigrator
  def perform do
    data_migration_id = data_migration_id()
    max_processed_id = get_stat(:max_processed_id, 0)

    Logger.info("Transferring embedded hashtags to `hashtags` (from oid: #{max_processed_id})...")

    query()
    |> where([object], object.id > ^max_processed_id)
    |> Repo.chunk_stream(100, :batches, timeout: :infinity)
    |> Stream.each(fn objects ->
      object_ids = Enum.map(objects, & &1.id)

      results = Enum.map(objects, &transfer_object_hashtags(&1))

      failed_ids =
        results
        |> Enum.filter(&(elem(&1, 0) == :error))
        |> Enum.map(&elem(&1, 1))

      # Count of objects with hashtags: `{:noop, id}` is returned for objects having other AS2 tags
      chunk_affected_count =
        results
        |> Enum.filter(&(elem(&1, 0) == :ok))
        |> length()

      for failed_id <- failed_ids do
        _ =
          Repo.query(
            "INSERT INTO data_migration_failed_ids(data_migration_id, record_id) " <>
              "VALUES ($1, $2) ON CONFLICT DO NOTHING;",
            [data_migration_id, failed_id]
          )
      end

      _ =
        Repo.query(
          "DELETE FROM data_migration_failed_ids " <>
            "WHERE data_migration_id = $1 AND record_id = ANY($2)",
          [data_migration_id, object_ids -- failed_ids]
        )

      max_object_id = Enum.at(object_ids, -1)

      put_stat(:max_processed_id, max_object_id)
      increment_stat(:iteration_processed_count, length(object_ids))
      increment_stat(:processed_count, length(object_ids))
      increment_stat(:failed_count, length(failed_ids))
      increment_stat(:affected_count, chunk_affected_count)
      put_stat(:records_per_second, records_per_second())
      persist_state()

      # A quick and dirty approach to controlling the load this background migration imposes
      sleep_interval = Config.get([:populate_hashtags_table, :sleep_interval_ms], 0)
      Process.sleep(sleep_interval)
    end)
    |> Stream.run()
  end

  @impl BaseMigrator
  def query do
    # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out)
    # Note: not checking activity type, expecting remove_non_create_objects_hashtags/_ to clean up
    from(
      object in Object,
      where:
        fragment("(?)->'tag' IS NOT NULL AND (?)->'tag' != '[]'::jsonb", object.data, object.data),
      select: %{
        id: object.id,
        tag: fragment("(?)->'tag'", object.data)
      }
    )
    |> join(:left, [o], hashtags_objects in fragment("SELECT object_id FROM hashtags_objects"),
      on: hashtags_objects.object_id == o.id
    )
    |> where([_o, hashtags_objects], is_nil(hashtags_objects.object_id))
  end

  @spec transfer_object_hashtags(Map.t()) :: {:noop | :ok | :error, integer()}
  defp transfer_object_hashtags(object) do
    embedded_tags = if Map.has_key?(object, :tag), do: object.tag, else: object.data["tag"]
    hashtags = Object.object_data_hashtags(%{"tag" => embedded_tags})

    if Enum.any?(hashtags) do
      transfer_object_hashtags(object, hashtags)
    else
      {:noop, object.id}
    end
  end

  defp transfer_object_hashtags(object, hashtags) do
    Repo.transaction(fn ->
      with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do
        maps = Enum.map(hashtag_records, &%{hashtag_id: &1.id, object_id: object.id})
        base_error = "ERROR when inserting hashtags_objects for object with id #{object.id}"

        try do
          with {rows_count, _} when is_integer(rows_count) <-
                 Repo.insert_all("hashtags_objects", maps, on_conflict: :nothing) do
            object.id
          else
            e ->
              Logger.error("#{base_error}: #{inspect(e)}")
              Repo.rollback(object.id)
          end
        rescue
          e ->
            Logger.error("#{base_error}: #{inspect(e)}")
            Repo.rollback(object.id)
        end
      else
        e ->
          error = "ERROR: could not create hashtags for object #{object.id}: #{inspect(e)}"
          Logger.error(error)
          Repo.rollback(object.id)
      end
    end)
  end

  @impl BaseMigrator
  def retry_failed do
    data_migration_id = data_migration_id()

    failed_objects_query()
    |> Repo.chunk_stream(100, :one)
    |> Stream.each(fn object ->
      with {res, _} when res != :error <- transfer_object_hashtags(object) do
        _ =
          Repo.query(
            "DELETE FROM data_migration_failed_ids " <>
              "WHERE data_migration_id = $1 AND record_id = $2",
            [data_migration_id, object.id]
          )
      end
    end)
    |> Stream.run()

    put_stat(:failed_count, failures_count())
    persist_state()

    force_continue()
  end

  defp failed_objects_query do
    from(o in Object)
    |> join(:inner, [o], dmf in fragment("SELECT * FROM data_migration_failed_ids"),
      on: dmf.record_id == o.id
    )
    |> where([_o, dmf], dmf.data_migration_id == ^data_migration_id())
    |> order_by([o], asc: o.id)
  end

  @doc """
  Service func to delete `hashtags_objects` for legacy objects not associated with Create activity.
  Also deletes unreferenced `hashtags` records (might occur after deletion of `hashtags_objects`).
  """
  def delete_non_create_activities_hashtags do
    hashtags_objects_cleanup_query = """
    DELETE FROM hashtags_objects WHERE object_id IN
      (SELECT DISTINCT objects.id FROM objects
        JOIN hashtags_objects ON hashtags_objects.object_id = objects.id LEFT JOIN activities
          ON COALESCE(activities.data->'object'->>'id', activities.data->>'object') =
            (objects.data->>'id')
          AND activities.data->>'type' = 'Create'
        WHERE activities.id IS NULL);
    """

    hashtags_cleanup_query = """
    DELETE FROM hashtags WHERE id IN
      (SELECT hashtags.id FROM hashtags
        LEFT OUTER JOIN hashtags_objects
          ON hashtags_objects.hashtag_id = hashtags.id
        WHERE hashtags_objects.hashtag_id IS NULL);
    """

    {:ok, %{num_rows: hashtags_objects_count}} =
      Repo.query(hashtags_objects_cleanup_query, [], timeout: :infinity)

    {:ok, %{num_rows: hashtags_count}} =
      Repo.query(hashtags_cleanup_query, [], timeout: :infinity)

    {:ok, hashtags_objects_count, hashtags_count}
  end
end
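Operationally, the migrator exposes a few convenience calls through the BaseMigrator behaviour it `use`s; a hedged IEx sketch for inspecting and retrying the background migration:

    alias Pleroma.Migrators.HashtagsTableMigrator

    HashtagsTableMigrator.state()          # current stats (processed/failed counts, etc.)
    HashtagsTableMigrator.failures_count() # rows recorded in data_migration_failed_ids
    HashtagsTableMigrator.retry_failed()   # re-attempts failed objects, then resumes the migration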
210
lib/pleroma/migrators/support/base_migrator.ex
Normal file
210
lib/pleroma/migrators/support/base_migrator.ex
Normal file
|
@ -0,0 +1,210 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Migrators.Support.BaseMigrator do
|
||||||
|
@moduledoc """
|
||||||
|
Base background migrator functionality.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@callback perform() :: any()
|
||||||
|
@callback retry_failed() :: any()
|
||||||
|
@callback feature_config_path() :: list(atom())
|
||||||
|
@callback query() :: Ecto.Query.t()
|
||||||
|
@callback fault_rate_allowance() :: integer() | float()
|
||||||
|
|
||||||
|
defmacro __using__(_opts) do
|
||||||
|
quote do
|
||||||
|
use GenServer
|
||||||
|
|
||||||
|
require Logger
|
||||||
|
|
||||||
|
import Ecto.Query
|
||||||
|
|
||||||
|
alias __MODULE__.State
|
||||||
|
alias Pleroma.Config
|
||||||
|
alias Pleroma.Repo
|
||||||
|
|
||||||
|
@behaviour Pleroma.Migrators.Support.BaseMigrator
|
||||||
|
|
||||||
|
defdelegate data_migration(), to: State
|
||||||
|
defdelegate data_migration_id(), to: State
|
||||||
|
defdelegate state(), to: State
|
||||||
|
defdelegate persist_state(), to: State, as: :persist_to_db
|
||||||
|
defdelegate get_stat(key, value \\ nil), to: State, as: :get_data_key
|
||||||
|
defdelegate put_stat(key, value), to: State, as: :put_data_key
|
||||||
|
defdelegate increment_stat(key, increment), to: State, as: :increment_data_key
|
||||||
|
|
||||||
|
@reg_name {:global, __MODULE__}
|
||||||
|
|
||||||
|
def whereis, do: GenServer.whereis(@reg_name)
|
||||||
|
|
||||||
|
def start_link(_) do
|
||||||
|
case whereis() do
|
||||||
|
nil ->
|
||||||
|
GenServer.start_link(__MODULE__, nil, name: @reg_name)
|
||||||
|
|
||||||
|
pid ->
|
||||||
|
{:ok, pid}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def init(_) do
|
||||||
|
{:ok, nil, {:continue, :init_state}}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_continue(:init_state, _state) do
|
||||||
|
{:ok, _} = State.start_link(nil)
|
||||||
|
|
||||||
|
data_migration = data_migration()
|
||||||
|
manual_migrations = Config.get([:instance, :manual_data_migrations], [])
|
||||||
|
|
||||||
|
cond do
|
||||||
|
Config.get(:env) == :test ->
|
||||||
|
update_status(:noop)
|
||||||
|
|
||||||
|
is_nil(data_migration) ->
|
||||||
|
message = "Data migration does not exist."
|
||||||
|
update_status(:failed, message)
|
||||||
|
Logger.error("#{__MODULE__}: #{message}")
|
||||||
|
|
||||||
|
data_migration.state == :manual or data_migration.name in manual_migrations ->
|
||||||
|
message = "Data migration is in manual execution or manual fix mode."
|
||||||
|
update_status(:manual, message)
|
||||||
|
Logger.warn("#{__MODULE__}: #{message}")
|
||||||
|
|
||||||
|
data_migration.state == :complete ->
|
||||||
|
on_complete(data_migration)
|
||||||
|
|
||||||
|
true ->
|
||||||
|
send(self(), :perform)
|
||||||
|
end
|
||||||
|
|
||||||
|
{:noreply, nil}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_info(:perform, state) do
|
||||||
|
State.reinit()
|
||||||
|
|
||||||
|
update_status(:running)
|
||||||
|
put_stat(:iteration_processed_count, 0)
|
||||||
|
put_stat(:started_at, NaiveDateTime.utc_now())
|
||||||
|
|
||||||
|
perform()
|
||||||
|
|
||||||
|
fault_rate = fault_rate()
|
||||||
|
put_stat(:fault_rate, fault_rate)
|
||||||
|
fault_rate_allowance = fault_rate_allowance()
|
||||||
|
|
||||||
|
cond do
|
||||||
|
fault_rate == 0 ->
|
||||||
|
set_complete()
|
||||||
|
|
||||||
|
is_float(fault_rate) and fault_rate <= fault_rate_allowance ->
|
||||||
|
message = """
|
||||||
|
Done with fault rate of #{fault_rate} which doesn't exceed #{fault_rate_allowance}.
|
||||||
|
Putting data migration to manual fix mode. Try running `#{__MODULE__}.retry_failed/0`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
Logger.warn("#{__MODULE__}: #{message}")
|
||||||
|
update_status(:manual, message)
|
||||||
|
on_complete(data_migration())
|
||||||
|
|
||||||
|
true ->
|
||||||
|
message = "Too many failures. Try running `#{__MODULE__}.retry_failed/0`."
|
||||||
|
Logger.error("#{__MODULE__}: #{message}")
|
||||||
|
update_status(:failed, message)
|
||||||
|
end
|
||||||
|
|
||||||
|
persist_state()
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp on_complete(data_migration) do
|
||||||
|
if data_migration.feature_lock || feature_state() == :disabled do
|
||||||
|
Logger.warn(
|
||||||
|
"#{__MODULE__}: migration complete but feature is locked; consider enabling."
|
||||||
|
)
|
||||||
|
|
||||||
|
:noop
|
||||||
|
else
|
||||||
|
Config.put(feature_config_path(), :enabled)
|
||||||
|
:ok
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@doc "Approximate count for current iteration (including processed records count)"
|
||||||
|
def count(force \\ false, timeout \\ :infinity) do
|
||||||
|
stored_count = get_stat(:count)
|
||||||
|
|
||||||
|
if stored_count && !force do
|
||||||
|
stored_count
|
||||||
|
else
|
||||||
|
processed_count = get_stat(:processed_count, 0)
|
||||||
|
max_processed_id = get_stat(:max_processed_id, 0)
|
||||||
|
query = where(query(), [entity], entity.id > ^max_processed_id)
|
||||||
|
|
||||||
|
count = Repo.aggregate(query, :count, :id, timeout: timeout) + processed_count
|
||||||
|
put_stat(:count, count)
|
||||||
|
persist_state()
|
||||||
|
|
||||||
|
count
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def failures_count do
|
||||||
|
with {:ok, %{rows: [[count]]}} <-
|
||||||
|
Repo.query(
|
||||||
|
"SELECT COUNT(record_id) FROM data_migration_failed_ids WHERE data_migration_id = $1;",
|
||||||
|
[data_migration_id()]
|
||||||
|
) do
|
||||||
|
count
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def feature_state, do: Config.get(feature_config_path())
|
||||||
|
|
||||||
|
def force_continue do
|
||||||
|
send(whereis(), :perform)
|
||||||
|
end
|
||||||
|
|
||||||
|
def force_restart do
|
||||||
|
:ok = State.reset()
|
||||||
|
force_continue()
|
||||||
|
end
|
||||||
|
|
||||||
|
def set_complete do
|
||||||
|
update_status(:complete)
|
||||||
|
persist_state()
|
||||||
|
on_complete(data_migration())
|
||||||
|
end
|
||||||
|
|
||||||
|
defp update_status(status, message \\ nil) do
|
||||||
|
put_stat(:state, status)
|
||||||
|
put_stat(:message, message)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp fault_rate do
|
||||||
|
with failures_count when is_integer(failures_count) <- failures_count() do
|
||||||
|
failures_count / Enum.max([get_stat(:affected_count, 0), 1])
|
||||||
|
else
|
||||||
|
_ -> :error
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp records_per_second do
|
||||||
|
get_stat(:iteration_processed_count, 0) / Enum.max([running_time(), 1])
|
||||||
|
end
|
||||||
|
|
||||||
|
defp running_time do
|
||||||
|
NaiveDateTime.diff(
|
||||||
|
NaiveDateTime.utc_now(),
|
||||||
|
get_stat(:started_at, NaiveDateTime.utc_now())
|
||||||
|
)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
117
lib/pleroma/migrators/support/base_migrator_state.ex
Normal file
117
lib/pleroma/migrators/support/base_migrator_state.ex
Normal file
|
@ -0,0 +1,117 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Migrators.Support.BaseMigratorState do
|
||||||
|
@moduledoc """
|
||||||
|
Base background migrator state functionality.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@callback data_migration() :: Pleroma.DataMigration.t()
|
||||||
|
|
||||||
|
defmacro __using__(_opts) do
|
||||||
|
quote do
|
||||||
|
use Agent
|
||||||
|
|
||||||
|
alias Pleroma.DataMigration
|
||||||
|
|
||||||
|
@behaviour Pleroma.Migrators.Support.BaseMigratorState
|
||||||
|
@reg_name {:global, __MODULE__}
|
||||||
|
|
||||||
|
def start_link(_) do
|
||||||
|
Agent.start_link(fn -> load_state_from_db() end, name: @reg_name)
|
||||||
|
end
|
||||||
|
|
||||||
|
def data_migration, do: raise("data_migration/0 is not implemented")
|
||||||
|
defoverridable data_migration: 0
|
||||||
|
|
||||||
|
defp load_state_from_db do
|
||||||
|
data_migration = data_migration()
|
||||||
|
|
||||||
|
data =
|
||||||
|
if data_migration do
|
||||||
|
Map.new(data_migration.data, fn {k, v} -> {String.to_atom(k), v} end)
|
||||||
|
else
|
||||||
|
%{}
|
||||||
|
end
|
||||||
|
|
||||||
|
%{
|
||||||
|
data_migration_id: data_migration && data_migration.id,
|
||||||
|
data: data
|
||||||
|
}
|
||||||
|
end
|
||||||
|
|
||||||
|
def persist_to_db do
|
||||||
|
%{data_migration_id: data_migration_id, data: data} = state()
|
||||||
|
|
||||||
|
if data_migration_id do
|
||||||
|
DataMigration.update_one_by_id(data_migration_id, data: data)
|
||||||
|
else
|
||||||
|
{:error, :nil_data_migration_id}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def reset do
|
||||||
|
%{data_migration_id: data_migration_id} = state()
|
||||||
|
|
||||||
|
with false <- is_nil(data_migration_id),
|
||||||
|
:ok <-
|
||||||
|
DataMigration.update_one_by_id(data_migration_id,
|
||||||
|
state: :pending,
|
||||||
|
data: %{}
|
||||||
|
) do
|
||||||
|
reinit()
|
||||||
|
else
|
||||||
|
true -> {:error, :nil_data_migration_id}
|
||||||
|
e -> e
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def reinit do
|
||||||
|
Agent.update(@reg_name, fn _state -> load_state_from_db() end)
|
||||||
|
end
|
||||||
|
|
||||||
|
def state do
|
||||||
|
Agent.get(@reg_name, & &1)
|
||||||
|
end
|
||||||
|
|
||||||
|
def get_data_key(key, default \\ nil) do
|
||||||
|
get_in(state(), [:data, key]) || default
|
||||||
|
end
|
||||||
|
|
||||||
|
def put_data_key(key, value) do
|
||||||
|
_ = persist_non_data_change(key, value)
|
||||||
|
|
||||||
|
Agent.update(@reg_name, fn state ->
|
||||||
|
put_in(state, [:data, key], value)
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
def increment_data_key(key, increment \\ 1) do
|
||||||
|
Agent.update(@reg_name, fn state ->
|
||||||
|
initial_value = get_in(state, [:data, key]) || 0
|
||||||
|
updated_value = initial_value + increment
|
||||||
|
put_in(state, [:data, key], updated_value)
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp persist_non_data_change(:state, value) do
|
||||||
|
with true <- get_data_key(:state) != value,
|
||||||
|
true <- value in Pleroma.DataMigration.State.__valid_values__(),
|
||||||
|
%{data_migration_id: data_migration_id} when not is_nil(data_migration_id) <-
|
||||||
|
state() do
|
||||||
|
DataMigration.update_one_by_id(data_migration_id, state: value)
|
||||||
|
else
|
||||||
|
false -> :ok
|
||||||
|
_ -> {:error, :nil_data_migration_id}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp persist_non_data_change(_, _) do
|
||||||
|
nil
|
||||||
|
end
|
||||||
|
|
||||||
|
def data_migration_id, do: Map.get(state(), :data_migration_id)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
|
@@ -10,6 +10,7 @@ defmodule Pleroma.Object do
   alias Pleroma.Activity
   alias Pleroma.Config
+  alias Pleroma.Hashtag
   alias Pleroma.Object
   alias Pleroma.Object.Fetcher
   alias Pleroma.ObjectTombstone

@@ -28,6 +29,8 @@ defmodule Pleroma.Object do
   schema "objects" do
     field(:data, :map)
 
+    many_to_many(:hashtags, Hashtag, join_through: "hashtags_objects", on_replace: :delete)
+
     timestamps()
   end

@@ -49,7 +52,8 @@ def with_joined_activity(query, activity_type \\ "Create", join_type \\ :inner)
   end
 
   def create(data) do
-    Object.change(%Object{}, %{data: data})
+    %Object{}
+    |> Object.change(%{data: data})
     |> Repo.insert()
   end

@@ -58,8 +62,41 @@ def change(struct, params \\ %{}) do
     |> cast(params, [:data])
     |> validate_required([:data])
     |> unique_constraint(:ap_id, name: :objects_unique_apid_index)
+    # Expecting `maybe_handle_hashtags_change/1` to run last:
+    |> maybe_handle_hashtags_change(struct)
   end
 
+  # Note: not checking activity type (assuming non-legacy objects are associated with Create act.)
+  defp maybe_handle_hashtags_change(changeset, struct) do
+    with %Ecto.Changeset{valid?: true} <- changeset,
+         data_hashtags_change = get_change(changeset, :data),
+         {_, true} <- {:changed, hashtags_changed?(struct, data_hashtags_change)},
+         {:ok, hashtag_records} <-
+           data_hashtags_change
+           |> object_data_hashtags()
+           |> Hashtag.get_or_create_by_names() do
+      put_assoc(changeset, :hashtags, hashtag_records)
+    else
+      %{valid?: false} ->
+        changeset
+
+      {:changed, false} ->
+        changeset
+
+      {:error, _} ->
+        validate_change(changeset, :data, fn _, _ ->
+          [data: "error referencing hashtags"]
+        end)
+    end
+  end
+
+  defp hashtags_changed?(%Object{} = struct, %{"tag" => _} = data) do
+    Enum.sort(embedded_hashtags(struct)) !=
+      Enum.sort(object_data_hashtags(data))
+  end
+
+  defp hashtags_changed?(_, _), do: false
+
   def get_by_id(nil), do: nil
   def get_by_id(id), do: Repo.get(Object, id)

@@ -187,9 +224,13 @@ def make_tombstone(%Object{data: %{"id" => id, "type" => type}}, deleted \\ Date
   def swap_object_with_tombstone(object) do
     tombstone = make_tombstone(object)
 
+    with {:ok, object} <-
           object
           |> Object.change(%{data: tombstone})
-          |> Repo.update()
+          |> Repo.update() do
+      Hashtag.unlink(object)
+      {:ok, object}
+    end
   end
 
   def delete(%Object{data: %{"id" => id}} = object) do

@@ -349,4 +390,39 @@ def replies(object, opts \\ []) do
   def self_replies(object, opts \\ []),
     do: replies(object, Keyword.put(opts, :self_only, true))
+
+  def tags(%Object{data: %{"tag" => tags}}) when is_list(tags), do: tags
+
+  def tags(_), do: []
+
+  def hashtags(%Object{} = object) do
+    # Note: always using embedded hashtags regardless whether they are migrated to hashtags table
+    #   (embedded hashtags stay in sync anyways, and we avoid extra joins and preload hassle)
+    embedded_hashtags(object)
+  end
+
+  def embedded_hashtags(%Object{data: data}) do
+    object_data_hashtags(data)
+  end
+
+  def embedded_hashtags(_), do: []
+
+  def object_data_hashtags(%{"tag" => tags}) when is_list(tags) do
+    tags
+    |> Enum.filter(fn
+      %{"type" => "Hashtag"} = data -> Map.has_key?(data, "name")
+      plain_text when is_bitstring(plain_text) -> true
+      _ -> false
+    end)
+    |> Enum.map(fn
+      %{"name" => "#" <> hashtag} -> String.downcase(hashtag)
+      %{"name" => hashtag} -> String.downcase(hashtag)
+      hashtag when is_bitstring(hashtag) -> String.downcase(hashtag)
+    end)
+    |> Enum.uniq()
+    # Note: "" elements (plain text) might occur in `data.tag` for incoming objects
+    |> Enum.filter(&(&1 not in [nil, ""]))
+  end
+
+  def object_data_hashtags(_), do: []
 end
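A sketch of how Object.object_data_hashtags/1 interprets an AS2 "tag" array, with a hypothetical input: Mention entries and empty strings are dropped, "#" prefixes and case are stripped:

    iex> Pleroma.Object.object_data_hashtags(%{
    ...>   "tag" => [
    ...>     %{"type" => "Hashtag", "name" => "#Pleroma"},
    ...>     %{"type" => "Mention", "href" => "https://example.com/users/lain"},
    ...>     "elixir",
    ...>     ""
    ...>   ]
    ...> })
    ["pleroma", "elixir"]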
@@ -71,6 +71,14 @@ def contain_origin_from_id(id, %{"id" => other_id} = _params) when is_binary(oth
     compare_uris(id_uri, other_uri)
   end
 
+  # Mastodon pin activities don't have an id, so we check the object field, which will be pinned.
+  def contain_origin_from_id(id, %{"object" => object}) when is_binary(object) do
+    id_uri = URI.parse(id)
+    object_uri = URI.parse(object)
+
+    compare_uris(id_uri, object_uri)
+  end
+
   def contain_origin_from_id(_id, _data), do: :error
 
   def contain_child(%{"object" => %{"id" => id, "attributedTo" => _} = object}),
@@ -93,6 +93,7 @@ defp cast_params(params) do
       max_id: :string,
       offset: :integer,
       limit: :integer,
+      skip_extra_order: :boolean,
       skip_order: :boolean
     }

@@ -114,6 +115,8 @@ defp restrict(query, :max_id, %{max_id: max_id}, table_binding) do
   defp restrict(query, :order, %{skip_order: true}, _), do: query
 
+  defp restrict(%{order_bys: [_ | _]} = query, :order, %{skip_extra_order: true}, _), do: query
+
   defp restrict(query, :order, %{min_id: _}, table_binding) do
     order_by(
       query,
@@ -8,6 +8,8 @@ defmodule Pleroma.Repo do
     adapter: Ecto.Adapters.Postgres,
     migration_timestamps: [type: :naive_datetime_usec]
 
+  use Ecto.Explain
+
   import Ecto.Query
   require Logger

@@ -63,8 +65,8 @@ def get_assoc(resource, association) do
       iex> Pleroma.Repo.chunk_stream(Pleroma.Activity.Queries.by_actor(ap_id), 500, :batches)
   """
   @spec chunk_stream(Ecto.Query.t(), integer(), atom()) :: Enumerable.t()
-  def chunk_stream(query, chunk_size, returns_as \\ :one) do
-    # We don't actually need start and end funcitons of resource streaming,
+  def chunk_stream(query, chunk_size, returns_as \\ :one, query_options \\ []) do
+    # We don't actually need start and end functions of resource streaming,
     # but it seems to be the only way to not fetch records one-by-one and
     # have individual records be the elements of the stream, instead of
     # lists of records

@@ -76,7 +78,7 @@ def chunk_stream(query, chunk_size, returns_as \\ :one) do
       |> order_by(asc: :id)
       |> where([r], r.id > ^last_id)
       |> limit(^chunk_size)
-      |> all()
+      |> all(query_options)
      |> case do
        [] ->
          {:halt, last_id}
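With the extra query_options argument, callers can forward options such as :timeout to the underlying Repo.all/2 call, which is exactly how the hashtag migrator uses it; a hedged usage sketch:

    import Ecto.Query

    # Streams objects in batches of 100, letting each underlying SELECT run without a timeout.
    Pleroma.Object
    |> where([o], o.id > 0)
    |> Pleroma.Repo.chunk_stream(100, :batches, timeout: :infinity)
    |> Stream.each(fn batch -> IO.puts("fetched #{length(batch)} objects") end)
    |> Stream.run()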
@@ -23,6 +23,9 @@ defmodule Pleroma.Upload do
   is once created permanent and changing it (especially in uploaders) is probably a bad idea!
   * `:tempfile` - path to the temporary file. Prefer in-place changes on the file rather than changing the
   path as the temporary file is also tracked by `Plug.Upload{}` and automatically deleted once the request is over.
+  * `:width` - width of the media in pixels
+  * `:height` - height of the media in pixels
+  * `:blurhash` - string hash of the image encoded with the blurhash algorithm (https://blurha.sh/)
 
   Related behaviors:
 
@@ -32,6 +35,7 @@ defmodule Pleroma.Upload do
   """
   alias Ecto.UUID
   alias Pleroma.Config
+  alias Pleroma.Maps
   require Logger
 
   @type source ::

@@ -53,9 +57,12 @@ defmodule Pleroma.Upload do
           name: String.t(),
           tempfile: String.t(),
           content_type: String.t(),
+          width: integer(),
+          height: integer(),
+          blurhash: String.t(),
           path: String.t()
         }
-  defstruct [:id, :name, :tempfile, :content_type, :path]
+  defstruct [:id, :name, :tempfile, :content_type, :width, :height, :blurhash, :path]
 
   defp get_description(opts, upload) do
     case {opts[:description], Pleroma.Config.get([Pleroma.Upload, :default_description])} do

@@ -89,9 +96,12 @@ def store(upload, opts \\ []) do
             "mediaType" => upload.content_type,
             "href" => url_from_spec(upload, opts.base_url, url_spec)
           }
+          |> Maps.put_if_present("width", upload.width)
+          |> Maps.put_if_present("height", upload.height)
         ],
         "name" => description
-      }}
+      }
+      |> Maps.put_if_present("blurhash", upload.blurhash)}
     else
       {:description_limit, _} ->
         {:error, :description_too_long}
lib/pleroma/upload/filter/analyze_metadata.ex (new file, 45 lines)
@@ -0,0 +1,45 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Upload.Filter.AnalyzeMetadata do
  @moduledoc """
  Extracts metadata about the upload, such as width/height
  """
  require Logger

  @behaviour Pleroma.Upload.Filter

  @spec filter(Pleroma.Upload.t()) ::
          {:ok, :filtered, Pleroma.Upload.t()} | {:ok, :noop} | {:error, String.t()}
  def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _} = upload) do
    try do
      image =
        file
        |> Mogrify.open()
        |> Mogrify.verbose()

      upload =
        upload
        |> Map.put(:width, image.width)
        |> Map.put(:height, image.height)
        |> Map.put(:blurhash, get_blurhash(file))

      {:ok, :filtered, upload}
    rescue
      e in ErlangError ->
        Logger.warn("#{__MODULE__}: #{inspect(e)}")
        {:ok, :noop}
    end
  end

  def filter(_), do: {:ok, :noop}

  defp get_blurhash(file) do
    with {:ok, blurhash} <- :eblurhash.magick(file) do
      blurhash
    else
      _ -> nil
    end
  end
end
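To actually run the new filter it has to be added to the upload filter chain; a sketch of the assumed config entry, following the usual Pleroma.Upload filter configuration:

    # config/config.exs (assumed location) - enable metadata extraction for uploads
    config :pleroma, Pleroma.Upload,
      filters: [Pleroma.Upload.Filter.AnalyzeMetadata]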
@@ -99,6 +99,7 @@ defmodule Pleroma.User do
     field(:local, :boolean, default: true)
     field(:follower_address, :string)
     field(:following_address, :string)
+    field(:featured_address, :string)
     field(:search_rank, :float, virtual: true)
     field(:search_type, :integer, virtual: true)
     field(:tags, {:array, :string}, default: [])

@@ -130,7 +131,6 @@ defmodule Pleroma.User do
     field(:hide_followers, :boolean, default: false)
     field(:hide_follows, :boolean, default: false)
     field(:hide_favorites, :boolean, default: true)
-    field(:pinned_activities, {:array, :string}, default: [])
     field(:email_notifications, :map, default: %{"digest" => false})
     field(:mascot, :map, default: nil)
     field(:emoji, :map, default: %{})

@@ -148,6 +148,7 @@ defmodule Pleroma.User do
     field(:accepts_chat_messages, :boolean, default: nil)
     field(:last_active_at, :naive_datetime)
     field(:disclose_client, :boolean, default: true)
+    field(:pinned_objects, :map, default: %{})
 
     embeds_one(
       :notification_settings,

@@ -372,8 +373,10 @@ def banner_url(user, options \\ []) do
   end
 
   # Should probably be renamed or removed
+  @spec ap_id(User.t()) :: String.t()
   def ap_id(%User{nickname: nickname}), do: "#{Web.base_url()}/users/#{nickname}"
 
+  @spec ap_followers(User.t()) :: String.t()
   def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
   def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"
 
@@ -381,6 +384,11 @@ def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"
   def ap_following(%User{following_address: fa}) when is_binary(fa), do: fa
   def ap_following(%User{} = user), do: "#{ap_id(user)}/following"
 
+  @spec ap_featured_collection(User.t()) :: String.t()
+  def ap_featured_collection(%User{featured_address: fa}) when is_binary(fa), do: fa
+
+  def ap_featured_collection(%User{} = user), do: "#{ap_id(user)}/collections/featured"
+
   defp truncate_fields_param(params) do
     if Map.has_key?(params, :fields) do
       Map.put(params, :fields, Enum.map(params[:fields], &truncate_field/1))

@@ -443,6 +451,7 @@ def remote_user_changeset(struct \\ %User{local: false}, params) do
         :uri,
         :follower_address,
         :following_address,
+        :featured_address,
         :hide_followers,
         :hide_follows,
         :hide_followers_count,

@@ -454,7 +463,8 @@ def remote_user_changeset(struct \\ %User{local: false}, params) do
         :invisible,
         :actor_type,
         :also_known_as,
-        :accepts_chat_messages
+        :accepts_chat_messages,
+        :pinned_objects
       ]
     )
     |> cast(params, [:name], empty_values: [])

@@ -686,7 +696,7 @@ def register_changeset_ldap(struct, params = %{password: password})
     |> validate_format(:nickname, local_nickname_regex())
     |> put_ap_id()
     |> unique_constraint(:ap_id)
-    |> put_following_and_follower_address()
+    |> put_following_and_follower_and_featured_address()
   end
 
   def register_changeset(struct, params \\ %{}, opts \\ []) do

@@ -747,7 +757,7 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do
     |> put_password_hash
     |> put_ap_id()
     |> unique_constraint(:ap_id)
-    |> put_following_and_follower_address()
+    |> put_following_and_follower_and_featured_address()
   end
 
   def maybe_validate_required_email(changeset, true), do: changeset

@@ -765,11 +775,16 @@ defp put_ap_id(changeset) do
     put_change(changeset, :ap_id, ap_id)
   end
 
-  defp put_following_and_follower_address(changeset) do
-    followers = ap_followers(%User{nickname: get_field(changeset, :nickname)})
+  defp put_following_and_follower_and_featured_address(changeset) do
+    user = %User{nickname: get_field(changeset, :nickname)}
+    followers = ap_followers(user)
+    following = ap_following(user)
+    featured = ap_featured_collection(user)
 
     changeset
     |> put_change(:follower_address, followers)
+    |> put_change(:following_address, following)
+    |> put_change(:featured_address, featured)
   end
 
   defp autofollow_users(user) do

@@ -2255,13 +2270,6 @@ def update_background(user, background) do
     |> update_and_set_cache()
   end
 
-  def roles(%{is_moderator: is_moderator, is_admin: is_admin}) do
-    %{
-      admin: is_admin,
-      moderator: is_moderator
-    }
-  end
-
   def validate_fields(changeset, remote? \\ false) do
     limit_name = if remote?, do: :max_remote_account_fields, else: :max_account_fields
     limit = Config.get([:instance, limit_name], 0)

@@ -2350,45 +2358,35 @@ def approval_changeset(user, set_approval: approved?) do
     cast(user, %{is_approved: approved?}, [:is_approved])
   end
 
-  def add_pinnned_activity(user, %Pleroma.Activity{id: id}) do
-    if id not in user.pinned_activities do
-      max_pinned_statuses = Config.get([:instance, :max_pinned_statuses], 0)
-      params = %{pinned_activities: user.pinned_activities ++ [id]}
-
-      # if pinned activity was scheduled for deletion, we remove job
-      if expiration = Pleroma.Workers.PurgeExpiredActivity.get_expiration(id) do
-        Oban.cancel_job(expiration.id)
-      end
+  @spec add_pinned_object_id(User.t(), String.t()) :: {:ok, User.t()} | {:error, term()}
+  def add_pinned_object_id(%User{} = user, object_id) do
+    if !user.pinned_objects[object_id] do
+      params = %{pinned_objects: Map.put(user.pinned_objects, object_id, NaiveDateTime.utc_now())}
 
       user
-      |> cast(params, [:pinned_activities])
-      |> validate_length(:pinned_activities,
-        max: max_pinned_statuses,
-        message: "You have already pinned the maximum number of statuses"
-      )
+      |> cast(params, [:pinned_objects])
+      |> validate_change(:pinned_objects, fn :pinned_objects, pinned_objects ->
+        max_pinned_statuses = Config.get([:instance, :max_pinned_statuses], 0)
+
+        if Enum.count(pinned_objects) <= max_pinned_statuses do
+          []
+        else
+          [pinned_objects: "You have already pinned the maximum number of statuses"]
+        end
+      end)
     else
       change(user)
     end
     |> update_and_set_cache()
   end
 
-  def remove_pinnned_activity(user, %Pleroma.Activity{id: id, data: data}) do
-    params = %{pinned_activities: List.delete(user.pinned_activities, id)}
-
-    # if pinned activity was scheduled for deletion, we reschedule it for deletion
-    if data["expires_at"] do
-      # MRF.ActivityExpirationPolicy used UTC timestamps for expires_at in original implementation
-      {:ok, expires_at} =
-        data["expires_at"] |> Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime.cast()
-
-      Pleroma.Workers.PurgeExpiredActivity.enqueue(%{
-        activity_id: id,
-        expires_at: expires_at
-      })
-    end
-
+  @spec remove_pinned_object_id(User.t(), String.t()) :: {:ok, t()} | {:error, term()}
+  def remove_pinned_object_id(%User{} = user, object_id) do
     user
-    |> cast(params, [:pinned_activities])
+    |> cast(
+      %{pinned_objects: Map.delete(user.pinned_objects, object_id)},
+      [:pinned_objects]
+    )
     |> update_and_set_cache()
   end
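A short sketch of the new pin API at the User level, where pinned object AP ids are map keys and the pin timestamp is the value; the user struct and object id below are hypothetical:

    # Pin, then unpin, an object by its ActivityPub id.
    {:ok, user} = Pleroma.User.add_pinned_object_id(user, "https://example.com/objects/1")
    Map.keys(user.pinned_objects)
    #=> ["https://example.com/objects/1"]

    {:ok, user} = Pleroma.User.remove_pinned_object_id(user, "https://example.com/objects/1")
    user.pinned_objects
    #=> %{}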
@@ -11,6 +11,8 @@ defmodule Pleroma.Utils do
       eperm epipe erange erofs espipe esrch estale etxtbsy exdev
     )a
 
+  @repo_timeout Pleroma.Config.get([Pleroma.Repo, :timeout], 15_000)
+
   def compile_dir(dir) when is_binary(dir) do
     dir
     |> File.ls!()

@@ -63,4 +65,21 @@ def posix_error_message(code) when code in @posix_error_codes do
   end
 
   def posix_error_message(_), do: ""
+
+  @doc """
+  Returns [timeout: integer] suitable for passing as an option to Repo functions.
+
+  This function detects if the execution was triggered from IEx shell, Mix task, or
+  ./bin/pleroma_ctl and sets the timeout to :infinity, else returns the default timeout value.
+  """
+  @spec query_timeout() :: [timeout: integer]
+  def query_timeout do
+    {parent, _, _, _} = Process.info(self(), :current_stacktrace) |> elem(1) |> Enum.fetch!(2)
+
+    cond do
+      parent |> to_string |> String.starts_with?("Elixir.Mix.Task") -> [timeout: :infinity]
+      parent == :erl_eval -> [timeout: :infinity]
+      true -> [timeout: @repo_timeout]
+    end
+  end
 end
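query_timeout/0 is meant to be spliced directly into Repo calls as their options list; a hedged usage sketch:

    import Ecto.Query

    # Uses :infinity when invoked from IEx or a Mix task, the configured @repo_timeout otherwise.
    from(o in Pleroma.Object, where: o.id > 0)
    |> Pleroma.Repo.all(Pleroma.Utils.query_timeout())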
@@ -10,6 +10,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
   alias Pleroma.Conversation
   alias Pleroma.Conversation.Participation
   alias Pleroma.Filter
+  alias Pleroma.Hashtag
   alias Pleroma.Maps
   alias Pleroma.Notification
   alias Pleroma.Object
@@ -465,6 +466,23 @@ def fetch_latest_direct_activity_id_for_context(context, opts \\ %{}) do
     |> Repo.one()
   end
 
+  defp fetch_paginated_optimized(query, opts, pagination) do
+    # Note: tag-filtering funcs may apply "ORDER BY objects.id DESC",
+    # and extra sorting on "activities.id DESC NULLS LAST" would worse the query plan
+    opts = Map.put(opts, :skip_extra_order, true)
+
+    Pagination.fetch_paginated(query, opts, pagination)
+  end
+
+  def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do
+    list_memberships = Pleroma.List.memberships(opts[:user])
+
+    fetch_activities_query(recipients ++ list_memberships, opts)
+    |> fetch_paginated_optimized(opts, pagination)
+    |> Enum.reverse()
+    |> maybe_update_cc(list_memberships, opts[:user])
+  end
+
   @spec fetch_public_or_unlisted_activities(map(), Pagination.type()) :: [Activity.t()]
   def fetch_public_or_unlisted_activities(opts \\ %{}, pagination \\ :keyset) do
     opts = Map.delete(opts, :user)
@@ -472,7 +490,7 @@ def fetch_public_or_unlisted_activities(opts \\ %{}, pagination \\ :keyset) do
     [Constants.as_public()]
     |> fetch_activities_query(opts)
     |> restrict_unlisted(opts)
-    |> Pagination.fetch_paginated(opts, pagination)
+    |> fetch_paginated_optimized(opts, pagination)
   end
 
   @spec fetch_public_activities(map(), Pagination.type()) :: [Activity.t()]
@@ -612,7 +630,7 @@ defp fetch_activities_for_user(user, reading_user, params) do
       |> Map.put(:type, ["Create", "Announce"])
      |> Map.put(:user, reading_user)
       |> Map.put(:actor_id, user.ap_id)
-      |> Map.put(:pinned_activity_ids, user.pinned_activities)
+      |> Map.put(:pinned_object_ids, Map.keys(user.pinned_objects))
 
     params =
       if User.blocks?(reading_user, user) do
@@ -693,52 +711,144 @@ defp restrict_since(query, %{since_id: since_id}) do
 
   defp restrict_since(query, _), do: query
 
-  defp restrict_tag_reject(_query, %{tag_reject: _tag_reject, skip_preload: true}) do
-    raise "Can't use the child object without preloading!"
+  defp restrict_embedded_tag_all(_query, %{tag_all: _tag_all, skip_preload: true}) do
+    raise_on_missing_preload()
   end
 
-  defp restrict_tag_reject(query, %{tag_reject: [_ | _] = tag_reject}) do
-    from(
-      [_activity, object] in query,
-      where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject)
-    )
-  end
-
-  defp restrict_tag_reject(query, _), do: query
-
-  defp restrict_tag_all(_query, %{tag_all: _tag_all, skip_preload: true}) do
-    raise "Can't use the child object without preloading!"
-  end
-
-  defp restrict_tag_all(query, %{tag_all: [_ | _] = tag_all}) do
+  defp restrict_embedded_tag_all(query, %{tag_all: [_ | _] = tag_all}) do
     from(
       [_activity, object] in query,
       where: fragment("(?)->'tag' \\?& (?)", object.data, ^tag_all)
     )
   end
 
-  defp restrict_tag_all(query, _), do: query
+  defp restrict_embedded_tag_all(query, %{tag_all: tag}) when is_binary(tag) do
+    restrict_embedded_tag_any(query, %{tag: tag})
+  end
 
-  defp restrict_tag(_query, %{tag: _tag, skip_preload: true}) do
+  defp restrict_embedded_tag_all(query, _), do: query
+
+  defp restrict_embedded_tag_any(_query, %{tag: _tag, skip_preload: true}) do
+    raise_on_missing_preload()
+  end
+
+  defp restrict_embedded_tag_any(query, %{tag: [_ | _] = tag_any}) do
+    from(
+      [_activity, object] in query,
+      where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag_any)
+    )
+  end
+
+  defp restrict_embedded_tag_any(query, %{tag: tag}) when is_binary(tag) do
+    restrict_embedded_tag_any(query, %{tag: [tag]})
+  end
+
+  defp restrict_embedded_tag_any(query, _), do: query
+
+  defp restrict_embedded_tag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do
+    raise_on_missing_preload()
+  end
+
+  defp restrict_embedded_tag_reject_any(query, %{tag_reject: [_ | _] = tag_reject}) do
+    from(
+      [_activity, object] in query,
+      where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject)
+    )
+  end
+
+  defp restrict_embedded_tag_reject_any(query, %{tag_reject: tag_reject})
+       when is_binary(tag_reject) do
+    restrict_embedded_tag_reject_any(query, %{tag_reject: [tag_reject]})
+  end
+
+  defp restrict_embedded_tag_reject_any(query, _), do: query
+
+  defp object_ids_query_for_tags(tags) do
+    from(hto in "hashtags_objects")
+    |> join(:inner, [hto], ht in Pleroma.Hashtag, on: hto.hashtag_id == ht.id)
+    |> where([hto, ht], ht.name in ^tags)
+    |> select([hto], hto.object_id)
+    |> distinct([hto], true)
+  end
+
+  defp restrict_hashtag_all(_query, %{tag_all: _tag, skip_preload: true}) do
+    raise_on_missing_preload()
+  end
+
+  defp restrict_hashtag_all(query, %{tag_all: [single_tag]}) do
+    restrict_hashtag_any(query, %{tag: single_tag})
+  end
+
+  defp restrict_hashtag_all(query, %{tag_all: [_ | _] = tags}) do
+    from(
+      [_activity, object] in query,
+      where:
+        fragment(
+          """
+          (SELECT array_agg(hashtags.name) FROM hashtags JOIN hashtags_objects
+            ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?)
+            AND hashtags_objects.object_id = ?) @> ?
+          """,
+          ^tags,
+          object.id,
+          ^tags
+        )
+    )
+  end
+
+  defp restrict_hashtag_all(query, %{tag_all: tag}) when is_binary(tag) do
+    restrict_hashtag_all(query, %{tag_all: [tag]})
+  end
+
+  defp restrict_hashtag_all(query, _), do: query
+
+  defp restrict_hashtag_any(_query, %{tag: _tag, skip_preload: true}) do
+    raise_on_missing_preload()
+  end
+
+  defp restrict_hashtag_any(query, %{tag: [_ | _] = tags}) do
+    hashtag_ids =
+      from(ht in Hashtag, where: ht.name in ^tags, select: ht.id)
+      |> Repo.all()
+
+    # Note: NO extra ordering should be done on "activities.id desc nulls last" for optimal plan
+    from(
+      [_activity, object] in query,
+      join: hto in "hashtags_objects",
+      on: hto.object_id == object.id,
+      where: hto.hashtag_id in ^hashtag_ids,
+      distinct: [desc: object.id],
+      order_by: [desc: object.id]
+    )
+  end
+
+  defp restrict_hashtag_any(query, %{tag: tag}) when is_binary(tag) do
+    restrict_hashtag_any(query, %{tag: [tag]})
+  end
+
+  defp restrict_hashtag_any(query, _), do: query
+
+  defp restrict_hashtag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do
+    raise_on_missing_preload()
+  end
+
+  defp restrict_hashtag_reject_any(query, %{tag_reject: [_ | _] = tags_reject}) do
+    from(
+      [_activity, object] in query,
+      where: object.id not in subquery(object_ids_query_for_tags(tags_reject))
+    )
+  end
+
+  defp restrict_hashtag_reject_any(query, %{tag_reject: tag_reject}) when is_binary(tag_reject) do
+    restrict_hashtag_reject_any(query, %{tag_reject: [tag_reject]})
+  end
+
+  defp restrict_hashtag_reject_any(query, _), do: query
+
+  defp raise_on_missing_preload do
     raise "Can't use the child object without preloading!"
   end
 
-  defp restrict_tag(query, %{tag: tag}) when is_list(tag) do
-    from(
-      [_activity, object] in query,
-      where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag)
-    )
-  end
-
-  defp restrict_tag(query, %{tag: tag}) when is_binary(tag) do
-    from(
-      [_activity, object] in query,
-      where: fragment("(?)->'tag' \\? (?)", object.data, ^tag)
-    )
-  end
-
-  defp restrict_tag(query, _), do: query
-
   defp restrict_recipients(query, [], _user), do: query
 
   defp restrict_recipients(query, recipients, nil) do
@@ -965,8 +1075,18 @@ defp restrict_unlisted(query, %{restrict_unlisted: true}) do
 
   defp restrict_unlisted(query, _), do: query
 
-  defp restrict_pinned(query, %{pinned: true, pinned_activity_ids: ids}) do
-    from(activity in query, where: activity.id in ^ids)
+  defp restrict_pinned(query, %{pinned: true, pinned_object_ids: ids}) do
+    from(
+      [activity, object: o] in query,
+      where:
+        fragment(
+          "(?)->>'type' = 'Create' and coalesce((?)->'object'->>'id', (?)->>'object') = any (?)",
+          activity.data,
+          activity.data,
+          activity.data,
+          ^ids
+        )
+    )
   end
 
   defp restrict_pinned(query, _), do: query
@@ -1098,6 +1218,26 @@ defp maybe_order(query, %{order: :asc}) do
 
   defp maybe_order(query, _), do: query
 
+  defp normalize_fetch_activities_query_opts(opts) do
+    Enum.reduce([:tag, :tag_all, :tag_reject], opts, fn key, opts ->
+      case opts[key] do
+        value when is_bitstring(value) ->
+          Map.put(opts, key, Hashtag.normalize_name(value))
+
+        value when is_list(value) ->
+          normalized_value =
+            value
+            |> Enum.map(&Hashtag.normalize_name/1)
+            |> Enum.uniq()
+
+          Map.put(opts, key, normalized_value)
+
+        _ ->
+          opts
+      end
+    end)
+  end
+
   defp fetch_activities_query_ap_ids_ops(opts) do
     source_user = opts[:muting_user]
     ap_id_relationships = if source_user, do: [:mute, :reblog_mute], else: []
@@ -1121,6 +1261,8 @@ defp fetch_activities_query_ap_ids_ops(opts) do
   end
 
   def fetch_activities_query(recipients, opts \\ %{}) do
+    opts = normalize_fetch_activities_query_opts(opts)
+
     {restrict_blocked_opts, restrict_muted_opts, restrict_muted_reblogs_opts} =
       fetch_activities_query_ap_ids_ops(opts)
 
@@ -1128,6 +1270,7 @@ def fetch_activities_query(recipients, opts \\ %{}) do
       skip_thread_containment: Config.get([:instance, :skip_thread_containment])
     }
 
+    query =
       Activity
       |> maybe_preload_objects(opts)
       |> maybe_preload_bookmarks(opts)
@@ -1136,9 +1279,6 @@ def fetch_activities_query(recipients, opts \\ %{}) do
       |> maybe_order(opts)
       |> restrict_recipients(recipients, opts[:user])
       |> restrict_replies(opts)
-      |> restrict_tag(opts)
-      |> restrict_tag_reject(opts)
-      |> restrict_tag_all(opts)
       |> restrict_since(opts)
       |> restrict_local(opts)
      |> restrict_remote(opts)
@@ -1163,15 +1303,18 @@ def fetch_activities_query(recipients, opts \\ %{}) do
       |> exclude_chat_messages(opts)
       |> exclude_invisible_actors(opts)
       |> exclude_visibility(opts)
+
+    if Config.feature_enabled?(:improved_hashtag_timeline) do
+      query
+      |> restrict_hashtag_any(opts)
+      |> restrict_hashtag_all(opts)
+      |> restrict_hashtag_reject_any(opts)
+    else
+      query
+      |> restrict_embedded_tag_any(opts)
+      |> restrict_embedded_tag_all(opts)
+      |> restrict_embedded_tag_reject_any(opts)
     end
 
-  def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do
-    list_memberships = Pleroma.List.memberships(opts[:user])
-
-    fetch_activities_query(recipients ++ list_memberships, opts)
-    |> Pagination.fetch_paginated(opts, pagination)
-    |> Enum.reverse()
-    |> maybe_update_cc(list_memberships, opts[:user])
   end
 
   @doc """
@@ -1250,21 +1393,17 @@ defp get_actor_url(url) when is_list(url) do
 
   defp get_actor_url(_url), do: nil
 
+  defp normalize_image(%{"url" => url}) do
+    %{
+      "type" => "Image",
+      "url" => [%{"href" => url}]
+    }
+  end
+
+  defp normalize_image(urls) when is_list(urls), do: urls |> List.first() |> normalize_image()
+  defp normalize_image(_), do: nil
+
   defp object_to_user_data(data) do
-    avatar =
-      data["icon"]["url"] &&
-        %{
-          "type" => "Image",
-          "url" => [%{"href" => data["icon"]["url"]}]
-        }
-
-    banner =
-      data["image"]["url"] &&
-        %{
-          "type" => "Image",
-          "url" => [%{"href" => data["image"]["url"]}]
-        }
-
     fields =
       data
       |> Map.get("attachment", [])
@@ -1290,6 +1429,9 @@ defp object_to_user_data(data) do
     invisible = data["invisible"] || false
     actor_type = data["type"] || "Person"
 
+    featured_address = data["featured"]
+    {:ok, pinned_objects} = fetch_and_prepare_featured_from_ap_id(featured_address)
+
     public_key =
       if is_map(data["publicKey"]) && is_binary(data["publicKey"]["publicKeyPem"]) do
         data["publicKey"]["publicKeyPem"]
@@ -1308,23 +1450,25 @@ defp object_to_user_data(data) do
       ap_id: data["id"],
       uri: get_actor_url(data["url"]),
       ap_enabled: true,
-      banner: banner,
+      banner: normalize_image(data["image"]),
       fields: fields,
       emoji: emojis,
       is_locked: is_locked,
       is_discoverable: is_discoverable,
      invisible: invisible,
-      avatar: avatar,
+      avatar: normalize_image(data["icon"]),
       name: data["name"],
       follower_address: data["followers"],
      following_address: data["following"],
+      featured_address: featured_address,
       bio: data["summary"] || "",
       actor_type: actor_type,
       also_known_as: Map.get(data, "alsoKnownAs", []),
       public_key: public_key,
       inbox: data["inbox"],
       shared_inbox: shared_inbox,
-      accepts_chat_messages: accepts_chat_messages
+      accepts_chat_messages: accepts_chat_messages,
+      pinned_objects: pinned_objects
     }
 
   # nickname can be nil because of virtual actors
@@ -1462,6 +1606,41 @@ def maybe_handle_clashing_nickname(data) do
     end
   end
 
+  def pin_data_from_featured_collection(%{
+        "type" => type,
+        "orderedItems" => objects
+      })
+      when type in ["OrderedCollection", "Collection"] do
+    Map.new(objects, fn %{"id" => object_ap_id} -> {object_ap_id, NaiveDateTime.utc_now()} end)
+  end
+
+  def fetch_and_prepare_featured_from_ap_id(nil) do
+    {:ok, %{}}
+  end
+
+  def fetch_and_prepare_featured_from_ap_id(ap_id) do
+    with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id) do
+      {:ok, pin_data_from_featured_collection(data)}
+    else
+      e ->
+        Logger.error("Could not decode featured collection at fetch #{ap_id}, #{inspect(e)}")
+        {:ok, %{}}
+    end
+  end
+
+  def pinned_fetch_task(nil), do: nil
+
+  def pinned_fetch_task(%{pinned_objects: pins}) do
+    if Enum.all?(pins, fn {ap_id, _} ->
+         Object.get_cached_by_ap_id(ap_id) ||
+           match?({:ok, _object}, Fetcher.fetch_object_from_id(ap_id))
+       end) do
+      :ok
+    else
+      :error
+    end
+  end
+
   def make_user_from_ap_id(ap_id) do
     user = User.get_cached_by_ap_id(ap_id)
 
@@ -1469,6 +1648,8 @@ def make_user_from_ap_id(ap_id) do
       Transmogrifier.upgrade_user_from_ap_id(ap_id)
     else
       with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id) do
+        {:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end)
+
         if user do
           user
           |> User.remote_user_changeset(data)
@@ -543,4 +543,12 @@ def upload_media(%{assigns: %{user: %User{} = user}} = conn, %{"file" => file} =
       |> json(object.data)
     end
   end
+
+  def pinned(conn, %{"nickname" => nickname}) do
+    with %User{} = user <- User.get_cached_by_nickname(nickname) do
+      conn
+      |> put_resp_header("content-type", "application/activity+json")
+      |> json(UserView.render("featured.json", %{user: user}))
+    end
+  end
 end
@@ -273,4 +273,36 @@ defp object_action(actor, object) do
        "context" => object.data["context"]
      }, []}
   end
+
+  @spec pin(User.t(), Object.t()) :: {:ok, map(), keyword()}
+  def pin(%User{} = user, object) do
+    {:ok,
+     %{
+       "id" => Utils.generate_activity_id(),
+       "target" => pinned_url(user.nickname),
+       "object" => object.data["id"],
+       "actor" => user.ap_id,
+       "type" => "Add",
+       "to" => [Pleroma.Constants.as_public()],
+       "cc" => [user.follower_address]
+     }, []}
+  end
+
+  @spec unpin(User.t(), Object.t()) :: {:ok, map, keyword()}
+  def unpin(%User{} = user, object) do
+    {:ok,
+     %{
+       "id" => Utils.generate_activity_id(),
+       "target" => pinned_url(user.nickname),
+       "object" => object.data["id"],
+       "actor" => user.ap_id,
+       "type" => "Remove",
+       "to" => [Pleroma.Constants.as_public()],
+       "cc" => [user.follower_address]
+     }, []}
+  end
+
+  defp pinned_url(nickname) when is_binary(nickname) do
+    Pleroma.Web.Router.Helpers.activity_pub_url(Pleroma.Web.Endpoint, :pinned, nickname)
+  end
 end
@@ -92,7 +92,9 @@ def pipeline_filter(%{} = message, meta) do
   end
 
   def get_policies do
-    Pleroma.Config.get([:mrf, :policies], []) |> get_policies()
+    Pleroma.Config.get([:mrf, :policies], [])
+    |> get_policies()
+    |> Enum.concat([Pleroma.Web.ActivityPub.MRF.HashtagPolicy])
   end
 
   defp get_policies(policy) when is_atom(policy), do: [policy]
lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex (new file, 59 lines)
@@ -0,0 +1,59 @@
+defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicy do
+  @behaviour Pleroma.Web.ActivityPub.MRF
+  alias Pleroma.Config
+  alias Pleroma.User
+  alias Pleroma.Web.CommonAPI
+
+  require Logger
+
+  @impl true
+  def filter(message) do
+    with follower_nickname <- Config.get([:mrf_follow_bot, :follower_nickname]),
+         %User{actor_type: "Service"} = follower <-
+           User.get_cached_by_nickname(follower_nickname),
+         %{"type" => "Create", "object" => %{"type" => "Note"}} <- message do
+      try_follow(follower, message)
+    else
+      nil ->
+        Logger.warn(
+          "#{__MODULE__} skipped because of missing `:mrf_follow_bot, :follower_nickname` configuration, the :follower_nickname
+            account does not exist, or the account is not correctly configured as a bot."
+        )
+
+        {:ok, message}
+
+      _ ->
+        {:ok, message}
+    end
+  end
+
+  defp try_follow(follower, message) do
+    to = Map.get(message, "to", [])
+    cc = Map.get(message, "cc", [])
+    actor = [message["actor"]]
+
+    Enum.concat([to, cc, actor])
+    |> List.flatten()
+    |> Enum.uniq()
+    |> User.get_all_by_ap_id()
+    |> Enum.each(fn user ->
+      with false <- user.local,
+           false <- User.following?(follower, user),
+           false <- User.locked?(user),
+           false <- (user.bio || "") |> String.downcase() |> String.contains?("nobot") do
+        Logger.debug(
+          "#{__MODULE__}: Follow request from #{follower.nickname} to #{user.nickname}"
+        )
+
+        CommonAPI.follow(follower, user)
+      end
+    end)
+
+    {:ok, message}
+  end
+
+  @impl true
+  def describe do
+    {:ok, %{}}
+  end
+end
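A configuration note on the new FollowBotPolicy: as its Logger.warn message indicates, the policy is a no-op unless `:mrf_follow_bot, :follower_nickname` names an existing local account whose actor_type is "Service". A minimal sketch of enabling it (the "followbot" nickname is an illustrative placeholder, and adding the module to [:mrf, :policies] is the usual opt-in path rather than something this file does by itself):

  config :pleroma, :mrf,
    policies: [Pleroma.Web.ActivityPub.MRF.FollowBotPolicy]

  config :pleroma, :mrf_follow_bot, follower_nickname: "followbot"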
lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex (new file, 116 lines)
@@ -0,0 +1,116 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ActivityPub.MRF.HashtagPolicy do
+  require Pleroma.Constants
+
+  alias Pleroma.Config
+  alias Pleroma.Object
+
+  @moduledoc """
+  Reject, TWKN-remove or Set-Sensitive messsages with specific hashtags (without the leading #)
+
+  Note: This MRF Policy is always enabled, if you want to disable it you have to set empty lists.
+  """
+
+  @behaviour Pleroma.Web.ActivityPub.MRF
+
+  defp check_reject(message, hashtags) do
+    if Enum.any?(Config.get([:mrf_hashtag, :reject]), fn match -> match in hashtags end) do
+      {:reject, "[HashtagPolicy] Matches with rejected keyword"}
+    else
+      {:ok, message}
+    end
+  end
+
+  defp check_ftl_removal(%{"to" => to} = message, hashtags) do
+    if Pleroma.Constants.as_public() in to and
+         Enum.any?(Config.get([:mrf_hashtag, :federated_timeline_removal]), fn match ->
+           match in hashtags
+         end) do
+      to = List.delete(to, Pleroma.Constants.as_public())
+      cc = [Pleroma.Constants.as_public() | message["cc"] || []]
+
+      message =
+        message
+        |> Map.put("to", to)
+        |> Map.put("cc", cc)
+        |> Kernel.put_in(["object", "to"], to)
+        |> Kernel.put_in(["object", "cc"], cc)
+
+      {:ok, message}
+    else
+      {:ok, message}
+    end
+  end
+
+  defp check_ftl_removal(message, _hashtags), do: {:ok, message}
+
+  defp check_sensitive(message, hashtags) do
+    if Enum.any?(Config.get([:mrf_hashtag, :sensitive]), fn match -> match in hashtags end) do
+      {:ok, Kernel.put_in(message, ["object", "sensitive"], true)}
+    else
+      {:ok, message}
+    end
+  end
+
+  @impl true
+  def filter(%{"type" => "Create", "object" => object} = message) do
+    hashtags = Object.hashtags(%Object{data: object})
+
+    if hashtags != [] do
+      with {:ok, message} <- check_reject(message, hashtags),
+           {:ok, message} <- check_ftl_removal(message, hashtags),
+           {:ok, message} <- check_sensitive(message, hashtags) do
+        {:ok, message}
+      end
+    else
+      {:ok, message}
+    end
+  end
+
+  @impl true
+  def filter(message), do: {:ok, message}
+
+  @impl true
+  def describe do
+    mrf_hashtag =
+      Config.get(:mrf_hashtag)
+      |> Enum.into(%{})
+
+    {:ok, %{mrf_hashtag: mrf_hashtag}}
+  end
+
+  @impl true
+  def config_description do
+    %{
+      key: :mrf_hashtag,
+      related_policy: "Pleroma.Web.ActivityPub.MRF.HashtagPolicy",
+      label: "MRF Hashtag",
+      description: @moduledoc,
+      children: [
+        %{
+          key: :reject,
+          type: {:list, :string},
+          description: "A list of hashtags which result in message being rejected.",
+          suggestions: ["foo"]
+        },
+        %{
+          key: :federated_timeline_removal,
+          type: {:list, :string},
+          description:
+            "A list of hashtags which result in message being removed from federated timelines (a.k.a unlisted).",
+          suggestions: ["foo"]
+        },
+        %{
+          key: :sensitive,
+          type: {:list, :string},
+          description:
+            "A list of hashtags which result in message being set as sensitive (a.k.a NSFW/R-18)",
+          suggestions: ["nsfw", "r18"]
+        }
+      ]
+    }
+  end
+end
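Unlike FollowBotPolicy, HashtagPolicy is appended to the active policy list unconditionally (see the MRF.get_policies/0 change earlier in this diff), so it is driven entirely by the `:mrf_hashtag` settings described in config_description/0 above. A minimal config sketch, with hashtag values taken from the suggestions purely as examples:

  config :pleroma, :mrf_hashtag,
    reject: ["foo"],
    federated_timeline_removal: [],
    sensitive: ["nsfw", "r18"]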
@@ -64,20 +64,16 @@ defp check_media_nsfw(
          %{host: actor_host} = _actor_info,
          %{
            "type" => "Create",
-           "object" => child_object
+           "object" => %{} = _child_object
          } = object
-       )
-       when is_map(child_object) do
+       ) do
     media_nsfw =
       Config.get([:mrf_simple, :media_nsfw])
       |> MRF.subdomains_regex()
 
     object =
       if MRF.subdomain_match?(media_nsfw, actor_host) do
-        tags = (child_object["tag"] || []) ++ ["nsfw"]
-        child_object = Map.put(child_object, "tag", tags)
-        child_object = Map.put(child_object, "sensitive", true)
-        Map.put(object, "object", child_object)
+        Kernel.put_in(object, ["object", "sensitive"], true)
       else
         object
       end
@@ -28,20 +28,11 @@ defp process_tag(
          "mrf_tag:media-force-nsfw",
          %{
            "type" => "Create",
-           "object" => %{"attachment" => child_attachment} = object
+           "object" => %{"attachment" => child_attachment}
          } = message
        )
        when length(child_attachment) > 0 do
-    tags = (object["tag"] || []) ++ ["nsfw"]
-
-    object =
-      object
-      |> Map.put("tag", tags)
-      |> Map.put("sensitive", true)
-
-    message = Map.put(message, "object", object)
-
-    {:ok, message}
+    {:ok, Kernel.put_in(message, ["object", "sensitive"], true)}
   end
 
   defp process_tag(
@@ -17,6 +17,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do
   alias Pleroma.Object.Containment
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ObjectValidators.AcceptRejectValidator
+  alias Pleroma.Web.ActivityPub.ObjectValidators.AddRemoveValidator
   alias Pleroma.Web.ActivityPub.ObjectValidators.AnnounceValidator
   alias Pleroma.Web.ActivityPub.ObjectValidators.AnswerValidator
   alias Pleroma.Web.ActivityPub.ObjectValidators.ArticleNoteValidator
@@ -37,37 +38,6 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do
   @impl true
   def validate(object, meta)
 
-  def validate(%{"type" => type} = object, meta)
-      when type in ~w[Accept Reject] do
-    with {:ok, object} <-
-           object
-           |> AcceptRejectValidator.cast_and_validate()
-           |> Ecto.Changeset.apply_action(:insert) do
-      object = stringify_keys(object)
-      {:ok, object, meta}
-    end
-  end
-
-  def validate(%{"type" => "Event"} = object, meta) do
-    with {:ok, object} <-
-           object
-           |> EventValidator.cast_and_validate()
-           |> Ecto.Changeset.apply_action(:insert) do
-      object = stringify_keys(object)
-      {:ok, object, meta}
-    end
-  end
-
-  def validate(%{"type" => "Follow"} = object, meta) do
-    with {:ok, object} <-
-           object
-           |> FollowValidator.cast_and_validate()
-           |> Ecto.Changeset.apply_action(:insert) do
-      object = stringify_keys(object)
-      {:ok, object, meta}
-    end
-  end
-
   def validate(%{"type" => "Block"} = block_activity, meta) do
     with {:ok, block_activity} <-
            block_activity
@@ -87,16 +57,6 @@ def validate(%{"type" => "Block"} = block_activity, meta) do
     end
   end
 
-  def validate(%{"type" => "Update"} = update_activity, meta) do
-    with {:ok, update_activity} <-
-           update_activity
-           |> UpdateValidator.cast_and_validate()
-           |> Ecto.Changeset.apply_action(:insert) do
-      update_activity = stringify_keys(update_activity)
-      {:ok, update_activity, meta}
-    end
-  end
-
   def validate(%{"type" => "Undo"} = object, meta) do
     with {:ok, object} <-
            object
@@ -123,76 +83,6 @@ def validate(%{"type" => "Delete"} = object, meta) do
     end
   end
 
-  def validate(%{"type" => "Like"} = object, meta) do
-    with {:ok, object} <-
-           object
-           |> LikeValidator.cast_and_validate()
-           |> Ecto.Changeset.apply_action(:insert) do
-      object = stringify_keys(object)
-      {:ok, object, meta}
-    end
-  end
-
-  def validate(%{"type" => "ChatMessage"} = object, meta) do
-    with {:ok, object} <-
-           object
-           |> ChatMessageValidator.cast_and_validate()
-           |> Ecto.Changeset.apply_action(:insert) do
-      object = stringify_keys(object)
-      {:ok, object, meta}
-    end
-  end
-
-  def validate(%{"type" => "Question"} = object, meta) do
-    with {:ok, object} <-
-           object
-           |> QuestionValidator.cast_and_validate()
-           |> Ecto.Changeset.apply_action(:insert) do
-      object = stringify_keys(object)
-      {:ok, object, meta}
-    end
-  end
-
-  def validate(%{"type" => type} = object, meta) when type in ~w[Audio Video] do
-    with {:ok, object} <-
-           object
-           |> AudioVideoValidator.cast_and_validate()
-           |> Ecto.Changeset.apply_action(:insert) do
-      object = stringify_keys(object)
-      {:ok, object, meta}
-    end
-  end
-
-  def validate(%{"type" => "Article"} = object, meta) do
-    with {:ok, object} <-
-           object
-           |> ArticleNoteValidator.cast_and_validate()
-           |> Ecto.Changeset.apply_action(:insert) do
-      object = stringify_keys(object)
-      {:ok, object, meta}
-    end
-  end
-
-  def validate(%{"type" => "Answer"} = object, meta) do
-    with {:ok, object} <-
-           object
-           |> AnswerValidator.cast_and_validate()
-           |> Ecto.Changeset.apply_action(:insert) do
-      object = stringify_keys(object)
-      {:ok, object, meta}
-    end
-  end
-
-  def validate(%{"type" => "EmojiReact"} = object, meta) do
-    with {:ok, object} <-
-           object
-           |> EmojiReactValidator.cast_and_validate()
-           |> Ecto.Changeset.apply_action(:insert) do
-      object = stringify_keys(object)
-      {:ok, object, meta}
-    end
-  end
-
   def validate(
         %{"type" => "Create", "object" => %{"type" => "ChatMessage"} = object} = create_activity,
         meta
@@ -224,10 +114,60 @@ def validate(
     end
   end
 
-  def validate(%{"type" => "Announce"} = object, meta) do
+  def validate(%{"type" => type} = object, meta)
+      when type in ~w[Event Question Audio Video Article] do
+    validator =
+      case type do
+        "Event" -> EventValidator
+        "Question" -> QuestionValidator
+        "Audio" -> AudioVideoValidator
+        "Video" -> AudioVideoValidator
+        "Article" -> ArticleNoteValidator
+      end
+
     with {:ok, object} <-
            object
-           |> AnnounceValidator.cast_and_validate()
+           |> validator.cast_and_validate()
+           |> Ecto.Changeset.apply_action(:insert) do
+      object = stringify_keys(object)
+
+      # Insert copy of hashtags as strings for the non-hashtag table indexing
+      tag = (object["tag"] || []) ++ Object.hashtags(%Object{data: object})
+      object = Map.put(object, "tag", tag)
+
+      {:ok, object, meta}
+    end
+  end
+
+  def validate(%{"type" => type} = object, meta)
+      when type in ~w[Accept Reject Follow Update Like EmojiReact Announce
+      ChatMessage Answer] do
+    validator =
+      case type do
+        "Accept" -> AcceptRejectValidator
+        "Reject" -> AcceptRejectValidator
+        "Follow" -> FollowValidator
+        "Update" -> UpdateValidator
+        "Like" -> LikeValidator
+        "EmojiReact" -> EmojiReactValidator
+        "Announce" -> AnnounceValidator
+        "ChatMessage" -> ChatMessageValidator
+        "Answer" -> AnswerValidator
+      end
+
+    with {:ok, object} <-
+           object
+           |> validator.cast_and_validate()
+           |> Ecto.Changeset.apply_action(:insert) do
+      object = stringify_keys(object)
+      {:ok, object, meta}
+    end
+  end
+
+  def validate(%{"type" => type} = object, meta) when type in ~w(Add Remove) do
+    with {:ok, object} <-
+           object
+           |> AddRemoveValidator.cast_and_validate()
           |> Ecto.Changeset.apply_action(:insert) do
       object = stringify_keys(object)
       {:ok, object, meta}
@@ -260,7 +200,7 @@ def cast_and_apply(%{"type" => "Article"} = object) do
 
   def cast_and_apply(o), do: {:error, {:validator_not_set, o}}
 
-  # is_struct/1 isn't present in Elixir 1.8.x
+  # is_struct/1 appears in Elixir 1.11
   def stringify_keys(%{__struct__: _} = object) do
     object
     |> Map.from_struct()
@@ -27,7 +27,7 @@ def cast_data(data) do
     |> cast(data, __schema__(:fields))
   end
 
-  def validate_data(cng) do
+  defp validate_data(cng) do
     cng
     |> validate_required([:id, :type, :actor, :to, :cc, :object])
     |> validate_inclusion(:type, ["Accept", "Reject"])
@@ -0,0 +1,77 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ActivityPub.ObjectValidators.AddRemoveValidator do
+  use Ecto.Schema
+
+  import Ecto.Changeset
+  import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
+
+  require Pleroma.Constants
+
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
+  alias Pleroma.User
+
+  @primary_key false
+
+  embedded_schema do
+    field(:id, ObjectValidators.ObjectID, primary_key: true)
+    field(:target)
+    field(:object, ObjectValidators.ObjectID)
+    field(:actor, ObjectValidators.ObjectID)
+    field(:type)
+    field(:to, ObjectValidators.Recipients, default: [])
+    field(:cc, ObjectValidators.Recipients, default: [])
+  end
+
+  def cast_and_validate(data) do
+    {:ok, actor} = User.get_or_fetch_by_ap_id(data["actor"])
+
+    {:ok, actor} = maybe_refetch_user(actor)
+
+    data
+    |> maybe_fix_data_for_mastodon(actor)
+    |> cast_data()
+    |> validate_data(actor)
+  end
+
+  defp maybe_fix_data_for_mastodon(data, actor) do
+    # Mastodon sends pin/unpin objects without id, to, cc fields
+    data
+    |> Map.put_new("id", Pleroma.Web.ActivityPub.Utils.generate_activity_id())
+    |> Map.put_new("to", [Pleroma.Constants.as_public()])
+    |> Map.put_new("cc", [actor.follower_address])
+  end
+
+  defp cast_data(data) do
+    cast(%__MODULE__{}, data, __schema__(:fields))
+  end
+
+  defp validate_data(changeset, actor) do
+    changeset
+    |> validate_required([:id, :target, :object, :actor, :type, :to, :cc])
+    |> validate_inclusion(:type, ~w(Add Remove))
+    |> validate_actor_presence()
+    |> validate_collection_belongs_to_actor(actor)
+    |> validate_object_presence()
+  end
+
+  defp validate_collection_belongs_to_actor(changeset, actor) do
+    validate_change(changeset, :target, fn :target, target ->
+      if target == actor.featured_address do
+        []
+      else
+        [target: "collection doesn't belong to actor"]
+      end
+    end)
+  end
+
+  defp maybe_refetch_user(%User{featured_address: address} = user) when is_binary(address) do
+    {:ok, user}
+  end
+
+  defp maybe_refetch_user(%User{ap_id: ap_id}) do
+    Pleroma.Web.ActivityPub.Transmogrifier.upgrade_user_from_ap_id(ap_id)
+  end
+end
@@ -50,7 +50,7 @@ def fix_after_cast(cng) do
     cng
   end
 
-  def validate_data(data_cng) do
+  defp validate_data(data_cng) do
     data_cng
     |> validate_inclusion(:type, ["Announce"])
     |> validate_required([:id, :type, :object, :actor, :to, :cc])
@@ -50,7 +50,7 @@ def changeset(struct, data) do
     |> cast(data, __schema__(:fields))
   end
 
-  def validate_data(data_cng) do
+  defp validate_data(data_cng) do
     data_cng
     |> validate_inclusion(:type, ["Answer"])
     |> validate_required([:id, :inReplyTo, :name, :attributedTo, :actor])
@@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNoteValidator do
   alias Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator
   alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
   alias Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
+  alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator
   alias Pleroma.Web.ActivityPub.Transmogrifier
 
   import Ecto.Changeset
@@ -22,8 +23,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNoteValidator do
     field(:cc, ObjectValidators.Recipients, default: [])
     field(:bto, ObjectValidators.Recipients, default: [])
     field(:bcc, ObjectValidators.Recipients, default: [])
-    # TODO: Write type
-    field(:tag, {:array, :map}, default: [])
+    embeds_many(:tag, TagValidator)
     field(:type, :string)
 
     field(:name, :string)
@@ -90,11 +90,12 @@ def changeset(struct, data) do
     data = fix(data)
 
     struct
-    |> cast(data, __schema__(:fields) -- [:attachment])
+    |> cast(data, __schema__(:fields) -- [:attachment, :tag])
     |> cast_embed(:attachment)
+    |> cast_embed(:tag)
   end
 
-  def validate_data(data_cng) do
+  defp validate_data(data_cng) do
     data_cng
     |> validate_inclusion(:type, ["Article", "Note"])
     |> validate_required([:id, :actor, :attributedTo, :type, :context, :context_id])
@@ -6,7 +6,6 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do
   use Ecto.Schema
 
   alias Pleroma.EctoType.ActivityPub.ObjectValidators
-  alias Pleroma.Web.ActivityPub.ObjectValidators.UrlObjectValidator
 
   import Ecto.Changeset
 
@@ -21,6 +20,8 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do
       field(:type, :string)
       field(:href, ObjectValidators.Uri)
      field(:mediaType, :string, default: "application/octet-stream")
+      field(:width, :integer)
+      field(:height, :integer)
     end
   end
 
@@ -52,7 +53,7 @@ def url_changeset(struct, data) do
     data = fix_media_type(data)
 
     struct
-    |> cast(data, [:type, :href, :mediaType])
+    |> cast(data, [:type, :href, :mediaType, :width, :height])
     |> validate_inclusion(:type, ["Link"])
     |> validate_required([:type, :href, :mediaType])
   end
@@ -90,7 +91,7 @@ defp fix_url(data) do
     end
   end
 
-  def validate_data(cng) do
+  defp validate_data(cng) do
     cng
     |> validate_inclusion(:type, ~w[Document Audio Image Video])
     |> validate_required([:mediaType, :url, :type])
@@ -5,11 +5,11 @@
 defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioVideoValidator do
   use Ecto.Schema
 
-  alias Pleroma.EarmarkRenderer
   alias Pleroma.EctoType.ActivityPub.ObjectValidators
   alias Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator
   alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
   alias Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
+  alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator
   alias Pleroma.Web.ActivityPub.Transmogrifier
 
   import Ecto.Changeset
@@ -23,8 +23,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioVideoValidator do
     field(:cc, ObjectValidators.Recipients, default: [])
     field(:bto, ObjectValidators.Recipients, default: [])
     field(:bcc, ObjectValidators.Recipients, default: [])
-    # TODO: Write type
-    field(:tag, {:array, :map}, default: [])
+    embeds_many(:tag, TagValidator)
     field(:type, :string)
 
     field(:name, :string)
@@ -110,7 +109,7 @@ defp fix_content(%{"mediaType" => "text/markdown", "content" => content} = data)
        when is_binary(content) do
     content =
       content
-      |> Earmark.as_html!(%Earmark.Options{renderer: EarmarkRenderer})
+      |> Pleroma.Formatter.markdown_to_html()
       |> Pleroma.HTML.filter_tags()
 
     Map.put(data, "content", content)
@@ -132,11 +131,12 @@ def changeset(struct, data) do
     data = fix(data)
 
     struct
-    |> cast(data, __schema__(:fields) -- [:attachment])
+    |> cast(data, __schema__(:fields) -- [:attachment, :tag])
     |> cast_embed(:attachment)
+    |> cast_embed(:tag)
   end
 
-  def validate_data(data_cng) do
+  defp validate_data(data_cng) do
     data_cng
     |> validate_inclusion(:type, ["Audio", "Video"])
     |> validate_required([:id, :actor, :attributedTo, :type, :context, :attachment])
@@ -26,7 +26,7 @@ def cast_data(data) do
     |> cast(data, __schema__(:fields))
   end
 
-  def validate_data(cng) do
+  defp validate_data(cng) do
     cng
     |> validate_required([:id, :type, :actor, :to, :cc, :object])
     |> validate_inclusion(:type, ["Block"])
@@ -67,7 +67,7 @@ def changeset(struct, data) do
     |> cast_embed(:attachment)
   end
 
-  def validate_data(data_cng) do
+  defp validate_data(data_cng) do
     data_cng
     |> validate_inclusion(:type, ["ChatMessage"])
     |> validate_required([:id, :actor, :to, :type, :published])
@@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations do
   alias Pleroma.Object
   alias Pleroma.User
 
+  @spec validate_any_presence(Ecto.Changeset.t(), [atom()]) :: Ecto.Changeset.t()
   def validate_any_presence(cng, fields) do
     non_empty =
       fields
@@ -29,6 +30,7 @@ def validate_any_presence(cng, fields) do
     end
   end
 
+  @spec validate_actor_presence(Ecto.Changeset.t(), keyword()) :: Ecto.Changeset.t()
   def validate_actor_presence(cng, options \\ []) do
     field_name = Keyword.get(options, :field_name, :actor)
 
@@ -47,6 +49,7 @@ def validate_actor_presence(cng, options \\ []) do
     end)
   end
 
+  @spec validate_object_presence(Ecto.Changeset.t(), keyword()) :: Ecto.Changeset.t()
   def validate_object_presence(cng, options \\ []) do
     field_name = Keyword.get(options, :field_name, :object)
     allowed_types = Keyword.get(options, :allowed_types, false)
@@ -68,6 +71,7 @@ def validate_object_presence(cng, options \\ []) do
     end)
   end
 
+  @spec validate_object_or_user_presence(Ecto.Changeset.t(), keyword()) :: Ecto.Changeset.t()
   def validate_object_or_user_presence(cng, options \\ []) do
     field_name = Keyword.get(options, :field_name, :object)
     options = Keyword.put(options, :field_name, field_name)
@@ -83,6 +87,7 @@ def validate_object_or_user_presence(cng, options \\ []) do
     if actor_cng.valid?, do: actor_cng, else: object_cng
   end
 
+  @spec validate_host_match(Ecto.Changeset.t(), [atom()]) :: Ecto.Changeset.t()
   def validate_host_match(cng, fields \\ [:id, :actor]) do
     if same_domain?(cng, fields) do
       cng
@@ -95,6 +100,7 @@ def validate_host_match(cng, fields \\ [:id, :actor]) do
     end
   end
 
+  @spec validate_fields_match(Ecto.Changeset.t(), [atom()]) :: Ecto.Changeset.t()
   def validate_fields_match(cng, fields) do
     if map_unique?(cng, fields) do
       cng
@@ -122,12 +128,14 @@ defp map_unique?(cng, fields, func \\ & &1) do
     end)
   end
 
+  @spec same_domain?(Ecto.Changeset.t(), [atom()]) :: boolean()
   def same_domain?(cng, fields \\ [:actor, :object]) do
     map_unique?(cng, fields, fn value -> URI.parse(value).host end)
   end
 
   # This figures out if a user is able to create, delete or modify something
   # based on the domain and superuser status
+  @spec validate_modification_rights(Ecto.Changeset.t()) :: Ecto.Changeset.t()
   def validate_modification_rights(cng) do
     actor = User.get_cached_by_ap_id(get_field(cng, :actor))
 
@@ -39,7 +39,7 @@ def cast_and_validate(data, meta \\ []) do
     |> validate_data(meta)
   end
 
-  def validate_data(cng, meta \\ []) do
+  defp validate_data(cng, meta) do
     cng
     |> validate_required([:id, :actor, :to, :type, :object])
     |> validate_inclusion(:type, ["Create"])
@@ -79,7 +79,7 @@ defp fix(data, meta) do
     |> CommonFixes.fix_actor()
   end
 
-  def validate_data(cng, meta \\ []) do
+  defp validate_data(cng, meta) do
     cng
     |> validate_required([:actor, :type, :object])
     |> validate_inclusion(:type, ["Create"])
@@ -53,7 +53,7 @@ def add_deleted_activity_id(cng) do
     Tombstone
     Video
   }
-  def validate_data(cng) do
+  defp validate_data(cng) do
     cng
     |> validate_required([:id, :type, :actor, :to, :cc, :object])
     |> validate_inclusion(:type, ["Delete"])
@@ -70,7 +70,7 @@ def validate_emoji(cng) do
     end
   end
 
-  def validate_data(data_cng) do
+  defp validate_data(data_cng) do
     data_cng
     |> validate_inclusion(:type, ["EmojiReact"])
     |> validate_required([:id, :type, :object, :actor, :context, :to, :cc, :content])
@@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do
   alias Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator
   alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
   alias Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
+  alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator
   alias Pleroma.Web.ActivityPub.Transmogrifier
 
   import Ecto.Changeset
@@ -23,8 +24,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do
     field(:cc, ObjectValidators.Recipients, default: [])
     field(:bto, ObjectValidators.Recipients, default: [])
     field(:bcc, ObjectValidators.Recipients, default: [])
-    # TODO: Write type
-    field(:tag, {:array, :map}, default: [])
+    embeds_many(:tag, TagValidator)
     field(:type, :string)
 
     field(:name, :string)
@@ -81,11 +81,12 @@ def changeset(struct, data) do
     data = fix(data)
 
     struct
-    |> cast(data, __schema__(:fields) -- [:attachment])
+    |> cast(data, __schema__(:fields) -- [:attachment, :tag])
     |> cast_embed(:attachment)
+    |> cast_embed(:tag)
   end
 
-  def validate_data(data_cng) do
+  defp validate_data(data_cng) do
     data_cng
     |> validate_inclusion(:type, ["Event"])
     |> validate_required([:id, :actor, :attributedTo, :type, :context, :context_id])
@@ -27,7 +27,7 @@ def cast_data(data) do
     |> cast(data, __schema__(:fields))
   end
 
-  def validate_data(cng) do
+  defp validate_data(cng) do
     cng
     |> validate_required([:id, :type, :actor, :to, :cc, :object])
     |> validate_inclusion(:type, ["Follow"])
@@ -76,7 +76,7 @@ def fix_recipients(cng) do
     end
   end
 
-  def validate_data(data_cng) do
+  defp validate_data(data_cng) do
     data_cng
     |> validate_inclusion(:type, ["Like"])
     |> validate_required([:id, :type, :object, :actor, :context, :to, :cc])
@@ -10,6 +10,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator do
   alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes
   alias Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
   alias Pleroma.Web.ActivityPub.ObjectValidators.QuestionOptionsValidator
+  alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator
   alias Pleroma.Web.ActivityPub.Transmogrifier
 
   import Ecto.Changeset
@@ -24,8 +25,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator do
     field(:cc, ObjectValidators.Recipients, default: [])
     field(:bto, ObjectValidators.Recipients, default: [])
     field(:bcc, ObjectValidators.Recipients, default: [])
-    # TODO: Write type
-    field(:tag, {:array, :map}, default: [])
+    embeds_many(:tag, TagValidator)
     field(:type, :string)
     field(:content, :string)
     field(:context, :string)
@@ -93,13 +93,14 @@ def changeset(struct, data) do
     data = fix(data)
 
     struct
-    |> cast(data, __schema__(:fields) -- [:anyOf, :oneOf, :attachment])
+    |> cast(data, __schema__(:fields) -- [:anyOf, :oneOf, :attachment, :tag])
     |> cast_embed(:attachment)
     |> cast_embed(:anyOf)
     |> cast_embed(:oneOf)
+    |> cast_embed(:tag)
   end
 
-  def validate_data(data_cng) do
+  defp validate_data(data_cng) do
     data_cng
     |> validate_inclusion(:type, ["Question"])
     |> validate_required([:id, :actor, :attributedTo, :type, :context, :context_id])
@@ -0,0 +1,77 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ActivityPub.ObjectValidators.TagValidator do
+  use Ecto.Schema
+
+  alias Pleroma.EctoType.ActivityPub.ObjectValidators
+
+  import Ecto.Changeset
+
+  @primary_key false
+  embedded_schema do
+    # Common
+    field(:type, :string)
+    field(:name, :string)
+
+    # Mention, Hashtag
+    field(:href, ObjectValidators.Uri)
+
+    # Emoji
+    embeds_one :icon, IconObjectValidator, primary_key: false do
+      field(:type, :string)
+      field(:url, ObjectValidators.Uri)
+    end
+
+    field(:updated, ObjectValidators.DateTime)
+    field(:id, ObjectValidators.Uri)
+  end
+
+  def cast_and_validate(data) do
+    data
+    |> cast_data()
+  end
+
+  def cast_data(data) do
+    %__MODULE__{}
+    |> changeset(data)
+  end
+
+  def changeset(struct, %{"type" => "Mention"} = data) do
+    struct
+    |> cast(data, [:type, :name, :href])
+    |> validate_required([:type, :href])
+  end
+
+  def changeset(struct, %{"type" => "Hashtag", "name" => name} = data) do
+    name =
+      cond do
+        "#" <> name -> name
+        name -> name
+      end
+      |> String.downcase()
+
+    data = Map.put(data, "name", name)
+
+    struct
+    |> cast(data, [:type, :name, :href])
+    |> validate_required([:type, :name])
+  end
+
+  def changeset(struct, %{"type" => "Emoji"} = data) do
+    data = Map.put(data, "name", String.trim(data["name"], ":"))
+
+    struct
+    |> cast(data, [:type, :name, :updated, :id])
+    |> cast_embed(:icon, with: &icon_changeset/2)
+    |> validate_required([:type, :name, :icon])
+  end
+
+  def icon_changeset(struct, data) do
+    struct
+    |> cast(data, [:type, :url])
+    |> validate_inclusion(:type, ~w[Image])
+    |> validate_required([:type, :url])
+  end
+end
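For illustration only (not part of the diff above): a minimal sketch of how the new TagValidator might be exercised from IEx. The module and functions are exactly the ones introduced in the new file; the sample tag map is an assumed input.

    alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator

    # An incoming Hashtag tag as a plain AS2 map; the Hashtag clause of
    # changeset/2 downcases the name before casting, and :type/:name are
    # the required fields, so this should come back as a valid changeset.
    changeset =
      TagValidator.cast_and_validate(%{
        "type" => "Hashtag",
        "name" => "#Pleroma",
        "href" => "https://example.com/tags/pleroma"
      })

    changeset.valid?
    # => true (expected)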
@@ -38,7 +38,7 @@ def changeset(struct, data) do
     |> cast(data, __schema__(:fields))
   end
 
-  def validate_data(data_cng) do
+  defp validate_data(data_cng) do
     data_cng
     |> validate_inclusion(:type, ["Undo"])
     |> validate_required([:id, :type, :object, :actor, :to, :cc])
@@ -28,7 +28,7 @@ def cast_data(data) do
     |> cast(data, __schema__(:fields))
   end
 
-  def validate_data(cng) do
+  defp validate_data(cng) do
     cng
     |> validate_required([:id, :type, :actor, :to, :cc, :object])
     |> validate_inclusion(:type, ["Update"])
@@ -7,6 +7,7 @@ defmodule Pleroma.Web.ActivityPub.Pipeline do
   alias Pleroma.Config
   alias Pleroma.Object
   alias Pleroma.Repo
+  alias Pleroma.Utils
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.MRF
   alias Pleroma.Web.ActivityPub.ObjectValidator
@@ -24,7 +25,7 @@ defmodule Pleroma.Web.ActivityPub.Pipeline do
   @spec common_pipeline(map(), keyword()) ::
           {:ok, Activity.t() | Object.t(), keyword()} | {:error, any()}
   def common_pipeline(object, meta) do
-    case Repo.transaction(fn -> do_common_pipeline(object, meta) end) do
+    case Repo.transaction(fn -> do_common_pipeline(object, meta) end, Utils.query_timeout()) do
       {:ok, {:ok, activity, meta}} ->
         @side_effects.handle_after_transaction(meta)
         {:ok, activity, meta}
@@ -40,19 +41,17 @@ def common_pipeline(object, meta) do
     end
   end
 
-  def do_common_pipeline(object, meta) do
-    with {_, {:ok, validated_object, meta}} <-
-           {:validate_object, @object_validator.validate(object, meta)},
-         {_, {:ok, mrfd_object, meta}} <-
-           {:mrf_object, @mrf.pipeline_filter(validated_object, meta)},
-         {_, {:ok, activity, meta}} <-
-           {:persist_object, @activity_pub.persist(mrfd_object, meta)},
-         {_, {:ok, activity, meta}} <-
-           {:execute_side_effects, @side_effects.handle(activity, meta)},
-         {_, {:ok, _}} <- {:federation, maybe_federate(activity, meta)} do
-      {:ok, activity, meta}
+  def do_common_pipeline(%{__struct__: _}, _meta), do: {:error, :is_struct}
+
+  def do_common_pipeline(message, meta) do
+    with {_, {:ok, message, meta}} <- {:validate, @object_validator.validate(message, meta)},
+         {_, {:ok, message, meta}} <- {:mrf, @mrf.pipeline_filter(message, meta)},
+         {_, {:ok, message, meta}} <- {:persist, @activity_pub.persist(message, meta)},
+         {_, {:ok, message, meta}} <- {:side_effects, @side_effects.handle(message, meta)},
+         {_, {:ok, _}} <- {:federation, maybe_federate(message, meta)} do
+      {:ok, message, meta}
     else
-      {:mrf_object, {:reject, message, _}} -> {:reject, message}
+      {:mrf, {:reject, message, _}} -> {:reject, message}
       e -> {:error, e}
     end
   end
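As a hedged aside (not part of the commit): the new head clause means anything that is already a struct is refused before validation even starts, which can be checked directly against the function introduced above. The keyword list below is only a placeholder for real pipeline metadata.

    # An already-persisted struct now short-circuits instead of being re-validated:
    {:error, :is_struct} =
      Pleroma.Web.ActivityPub.Pipeline.do_common_pipeline(%Pleroma.Activity{}, local: false)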
@@ -276,10 +276,10 @@ def handle(%{data: %{"type" => "Delete", "object" => deleted_object}} = object,
     result =
       case deleted_object do
         %Object{} ->
-          with {:ok, deleted_object, activity} <- Object.delete(deleted_object),
+          with {:ok, deleted_object, _activity} <- Object.delete(deleted_object),
               {_, actor} when is_binary(actor) <- {:actor, deleted_object.data["actor"]},
               %User{} = user <- User.get_cached_by_ap_id(actor) do
-            User.remove_pinnned_activity(user, activity)
+            User.remove_pinned_object_id(user, deleted_object.data["id"])
 
             {:ok, user} = ActivityPub.decrease_note_count_if_public(user, deleted_object)
 
@@ -312,6 +312,63 @@ def handle(%{data: %{"type" => "Delete", "object" => deleted_object}} = object,
     end
   end
 
+  # Tasks this handles:
+  # - adds pin to user
+  # - removes expiration job for pinned activity, if was set for expiration
+  @impl true
+  def handle(%{data: %{"type" => "Add"} = data} = object, meta) do
+    with %User{} = user <- User.get_cached_by_ap_id(data["actor"]),
+         {:ok, _user} <- User.add_pinned_object_id(user, data["object"]) do
+      # if pinned activity was scheduled for deletion, we remove job
+      if expiration = Pleroma.Workers.PurgeExpiredActivity.get_expiration(meta[:activity_id]) do
+        Oban.cancel_job(expiration.id)
+      end
+
+      {:ok, object, meta}
+    else
+      nil ->
+        {:error, :user_not_found}
+
+      {:error, changeset} ->
+        if changeset.errors[:pinned_objects] do
+          {:error, :pinned_statuses_limit_reached}
+        else
+          changeset.errors
+        end
+    end
+  end
+
+  # Tasks this handles:
+  # - removes pin from user
+  # - removes corresponding Add activity
+  # - if activity had expiration, recreates activity expiration job
+  @impl true
+  def handle(%{data: %{"type" => "Remove"} = data} = object, meta) do
+    with %User{} = user <- User.get_cached_by_ap_id(data["actor"]),
+         {:ok, _user} <- User.remove_pinned_object_id(user, data["object"]) do
+      data["object"]
+      |> Activity.add_by_params_query(user.ap_id, user.featured_address)
+      |> Repo.delete_all()
+
+      # if pinned activity was scheduled for deletion, we reschedule it for deletion
+      if meta[:expires_at] do
+        # MRF.ActivityExpirationPolicy used UTC timestamps for expires_at in original implementation
+        {:ok, expires_at} =
+          Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime.cast(meta[:expires_at])
+
+        Pleroma.Workers.PurgeExpiredActivity.enqueue(%{
+          activity_id: meta[:activity_id],
+          expires_at: expires_at
+        })
+      end
+
+      {:ok, object, meta}
+    else
+      nil -> {:error, :user_not_found}
+      error -> error
+    end
+  end
+
   # Nothing to do
   @impl true
   def handle(object, meta) do
@@ -32,18 +32,17 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
   """
   def fix_object(object, options \\ []) do
     object
-    |> strip_internal_fields
-    |> fix_actor
-    |> fix_url
-    |> fix_attachments
-    |> fix_context
+    |> strip_internal_fields()
+    |> fix_actor()
+    |> fix_url()
+    |> fix_attachments()
+    |> fix_context()
     |> fix_in_reply_to(options)
-    |> fix_emoji
-    |> fix_tag
-    |> set_sensitive
-    |> fix_content_map
-    |> fix_addressing
-    |> fix_summary
+    |> fix_emoji()
+    |> fix_tag()
+    |> fix_content_map()
+    |> fix_addressing()
+    |> fix_summary()
     |> fix_type(options)
   end
 
@@ -245,6 +244,8 @@ def fix_attachments(%{"attachment" => attachment} = object) when is_list(attachm
           "type" => Map.get(url || %{}, "type", "Link")
         }
         |> Maps.put_if_present("mediaType", media_type)
+        |> Maps.put_if_present("width", (url || %{})["width"] || data["width"])
+        |> Maps.put_if_present("height", (url || %{})["height"] || data["height"])
 
       %{
         "url" => [attachment_url],
@@ -315,10 +316,9 @@ def fix_tag(%{"tag" => tag} = object) when is_list(tag) do
     tags =
       tag
       |> Enum.filter(fn data -> data["type"] == "Hashtag" and data["name"] end)
-      |> Enum.map(fn %{"name" => name} ->
-        name
-        |> String.slice(1..-1)
-        |> String.downcase()
+      |> Enum.map(fn
+        %{"name" => "#" <> hashtag} -> String.downcase(hashtag)
+        %{"name" => hashtag} -> String.downcase(hashtag)
       end)
 
     Map.put(object, "tag", tag ++ tags)
@@ -536,7 +536,7 @@ def handle_incoming(
   end
 
   def handle_incoming(%{"type" => type} = data, _options)
-      when type in ~w{Like EmojiReact Announce} do
+      when type in ~w{Like EmojiReact Announce Add Remove} do
     with :ok <- ObjectValidator.fetch_actor_and_object(data),
          {:ok, activity, _meta} <-
           Pipeline.common_pipeline(data, local: false) do
@@ -566,7 +566,7 @@ def handle_incoming(
          Pipeline.common_pipeline(data, local: false) do
       {:ok, activity}
     else
-      {:error, {:validate_object, _}} = e ->
+      {:error, {:validate, _}} = e ->
        # Check if we have a create activity for this
        with {:ok, object_id} <- ObjectValidators.ObjectID.cast(data["object"]),
             %Activity{data: %{"actor" => actor}} <-
@@ -742,7 +742,6 @@ def replies(_), do: []
   # Prepares the object of an outgoing create activity.
   def prepare_object(object) do
     object
-    |> set_sensitive
    |> add_hashtags
    |> add_mention_tags
    |> add_emoji_tags
@@ -933,15 +932,6 @@ def set_conversation(object) do
     Map.put(object, "conversation", object["context"])
   end
 
-  def set_sensitive(%{"sensitive" => _} = object) do
-    object
-  end
-
-  def set_sensitive(object) do
-    tags = object["tag"] || []
-    Map.put(object, "sensitive", "nsfw" in tags)
-  end
-
   def set_type(%{"type" => "Answer"} = object) do
     Map.put(object, "type", "Note")
   end
@@ -961,7 +951,7 @@ def prepare_attachments(object) do
     object
     |> Map.get("attachment", [])
     |> Enum.map(fn data ->
-      [%{"mediaType" => media_type, "href" => href} | _] = data["url"]
+      [%{"mediaType" => media_type, "href" => href} = url | _] = data["url"]
 
       %{
        "url" => href,
@@ -969,6 +959,9 @@ def prepare_attachments(object) do
        "name" => data["name"],
        "type" => "Document"
      }
+      |> Maps.put_if_present("width", url["width"])
+      |> Maps.put_if_present("height", url["height"])
+      |> Maps.put_if_present("blurhash", data["blurhash"])
     end)
 
    Map.put(object, "attachment", attachments)
@@ -1012,6 +1005,7 @@ def upgrade_user_from_ap_id(ap_id) do
     with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
          {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
          {:ok, user} <- update_user(user, data) do
+      {:ok, _pid} = Task.start(fn -> ActivityPub.pinned_fetch_task(user) end)
       TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
       {:ok, user}
     else
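For illustration (not part of the diff): the reworked mapping in the @@ -315 fix_tag hunk can be reproduced in isolation with plain Enum clauses; the sample tag list below is assumed.

    tags = [
      %{"type" => "Hashtag", "name" => "#Cats"},
      %{"type" => "Hashtag", "name" => "dogs"}
    ]

    tags
    |> Enum.filter(fn data -> data["type"] == "Hashtag" and data["name"] end)
    |> Enum.map(fn
      # a leading "#" is stripped by the binary pattern, then downcased
      %{"name" => "#" <> hashtag} -> String.downcase(hashtag)
      # names without "#" are only downcased
      %{"name" => hashtag} -> String.downcase(hashtag)
    end)
    # => ["cats", "dogs"]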
@@ -6,8 +6,10 @@ defmodule Pleroma.Web.ActivityPub.UserView do
   use Pleroma.Web, :view
 
   alias Pleroma.Keys
+  alias Pleroma.Object
   alias Pleroma.Repo
   alias Pleroma.User
+  alias Pleroma.Web.ActivityPub.ObjectView
   alias Pleroma.Web.ActivityPub.Transmogrifier
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.Endpoint
@@ -97,6 +99,7 @@ def render("user.json", %{user: user}) do
       "followers" => "#{user.ap_id}/followers",
       "inbox" => "#{user.ap_id}/inbox",
       "outbox" => "#{user.ap_id}/outbox",
+      "featured" => "#{user.ap_id}/collections/featured",
       "preferredUsername" => user.nickname,
       "name" => user.name,
       "summary" => user.bio,
@@ -245,6 +248,24 @@ def render("activity_collection_page.json", %{
     |> Map.merge(pagination)
   end
 
+  def render("featured.json", %{
+        user: %{featured_address: featured_address, pinned_objects: pinned_objects}
+      }) do
+    objects =
+      pinned_objects
+      |> Enum.sort_by(fn {_, pinned_at} -> pinned_at end, &>=/2)
+      |> Enum.map(fn {id, _} ->
+        ObjectView.render("object.json", %{object: Object.get_cached_by_ap_id(id)})
+      end)
+
+    %{
+      "id" => featured_address,
+      "type" => "OrderedCollection",
+      "orderedItems" => objects
+    }
+    |> Map.merge(Utils.make_json_ld_header())
+  end
+
   defp maybe_put_total_items(map, false, _total), do: map
 
   defp maybe_put_total_items(map, true, total) do
@@ -13,16 +13,17 @@ defmodule Pleroma.Web.AdminAPI.UserController do
   alias Pleroma.Web.ActivityPub.Builder
   alias Pleroma.Web.ActivityPub.Pipeline
   alias Pleroma.Web.AdminAPI
-  alias Pleroma.Web.AdminAPI.AccountView
   alias Pleroma.Web.AdminAPI.Search
   alias Pleroma.Web.Plugs.OAuthScopesPlug
 
   @users_page_size 50
 
+  plug(Pleroma.Web.ApiSpec.CastAndValidate)
+
   plug(
     OAuthScopesPlug,
     %{scopes: ["admin:read:accounts"]}
-    when action in [:list, :show]
+    when action in [:index, :show]
   )
 
   plug(
@@ -44,13 +45,19 @@ defmodule Pleroma.Web.AdminAPI.UserController do
     when action in [:follow, :unfollow]
   )
 
+  plug(:put_view, Pleroma.Web.AdminAPI.AccountView)
+
   action_fallback(AdminAPI.FallbackController)
 
-  def delete(conn, %{"nickname" => nickname}) do
-    delete(conn, %{"nicknames" => [nickname]})
+  defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.Admin.UserOperation
+
+  def delete(conn, %{nickname: nickname}) do
+    conn
+    |> Map.put(:body_params, %{nicknames: [nickname]})
+    |> delete(%{})
   end
 
-  def delete(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do
+  def delete(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do
     users = Enum.map(nicknames, &User.get_cached_by_nickname/1)
 
     Enum.each(users, fn user ->
@@ -67,10 +74,16 @@ def delete(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do
     json(conn, nicknames)
   end
 
-  def follow(%{assigns: %{user: admin}} = conn, %{
-        "follower" => follower_nick,
-        "followed" => followed_nick
-      }) do
+  def follow(
+        %{
+          assigns: %{user: admin},
+          body_params: %{
+            follower: follower_nick,
+            followed: followed_nick
+          }
+        } = conn,
+        _
+      ) do
     with %User{} = follower <- User.get_cached_by_nickname(follower_nick),
          %User{} = followed <- User.get_cached_by_nickname(followed_nick) do
       User.follow(follower, followed)
@@ -86,10 +99,16 @@ def follow(%{assigns: %{user: admin}} = conn, %{
     json(conn, "ok")
   end
 
-  def unfollow(%{assigns: %{user: admin}} = conn, %{
-        "follower" => follower_nick,
-        "followed" => followed_nick
-      }) do
+  def unfollow(
+        %{
+          assigns: %{user: admin},
+          body_params: %{
+            follower: follower_nick,
+            followed: followed_nick
+          }
+        } = conn,
+        _
+      ) do
     with %User{} = follower <- User.get_cached_by_nickname(follower_nick),
          %User{} = followed <- User.get_cached_by_nickname(followed_nick) do
       User.unfollow(follower, followed)
@@ -105,9 +124,10 @@ def unfollow(%{assigns: %{user: admin}} = conn, %{
     json(conn, "ok")
   end
 
-  def create(%{assigns: %{user: admin}} = conn, %{"users" => users}) do
+  def create(%{assigns: %{user: admin}, body_params: %{users: users}} = conn, _) do
     changesets =
-      Enum.map(users, fn %{"nickname" => nickname, "email" => email, "password" => password} ->
+      users
+      |> Enum.map(fn %{nickname: nickname, email: email, password: password} ->
        user_data = %{
          nickname: nickname,
          name: nickname,
@@ -124,52 +144,49 @@ def create(%{assigns: %{user: admin}} = conn, %{"users" => users}) do
      end)
 
    case Pleroma.Repo.transaction(changesets) do
-      {:ok, users} ->
-        res =
-          users
+      {:ok, users_map} ->
+        users =
+          users_map
          |> Map.values()
          |> Enum.map(fn user ->
            {:ok, user} = User.post_register_action(user)
 
            user
          end)
-          |> Enum.map(&AccountView.render("created.json", %{user: &1}))
 
        ModerationLog.insert_log(%{
          actor: admin,
-          subjects: Map.values(users),
+          subjects: users,
          action: "create"
        })
 
-        json(conn, res)
+        render(conn, "created_many.json", users: users)
 
      {:error, id, changeset, _} ->
-        res =
+        changesets =
          Enum.map(changesets.operations, fn
-            {current_id, {:changeset, _current_changeset, _}} when current_id == id ->
-              AccountView.render("create-error.json", %{changeset: changeset})
+            {^id, {:changeset, _current_changeset, _}} ->
+              changeset
 
            {_, {:changeset, current_changeset, _}} ->
-              AccountView.render("create-error.json", %{changeset: current_changeset})
+              current_changeset
          end)
 
        conn
        |> put_status(:conflict)
-        |> json(res)
+        |> render("create_errors.json", changesets: changesets)
    end
  end
 
-  def show(%{assigns: %{user: admin}} = conn, %{"nickname" => nickname}) do
+  def show(%{assigns: %{user: admin}} = conn, %{nickname: nickname}) do
    with %User{} = user <- User.get_cached_by_nickname_or_id(nickname, for: admin) do
-      conn
-      |> put_view(AccountView)
-      |> render("show.json", %{user: user})
+      render(conn, "show.json", %{user: user})
    else
      _ -> {:error, :not_found}
    end
  end
 
-  def toggle_activation(%{assigns: %{user: admin}} = conn, %{"nickname" => nickname}) do
+  def toggle_activation(%{assigns: %{user: admin}} = conn, %{nickname: nickname}) do
    user = User.get_cached_by_nickname(nickname)
 
    {:ok, updated_user} = User.set_activation(user, !user.is_active)
@@ -182,12 +199,10 @@ def toggle_activation(%{assigns: %{user: admin}} = conn, %{"nickname" => nicknam
      action: action
    })
 
-    conn
-    |> put_view(AccountView)
-    |> render("show.json", %{user: updated_user})
+    render(conn, "show.json", user: updated_user)
  end
 
-  def activate(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do
+  def activate(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do
    users = Enum.map(nicknames, &User.get_cached_by_nickname/1)
    {:ok, updated_users} = User.set_activation(users, true)
 
@@ -197,12 +212,10 @@ def activate(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do
      action: "activate"
    })
 
-    conn
-    |> put_view(AccountView)
-    |> render("index.json", %{users: Keyword.values(updated_users)})
+    render(conn, "index.json", users: Keyword.values(updated_users))
  end
 
-  def deactivate(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do
+  def deactivate(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do
    users = Enum.map(nicknames, &User.get_cached_by_nickname/1)
    {:ok, updated_users} = User.set_activation(users, false)
 
@@ -212,12 +225,10 @@ def deactivate(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) d
      action: "deactivate"
    })
 
-    conn
-    |> put_view(AccountView)
-    |> render("index.json", %{users: Keyword.values(updated_users)})
+    render(conn, "index.json", users: Keyword.values(updated_users))
  end
 
-  def approve(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do
+  def approve(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do
    users = Enum.map(nicknames, &User.get_cached_by_nickname/1)
    {:ok, updated_users} = User.approve(users)
 
@@ -227,36 +238,27 @@ def approve(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do
      action: "approve"
    })
 
-    conn
-    |> put_view(AccountView)
-    |> render("index.json", %{users: updated_users})
+    render(conn, "index.json", users: updated_users)
  end
 
-  def list(conn, params) do
+  def index(conn, params) do
    {page, page_size} = page_params(params)
-    filters = maybe_parse_filters(params["filters"])
+    filters = maybe_parse_filters(params[:filters])
 
    search_params =
      %{
-        query: params["query"],
+        query: params[:query],
        page: page,
        page_size: page_size,
-        tags: params["tags"],
-        name: params["name"],
-        email: params["email"],
-        actor_types: params["actor_types"]
+        tags: params[:tags],
+        name: params[:name],
+        email: params[:email],
+        actor_types: params[:actor_types]
      }
      |> Map.merge(filters)
 
    with {:ok, users, count} <- Search.user(search_params) do
-      json(
-        conn,
-        AccountView.render("index.json",
-          users: users,
-          count: count,
-          page_size: page_size
-        )
-      )
+      render(conn, "index.json", users: users, count: count, page_size: page_size)
    end
  end
 
@@ -274,8 +276,8 @@ defp maybe_parse_filters(filters) do
 
  defp page_params(params) do
    {
-      fetch_integer_param(params, "page", 1),
-      fetch_integer_param(params, "page_size", @users_page_size)
+      fetch_integer_param(params, :page, 1),
+      fetch_integer_param(params, :page_size, @users_page_size)
    }
  end
end
@@ -75,7 +75,7 @@ def render("show.json", %{user: user}) do
       "display_name" => display_name,
       "is_active" => user.is_active,
       "local" => user.local,
-      "roles" => User.roles(user),
+      "roles" => roles(user),
       "tags" => user.tags || [],
       "is_confirmed" => user.is_confirmed,
       "is_approved" => user.is_approved,
@@ -85,6 +85,10 @@ def render("show.json", %{user: user}) do
     }
   end
 
+  def render("created_many.json", %{users: users}) do
+    render_many(users, AccountView, "created.json", as: :user)
+  end
+
   def render("created.json", %{user: user}) do
     %{
       type: "success",
@@ -96,7 +100,11 @@ def render("created.json", %{user: user}) do
     }
   end
 
-  def render("create-error.json", %{changeset: %Ecto.Changeset{changes: changes, errors: errors}}) do
+  def render("create_errors.json", %{changesets: changesets}) do
+    render_many(changesets, AccountView, "create_error.json", as: :changeset)
+  end
+
+  def render("create_error.json", %{changeset: %Ecto.Changeset{changes: changes, errors: errors}}) do
     %{
       type: "error",
       code: 409,
@@ -140,4 +148,11 @@ defp parse_error(errors) do
 
   defp image_url(%{"url" => [%{"href" => href} | _]}), do: href
   defp image_url(_), do: nil
+
+  defp roles(%{is_moderator: is_moderator, is_admin: is_admin}) do
+    %{
+      admin: is_admin,
+      moderator: is_moderator
+    }
+  end
 end
@@ -92,9 +92,10 @@ def spec(opts \\ []) do
           "Invites",
           "MediaProxy cache",
           "OAuth application managment",
-          "Report managment",
           "Relays",
-          "Status administration"
+          "Report managment",
+          "Status administration",
+          "User administration"
         ]
       },
       %{"name" => "Applications", "tags" => ["Applications", "Push subscriptions"]},
@@ -15,6 +15,7 @@ defmodule Pleroma.Web.ApiSpec.CastAndValidate do
 
   @behaviour Plug
 
+  alias OpenApiSpex.Plug.PutApiSpec
   alias Plug.Conn
 
   @impl Plug
@@ -25,12 +26,10 @@ def init(opts) do
   end
 
   @impl Plug
-  def call(%{private: %{open_api_spex: private_data}} = conn, %{
-        operation_id: operation_id,
-        render_error: render_error
-      }) do
-    spec = private_data.spec
-    operation = private_data.operation_lookup[operation_id]
+  def call(conn, %{operation_id: operation_id, render_error: render_error}) do
+    {spec, operation_lookup} = PutApiSpec.get_spec_and_operation_lookup(conn)
+
+    operation = operation_lookup[operation_id]
 
     content_type =
       case Conn.get_req_header(conn, "content-type") do
@@ -43,8 +42,7 @@ def call(%{private: %{open_api_spex: private_data}} = conn, %{
           "application/json"
       end
 
-    private_data = Map.put(private_data, :operation_id, operation_id)
-    conn = Conn.put_private(conn, :open_api_spex, private_data)
+    conn = Conn.put_private(conn, :operation_id, operation_id)
 
     case cast_and_validate(spec, operation, conn, content_type, strict?()) do
       {:ok, conn} ->
@@ -64,25 +62,22 @@ def call(
         private: %{
           phoenix_controller: controller,
           phoenix_action: action,
-          open_api_spex: private_data
+          open_api_spex: %{spec_module: spec_module}
         }
       } = conn,
      opts
    ) do
+    {spec, operation_lookup} = PutApiSpec.get_spec_and_operation_lookup(conn)
+
    operation =
-      case private_data.operation_lookup[{controller, action}] do
+      case operation_lookup[{controller, action}] do
        nil ->
          operation_id = controller.open_api_operation(action).operationId
-          operation = private_data.operation_lookup[operation_id]
+          operation = operation_lookup[operation_id]
 
-          operation_lookup =
-            private_data.operation_lookup
-            |> Map.put({controller, action}, operation)
+          operation_lookup = Map.put(operation_lookup, {controller, action}, operation)
 
-          OpenApiSpex.Plug.Cache.adapter().put(
-            private_data.spec_module,
-            {private_data.spec, operation_lookup}
-          )
+          OpenApiSpex.Plug.Cache.adapter().put(spec_module, {spec, operation_lookup})
 
          operation
 
lib/pleroma/web/api_spec/operations/admin/user_operation.ex (new file, 389 lines)
@@ -0,0 +1,389 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.ApiSpec.Admin.UserOperation do
+  alias OpenApiSpex.Operation
+  alias OpenApiSpex.Schema
+  alias Pleroma.Web.ApiSpec.Schemas.ActorType
+  alias Pleroma.Web.ApiSpec.Schemas.ApiError
+
+  import Pleroma.Web.ApiSpec.Helpers
+
+  def open_api_operation(action) do
+    operation = String.to_existing_atom("#{action}_operation")
+    apply(__MODULE__, operation, [])
+  end
+
+  def index_operation do
+    %Operation{
+      tags: ["User administration"],
+      summary: "List users",
+      operationId: "AdminAPI.UserController.index",
+      security: [%{"oAuth" => ["admin:read:accounts"]}],
+      parameters: [
+        Operation.parameter(:filters, :query, :string, "Comma separated list of filters"),
+        Operation.parameter(:query, :query, :string, "Search users query"),
+        Operation.parameter(:name, :query, :string, "Search by display name"),
+        Operation.parameter(:email, :query, :string, "Search by email"),
+        Operation.parameter(:page, :query, :integer, "Page Number"),
+        Operation.parameter(:page_size, :query, :integer, "Number of users to return per page"),
+        Operation.parameter(
+          :actor_types,
+          :query,
+          %Schema{type: :array, items: ActorType},
+          "Filter by actor type"
+        ),
+        Operation.parameter(
+          :tags,
+          :query,
+          %Schema{type: :array, items: %Schema{type: :string}},
+          "Filter by tags"
+        )
+        | admin_api_params()
+      ],
+      responses: %{
+        200 =>
+          Operation.response(
+            "Response",
+            "application/json",
+            %Schema{
+              type: :object,
+              properties: %{
+                users: %Schema{type: :array, items: user()},
+                count: %Schema{type: :integer},
+                page_size: %Schema{type: :integer}
+              }
+            }
+          ),
+        403 => Operation.response("Forbidden", "application/json", ApiError)
+      }
+    }
+  end
+
+  def create_operation do
+    %Operation{
+      tags: ["User administration"],
+      summary: "Create a single or multiple users",
+      operationId: "AdminAPI.UserController.create",
+      security: [%{"oAuth" => ["admin:write:accounts"]}],
+      parameters: admin_api_params(),
+      requestBody:
+        request_body(
+          "Parameters",
+          %Schema{
+            description: "POST body for creating users",
+            type: :object,
+            properties: %{
+              users: %Schema{
+                type: :array,
+                items: %Schema{
+                  type: :object,
+                  properties: %{
+                    nickname: %Schema{type: :string},
+                    email: %Schema{type: :string},
+                    password: %Schema{type: :string}
+                  }
+                }
+              }
+            }
+          }
+        ),
+      responses: %{
+        200 =>
+          Operation.response("Response", "application/json", %Schema{
+            type: :array,
+            items: %Schema{
+              type: :object,
+              properties: %{
+                code: %Schema{type: :integer},
+                type: %Schema{type: :string},
+                data: %Schema{
+                  type: :object,
+                  properties: %{
+                    email: %Schema{type: :string, format: :email},
+                    nickname: %Schema{type: :string}
+                  }
+                }
+              }
+            }
+          }),
+        403 => Operation.response("Forbidden", "application/json", ApiError),
+        409 =>
+          Operation.response("Conflict", "application/json", %Schema{
+            type: :array,
+            items: %Schema{
+              type: :object,
+              properties: %{
+                code: %Schema{type: :integer},
+                error: %Schema{type: :string},
+                type: %Schema{type: :string},
+                data: %Schema{
+                  type: :object,
+                  properties: %{
+                    email: %Schema{type: :string, format: :email},
+                    nickname: %Schema{type: :string}
+                  }
+                }
+              }
+            }
+          })
+      }
+    }
+  end
+
+  def show_operation do
+    %Operation{
+      tags: ["User administration"],
+      summary: "Show user",
+      operationId: "AdminAPI.UserController.show",
+      security: [%{"oAuth" => ["admin:read:accounts"]}],
+      parameters: [
+        Operation.parameter(
+          :nickname,
+          :path,
+          :string,
+          "User nickname or ID"
+        )
+        | admin_api_params()
+      ],
+      responses: %{
+        200 => Operation.response("Response", "application/json", user()),
+        403 => Operation.response("Forbidden", "application/json", ApiError),
+        404 => Operation.response("Not Found", "application/json", ApiError)
+      }
+    }
+  end
+
+  def follow_operation do
+    %Operation{
+      tags: ["User administration"],
+      summary: "Follow",
+      operationId: "AdminAPI.UserController.follow",
+      security: [%{"oAuth" => ["admin:write:follows"]}],
+      parameters: admin_api_params(),
+      requestBody:
+        request_body(
+          "Parameters",
+          %Schema{
+            type: :object,
+            properties: %{
+              follower: %Schema{type: :string, description: "Follower nickname"},
+              followed: %Schema{type: :string, description: "Followed nickname"}
+            }
+          }
+        ),
+      responses: %{
+        200 => Operation.response("Response", "application/json", %Schema{type: :string}),
+        403 => Operation.response("Forbidden", "application/json", ApiError)
+      }
+    }
+  end
+
+  def unfollow_operation do
+    %Operation{
+      tags: ["User administration"],
+      summary: "Unfollow",
+      operationId: "AdminAPI.UserController.unfollow",
+      security: [%{"oAuth" => ["admin:write:follows"]}],
+      parameters: admin_api_params(),
+      requestBody:
+        request_body(
+          "Parameters",
+          %Schema{
+            type: :object,
+            properties: %{
+              follower: %Schema{type: :string, description: "Follower nickname"},
+              followed: %Schema{type: :string, description: "Followed nickname"}
+            }
+          }
+        ),
+      responses: %{
+        200 => Operation.response("Response", "application/json", %Schema{type: :string}),
+        403 => Operation.response("Forbidden", "application/json", ApiError)
+      }
+    }
+  end
+
+  def approve_operation do
+    %Operation{
+      tags: ["User administration"],
+      summary: "Approve multiple users",
+      operationId: "AdminAPI.UserController.approve",
+      security: [%{"oAuth" => ["admin:write:accounts"]}],
+      parameters: admin_api_params(),
+      requestBody:
+        request_body(
+          "Parameters",
+          %Schema{
+            description: "POST body for deleting multiple users",
+            type: :object,
+            properties: %{
+              nicknames: %Schema{
+                type: :array,
+                items: %Schema{type: :string}
+              }
+            }
+          }
+        ),
+      responses: %{
+        200 =>
+          Operation.response("Response", "application/json", %Schema{
+            type: :object,
+            properties: %{user: %Schema{type: :array, items: user()}}
+          }),
+        403 => Operation.response("Forbidden", "application/json", ApiError)
+      }
+    }
+  end
+
+  def toggle_activation_operation do
+    %Operation{
+      tags: ["User administration"],
+      summary: "Toggle user activation",
+      operationId: "AdminAPI.UserController.toggle_activation",
+      security: [%{"oAuth" => ["admin:write:accounts"]}],
+      parameters: [
+        Operation.parameter(:nickname, :path, :string, "User nickname")
+        | admin_api_params()
+      ],
+      responses: %{
+        200 => Operation.response("Response", "application/json", user()),
+        403 => Operation.response("Forbidden", "application/json", ApiError)
+      }
+    }
+  end
+
+  def activate_operation do
+    %Operation{
+      tags: ["User administration"],
+      summary: "Activate multiple users",
+      operationId: "AdminAPI.UserController.activate",
+      security: [%{"oAuth" => ["admin:write:accounts"]}],
+      parameters: admin_api_params(),
+      requestBody:
+        request_body(
+          "Parameters",
+          %Schema{
+            description: "POST body for deleting multiple users",
+            type: :object,
+            properties: %{
+              nicknames: %Schema{
+                type: :array,
+                items: %Schema{type: :string}
+              }
+            }
+          }
+        ),
+      responses: %{
+        200 =>
+          Operation.response("Response", "application/json", %Schema{
+            type: :object,
+            properties: %{user: %Schema{type: :array, items: user()}}
+          }),
+        403 => Operation.response("Forbidden", "application/json", ApiError)
+      }
+    }
+  end
+
+  def deactivate_operation do
+    %Operation{
+      tags: ["User administration"],
+      summary: "Deactivates multiple users",
+      operationId: "AdminAPI.UserController.deactivate",
+      security: [%{"oAuth" => ["admin:write:accounts"]}],
+      parameters: admin_api_params(),
+      requestBody:
+        request_body(
+          "Parameters",
+          %Schema{
+            description: "POST body for deleting multiple users",
+            type: :object,
+            properties: %{
+              nicknames: %Schema{
+                type: :array,
+                items: %Schema{type: :string}
+              }
+            }
+          }
+        ),
+      responses: %{
+        200 =>
+          Operation.response("Response", "application/json", %Schema{
+            type: :object,
+            properties: %{user: %Schema{type: :array, items: user()}}
+          }),
+        403 => Operation.response("Forbidden", "application/json", ApiError)
+      }
+    }
+  end
+
+  def delete_operation do
+    %Operation{
+      tags: ["User administration"],
+      summary: "Removes a single or multiple users",
+      operationId: "AdminAPI.UserController.delete",
+      security: [%{"oAuth" => ["admin:write:accounts"]}],
+      parameters: [
+        Operation.parameter(
+          :nickname,
+          :query,
+          :string,
+          "User nickname"
+        )
+        | admin_api_params()
+      ],
+      requestBody:
+        request_body(
+          "Parameters",
+          %Schema{
+            description: "POST body for deleting multiple users",
+            type: :object,
+            properties: %{
+              nicknames: %Schema{
+                type: :array,
+                items: %Schema{type: :string}
+              }
+            }
+          }
+        ),
+      responses: %{
+        200 =>
+          Operation.response("Response", "application/json", %Schema{
+            description: "Array of nicknames",
+            type: :array,
+            items: %Schema{type: :string}
+          }),
+        403 => Operation.response("Forbidden", "application/json", ApiError)
+      }
+    }
+  end
+
+  defp user do
+    %Schema{
+      type: :object,
+      properties: %{
+        id: %Schema{type: :string},
+        email: %Schema{type: :string, format: :email},
+        avatar: %Schema{type: :string, format: :uri},
+        nickname: %Schema{type: :string},
+        display_name: %Schema{type: :string},
+        is_active: %Schema{type: :boolean},
+        local: %Schema{type: :boolean},
+        roles: %Schema{
+          type: :object,
+          properties: %{
+            admin: %Schema{type: :boolean},
+            moderator: %Schema{type: :boolean}
+          }
+        },
+        tags: %Schema{type: :array, items: %Schema{type: :string}},
+        is_confirmed: %Schema{type: :boolean},
+        is_approved: %Schema{type: :boolean},
+        url: %Schema{type: :string, format: :uri},
+        registration_reason: %Schema{type: :string, nullable: true},
+        actor_type: %Schema{type: :string}
+      }
+    }
+  end
+end
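A small usage sketch (not part of the new file): each *_operation/0 function above returns a plain OpenApiSpex.Operation struct, so the spec wired up through the controller's defdelegate open_api_operation/1 can be inspected directly; the match below only assumes the operationId defined in index_operation/0.

    alias Pleroma.Web.ApiSpec.Admin.UserOperation

    %OpenApiSpex.Operation{operationId: "AdminAPI.UserController.index"} =
      UserOperation.open_api_operation(:index)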
@ -59,7 +59,7 @@ def create_operation do
|
||||||
Operation.response(
|
Operation.response(
|
||||||
"Status. When `scheduled_at` is present, ScheduledStatus is returned instead",
|
"Status. When `scheduled_at` is present, ScheduledStatus is returned instead",
|
||||||
"application/json",
|
"application/json",
|
||||||
%Schema{oneOf: [Status, ScheduledStatus]}
|
%Schema{anyOf: [Status, ScheduledStatus]}
|
||||||
),
|
),
|
||||||
422 => Operation.response("Bad Request / MRF Rejection", "application/json", ApiError)
|
422 => Operation.response("Bad Request / MRF Rejection", "application/json", ApiError)
|
||||||
}
|
}
|
||||||
|
@@ -182,7 +182,34 @@ def pin_operation do
       parameters: [id_param()],
       responses: %{
         200 => status_response(),
-        400 => Operation.response("Error", "application/json", ApiError)
+        400 =>
+          Operation.response("Bad Request", "application/json", %Schema{
+            allOf: [ApiError],
+            title: "Unprocessable Entity",
+            example: %{
+              "error" => "You have already pinned the maximum number of statuses"
+            }
+          }),
+        404 =>
+          Operation.response("Not found", "application/json", %Schema{
+            allOf: [ApiError],
+            title: "Unprocessable Entity",
+            example: %{
+              "error" => "Record not found"
+            }
+          }),
+        422 =>
+          Operation.response(
+            "Unprocessable Entity",
+            "application/json",
+            %Schema{
+              allOf: [ApiError],
+              title: "Unprocessable Entity",
+              example: %{
+                "error" => "Someone else's status cannot be pinned"
+              }
+            }
+          )
       }
     }
   end
@@ -197,7 +224,22 @@ def unpin_operation do
       parameters: [id_param()],
       responses: %{
         200 => status_response(),
-        400 => Operation.response("Error", "application/json", ApiError)
+        400 =>
+          Operation.response("Bad Request", "application/json", %Schema{
+            allOf: [ApiError],
+            title: "Unprocessable Entity",
+            example: %{
+              "error" => "You have already pinned the maximum number of statuses"
+            }
+          }),
+        404 =>
+          Operation.response("Not found", "application/json", %Schema{
+            allOf: [ApiError],
+            title: "Unprocessable Entity",
+            example: %{
+              "error" => "Record not found"
+            }
+          })
       }
     }
   end
@@ -3,6 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Web.ApiSpec.Schemas.BooleanLike do
+  alias OpenApiSpex.Cast
   alias OpenApiSpex.Schema

   require OpenApiSpex
@@ -27,10 +28,13 @@ defmodule Pleroma.Web.ApiSpec.Schemas.BooleanLike do
       %Schema{type: :boolean},
       %Schema{type: :string},
       %Schema{type: :integer}
-    ]
+    ],
+    "x-validate": __MODULE__
   })

-  def after_cast(value, _schmea) do
-    {:ok, Pleroma.Web.ControllerHelper.truthy_param?(value)}
+  def cast(%Cast{value: value} = context) do
+    context
+    |> Map.put(:value, Pleroma.Web.ControllerHelper.truthy_param?(value))
+    |> Cast.ok()
   end
 end
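A rough usage sketch of the new cast/1 callback (not from the repo): OpenApiSpex's "x-validate" extension is expected to hand this module a %OpenApiSpex.Cast{} context, and Cast.ok/1 is assumed to unwrap to {:ok, value}, so boolean-ish strings arriving in params come out as real booleans.

alias OpenApiSpex.Cast
alias Pleroma.Web.ApiSpec.Schemas.BooleanLike

# Assumes truthy_param?/1 keeps treating "true"/"1"/true as truthy.
BooleanLike.cast(%Cast{value: "true"})
# expected: {:ok, true}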
@@ -194,6 +194,13 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do
         parent_visible: %Schema{
           type: :boolean,
           description: "`true` if the parent post is visible to the user"
+        },
+        pinned_at: %Schema{
+          type: :string,
+          format: "date-time",
+          nullable: true,
+          description:
+            "A datetime (ISO 8601) that states when the post was pinned or `null` if the post is not pinned"
         }
       }
     },
@@ -228,17 +228,7 @@ def favorite_helper(user, id) do
       {:find_object, _} ->
         {:error, :not_found}

-      {:common_pipeline,
-       {
-         :error,
-         {
-           :validate_object,
-           {
-             :error,
-             changeset
-           }
-         }
-       }} = e ->
+      {:common_pipeline, {:error, {:validate, {:error, changeset}}}} = e ->
         if {:object, {"already liked by this actor", []}} in changeset.errors do
           {:ok, :already_liked}
         else
@@ -411,29 +401,58 @@ def post(user, %{status: _} = data) do
     end
   end

-  def pin(id, %{ap_id: user_ap_id} = user) do
-    with %Activity{
-           actor: ^user_ap_id,
-           data: %{"type" => "Create"},
-           object: %Object{data: %{"type" => object_type}}
-         } = activity <- Activity.get_by_id_with_object(id),
-         true <- object_type in ["Note", "Article", "Question"],
-         true <- Visibility.is_public?(activity),
-         {:ok, _user} <- User.add_pinnned_activity(user, activity) do
+  @spec pin(String.t(), User.t()) :: {:ok, Activity.t()} | {:error, term()}
+  def pin(id, %User{} = user) do
+    with %Activity{} = activity <- create_activity_by_id(id),
+         true <- activity_belongs_to_actor(activity, user.ap_id),
+         true <- object_type_is_allowed_for_pin(activity.object),
+         true <- activity_is_public(activity),
+         {:ok, pin_data, _} <- Builder.pin(user, activity.object),
+         {:ok, _pin, _} <-
+           Pipeline.common_pipeline(pin_data,
+             local: true,
+             activity_id: id
+           ) do
       {:ok, activity}
     else
-      {:error, %{errors: [pinned_activities: {err, _}]}} -> {:error, err}
-      _ -> {:error, dgettext("errors", "Could not pin")}
+      {:error, {:side_effects, error}} -> error
+      error -> error
     end
   end

+  defp create_activity_by_id(id) do
+    with nil <- Activity.create_by_id_with_object(id) do
+      {:error, :not_found}
+    end
+  end
+
+  defp activity_belongs_to_actor(%{actor: actor}, actor), do: true
+  defp activity_belongs_to_actor(_, _), do: {:error, :ownership_error}
+
+  defp object_type_is_allowed_for_pin(%{data: %{"type" => type}}) do
+    with false <- type in ["Note", "Article", "Question"] do
+      {:error, :not_allowed}
+    end
+  end
+
+  defp activity_is_public(activity) do
+    with false <- Visibility.is_public?(activity) do
+      {:error, :visibility_error}
+    end
+  end
+
+  @spec unpin(String.t(), User.t()) :: {:ok, User.t()} | {:error, term()}
   def unpin(id, user) do
-    with %Activity{data: %{"type" => "Create"}} = activity <- Activity.get_by_id(id),
-         {:ok, _user} <- User.remove_pinnned_activity(user, activity) do
+    with %Activity{} = activity <- create_activity_by_id(id),
+         {:ok, unpin_data, _} <- Builder.unpin(user, activity.object),
+         {:ok, _unpin, _} <-
+           Pipeline.common_pipeline(unpin_data,
+             local: true,
+             activity_id: activity.id,
+             expires_at: activity.data["expires_at"],
+             featured_address: user.featured_address
+           ) do
       {:ok, activity}
-    else
-      {:error, %{errors: [pinned_activities: {err, _}]}} -> {:error, err}
-      _ -> {:error, dgettext("errors", "Could not unpin")}
     end
   end

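The refactored pin/2 leans on `with`: each private guard returns `true` on success or a tagged `{:error, _}` that falls straight out of the chain unchanged. A hypothetical helper (not in the repo) showing how a caller can branch on those atoms; the messages mirror the StatusController hunk further down.

defmodule MyApp.PinErrorMessage do
  # Hypothetical mapping from CommonAPI.pin/2 results to user-facing text.
  def describe({:ok, _activity}), do: :pinned
  def describe({:error, :ownership_error}), do: "Someone else's status cannot be pinned"
  def describe({:error, :visibility_error}), do: "Non-public status cannot be pinned"
  def describe({:error, :not_found}), do: "Record not found"
  def describe({:error, _other}), do: "Could not pin"
end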
@@ -5,6 +5,7 @@
 defmodule Pleroma.Web.CommonAPI.ActivityDraft do
   alias Pleroma.Activity
   alias Pleroma.Conversation.Participation
+  alias Pleroma.Object
   alias Pleroma.Web.CommonAPI
   alias Pleroma.Web.CommonAPI.Utils

@@ -179,13 +180,39 @@ defp context(draft) do
   end

   defp sensitive(draft) do
-    sensitive = draft.params[:sensitive] || Enum.member?(draft.tags, {"#nsfw", "nsfw"})
+    sensitive = draft.params[:sensitive]
     %__MODULE__{draft | sensitive: sensitive}
   end

   defp object(draft) do
     emoji = Map.merge(Pleroma.Emoji.Formatter.get_emoji_map(draft.full_payload), draft.emoji)
+
+    # Sometimes people create posts with subject containing emoji,
+    # since subjects are usually copied this will result in a broken
+    # subject when someone replies from an instance that does not have
+    # the emoji or has it under different shortcode. This is an attempt
+    # to mitigate this by copying emoji from inReplyTo if they are present
+    # in the subject.
+    summary_emoji =
+      with %Activity{} <- draft.in_reply_to,
+           %Object{data: %{"tag" => [_ | _] = tag}} <- Object.normalize(draft.in_reply_to) do
+        Enum.reduce(tag, %{}, fn
+          %{"type" => "Emoji", "name" => name, "icon" => %{"url" => url}}, acc ->
+            if String.contains?(draft.summary, name) do
+              Map.put(acc, name, url)
+            else
+              acc
+            end
+
+          _, acc ->
+            acc
+        end)
+      else
+        _ -> %{}
+      end
+
+    emoji = Map.merge(emoji, summary_emoji)
+
     object =
       Utils.make_note_data(draft)
       |> Map.put("emoji", emoji)
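A worked example of the summary_emoji reduction above, with made-up tag data: only Emoji tags whose shortcode actually appears in the reply's subject are copied into the emoji map.

tag = [
  %{"type" => "Emoji", "name" => ":blobcat:", "icon" => %{"url" => "https://example.tld/blobcat.png"}},
  %{"type" => "Hashtag", "name" => "#cats"}
]

summary = "re: :blobcat: pictures"

Enum.reduce(tag, %{}, fn
  %{"type" => "Emoji", "name" => name, "icon" => %{"url" => url}}, acc ->
    if String.contains?(summary, name), do: Map.put(acc, name, url), else: acc

  _, acc ->
    acc
end)
# => %{":blobcat:" => "https://example.tld/blobcat.png"}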
@@ -217,7 +217,6 @@ def make_content_html(%ActivityDraft{} = draft) do
     draft.status
     |> format_input(content_type, options)
     |> maybe_add_attachments(draft.attachments, attachment_links)
-    |> maybe_add_nsfw_tag(draft.params)
   end

   defp get_content_type(content_type) do
@@ -228,13 +227,6 @@ defp get_content_type(content_type) do
     end
   end

-  defp maybe_add_nsfw_tag({text, mentions, tags}, %{"sensitive" => sensitive})
-       when sensitive in [true, "True", "true", "1"] do
-    {text, mentions, [{"#nsfw", "nsfw"} | tags]}
-  end
-
-  defp maybe_add_nsfw_tag(data, _), do: data
-
   def make_context(_, %Participation{} = participation) do
     Repo.preload(participation, :conversation).conversation.ap_id
   end
@@ -294,7 +286,7 @@ def format_input(text, "text/html", options) do
   def format_input(text, "text/markdown", options) do
     text
     |> Formatter.mentions_escape(options)
-    |> Earmark.as_html!(%Earmark.Options{renderer: Pleroma.EarmarkRenderer})
+    |> Formatter.markdown_to_html()
     |> Formatter.linkify(options)
     |> Formatter.html_escape("text/html")
   end
@@ -32,6 +32,7 @@ def prepare_activity(activity, opts \\ []) do

     %{
       activity: activity,
+      object: object,
       data: Map.get(object, :data),
       actor: actor
     }
@@ -30,6 +30,12 @@ def call(conn, {:error, error_message}) do
     |> json(%{error: error_message})
   end

+  def call(conn, {:error, status, message}) do
+    conn
+    |> put_status(status)
+    |> json(%{error: message})
+  end
+
   def call(conn, _) do
     conn
     |> put_status(:internal_server_error)
@@ -5,7 +5,7 @@
 defmodule Pleroma.Web.MastodonAPI.InstanceController do
   use Pleroma.Web, :controller

-  plug(OpenApiSpex.Plug.CastAndValidate)
+  plug(Pleroma.Web.ApiSpec.CastAndValidate)

   plug(
     :skip_plug,
@@ -21,7 +21,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do
   alias Pleroma.Web.CommonAPI
   alias Pleroma.Web.MastodonAPI.AccountView
   alias Pleroma.Web.MastodonAPI.ScheduledActivityView
-  # alias Pleroma.Web.OAuth.Token
+  alias Pleroma.Web.OAuth.Token
   alias Pleroma.Web.Plugs.OAuthScopesPlug
   alias Pleroma.Web.Plugs.RateLimiter

@@ -260,6 +260,18 @@ def unfavourite(%{assigns: %{user: user}} = conn, %{id: activity_id}) do
   def pin(%{assigns: %{user: user}} = conn, %{id: ap_id_or_id}) do
     with {:ok, activity} <- CommonAPI.pin(ap_id_or_id, user) do
       try_render(conn, "show.json", activity: activity, for: user, as: :activity)
+    else
+      {:error, :pinned_statuses_limit_reached} ->
+        {:error, "You have already pinned the maximum number of statuses"}
+
+      {:error, :ownership_error} ->
+        {:error, :unprocessable_entity, "Someone else's status cannot be pinned"}
+
+      {:error, :visibility_error} ->
+        {:error, :unprocessable_entity, "Non-public status cannot be pinned"}
+
+      error ->
+        error
     end
   end

@@ -420,16 +432,14 @@ def bookmarks(%{assigns: %{user: user}} = conn, params) do
     )
   end

-  # Deactivated for 2.3.0
-  # defp put_application(params,
-  #   %{assigns: %{token: %Token{user: %User{} = user} = token}} = _conn) do
-  #   if user.disclose_client do
-  #     %{client_name: client_name, website: website} = Repo.preload(token, :app).app
-  #     Map.put(params, :generator, %{type: "Application", name: client_name, url: website})
-  #   else
-  #     Map.put(params, :generator, nil)
-  #   end
-  # end
+  defp put_application(params, %{assigns: %{token: %Token{user: %User{} = user} = token}} = _conn) do
+    if user.disclose_client do
+      %{client_name: client_name, website: website} = Repo.preload(token, :app).app
+      Map.put(params, :generator, %{type: "Application", name: client_name, url: website})
+    else
+      Map.put(params, :generator, nil)
+    end
+  end

   defp put_application(params, _), do: Map.put(params, :generator, nil)
 end
@@ -133,31 +133,22 @@ defp fail_on_bad_auth(conn) do
   end

   defp hashtag_fetching(params, user, local_only) do
-    tags =
+    # Note: not sanitizing tag options at this stage (may be mix-cased, have duplicates etc.)
+    tags_any =
       [params[:tag], params[:any]]
       |> List.flatten()
-      |> Enum.uniq()
-      |> Enum.reject(&is_nil/1)
-      |> Enum.map(&String.downcase/1)
+      |> Enum.filter(& &1)

-    tag_all =
-      params
-      |> Map.get(:all, [])
-      |> Enum.map(&String.downcase/1)
-
-    tag_reject =
-      params
-      |> Map.get(:none, [])
-      |> Enum.map(&String.downcase/1)
-
-    _activities =
-      params
+    tag_all = Map.get(params, :all, [])
+    tag_reject = Map.get(params, :none, [])
+
+    params
     |> Map.put(:type, "Create")
     |> Map.put(:local_only, local_only)
     |> Map.put(:blocking_user, user)
     |> Map.put(:muting_user, user)
     |> Map.put(:user, user)
-    |> Map.put(:tag, tags)
+    |> Map.put(:tag, tags_any)
     |> Map.put(:tag_all, tag_all)
    |> Map.put(:tag_reject, tag_reject)
     |> ActivityPub.fetch_public_activities()
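A worked example (made-up params) of the simplified tag handling above: the :tag and :any options are just flattened and stripped of nils now, with no downcasing or de-duplication at this stage.

params = %{tag: "elixir", any: ["pleroma", nil]}

[params[:tag], params[:any]]
|> List.flatten()
|> Enum.filter(& &1)
# => ["elixir", "pleroma"]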
@@ -23,7 +23,8 @@ def render("show.json", _) do
       streaming_api: Pleroma.Web.Endpoint.websocket_url()
     },
     stats: Pleroma.Stats.get_stats(),
-    thumbnail: Pleroma.Web.base_url() <> Keyword.get(instance, :instance_thumbnail),
+    thumbnail:
+      URI.merge(Pleroma.Web.base_url(), Keyword.get(instance, :instance_thumbnail)) |> to_string,
     languages: ["en"],
     registrations: Keyword.get(instance, :registrations_open),
     approval_required: Keyword.get(instance, :account_approval_required),
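A worked example with a hypothetical base URL of why the thumbnail now goes through URI.merge/2 rather than string concatenation: merging normalizes the path instead of blindly gluing strings together.

URI.merge("https://example.tld", "/instance/thumbnail.jpeg") |> to_string()
# => "https://example.tld/instance/thumbnail.jpeg"

URI.merge("https://example.tld/", "/instance/thumbnail.jpeg") |> to_string()
# => "https://example.tld/instance/thumbnail.jpeg" (no doubled slash)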
@@ -9,6 +9,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do

   alias Pleroma.Activity
   alias Pleroma.HTML
+  alias Pleroma.Maps
   alias Pleroma.Object
   alias Pleroma.Repo
   alias Pleroma.User
@@ -124,16 +125,16 @@ def render(
       ) do
     user = CommonAPI.get_user(activity.data["actor"])
     created_at = Utils.to_masto_date(activity.data["published"])
-    activity_object = Object.normalize(activity, fetch: false)
+    object = Object.normalize(activity, fetch: false)

     reblogged_parent_activity =
       if opts[:parent_activities] do
         Activity.Queries.find_by_object_ap_id(
           opts[:parent_activities],
-          activity_object.data["id"]
+          object.data["id"]
         )
       else
-        Activity.create_by_object_ap_id(activity_object.data["id"])
+        Activity.create_by_object_ap_id(object.data["id"])
         |> Activity.with_preloaded_bookmark(opts[:for])
         |> Activity.with_set_thread_muted_field(opts[:for])
         |> Repo.one()
@@ -142,7 +143,7 @@ def render(
     reblog_rendering_opts = Map.put(opts, :activity, reblogged_parent_activity)
     reblogged = render("show.json", reblog_rendering_opts)

-    favorited = opts[:for] && opts[:for].ap_id in (activity_object.data["likes"] || [])
+    favorited = opts[:for] && opts[:for].ap_id in (object.data["likes"] || [])

     bookmarked = Activity.get_bookmark(reblogged_parent_activity, opts[:for]) != nil

@@ -152,10 +153,12 @@ def render(
       |> Enum.filter(& &1)
       |> Enum.map(fn user -> AccountView.render("mention.json", %{user: user}) end)

+    {pinned?, pinned_at} = pin_data(object, user)
+
     %{
       id: to_string(activity.id),
-      uri: activity_object.data["id"],
-      url: activity_object.data["id"],
+      uri: object.data["id"],
+      url: object.data["id"],
       account:
         AccountView.render("show.json", %{
           user: user,
@@ -173,18 +176,19 @@ def render(
       favourited: present?(favorited),
       bookmarked: present?(bookmarked),
       muted: false,
-      pinned: pinned?(activity, user),
+      pinned: pinned?,
       sensitive: false,
       spoiler_text: "",
       visibility: get_visibility(activity),
       media_attachments: reblogged[:media_attachments] || [],
       mentions: mentions,
       tags: reblogged[:tags] || [],
-      application: build_application(activity_object.data["generator"]),
+      application: build_application(object.data["generator"]),
       language: nil,
       emojis: [],
       pleroma: %{
-        local: activity.local
+        local: activity.local,
+        pinned_at: pinned_at
       }
     }
   end
@@ -198,8 +202,10 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
     like_count = object.data["like_count"] || 0
     announcement_count = object.data["announcement_count"] || 0

-    tags = object.data["tag"] || []
-    sensitive = object.data["sensitive"] || Enum.member?(tags, "nsfw")
+    hashtags = Object.hashtags(object)
+    sensitive = object.data["sensitive"] || Enum.member?(hashtags, "nsfw")
+
+    tags = Object.tags(object)

     tag_mentions =
       tags
@@ -314,6 +320,8 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
       fn for_user, user -> User.mutes?(for_user, user) end
     )

+    {pinned?, pinned_at} = pin_data(object, user)
+
     %{
       id: to_string(activity.id),
       uri: object.data["id"],
@@ -337,7 +345,7 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
       favourited: present?(favorited),
       bookmarked: present?(bookmarked),
       muted: muted,
-      pinned: pinned?(activity, user),
+      pinned: pinned?,
       sensitive: sensitive,
       spoiler_text: summary,
       visibility: get_visibility(object),
@@ -358,7 +366,8 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
         direct_conversation_id: direct_conversation_id,
         thread_muted: thread_muted?,
         emoji_reactions: emoji_reactions,
-        parent_visible: visible_for_user?(reply_to, opts[:for])
+        parent_visible: visible_for_user?(reply_to, opts[:for]),
+        pinned_at: pinned_at
       }
     }
   end
@@ -379,12 +388,15 @@ def render("card.json", %{rich_media: rich_media, page_url: page_url}) do

     page_url = page_url_data |> to_string

-    image_url =
+    image_url_data =
       if is_binary(rich_media["image"]) do
-        URI.merge(page_url_data, URI.parse(rich_media["image"]))
-        |> to_string
+        URI.parse(rich_media["image"])
+      else
+        nil
       end

+    image_url = build_image_url(image_url_data, page_url_data)
+
     %{
       type: "link",
       provider_name: page_url_data.host,
@@ -406,6 +418,7 @@ def render("attachment.json", %{attachment: attachment}) do
     media_type = attachment_url["mediaType"] || attachment_url["mimeType"] || "image"
     href = attachment_url["href"] |> MediaProxy.url()
     href_preview = attachment_url["href"] |> MediaProxy.preview_url()
+    meta = render("attachment_meta.json", %{attachment: attachment})

     type =
       cond do
@@ -428,8 +441,24 @@ def render("attachment.json", %{attachment: attachment}) do
       pleroma: %{mime_type: media_type},
       blurhash: attachment["blurhash"]
     }
+    |> Maps.put_if_present(:meta, meta)
   end

+  def render("attachment_meta.json", %{
+        attachment: %{"url" => [%{"width" => width, "height" => height} | _]}
+      })
+      when is_integer(width) and is_integer(height) do
+    %{
+      original: %{
+        width: width,
+        height: height,
+        aspect: width / height
+      }
+    }
+  end
+
+  def render("attachment_meta.json", _), do: nil
+
   def render("context.json", %{activity: activity, activities: activities, user: user}) do
     %{ancestors: ancestors, descendants: descendants} =
       activities
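A worked example (made-up attachment data) of the meta the new "attachment_meta.json" clauses produce for a 1280x720 upload:

attachment = %{"url" => [%{"width" => 1280, "height" => 720, "href" => "https://example.tld/a.png"}]}
%{"url" => [%{"width" => width, "height" => height} | _]} = attachment
%{original: %{width: width, height: height, aspect: width / height}}
# => %{original: %{width: 1280, height: 720, aspect: 1.7777777777777777}}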
@@ -524,8 +553,13 @@ defp present?(nil), do: false
   defp present?(false), do: false
   defp present?(_), do: true

-  defp pinned?(%Activity{id: id}, %User{pinned_activities: pinned_activities}),
-    do: id in pinned_activities
+  defp pin_data(%Object{data: %{"id" => object_id}}, %User{pinned_objects: pinned_objects}) do
+    if pinned_at = pinned_objects[object_id] do
+      {true, Utils.to_masto_date(pinned_at)}
+    else
+      {false, nil}
+    end
+  end

   defp build_emoji_map(emoji, users, current_user) do
     %{
@@ -536,6 +570,27 @@ defp build_emoji_map(emoji, users, current_user) do
   end

   @spec build_application(map() | nil) :: map() | nil
-  defp build_application(%{type: _type, name: name, url: url}), do: %{name: name, website: url}
+  defp build_application(%{"type" => _type, "name" => name, "url" => url}),
+    do: %{name: name, website: url}
+
   defp build_application(_), do: nil

+  # Workaround for Elixir issue #10771
+  # Avoid applying URI.merge unless necessary
+  # TODO: revert to always attempting URI.merge(image_url_data, page_url_data)
+  # when Elixir 1.12 is the minimum supported version
+  @spec build_image_url(struct() | nil, struct()) :: String.t() | nil
+  defp build_image_url(
+         %URI{scheme: image_scheme, host: image_host} = image_url_data,
+         %URI{} = _page_url_data
+       )
+       when not is_nil(image_scheme) and not is_nil(image_host) do
+    image_url_data |> to_string
+  end
+
+  defp build_image_url(%URI{} = image_url_data, %URI{} = page_url_data) do
+    URI.merge(page_url_data, image_url_data) |> to_string
+  end
+
+  defp build_image_url(_, _), do: nil
 end
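A worked example (made-up URLs) of the card-image behaviour above: an image URL that is already absolute is passed through untouched, while a relative one is merged against the page URL.

page_url_data = URI.parse("https://blog.example.tld/post/1")

URI.merge(page_url_data, URI.parse("/media/cover.png")) |> to_string()
# => "https://blog.example.tld/media/cover.png"   (relative image URL gets merged)

URI.parse("https://cdn.example.tld/cover.png") |> to_string()
# => "https://cdn.example.tld/cover.png"          (absolute image URL stays as-is)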
@@ -121,6 +121,11 @@ def decode_url(sig, url) do
     end
   end

+  def decode_url(encoded) do
+    [_, "proxy", sig, base64 | _] = URI.parse(encoded).path |> String.split("/")
+    decode_url(sig, base64)
+  end
+
   defp signed_url(url) do
     :crypto.hmac(:sha, Config.get([Web.Endpoint, :secret_key_base]), url)
   end
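A worked example (fake signature and payload) of how the new decode_url/1 pulls the signature and base64 segment back out of a full proxy URL before delegating to decode_url/2:

encoded = "https://media.example.tld/proxy/SIGNATURE/BASE64URL/cat.png"

URI.parse(encoded).path |> String.split("/")
# => ["", "proxy", "SIGNATURE", "BASE64URL", "cat.png"]
# so the [_, "proxy", sig, base64 | _] match binds sig = "SIGNATURE", base64 = "BASE64URL"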
@@ -10,6 +10,7 @@ defmodule Pleroma.Web.OAuth.OAuthView do

   def render("token.json", %{token: token} = opts) do
     response = %{
+      id: token.id,
       token_type: "Bearer",
       access_token: token.token,
       refresh_token: token.refresh_token,
@@ -10,7 +10,7 @@ defmodule Pleroma.Web.PleromaAPI.BackupController do

   action_fallback(Pleroma.Web.MastodonAPI.FallbackController)
   plug(OAuthScopesPlug, %{scopes: ["read:accounts"]} when action in [:index, :create])
-  plug(OpenApiSpex.Plug.CastAndValidate, render_error: Pleroma.Web.ApiSpec.RenderError)
+  plug(Pleroma.Web.ApiSpec.CastAndValidate)

   defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.PleromaBackupOperation

Some files were not shown because too many files have changed in this diff.