diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index c7e8291d8..78e715d47 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -8,7 +8,9 @@ variables: &global_variables MIX_ENV: test cache: &global_cache_policy - key: ${CI_COMMIT_REF_SLUG} + key: + files: + - mix.lock paths: - deps - _build @@ -22,16 +24,20 @@ stages: - docker before_script: + - rm -rf _build/*/lib/pleroma - apt-get update && apt-get install -y cmake - mix local.hex --force - mix local.rebar --force + - mix deps.get - apt-get -qq update - apt-get install -y libmagic-dev +after_script: + - rm -rf _build/*/lib/pleroma + build: stage: build script: - - mix deps.get - mix compile --force spec-build: @@ -52,7 +58,6 @@ benchmark: alias: postgres command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"] script: - - mix deps.get - mix ecto.create - mix ecto.migrate - mix pleroma.load_testing @@ -70,7 +75,6 @@ unit-testing: command: ["postgres", "-c", "fsync=off", "-c", "synchronous_commit=off", "-c", "full_page_writes=off"] script: - apt-get update && apt-get install -y libimage-exiftool-perl ffmpeg - - mix deps.get - mix ecto.create - mix ecto.migrate - mix coveralls --preload-modules @@ -104,7 +108,6 @@ unit-testing-rum: RUM_ENABLED: "true" script: - apt-get update && apt-get install -y libimage-exiftool-perl ffmpeg - - mix deps.get - mix ecto.create - mix ecto.migrate - "mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/" @@ -120,7 +123,6 @@ analysis: stage: test cache: *testing_cache_policy script: - - mix deps.get - mix credo --strict --only=warnings,todo,fixme,consistency,readability docs-deploy: @@ -175,8 +177,8 @@ spec-deploy: - apk add curl script: - curl -X POST -F"token=$API_DOCS_PIPELINE_TRIGGER" -F'ref=master' -F"variables[BRANCH]=$CI_COMMIT_REF_NAME" -F"variables[JOB_REF]=$CI_JOB_ID" https://git.pleroma.social/api/v4/projects/1130/trigger/pipeline - - + + stop_review_app: image: alpine:3.9 stage: deploy @@ -235,7 +237,7 @@ amd64-musl: stage: release artifacts: *release-artifacts only: *release-only - image: elixir:1.10.3-alpine + image: elixir:1.10.3-alpine cache: *release-cache variables: *release-variables before_script: &before-release-musl @@ -393,4 +395,4 @@ docker-adhoc: tags: - dind only: - - /^build-docker/.*$/@pleroma/pleroma \ No newline at end of file + - /^build-docker/.*$/@pleroma/pleroma diff --git a/CHANGELOG.md b/CHANGELOG.md index a55ebbf8a..5bb4b1e73 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,32 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). +## Unreleased + +### Changed + +- The `application` metadata returned with statuses is no longer hardcoded. Apps that want to display these details will now have valid data for new posts after this change. +- HTTPSecurityPlug now sends a response header to opt out of Google's FLoC (Federated Learning of Cohorts) targeted advertising. + +### Added + +- MRF (`FollowBotPolicy`): New MRF Policy which makes a designated local Bot account attempt to follow all users in public Notes received by your instance. Users who require approving follower requests or have #nobot in their profile are excluded. +- Return OAuth token `id` (primary key) in POST `/oauth/token`. + +### Fixed +- Don't crash so hard when email settings are invalid. 
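The FLoC opt-out noted in the changelog above is conventionally expressed as a `Permissions-Policy` response header. A minimal, hypothetical Plug sketch of the idea (this is not the actual `HTTPSecurityPlug` code; the header value is the widely used FLoC opt-out convention):

```elixir
defmodule MyInstance.FlocOptOutPlug do
  @moduledoc "Illustrative only: adds the FLoC opt-out header to every response."
  import Plug.Conn

  def init(opts), do: opts

  def call(conn, _opts) do
    # An empty interest-cohort allowlist asks the browser not to include
    # visits to this site in FLoC cohort calculations.
    put_resp_header(conn, "permissions-policy", "interest-cohort=()")
  end
end
```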
+ +## Unreleased (Patch) + +### Fixed + +- Try to save exported ConfigDB settings (migrate_from_db) in the system temp directory if default location is not writable. +- Uploading custom instance thumbnail via AdminAPI/AdminFE generated invalid URL to the image +- Applying ConcurrentLimiter settings via AdminAPI +- User login failures if their `notification_settings` were in a NULL state. +- Mix task `pleroma.user delete_activities` query transaction timeout is now :infinity +- Fixed some Markdown issues, including trailing slash in links. + ## [2.3.0] - 2020-03-01 ### Security @@ -18,6 +44,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - **Breaking**: Changed `mix pleroma.user toggle_confirmed` to `mix pleroma.user confirm` - **Breaking**: Changed `mix pleroma.user toggle_activated` to `mix pleroma.user activate/deactivate` +- **Breaking:** NSFW hashtag is no longer added on sensitive posts - Polls now always return a `voters_count`, even if they are single-choice. - Admin Emails: The ap id is used as the user link in emails now. - Improved registration workflow for email confirmation and account approval modes. @@ -44,6 +71,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). - Pleroma API: Reroute `/api/pleroma/*` to `/api/v1/pleroma/*` +- Improved hashtag timeline performance (requires a background migration). ### Added @@ -67,6 +95,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
API Changes - Admin API: (`GET /api/pleroma/admin/users`) filter users by `unconfirmed` status and `actor_type`. +- Admin API: OpenAPI spec for the user-related operations - Pleroma API: `GET /api/v2/pleroma/chats` added. It is exactly like `GET /api/v1/pleroma/chats` except supports pagination. - Pleroma API: Add `idempotency_key` to the chat message entity that can be used for optimistic message sending. - Pleroma API: (`GET /api/v1/pleroma/federation_status`) Add a way to get a list of unreachable instances. @@ -498,7 +527,6 @@ switched to a new configuration mechanism, however it was not officially removed - Static-FE: Fix remote posts not being sanitized ### Fixed -======= - Rate limiter crashes when there is no explicitly specified ip in the config - 500 errors when no `Accept` header is present if Static-FE is enabled - Instance panel not being updated immediately due to wrong `Cache-Control` headers diff --git a/config/config.exs b/config/config.exs index 66aee3264..4381068ac 100644 --- a/config/config.exs +++ b/config/config.exs @@ -391,6 +391,11 @@ federated_timeline_removal: [], replace: [] +config :pleroma, :mrf_hashtag, + sensitive: ["nsfw"], + reject: [], + federated_timeline_removal: [] + config :pleroma, :mrf_subchain, match_actor: %{} config :pleroma, :mrf_activity_expiration, days: 365 @@ -404,6 +409,8 @@ threshold: 604_800, actions: [:delist, :strip_followers] +config :pleroma, :mrf_follow_bot, follower_nickname: nil + config :pleroma, :rich_media, enabled: true, ignore_hosts: [], @@ -654,6 +661,10 @@ config :pleroma, :database, rum_enabled: false +config :pleroma, :features, improved_hashtag_timeline: :auto + +config :pleroma, :populate_hashtags_table, fault_rate_allowance: 0.01 + config :pleroma, :env, Mix.env() config :http_signatures, diff --git a/config/description.exs b/config/description.exs index d9b15e684..bb1f43305 100644 --- a/config/description.exs +++ b/config/description.exs @@ -459,6 +459,42 @@ } ] }, + %{ + group: :pleroma, + key: :features, + type: :group, + description: "Customizable features", + children: [ + %{ + key: :improved_hashtag_timeline, + type: {:dropdown, :atom}, + description: + "Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from `hashtags` table for hashtags timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).", + suggestions: [:auto, :enabled, :disabled] + } + ] + }, + %{ + group: :pleroma, + key: :populate_hashtags_table, + type: :group, + description: "`populate_hashtags_table` background migration settings", + children: [ + %{ + key: :fault_rate_allowance, + type: :float, + description: + "Max accepted rate of objects that failed in the migration. Any value from 0.0 which tolerates no errors to 1.0 which will enable the feature even if hashtags transfer failed for all records.", + suggestions: [0.01] + }, + %{ + key: :sleep_interval_ms, + type: :integer, + description: + "Sleep interval between each chunk of processed records in order to decrease the load on the system (defaults to 0 and should be keep default on most instances)." 
+      }
+    ]
+  },
   %{
     group: :pleroma,
     key: :instance,
     type: :group,
     description: "Instance-related settings",
@@ -2906,6 +2942,23 @@
       }
     ]
   },
+  %{
+    group: :pleroma,
+    key: :mrf_follow_bot,
+    tab: :mrf,
+    related_policy: "Pleroma.Web.ActivityPub.MRF.FollowBotPolicy",
+    label: "MRF FollowBot Policy",
+    type: :group,
+    description: "Automatically follows newly discovered accounts.",
+    children: [
+      %{
+        key: :follower_nickname,
+        type: :string,
+        description: "The name of the bot account to use for following newly discovered users.",
+        suggestions: ["followbot"]
+      }
+    ]
+  },
   %{
     group: :pleroma,
     key: :modules,
diff --git a/docs/administration/CLI_tasks/config.md b/docs/administration/CLI_tasks/config.md
index 000ed4d98..fc9f3cbd5 100644
--- a/docs/administration/CLI_tasks/config.md
+++ b/docs/administration/CLI_tasks/config.md
@@ -32,16 +32,20 @@ config :pleroma, configurable_from_database: false
 ```
 
-To delete transferred settings from database optional flag `-d` can be used. `<env>` is `prod` by default.
+Options:
+
+- `<path>` - where to save the migrated config. E.g. `--path=/tmp`. If the file is saved to a non-standard folder, you must manually copy it into a directory where Pleroma can read it. For OTP installs this is `PLEROMA_CONFIG_PATH` or `/etc/pleroma`; for installs from source it is the `config` directory in the Pleroma folder.
+- `<env>` - the environment the config is migrated for. Defaults to `prod`.
+- `-d` - optional flag to delete the transferred settings from the database.
 
 === "OTP"
 
     ```sh
-    ./bin/pleroma_ctl config migrate_from_db [--env=<env>] [-d]
+    ./bin/pleroma_ctl config migrate_from_db [--env=<env>] [-d] [--path=<path>]
    ```
 
 === "From Source"
 
     ```sh
-    mix pleroma.config migrate_from_db [--env=<env>] [-d]
+    mix pleroma.config migrate_from_db [--env=<env>] [-d] [--path=<path>]
    ```
 
 ## Dump all of the config settings defined in the database
diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md
index 028c5e91d..069421722 100644
--- a/docs/configuration/cheatsheet.md
+++ b/docs/configuration/cheatsheet.md
@@ -65,6 +65,13 @@ To add configuration to your config file, you can copy it from the base config.
 * `show_reactions`: Let favourites and emoji reactions be viewed through the API (default: `true`).
 * `password_reset_token_validity`: The time after which reset tokens aren't accepted anymore, in seconds (default: one day).
 
+## :database
+* `improved_hashtag_timeline`: Setting to force toggle / force disable improved hashtags timeline. `:enabled` forces hashtags to be fetched from the `hashtags` table for the hashtag timeline. `:disabled` forces object-embedded hashtags to be used (slower). Keep it `:auto` for automatic behaviour (it is auto-set to `:enabled` [unless overridden] when HashtagsTableMigrator completes).
+
+## Background migrations
+* `populate_hashtags_table/sleep_interval_ms`: Sleep interval between each chunk of processed records in order to decrease the load on the system (defaults to 0 and should be kept at the default on most instances).
+* `populate_hashtags_table/fault_rate_allowance`: Max rate of failed objects to actually processed objects in order to enable the feature (any value from 0.0, which tolerates no errors, to 1.0, which will enable the feature even if the hashtags transfer failed for all records).
+
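The cheatsheet entries above map to the new config keys added to `config/config.exs` earlier in this diff. A sketch of what an admin could put in their runtime config (defaults shown; the values are illustrative, not recommendations):

```elixir
# prod.secret.exs (or the equivalent ConfigDB settings via AdminFE)
config :pleroma, :features, improved_hashtag_timeline: :auto

config :pleroma, :populate_hashtags_table,
  # tolerate up to 1% of objects failing the hashtag transfer and still
  # enable the improved hashtag timeline when the migration finishes
  fault_rate_allowance: 0.01,
  # pause (in milliseconds) between chunks of migrated records; 0 = no pause
  sleep_interval_ms: 0
```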
 ## Welcome
 * `direct_message`: - welcome message sent as a direct message.
   * `enabled`: Enables sending a direct message to a newly registered user. Defaults to `false`.
@@ -117,6 +124,7 @@ To add configuration to your config file, you can copy it from the base config.
 * `Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy`: Rejects or delists posts based on their age when received. (See [`:mrf_object_age`](#mrf_object_age)).
 * `Pleroma.Web.ActivityPub.MRF.ActivityExpirationPolicy`: Sets a default expiration on all posts made by users of the local instance. Requires `Pleroma.Workers.PurgeExpiredActivity` to be enabled for processing the scheduled deletions.
 * `Pleroma.Web.ActivityPub.MRF.ForceBotUnlistedPolicy`: Makes all bot posts disappear from public timelines.
+ * `Pleroma.Web.ActivityPub.MRF.FollowBotPolicy`: Automatically follows newly discovered users from the specified bot account. Local accounts, locked accounts, and users with "#nobot" in their bio are respected and excluded from being followed.
 * `transparency`: Make the content of your Message Rewrite Facility settings public (via nodeinfo).
 * `transparency_exclusions`: Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value.
@@ -203,6 +211,21 @@ config :pleroma, :mrf_user_allowlist, %{
 
 * `days`: Default global expiration time for all local Create activities (in days)
 
+#### :mrf_hashtag
+
+* `sensitive`: List of hashtags to mark activities as sensitive (default: `nsfw`)
+* `federated_timeline_removal`: List of hashtags to remove activities from the federated timeline (aka TWKN)
+* `reject`: List of hashtags to reject activities from
+
+Notes:
+- The hashtags in the configuration do not have a leading `#`.
+- This MRF policy is always enabled; if you want to disable it, you have to set empty lists.
+
+#### :mrf_follow_bot
+
+* `follower_nickname`: The name of the bot account to use for following newly discovered users. Using `followbot` or similar is strongly suggested.
+
+
 ### :activitypub
 * `unfollow_blocked`: Whether blocks result in people getting unfollowed
 * `outgoing_blocks`: Whether to federate blocks to other instances
diff --git a/docs/development/API/differences_in_mastoapi_responses.md b/docs/development/API/differences_in_mastoapi_responses.md
index a14fcb416..6c1ecb559 100644
--- a/docs/development/API/differences_in_mastoapi_responses.md
+++ b/docs/development/API/differences_in_mastoapi_responses.md
@@ -38,6 +38,7 @@ Has these additional fields under the `pleroma` object:
 - `thread_muted`: true if the thread the post belongs to is muted
 - `emoji_reactions`: A list with emoji / reaction maps. The format is `{name: "☕", count: 1, me: true}`. Contains no information about the reacting users, for that use the `/statuses/:id/reactions` endpoint.
 - `parent_visible`: If the parent of this post is visible to the user or not.
+- `pinned_at`: a datetime (iso8601) of when the status was pinned, `null` otherwise.
 
 ## Scheduled statuses
 
@@ -255,9 +256,29 @@ This information is returned in the `/api/v1/accounts/verify_credentials` endpoi
 
 *Pleroma supports refreshing tokens.*
 
-`POST /oauth/token`
+### POST `/oauth/token`
 
-Post here request with `grant_type=refresh_token` to obtain new access token. Returns an access token.
+You can obtain access tokens for a user in a few additional ways.
+
+#### Refreshing a token
+
+To obtain a new access token from a refresh token, pass `grant_type=refresh_token` with the following extra parameters:
+
+- `refresh_token`: The refresh token.
+
+#### Getting a token with a password
+
+To obtain a token from a user's password, pass `grant_type=password` with the following extra parameters:
+
+- `username`: Username to authenticate.
+- `password`: The user's password.
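As an illustration of the password grant and the extra response fields documented just below, here is a sketched request/response pair expressed as Elixir maps. The field names come from this document and standard OAuth; the client credentials and values are placeholders, so treat the exact response shape as an assumption rather than the authoritative payload:

```elixir
# Form fields for POST /oauth/token (placeholders; client registered via /api/v1/apps)
request = %{
  "grant_type" => "password",
  "client_id" => "<client_id>",
  "client_secret" => "<client_secret>",
  "username" => "joeuser",
  "password" => "hunter2"
}

# Sketch of the JSON response, including the Pleroma-specific additions:
# `id` (the token's primary key) and `me` (the owner's ActivityPub ID).
response = %{
  "access_token" => "...",
  "token_type" => "Bearer",
  "scope" => "read write follow",
  "refresh_token" => "...",
  "id" => 123,
  "me" => "https://pleroma.example/users/joeuser"
}
```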
+
+#### Response body
+
+Additional fields are returned in the response:
+
+- `id`: The primary key of this token in Pleroma's database.
+- `me` (user tokens only): The ActivityPub ID of the user who owns the token.
 
 ## Account Registration
diff --git a/docs/index.md b/docs/index.md
index 1a90d0a8d..80c5d2631 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -20,7 +20,7 @@ The default front-end used by Pleroma is Pleroma-FE. You can find more informati
 ### Mastodon interface
 If the Pleroma interface isn't your thing, or you're just trying something new but you want to keep using the familiar Mastodon interface, we got that too!
-Just add a "/web" after your instance url (e.g. ) and you'll end on the Mastodon web interface, but with a Pleroma backend! MAGIC! 
+Just add a "/web" after your instance url (e.g. ) and you'll end on the Mastodon web interface, but with a Pleroma backend! MAGIC!
 The Mastodon interface is from the Glitch-soc fork. For more information on the Mastodon interface you can check the [Mastodon](https://docs.joinmastodon.org/) and [Glitch-soc](https://glitch-soc.github.io/docs/) documentation.
 
 Remember, what you see is only the frontend part of Mastodon, the backend is still Pleroma.
diff --git a/docs/installation/otp_en.md b/docs/installation/otp_en.md
index 42e264e65..13f9636f3 100644
--- a/docs/installation/otp_en.md
+++ b/docs/installation/otp_en.md
@@ -290,7 +290,7 @@ nginx -t
 ## Create your first user and set as admin
 ```sh
-cd /opt/pleroma/bin
+cd /opt/pleroma
 su pleroma -s $SHELL -lc "./bin/pleroma_ctl user new joeuser joeuser@sld.tld --admin"
 ```
 This will create an account with the username of 'joeuser' with the email address of joeuser@sld.tld, and set that user's account as an admin. This will result in a link that you can paste into the browser, which logs you in and enables you to set the password.
diff --git a/lib/mix/tasks/pleroma/config.ex b/lib/mix/tasks/pleroma/config.ex
index 1962154b9..22502a522 100644
--- a/lib/mix/tasks/pleroma/config.ex
+++ b/lib/mix/tasks/pleroma/config.ex
@@ -27,7 +27,7 @@ def run(["migrate_from_db" | options]) do
     {opts, _} =
       OptionParser.parse!(options,
-        strict: [env: :string, delete: :boolean],
+        strict: [env: :string, delete: :boolean, path: :string],
         aliases: [d: :delete]
       )
 
@@ -259,18 +259,43 @@ defp create(group, settings) do
   defp migrate_from_db(opts) do
     env = opts[:env] || Pleroma.Config.get(:env)
+    filename = "#{env}.exported_from_db.secret.exs"
+
     config_path =
-      if Pleroma.Config.get(:release) do
-        :config_path
-        |> Pleroma.Config.get()
-        |> Path.dirname()
-      else
-        "config"
+      cond do
+        opts[:path] ->
+          opts[:path]
+
+        Pleroma.Config.get(:release) ->
+          :config_path
+          |> Pleroma.Config.get()
+          |> Path.dirname()
+
+        true ->
+          "config"
       end
-      |> Path.join("#{env}.exported_from_db.secret.exs")
+      |> Path.join(filename)
 
-    file = File.open!(config_path, [:write, :utf8])
+    with {:ok, file} <- File.open(config_path, [:write, :utf8]) do
+      write_config(file, config_path, opts)
+      shell_info("Database configuration settings have been exported to #{config_path}")
+    else
+      _ ->
+        shell_error("Impossible to save settings to this directory #{Path.dirname(config_path)}")
+        tmp_config_path = Path.join(System.tmp_dir!(), filename)
+        file = File.open!(tmp_config_path, [:write, :utf8])
+        shell_info(
+          "Saving database configuration settings to #{tmp_config_path}. Copy it to the #{
+            Path.dirname(config_path)
+          } manually."
+ ) + + write_config(file, tmp_config_path, opts) + end + end + + defp write_config(file, path, opts) do IO.write(file, config_header()) ConfigDB @@ -278,11 +303,7 @@ defp migrate_from_db(opts) do |> Enum.each(&write_and_delete(&1, file, opts[:delete])) :ok = File.close(file) - System.cmd("mix", ["format", config_path]) - - shell_info( - "Database configuration settings have been exported to config/#{env}.exported_from_db.secret.exs" - ) + System.cmd("mix", ["format", path]) end if Code.ensure_loaded?(Config.Reader) do diff --git a/lib/mix/tasks/pleroma/database.ex b/lib/mix/tasks/pleroma/database.ex index 2403ed581..e7f4b67a4 100644 --- a/lib/mix/tasks/pleroma/database.ex +++ b/lib/mix/tasks/pleroma/database.ex @@ -8,10 +8,13 @@ defmodule Mix.Tasks.Pleroma.Database do alias Pleroma.Object alias Pleroma.Repo alias Pleroma.User + require Logger require Pleroma.Constants + import Ecto.Query import Mix.Pleroma + use Mix.Task @shortdoc "A collection of database related tasks" @@ -214,4 +217,32 @@ def run(["set_text_search_config", tsconfig]) do shell_info('Done.') end end + + # Rolls back a specific migration (leaving subsequent migrations applied). + # WARNING: imposes a risk of unrecoverable data loss — proceed at your own responsibility. + # Based on https://stackoverflow.com/a/53825840 + def run(["rollback", version]) do + prompt = "SEVERE WARNING: this operation may result in unrecoverable data loss. Continue?" + + if shell_prompt(prompt, "n") in ~w(Yn Y y) do + {_, result, _} = + Ecto.Migrator.with_repo(Pleroma.Repo, fn repo -> + version = String.to_integer(version) + re = ~r/^#{version}_.*\.exs/ + path = Ecto.Migrator.migrations_path(repo) + + with {_, "" <> file} <- {:find, Enum.find(File.ls!(path), &String.match?(&1, re))}, + {_, [{mod, _} | _]} <- {:compile, Code.compile_file(Path.join(path, file))}, + {_, :ok} <- {:rollback, Ecto.Migrator.down(repo, version, mod)} do + {:ok, "Reversed migration: #{file}"} + else + {:find, _} -> {:error, "No migration found with version prefix: #{version}"} + {:compile, e} -> {:error, "Problem compiling migration module: #{inspect(e)}"} + {:rollback, e} -> {:error, "Problem reversing migration: #{inspect(e)}"} + end + end) + + shell_info(inspect(result)) + end + end end diff --git a/lib/pleroma/activity.ex b/lib/pleroma/activity.ex index 6542e684e..53beca5e6 100644 --- a/lib/pleroma/activity.ex +++ b/lib/pleroma/activity.ex @@ -113,6 +113,7 @@ def with_preloaded_bookmark(query, %User{} = user) do from([a] in query, left_join: b in Bookmark, on: b.user_id == ^user.id and b.activity_id == a.id, + as: :bookmark, preload: [bookmark: b] ) end @@ -123,6 +124,7 @@ def with_preloaded_report_notes(query) do from([a] in query, left_join: r in ReportNote, on: a.id == r.activity_id, + as: :report_note, preload: [report_notes: r] ) end @@ -182,40 +184,48 @@ def get_by_ap_id_with_object(ap_id) do |> Repo.one() end - @spec get_by_id(String.t()) :: Activity.t() | nil - def get_by_id(id) do - case FlakeId.flake_id?(id) do - true -> - Activity - |> where([a], a.id == ^id) - |> restrict_deactivated_users() - |> Repo.one() + @doc """ + Gets activity by ID, doesn't load activities from deactivated actors by default. 
+ """ + @spec get_by_id(String.t(), keyword()) :: t() | nil + def get_by_id(id, opts \\ [filter: [:restrict_deactivated]]), do: get_by_id_with_opts(id, opts) - _ -> - nil + @spec get_by_id_with_user_actor(String.t()) :: t() | nil + def get_by_id_with_user_actor(id), do: get_by_id_with_opts(id, preload: [:user_actor]) + + @spec get_by_id_with_object(String.t()) :: t() | nil + def get_by_id_with_object(id), do: get_by_id_with_opts(id, preload: [:object]) + + defp get_by_id_with_opts(id, opts) do + if FlakeId.flake_id?(id) do + query = Queries.by_id(id) + + with_filters_query = + if is_list(opts[:filter]) do + Enum.reduce(opts[:filter], query, fn + {:type, type}, acc -> Queries.by_type(acc, type) + :restrict_deactivated, acc -> restrict_deactivated_users(acc) + _, acc -> acc + end) + else + query + end + + with_preloads_query = + if is_list(opts[:preload]) do + Enum.reduce(opts[:preload], with_filters_query, fn + :user_actor, acc -> with_preloaded_user_actor(acc) + :object, acc -> with_preloaded_object(acc) + _, acc -> acc + end) + else + with_filters_query + end + + Repo.one(with_preloads_query) end end - def get_by_id_with_user_actor(id) do - case FlakeId.flake_id?(id) do - true -> - Activity - |> where([a], a.id == ^id) - |> with_preloaded_user_actor() - |> Repo.one() - - _ -> - nil - end - end - - def get_by_id_with_object(id) do - Activity - |> where(id: ^id) - |> with_preloaded_object() - |> Repo.one() - end - def all_by_ids_with_object(ids) do Activity |> where([a], a.id in ^ids) @@ -267,6 +277,11 @@ def get_create_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do def get_create_by_object_ap_id_with_object(_), do: nil + @spec create_by_id_with_object(String.t()) :: t() | nil + def create_by_id_with_object(id) do + get_by_id_with_opts(id, preload: [:object], filter: [type: "Create"]) + end + defp get_in_reply_to_activity_from_object(%Object{data: %{"inReplyTo" => ap_id}}) do get_create_by_object_ap_id_with_object(ap_id) end @@ -366,12 +381,6 @@ def direct_conversation_id(activity, for_user) do end end - @spec pinned_by_actor?(Activity.t()) :: boolean() - def pinned_by_actor?(%Activity{} = activity) do - actor = user_actor(activity) - activity.id in actor.pinned_activities - end - @spec get_by_object_ap_id_with_object(String.t()) :: t() | nil def get_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do ap_id @@ -382,4 +391,13 @@ def get_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do end def get_by_object_ap_id_with_object(_), do: nil + + @spec add_by_params_query(String.t(), String.t(), String.t()) :: Ecto.Query.t() + def add_by_params_query(object_id, actor, target) do + object_id + |> Queries.by_object_id() + |> Queries.by_type("Add") + |> Queries.by_actor(actor) + |> where([a], fragment("?->>'target' = ?", a.data, ^target)) + end end diff --git a/lib/pleroma/activity/ir/topics.ex b/lib/pleroma/activity/ir/topics.ex index d94395fc1..7a603a615 100644 --- a/lib/pleroma/activity/ir/topics.ex +++ b/lib/pleroma/activity/ir/topics.ex @@ -48,14 +48,12 @@ defp item_creation_tags(tags, _, _) do tags end - defp hashtags_to_topics(%{data: %{"tag" => tags}}) do - tags - |> Enum.filter(&is_bitstring(&1)) - |> Enum.map(fn tag -> "hashtag:" <> tag end) + defp hashtags_to_topics(object) do + object + |> Object.hashtags() + |> Enum.map(fn hashtag -> "hashtag:" <> hashtag end) end - defp hashtags_to_topics(_), do: [] - defp remote_topics(%{local: true}), do: [] defp remote_topics(%{actor: actor}) when is_binary(actor), diff --git a/lib/pleroma/activity/queries.ex 
b/lib/pleroma/activity/queries.ex index a6b02a889..4632651b0 100644 --- a/lib/pleroma/activity/queries.ex +++ b/lib/pleroma/activity/queries.ex @@ -14,6 +14,11 @@ defmodule Pleroma.Activity.Queries do alias Pleroma.Activity alias Pleroma.User + @spec by_id(query(), String.t()) :: query() + def by_id(query \\ Activity, id) do + from(a in query, where: a.id == ^id) + end + @spec by_ap_id(query, String.t()) :: query def by_ap_id(query \\ Activity, ap_id) do from( diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex index c853a2bb4..06d399b2e 100644 --- a/lib/pleroma/application.ex +++ b/lib/pleroma/application.ex @@ -103,9 +103,7 @@ def start(_type, _args) do task_children(@mix_env) ++ dont_run_in_test(@mix_env) ++ chat_child(chat_enabled?()) ++ - [ - Pleroma.Gopher.Server - ] + [Pleroma.Gopher.Server] # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html # for other strategies and supported options @@ -230,6 +228,12 @@ defp dont_run_in_test(_) do keys: :duplicate, partitions: System.schedulers_online() ]} + ] ++ background_migrators() + end + + defp background_migrators do + [ + Pleroma.Migrators.HashtagsTableMigrator ] end diff --git a/lib/pleroma/application_requirements.ex b/lib/pleroma/application_requirements.ex index 6ef65b263..c412dec5e 100644 --- a/lib/pleroma/application_requirements.ex +++ b/lib/pleroma/application_requirements.ex @@ -34,15 +34,16 @@ defp handle_result({:error, message}), do: raise(VerifyError, message: message) defp check_welcome_message_config!(:ok) do if Pleroma.Config.get([:welcome, :email, :enabled], false) and not Pleroma.Emails.Mailer.enabled?() do - Logger.error(""" - To send welcome email do you need to enable mail. - \nconfig :pleroma, Pleroma.Emails.Mailer, enabled: true - """) + Logger.warn(""" + To send welcome emails, you need to enable the mailer. + Welcome emails will NOT be sent with the current config. - {:error, "The mail disabled."} - else - :ok + Enable the mailer: + config :pleroma, Pleroma.Emails.Mailer, enabled: true + """) end + + :ok end defp check_welcome_message_config!(result), do: result @@ -51,18 +52,21 @@ defp check_welcome_message_config!(result), do: result # def check_confirmation_accounts!(:ok) do if Pleroma.Config.get([:instance, :account_activation_required]) && - not Pleroma.Config.get([Pleroma.Emails.Mailer, :enabled]) do - Logger.error( - "Account activation enabled, but no Mailer settings enabled.\n" <> - "Please set config :pleroma, :instance, account_activation_required: false\n" <> - "Otherwise setup and enable Mailer." - ) + not Pleroma.Emails.Mailer.enabled?() do + Logger.warn(""" + Account activation is required, but the mailer is disabled. + Users will NOT be able to confirm their accounts with this config. + Either disable account activation or enable the mailer. - {:error, - "Account activation enabled, but Mailer is disabled. 
Cannot send confirmation emails."} - else - :ok + Disable account activation: + config :pleroma, :instance, account_activation_required: false + + Enable the mailer: + config :pleroma, Pleroma.Emails.Mailer, enabled: true + """) end + + :ok end def check_confirmation_accounts!(result), do: result diff --git a/lib/pleroma/config.ex b/lib/pleroma/config.ex index 2e15a3719..54e332595 100644 --- a/lib/pleroma/config.ex +++ b/lib/pleroma/config.ex @@ -99,4 +99,8 @@ def restrict_unauthenticated_access?(resource, kind) do def oauth_consumer_strategies, do: get([:auth, :oauth_consumer_strategies], []) def oauth_consumer_enabled?, do: oauth_consumer_strategies() != [] + + def feature_enabled?(feature_name) do + get([:features, feature_name]) not in [nil, false, :disabled, :auto] + end end diff --git a/lib/pleroma/config/release_runtime_provider.ex b/lib/pleroma/config/release_runtime_provider.ex index 8227195dc..e5e9d3dcd 100644 --- a/lib/pleroma/config/release_runtime_provider.ex +++ b/lib/pleroma/config/release_runtime_provider.ex @@ -1,6 +1,6 @@ defmodule Pleroma.Config.ReleaseRuntimeProvider do @moduledoc """ - Imports `runtime.exs` and `{env}.exported_from_db.secret.exs` for elixir releases. + Imports runtime config and `{env}.exported_from_db.secret.exs` for releases. """ @behaviour Config.Provider @@ -8,10 +8,11 @@ defmodule Pleroma.Config.ReleaseRuntimeProvider do def init(opts), do: opts @impl true - def load(config, _opts) do + def load(config, opts) do with_defaults = Config.Reader.merge(config, Pleroma.Config.Holder.release_defaults()) - config_path = System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs" + config_path = + opts[:config_path] || System.get_env("PLEROMA_CONFIG_PATH") || "/etc/pleroma/config.exs" with_runtime_config = if File.exists?(config_path) do @@ -24,7 +25,7 @@ def load(config, _opts) do warning = [ IO.ANSI.red(), IO.ANSI.bright(), - "!!! #{config_path} not found! Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file", + "!!! Config path is not declared! 
Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file", IO.ANSI.reset() ] @@ -33,13 +34,14 @@ def load(config, _opts) do end exported_config_path = - config_path - |> Path.dirname() - |> Path.join("prod.exported_from_db.secret.exs") + opts[:exported_config_path] || + config_path + |> Path.dirname() + |> Path.join("#{Pleroma.Config.get(:env)}.exported_from_db.secret.exs") with_exported = if File.exists?(exported_config_path) do - exported_config = Config.Reader.read!(with_runtime_config) + exported_config = Config.Reader.read!(exported_config_path) Config.Reader.merge(with_runtime_config, exported_config) else with_runtime_config diff --git a/lib/pleroma/config_db.ex b/lib/pleroma/config_db.ex index b874e0e37..cb57673e3 100644 --- a/lib/pleroma/config_db.ex +++ b/lib/pleroma/config_db.ex @@ -387,6 +387,6 @@ defp find_valid_delimiter([delimiter | others], pattern, regex_delimiter) do @spec module_name?(String.t()) :: boolean() def module_name?(string) do Regex.match?(~r/^(Pleroma|Phoenix|Tesla|Quack|Ueberauth|Swoosh)\./, string) or - string in ["Oban", "Ueberauth", "ExSyslogger"] + string in ["Oban", "Ueberauth", "ExSyslogger", "ConcurrentLimiter"] end end diff --git a/lib/pleroma/data_migration.ex b/lib/pleroma/data_migration.ex new file mode 100644 index 000000000..1377af16e --- /dev/null +++ b/lib/pleroma/data_migration.ex @@ -0,0 +1,45 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.DataMigration do + use Ecto.Schema + + alias Pleroma.DataMigration + alias Pleroma.DataMigration.State + alias Pleroma.Repo + + import Ecto.Changeset + import Ecto.Query + + schema "data_migrations" do + field(:name, :string) + field(:state, State, default: :pending) + field(:feature_lock, :boolean, default: false) + field(:params, :map, default: %{}) + field(:data, :map, default: %{}) + + timestamps() + end + + def changeset(data_migration, params \\ %{}) do + data_migration + |> cast(params, [:name, :state, :feature_lock, :params, :data]) + |> validate_required([:name]) + |> unique_constraint(:name) + end + + def update_one_by_id(id, params \\ %{}) do + with {1, _} <- + from(dm in DataMigration, where: dm.id == ^id) + |> Repo.update_all(set: params) do + :ok + end + end + + def get_by_name(name) do + Repo.get_by(DataMigration, name: name) + end + + def populate_hashtags_table, do: get_by_name("populate_hashtags_table") +end diff --git a/lib/pleroma/delivery.ex b/lib/pleroma/delivery.ex index e8d536767..511d5cf58 100644 --- a/lib/pleroma/delivery.ex +++ b/lib/pleroma/delivery.ex @@ -9,7 +9,6 @@ defmodule Pleroma.Delivery do alias Pleroma.Object alias Pleroma.Repo alias Pleroma.User - alias Pleroma.User import Ecto.Changeset import Ecto.Query diff --git a/lib/pleroma/earmark_renderer.ex b/lib/pleroma/earmark_renderer.ex deleted file mode 100644 index 31cae3c72..000000000 --- a/lib/pleroma/earmark_renderer.ex +++ /dev/null @@ -1,256 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2021 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only -# -# This file is derived from Earmark, under the following copyright: -# Copyright © 2014 Dave Thomas, The Pragmatic Programmers -# SPDX-License-Identifier: Apache-2.0 -# Upstream: https://github.com/pragdave/earmark/blob/master/lib/earmark/html_renderer.ex -defmodule Pleroma.EarmarkRenderer do - @moduledoc false - - alias Earmark.Block - alias Earmark.Context - alias Earmark.HtmlRenderer - 
alias Earmark.Options
-
-  import Earmark.Inline, only: [convert: 3]
-  import Earmark.Helpers.HtmlHelpers
-  import Earmark.Message, only: [add_messages_from: 2, get_messages: 1, set_messages: 2]
-  import Earmark.Context, only: [append: 2, set_value: 2]
-  import Earmark.Options, only: [get_mapper: 1]
-
-  @doc false
-  def render(blocks, %Context{options: %Options{}} = context) do
-    messages = get_messages(context)
-
-    {contexts, html} =
-      get_mapper(context.options).(
-        blocks,
-        &render_block(&1, put_in(context.options.messages, []))
-      )
-      |> Enum.unzip()
-
-    all_messages =
-      contexts
-      |> Enum.reduce(messages, fn ctx, messages1 -> messages1 ++ get_messages(ctx) end)
-
-    {put_in(context.options.messages, all_messages), html |> IO.iodata_to_binary()}
-  end
-
-  #############
-  # Paragraph #
-  #############
-  defp render_block(%Block.Para{lnb: lnb, lines: lines, attrs: attrs}, context) do
-    lines = convert(lines, lnb, context)
-    add_attrs(lines, "<p>#{lines.value}</p>", attrs, [], lnb)
-  end
-
-  ########
-  # Html #
-  ########
-  defp render_block(%Block.Html{html: html}, context) do
-    {context, html}
-  end
-
-  defp render_block(%Block.HtmlComment{lines: lines}, context) do
-    {context, lines}
-  end
-
-  defp render_block(%Block.HtmlOneline{html: html}, context) do
-    {context, html}
-  end
-
-  #########
-  # Ruler #
-  #########
-  defp render_block(%Block.Ruler{lnb: lnb, attrs: attrs}, context) do
-    add_attrs(context, "<hr />", attrs, [], lnb)
-  end
-
-  ###########
-  # Heading #
-  ###########
-  defp render_block(
-         %Block.Heading{lnb: lnb, level: level, content: content, attrs: attrs},
-         context
-       ) do
-    converted = convert(content, lnb, context)
-    html = "<h#{level}>#{converted.value}</h#{level}>"
-    add_attrs(converted, html, attrs, [], lnb)
-  end
-
-  ##############
-  # Blockquote #
-  ##############
-
-  defp render_block(%Block.BlockQuote{lnb: lnb, blocks: blocks, attrs: attrs}, context) do
-    {context1, body} = render(blocks, context)
-    html = "<blockquote>#{body}</blockquote>"
-    add_attrs(context1, html, attrs, [], lnb)
-  end
-
-  #########
-  # Table #
-  #########
-
-  defp render_block(
-         %Block.Table{lnb: lnb, header: header, rows: rows, alignments: aligns, attrs: attrs},
-         context
-       ) do
-    {context1, html} = add_attrs(context, "<table>", attrs, [], lnb)
-    context2 = set_value(context1, html)
-
-    context3 =
-      if header do
-        append(add_trs(append(context2, "<thead>"), [header], "th", aligns, lnb), "</thead>")
-      else
-        # Maybe an error, needed append(context, html)
-        context2
-      end
-
-    context4 = append(add_trs(append(context3, "<tbody>"), rows, "td", aligns, lnb), "</tbody>")
-
-    {context4, [context4.value, "</table>"]}
-  end
-
-  ########
-  # Code #
-  ########
-
-  defp render_block(
-         %Block.Code{lnb: lnb, language: language, attrs: attrs} = block,
-         %Context{options: options} = context
-       ) do
-    class =
-      if language, do: ~s{ class="#{code_classes(language, options.code_class_prefix)}"}, else: ""
-
-    tag = ~s[<pre><code#{class}>]
-    lines = options.render_code.(block)
-    html = ~s[#{tag}#{lines}</code></pre>]
-    add_attrs(context, html, attrs, [], lnb)
-  end
-
-  #########
-  # Lists #
-  #########
-
-  defp render_block(
-         %Block.List{lnb: lnb, type: type, blocks: items, attrs: attrs, start: start},
-         context
-       ) do
-    {context1, content} = render(items, context)
-    html = "<#{type}#{start}>#{content}</#{type}>"
-    add_attrs(context1, html, attrs, [], lnb)
-  end
-
-  # format a single paragraph list item, and remove the para tags
-  defp render_block(
-         %Block.ListItem{lnb: lnb, blocks: blocks, spaced: false, attrs: attrs},
-         context
-       )
-       when length(blocks) == 1 do
-    {context1, content} = render(blocks, context)
-    content = Regex.replace(~r{</?p>}, content, "")
-    html = "<li>#{content}</li>"
-    add_attrs(context1, html, attrs, [], lnb)
-  end
-
-  # format a spaced list item
-  defp render_block(%Block.ListItem{lnb: lnb, blocks: blocks, attrs: attrs}, context) do
-    {context1, content} = render(blocks, context)
-    html = "<li>#{content}</li>"
-    add_attrs(context1, html, attrs, [], lnb)
-  end
-
-  ##################
-  # Footnote Block #
-  ##################
-
-  defp render_block(%Block.FnList{blocks: footnotes}, context) do
-    items =
-      Enum.map(footnotes, fn note ->
-        blocks = append_footnote_link(note)
-        %Block.ListItem{attrs: "#fn:#{note.number}", type: :ol, blocks: blocks}
-      end)
-
-    {context1, html} = render_block(%Block.List{type: :ol, blocks: items}, context)
-    {context1, Enum.join([~s[<div class="footnotes">], "<hr />", html, "</div>"])}
-  end
-
-  #######################################
-  # Isolated IALs are rendered as paras #
-  #######################################
-
-  defp render_block(%Block.Ial{verbatim: verbatim}, context) do
-    {context, "<p>{:#{verbatim}}</p>
    "} - end - - #################### - # IDDef is ignored # - #################### - - defp render_block(%Block.IdDef{}, context), do: {context, ""} - - ##################################### - # And here are the inline renderers # - ##################################### - - defdelegate br, to: HtmlRenderer - defdelegate codespan(text), to: HtmlRenderer - defdelegate em(text), to: HtmlRenderer - defdelegate strong(text), to: HtmlRenderer - defdelegate strikethrough(text), to: HtmlRenderer - - defdelegate link(url, text), to: HtmlRenderer - defdelegate link(url, text, title), to: HtmlRenderer - - defdelegate image(path, alt, title), to: HtmlRenderer - - defdelegate footnote_link(ref, backref, number), to: HtmlRenderer - - # Table rows - defp add_trs(context, rows, tag, aligns, lnb) do - numbered_rows = - rows - |> Enum.zip(Stream.iterate(lnb, &(&1 + 1))) - - numbered_rows - |> Enum.reduce(context, fn {row, lnb}, ctx -> - append(add_tds(append(ctx, ""), row, tag, aligns, lnb), "") - end) - end - - defp add_tds(context, row, tag, aligns, lnb) do - Enum.reduce(1..length(row), context, add_td_fn(row, tag, aligns, lnb)) - end - - defp add_td_fn(row, tag, aligns, lnb) do - fn n, ctx -> - style = - case Enum.at(aligns, n - 1, :default) do - :default -> "" - align -> " style=\"text-align: #{align}\"" - end - - col = Enum.at(row, n - 1) - converted = convert(col, lnb, set_messages(ctx, [])) - append(add_messages_from(ctx, converted), "<#{tag}#{style}>#{converted.value}") - end - end - - ############################### - # Append Footnote Return Link # - ############################### - - defdelegate append_footnote_link(note), to: HtmlRenderer - defdelegate append_footnote_link(note, fnlink), to: HtmlRenderer - - defdelegate render_code(lines), to: HtmlRenderer - - defp code_classes(language, prefix) do - ["" | String.split(prefix || "")] - |> Enum.map(fn pfx -> "#{pfx}#{language}" end) - |> Enum.join(" ") - end -end diff --git a/lib/pleroma/ecto_enums.ex b/lib/pleroma/ecto_enums.ex index f198cccb7..2a9addabc 100644 --- a/lib/pleroma/ecto_enums.ex +++ b/lib/pleroma/ecto_enums.ex @@ -17,3 +17,11 @@ follow_accept: 2, follow_reject: 3 ) + +defenum(Pleroma.DataMigration.State, + pending: 1, + running: 2, + complete: 3, + failed: 4, + manual: 5 +) diff --git a/lib/pleroma/formatter.ex b/lib/pleroma/formatter.ex index 7a08e48a9..764e347ec 100644 --- a/lib/pleroma/formatter.ex +++ b/lib/pleroma/formatter.ex @@ -121,6 +121,10 @@ def mentions_escape(text, options \\ []) do end end + def markdown_to_html(text) do + Earmark.as_html!(text, %Earmark.Options{compact_output: true}) + end + def html_escape({text, mentions, hashtags}, type) do {html_escape(text, type), mentions, hashtags} end diff --git a/lib/pleroma/hashtag.ex b/lib/pleroma/hashtag.ex new file mode 100644 index 000000000..53e2e9c89 --- /dev/null +++ b/lib/pleroma/hashtag.ex @@ -0,0 +1,106 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2020 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Hashtag do + use Ecto.Schema + + import Ecto.Changeset + import Ecto.Query + + alias Ecto.Multi + alias Pleroma.Hashtag + alias Pleroma.Object + alias Pleroma.Repo + + schema "hashtags" do + field(:name, :string) + + many_to_many(:objects, Object, join_through: "hashtags_objects", on_replace: :delete) + + timestamps() + end + + def normalize_name(name) do + name + |> String.downcase() + |> String.trim() + end + + def get_or_create_by_name(name) do + changeset = changeset(%Hashtag{}, %{name: name}) + + 
Repo.insert( + changeset, + on_conflict: [set: [name: get_field(changeset, :name)]], + conflict_target: :name, + returning: true + ) + end + + def get_or_create_by_names(names) when is_list(names) do + names = Enum.map(names, &normalize_name/1) + timestamp = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second) + + structs = + Enum.map(names, fn name -> + %Hashtag{} + |> changeset(%{name: name}) + |> Map.get(:changes) + |> Map.merge(%{inserted_at: timestamp, updated_at: timestamp}) + end) + + try do + with {:ok, %{query_op: hashtags}} <- + Multi.new() + |> Multi.insert_all(:insert_all_op, Hashtag, structs, + on_conflict: :nothing, + conflict_target: :name + ) + |> Multi.run(:query_op, fn _repo, _changes -> + {:ok, Repo.all(from(ht in Hashtag, where: ht.name in ^names))} + end) + |> Repo.transaction() do + {:ok, hashtags} + else + {:error, _name, value, _changes_so_far} -> {:error, value} + end + rescue + e -> {:error, e} + end + end + + def changeset(%Hashtag{} = struct, params) do + struct + |> cast(params, [:name]) + |> update_change(:name, &normalize_name/1) + |> validate_required([:name]) + |> unique_constraint(:name) + end + + def unlink(%Object{id: object_id}) do + with {_, hashtag_ids} <- + from(hto in "hashtags_objects", + where: hto.object_id == ^object_id, + select: hto.hashtag_id + ) + |> Repo.delete_all(), + {:ok, unreferenced_count} <- delete_unreferenced(hashtag_ids) do + {:ok, length(hashtag_ids), unreferenced_count} + end + end + + @delete_unreferenced_query """ + DELETE FROM hashtags WHERE id IN + (SELECT hashtags.id FROM hashtags + LEFT OUTER JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id + WHERE hashtags_objects.hashtag_id IS NULL AND hashtags.id = ANY($1)); + """ + + def delete_unreferenced(ids) do + with {:ok, %{num_rows: deleted_count}} <- Repo.query(@delete_unreferenced_query, [ids]) do + {:ok, deleted_count} + end + end +end diff --git a/lib/pleroma/migrators/hashtags_table_migrator.ex b/lib/pleroma/migrators/hashtags_table_migrator.ex new file mode 100644 index 000000000..b84058e11 --- /dev/null +++ b/lib/pleroma/migrators/hashtags_table_migrator.ex @@ -0,0 +1,208 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Migrators.HashtagsTableMigrator do + defmodule State do + use Pleroma.Migrators.Support.BaseMigratorState + + @impl Pleroma.Migrators.Support.BaseMigratorState + defdelegate data_migration(), to: Pleroma.DataMigration, as: :populate_hashtags_table + end + + use Pleroma.Migrators.Support.BaseMigrator + + alias Pleroma.Hashtag + alias Pleroma.Migrators.Support.BaseMigrator + alias Pleroma.Object + + @impl BaseMigrator + def feature_config_path, do: [:features, :improved_hashtag_timeline] + + @impl BaseMigrator + def fault_rate_allowance, do: Config.get([:populate_hashtags_table, :fault_rate_allowance], 0) + + @impl BaseMigrator + def perform do + data_migration_id = data_migration_id() + max_processed_id = get_stat(:max_processed_id, 0) + + Logger.info("Transferring embedded hashtags to `hashtags` (from oid: #{max_processed_id})...") + + query() + |> where([object], object.id > ^max_processed_id) + |> Repo.chunk_stream(100, :batches, timeout: :infinity) + |> Stream.each(fn objects -> + object_ids = Enum.map(objects, & &1.id) + + results = Enum.map(objects, &transfer_object_hashtags(&1)) + + failed_ids = + results + |> Enum.filter(&(elem(&1, 0) == :error)) + |> Enum.map(&elem(&1, 1)) + + # Count of objects with hashtags: 
`{:noop, id}` is returned for objects having other AS2 tags + chunk_affected_count = + results + |> Enum.filter(&(elem(&1, 0) == :ok)) + |> length() + + for failed_id <- failed_ids do + _ = + Repo.query( + "INSERT INTO data_migration_failed_ids(data_migration_id, record_id) " <> + "VALUES ($1, $2) ON CONFLICT DO NOTHING;", + [data_migration_id, failed_id] + ) + end + + _ = + Repo.query( + "DELETE FROM data_migration_failed_ids " <> + "WHERE data_migration_id = $1 AND record_id = ANY($2)", + [data_migration_id, object_ids -- failed_ids] + ) + + max_object_id = Enum.at(object_ids, -1) + + put_stat(:max_processed_id, max_object_id) + increment_stat(:iteration_processed_count, length(object_ids)) + increment_stat(:processed_count, length(object_ids)) + increment_stat(:failed_count, length(failed_ids)) + increment_stat(:affected_count, chunk_affected_count) + put_stat(:records_per_second, records_per_second()) + persist_state() + + # A quick and dirty approach to controlling the load this background migration imposes + sleep_interval = Config.get([:populate_hashtags_table, :sleep_interval_ms], 0) + Process.sleep(sleep_interval) + end) + |> Stream.run() + end + + @impl BaseMigrator + def query do + # Note: most objects have Mention-type AS2 tags and no hashtags (but we can't filter them out) + # Note: not checking activity type, expecting remove_non_create_objects_hashtags/_ to clean up + from( + object in Object, + where: + fragment("(?)->'tag' IS NOT NULL AND (?)->'tag' != '[]'::jsonb", object.data, object.data), + select: %{ + id: object.id, + tag: fragment("(?)->'tag'", object.data) + } + ) + |> join(:left, [o], hashtags_objects in fragment("SELECT object_id FROM hashtags_objects"), + on: hashtags_objects.object_id == o.id + ) + |> where([_o, hashtags_objects], is_nil(hashtags_objects.object_id)) + end + + @spec transfer_object_hashtags(Map.t()) :: {:noop | :ok | :error, integer()} + defp transfer_object_hashtags(object) do + embedded_tags = if Map.has_key?(object, :tag), do: object.tag, else: object.data["tag"] + hashtags = Object.object_data_hashtags(%{"tag" => embedded_tags}) + + if Enum.any?(hashtags) do + transfer_object_hashtags(object, hashtags) + else + {:noop, object.id} + end + end + + defp transfer_object_hashtags(object, hashtags) do + Repo.transaction(fn -> + with {:ok, hashtag_records} <- Hashtag.get_or_create_by_names(hashtags) do + maps = Enum.map(hashtag_records, &%{hashtag_id: &1.id, object_id: object.id}) + base_error = "ERROR when inserting hashtags_objects for object with id #{object.id}" + + try do + with {rows_count, _} when is_integer(rows_count) <- + Repo.insert_all("hashtags_objects", maps, on_conflict: :nothing) do + object.id + else + e -> + Logger.error("#{base_error}: #{inspect(e)}") + Repo.rollback(object.id) + end + rescue + e -> + Logger.error("#{base_error}: #{inspect(e)}") + Repo.rollback(object.id) + end + else + e -> + error = "ERROR: could not create hashtags for object #{object.id}: #{inspect(e)}" + Logger.error(error) + Repo.rollback(object.id) + end + end) + end + + @impl BaseMigrator + def retry_failed do + data_migration_id = data_migration_id() + + failed_objects_query() + |> Repo.chunk_stream(100, :one) + |> Stream.each(fn object -> + with {res, _} when res != :error <- transfer_object_hashtags(object) do + _ = + Repo.query( + "DELETE FROM data_migration_failed_ids " <> + "WHERE data_migration_id = $1 AND record_id = $2", + [data_migration_id, object.id] + ) + end + end) + |> Stream.run() + + put_stat(:failed_count, failures_count()) + 
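+    # Persist the refreshed failure counter, then wake the migrator process so it
+    # resumes the main loop from :max_processed_id; successfully retried records
+    # were already removed from data_migration_failed_ids above.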
persist_state() + + force_continue() + end + + defp failed_objects_query do + from(o in Object) + |> join(:inner, [o], dmf in fragment("SELECT * FROM data_migration_failed_ids"), + on: dmf.record_id == o.id + ) + |> where([_o, dmf], dmf.data_migration_id == ^data_migration_id()) + |> order_by([o], asc: o.id) + end + + @doc """ + Service func to delete `hashtags_objects` for legacy objects not associated with Create activity. + Also deletes unreferenced `hashtags` records (might occur after deletion of `hashtags_objects`). + """ + def delete_non_create_activities_hashtags do + hashtags_objects_cleanup_query = """ + DELETE FROM hashtags_objects WHERE object_id IN + (SELECT DISTINCT objects.id FROM objects + JOIN hashtags_objects ON hashtags_objects.object_id = objects.id LEFT JOIN activities + ON COALESCE(activities.data->'object'->>'id', activities.data->>'object') = + (objects.data->>'id') + AND activities.data->>'type' = 'Create' + WHERE activities.id IS NULL); + """ + + hashtags_cleanup_query = """ + DELETE FROM hashtags WHERE id IN + (SELECT hashtags.id FROM hashtags + LEFT OUTER JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id + WHERE hashtags_objects.hashtag_id IS NULL); + """ + + {:ok, %{num_rows: hashtags_objects_count}} = + Repo.query(hashtags_objects_cleanup_query, [], timeout: :infinity) + + {:ok, %{num_rows: hashtags_count}} = + Repo.query(hashtags_cleanup_query, [], timeout: :infinity) + + {:ok, hashtags_objects_count, hashtags_count} + end +end diff --git a/lib/pleroma/migrators/support/base_migrator.ex b/lib/pleroma/migrators/support/base_migrator.ex new file mode 100644 index 000000000..1f8a5402b --- /dev/null +++ b/lib/pleroma/migrators/support/base_migrator.ex @@ -0,0 +1,210 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Migrators.Support.BaseMigrator do + @moduledoc """ + Base background migrator functionality. + """ + + @callback perform() :: any() + @callback retry_failed() :: any() + @callback feature_config_path() :: list(atom()) + @callback query() :: Ecto.Query.t() + @callback fault_rate_allowance() :: integer() | float() + + defmacro __using__(_opts) do + quote do + use GenServer + + require Logger + + import Ecto.Query + + alias __MODULE__.State + alias Pleroma.Config + alias Pleroma.Repo + + @behaviour Pleroma.Migrators.Support.BaseMigrator + + defdelegate data_migration(), to: State + defdelegate data_migration_id(), to: State + defdelegate state(), to: State + defdelegate persist_state(), to: State, as: :persist_to_db + defdelegate get_stat(key, value \\ nil), to: State, as: :get_data_key + defdelegate put_stat(key, value), to: State, as: :put_data_key + defdelegate increment_stat(key, increment), to: State, as: :increment_data_key + + @reg_name {:global, __MODULE__} + + def whereis, do: GenServer.whereis(@reg_name) + + def start_link(_) do + case whereis() do + nil -> + GenServer.start_link(__MODULE__, nil, name: @reg_name) + + pid -> + {:ok, pid} + end + end + + @impl true + def init(_) do + {:ok, nil, {:continue, :init_state}} + end + + @impl true + def handle_continue(:init_state, _state) do + {:ok, _} = State.start_link(nil) + + data_migration = data_migration() + manual_migrations = Config.get([:instance, :manual_data_migrations], []) + + cond do + Config.get(:env) == :test -> + update_status(:noop) + + is_nil(data_migration) -> + message = "Data migration does not exist." 
+ update_status(:failed, message) + Logger.error("#{__MODULE__}: #{message}") + + data_migration.state == :manual or data_migration.name in manual_migrations -> + message = "Data migration is in manual execution or manual fix mode." + update_status(:manual, message) + Logger.warn("#{__MODULE__}: #{message}") + + data_migration.state == :complete -> + on_complete(data_migration) + + true -> + send(self(), :perform) + end + + {:noreply, nil} + end + + @impl true + def handle_info(:perform, state) do + State.reinit() + + update_status(:running) + put_stat(:iteration_processed_count, 0) + put_stat(:started_at, NaiveDateTime.utc_now()) + + perform() + + fault_rate = fault_rate() + put_stat(:fault_rate, fault_rate) + fault_rate_allowance = fault_rate_allowance() + + cond do + fault_rate == 0 -> + set_complete() + + is_float(fault_rate) and fault_rate <= fault_rate_allowance -> + message = """ + Done with fault rate of #{fault_rate} which doesn't exceed #{fault_rate_allowance}. + Putting data migration to manual fix mode. Try running `#{__MODULE__}.retry_failed/0`. + """ + + Logger.warn("#{__MODULE__}: #{message}") + update_status(:manual, message) + on_complete(data_migration()) + + true -> + message = "Too many failures. Try running `#{__MODULE__}.retry_failed/0`." + Logger.error("#{__MODULE__}: #{message}") + update_status(:failed, message) + end + + persist_state() + {:noreply, state} + end + + defp on_complete(data_migration) do + if data_migration.feature_lock || feature_state() == :disabled do + Logger.warn( + "#{__MODULE__}: migration complete but feature is locked; consider enabling." + ) + + :noop + else + Config.put(feature_config_path(), :enabled) + :ok + end + end + + @doc "Approximate count for current iteration (including processed records count)" + def count(force \\ false, timeout \\ :infinity) do + stored_count = get_stat(:count) + + if stored_count && !force do + stored_count + else + processed_count = get_stat(:processed_count, 0) + max_processed_id = get_stat(:max_processed_id, 0) + query = where(query(), [entity], entity.id > ^max_processed_id) + + count = Repo.aggregate(query, :count, :id, timeout: timeout) + processed_count + put_stat(:count, count) + persist_state() + + count + end + end + + def failures_count do + with {:ok, %{rows: [[count]]}} <- + Repo.query( + "SELECT COUNT(record_id) FROM data_migration_failed_ids WHERE data_migration_id = $1;", + [data_migration_id()] + ) do + count + end + end + + def feature_state, do: Config.get(feature_config_path()) + + def force_continue do + send(whereis(), :perform) + end + + def force_restart do + :ok = State.reset() + force_continue() + end + + def set_complete do + update_status(:complete) + persist_state() + on_complete(data_migration()) + end + + defp update_status(status, message \\ nil) do + put_stat(:state, status) + put_stat(:message, message) + end + + defp fault_rate do + with failures_count when is_integer(failures_count) <- failures_count() do + failures_count / Enum.max([get_stat(:affected_count, 0), 1]) + else + _ -> :error + end + end + + defp records_per_second do + get_stat(:iteration_processed_count, 0) / Enum.max([running_time(), 1]) + end + + defp running_time do + NaiveDateTime.diff( + NaiveDateTime.utc_now(), + get_stat(:started_at, NaiveDateTime.utc_now()) + ) + end + end + end +end diff --git a/lib/pleroma/migrators/support/base_migrator_state.ex b/lib/pleroma/migrators/support/base_migrator_state.ex new file mode 100644 index 000000000..b698587f2 --- /dev/null +++ 
b/lib/pleroma/migrators/support/base_migrator_state.ex @@ -0,0 +1,117 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Migrators.Support.BaseMigratorState do + @moduledoc """ + Base background migrator state functionality. + """ + + @callback data_migration() :: Pleroma.DataMigration.t() + + defmacro __using__(_opts) do + quote do + use Agent + + alias Pleroma.DataMigration + + @behaviour Pleroma.Migrators.Support.BaseMigratorState + @reg_name {:global, __MODULE__} + + def start_link(_) do + Agent.start_link(fn -> load_state_from_db() end, name: @reg_name) + end + + def data_migration, do: raise("data_migration/0 is not implemented") + defoverridable data_migration: 0 + + defp load_state_from_db do + data_migration = data_migration() + + data = + if data_migration do + Map.new(data_migration.data, fn {k, v} -> {String.to_atom(k), v} end) + else + %{} + end + + %{ + data_migration_id: data_migration && data_migration.id, + data: data + } + end + + def persist_to_db do + %{data_migration_id: data_migration_id, data: data} = state() + + if data_migration_id do + DataMigration.update_one_by_id(data_migration_id, data: data) + else + {:error, :nil_data_migration_id} + end + end + + def reset do + %{data_migration_id: data_migration_id} = state() + + with false <- is_nil(data_migration_id), + :ok <- + DataMigration.update_one_by_id(data_migration_id, + state: :pending, + data: %{} + ) do + reinit() + else + true -> {:error, :nil_data_migration_id} + e -> e + end + end + + def reinit do + Agent.update(@reg_name, fn _state -> load_state_from_db() end) + end + + def state do + Agent.get(@reg_name, & &1) + end + + def get_data_key(key, default \\ nil) do + get_in(state(), [:data, key]) || default + end + + def put_data_key(key, value) do + _ = persist_non_data_change(key, value) + + Agent.update(@reg_name, fn state -> + put_in(state, [:data, key], value) + end) + end + + def increment_data_key(key, increment \\ 1) do + Agent.update(@reg_name, fn state -> + initial_value = get_in(state, [:data, key]) || 0 + updated_value = initial_value + increment + put_in(state, [:data, key], updated_value) + end) + end + + defp persist_non_data_change(:state, value) do + with true <- get_data_key(:state) != value, + true <- value in Pleroma.DataMigration.State.__valid_values__(), + %{data_migration_id: data_migration_id} when not is_nil(data_migration_id) <- + state() do + DataMigration.update_one_by_id(data_migration_id, state: value) + else + false -> :ok + _ -> {:error, :nil_data_migration_id} + end + end + + defp persist_non_data_change(_, _) do + nil + end + + def data_migration_id, do: Map.get(state(), :data_migration_id) + end + end +end diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex index aaf123840..3ba749d1a 100644 --- a/lib/pleroma/object.ex +++ b/lib/pleroma/object.ex @@ -10,6 +10,7 @@ defmodule Pleroma.Object do alias Pleroma.Activity alias Pleroma.Config + alias Pleroma.Hashtag alias Pleroma.Object alias Pleroma.Object.Fetcher alias Pleroma.ObjectTombstone @@ -28,6 +29,8 @@ defmodule Pleroma.Object do schema "objects" do field(:data, :map) + many_to_many(:hashtags, Hashtag, join_through: "hashtags_objects", on_replace: :delete) + timestamps() end @@ -49,7 +52,8 @@ def with_joined_activity(query, activity_type \\ "Create", join_type \\ :inner) end def create(data) do - Object.change(%Object{}, %{data: data}) + %Object{} + |> Object.change(%{data: data}) |> Repo.insert() end @@ -58,8 
+62,41 @@ def change(struct, params \\ %{}) do |> cast(params, [:data]) |> validate_required([:data]) |> unique_constraint(:ap_id, name: :objects_unique_apid_index) + # Expecting `maybe_handle_hashtags_change/2` to run last: + |> maybe_handle_hashtags_change(struct) end + # Note: not checking activity type (assuming non-legacy objects are associated with Create act.) + defp maybe_handle_hashtags_change(changeset, struct) do + with %Ecto.Changeset{valid?: true} <- changeset, + data_hashtags_change = get_change(changeset, :data), + {_, true} <- {:changed, hashtags_changed?(struct, data_hashtags_change)}, + {:ok, hashtag_records} <- + data_hashtags_change + |> object_data_hashtags() + |> Hashtag.get_or_create_by_names() do + put_assoc(changeset, :hashtags, hashtag_records) + else + %{valid?: false} -> + changeset + + {:changed, false} -> + changeset + + {:error, _} -> + validate_change(changeset, :data, fn _, _ -> + [data: "error referencing hashtags"] + end) + end + end + + defp hashtags_changed?(%Object{} = struct, %{"tag" => _} = data) do + Enum.sort(embedded_hashtags(struct)) != + Enum.sort(object_data_hashtags(data)) + end + + defp hashtags_changed?(_, _), do: false + def get_by_id(nil), do: nil def get_by_id(id), do: Repo.get(Object, id) @@ -187,9 +224,13 @@ def make_tombstone(%Object{data: %{"id" => id, "type" => type}}, deleted \\ Date def swap_object_with_tombstone(object) do tombstone = make_tombstone(object) - object - |> Object.change(%{data: tombstone}) - |> Repo.update() + with {:ok, object} <- + object + |> Object.change(%{data: tombstone}) + |> Repo.update() do + Hashtag.unlink(object) + {:ok, object} + end end def delete(%Object{data: %{"id" => id}} = object) do @@ -349,4 +390,39 @@ def replies(object, opts \\ []) do def self_replies(object, opts \\ []), do: replies(object, Keyword.put(opts, :self_only, true)) + + def tags(%Object{data: %{"tag" => tags}}) when is_list(tags), do: tags + + def tags(_), do: [] + + def hashtags(%Object{} = object) do + # Note: always using embedded hashtags regardless of whether they are migrated to the hashtags table + # (embedded hashtags stay in sync anyway, and we avoid extra joins and preload hassle) + embedded_hashtags(object) + end + + def embedded_hashtags(%Object{data: data}) do + object_data_hashtags(data) + end + + def embedded_hashtags(_), do: [] + + def object_data_hashtags(%{"tag" => tags}) when is_list(tags) do + tags + |> Enum.filter(fn + %{"type" => "Hashtag"} = data -> Map.has_key?(data, "name") + plain_text when is_bitstring(plain_text) -> true + _ -> false + end) + |> Enum.map(fn + %{"name" => "#" <> hashtag} -> String.downcase(hashtag) + %{"name" => hashtag} -> String.downcase(hashtag) + hashtag when is_bitstring(hashtag) -> String.downcase(hashtag) + end) + |> Enum.uniq() + # Note: "" elements (plain text) might occur in `data.tag` for incoming objects + |> Enum.filter(&(&1 not in [nil, ""])) + end + + def object_data_hashtags(_), do: [] end diff --git a/lib/pleroma/object/containment.ex b/lib/pleroma/object/containment.ex index fb0398f92..040537acf 100644 --- a/lib/pleroma/object/containment.ex +++ b/lib/pleroma/object/containment.ex @@ -71,6 +71,14 @@ def contain_origin_from_id(id, %{"id" => other_id} = _params) when is_binary(oth compare_uris(id_uri, other_uri) end + # Mastodon pin activities don't have an id, so we check the object field, which points to the object being pinned.
+ def contain_origin_from_id(id, %{"object" => object}) when is_binary(object) do + id_uri = URI.parse(id) + object_uri = URI.parse(object) + + compare_uris(id_uri, object_uri) + end + def contain_origin_from_id(_id, _data), do: :error def contain_child(%{"object" => %{"id" => id, "attributedTo" => _} = object}), diff --git a/lib/pleroma/pagination.ex b/lib/pleroma/pagination.ex index 0d24e1010..33e45a0eb 100644 --- a/lib/pleroma/pagination.ex +++ b/lib/pleroma/pagination.ex @@ -93,6 +93,7 @@ defp cast_params(params) do max_id: :string, offset: :integer, limit: :integer, + skip_extra_order: :boolean, skip_order: :boolean } @@ -114,6 +115,8 @@ defp restrict(query, :max_id, %{max_id: max_id}, table_binding) do defp restrict(query, :order, %{skip_order: true}, _), do: query + defp restrict(%{order_bys: [_ | _]} = query, :order, %{skip_extra_order: true}, _), do: query + defp restrict(query, :order, %{min_id: _}, table_binding) do order_by( query, diff --git a/lib/pleroma/repo.ex b/lib/pleroma/repo.ex index 4556352d0..b8ea06e33 100644 --- a/lib/pleroma/repo.ex +++ b/lib/pleroma/repo.ex @@ -8,6 +8,8 @@ defmodule Pleroma.Repo do adapter: Ecto.Adapters.Postgres, migration_timestamps: [type: :naive_datetime_usec] + use Ecto.Explain + import Ecto.Query require Logger @@ -63,8 +65,8 @@ def get_assoc(resource, association) do iex> Pleroma.Repo.chunk_stream(Pleroma.Activity.Queries.by_actor(ap_id), 500, :batches) """ @spec chunk_stream(Ecto.Query.t(), integer(), atom()) :: Enumerable.t() - def chunk_stream(query, chunk_size, returns_as \\ :one) do - # We don't actually need start and end funcitons of resource streaming, + def chunk_stream(query, chunk_size, returns_as \\ :one, query_options \\ []) do + # We don't actually need start and end functions of resource streaming, # but it seems to be the only way to not fetch records one-by-one and # have individual records be the elements of the stream, instead of # lists of records @@ -76,7 +78,7 @@ def chunk_stream(query, chunk_size, returns_as \\ :one) do |> order_by(asc: :id) |> where([r], r.id > ^last_id) |> limit(^chunk_size) - |> all() + |> all(query_options) |> case do [] -> {:halt, last_id} diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex index 9942617d8..b78777141 100644 --- a/lib/pleroma/user.ex +++ b/lib/pleroma/user.ex @@ -99,6 +99,7 @@ defmodule Pleroma.User do field(:local, :boolean, default: true) field(:follower_address, :string) field(:following_address, :string) + field(:featured_address, :string) field(:search_rank, :float, virtual: true) field(:search_type, :integer, virtual: true) field(:tags, {:array, :string}, default: []) @@ -130,7 +131,6 @@ defmodule Pleroma.User do field(:hide_followers, :boolean, default: false) field(:hide_follows, :boolean, default: false) field(:hide_favorites, :boolean, default: true) - field(:pinned_activities, {:array, :string}, default: []) field(:email_notifications, :map, default: %{"digest" => false}) field(:mascot, :map, default: nil) field(:emoji, :map, default: %{}) @@ -148,6 +148,7 @@ defmodule Pleroma.User do field(:accepts_chat_messages, :boolean, default: nil) field(:last_active_at, :naive_datetime) field(:disclose_client, :boolean, default: true) + field(:pinned_objects, :map, default: %{}) embeds_one( :notification_settings, @@ -372,8 +373,10 @@ def banner_url(user, options \\ []) do end # Should probably be renamed or removed + @spec ap_id(User.t()) :: String.t() def ap_id(%User{nickname: nickname}), do: "#{Web.base_url()}/users/#{nickname}" + @spec ap_followers(User.t()) :: String.t() 
def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers" @@ -381,6 +384,11 @@ def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers" def ap_following(%User{following_address: fa}) when is_binary(fa), do: fa def ap_following(%User{} = user), do: "#{ap_id(user)}/following" + @spec ap_featured_collection(User.t()) :: String.t() + def ap_featured_collection(%User{featured_address: fa}) when is_binary(fa), do: fa + + def ap_featured_collection(%User{} = user), do: "#{ap_id(user)}/collections/featured" + defp truncate_fields_param(params) do if Map.has_key?(params, :fields) do Map.put(params, :fields, Enum.map(params[:fields], &truncate_field/1)) @@ -443,6 +451,7 @@ def remote_user_changeset(struct \\ %User{local: false}, params) do :uri, :follower_address, :following_address, + :featured_address, :hide_followers, :hide_follows, :hide_followers_count, @@ -454,7 +463,8 @@ def remote_user_changeset(struct \\ %User{local: false}, params) do :invisible, :actor_type, :also_known_as, - :accepts_chat_messages + :accepts_chat_messages, + :pinned_objects ] ) |> cast(params, [:name], empty_values: []) @@ -686,7 +696,7 @@ def register_changeset_ldap(struct, params = %{password: password}) |> validate_format(:nickname, local_nickname_regex()) |> put_ap_id() |> unique_constraint(:ap_id) - |> put_following_and_follower_address() + |> put_following_and_follower_and_featured_address() end def register_changeset(struct, params \\ %{}, opts \\ []) do @@ -747,7 +757,7 @@ def register_changeset(struct, params \\ %{}, opts \\ []) do |> put_password_hash |> put_ap_id() |> unique_constraint(:ap_id) - |> put_following_and_follower_address() + |> put_following_and_follower_and_featured_address() end def maybe_validate_required_email(changeset, true), do: changeset @@ -765,11 +775,16 @@ defp put_ap_id(changeset) do put_change(changeset, :ap_id, ap_id) end - defp put_following_and_follower_address(changeset) do - followers = ap_followers(%User{nickname: get_field(changeset, :nickname)}) + defp put_following_and_follower_and_featured_address(changeset) do + user = %User{nickname: get_field(changeset, :nickname)} + followers = ap_followers(user) + following = ap_following(user) + featured = ap_featured_collection(user) changeset |> put_change(:follower_address, followers) + |> put_change(:following_address, following) + |> put_change(:featured_address, featured) end defp autofollow_users(user) do @@ -2255,13 +2270,6 @@ def update_background(user, background) do |> update_and_set_cache() end - def roles(%{is_moderator: is_moderator, is_admin: is_admin}) do - %{ - admin: is_admin, - moderator: is_moderator - } - end - def validate_fields(changeset, remote? \\ false) do limit_name = if remote?, do: :max_remote_account_fields, else: :max_account_fields limit = Config.get([:instance, limit_name], 0) @@ -2350,45 +2358,35 @@ def approval_changeset(user, set_approval: approved?) 
do cast(user, %{is_approved: approved?}, [:is_approved]) end - def add_pinnned_activity(user, %Pleroma.Activity{id: id}) do - if id not in user.pinned_activities do - max_pinned_statuses = Config.get([:instance, :max_pinned_statuses], 0) - params = %{pinned_activities: user.pinned_activities ++ [id]} - - # if pinned activity was scheduled for deletion, we remove job - if expiration = Pleroma.Workers.PurgeExpiredActivity.get_expiration(id) do - Oban.cancel_job(expiration.id) - end + @spec add_pinned_object_id(User.t(), String.t()) :: {:ok, User.t()} | {:error, term()} + def add_pinned_object_id(%User{} = user, object_id) do + if !user.pinned_objects[object_id] do + params = %{pinned_objects: Map.put(user.pinned_objects, object_id, NaiveDateTime.utc_now())} user - |> cast(params, [:pinned_activities]) - |> validate_length(:pinned_activities, - max: max_pinned_statuses, - message: "You have already pinned the maximum number of statuses" - ) + |> cast(params, [:pinned_objects]) + |> validate_change(:pinned_objects, fn :pinned_objects, pinned_objects -> + max_pinned_statuses = Config.get([:instance, :max_pinned_statuses], 0) + + if Enum.count(pinned_objects) <= max_pinned_statuses do + [] + else + [pinned_objects: "You have already pinned the maximum number of statuses"] + end + end) else change(user) end |> update_and_set_cache() end - def remove_pinnned_activity(user, %Pleroma.Activity{id: id, data: data}) do - params = %{pinned_activities: List.delete(user.pinned_activities, id)} - - # if pinned activity was scheduled for deletion, we reschedule it for deletion - if data["expires_at"] do - # MRF.ActivityExpirationPolicy used UTC timestamps for expires_at in original implementation - {:ok, expires_at} = - data["expires_at"] |> Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime.cast() - - Pleroma.Workers.PurgeExpiredActivity.enqueue(%{ - activity_id: id, - expires_at: expires_at - }) - end - + @spec remove_pinned_object_id(User.t(), String.t()) :: {:ok, t()} | {:error, term()} + def remove_pinned_object_id(%User{} = user, object_id) do user - |> cast(params, [:pinned_activities]) + |> cast( + %{pinned_objects: Map.delete(user.pinned_objects, object_id)}, + [:pinned_objects] + ) |> update_and_set_cache() end diff --git a/lib/pleroma/utils.ex b/lib/pleroma/utils.ex index bc0c95332..a446d3ae6 100644 --- a/lib/pleroma/utils.ex +++ b/lib/pleroma/utils.ex @@ -11,6 +11,8 @@ defmodule Pleroma.Utils do eperm epipe erange erofs espipe esrch estale etxtbsy exdev )a + @repo_timeout Pleroma.Config.get([Pleroma.Repo, :timeout], 15_000) + def compile_dir(dir) when is_binary(dir) do dir |> File.ls!() @@ -63,4 +65,21 @@ def posix_error_message(code) when code in @posix_error_codes do end def posix_error_message(_), do: "" + + @doc """ + Returns [timeout: integer] suitable for passing as an option to Repo functions. + + This function detects if the execution was triggered from IEx shell, Mix task, or + ./bin/pleroma_ctl and sets the timeout to :infinity, else returns the default timeout value. 
+ """ + @spec query_timeout() :: [timeout: integer] + def query_timeout do + {parent, _, _, _} = Process.info(self(), :current_stacktrace) |> elem(1) |> Enum.fetch!(2) + + cond do + parent |> to_string |> String.starts_with?("Elixir.Mix.Task") -> [timeout: :infinity] + parent == :erl_eval -> [timeout: :infinity] + true -> [timeout: @repo_timeout] + end + end end diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex index 5b45e2ca1..d0051d1cb 100644 --- a/lib/pleroma/web/activity_pub/activity_pub.ex +++ b/lib/pleroma/web/activity_pub/activity_pub.ex @@ -10,6 +10,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do alias Pleroma.Conversation alias Pleroma.Conversation.Participation alias Pleroma.Filter + alias Pleroma.Hashtag alias Pleroma.Maps alias Pleroma.Notification alias Pleroma.Object @@ -465,6 +466,23 @@ def fetch_latest_direct_activity_id_for_context(context, opts \\ %{}) do |> Repo.one() end + defp fetch_paginated_optimized(query, opts, pagination) do + # Note: tag-filtering funcs may apply "ORDER BY objects.id DESC", + # and extra sorting on "activities.id DESC NULLS LAST" would worse the query plan + opts = Map.put(opts, :skip_extra_order, true) + + Pagination.fetch_paginated(query, opts, pagination) + end + + def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do + list_memberships = Pleroma.List.memberships(opts[:user]) + + fetch_activities_query(recipients ++ list_memberships, opts) + |> fetch_paginated_optimized(opts, pagination) + |> Enum.reverse() + |> maybe_update_cc(list_memberships, opts[:user]) + end + @spec fetch_public_or_unlisted_activities(map(), Pagination.type()) :: [Activity.t()] def fetch_public_or_unlisted_activities(opts \\ %{}, pagination \\ :keyset) do opts = Map.delete(opts, :user) @@ -472,7 +490,7 @@ def fetch_public_or_unlisted_activities(opts \\ %{}, pagination \\ :keyset) do [Constants.as_public()] |> fetch_activities_query(opts) |> restrict_unlisted(opts) - |> Pagination.fetch_paginated(opts, pagination) + |> fetch_paginated_optimized(opts, pagination) end @spec fetch_public_activities(map(), Pagination.type()) :: [Activity.t()] @@ -612,7 +630,7 @@ defp fetch_activities_for_user(user, reading_user, params) do |> Map.put(:type, ["Create", "Announce"]) |> Map.put(:user, reading_user) |> Map.put(:actor_id, user.ap_id) - |> Map.put(:pinned_activity_ids, user.pinned_activities) + |> Map.put(:pinned_object_ids, Map.keys(user.pinned_objects)) params = if User.blocks?(reading_user, user) do @@ -693,52 +711,144 @@ defp restrict_since(query, %{since_id: since_id}) do defp restrict_since(query, _), do: query - defp restrict_tag_reject(_query, %{tag_reject: _tag_reject, skip_preload: true}) do - raise "Can't use the child object without preloading!" + defp restrict_embedded_tag_all(_query, %{tag_all: _tag_all, skip_preload: true}) do + raise_on_missing_preload() end - defp restrict_tag_reject(query, %{tag_reject: [_ | _] = tag_reject}) do - from( - [_activity, object] in query, - where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject) - ) - end - - defp restrict_tag_reject(query, _), do: query - - defp restrict_tag_all(_query, %{tag_all: _tag_all, skip_preload: true}) do - raise "Can't use the child object without preloading!" 
- end - - defp restrict_tag_all(query, %{tag_all: [_ | _] = tag_all}) do + defp restrict_embedded_tag_all(query, %{tag_all: [_ | _] = tag_all}) do from( [_activity, object] in query, where: fragment("(?)->'tag' \\?& (?)", object.data, ^tag_all) ) end - defp restrict_tag_all(query, _), do: query + defp restrict_embedded_tag_all(query, %{tag_all: tag}) when is_binary(tag) do + restrict_embedded_tag_any(query, %{tag: tag}) + end - defp restrict_tag(_query, %{tag: _tag, skip_preload: true}) do + defp restrict_embedded_tag_all(query, _), do: query + + defp restrict_embedded_tag_any(_query, %{tag: _tag, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_embedded_tag_any(query, %{tag: [_ | _] = tag_any}) do + from( + [_activity, object] in query, + where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag_any) + ) + end + + defp restrict_embedded_tag_any(query, %{tag: tag}) when is_binary(tag) do + restrict_embedded_tag_any(query, %{tag: [tag]}) + end + + defp restrict_embedded_tag_any(query, _), do: query + + defp restrict_embedded_tag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_embedded_tag_reject_any(query, %{tag_reject: [_ | _] = tag_reject}) do + from( + [_activity, object] in query, + where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject) + ) + end + + defp restrict_embedded_tag_reject_any(query, %{tag_reject: tag_reject}) + when is_binary(tag_reject) do + restrict_embedded_tag_reject_any(query, %{tag_reject: [tag_reject]}) + end + + defp restrict_embedded_tag_reject_any(query, _), do: query + + defp object_ids_query_for_tags(tags) do + from(hto in "hashtags_objects") + |> join(:inner, [hto], ht in Pleroma.Hashtag, on: hto.hashtag_id == ht.id) + |> where([hto, ht], ht.name in ^tags) + |> select([hto], hto.object_id) + |> distinct([hto], true) + end + + defp restrict_hashtag_all(_query, %{tag_all: _tag, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_hashtag_all(query, %{tag_all: [single_tag]}) do + restrict_hashtag_any(query, %{tag: single_tag}) + end + + defp restrict_hashtag_all(query, %{tag_all: [_ | _] = tags}) do + from( + [_activity, object] in query, + where: + fragment( + """ + (SELECT array_agg(hashtags.name) FROM hashtags JOIN hashtags_objects + ON hashtags_objects.hashtag_id = hashtags.id WHERE hashtags.name = ANY(?) + AND hashtags_objects.object_id = ?) @> ? 
+ """, + ^tags, + object.id, + ^tags + ) + ) + end + + defp restrict_hashtag_all(query, %{tag_all: tag}) when is_binary(tag) do + restrict_hashtag_all(query, %{tag_all: [tag]}) + end + + defp restrict_hashtag_all(query, _), do: query + + defp restrict_hashtag_any(_query, %{tag: _tag, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_hashtag_any(query, %{tag: [_ | _] = tags}) do + hashtag_ids = + from(ht in Hashtag, where: ht.name in ^tags, select: ht.id) + |> Repo.all() + + # Note: NO extra ordering should be done on "activities.id desc nulls last" for optimal plan + from( + [_activity, object] in query, + join: hto in "hashtags_objects", + on: hto.object_id == object.id, + where: hto.hashtag_id in ^hashtag_ids, + distinct: [desc: object.id], + order_by: [desc: object.id] + ) + end + + defp restrict_hashtag_any(query, %{tag: tag}) when is_binary(tag) do + restrict_hashtag_any(query, %{tag: [tag]}) + end + + defp restrict_hashtag_any(query, _), do: query + + defp restrict_hashtag_reject_any(_query, %{tag_reject: _tag_reject, skip_preload: true}) do + raise_on_missing_preload() + end + + defp restrict_hashtag_reject_any(query, %{tag_reject: [_ | _] = tags_reject}) do + from( + [_activity, object] in query, + where: object.id not in subquery(object_ids_query_for_tags(tags_reject)) + ) + end + + defp restrict_hashtag_reject_any(query, %{tag_reject: tag_reject}) when is_binary(tag_reject) do + restrict_hashtag_reject_any(query, %{tag_reject: [tag_reject]}) + end + + defp restrict_hashtag_reject_any(query, _), do: query + + defp raise_on_missing_preload do raise "Can't use the child object without preloading!" end - defp restrict_tag(query, %{tag: tag}) when is_list(tag) do - from( - [_activity, object] in query, - where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag) - ) - end - - defp restrict_tag(query, %{tag: tag}) when is_binary(tag) do - from( - [_activity, object] in query, - where: fragment("(?)->'tag' \\? 
(?)", object.data, ^tag) - ) - end - - defp restrict_tag(query, _), do: query - defp restrict_recipients(query, [], _user), do: query defp restrict_recipients(query, recipients, nil) do @@ -965,8 +1075,18 @@ defp restrict_unlisted(query, %{restrict_unlisted: true}) do defp restrict_unlisted(query, _), do: query - defp restrict_pinned(query, %{pinned: true, pinned_activity_ids: ids}) do - from(activity in query, where: activity.id in ^ids) + defp restrict_pinned(query, %{pinned: true, pinned_object_ids: ids}) do + from( + [activity, object: o] in query, + where: + fragment( + "(?)->>'type' = 'Create' and coalesce((?)->'object'->>'id', (?)->>'object') = any (?)", + activity.data, + activity.data, + activity.data, + ^ids + ) + ) end defp restrict_pinned(query, _), do: query @@ -1098,6 +1218,26 @@ defp maybe_order(query, %{order: :asc}) do defp maybe_order(query, _), do: query + defp normalize_fetch_activities_query_opts(opts) do + Enum.reduce([:tag, :tag_all, :tag_reject], opts, fn key, opts -> + case opts[key] do + value when is_bitstring(value) -> + Map.put(opts, key, Hashtag.normalize_name(value)) + + value when is_list(value) -> + normalized_value = + value + |> Enum.map(&Hashtag.normalize_name/1) + |> Enum.uniq() + + Map.put(opts, key, normalized_value) + + _ -> + opts + end + end) + end + defp fetch_activities_query_ap_ids_ops(opts) do source_user = opts[:muting_user] ap_id_relationships = if source_user, do: [:mute, :reblog_mute], else: [] @@ -1121,6 +1261,8 @@ defp fetch_activities_query_ap_ids_ops(opts) do end def fetch_activities_query(recipients, opts \\ %{}) do + opts = normalize_fetch_activities_query_opts(opts) + {restrict_blocked_opts, restrict_muted_opts, restrict_muted_reblogs_opts} = fetch_activities_query_ap_ids_ops(opts) @@ -1128,50 +1270,51 @@ def fetch_activities_query(recipients, opts \\ %{}) do skip_thread_containment: Config.get([:instance, :skip_thread_containment]) } - Activity - |> maybe_preload_objects(opts) - |> maybe_preload_bookmarks(opts) - |> maybe_preload_report_notes(opts) - |> maybe_set_thread_muted_field(opts) - |> maybe_order(opts) - |> restrict_recipients(recipients, opts[:user]) - |> restrict_replies(opts) - |> restrict_tag(opts) - |> restrict_tag_reject(opts) - |> restrict_tag_all(opts) - |> restrict_since(opts) - |> restrict_local(opts) - |> restrict_remote(opts) - |> restrict_actor(opts) - |> restrict_type(opts) - |> restrict_state(opts) - |> restrict_favorited_by(opts) - |> restrict_blocked(restrict_blocked_opts) - |> restrict_muted(restrict_muted_opts) - |> restrict_filtered(opts) - |> restrict_media(opts) - |> restrict_visibility(opts) - |> restrict_thread_visibility(opts, config) - |> restrict_reblogs(opts) - |> restrict_pinned(opts) - |> restrict_muted_reblogs(restrict_muted_reblogs_opts) - |> restrict_instance(opts) - |> restrict_announce_object_actor(opts) - |> restrict_filtered(opts) - |> Activity.restrict_deactivated_users() - |> exclude_poll_votes(opts) - |> exclude_chat_messages(opts) - |> exclude_invisible_actors(opts) - |> exclude_visibility(opts) - end + query = + Activity + |> maybe_preload_objects(opts) + |> maybe_preload_bookmarks(opts) + |> maybe_preload_report_notes(opts) + |> maybe_set_thread_muted_field(opts) + |> maybe_order(opts) + |> restrict_recipients(recipients, opts[:user]) + |> restrict_replies(opts) + |> restrict_since(opts) + |> restrict_local(opts) + |> restrict_remote(opts) + |> restrict_actor(opts) + |> restrict_type(opts) + |> restrict_state(opts) + |> restrict_favorited_by(opts) + |> 
restrict_blocked(restrict_blocked_opts) + |> restrict_muted(restrict_muted_opts) + |> restrict_filtered(opts) + |> restrict_media(opts) + |> restrict_visibility(opts) + |> restrict_thread_visibility(opts, config) + |> restrict_reblogs(opts) + |> restrict_pinned(opts) + |> restrict_muted_reblogs(restrict_muted_reblogs_opts) + |> restrict_instance(opts) + |> restrict_announce_object_actor(opts) + |> restrict_filtered(opts) + |> Activity.restrict_deactivated_users() + |> exclude_poll_votes(opts) + |> exclude_chat_messages(opts) + |> exclude_invisible_actors(opts) + |> exclude_visibility(opts) - def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do - list_memberships = Pleroma.List.memberships(opts[:user]) - - fetch_activities_query(recipients ++ list_memberships, opts) - |> Pagination.fetch_paginated(opts, pagination) - |> Enum.reverse() - |> maybe_update_cc(list_memberships, opts[:user]) + if Config.feature_enabled?(:improved_hashtag_timeline) do + query + |> restrict_hashtag_any(opts) + |> restrict_hashtag_all(opts) + |> restrict_hashtag_reject_any(opts) + else + query + |> restrict_embedded_tag_any(opts) + |> restrict_embedded_tag_all(opts) + |> restrict_embedded_tag_reject_any(opts) + end end @doc """ @@ -1250,21 +1393,17 @@ defp get_actor_url(url) when is_list(url) do defp get_actor_url(_url), do: nil + defp normalize_image(%{"url" => url}) do + %{ + "type" => "Image", + "url" => [%{"href" => url}] + } + end + + defp normalize_image(urls) when is_list(urls), do: urls |> List.first() |> normalize_image() + defp normalize_image(_), do: nil + defp object_to_user_data(data) do - avatar = - data["icon"]["url"] && - %{ - "type" => "Image", - "url" => [%{"href" => data["icon"]["url"]}] - } - - banner = - data["image"]["url"] && - %{ - "type" => "Image", - "url" => [%{"href" => data["image"]["url"]}] - } - fields = data |> Map.get("attachment", []) @@ -1290,6 +1429,9 @@ defp object_to_user_data(data) do invisible = data["invisible"] || false actor_type = data["type"] || "Person" + featured_address = data["featured"] + {:ok, pinned_objects} = fetch_and_prepare_featured_from_ap_id(featured_address) + public_key = if is_map(data["publicKey"]) && is_binary(data["publicKey"]["publicKeyPem"]) do data["publicKey"]["publicKeyPem"] @@ -1308,23 +1450,25 @@ defp object_to_user_data(data) do ap_id: data["id"], uri: get_actor_url(data["url"]), ap_enabled: true, - banner: banner, + banner: normalize_image(data["image"]), fields: fields, emoji: emojis, is_locked: is_locked, is_discoverable: is_discoverable, invisible: invisible, - avatar: avatar, + avatar: normalize_image(data["icon"]), name: data["name"], follower_address: data["followers"], following_address: data["following"], + featured_address: featured_address, bio: data["summary"] || "", actor_type: actor_type, also_known_as: Map.get(data, "alsoKnownAs", []), public_key: public_key, inbox: data["inbox"], shared_inbox: shared_inbox, - accepts_chat_messages: accepts_chat_messages + accepts_chat_messages: accepts_chat_messages, + pinned_objects: pinned_objects } # nickname can be nil because of virtual actors @@ -1462,6 +1606,41 @@ def maybe_handle_clashing_nickname(data) do end end + def pin_data_from_featured_collection(%{ + "type" => type, + "orderedItems" => objects + }) + when type in ["OrderedCollection", "Collection"] do + Map.new(objects, fn %{"id" => object_ap_id} -> {object_ap_id, NaiveDateTime.utc_now()} end) + end + + def fetch_and_prepare_featured_from_ap_id(nil) do + {:ok, %{}} + end + + def 
fetch_and_prepare_featured_from_ap_id(ap_id) do + with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id) do + {:ok, pin_data_from_featured_collection(data)} + else + e -> + Logger.error("Could not decode featured collection at fetch #{ap_id}, #{inspect(e)}") + {:ok, %{}} + end + end + + def pinned_fetch_task(nil), do: nil + + def pinned_fetch_task(%{pinned_objects: pins}) do + if Enum.all?(pins, fn {ap_id, _} -> + Object.get_cached_by_ap_id(ap_id) || + match?({:ok, _object}, Fetcher.fetch_object_from_id(ap_id)) + end) do + :ok + else + :error + end + end + def make_user_from_ap_id(ap_id) do user = User.get_cached_by_ap_id(ap_id) @@ -1469,6 +1648,8 @@ def make_user_from_ap_id(ap_id) do Transmogrifier.upgrade_user_from_ap_id(ap_id) else with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id) do + {:ok, _pid} = Task.start(fn -> pinned_fetch_task(data) end) + if user do user |> User.remote_user_changeset(data) diff --git a/lib/pleroma/web/activity_pub/activity_pub_controller.ex b/lib/pleroma/web/activity_pub/activity_pub_controller.ex index 9d3dcc7f9..5aa3b281a 100644 --- a/lib/pleroma/web/activity_pub/activity_pub_controller.ex +++ b/lib/pleroma/web/activity_pub/activity_pub_controller.ex @@ -543,4 +543,12 @@ def upload_media(%{assigns: %{user: %User{} = user}} = conn, %{"file" => file} = |> json(object.data) end end + + def pinned(conn, %{"nickname" => nickname}) do + with %User{} = user <- User.get_cached_by_nickname(nickname) do + conn + |> put_resp_header("content-type", "application/activity+json") + |> json(UserView.render("featured.json", %{user: user})) + end + end end diff --git a/lib/pleroma/web/activity_pub/builder.ex b/lib/pleroma/web/activity_pub/builder.ex index f56bfc600..91a45836f 100644 --- a/lib/pleroma/web/activity_pub/builder.ex +++ b/lib/pleroma/web/activity_pub/builder.ex @@ -273,4 +273,36 @@ defp object_action(actor, object) do "context" => object.data["context"] }, []} end + + @spec pin(User.t(), Object.t()) :: {:ok, map(), keyword()} + def pin(%User{} = user, object) do + {:ok, + %{ + "id" => Utils.generate_activity_id(), + "target" => pinned_url(user.nickname), + "object" => object.data["id"], + "actor" => user.ap_id, + "type" => "Add", + "to" => [Pleroma.Constants.as_public()], + "cc" => [user.follower_address] + }, []} + end + + @spec unpin(User.t(), Object.t()) :: {:ok, map, keyword()} + def unpin(%User{} = user, object) do + {:ok, + %{ + "id" => Utils.generate_activity_id(), + "target" => pinned_url(user.nickname), + "object" => object.data["id"], + "actor" => user.ap_id, + "type" => "Remove", + "to" => [Pleroma.Constants.as_public()], + "cc" => [user.follower_address] + }, []} + end + + defp pinned_url(nickname) when is_binary(nickname) do + Pleroma.Web.Router.Helpers.activity_pub_url(Pleroma.Web.Endpoint, :pinned, nickname) + end end diff --git a/lib/pleroma/web/activity_pub/mrf.ex b/lib/pleroma/web/activity_pub/mrf.ex index ef5a09a93..f2fec3ff6 100644 --- a/lib/pleroma/web/activity_pub/mrf.ex +++ b/lib/pleroma/web/activity_pub/mrf.ex @@ -92,7 +92,9 @@ def pipeline_filter(%{} = message, meta) do end def get_policies do - Pleroma.Config.get([:mrf, :policies], []) |> get_policies() + Pleroma.Config.get([:mrf, :policies], []) + |> get_policies() + |> Enum.concat([Pleroma.Web.ActivityPub.MRF.HashtagPolicy]) end defp get_policies(policy) when is_atom(policy), do: [policy] diff --git a/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex new file mode 100644 index 000000000..7307c9c14 
--- /dev/null +++ b/lib/pleroma/web/activity_pub/mrf/follow_bot_policy.ex @@ -0,0 +1,59 @@ +defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicy do + @behaviour Pleroma.Web.ActivityPub.MRF + alias Pleroma.Config + alias Pleroma.User + alias Pleroma.Web.CommonAPI + + require Logger + + @impl true + def filter(message) do + with follower_nickname <- Config.get([:mrf_follow_bot, :follower_nickname]), + %User{actor_type: "Service"} = follower <- + User.get_cached_by_nickname(follower_nickname), + %{"type" => "Create", "object" => %{"type" => "Note"}} <- message do + try_follow(follower, message) + else + nil -> + Logger.warn( + "#{__MODULE__} skipped because of missing `:mrf_follow_bot, :follower_nickname` configuration, the :follower_nickname + account does not exist, or the account is not correctly configured as a bot." + ) + + {:ok, message} + + _ -> + {:ok, message} + end + end + + defp try_follow(follower, message) do + to = Map.get(message, "to", []) + cc = Map.get(message, "cc", []) + actor = [message["actor"]] + + Enum.concat([to, cc, actor]) + |> List.flatten() + |> Enum.uniq() + |> User.get_all_by_ap_id() + |> Enum.each(fn user -> + with false <- user.local, + false <- User.following?(follower, user), + false <- User.locked?(user), + false <- (user.bio || "") |> String.downcase() |> String.contains?("nobot") do + Logger.debug( + "#{__MODULE__}: Follow request from #{follower.nickname} to #{user.nickname}" + ) + + CommonAPI.follow(follower, user) + end + end) + + {:ok, message} + end + + @impl true + def describe do + {:ok, %{}} + end +end diff --git a/lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex b/lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex new file mode 100644 index 000000000..def0c437c --- /dev/null +++ b/lib/pleroma/web/activity_pub/mrf/hashtag_policy.ex @@ -0,0 +1,116 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.HashtagPolicy do + require Pleroma.Constants + + alias Pleroma.Config + alias Pleroma.Object + + @moduledoc """ + Reject, TWKN-remove or Set-Sensitive messages with specific hashtags (without the leading #) + + Note: This MRF Policy is always enabled; if you want to disable it, you have to set empty lists.
+ """ + + @behaviour Pleroma.Web.ActivityPub.MRF + + defp check_reject(message, hashtags) do + if Enum.any?(Config.get([:mrf_hashtag, :reject]), fn match -> match in hashtags end) do + {:reject, "[HashtagPolicy] Matches with rejected keyword"} + else + {:ok, message} + end + end + + defp check_ftl_removal(%{"to" => to} = message, hashtags) do + if Pleroma.Constants.as_public() in to and + Enum.any?(Config.get([:mrf_hashtag, :federated_timeline_removal]), fn match -> + match in hashtags + end) do + to = List.delete(to, Pleroma.Constants.as_public()) + cc = [Pleroma.Constants.as_public() | message["cc"] || []] + + message = + message + |> Map.put("to", to) + |> Map.put("cc", cc) + |> Kernel.put_in(["object", "to"], to) + |> Kernel.put_in(["object", "cc"], cc) + + {:ok, message} + else + {:ok, message} + end + end + + defp check_ftl_removal(message, _hashtags), do: {:ok, message} + + defp check_sensitive(message, hashtags) do + if Enum.any?(Config.get([:mrf_hashtag, :sensitive]), fn match -> match in hashtags end) do + {:ok, Kernel.put_in(message, ["object", "sensitive"], true)} + else + {:ok, message} + end + end + + @impl true + def filter(%{"type" => "Create", "object" => object} = message) do + hashtags = Object.hashtags(%Object{data: object}) + + if hashtags != [] do + with {:ok, message} <- check_reject(message, hashtags), + {:ok, message} <- check_ftl_removal(message, hashtags), + {:ok, message} <- check_sensitive(message, hashtags) do + {:ok, message} + end + else + {:ok, message} + end + end + + @impl true + def filter(message), do: {:ok, message} + + @impl true + def describe do + mrf_hashtag = + Config.get(:mrf_hashtag) + |> Enum.into(%{}) + + {:ok, %{mrf_hashtag: mrf_hashtag}} + end + + @impl true + def config_description do + %{ + key: :mrf_hashtag, + related_policy: "Pleroma.Web.ActivityPub.MRF.HashtagPolicy", + label: "MRF Hashtag", + description: @moduledoc, + children: [ + %{ + key: :reject, + type: {:list, :string}, + description: "A list of hashtags which result in message being rejected.", + suggestions: ["foo"] + }, + %{ + key: :federated_timeline_removal, + type: {:list, :string}, + description: + "A list of hashtags which result in message being removed from federated timelines (a.k.a unlisted).", + suggestions: ["foo"] + }, + %{ + key: :sensitive, + type: {:list, :string}, + description: + "A list of hashtags which result in message being set as sensitive (a.k.a NSFW/R-18)", + suggestions: ["nsfw", "r18"] + } + ] + } + end +end diff --git a/lib/pleroma/web/activity_pub/mrf/simple_policy.ex b/lib/pleroma/web/activity_pub/mrf/simple_policy.ex index bb3838d2c..62024c58c 100644 --- a/lib/pleroma/web/activity_pub/mrf/simple_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/simple_policy.ex @@ -64,20 +64,16 @@ defp check_media_nsfw( %{host: actor_host} = _actor_info, %{ "type" => "Create", - "object" => child_object + "object" => %{} = _child_object } = object - ) - when is_map(child_object) do + ) do media_nsfw = Config.get([:mrf_simple, :media_nsfw]) |> MRF.subdomains_regex() object = if MRF.subdomain_match?(media_nsfw, actor_host) do - tags = (child_object["tag"] || []) ++ ["nsfw"] - child_object = Map.put(child_object, "tag", tags) - child_object = Map.put(child_object, "sensitive", true) - Map.put(object, "object", child_object) + Kernel.put_in(object, ["object", "sensitive"], true) else object end diff --git a/lib/pleroma/web/activity_pub/mrf/tag_policy.ex b/lib/pleroma/web/activity_pub/mrf/tag_policy.ex index 5739cee63..528093ac0 100644 --- 
a/lib/pleroma/web/activity_pub/mrf/tag_policy.ex +++ b/lib/pleroma/web/activity_pub/mrf/tag_policy.ex @@ -28,20 +28,11 @@ defp process_tag( "mrf_tag:media-force-nsfw", %{ "type" => "Create", - "object" => %{"attachment" => child_attachment} = object + "object" => %{"attachment" => child_attachment} } = message ) when length(child_attachment) > 0 do - tags = (object["tag"] || []) ++ ["nsfw"] - - object = - object - |> Map.put("tag", tags) - |> Map.put("sensitive", true) - - message = Map.put(message, "object", object) - - {:ok, message} + {:ok, Kernel.put_in(message, ["object", "sensitive"], true)} end defp process_tag( diff --git a/lib/pleroma/web/activity_pub/object_validator.ex b/lib/pleroma/web/activity_pub/object_validator.ex index 297c19cc0..1dce33f1a 100644 --- a/lib/pleroma/web/activity_pub/object_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validator.ex @@ -17,6 +17,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do alias Pleroma.Object.Containment alias Pleroma.User alias Pleroma.Web.ActivityPub.ObjectValidators.AcceptRejectValidator + alias Pleroma.Web.ActivityPub.ObjectValidators.AddRemoveValidator alias Pleroma.Web.ActivityPub.ObjectValidators.AnnounceValidator alias Pleroma.Web.ActivityPub.ObjectValidators.AnswerValidator alias Pleroma.Web.ActivityPub.ObjectValidators.ArticleNoteValidator @@ -37,37 +38,6 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidator do @impl true def validate(object, meta) - def validate(%{"type" => type} = object, meta) - when type in ~w[Accept Reject] do - with {:ok, object} <- - object - |> AcceptRejectValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => "Event"} = object, meta) do - with {:ok, object} <- - object - |> EventValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => "Follow"} = object, meta) do - with {:ok, object} <- - object - |> FollowValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - def validate(%{"type" => "Block"} = block_activity, meta) do with {:ok, block_activity} <- block_activity @@ -87,16 +57,6 @@ def validate(%{"type" => "Block"} = block_activity, meta) do end end - def validate(%{"type" => "Update"} = update_activity, meta) do - with {:ok, update_activity} <- - update_activity - |> UpdateValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - update_activity = stringify_keys(update_activity) - {:ok, update_activity, meta} - end - end - def validate(%{"type" => "Undo"} = object, meta) do with {:ok, object} <- object @@ -123,76 +83,6 @@ def validate(%{"type" => "Delete"} = object, meta) do end end - def validate(%{"type" => "Like"} = object, meta) do - with {:ok, object} <- - object - |> LikeValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => "ChatMessage"} = object, meta) do - with {:ok, object} <- - object - |> ChatMessageValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => "Question"} = object, meta) do - with {:ok, object} <- - object - |> QuestionValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - 
object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => type} = object, meta) when type in ~w[Audio Video] do - with {:ok, object} <- - object - |> AudioVideoValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => "Article"} = object, meta) do - with {:ok, object} <- - object - |> ArticleNoteValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => "Answer"} = object, meta) do - with {:ok, object} <- - object - |> AnswerValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - - def validate(%{"type" => "EmojiReact"} = object, meta) do - with {:ok, object} <- - object - |> EmojiReactValidator.cast_and_validate() - |> Ecto.Changeset.apply_action(:insert) do - object = stringify_keys(object) - {:ok, object, meta} - end - end - def validate( %{"type" => "Create", "object" => %{"type" => "ChatMessage"} = object} = create_activity, meta @@ -224,10 +114,60 @@ def validate( end end - def validate(%{"type" => "Announce"} = object, meta) do + def validate(%{"type" => type} = object, meta) + when type in ~w[Event Question Audio Video Article] do + validator = + case type do + "Event" -> EventValidator + "Question" -> QuestionValidator + "Audio" -> AudioVideoValidator + "Video" -> AudioVideoValidator + "Article" -> ArticleNoteValidator + end + with {:ok, object} <- object - |> AnnounceValidator.cast_and_validate() + |> validator.cast_and_validate() + |> Ecto.Changeset.apply_action(:insert) do + object = stringify_keys(object) + + # Insert copy of hashtags as strings for the non-hashtag table indexing + tag = (object["tag"] || []) ++ Object.hashtags(%Object{data: object}) + object = Map.put(object, "tag", tag) + + {:ok, object, meta} + end + end + + def validate(%{"type" => type} = object, meta) + when type in ~w[Accept Reject Follow Update Like EmojiReact Announce + ChatMessage Answer] do + validator = + case type do + "Accept" -> AcceptRejectValidator + "Reject" -> AcceptRejectValidator + "Follow" -> FollowValidator + "Update" -> UpdateValidator + "Like" -> LikeValidator + "EmojiReact" -> EmojiReactValidator + "Announce" -> AnnounceValidator + "ChatMessage" -> ChatMessageValidator + "Answer" -> AnswerValidator + end + + with {:ok, object} <- + object + |> validator.cast_and_validate() + |> Ecto.Changeset.apply_action(:insert) do + object = stringify_keys(object) + {:ok, object, meta} + end + end + + def validate(%{"type" => type} = object, meta) when type in ~w(Add Remove) do + with {:ok, object} <- + object + |> AddRemoveValidator.cast_and_validate() |> Ecto.Changeset.apply_action(:insert) do object = stringify_keys(object) {:ok, object, meta} @@ -260,7 +200,7 @@ def cast_and_apply(%{"type" => "Article"} = object) do def cast_and_apply(o), do: {:error, {:validator_not_set, o}} - # is_struct/1 isn't present in Elixir 1.8.x + # is_struct/1 appears in Elixir 1.11 def stringify_keys(%{__struct__: _} = object) do object |> Map.from_struct() diff --git a/lib/pleroma/web/activity_pub/object_validators/accept_reject_validator.ex b/lib/pleroma/web/activity_pub/object_validators/accept_reject_validator.ex index d31e780c3..b577a1044 100644 --- a/lib/pleroma/web/activity_pub/object_validators/accept_reject_validator.ex +++ 
b/lib/pleroma/web/activity_pub/object_validators/accept_reject_validator.ex @@ -27,7 +27,7 @@ def cast_data(data) do |> cast(data, __schema__(:fields)) end - def validate_data(cng) do + defp validate_data(cng) do cng |> validate_required([:id, :type, :actor, :to, :cc, :object]) |> validate_inclusion(:type, ["Accept", "Reject"]) diff --git a/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex b/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex new file mode 100644 index 000000000..f885aabe4 --- /dev/null +++ b/lib/pleroma/web/activity_pub/object_validators/add_remove_validator.ex @@ -0,0 +1,77 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.ObjectValidators.AddRemoveValidator do + use Ecto.Schema + + import Ecto.Changeset + import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations + + require Pleroma.Constants + + alias Pleroma.EctoType.ActivityPub.ObjectValidators + alias Pleroma.User + + @primary_key false + + embedded_schema do + field(:id, ObjectValidators.ObjectID, primary_key: true) + field(:target) + field(:object, ObjectValidators.ObjectID) + field(:actor, ObjectValidators.ObjectID) + field(:type) + field(:to, ObjectValidators.Recipients, default: []) + field(:cc, ObjectValidators.Recipients, default: []) + end + + def cast_and_validate(data) do + {:ok, actor} = User.get_or_fetch_by_ap_id(data["actor"]) + + {:ok, actor} = maybe_refetch_user(actor) + + data + |> maybe_fix_data_for_mastodon(actor) + |> cast_data() + |> validate_data(actor) + end + + defp maybe_fix_data_for_mastodon(data, actor) do + # Mastodon sends pin/unpin objects without id, to, cc fields + data + |> Map.put_new("id", Pleroma.Web.ActivityPub.Utils.generate_activity_id()) + |> Map.put_new("to", [Pleroma.Constants.as_public()]) + |> Map.put_new("cc", [actor.follower_address]) + end + + defp cast_data(data) do + cast(%__MODULE__{}, data, __schema__(:fields)) + end + + defp validate_data(changeset, actor) do + changeset + |> validate_required([:id, :target, :object, :actor, :type, :to, :cc]) + |> validate_inclusion(:type, ~w(Add Remove)) + |> validate_actor_presence() + |> validate_collection_belongs_to_actor(actor) + |> validate_object_presence() + end + + defp validate_collection_belongs_to_actor(changeset, actor) do + validate_change(changeset, :target, fn :target, target -> + if target == actor.featured_address do + [] + else + [target: "collection doesn't belong to actor"] + end + end) + end + + defp maybe_refetch_user(%User{featured_address: address} = user) when is_binary(address) do + {:ok, user} + end + + defp maybe_refetch_user(%User{ap_id: ap_id}) do + Pleroma.Web.ActivityPub.Transmogrifier.upgrade_user_from_ap_id(ap_id) + end +end diff --git a/lib/pleroma/web/activity_pub/object_validators/announce_validator.ex b/lib/pleroma/web/activity_pub/object_validators/announce_validator.ex index b08a33e68..576341790 100644 --- a/lib/pleroma/web/activity_pub/object_validators/announce_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/announce_validator.ex @@ -50,7 +50,7 @@ def fix_after_cast(cng) do cng end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Announce"]) |> validate_required([:id, :type, :object, :actor, :to, :cc]) diff --git a/lib/pleroma/web/activity_pub/object_validators/answer_validator.ex 
b/lib/pleroma/web/activity_pub/object_validators/answer_validator.ex index 15e4413cd..c9bd9e42d 100644 --- a/lib/pleroma/web/activity_pub/object_validators/answer_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/answer_validator.ex @@ -50,7 +50,7 @@ def changeset(struct, data) do |> cast(data, __schema__(:fields)) end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Answer"]) |> validate_required([:id, :inReplyTo, :name, :attributedTo, :actor]) diff --git a/lib/pleroma/web/activity_pub/object_validators/article_note_validator.ex b/lib/pleroma/web/activity_pub/object_validators/article_note_validator.ex index b0388ef3b..39ef6dc29 100644 --- a/lib/pleroma/web/activity_pub/object_validators/article_note_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/article_note_validator.ex @@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNoteValidator do alias Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes alias Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations + alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator alias Pleroma.Web.ActivityPub.Transmogrifier import Ecto.Changeset @@ -22,8 +23,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.ArticleNoteValidator do field(:cc, ObjectValidators.Recipients, default: []) field(:bto, ObjectValidators.Recipients, default: []) field(:bcc, ObjectValidators.Recipients, default: []) - # TODO: Write type - field(:tag, {:array, :map}, default: []) + embeds_many(:tag, TagValidator) field(:type, :string) field(:name, :string) @@ -90,11 +90,12 @@ def changeset(struct, data) do data = fix(data) struct - |> cast(data, __schema__(:fields) -- [:attachment]) + |> cast(data, __schema__(:fields) -- [:attachment, :tag]) |> cast_embed(:attachment) + |> cast_embed(:tag) end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Article", "Note"]) |> validate_required([:id, :actor, :attributedTo, :type, :context, :context_id]) diff --git a/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex b/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex index a99b40adc..bba2f5eb0 100644 --- a/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/attachment_validator.ex @@ -6,7 +6,6 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator do use Ecto.Schema alias Pleroma.EctoType.ActivityPub.ObjectValidators - alias Pleroma.Web.ActivityPub.ObjectValidators.UrlObjectValidator import Ecto.Changeset @@ -92,7 +91,7 @@ defp fix_url(data) do end end - def validate_data(cng) do + defp validate_data(cng) do cng |> validate_inclusion(:type, ~w[Document Audio Image Video]) |> validate_required([:mediaType, :url, :type]) diff --git a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex b/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex index 4a96fef52..27e14b16d 100644 --- a/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/audio_video_validator.ex @@ -5,11 +5,11 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioVideoValidator do use Ecto.Schema - alias Pleroma.EarmarkRenderer alias Pleroma.EctoType.ActivityPub.ObjectValidators alias Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator 
alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes alias Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations + alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator alias Pleroma.Web.ActivityPub.Transmogrifier import Ecto.Changeset @@ -23,8 +23,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.AudioVideoValidator do field(:cc, ObjectValidators.Recipients, default: []) field(:bto, ObjectValidators.Recipients, default: []) field(:bcc, ObjectValidators.Recipients, default: []) - # TODO: Write type - field(:tag, {:array, :map}, default: []) + embeds_many(:tag, TagValidator) field(:type, :string) field(:name, :string) @@ -110,7 +109,7 @@ defp fix_content(%{"mediaType" => "text/markdown", "content" => content} = data) when is_binary(content) do content = content - |> Earmark.as_html!(%Earmark.Options{renderer: EarmarkRenderer}) + |> Pleroma.Formatter.markdown_to_html() |> Pleroma.HTML.filter_tags() Map.put(data, "content", content) @@ -132,11 +131,12 @@ def changeset(struct, data) do data = fix(data) struct - |> cast(data, __schema__(:fields) -- [:attachment]) + |> cast(data, __schema__(:fields) -- [:attachment, :tag]) |> cast_embed(:attachment) + |> cast_embed(:tag) end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Audio", "Video"]) |> validate_required([:id, :actor, :attributedTo, :type, :context, :attachment]) diff --git a/lib/pleroma/web/activity_pub/object_validators/block_validator.ex b/lib/pleroma/web/activity_pub/object_validators/block_validator.ex index c5f77bb76..88948135f 100644 --- a/lib/pleroma/web/activity_pub/object_validators/block_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/block_validator.ex @@ -26,7 +26,7 @@ def cast_data(data) do |> cast(data, __schema__(:fields)) end - def validate_data(cng) do + defp validate_data(cng) do cng |> validate_required([:id, :type, :actor, :to, :cc, :object]) |> validate_inclusion(:type, ["Block"]) diff --git a/lib/pleroma/web/activity_pub/object_validators/chat_message_validator.ex b/lib/pleroma/web/activity_pub/object_validators/chat_message_validator.ex index 1189778f2..b153156b0 100644 --- a/lib/pleroma/web/activity_pub/object_validators/chat_message_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/chat_message_validator.ex @@ -67,7 +67,7 @@ def changeset(struct, data) do |> cast_embed(:attachment) end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["ChatMessage"]) |> validate_required([:id, :actor, :to, :type, :published]) diff --git a/lib/pleroma/web/activity_pub/object_validators/common_validations.ex b/lib/pleroma/web/activity_pub/object_validators/common_validations.ex index 093549a45..940430588 100644 --- a/lib/pleroma/web/activity_pub/object_validators/common_validations.ex +++ b/lib/pleroma/web/activity_pub/object_validators/common_validations.ex @@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations do alias Pleroma.Object alias Pleroma.User + @spec validate_any_presence(Ecto.Changeset.t(), [atom()]) :: Ecto.Changeset.t() def validate_any_presence(cng, fields) do non_empty = fields @@ -29,6 +30,7 @@ def validate_any_presence(cng, fields) do end end + @spec validate_actor_presence(Ecto.Changeset.t(), keyword()) :: Ecto.Changeset.t() def validate_actor_presence(cng, options \\ []) do field_name = Keyword.get(options, :field_name, :actor) @@ -47,6 +49,7 @@ def validate_actor_presence(cng, options \\ []) do end) end + @spec 
validate_object_presence(Ecto.Changeset.t(), keyword()) :: Ecto.Changeset.t() def validate_object_presence(cng, options \\ []) do field_name = Keyword.get(options, :field_name, :object) allowed_types = Keyword.get(options, :allowed_types, false) @@ -68,6 +71,7 @@ def validate_object_presence(cng, options \\ []) do end) end + @spec validate_object_or_user_presence(Ecto.Changeset.t(), keyword()) :: Ecto.Changeset.t() def validate_object_or_user_presence(cng, options \\ []) do field_name = Keyword.get(options, :field_name, :object) options = Keyword.put(options, :field_name, field_name) @@ -83,6 +87,7 @@ def validate_object_or_user_presence(cng, options \\ []) do if actor_cng.valid?, do: actor_cng, else: object_cng end + @spec validate_host_match(Ecto.Changeset.t(), [atom()]) :: Ecto.Changeset.t() def validate_host_match(cng, fields \\ [:id, :actor]) do if same_domain?(cng, fields) do cng @@ -95,6 +100,7 @@ def validate_host_match(cng, fields \\ [:id, :actor]) do end end + @spec validate_fields_match(Ecto.Changeset.t(), [atom()]) :: Ecto.Changeset.t() def validate_fields_match(cng, fields) do if map_unique?(cng, fields) do cng @@ -122,12 +128,14 @@ defp map_unique?(cng, fields, func \\ & &1) do end) end + @spec same_domain?(Ecto.Changeset.t(), [atom()]) :: boolean() def same_domain?(cng, fields \\ [:actor, :object]) do map_unique?(cng, fields, fn value -> URI.parse(value).host end) end # This figures out if a user is able to create, delete or modify something # based on the domain and superuser status + @spec validate_modification_rights(Ecto.Changeset.t()) :: Ecto.Changeset.t() def validate_modification_rights(cng) do actor = User.get_cached_by_ap_id(get_field(cng, :actor)) diff --git a/lib/pleroma/web/activity_pub/object_validators/create_chat_message_validator.ex b/lib/pleroma/web/activity_pub/object_validators/create_chat_message_validator.ex index 8384c16a7..7a31a99bf 100644 --- a/lib/pleroma/web/activity_pub/object_validators/create_chat_message_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/create_chat_message_validator.ex @@ -39,7 +39,7 @@ def cast_and_validate(data, meta \\ []) do |> validate_data(meta) end - def validate_data(cng, meta \\ []) do + defp validate_data(cng, meta) do cng |> validate_required([:id, :actor, :to, :type, :object]) |> validate_inclusion(:type, ["Create"]) diff --git a/lib/pleroma/web/activity_pub/object_validators/create_generic_validator.ex b/lib/pleroma/web/activity_pub/object_validators/create_generic_validator.ex index bf56a918c..e06e442f4 100644 --- a/lib/pleroma/web/activity_pub/object_validators/create_generic_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/create_generic_validator.ex @@ -79,7 +79,7 @@ defp fix(data, meta) do |> CommonFixes.fix_actor() end - def validate_data(cng, meta \\ []) do + defp validate_data(cng, meta) do cng |> validate_required([:actor, :type, :object]) |> validate_inclusion(:type, ["Create"]) diff --git a/lib/pleroma/web/activity_pub/object_validators/delete_validator.ex b/lib/pleroma/web/activity_pub/object_validators/delete_validator.ex index fc1a79a72..7da67bf16 100644 --- a/lib/pleroma/web/activity_pub/object_validators/delete_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/delete_validator.ex @@ -53,7 +53,7 @@ def add_deleted_activity_id(cng) do Tombstone Video } - def validate_data(cng) do + defp validate_data(cng) do cng |> validate_required([:id, :type, :actor, :to, :cc, :object]) |> validate_inclusion(:type, ["Delete"]) diff --git 
a/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex b/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex index 1906e597e..ec7566515 100644 --- a/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/emoji_react_validator.ex @@ -70,7 +70,7 @@ def validate_emoji(cng) do end end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["EmojiReact"]) |> validate_required([:id, :type, :object, :actor, :context, :to, :cc, :content]) diff --git a/lib/pleroma/web/activity_pub/object_validators/event_validator.ex b/lib/pleroma/web/activity_pub/object_validators/event_validator.ex index 2e26726f8..d42458ef5 100644 --- a/lib/pleroma/web/activity_pub/object_validators/event_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/event_validator.ex @@ -9,6 +9,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do alias Pleroma.Web.ActivityPub.ObjectValidators.AttachmentValidator alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes alias Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations + alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator alias Pleroma.Web.ActivityPub.Transmogrifier import Ecto.Changeset @@ -23,8 +24,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.EventValidator do field(:cc, ObjectValidators.Recipients, default: []) field(:bto, ObjectValidators.Recipients, default: []) field(:bcc, ObjectValidators.Recipients, default: []) - # TODO: Write type - field(:tag, {:array, :map}, default: []) + embeds_many(:tag, TagValidator) field(:type, :string) field(:name, :string) @@ -81,11 +81,12 @@ def changeset(struct, data) do data = fix(data) struct - |> cast(data, __schema__(:fields) -- [:attachment]) + |> cast(data, __schema__(:fields) -- [:attachment, :tag]) |> cast_embed(:attachment) + |> cast_embed(:tag) end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Event"]) |> validate_required([:id, :actor, :attributedTo, :type, :context, :context_id]) diff --git a/lib/pleroma/web/activity_pub/object_validators/follow_validator.ex b/lib/pleroma/web/activity_pub/object_validators/follow_validator.ex index 6e428bacc..239cee5e7 100644 --- a/lib/pleroma/web/activity_pub/object_validators/follow_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/follow_validator.ex @@ -27,7 +27,7 @@ def cast_data(data) do |> cast(data, __schema__(:fields)) end - def validate_data(cng) do + defp validate_data(cng) do cng |> validate_required([:id, :type, :actor, :to, :cc, :object]) |> validate_inclusion(:type, ["Follow"]) diff --git a/lib/pleroma/web/activity_pub/object_validators/like_validator.ex b/lib/pleroma/web/activity_pub/object_validators/like_validator.ex index 30c40b238..509da507b 100644 --- a/lib/pleroma/web/activity_pub/object_validators/like_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/like_validator.ex @@ -76,7 +76,7 @@ def fix_recipients(cng) do end end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Like"]) |> validate_required([:id, :type, :object, :actor, :context, :to, :cc]) diff --git a/lib/pleroma/web/activity_pub/object_validators/question_validator.ex b/lib/pleroma/web/activity_pub/object_validators/question_validator.ex index 6b746c997..7012e2e1d 100644 --- a/lib/pleroma/web/activity_pub/object_validators/question_validator.ex +++ 
b/lib/pleroma/web/activity_pub/object_validators/question_validator.ex @@ -10,6 +10,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator do alias Pleroma.Web.ActivityPub.ObjectValidators.CommonFixes alias Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations alias Pleroma.Web.ActivityPub.ObjectValidators.QuestionOptionsValidator + alias Pleroma.Web.ActivityPub.ObjectValidators.TagValidator alias Pleroma.Web.ActivityPub.Transmogrifier import Ecto.Changeset @@ -24,8 +25,7 @@ defmodule Pleroma.Web.ActivityPub.ObjectValidators.QuestionValidator do field(:cc, ObjectValidators.Recipients, default: []) field(:bto, ObjectValidators.Recipients, default: []) field(:bcc, ObjectValidators.Recipients, default: []) - # TODO: Write type - field(:tag, {:array, :map}, default: []) + embeds_many(:tag, TagValidator) field(:type, :string) field(:content, :string) field(:context, :string) @@ -93,13 +93,14 @@ def changeset(struct, data) do data = fix(data) struct - |> cast(data, __schema__(:fields) -- [:anyOf, :oneOf, :attachment]) + |> cast(data, __schema__(:fields) -- [:anyOf, :oneOf, :attachment, :tag]) |> cast_embed(:attachment) |> cast_embed(:anyOf) |> cast_embed(:oneOf) + |> cast_embed(:tag) end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Question"]) |> validate_required([:id, :actor, :attributedTo, :type, :context, :context_id]) diff --git a/lib/pleroma/web/activity_pub/object_validators/tag_validator.ex b/lib/pleroma/web/activity_pub/object_validators/tag_validator.ex new file mode 100644 index 000000000..751021585 --- /dev/null +++ b/lib/pleroma/web/activity_pub/object_validators/tag_validator.ex @@ -0,0 +1,77 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.ObjectValidators.TagValidator do + use Ecto.Schema + + alias Pleroma.EctoType.ActivityPub.ObjectValidators + + import Ecto.Changeset + + @primary_key false + embedded_schema do + # Common + field(:type, :string) + field(:name, :string) + + # Mention, Hashtag + field(:href, ObjectValidators.Uri) + + # Emoji + embeds_one :icon, IconObjectValidator, primary_key: false do + field(:type, :string) + field(:url, ObjectValidators.Uri) + end + + field(:updated, ObjectValidators.DateTime) + field(:id, ObjectValidators.Uri) + end + + def cast_and_validate(data) do + data + |> cast_data() + end + + def cast_data(data) do + %__MODULE__{} + |> changeset(data) + end + + def changeset(struct, %{"type" => "Mention"} = data) do + struct + |> cast(data, [:type, :name, :href]) + |> validate_required([:type, :href]) + end + + def changeset(struct, %{"type" => "Hashtag", "name" => name} = data) do + name = + cond do + "#" <> name -> name + name -> name + end + |> String.downcase() + + data = Map.put(data, "name", name) + + struct + |> cast(data, [:type, :name, :href]) + |> validate_required([:type, :name]) + end + + def changeset(struct, %{"type" => "Emoji"} = data) do + data = Map.put(data, "name", String.trim(data["name"], ":")) + + struct + |> cast(data, [:type, :name, :updated, :id]) + |> cast_embed(:icon, with: &icon_changeset/2) + |> validate_required([:type, :name, :icon]) + end + + def icon_changeset(struct, data) do + struct + |> cast(data, [:type, :url]) + |> validate_inclusion(:type, ~w[Image]) + |> validate_required([:type, :url]) + end +end diff --git a/lib/pleroma/web/activity_pub/object_validators/undo_validator.ex 
b/lib/pleroma/web/activity_pub/object_validators/undo_validator.ex index 783a79ddb..e8af60ffa 100644 --- a/lib/pleroma/web/activity_pub/object_validators/undo_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/undo_validator.ex @@ -38,7 +38,7 @@ def changeset(struct, data) do |> cast(data, __schema__(:fields)) end - def validate_data(data_cng) do + defp validate_data(data_cng) do data_cng |> validate_inclusion(:type, ["Undo"]) |> validate_required([:id, :type, :object, :actor, :to, :cc]) diff --git a/lib/pleroma/web/activity_pub/object_validators/update_validator.ex b/lib/pleroma/web/activity_pub/object_validators/update_validator.ex index a66d41400..6bb1dc7fa 100644 --- a/lib/pleroma/web/activity_pub/object_validators/update_validator.ex +++ b/lib/pleroma/web/activity_pub/object_validators/update_validator.ex @@ -28,7 +28,7 @@ def cast_data(data) do |> cast(data, __schema__(:fields)) end - def validate_data(cng) do + defp validate_data(cng) do cng |> validate_required([:id, :type, :actor, :to, :cc, :object]) |> validate_inclusion(:type, ["Update"]) diff --git a/lib/pleroma/web/activity_pub/pipeline.ex b/lib/pleroma/web/activity_pub/pipeline.ex index 195596f94..a0f2e0312 100644 --- a/lib/pleroma/web/activity_pub/pipeline.ex +++ b/lib/pleroma/web/activity_pub/pipeline.ex @@ -7,6 +7,7 @@ defmodule Pleroma.Web.ActivityPub.Pipeline do alias Pleroma.Config alias Pleroma.Object alias Pleroma.Repo + alias Pleroma.Utils alias Pleroma.Web.ActivityPub.ActivityPub alias Pleroma.Web.ActivityPub.MRF alias Pleroma.Web.ActivityPub.ObjectValidator @@ -24,7 +25,7 @@ defmodule Pleroma.Web.ActivityPub.Pipeline do @spec common_pipeline(map(), keyword()) :: {:ok, Activity.t() | Object.t(), keyword()} | {:error, any()} def common_pipeline(object, meta) do - case Repo.transaction(fn -> do_common_pipeline(object, meta) end) do + case Repo.transaction(fn -> do_common_pipeline(object, meta) end, Utils.query_timeout()) do {:ok, {:ok, activity, meta}} -> @side_effects.handle_after_transaction(meta) {:ok, activity, meta} @@ -40,19 +41,17 @@ def common_pipeline(object, meta) do end end - def do_common_pipeline(object, meta) do - with {_, {:ok, validated_object, meta}} <- - {:validate_object, @object_validator.validate(object, meta)}, - {_, {:ok, mrfd_object, meta}} <- - {:mrf_object, @mrf.pipeline_filter(validated_object, meta)}, - {_, {:ok, activity, meta}} <- - {:persist_object, @activity_pub.persist(mrfd_object, meta)}, - {_, {:ok, activity, meta}} <- - {:execute_side_effects, @side_effects.handle(activity, meta)}, - {_, {:ok, _}} <- {:federation, maybe_federate(activity, meta)} do - {:ok, activity, meta} + def do_common_pipeline(%{__struct__: _}, _meta), do: {:error, :is_struct} + + def do_common_pipeline(message, meta) do + with {_, {:ok, message, meta}} <- {:validate, @object_validator.validate(message, meta)}, + {_, {:ok, message, meta}} <- {:mrf, @mrf.pipeline_filter(message, meta)}, + {_, {:ok, message, meta}} <- {:persist, @activity_pub.persist(message, meta)}, + {_, {:ok, message, meta}} <- {:side_effects, @side_effects.handle(message, meta)}, + {_, {:ok, _}} <- {:federation, maybe_federate(message, meta)} do + {:ok, message, meta} else - {:mrf_object, {:reject, message, _}} -> {:reject, message} + {:mrf, {:reject, message, _}} -> {:reject, message} e -> {:error, e} end end diff --git a/lib/pleroma/web/activity_pub/side_effects.ex b/lib/pleroma/web/activity_pub/side_effects.ex index 0b9a9f0c5..5fe143c2b 100644 --- a/lib/pleroma/web/activity_pub/side_effects.ex +++ 
b/lib/pleroma/web/activity_pub/side_effects.ex @@ -276,10 +276,10 @@ def handle(%{data: %{"type" => "Delete", "object" => deleted_object}} = object, result = case deleted_object do %Object{} -> - with {:ok, deleted_object, activity} <- Object.delete(deleted_object), + with {:ok, deleted_object, _activity} <- Object.delete(deleted_object), {_, actor} when is_binary(actor) <- {:actor, deleted_object.data["actor"]}, %User{} = user <- User.get_cached_by_ap_id(actor) do - User.remove_pinnned_activity(user, activity) + User.remove_pinned_object_id(user, deleted_object.data["id"]) {:ok, user} = ActivityPub.decrease_note_count_if_public(user, deleted_object) @@ -312,6 +312,63 @@ def handle(%{data: %{"type" => "Delete", "object" => deleted_object}} = object, end end + # Tasks this handles: + # - adds pin to user + # - removes expiration job for pinned activity, if was set for expiration + @impl true + def handle(%{data: %{"type" => "Add"} = data} = object, meta) do + with %User{} = user <- User.get_cached_by_ap_id(data["actor"]), + {:ok, _user} <- User.add_pinned_object_id(user, data["object"]) do + # if pinned activity was scheduled for deletion, we remove job + if expiration = Pleroma.Workers.PurgeExpiredActivity.get_expiration(meta[:activity_id]) do + Oban.cancel_job(expiration.id) + end + + {:ok, object, meta} + else + nil -> + {:error, :user_not_found} + + {:error, changeset} -> + if changeset.errors[:pinned_objects] do + {:error, :pinned_statuses_limit_reached} + else + changeset.errors + end + end + end + + # Tasks this handles: + # - removes pin from user + # - removes corresponding Add activity + # - if activity had expiration, recreates activity expiration job + @impl true + def handle(%{data: %{"type" => "Remove"} = data} = object, meta) do + with %User{} = user <- User.get_cached_by_ap_id(data["actor"]), + {:ok, _user} <- User.remove_pinned_object_id(user, data["object"]) do + data["object"] + |> Activity.add_by_params_query(user.ap_id, user.featured_address) + |> Repo.delete_all() + + # if pinned activity was scheduled for deletion, we reschedule it for deletion + if meta[:expires_at] do + # MRF.ActivityExpirationPolicy used UTC timestamps for expires_at in original implementation + {:ok, expires_at} = + Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime.cast(meta[:expires_at]) + + Pleroma.Workers.PurgeExpiredActivity.enqueue(%{ + activity_id: meta[:activity_id], + expires_at: expires_at + }) + end + + {:ok, object, meta} + else + nil -> {:error, :user_not_found} + error -> error + end + end + # Nothing to do @impl true def handle(object, meta) do diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex index acb4f4b3e..d1a0867e2 100644 --- a/lib/pleroma/web/activity_pub/transmogrifier.ex +++ b/lib/pleroma/web/activity_pub/transmogrifier.ex @@ -32,18 +32,17 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do """ def fix_object(object, options \\ []) do object - |> strip_internal_fields - |> fix_actor - |> fix_url - |> fix_attachments - |> fix_context + |> strip_internal_fields() + |> fix_actor() + |> fix_url() + |> fix_attachments() + |> fix_context() |> fix_in_reply_to(options) - |> fix_emoji - |> fix_tag - |> set_sensitive - |> fix_content_map - |> fix_addressing - |> fix_summary + |> fix_emoji() + |> fix_tag() + |> fix_content_map() + |> fix_addressing() + |> fix_summary() |> fix_type(options) end @@ -317,10 +316,9 @@ def fix_tag(%{"tag" => tag} = object) when is_list(tag) do tags = tag |> Enum.filter(fn data -> data["type"] 
== "Hashtag" and data["name"] end) - |> Enum.map(fn %{"name" => name} -> - name - |> String.slice(1..-1) - |> String.downcase() + |> Enum.map(fn + %{"name" => "#" <> hashtag} -> String.downcase(hashtag) + %{"name" => hashtag} -> String.downcase(hashtag) end) Map.put(object, "tag", tag ++ tags) @@ -538,7 +536,7 @@ def handle_incoming( end def handle_incoming(%{"type" => type} = data, _options) - when type in ~w{Like EmojiReact Announce} do + when type in ~w{Like EmojiReact Announce Add Remove} do with :ok <- ObjectValidator.fetch_actor_and_object(data), {:ok, activity, _meta} <- Pipeline.common_pipeline(data, local: false) do @@ -568,7 +566,7 @@ def handle_incoming( Pipeline.common_pipeline(data, local: false) do {:ok, activity} else - {:error, {:validate_object, _}} = e -> + {:error, {:validate, _}} = e -> # Check if we have a create activity for this with {:ok, object_id} <- ObjectValidators.ObjectID.cast(data["object"]), %Activity{data: %{"actor" => actor}} <- @@ -744,7 +742,6 @@ def replies(_), do: [] # Prepares the object of an outgoing create activity. def prepare_object(object) do object - |> set_sensitive |> add_hashtags |> add_mention_tags |> add_emoji_tags @@ -935,15 +932,6 @@ def set_conversation(object) do Map.put(object, "conversation", object["context"]) end - def set_sensitive(%{"sensitive" => _} = object) do - object - end - - def set_sensitive(object) do - tags = object["tag"] || [] - Map.put(object, "sensitive", "nsfw" in tags) - end - def set_type(%{"type" => "Answer"} = object) do Map.put(object, "type", "Note") end @@ -1016,6 +1004,7 @@ def upgrade_user_from_ap_id(ap_id) do with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id), {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id), {:ok, user} <- update_user(user, data) do + {:ok, _pid} = Task.start(fn -> ActivityPub.pinned_fetch_task(user) end) TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id}) {:ok, user} else diff --git a/lib/pleroma/web/activity_pub/views/user_view.ex b/lib/pleroma/web/activity_pub/views/user_view.ex index 8adc9878a..462f3b4a7 100644 --- a/lib/pleroma/web/activity_pub/views/user_view.ex +++ b/lib/pleroma/web/activity_pub/views/user_view.ex @@ -6,8 +6,10 @@ defmodule Pleroma.Web.ActivityPub.UserView do use Pleroma.Web, :view alias Pleroma.Keys + alias Pleroma.Object alias Pleroma.Repo alias Pleroma.User + alias Pleroma.Web.ActivityPub.ObjectView alias Pleroma.Web.ActivityPub.Transmogrifier alias Pleroma.Web.ActivityPub.Utils alias Pleroma.Web.Endpoint @@ -97,6 +99,7 @@ def render("user.json", %{user: user}) do "followers" => "#{user.ap_id}/followers", "inbox" => "#{user.ap_id}/inbox", "outbox" => "#{user.ap_id}/outbox", + "featured" => "#{user.ap_id}/collections/featured", "preferredUsername" => user.nickname, "name" => user.name, "summary" => user.bio, @@ -245,6 +248,24 @@ def render("activity_collection_page.json", %{ |> Map.merge(pagination) end + def render("featured.json", %{ + user: %{featured_address: featured_address, pinned_objects: pinned_objects} + }) do + objects = + pinned_objects + |> Enum.sort_by(fn {_, pinned_at} -> pinned_at end, &>=/2) + |> Enum.map(fn {id, _} -> + ObjectView.render("object.json", %{object: Object.get_cached_by_ap_id(id)}) + end) + + %{ + "id" => featured_address, + "type" => "OrderedCollection", + "orderedItems" => objects + } + |> Map.merge(Utils.make_json_ld_header()) + end + defp maybe_put_total_items(map, false, _total), do: map defp maybe_put_total_items(map, true, total) do diff --git 
a/lib/pleroma/web/admin_api/controllers/user_controller.ex b/lib/pleroma/web/admin_api/controllers/user_controller.ex index 65bc63cb9..d3e4c18a3 100644 --- a/lib/pleroma/web/admin_api/controllers/user_controller.ex +++ b/lib/pleroma/web/admin_api/controllers/user_controller.ex @@ -13,16 +13,17 @@ defmodule Pleroma.Web.AdminAPI.UserController do alias Pleroma.Web.ActivityPub.Builder alias Pleroma.Web.ActivityPub.Pipeline alias Pleroma.Web.AdminAPI - alias Pleroma.Web.AdminAPI.AccountView alias Pleroma.Web.AdminAPI.Search alias Pleroma.Web.Plugs.OAuthScopesPlug @users_page_size 50 + plug(Pleroma.Web.ApiSpec.CastAndValidate) + plug( OAuthScopesPlug, %{scopes: ["admin:read:accounts"]} - when action in [:list, :show] + when action in [:index, :show] ) plug( @@ -44,13 +45,19 @@ defmodule Pleroma.Web.AdminAPI.UserController do when action in [:follow, :unfollow] ) + plug(:put_view, Pleroma.Web.AdminAPI.AccountView) + action_fallback(AdminAPI.FallbackController) - def delete(conn, %{"nickname" => nickname}) do - delete(conn, %{"nicknames" => [nickname]}) + defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.Admin.UserOperation + + def delete(conn, %{nickname: nickname}) do + conn + |> Map.put(:body_params, %{nicknames: [nickname]}) + |> delete(%{}) end - def delete(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do + def delete(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do users = Enum.map(nicknames, &User.get_cached_by_nickname/1) Enum.each(users, fn user -> @@ -67,10 +74,16 @@ def delete(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do json(conn, nicknames) end - def follow(%{assigns: %{user: admin}} = conn, %{ - "follower" => follower_nick, - "followed" => followed_nick - }) do + def follow( + %{ + assigns: %{user: admin}, + body_params: %{ + follower: follower_nick, + followed: followed_nick + } + } = conn, + _ + ) do with %User{} = follower <- User.get_cached_by_nickname(follower_nick), %User{} = followed <- User.get_cached_by_nickname(followed_nick) do User.follow(follower, followed) @@ -86,10 +99,16 @@ def follow(%{assigns: %{user: admin}} = conn, %{ json(conn, "ok") end - def unfollow(%{assigns: %{user: admin}} = conn, %{ - "follower" => follower_nick, - "followed" => followed_nick - }) do + def unfollow( + %{ + assigns: %{user: admin}, + body_params: %{ + follower: follower_nick, + followed: followed_nick + } + } = conn, + _ + ) do with %User{} = follower <- User.get_cached_by_nickname(follower_nick), %User{} = followed <- User.get_cached_by_nickname(followed_nick) do User.unfollow(follower, followed) @@ -105,9 +124,10 @@ def unfollow(%{assigns: %{user: admin}} = conn, %{ json(conn, "ok") end - def create(%{assigns: %{user: admin}} = conn, %{"users" => users}) do + def create(%{assigns: %{user: admin}, body_params: %{users: users}} = conn, _) do changesets = - Enum.map(users, fn %{"nickname" => nickname, "email" => email, "password" => password} -> + users + |> Enum.map(fn %{nickname: nickname, email: email, password: password} -> user_data = %{ nickname: nickname, name: nickname, @@ -124,52 +144,49 @@ def create(%{assigns: %{user: admin}} = conn, %{"users" => users}) do end) case Pleroma.Repo.transaction(changesets) do - {:ok, users} -> - res = - users + {:ok, users_map} -> + users = + users_map |> Map.values() |> Enum.map(fn user -> {:ok, user} = User.post_register_action(user) user end) - |> Enum.map(&AccountView.render("created.json", %{user: &1})) ModerationLog.insert_log(%{ actor: admin, - subjects: 
Map.values(users), + subjects: users, action: "create" }) - json(conn, res) + render(conn, "created_many.json", users: users) {:error, id, changeset, _} -> - res = + changesets = Enum.map(changesets.operations, fn - {current_id, {:changeset, _current_changeset, _}} when current_id == id -> - AccountView.render("create-error.json", %{changeset: changeset}) + {^id, {:changeset, _current_changeset, _}} -> + changeset {_, {:changeset, current_changeset, _}} -> - AccountView.render("create-error.json", %{changeset: current_changeset}) + current_changeset end) conn |> put_status(:conflict) - |> json(res) + |> render("create_errors.json", changesets: changesets) end end - def show(%{assigns: %{user: admin}} = conn, %{"nickname" => nickname}) do + def show(%{assigns: %{user: admin}} = conn, %{nickname: nickname}) do with %User{} = user <- User.get_cached_by_nickname_or_id(nickname, for: admin) do - conn - |> put_view(AccountView) - |> render("show.json", %{user: user}) + render(conn, "show.json", %{user: user}) else _ -> {:error, :not_found} end end - def toggle_activation(%{assigns: %{user: admin}} = conn, %{"nickname" => nickname}) do + def toggle_activation(%{assigns: %{user: admin}} = conn, %{nickname: nickname}) do user = User.get_cached_by_nickname(nickname) {:ok, updated_user} = User.set_activation(user, !user.is_active) @@ -182,12 +199,10 @@ def toggle_activation(%{assigns: %{user: admin}} = conn, %{"nickname" => nicknam action: action }) - conn - |> put_view(AccountView) - |> render("show.json", %{user: updated_user}) + render(conn, "show.json", user: updated_user) end - def activate(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do + def activate(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do users = Enum.map(nicknames, &User.get_cached_by_nickname/1) {:ok, updated_users} = User.set_activation(users, true) @@ -197,12 +212,10 @@ def activate(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do action: "activate" }) - conn - |> put_view(AccountView) - |> render("index.json", %{users: Keyword.values(updated_users)}) + render(conn, "index.json", users: Keyword.values(updated_users)) end - def deactivate(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do + def deactivate(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do users = Enum.map(nicknames, &User.get_cached_by_nickname/1) {:ok, updated_users} = User.set_activation(users, false) @@ -212,12 +225,10 @@ def deactivate(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) d action: "deactivate" }) - conn - |> put_view(AccountView) - |> render("index.json", %{users: Keyword.values(updated_users)}) + render(conn, "index.json", users: Keyword.values(updated_users)) end - def approve(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do + def approve(%{assigns: %{user: admin}, body_params: %{nicknames: nicknames}} = conn, _) do users = Enum.map(nicknames, &User.get_cached_by_nickname/1) {:ok, updated_users} = User.approve(users) @@ -227,36 +238,27 @@ def approve(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do action: "approve" }) - conn - |> put_view(AccountView) - |> render("index.json", %{users: updated_users}) + render(conn, "index.json", users: updated_users) end - def list(conn, params) do + def index(conn, params) do {page, page_size} = page_params(params) - filters = maybe_parse_filters(params["filters"]) + filters = maybe_parse_filters(params[:filters]) search_params = %{ - query: 
params["query"], + query: params[:query], page: page, page_size: page_size, - tags: params["tags"], - name: params["name"], - email: params["email"], - actor_types: params["actor_types"] + tags: params[:tags], + name: params[:name], + email: params[:email], + actor_types: params[:actor_types] } |> Map.merge(filters) with {:ok, users, count} <- Search.user(search_params) do - json( - conn, - AccountView.render("index.json", - users: users, - count: count, - page_size: page_size - ) - ) + render(conn, "index.json", users: users, count: count, page_size: page_size) end end @@ -274,8 +276,8 @@ defp maybe_parse_filters(filters) do defp page_params(params) do { - fetch_integer_param(params, "page", 1), - fetch_integer_param(params, "page_size", @users_page_size) + fetch_integer_param(params, :page, 1), + fetch_integer_param(params, :page_size, @users_page_size) } end end diff --git a/lib/pleroma/web/admin_api/views/account_view.ex b/lib/pleroma/web/admin_api/views/account_view.ex index d7c63d385..e053a9b67 100644 --- a/lib/pleroma/web/admin_api/views/account_view.ex +++ b/lib/pleroma/web/admin_api/views/account_view.ex @@ -75,7 +75,7 @@ def render("show.json", %{user: user}) do "display_name" => display_name, "is_active" => user.is_active, "local" => user.local, - "roles" => User.roles(user), + "roles" => roles(user), "tags" => user.tags || [], "is_confirmed" => user.is_confirmed, "is_approved" => user.is_approved, @@ -85,6 +85,10 @@ def render("show.json", %{user: user}) do } end + def render("created_many.json", %{users: users}) do + render_many(users, AccountView, "created.json", as: :user) + end + def render("created.json", %{user: user}) do %{ type: "success", @@ -96,7 +100,11 @@ def render("created.json", %{user: user}) do } end - def render("create-error.json", %{changeset: %Ecto.Changeset{changes: changes, errors: errors}}) do + def render("create_errors.json", %{changesets: changesets}) do + render_many(changesets, AccountView, "create_error.json", as: :changeset) + end + + def render("create_error.json", %{changeset: %Ecto.Changeset{changes: changes, errors: errors}}) do %{ type: "error", code: 409, @@ -140,4 +148,11 @@ defp parse_error(errors) do defp image_url(%{"url" => [%{"href" => href} | _]}), do: href defp image_url(_), do: nil + + defp roles(%{is_moderator: is_moderator, is_admin: is_admin}) do + %{ + admin: is_admin, + moderator: is_moderator + } + end end diff --git a/lib/pleroma/web/api_spec.ex b/lib/pleroma/web/api_spec.ex index adc8762dc..528cd9cf4 100644 --- a/lib/pleroma/web/api_spec.ex +++ b/lib/pleroma/web/api_spec.ex @@ -92,9 +92,10 @@ def spec(opts \\ []) do "Invites", "MediaProxy cache", "OAuth application managment", - "Report managment", "Relays", - "Status administration" + "Report managment", + "Status administration", + "User administration" ] }, %{"name" => "Applications", "tags" => ["Applications", "Push subscriptions"]}, diff --git a/lib/pleroma/web/api_spec/cast_and_validate.ex b/lib/pleroma/web/api_spec/cast_and_validate.ex index a3da856ff..d23a7dcb6 100644 --- a/lib/pleroma/web/api_spec/cast_and_validate.ex +++ b/lib/pleroma/web/api_spec/cast_and_validate.ex @@ -15,6 +15,7 @@ defmodule Pleroma.Web.ApiSpec.CastAndValidate do @behaviour Plug + alias OpenApiSpex.Plug.PutApiSpec alias Plug.Conn @impl Plug @@ -25,12 +26,10 @@ def init(opts) do end @impl Plug - def call(%{private: %{open_api_spex: private_data}} = conn, %{ - operation_id: operation_id, - render_error: render_error - }) do - spec = private_data.spec - operation = 
private_data.operation_lookup[operation_id] + + def call(conn, %{operation_id: operation_id, render_error: render_error}) do + {spec, operation_lookup} = PutApiSpec.get_spec_and_operation_lookup(conn) + operation = operation_lookup[operation_id] content_type = case Conn.get_req_header(conn, "content-type") do @@ -43,8 +42,7 @@ def call(%{private: %{open_api_spex: private_data}} = conn, %{ "application/json" end - private_data = Map.put(private_data, :operation_id, operation_id) - conn = Conn.put_private(conn, :open_api_spex, private_data) + conn = Conn.put_private(conn, :operation_id, operation_id) case cast_and_validate(spec, operation, conn, content_type, strict?()) do {:ok, conn} -> @@ -64,25 +62,22 @@ def call( private: %{ phoenix_controller: controller, phoenix_action: action, - open_api_spex: private_data + open_api_spex: %{spec_module: spec_module} } } = conn, opts ) do + {spec, operation_lookup} = PutApiSpec.get_spec_and_operation_lookup(conn) + operation = - case private_data.operation_lookup[{controller, action}] do + case operation_lookup[{controller, action}] do nil -> operation_id = controller.open_api_operation(action).operationId - operation = private_data.operation_lookup[operation_id] + operation = operation_lookup[operation_id] - operation_lookup = - private_data.operation_lookup - |> Map.put({controller, action}, operation) + operation_lookup = Map.put(operation_lookup, {controller, action}, operation) - OpenApiSpex.Plug.Cache.adapter().put( - private_data.spec_module, - {private_data.spec, operation_lookup} - ) + OpenApiSpex.Plug.Cache.adapter().put(spec_module, {spec, operation_lookup}) operation diff --git a/lib/pleroma/web/api_spec/operations/admin/user_operation.ex b/lib/pleroma/web/api_spec/operations/admin/user_operation.ex new file mode 100644 index 000000000..c9d0bfd7c --- /dev/null +++ b/lib/pleroma/web/api_spec/operations/admin/user_operation.ex @@ -0,0 +1,389 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ApiSpec.Admin.UserOperation do + alias OpenApiSpex.Operation + alias OpenApiSpex.Schema + alias Pleroma.Web.ApiSpec.Schemas.ActorType + alias Pleroma.Web.ApiSpec.Schemas.ApiError + + import Pleroma.Web.ApiSpec.Helpers + + def open_api_operation(action) do + operation = String.to_existing_atom("#{action}_operation") + apply(__MODULE__, operation, []) + end + + def index_operation do + %Operation{ + tags: ["User administration"], + summary: "List users", + operationId: "AdminAPI.UserController.index", + security: [%{"oAuth" => ["admin:read:accounts"]}], + parameters: [ + Operation.parameter(:filters, :query, :string, "Comma separated list of filters"), + Operation.parameter(:query, :query, :string, "Search users query"), + Operation.parameter(:name, :query, :string, "Search by display name"), + Operation.parameter(:email, :query, :string, "Search by email"), + Operation.parameter(:page, :query, :integer, "Page Number"), + Operation.parameter(:page_size, :query, :integer, "Number of users to return per page"), + Operation.parameter( + :actor_types, + :query, + %Schema{type: :array, items: ActorType}, + "Filter by actor type" + ), + Operation.parameter( + :tags, + :query, + %Schema{type: :array, items: %Schema{type: :string}}, + "Filter by tags" + ) + | admin_api_params() + ], + responses: %{ + 200 => + Operation.response( + "Response", + "application/json", + %Schema{ + type: :object, + properties: %{ + users: %Schema{type: :array, items: user()}, 
+ count: %Schema{type: :integer}, + page_size: %Schema{type: :integer} + } + } + ), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } + end + + def create_operation do + %Operation{ + tags: ["User administration"], + summary: "Create a single or multiple users", + operationId: "AdminAPI.UserController.create", + security: [%{"oAuth" => ["admin:write:accounts"]}], + parameters: admin_api_params(), + requestBody: + request_body( + "Parameters", + %Schema{ + description: "POST body for creating users", + type: :object, + properties: %{ + users: %Schema{ + type: :array, + items: %Schema{ + type: :object, + properties: %{ + nickname: %Schema{type: :string}, + email: %Schema{type: :string}, + password: %Schema{type: :string} + } + } + } + } + } + ), + responses: %{ + 200 => + Operation.response("Response", "application/json", %Schema{ + type: :array, + items: %Schema{ + type: :object, + properties: %{ + code: %Schema{type: :integer}, + type: %Schema{type: :string}, + data: %Schema{ + type: :object, + properties: %{ + email: %Schema{type: :string, format: :email}, + nickname: %Schema{type: :string} + } + } + } + } + }), + 403 => Operation.response("Forbidden", "application/json", ApiError), + 409 => + Operation.response("Conflict", "application/json", %Schema{ + type: :array, + items: %Schema{ + type: :object, + properties: %{ + code: %Schema{type: :integer}, + error: %Schema{type: :string}, + type: %Schema{type: :string}, + data: %Schema{ + type: :object, + properties: %{ + email: %Schema{type: :string, format: :email}, + nickname: %Schema{type: :string} + } + } + } + } + }) + } + } + end + + def show_operation do + %Operation{ + tags: ["User administration"], + summary: "Show user", + operationId: "AdminAPI.UserController.show", + security: [%{"oAuth" => ["admin:read:accounts"]}], + parameters: [ + Operation.parameter( + :nickname, + :path, + :string, + "User nickname or ID" + ) + | admin_api_params() + ], + responses: %{ + 200 => Operation.response("Response", "application/json", user()), + 403 => Operation.response("Forbidden", "application/json", ApiError), + 404 => Operation.response("Not Found", "application/json", ApiError) + } + } + end + + def follow_operation do + %Operation{ + tags: ["User administration"], + summary: "Follow", + operationId: "AdminAPI.UserController.follow", + security: [%{"oAuth" => ["admin:write:follows"]}], + parameters: admin_api_params(), + requestBody: + request_body( + "Parameters", + %Schema{ + type: :object, + properties: %{ + follower: %Schema{type: :string, description: "Follower nickname"}, + followed: %Schema{type: :string, description: "Followed nickname"} + } + } + ), + responses: %{ + 200 => Operation.response("Response", "application/json", %Schema{type: :string}), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } + end + + def unfollow_operation do + %Operation{ + tags: ["User administration"], + summary: "Unfollow", + operationId: "AdminAPI.UserController.unfollow", + security: [%{"oAuth" => ["admin:write:follows"]}], + parameters: admin_api_params(), + requestBody: + request_body( + "Parameters", + %Schema{ + type: :object, + properties: %{ + follower: %Schema{type: :string, description: "Follower nickname"}, + followed: %Schema{type: :string, description: "Followed nickname"} + } + } + ), + responses: %{ + 200 => Operation.response("Response", "application/json", %Schema{type: :string}), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } + end + + def 
approve_operation do + %Operation{ + tags: ["User administration"], + summary: "Approve multiple users", + operationId: "AdminAPI.UserController.approve", + security: [%{"oAuth" => ["admin:write:accounts"]}], + parameters: admin_api_params(), + requestBody: + request_body( + "Parameters", + %Schema{ + description: "POST body for deleting multiple users", + type: :object, + properties: %{ + nicknames: %Schema{ + type: :array, + items: %Schema{type: :string} + } + } + } + ), + responses: %{ + 200 => + Operation.response("Response", "application/json", %Schema{ + type: :object, + properties: %{user: %Schema{type: :array, items: user()}} + }), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } + end + + def toggle_activation_operation do + %Operation{ + tags: ["User administration"], + summary: "Toggle user activation", + operationId: "AdminAPI.UserController.toggle_activation", + security: [%{"oAuth" => ["admin:write:accounts"]}], + parameters: [ + Operation.parameter(:nickname, :path, :string, "User nickname") + | admin_api_params() + ], + responses: %{ + 200 => Operation.response("Response", "application/json", user()), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } + end + + def activate_operation do + %Operation{ + tags: ["User administration"], + summary: "Activate multiple users", + operationId: "AdminAPI.UserController.activate", + security: [%{"oAuth" => ["admin:write:accounts"]}], + parameters: admin_api_params(), + requestBody: + request_body( + "Parameters", + %Schema{ + description: "POST body for deleting multiple users", + type: :object, + properties: %{ + nicknames: %Schema{ + type: :array, + items: %Schema{type: :string} + } + } + } + ), + responses: %{ + 200 => + Operation.response("Response", "application/json", %Schema{ + type: :object, + properties: %{user: %Schema{type: :array, items: user()}} + }), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } + end + + def deactivate_operation do + %Operation{ + tags: ["User administration"], + summary: "Deactivates multiple users", + operationId: "AdminAPI.UserController.deactivate", + security: [%{"oAuth" => ["admin:write:accounts"]}], + parameters: admin_api_params(), + requestBody: + request_body( + "Parameters", + %Schema{ + description: "POST body for deleting multiple users", + type: :object, + properties: %{ + nicknames: %Schema{ + type: :array, + items: %Schema{type: :string} + } + } + } + ), + responses: %{ + 200 => + Operation.response("Response", "application/json", %Schema{ + type: :object, + properties: %{user: %Schema{type: :array, items: user()}} + }), + 403 => Operation.response("Forbidden", "application/json", ApiError) + } + } + end + + def delete_operation do + %Operation{ + tags: ["User administration"], + summary: "Removes a single or multiple users", + operationId: "AdminAPI.UserController.delete", + security: [%{"oAuth" => ["admin:write:accounts"]}], + parameters: [ + Operation.parameter( + :nickname, + :query, + :string, + "User nickname" + ) + | admin_api_params() + ], + requestBody: + request_body( + "Parameters", + %Schema{ + description: "POST body for deleting multiple users", + type: :object, + properties: %{ + nicknames: %Schema{ + type: :array, + items: %Schema{type: :string} + } + } + } + ), + responses: %{ + 200 => + Operation.response("Response", "application/json", %Schema{ + description: "Array of nicknames", + type: :array, + items: %Schema{type: :string} + }), + 403 => Operation.response("Forbidden", 
"application/json", ApiError) + } + } + end + + defp user do + %Schema{ + type: :object, + properties: %{ + id: %Schema{type: :string}, + email: %Schema{type: :string, format: :email}, + avatar: %Schema{type: :string, format: :uri}, + nickname: %Schema{type: :string}, + display_name: %Schema{type: :string}, + is_active: %Schema{type: :boolean}, + local: %Schema{type: :boolean}, + roles: %Schema{ + type: :object, + properties: %{ + admin: %Schema{type: :boolean}, + moderator: %Schema{type: :boolean} + } + }, + tags: %Schema{type: :array, items: %Schema{type: :string}}, + is_confirmed: %Schema{type: :boolean}, + is_approved: %Schema{type: :boolean}, + url: %Schema{type: :string, format: :uri}, + registration_reason: %Schema{type: :string, nullable: true}, + actor_type: %Schema{type: :string} + } + } + end +end diff --git a/lib/pleroma/web/api_spec/operations/status_operation.ex b/lib/pleroma/web/api_spec/operations/status_operation.ex index 40edc747d..802fbef3e 100644 --- a/lib/pleroma/web/api_spec/operations/status_operation.ex +++ b/lib/pleroma/web/api_spec/operations/status_operation.ex @@ -59,7 +59,7 @@ def create_operation do Operation.response( "Status. When `scheduled_at` is present, ScheduledStatus is returned instead", "application/json", - %Schema{oneOf: [Status, ScheduledStatus]} + %Schema{anyOf: [Status, ScheduledStatus]} ), 422 => Operation.response("Bad Request / MRF Rejection", "application/json", ApiError) } @@ -182,7 +182,34 @@ def pin_operation do parameters: [id_param()], responses: %{ 200 => status_response(), - 400 => Operation.response("Error", "application/json", ApiError) + 400 => + Operation.response("Bad Request", "application/json", %Schema{ + allOf: [ApiError], + title: "Unprocessable Entity", + example: %{ + "error" => "You have already pinned the maximum number of statuses" + } + }), + 404 => + Operation.response("Not found", "application/json", %Schema{ + allOf: [ApiError], + title: "Unprocessable Entity", + example: %{ + "error" => "Record not found" + } + }), + 422 => + Operation.response( + "Unprocessable Entity", + "application/json", + %Schema{ + allOf: [ApiError], + title: "Unprocessable Entity", + example: %{ + "error" => "Someone else's status cannot be pinned" + } + } + ) } } end @@ -197,7 +224,22 @@ def unpin_operation do parameters: [id_param()], responses: %{ 200 => status_response(), - 400 => Operation.response("Error", "application/json", ApiError) + 400 => + Operation.response("Bad Request", "application/json", %Schema{ + allOf: [ApiError], + title: "Unprocessable Entity", + example: %{ + "error" => "You have already pinned the maximum number of statuses" + } + }), + 404 => + Operation.response("Not found", "application/json", %Schema{ + allOf: [ApiError], + title: "Unprocessable Entity", + example: %{ + "error" => "Record not found" + } + }) } } end diff --git a/lib/pleroma/web/api_spec/schemas/boolean_like.ex b/lib/pleroma/web/api_spec/schemas/boolean_like.ex index eb001c5bb..778158f66 100644 --- a/lib/pleroma/web/api_spec/schemas/boolean_like.ex +++ b/lib/pleroma/web/api_spec/schemas/boolean_like.ex @@ -3,6 +3,7 @@ # SPDX-License-Identifier: AGPL-3.0-only defmodule Pleroma.Web.ApiSpec.Schemas.BooleanLike do + alias OpenApiSpex.Cast alias OpenApiSpex.Schema require OpenApiSpex @@ -27,10 +28,13 @@ defmodule Pleroma.Web.ApiSpec.Schemas.BooleanLike do %Schema{type: :boolean}, %Schema{type: :string}, %Schema{type: :integer} - ] + ], + "x-validate": __MODULE__ }) - def after_cast(value, _schmea) do - {:ok, 
Pleroma.Web.ControllerHelper.truthy_param?(value)} + def cast(%Cast{value: value} = context) do + context + |> Map.put(:value, Pleroma.Web.ControllerHelper.truthy_param?(value)) + |> Cast.ok() end end diff --git a/lib/pleroma/web/api_spec/schemas/status.ex b/lib/pleroma/web/api_spec/schemas/status.ex index 42fa98718..3d042dc19 100644 --- a/lib/pleroma/web/api_spec/schemas/status.ex +++ b/lib/pleroma/web/api_spec/schemas/status.ex @@ -194,6 +194,13 @@ defmodule Pleroma.Web.ApiSpec.Schemas.Status do parent_visible: %Schema{ type: :boolean, description: "`true` if the parent post is visible to the user" + }, + pinned_at: %Schema{ + type: :string, + format: "date-time", + nullable: true, + description: + "A datetime (ISO 8601) that states when the post was pinned or `null` if the post is not pinned" } } }, diff --git a/lib/pleroma/web/common_api.ex b/lib/pleroma/web/common_api.ex index b003e30c7..1b5f8491e 100644 --- a/lib/pleroma/web/common_api.ex +++ b/lib/pleroma/web/common_api.ex @@ -228,17 +228,7 @@ def favorite_helper(user, id) do {:find_object, _} -> {:error, :not_found} - {:common_pipeline, - { - :error, - { - :validate_object, - { - :error, - changeset - } - } - }} = e -> + {:common_pipeline, {:error, {:validate, {:error, changeset}}}} = e -> if {:object, {"already liked by this actor", []}} in changeset.errors do {:ok, :already_liked} else @@ -411,29 +401,58 @@ def post(user, %{status: _} = data) do end end - def pin(id, %{ap_id: user_ap_id} = user) do - with %Activity{ - actor: ^user_ap_id, - data: %{"type" => "Create"}, - object: %Object{data: %{"type" => object_type}} - } = activity <- Activity.get_by_id_with_object(id), - true <- object_type in ["Note", "Article", "Question"], - true <- Visibility.is_public?(activity), - {:ok, _user} <- User.add_pinnned_activity(user, activity) do + @spec pin(String.t(), User.t()) :: {:ok, Activity.t()} | {:error, term()} + def pin(id, %User{} = user) do + with %Activity{} = activity <- create_activity_by_id(id), + true <- activity_belongs_to_actor(activity, user.ap_id), + true <- object_type_is_allowed_for_pin(activity.object), + true <- activity_is_public(activity), + {:ok, pin_data, _} <- Builder.pin(user, activity.object), + {:ok, _pin, _} <- + Pipeline.common_pipeline(pin_data, + local: true, + activity_id: id + ) do {:ok, activity} else - {:error, %{errors: [pinned_activities: {err, _}]}} -> {:error, err} - _ -> {:error, dgettext("errors", "Could not pin")} + {:error, {:side_effects, error}} -> error + error -> error end end + defp create_activity_by_id(id) do + with nil <- Activity.create_by_id_with_object(id) do + {:error, :not_found} + end + end + + defp activity_belongs_to_actor(%{actor: actor}, actor), do: true + defp activity_belongs_to_actor(_, _), do: {:error, :ownership_error} + + defp object_type_is_allowed_for_pin(%{data: %{"type" => type}}) do + with false <- type in ["Note", "Article", "Question"] do + {:error, :not_allowed} + end + end + + defp activity_is_public(activity) do + with false <- Visibility.is_public?(activity) do + {:error, :visibility_error} + end + end + + @spec unpin(String.t(), User.t()) :: {:ok, User.t()} | {:error, term()} def unpin(id, user) do - with %Activity{data: %{"type" => "Create"}} = activity <- Activity.get_by_id(id), - {:ok, _user} <- User.remove_pinnned_activity(user, activity) do + with %Activity{} = activity <- create_activity_by_id(id), + {:ok, unpin_data, _} <- Builder.unpin(user, activity.object), + {:ok, _unpin, _} <- + Pipeline.common_pipeline(unpin_data, + local: true, + activity_id: 
activity.id, + expires_at: activity.data["expires_at"], + featured_address: user.featured_address + ) do {:ok, activity} - else - {:error, %{errors: [pinned_activities: {err, _}]}} -> {:error, err} - _ -> {:error, dgettext("errors", "Could not unpin")} end end diff --git a/lib/pleroma/web/common_api/activity_draft.ex b/lib/pleroma/web/common_api/activity_draft.ex index 73f1b0931..80a9fa7bb 100644 --- a/lib/pleroma/web/common_api/activity_draft.ex +++ b/lib/pleroma/web/common_api/activity_draft.ex @@ -5,6 +5,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do alias Pleroma.Activity alias Pleroma.Conversation.Participation + alias Pleroma.Object alias Pleroma.Web.CommonAPI alias Pleroma.Web.CommonAPI.Utils @@ -179,13 +180,39 @@ defp context(draft) do end defp sensitive(draft) do - sensitive = draft.params[:sensitive] || Enum.member?(draft.tags, {"#nsfw", "nsfw"}) + sensitive = draft.params[:sensitive] %__MODULE__{draft | sensitive: sensitive} end defp object(draft) do emoji = Map.merge(Pleroma.Emoji.Formatter.get_emoji_map(draft.full_payload), draft.emoji) + # Sometimes people create posts with subject containing emoji, + # since subjects are usually copied this will result in a broken + # subject when someone replies from an instance that does not have + # the emoji or has it under different shortcode. This is an attempt + # to mitigate this by copying emoji from inReplyTo if they are present + # in the subject. + summary_emoji = + with %Activity{} <- draft.in_reply_to, + %Object{data: %{"tag" => [_ | _] = tag}} <- Object.normalize(draft.in_reply_to) do + Enum.reduce(tag, %{}, fn + %{"type" => "Emoji", "name" => name, "icon" => %{"url" => url}}, acc -> + if String.contains?(draft.summary, name) do + Map.put(acc, name, url) + else + acc + end + + _, acc -> + acc + end) + else + _ -> %{} + end + + emoji = Map.merge(emoji, summary_emoji) + object = Utils.make_note_data(draft) |> Map.put("emoji", emoji) diff --git a/lib/pleroma/web/common_api/utils.ex b/lib/pleroma/web/common_api/utils.ex index 9587dfa25..94a378e11 100644 --- a/lib/pleroma/web/common_api/utils.ex +++ b/lib/pleroma/web/common_api/utils.ex @@ -217,7 +217,6 @@ def make_content_html(%ActivityDraft{} = draft) do draft.status |> format_input(content_type, options) |> maybe_add_attachments(draft.attachments, attachment_links) - |> maybe_add_nsfw_tag(draft.params) end defp get_content_type(content_type) do @@ -228,13 +227,6 @@ defp get_content_type(content_type) do end end - defp maybe_add_nsfw_tag({text, mentions, tags}, %{"sensitive" => sensitive}) - when sensitive in [true, "True", "true", "1"] do - {text, mentions, [{"#nsfw", "nsfw"} | tags]} - end - - defp maybe_add_nsfw_tag(data, _), do: data - def make_context(_, %Participation{} = participation) do Repo.preload(participation, :conversation).conversation.ap_id end @@ -294,7 +286,7 @@ def format_input(text, "text/html", options) do def format_input(text, "text/markdown", options) do text |> Formatter.mentions_escape(options) - |> Earmark.as_html!(%Earmark.Options{renderer: Pleroma.EarmarkRenderer}) + |> Formatter.markdown_to_html() |> Formatter.linkify(options) |> Formatter.html_escape("text/html") end diff --git a/lib/pleroma/web/feed/feed_view.ex b/lib/pleroma/web/feed/feed_view.ex index df97d2f46..66940f311 100644 --- a/lib/pleroma/web/feed/feed_view.ex +++ b/lib/pleroma/web/feed/feed_view.ex @@ -32,6 +32,7 @@ def prepare_activity(activity, opts \\ []) do %{ activity: activity, + object: object, data: Map.get(object, :data), actor: actor } diff --git 
a/lib/pleroma/web/mastodon_api/controllers/fallback_controller.ex b/lib/pleroma/web/mastodon_api/controllers/fallback_controller.ex index d25f84837..84621500e 100644 --- a/lib/pleroma/web/mastodon_api/controllers/fallback_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/fallback_controller.ex @@ -30,6 +30,12 @@ def call(conn, {:error, error_message}) do |> json(%{error: error_message}) end + def call(conn, {:error, status, message}) do + conn + |> put_status(status) + |> json(%{error: message}) + end + def call(conn, _) do conn |> put_status(:internal_server_error) diff --git a/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex b/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex index 267d0f03b..c7a5267d4 100644 --- a/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/instance_controller.ex @@ -5,7 +5,7 @@ defmodule Pleroma.Web.MastodonAPI.InstanceController do use Pleroma.Web, :controller - plug(OpenApiSpex.Plug.CastAndValidate) + plug(Pleroma.Web.ApiSpec.CastAndValidate) plug( :skip_plug, diff --git a/lib/pleroma/web/mastodon_api/controllers/status_controller.ex b/lib/pleroma/web/mastodon_api/controllers/status_controller.ex index d1a58d5e1..724dc5c5d 100644 --- a/lib/pleroma/web/mastodon_api/controllers/status_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/status_controller.ex @@ -21,7 +21,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do alias Pleroma.Web.CommonAPI alias Pleroma.Web.MastodonAPI.AccountView alias Pleroma.Web.MastodonAPI.ScheduledActivityView - # alias Pleroma.Web.OAuth.Token + alias Pleroma.Web.OAuth.Token alias Pleroma.Web.Plugs.OAuthScopesPlug alias Pleroma.Web.Plugs.RateLimiter @@ -260,6 +260,18 @@ def unfavourite(%{assigns: %{user: user}} = conn, %{id: activity_id}) do def pin(%{assigns: %{user: user}} = conn, %{id: ap_id_or_id}) do with {:ok, activity} <- CommonAPI.pin(ap_id_or_id, user) do try_render(conn, "show.json", activity: activity, for: user, as: :activity) + else + {:error, :pinned_statuses_limit_reached} -> + {:error, "You have already pinned the maximum number of statuses"} + + {:error, :ownership_error} -> + {:error, :unprocessable_entity, "Someone else's status cannot be pinned"} + + {:error, :visibility_error} -> + {:error, :unprocessable_entity, "Non-public status cannot be pinned"} + + error -> + error end end @@ -420,16 +432,14 @@ def bookmarks(%{assigns: %{user: user}} = conn, params) do ) end - # Deactivated for 2.3.0 - # defp put_application(params, - # %{assigns: %{token: %Token{user: %User{} = user} = token}} = _conn) do - # if user.disclose_client do - # %{client_name: client_name, website: website} = Repo.preload(token, :app).app - # Map.put(params, :generator, %{type: "Application", name: client_name, url: website}) - # else - # Map.put(params, :generator, nil) - # end - # end + defp put_application(params, %{assigns: %{token: %Token{user: %User{} = user} = token}} = _conn) do + if user.disclose_client do + %{client_name: client_name, website: website} = Repo.preload(token, :app).app + Map.put(params, :generator, %{type: "Application", name: client_name, url: website}) + else + Map.put(params, :generator, nil) + end + end defp put_application(params, _), do: Map.put(params, :generator, nil) end diff --git a/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex b/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex index cef299aa4..c611958be 100644 --- 
a/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex +++ b/lib/pleroma/web/mastodon_api/controllers/timeline_controller.ex @@ -133,34 +133,25 @@ defp fail_on_bad_auth(conn) do end defp hashtag_fetching(params, user, local_only) do - tags = + # Note: not sanitizing tag options at this stage (may be mix-cased, have duplicates etc.) + tags_any = [params[:tag], params[:any]] |> List.flatten() - |> Enum.uniq() - |> Enum.reject(&is_nil/1) - |> Enum.map(&String.downcase/1) + |> Enum.filter(& &1) - tag_all = - params - |> Map.get(:all, []) - |> Enum.map(&String.downcase/1) + tag_all = Map.get(params, :all, []) + tag_reject = Map.get(params, :none, []) - tag_reject = - params - |> Map.get(:none, []) - |> Enum.map(&String.downcase/1) - - _activities = - params - |> Map.put(:type, "Create") - |> Map.put(:local_only, local_only) - |> Map.put(:blocking_user, user) - |> Map.put(:muting_user, user) - |> Map.put(:user, user) - |> Map.put(:tag, tags) - |> Map.put(:tag_all, tag_all) - |> Map.put(:tag_reject, tag_reject) - |> ActivityPub.fetch_public_activities() + params + |> Map.put(:type, "Create") + |> Map.put(:local_only, local_only) + |> Map.put(:blocking_user, user) + |> Map.put(:muting_user, user) + |> Map.put(:user, user) + |> Map.put(:tag, tags_any) + |> Map.put(:tag_all, tag_all) + |> Map.put(:tag_reject, tag_reject) + |> ActivityPub.fetch_public_activities() end # GET /api/v1/timelines/tag/:tag diff --git a/lib/pleroma/web/mastodon_api/views/instance_view.ex b/lib/pleroma/web/mastodon_api/views/instance_view.ex index 73205fb6d..dac68d8e6 100644 --- a/lib/pleroma/web/mastodon_api/views/instance_view.ex +++ b/lib/pleroma/web/mastodon_api/views/instance_view.ex @@ -23,7 +23,8 @@ def render("show.json", _) do streaming_api: Pleroma.Web.Endpoint.websocket_url() }, stats: Pleroma.Stats.get_stats(), - thumbnail: Pleroma.Web.base_url() <> Keyword.get(instance, :instance_thumbnail), + thumbnail: + URI.merge(Pleroma.Web.base_url(), Keyword.get(instance, :instance_thumbnail)) |> to_string, languages: ["en"], registrations: Keyword.get(instance, :registrations_open), approval_required: Keyword.get(instance, :account_approval_required), diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex index 7f318e81b..8fdf30883 100644 --- a/lib/pleroma/web/mastodon_api/views/status_view.ex +++ b/lib/pleroma/web/mastodon_api/views/status_view.ex @@ -125,16 +125,16 @@ def render( ) do user = CommonAPI.get_user(activity.data["actor"]) created_at = Utils.to_masto_date(activity.data["published"]) - activity_object = Object.normalize(activity, fetch: false) + object = Object.normalize(activity, fetch: false) reblogged_parent_activity = if opts[:parent_activities] do Activity.Queries.find_by_object_ap_id( opts[:parent_activities], - activity_object.data["id"] + object.data["id"] ) else - Activity.create_by_object_ap_id(activity_object.data["id"]) + Activity.create_by_object_ap_id(object.data["id"]) |> Activity.with_preloaded_bookmark(opts[:for]) |> Activity.with_set_thread_muted_field(opts[:for]) |> Repo.one() @@ -143,7 +143,7 @@ def render( reblog_rendering_opts = Map.put(opts, :activity, reblogged_parent_activity) reblogged = render("show.json", reblog_rendering_opts) - favorited = opts[:for] && opts[:for].ap_id in (activity_object.data["likes"] || []) + favorited = opts[:for] && opts[:for].ap_id in (object.data["likes"] || []) bookmarked = Activity.get_bookmark(reblogged_parent_activity, opts[:for]) != nil @@ -153,10 +153,12 @@ def render( |> 
Enum.filter(& &1) |> Enum.map(fn user -> AccountView.render("mention.json", %{user: user}) end) + {pinned?, pinned_at} = pin_data(object, user) + %{ id: to_string(activity.id), - uri: activity_object.data["id"], - url: activity_object.data["id"], + uri: object.data["id"], + url: object.data["id"], account: AccountView.render("show.json", %{ user: user, @@ -174,18 +176,19 @@ def render( favourited: present?(favorited), bookmarked: present?(bookmarked), muted: false, - pinned: pinned?(activity, user), + pinned: pinned?, sensitive: false, spoiler_text: "", visibility: get_visibility(activity), media_attachments: reblogged[:media_attachments] || [], mentions: mentions, tags: reblogged[:tags] || [], - application: build_application(activity_object.data["generator"]), + application: build_application(object.data["generator"]), language: nil, emojis: [], pleroma: %{ - local: activity.local + local: activity.local, + pinned_at: pinned_at } } end @@ -199,8 +202,10 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity} like_count = object.data["like_count"] || 0 announcement_count = object.data["announcement_count"] || 0 - tags = object.data["tag"] || [] - sensitive = object.data["sensitive"] || Enum.member?(tags, "nsfw") + hashtags = Object.hashtags(object) + sensitive = object.data["sensitive"] || Enum.member?(hashtags, "nsfw") + + tags = Object.tags(object) tag_mentions = tags @@ -315,6 +320,8 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity} fn for_user, user -> User.mutes?(for_user, user) end ) + {pinned?, pinned_at} = pin_data(object, user) + %{ id: to_string(activity.id), uri: object.data["id"], @@ -338,7 +345,7 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity} favourited: present?(favorited), bookmarked: present?(bookmarked), muted: muted, - pinned: pinned?(activity, user), + pinned: pinned?, sensitive: sensitive, spoiler_text: summary, visibility: get_visibility(object), @@ -359,7 +366,8 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity} direct_conversation_id: direct_conversation_id, thread_muted: thread_muted?, emoji_reactions: emoji_reactions, - parent_visible: visible_for_user?(reply_to, opts[:for]) + parent_visible: visible_for_user?(reply_to, opts[:for]), + pinned_at: pinned_at } } end @@ -380,12 +388,15 @@ def render("card.json", %{rich_media: rich_media, page_url: page_url}) do page_url = page_url_data |> to_string - image_url = + image_url_data = if is_binary(rich_media["image"]) do - URI.merge(page_url_data, URI.parse(rich_media["image"])) - |> to_string + URI.parse(rich_media["image"]) + else + nil end + image_url = build_image_url(image_url_data, page_url_data) + %{ type: "link", provider_name: page_url_data.host, @@ -542,8 +553,13 @@ defp present?(nil), do: false defp present?(false), do: false defp present?(_), do: true - defp pinned?(%Activity{id: id}, %User{pinned_activities: pinned_activities}), - do: id in pinned_activities + defp pin_data(%Object{data: %{"id" => object_id}}, %User{pinned_objects: pinned_objects}) do + if pinned_at = pinned_objects[object_id] do + {true, Utils.to_masto_date(pinned_at)} + else + {false, nil} + end + end defp build_emoji_map(emoji, users, current_user) do %{ @@ -554,6 +570,27 @@ defp build_emoji_map(emoji, users, current_user) do end @spec build_application(map() | nil) :: map() | nil - defp build_application(%{type: _type, name: name, url: url}), do: %{name: name, website: url} + defp build_application(%{"type" => 
_type, "name" => name, "url" => url}), + do: %{name: name, website: url} + defp build_application(_), do: nil + + # Workaround for Elixir issue #10771 + # Avoid applying URI.merge unless necessary + # TODO: revert to always attempting URI.merge(image_url_data, page_url_data) + # when Elixir 1.12 is the minimum supported version + @spec build_image_url(struct() | nil, struct()) :: String.t() | nil + defp build_image_url( + %URI{scheme: image_scheme, host: image_host} = image_url_data, + %URI{} = _page_url_data + ) + when not is_nil(image_scheme) and not is_nil(image_host) do + image_url_data |> to_string + end + + defp build_image_url(%URI{} = image_url_data, %URI{} = page_url_data) do + URI.merge(page_url_data, image_url_data) |> to_string + end + + defp build_image_url(_, _), do: nil end diff --git a/lib/pleroma/web/media_proxy.ex b/lib/pleroma/web/media_proxy.ex index 27f337138..d0d4bb4b3 100644 --- a/lib/pleroma/web/media_proxy.ex +++ b/lib/pleroma/web/media_proxy.ex @@ -121,6 +121,11 @@ def decode_url(sig, url) do end end + def decode_url(encoded) do + [_, "proxy", sig, base64 | _] = URI.parse(encoded).path |> String.split("/") + decode_url(sig, base64) + end + defp signed_url(url) do :crypto.hmac(:sha, Config.get([Web.Endpoint, :secret_key_base]), url) end diff --git a/lib/pleroma/web/o_auth/o_auth_view.ex b/lib/pleroma/web/o_auth/o_auth_view.ex index 281bbcc3c..1419c96a2 100644 --- a/lib/pleroma/web/o_auth/o_auth_view.ex +++ b/lib/pleroma/web/o_auth/o_auth_view.ex @@ -10,6 +10,7 @@ defmodule Pleroma.Web.OAuth.OAuthView do def render("token.json", %{token: token} = opts) do response = %{ + id: token.id, token_type: "Bearer", access_token: token.token, refresh_token: token.refresh_token, diff --git a/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex b/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex index 315657e9c..fc5d16771 100644 --- a/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex +++ b/lib/pleroma/web/pleroma_api/controllers/backup_controller.ex @@ -10,7 +10,7 @@ defmodule Pleroma.Web.PleromaAPI.BackupController do action_fallback(Pleroma.Web.MastodonAPI.FallbackController) plug(OAuthScopesPlug, %{scopes: ["read:accounts"]} when action in [:index, :create]) - plug(OpenApiSpex.Plug.CastAndValidate, render_error: Pleroma.Web.ApiSpec.RenderError) + plug(Pleroma.Web.ApiSpec.CastAndValidate) defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.PleromaBackupOperation diff --git a/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex b/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex index 4adc685fe..dcd54b1af 100644 --- a/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex +++ b/lib/pleroma/web/pleroma_api/controllers/chat_controller.ex @@ -38,7 +38,7 @@ defmodule Pleroma.Web.PleromaAPI.ChatController do %{scopes: ["read:chats"]} when action in [:messages, :index, :index2, :show] ) - plug(OpenApiSpex.Plug.CastAndValidate, render_error: Pleroma.Web.ApiSpec.RenderError) + plug(Pleroma.Web.ApiSpec.CastAndValidate) defdelegate open_api_operation(action), to: Pleroma.Web.ApiSpec.ChatOperation diff --git a/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex b/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex index 6d9a11fb6..078d470d9 100644 --- a/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex +++ b/lib/pleroma/web/pleroma_api/controllers/user_import_controller.ex @@ -15,7 +15,7 @@ defmodule Pleroma.Web.PleromaAPI.UserImportController do plug(OAuthScopesPlug, %{scopes: 
["follow", "write:blocks"]} when action == :blocks) plug(OAuthScopesPlug, %{scopes: ["follow", "write:mutes"]} when action == :mutes) - plug(OpenApiSpex.Plug.CastAndValidate) + plug(Pleroma.Web.ApiSpec.CastAndValidate) defdelegate open_api_operation(action), to: ApiSpec.UserImportOperation def follow(%{body_params: %{list: %Plug.Upload{path: path}}} = conn, _) do diff --git a/lib/pleroma/web/plugs/http_security_plug.ex b/lib/pleroma/web/plugs/http_security_plug.ex index 0025b042a..d1e6cc9d3 100644 --- a/lib/pleroma/web/plugs/http_security_plug.ex +++ b/lib/pleroma/web/plugs/http_security_plug.ex @@ -48,7 +48,8 @@ def headers do {"x-content-type-options", "nosniff"}, {"referrer-policy", referrer_policy}, {"x-download-options", "noopen"}, - {"content-security-policy", csp_string()} + {"content-security-policy", csp_string()}, + {"permissions-policy", "interest-cohort=()"} ] headers = diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex index 72ad14f05..ccf2ef796 100644 --- a/lib/pleroma/web/router.ex +++ b/lib/pleroma/web/router.ex @@ -204,7 +204,7 @@ defmodule Pleroma.Web.Router do get("/users/:nickname/credentials", AdminAPIController, :show_user_credentials) patch("/users/:nickname/credentials", AdminAPIController, :update_user_credentials) - get("/users", UserController, :list) + get("/users", UserController, :index) get("/users/:nickname", UserController, :show) get("/users/:nickname/statuses", AdminAPIController, :list_user_statuses) get("/users/:nickname/chats", AdminAPIController, :list_user_chats) @@ -704,6 +704,7 @@ defmodule Pleroma.Web.Router do # The following two are S2S as well, see `ActivityPub.fetch_follow_information_for_user/1`: get("/users/:nickname/followers", ActivityPubController, :followers) get("/users/:nickname/following", ActivityPubController, :following) + get("/users/:nickname/collections/featured", ActivityPubController, :pinned) end scope "/", Pleroma.Web.ActivityPub do diff --git a/lib/pleroma/web/templates/feed/feed/_activity.atom.eex b/lib/pleroma/web/templates/feed/feed/_activity.atom.eex index 3fd150c4e..6688830ba 100644 --- a/lib/pleroma/web/templates/feed/feed/_activity.atom.eex +++ b/lib/pleroma/web/templates/feed/feed/_activity.atom.eex @@ -22,7 +22,7 @@ <% end %> - <%= for tag <- @data["tag"] || [] do %> + <%= for tag <- Pleroma.Object.hashtags(@object) do %> <% end %> diff --git a/lib/pleroma/web/templates/feed/feed/_activity.rss.eex b/lib/pleroma/web/templates/feed/feed/_activity.rss.eex index 947bbb099..592b9dcdc 100644 --- a/lib/pleroma/web/templates/feed/feed/_activity.rss.eex +++ b/lib/pleroma/web/templates/feed/feed/_activity.rss.eex @@ -22,7 +22,7 @@ <%= activity_context(@activity) %> - <%= for tag <- @data["tag"] || [] do %> + <%= for tag <- Pleroma.Object.hashtags(@object) do %> <% end %> diff --git a/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex b/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex index cf5874a91..c2de28fe4 100644 --- a/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex +++ b/lib/pleroma/web/templates/feed/feed/_tag_activity.atom.eex @@ -41,7 +41,7 @@ <% end %> <% end %> - <%= for tag <- @data["tag"] || [] do %> + <%= for tag <- Pleroma.Object.hashtags(@object) do %> <% end %> diff --git a/lib/pleroma/web/web_finger.ex b/lib/pleroma/web/web_finger.ex index 15002b29f..21b10e654 100644 --- a/lib/pleroma/web/web_finger.ex +++ b/lib/pleroma/web/web_finger.ex @@ -94,52 +94,56 @@ def represent_user(user, "XML") do |> XmlBuilder.to_doc() end - defp webfinger_from_xml(doc) do - subject = 
XML.string_from_xpath("//Subject", doc) + defp webfinger_from_xml(body) do + with {:ok, doc} <- XML.parse_document(body) do + subject = XML.string_from_xpath("//Subject", doc) - subscribe_address = - ~s{//Link[@rel="http://ostatus.org/schema/1.0/subscribe"]/@template} - |> XML.string_from_xpath(doc) + subscribe_address = + ~s{//Link[@rel="http://ostatus.org/schema/1.0/subscribe"]/@template} + |> XML.string_from_xpath(doc) - ap_id = - ~s{//Link[@rel="self" and @type="application/activity+json"]/@href} - |> XML.string_from_xpath(doc) + ap_id = + ~s{//Link[@rel="self" and @type="application/activity+json"]/@href} + |> XML.string_from_xpath(doc) - data = %{ - "subject" => subject, - "subscribe_address" => subscribe_address, - "ap_id" => ap_id - } + data = %{ + "subject" => subject, + "subscribe_address" => subscribe_address, + "ap_id" => ap_id + } - {:ok, data} + {:ok, data} + end end - defp webfinger_from_json(doc) do - data = - Enum.reduce(doc["links"], %{"subject" => doc["subject"]}, fn link, data -> - case {link["type"], link["rel"]} do - {"application/activity+json", "self"} -> - Map.put(data, "ap_id", link["href"]) + defp webfinger_from_json(body) do + with {:ok, doc} <- Jason.decode(body) do + data = + Enum.reduce(doc["links"], %{"subject" => doc["subject"]}, fn link, data -> + case {link["type"], link["rel"]} do + {"application/activity+json", "self"} -> + Map.put(data, "ap_id", link["href"]) - {"application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"", "self"} -> - Map.put(data, "ap_id", link["href"]) + {"application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"", "self"} -> + Map.put(data, "ap_id", link["href"]) - {nil, "http://ostatus.org/schema/1.0/subscribe"} -> - Map.put(data, "subscribe_address", link["template"]) + {nil, "http://ostatus.org/schema/1.0/subscribe"} -> + Map.put(data, "subscribe_address", link["template"]) - _ -> - Logger.debug("Unhandled type: #{inspect(link["type"])}") - data - end - end) + _ -> + Logger.debug("Unhandled type: #{inspect(link["type"])}") + data + end + end) - {:ok, data} + {:ok, data} + end end def get_template_from_xml(body) do xpath = "//Link[@rel='lrdd']/@template" - with doc when doc != :error <- XML.parse_document(body), + with {:ok, doc} <- XML.parse_document(body), template when template != nil <- XML.string_from_xpath(xpath, doc) do {:ok, template} end @@ -192,15 +196,23 @@ def finger(account) do address, [{"accept", "application/xrd+xml,application/jrd+json"}] ), - {:ok, %{status: status, body: body}} when status in 200..299 <- response do - doc = XML.parse_document(body) + {:ok, %{status: status, body: body, headers: headers}} when status in 200..299 <- + response do + case List.keyfind(headers, "content-type", 0) do + {_, content_type} -> + case Plug.Conn.Utils.media_type(content_type) do + {:ok, "application", subtype, _} when subtype in ~w(xrd+xml xml) -> + webfinger_from_xml(body) - if doc != :error do - webfinger_from_xml(doc) - else - with {:ok, doc} <- Jason.decode(body) do - webfinger_from_json(doc) - end + {:ok, "application", subtype, _} when subtype in ~w(jrd+json json) -> + webfinger_from_json(body) + + _ -> + {:error, {:content_type, content_type}} + end + + _ -> + {:error, {:content_type, nil}} end else e -> diff --git a/lib/pleroma/web/xml.ex b/lib/pleroma/web/xml.ex index 2b34611ac..0ab6e9d32 100644 --- a/lib/pleroma/web/xml.ex +++ b/lib/pleroma/web/xml.ex @@ -31,7 +31,7 @@ def parse_document(text) do |> :binary.bin_to_list() |> :xmerl_scan.string(quiet: true) - doc + {:ok, doc} rescue _e -> 
Logger.debug("Couldn't parse XML: #{inspect(text)}") diff --git a/mix.exs b/mix.exs index 436381f32..b449c82b3 100644 --- a/mix.exs +++ b/mix.exs @@ -4,7 +4,7 @@ defmodule Pleroma.Mixfile do def project do [ app: :pleroma, - version: version("2.3.0"), + version: version("2.3.50"), elixir: "~> 1.9", elixirc_paths: elixirc_paths(Mix.env()), compilers: [:phoenix, :gettext] ++ Mix.compilers(), @@ -38,7 +38,7 @@ def project do include_executables_for: [:unix], applications: [ex_syslogger: :load, syslog: :load, eldap: :transient], steps: [:assemble, &put_otp_version/1, ©_files/1, ©_nginx_config/1], - config_providers: [{Pleroma.Config.ReleaseRuntimeProvider, nil}] + config_providers: [{Pleroma.Config.ReleaseRuntimeProvider, []}] ] ] ] @@ -121,6 +121,7 @@ defp deps do {:phoenix_pubsub, "~> 2.0"}, {:phoenix_ecto, "~> 4.0"}, {:ecto_enum, "~> 1.4"}, + {:ecto_explain, "~> 0.1.2"}, {:ecto_sql, "~> 3.4.4"}, {:postgrex, ">= 0.15.5"}, {:oban, "~> 2.3.4"}, @@ -143,7 +144,7 @@ defp deps do {:ex_aws, "~> 2.1.6"}, {:ex_aws_s3, "~> 2.0"}, {:sweet_xml, "~> 0.6.6"}, - {:earmark, "1.4.3"}, + {:earmark, "1.4.15"}, {:bbcode_pleroma, "~> 0.2.0"}, {:crypt, git: "https://git.pleroma.social/pleroma/elixir-libraries/crypt.git", @@ -195,9 +196,7 @@ defp deps do {:majic, git: "https://git.pleroma.social/pleroma/elixir-libraries/majic.git", ref: "289cda1b6d0d70ccb2ba508a2b0bd24638db2880"}, - {:open_api_spex, - git: "https://git.pleroma.social/pleroma/elixir-libraries/open_api_spex.git", - ref: "f296ac0924ba3cf79c7a588c4c252889df4c2edd"}, + {:open_api_spex, "~> 3.10"}, ## dev & test {:ex_doc, "~> 0.22", only: :dev, runtime: false}, diff --git a/mix.lock b/mix.lock index 99be81826..22c31e0d6 100644 --- a/mix.lock +++ b/mix.lock @@ -27,10 +27,11 @@ "db_connection": {:hex, :db_connection, "2.3.1", "4c9f3ed1ef37471cbdd2762d6655be11e38193904d9c5c1c9389f1b891a3088e", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm", "abaab61780dde30301d840417890bd9f74131041afd02174cf4e10635b3a63f5"}, "decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"}, "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, - "earmark": {:hex, :earmark, "1.4.3", "364ca2e9710f6bff494117dbbd53880d84bebb692dafc3a78eb50aa3183f2bfd", [:mix], [], "hexpm", "8cf8a291ebf1c7b9539e3cddb19e9cef066c2441b1640f13c34c1d3cfc825fec"}, - "earmark_parser": {:hex, :earmark_parser, "1.4.10", "6603d7a603b9c18d3d20db69921527f82ef09990885ed7525003c7fe7dc86c56", [:mix], [], "hexpm", "8e2d5370b732385db2c9b22215c3f59c84ac7dda7ed7e544d7c459496ae519c0"}, + "earmark": {:hex, :earmark, "1.4.15", "2c7f924bf495ec1f65bd144b355d0949a05a254d0ec561740308a54946a67888", [:mix], [{:earmark_parser, ">= 1.4.13", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "3b1209b85bc9f3586f370f7c363f6533788fb4e51db23aa79565875e7f9999ee"}, + "earmark_parser": {:hex, :earmark_parser, "1.4.13", "0c98163e7d04a15feb62000e1a891489feb29f3d10cb57d4f845c405852bbef8", [:mix], [], "hexpm", "d602c26af3a0af43d2f2645613f65841657ad6efc9f0e361c3b6c06b578214ba"}, "ecto": {:hex, :ecto, "3.4.6", "08f7afad3257d6eb8613309af31037e16c36808dfda5a3cd0cb4e9738db030e4", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: 
"hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6f13a9e2a62e75c2dcfc7207bfc65645ab387af8360db4c89fee8b5a4bf3f70b"}, "ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"}, + "ecto_explain": {:hex, :ecto_explain, "0.1.2", "a9d504cbd4adc809911f796d5ef7ebb17a576a6d32286c3d464c015bd39d5541", [:mix], [], "hexpm", "1d0e7798ae30ecf4ce34e912e5354a0c1c832b7ebceba39298270b9a9f316330"}, "ecto_sql": {:hex, :ecto_sql, "3.4.5", "30161f81b167d561a9a2df4329c10ae05ff36eca7ccc84628f2c8b9fa1e43323", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.4.3", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.3.0 or ~> 0.4.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.0", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "31990c6a3579b36a3c0841d34a94c275e727de8b84f58509da5f1b2032c98ac2"}, "eimp": {:hex, :eimp, "1.0.14", "fc297f0c7e2700457a95a60c7010a5f1dcb768a083b6d53f49cd94ab95a28f22", [:rebar3], [{:p1_utils, "1.0.18", [hex: :p1_utils, repo: "hexpm", optional: false]}], "hexpm", "501133f3112079b92d9e22da8b88bf4f0e13d4d67ae9c15c42c30bd25ceb83b6"}, "elixir_make": {:hex, :elixir_make, "0.6.2", "7dffacd77dec4c37b39af867cedaabb0b59f6a871f89722c25b28fcd4bd70530", [:mix], [], "hexpm", "03e49eadda22526a7e5279d53321d1cced6552f344ba4e03e619063de75348d9"}, @@ -51,7 +52,7 @@ "gen_smtp": {:hex, :gen_smtp, "0.15.0", "9f51960c17769b26833b50df0b96123605a8024738b62db747fece14eb2fbfcc", [:rebar3], [], "hexpm", "29bd14a88030980849c7ed2447b8db6d6c9278a28b11a44cafe41b791205440f"}, "gen_stage": {:hex, :gen_stage, "0.14.3", "d0c66f1c87faa301c1a85a809a3ee9097a4264b2edf7644bf5c123237ef732bf", [:mix], [], "hexpm"}, "gen_state_machine": {:hex, :gen_state_machine, "2.0.5", "9ac15ec6e66acac994cc442dcc2c6f9796cf380ec4b08267223014be1c728a95", [:mix], [], "hexpm"}, - "gettext": {:hex, :gettext, "0.18.0", "406d6b9e0e3278162c2ae1de0a60270452c553536772167e2d701f028116f870", [:mix], [], "hexpm", "c3f850be6367ebe1a08616c2158affe4a23231c70391050bf359d5f92f66a571"}, + "gettext": {:hex, :gettext, "0.18.2", "7df3ea191bb56c0309c00a783334b288d08a879f53a7014341284635850a6e55", [:mix], [], "hexpm", "f9f537b13d4fdd30f3039d33cb80144c3aa1f8d9698e47d7bcbcc8df93b1f5c5"}, "gun": {:git, "https://github.com/ninenines/gun.git", "921c47146b2d9567eac7e9a4d2ccc60fffd4f327", [ref: "921c47146b2d9567eac7e9a4d2ccc60fffd4f327"]}, "hackney": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/hackney.git", "7d7119f0651515d6d7669c78393fd90950a3ec6e", [ref: "7d7119f0651515d6d7669c78393fd90950a3ec6e"]}, "html_entities": {:hex, :html_entities, "0.5.1", "1c9715058b42c35a2ab65edc5b36d0ea66dd083767bef6e3edb57870ef556549", [:mix], [], "hexpm", "30efab070904eb897ff05cd52fa61c1025d7f8ef3a9ca250bc4e6513d16c32de"}, @@ -82,7 +83,7 @@ "nimble_pool": {:hex, :nimble_pool, "0.1.0", "ffa9d5be27eee2b00b0c634eb649aa27f97b39186fec3c493716c2a33e784ec6", [:mix], [], 
"hexpm", "343a1eaa620ddcf3430a83f39f2af499fe2370390d4f785cd475b4df5acaf3f9"}, "nodex": {:git, "https://git.pleroma.social/pleroma/nodex", "cb6730f943cfc6aad674c92161be23a8411f15d1", [ref: "cb6730f943cfc6aad674c92161be23a8411f15d1"]}, "oban": {:hex, :oban, "2.3.4", "ec7509b9af2524d55f529cb7aee93d36131ae0bf0f37706f65d2fe707f4d9fd8", [:mix], [{:ecto_sql, ">= 3.4.3", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.14", [hex: :postgrex, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c70ca0434758fd1805422ea4446af5e910ddc697c0c861549c8f0eb0cfbd2fdf"}, - "open_api_spex": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/open_api_spex.git", "f296ac0924ba3cf79c7a588c4c252889df4c2edd", [ref: "f296ac0924ba3cf79c7a588c4c252889df4c2edd"]}, + "open_api_spex": {:hex, :open_api_spex, "3.10.0", "94e9521ad525b3fcf6dc77da7c45f87fdac24756d4de588cb0816b413e7c1844", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.1", [hex: :poison, repo: "hexpm", optional: true]}], "hexpm", "2dbb2bde3d2b821f06936e8dfaf3284331186556291946d84eeba3750ac28765"}, "p1_utils": {:hex, :p1_utils, "1.0.18", "3fe224de5b2e190d730a3c5da9d6e8540c96484cf4b4692921d1e28f0c32b01c", [:rebar3], [], "hexpm", "1fc8773a71a15553b179c986b22fbeead19b28fe486c332d4929700ffeb71f88"}, "parse_trans": {:git, "https://github.com/uwiger/parse_trans.git", "76abb347c3c1d00fb0ccf9e4b43e22b3d2288484", [tag: "3.3.0"]}, "pbkdf2_elixir": {:hex, :pbkdf2_elixir, "1.2.1", "9cbe354b58121075bd20eb83076900a3832324b7dd171a6895fab57b6bb2752c", [:mix], [{:comeonin, "~> 5.3", [hex: :comeonin, repo: "hexpm", optional: false]}], "hexpm", "d3b40a4a4630f0b442f19eca891fcfeeee4c40871936fed2f68e1c4faa30481f"}, @@ -116,9 +117,9 @@ "syslog": {:hex, :syslog, "1.1.0", "6419a232bea84f07b56dc575225007ffe34d9fdc91abe6f1b2f254fd71d8efc2", [:rebar3], [], "hexpm", "4c6a41373c7e20587be33ef841d3de6f3beba08519809329ecc4d27b15b659e1"}, "telemetry": {:hex, :telemetry, "0.4.2", "2808c992455e08d6177322f14d3bdb6b625fbcfd233a73505870d8738a2f4599", [:rebar3], [], "hexpm", "2d1419bd9dda6a206d7b5852179511722e2b18812310d304620c7bd92a13fcef"}, "tesla": {:hex, :tesla, "1.4.0", "1081bef0124b8bdec1c3d330bbe91956648fb008cf0d3950a369cda466a31a87", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:finch, "~> 0.3", [hex: :finch, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, "~> 1.3", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "~> 4.4.0", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "bf1374a5569f5fca8e641363b63f7347d680d91388880979a33bc12a6eb3e0aa"}, - "timex": {:hex, :timex, "3.6.2", "845cdeb6119e2fef10751c0b247b6c59d86d78554c83f78db612e3290f819bc2", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: 
:gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 0.1.8 or ~> 0.5 or ~> 1.0.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "26030b46199d02a590be61c2394b37ea25a3664c02fafbeca0b24c972025d47a"}, + "timex": {:hex, :timex, "3.7.3", "df8a2ea814749d700d6878ab9eacac9fdb498ecee2f507cb0002ec172bc24d0f", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "8691c1d86ca3a7bc14a156e2199dc8927be95d1a8f0e3b69e4bb2d6262c53ac6"}, "trailing_format_plug": {:hex, :trailing_format_plug, "0.0.7", "64b877f912cf7273bed03379936df39894149e35137ac9509117e59866e10e45", [:mix], [{:plug, "> 0.12.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bd4fde4c15f3e993a999e019d64347489b91b7a9096af68b2bdadd192afa693f"}, - "tzdata": {:hex, :tzdata, "1.0.4", "a3baa4709ea8dba552dca165af6ae97c624a2d6ac14bd265165eaa8e8af94af6", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "b02637db3df1fd66dd2d3c4f194a81633d0e4b44308d36c1b2fdfd1e4e6f169b"}, + "tzdata": {:hex, :tzdata, "1.0.5", "69f1ee029a49afa04ad77801febaf69385f3d3e3d1e4b56b9469025677b89a28", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "55519aa2a99e5d2095c1e61cc74c9be69688f8ab75c27da724eb8279ff402a5a"}, "ueberauth": {:hex, :ueberauth, "0.6.3", "d42ace28b870e8072cf30e32e385579c57b9cc96ec74fa1f30f30da9c14f3cc0", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "afc293d8a1140d6591b53e3eaf415ca92842cb1d32fad3c450c6f045f7f91b60"}, "unicode_util_compat": {:git, "https://github.com/benoitc/unicode_util_compat.git", "38d7bc105f51159e8ea3279c40121db9db1e652f", [tag: "0.3.1"]}, "unsafe": {:hex, :unsafe, "1.0.1", "a27e1874f72ee49312e0a9ec2e0b27924214a05e3ddac90e91727bc76f8613d8", [:mix], [], "hexpm", "6c7729a2d214806450d29766abc2afaa7a2cbecf415be64f36a6691afebb50e5"}, diff --git a/priv/gettext/it/LC_MESSAGES/errors.po b/priv/gettext/it/LC_MESSAGES/errors.po index cd0cd6c65..6a6ec058e 100644 --- a/priv/gettext/it/LC_MESSAGES/errors.po +++ b/priv/gettext/it/LC_MESSAGES/errors.po @@ -3,8 +3,8 @@ msgstr "" "Project-Id-Version: PACKAGE VERSION\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2020-06-19 14:33+0000\n" -"PO-Revision-Date: 2020-07-09 14:40+0000\n" -"Last-Translator: Ben Is \n" +"PO-Revision-Date: 2021-03-13 09:40+0000\n" +"Last-Translator: Ben Is \n" "Language-Team: Italian \n" "Language: it\n" @@ -45,7 +45,7 @@ msgstr "ha una voce invalida" ## From Ecto.Changeset.validate_exclusion/3 msgid "is reserved" -msgstr "è vietato" +msgstr "è riservato" ## From Ecto.Changeset.validate_confirmation/3 msgid "does not match confirmation" @@ -123,7 +123,7 @@ msgstr "Richiesta invalida" #: lib/pleroma/web/activity_pub/activity_pub_controller.ex:425 #, elixir-format msgid "Can't delete object" -msgstr "Non puoi eliminare quest'oggetto" +msgstr "Oggetto non eliminabile" #: lib/pleroma/web/mastodon_api/controllers/status_controller.ex:196 #, elixir-format @@ -160,12 +160,12 @@ msgstr "Non puoi pubblicare un messaggio vuoto senza allegati" #: lib/pleroma/web/common_api/utils.ex:504 #, elixir-format msgid "Comment must be up to %{max_size} characters" -msgstr "I commenti posso al massimo consistere di %{max_size} caratteri" +msgstr "I commenti posso al massimo contenere %{max_size} caratteri" #: lib/pleroma/config/config_db.ex:222 #, elixir-format msgid "Config with 
params %{params} not found" -msgstr "Configurazione con parametri %{max_size} non trovata" +msgstr "Configurazione con parametri %{params} non trovata" #: lib/pleroma/web/common_api/common_api.ex:95 #, elixir-format @@ -200,7 +200,7 @@ msgstr "Non de-intestato" #: lib/pleroma/web/common_api/common_api.ex:126 #, elixir-format msgid "Could not unrepeat" -msgstr "Non de-ripetuto" +msgstr "Non de-condiviso" #: lib/pleroma/web/common_api/common_api.ex:428 #: lib/pleroma/web/common_api/common_api.ex:437 @@ -310,12 +310,12 @@ msgstr "Il messaggio ha superato la lunghezza massima" #: lib/pleroma/plugs/ensure_public_or_authenticated_plug.ex:31 #, elixir-format msgid "This resource requires authentication." -msgstr "Accedi per leggere." +msgstr "Accedi per poter leggere." #: lib/pleroma/plugs/rate_limiter/rate_limiter.ex:206 #, elixir-format msgid "Throttled" -msgstr "Strozzato" +msgstr "Limitato" #: lib/pleroma/web/common_api/common_api.ex:266 #, elixir-format @@ -347,17 +347,17 @@ msgstr "Devi aggiungere un indirizzo email valido" #: lib/pleroma/web/activity_pub/activity_pub_controller.ex:389 #, elixir-format msgid "can't read inbox of %{nickname} as %{as_nickname}" -msgstr "non puoi leggere i messaggi privati di %{nickname} come %{as_nickname}" +msgstr "non puoi leggere i messaggi di %{nickname} come %{as_nickname}" #: lib/pleroma/web/activity_pub/activity_pub_controller.ex:472 #, elixir-format msgid "can't update outbox of %{nickname} as %{as_nickname}" -msgstr "non puoi aggiornare gli inviati di %{nickname} come %{as_nickname}" +msgstr "non puoi inviare da %{nickname} come %{as_nickname}" #: lib/pleroma/web/common_api/common_api.ex:388 #, elixir-format msgid "conversation is already muted" -msgstr "la conversazione è già zittita" +msgstr "la conversazione è già silenziata" #: lib/pleroma/web/activity_pub/activity_pub_controller.ex:316 #: lib/pleroma/web/activity_pub/activity_pub_controller.ex:491 @@ -419,7 +419,7 @@ msgstr "Errore interno" #: lib/pleroma/web/oauth/fallback_controller.ex:29 #, elixir-format msgid "Invalid Username/Password" -msgstr "Nome utente/parola d'ordine invalidi" +msgstr "Nome utente/password invalidi" #: lib/pleroma/web/twitter_api/twitter_api.ex:118 #, elixir-format @@ -455,7 +455,7 @@ msgstr "Gestore OAuth non supportato: %{provider}." #: lib/pleroma/uploaders/uploader.ex:72 #, elixir-format msgid "Uploader callback timeout" -msgstr "Callback caricatmento scaduta" +msgstr "Callback caricamento scaduta" #: lib/pleroma/web/uploader_controller.ex:23 #, elixir-format @@ -496,7 +496,7 @@ msgstr "Parametro mancante: %{name}" #: lib/pleroma/web/oauth/oauth_controller.ex:322 #, elixir-format msgid "Password reset is required" -msgstr "Necessario reimpostare parola d'ordine" +msgstr "Necessario reimpostare password" #: lib/pleroma/tests/auth_test_controller.ex:9 #: lib/pleroma/web/activity_pub/activity_pub_controller.ex:6 lib/pleroma/web/admin_api/admin_api_controller.ex:6 @@ -540,34 +540,32 @@ msgstr "" #: lib/pleroma/web/pleroma_api/controllers/emoji_api_controller.ex:210 #, elixir-format msgid "Unexpected error occurred while adding file to pack." -msgstr "Errore inaspettato durante l'aggiunta del file al pacchetto." +msgstr "Errore inatteso durante l'aggiunta del file al pacchetto." #: lib/pleroma/web/pleroma_api/controllers/emoji_api_controller.ex:138 #, elixir-format msgid "Unexpected error occurred while creating pack." -msgstr "Errore inaspettato durante la creazione del pacchetto." +msgstr "Errore inatteso durante la creazione del pacchetto." 
#: lib/pleroma/web/pleroma_api/controllers/emoji_api_controller.ex:278 #, elixir-format msgid "Unexpected error occurred while removing file from pack." -msgstr "Errore inaspettato durante la rimozione del file dal pacchetto." +msgstr "Errore inatteso durante la rimozione del file dal pacchetto." #: lib/pleroma/web/pleroma_api/controllers/emoji_api_controller.ex:250 #, elixir-format msgid "Unexpected error occurred while updating file in pack." -msgstr "Errore inaspettato durante l'aggiornamento del file nel pacchetto." +msgstr "Errore inatteso durante l'aggiornamento del file nel pacchetto." #: lib/pleroma/web/pleroma_api/controllers/emoji_api_controller.ex:179 #, elixir-format msgid "Unexpected error occurred while updating pack metadata." -msgstr "Errore inaspettato durante l'aggiornamento dei metadati del pacchetto." +msgstr "Errore inatteso durante l'aggiornamento dei metadati del pacchetto." #: lib/pleroma/plugs/user_is_admin_plug.ex:21 #, elixir-format msgid "User is not an admin." -msgstr "" -"L'utente non è un amministratore." -"OAuth." +msgstr "L'utente non è un amministratore." #: lib/pleroma/web/mastodon_api/controllers/subscription_controller.ex:61 #, elixir-format diff --git a/priv/repo/migrations/20190711042021_create_safe_jsonb_set.exs b/priv/repo/migrations/20190711042021_create_safe_jsonb_set.exs index 43d616705..bfac09f9e 100644 --- a/priv/repo/migrations/20190711042021_create_safe_jsonb_set.exs +++ b/priv/repo/migrations/20190711042021_create_safe_jsonb_set.exs @@ -9,7 +9,7 @@ def change do begin result := jsonb_set(target, path, coalesce(new_value, 'null'::jsonb), create_missing); if result is NULL then - raise 'jsonb_set tried to wipe the object, please report this incindent to Pleroma bug tracker. https://git.pleroma.social/pleroma/pleroma/issues/new'; + raise 'jsonb_set tried to wipe the object, please report this incident to Pleroma bug tracker. 
https://git.pleroma.social/pleroma/pleroma/issues/new'; return target; else return result; diff --git a/priv/repo/migrations/20201221202251_create_hashtags.exs b/priv/repo/migrations/20201221202251_create_hashtags.exs new file mode 100644 index 000000000..8d2e9ae66 --- /dev/null +++ b/priv/repo/migrations/20201221202251_create_hashtags.exs @@ -0,0 +1,13 @@ +defmodule Pleroma.Repo.Migrations.CreateHashtags do + use Ecto.Migration + + def change do + create_if_not_exists table(:hashtags) do + add(:name, :citext, null: false) + + timestamps() + end + + create_if_not_exists(unique_index(:hashtags, [:name])) + end +end diff --git a/priv/repo/migrations/20201221202252_remove_data_from_hashtags.exs b/priv/repo/migrations/20201221202252_remove_data_from_hashtags.exs new file mode 100644 index 000000000..0442c3b87 --- /dev/null +++ b/priv/repo/migrations/20201221202252_remove_data_from_hashtags.exs @@ -0,0 +1,15 @@ +defmodule Pleroma.Repo.Migrations.RemoveDataFromHashtags do + use Ecto.Migration + + def up do + alter table(:hashtags) do + remove_if_exists(:data, :map) + end + end + + def down do + alter table(:hashtags) do + add_if_not_exists(:data, :map, default: %{}) + end + end +end diff --git a/priv/repo/migrations/20201221203824_create_hashtags_objects.exs b/priv/repo/migrations/20201221203824_create_hashtags_objects.exs new file mode 100644 index 000000000..581f32b3c --- /dev/null +++ b/priv/repo/migrations/20201221203824_create_hashtags_objects.exs @@ -0,0 +1,13 @@ +defmodule Pleroma.Repo.Migrations.CreateHashtagsObjects do + use Ecto.Migration + + def change do + create_if_not_exists table(:hashtags_objects, primary_key: false) do + add(:hashtag_id, references(:hashtags), null: false, primary_key: true) + add(:object_id, references(:objects), null: false, primary_key: true) + end + + # Note: PK index: "hashtags_objects_pkey" PRIMARY KEY, btree (hashtag_id, object_id) + create_if_not_exists(index(:hashtags_objects, [:object_id])) + end +end diff --git a/priv/repo/migrations/20210105195018_create_data_migrations.exs b/priv/repo/migrations/20210105195018_create_data_migrations.exs new file mode 100644 index 000000000..5f2e8d96c --- /dev/null +++ b/priv/repo/migrations/20210105195018_create_data_migrations.exs @@ -0,0 +1,17 @@ +defmodule Pleroma.Repo.Migrations.CreateDataMigrations do + use Ecto.Migration + + def change do + create_if_not_exists table(:data_migrations) do + add(:name, :string, null: false) + add(:state, :integer, default: 1) + add(:feature_lock, :boolean, default: false) + add(:params, :map, default: %{}) + add(:data, :map, default: %{}) + + timestamps() + end + + create_if_not_exists(unique_index(:data_migrations, [:name])) + end +end diff --git a/priv/repo/migrations/20210106183301_data_migration_create_populate_hashtags_table.exs b/priv/repo/migrations/20210106183301_data_migration_create_populate_hashtags_table.exs new file mode 100644 index 000000000..cf3cf26a0 --- /dev/null +++ b/priv/repo/migrations/20210106183301_data_migration_create_populate_hashtags_table.exs @@ -0,0 +1,16 @@ +defmodule Pleroma.Repo.Migrations.DataMigrationCreatePopulateHashtagsTable do + use Ecto.Migration + + def up do + dt = NaiveDateTime.utc_now() + + execute( + "INSERT INTO data_migrations(name, inserted_at, updated_at) " <> + "VALUES ('populate_hashtags_table', '#{dt}', '#{dt}') ON CONFLICT DO NOTHING;" + ) + end + + def down do + execute("DELETE FROM data_migrations WHERE name = 'populate_hashtags_table';") + end +end diff --git 
a/priv/repo/migrations/20210111172254_create_data_migration_failed_ids.exs b/priv/repo/migrations/20210111172254_create_data_migration_failed_ids.exs new file mode 100644 index 000000000..18afa74ac --- /dev/null +++ b/priv/repo/migrations/20210111172254_create_data_migration_failed_ids.exs @@ -0,0 +1,14 @@ +defmodule Pleroma.Repo.Migrations.CreateDataMigrationFailedIds do + use Ecto.Migration + + def change do + create_if_not_exists table(:data_migration_failed_ids, primary_key: false) do + add(:data_migration_id, references(:data_migrations), null: false, primary_key: true) + add(:record_id, :bigint, null: false, primary_key: true) + end + + create_if_not_exists( + unique_index(:data_migration_failed_ids, [:data_migration_id, :record_id]) + ) + end +end diff --git a/priv/repo/migrations/20210121080964_add_default_text_search_config.exs b/priv/repo/migrations/20210121080964_add_default_text_search_config.exs index 09b6cccc9..27f600b70 100644 --- a/priv/repo/migrations/20210121080964_add_default_text_search_config.exs +++ b/priv/repo/migrations/20210121080964_add_default_text_search_config.exs @@ -4,7 +4,7 @@ defmodule Pleroma.Repo.Migrations.AddDefaultTextSearchConfig do def change do execute("DO $$ BEGIN - execute 'ALTER DATABASE '||current_database()||' SET default_text_search_config = ''english'' '; + execute 'ALTER DATABASE \"'||current_database()||'\" SET default_text_search_config = ''english'' '; END $$;") end diff --git a/priv/repo/migrations/20210202110641_add_pinned_objects_to_users.exs b/priv/repo/migrations/20210202110641_add_pinned_objects_to_users.exs new file mode 100644 index 000000000..644527246 --- /dev/null +++ b/priv/repo/migrations/20210202110641_add_pinned_objects_to_users.exs @@ -0,0 +1,9 @@ +defmodule Pleroma.Repo.Migrations.AddPinnedObjectsToUsers do + use Ecto.Migration + + def change do + alter table(:users) do + add(:pinned_objects, :map) + end + end +end diff --git a/priv/repo/migrations/20210203141144_add_featured_address_to_users.exs b/priv/repo/migrations/20210203141144_add_featured_address_to_users.exs new file mode 100644 index 000000000..0f6a21611 --- /dev/null +++ b/priv/repo/migrations/20210203141144_add_featured_address_to_users.exs @@ -0,0 +1,23 @@ +defmodule Pleroma.Repo.Migrations.AddFeaturedAddressToUsers do + use Ecto.Migration + + def up do + alter table(:users) do + add(:featured_address, :string) + end + + create(index(:users, [:featured_address])) + + execute(""" + + update users set featured_address = concat(ap_id, '/collections/featured') where local = true and featured_address is null; + + """) + end + + def down do + alter table(:users) do + remove(:featured_address) + end + end +end diff --git a/priv/repo/migrations/20210205145000_move_pinned_activities_into_pinned_objects.exs b/priv/repo/migrations/20210205145000_move_pinned_activities_into_pinned_objects.exs new file mode 100644 index 000000000..9aee545e3 --- /dev/null +++ b/priv/repo/migrations/20210205145000_move_pinned_activities_into_pinned_objects.exs @@ -0,0 +1,28 @@ +defmodule Pleroma.Repo.Migrations.MovePinnedActivitiesIntoPinnedObjects do + use Ecto.Migration + + import Ecto.Query + + alias Pleroma.Repo + alias Pleroma.User + + def up do + from(u in User) + |> select([u], {u.id, fragment("?.pinned_activities", u)}) + |> Repo.stream() + |> Stream.each(fn {user_id, pinned_activities_ids} -> + pinned_activities = Pleroma.Activity.all_by_ids_with_object(pinned_activities_ids) + + pins = + Map.new(pinned_activities, fn %{object: %{data: %{"id" => object_id}}} -> + {object_id, 
NaiveDateTime.utc_now()} + end) + + from(u in User, where: u.id == ^user_id) + |> Repo.update_all(set: [pinned_objects: pins]) + end) + |> Stream.run() + end + + def down, do: :noop +end diff --git a/priv/repo/migrations/20210206045221_remove_pinned_activities_from_users.exs b/priv/repo/migrations/20210206045221_remove_pinned_activities_from_users.exs new file mode 100644 index 000000000..a3ee93f48 --- /dev/null +++ b/priv/repo/migrations/20210206045221_remove_pinned_activities_from_users.exs @@ -0,0 +1,15 @@ +defmodule Pleroma.Repo.Migrations.RemovePinnedActivitiesFromUsers do + use Ecto.Migration + + def up do + alter table(:users) do + remove(:pinned_activities) + end + end + + def down do + alter table(:users) do + add(:pinned_activities, {:array, :string}, default: []) + end + end +end diff --git a/priv/repo/migrations/20210222183840_remove_hashtags_objects_duplicate_index.exs b/priv/repo/migrations/20210222183840_remove_hashtags_objects_duplicate_index.exs new file mode 100644 index 000000000..6c4a2dfdc --- /dev/null +++ b/priv/repo/migrations/20210222183840_remove_hashtags_objects_duplicate_index.exs @@ -0,0 +1,11 @@ +defmodule Pleroma.Repo.Migrations.RemoveHashtagsObjectsDuplicateIndex do + use Ecto.Migration + + @moduledoc "Removes `hashtags_objects_hashtag_id_object_id_index` index (duplicate of PK index)." + + def up do + drop_if_exists(unique_index(:hashtags_objects, [:hashtag_id, :object_id])) + end + + def down, do: nil +end diff --git a/priv/repo/migrations/20210222184616_change_hashtags_name_to_text.exs b/priv/repo/migrations/20210222184616_change_hashtags_name_to_text.exs new file mode 100644 index 000000000..8940b6ca3 --- /dev/null +++ b/priv/repo/migrations/20210222184616_change_hashtags_name_to_text.exs @@ -0,0 +1,15 @@ +defmodule Pleroma.Repo.Migrations.ChangeHashtagsNameToText do + use Ecto.Migration + + def up do + alter table(:hashtags) do + modify(:name, :text) + end + end + + def down do + alter table(:hashtags) do + modify(:name, :citext) + end + end +end diff --git a/priv/repo/migrations/20210401143153_user_notification_settings_fix.exs b/priv/repo/migrations/20210401143153_user_notification_settings_fix.exs new file mode 100644 index 000000000..cf68f1be6 --- /dev/null +++ b/priv/repo/migrations/20210401143153_user_notification_settings_fix.exs @@ -0,0 +1,17 @@ +defmodule Pleroma.Repo.Migrations.UserNotificationSettingsFix do + use Ecto.Migration + + def up do + execute(~s(UPDATE users + SET + notification_settings = '{"followers": true, "follows": true, "non_follows": true, "non_followers": true}'::jsonb WHERE notification_settings IS NULL +)) + + execute("ALTER TABLE users + ALTER COLUMN notification_settings SET NOT NULL") + end + + def down do + :ok + end +end diff --git a/priv/scrubbers/default.ex b/priv/scrubbers/default.ex index 7b06994de..4694a92a5 100644 --- a/priv/scrubbers/default.ex +++ b/priv/scrubbers/default.ex @@ -39,6 +39,7 @@ defmodule Pleroma.HTML.Scrubber.Default do Meta.allow_tag_with_these_attributes(:code, []) Meta.allow_tag_with_these_attributes(:del, []) Meta.allow_tag_with_these_attributes(:em, []) + Meta.allow_tag_with_these_attributes(:hr, []) Meta.allow_tag_with_these_attributes(:i, []) Meta.allow_tag_with_these_attributes(:li, []) Meta.allow_tag_with_these_attributes(:ol, []) @@ -58,6 +59,8 @@ defmodule Pleroma.HTML.Scrubber.Default do Meta.allow_tag_with_this_attribute_values(:span, "class", ["h-card"]) Meta.allow_tag_with_these_attributes(:span, []) + Meta.allow_tag_with_this_attribute_values(:code, "class", ["inline"]) + 
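+  # Note: Earmark 1.4.x emits inline code spans as <code class="inline">, hence the class allowance above.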
@allow_inline_images Pleroma.Config.get([:markup, :allow_inline_images]) if @allow_inline_images do diff --git a/test/fixtures/bridgy/actor.json b/test/fixtures/bridgy/actor.json new file mode 100644 index 000000000..5b2d8982b --- /dev/null +++ b/test/fixtures/bridgy/actor.json @@ -0,0 +1,80 @@ +{ + "id": "https://fed.brid.gy/jk.nipponalba.scot", + "url": "https://fed.brid.gy/r/https://jk.nipponalba.scot", + "urls": [ + { + "value": "https://jk.nipponalba.scot" + }, + { + "value": "https://social.nipponalba.scot/jk" + }, + { + "value": "https://px.nipponalba.scot/jk" + } + ], + "@context": "https://www.w3.org/ns/activitystreams", + "type": "Person", + "name": "J K 🇯🇵🏴󠁧󠁢󠁳󠁣󠁴󠁿", + "image": [ + { + "url": "https://jk.nipponalba.scot/images/profile.jpg", + "type": "Image", + "name": "profile picture" + } + ], + "tag": [ + { + "type": "Tag", + "name": "Craft Beer" + }, + { + "type": "Tag", + "name": "Single Malt Whisky" + }, + { + "type": "Tag", + "name": "Homebrewing" + }, + { + "type": "Tag", + "name": "Scottish Politics" + }, + { + "type": "Tag", + "name": "Scottish History" + }, + { + "type": "Tag", + "name": "Japanese History" + }, + { + "type": "Tag", + "name": "Tech" + }, + { + "type": "Tag", + "name": "Veganism" + }, + { + "type": "Tag", + "name": "Cooking" + } + ], + "icon": [ + { + "url": "https://jk.nipponalba.scot/images/profile.jpg", + "type": "Image", + "name": "profile picture" + } + ], + "preferredUsername": "jk.nipponalba.scot", + "summary": "", + "publicKey": { + "id": "jk.nipponalba.scot", + "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDdarxwzxnNbJ2hneWOYHkYJowk\npyigQtxlUd0VjgSQHwxU9kWqfbrHBVADyTtcqi/4dAzQd3UnCI1TPNnn4LPZY9PW\noiWd3Zl1/EfLFxO7LU9GS7fcSLQkyj5JNhSlN3I8QPudZbybrgRDVZYooDe1D+52\n5KLGqC2ajrIVOiDRTQIDAQAB\n-----END PUBLIC KEY-----" + }, + "inbox": "https://fed.brid.gy/jk.nipponalba.scot/inbox", + "outbox": "https://fed.brid.gy/jk.nipponalba.scot/outbox", + "following": "https://fed.brid.gy/jk.nipponalba.scot/following", + "followers": "https://fed.brid.gy/jk.nipponalba.scot/followers" +} diff --git a/test/fixtures/config/temp.exported_from_db.secret.exs b/test/fixtures/config/temp.exported_from_db.secret.exs new file mode 100644 index 000000000..64bee7f32 --- /dev/null +++ b/test/fixtures/config/temp.exported_from_db.secret.exs @@ -0,0 +1,5 @@ +use Mix.Config + +config :pleroma, exported_config_merged: true + +config :pleroma, :first_setting, key: "new value" diff --git a/test/fixtures/mastodon/collections/featured.json b/test/fixtures/mastodon/collections/featured.json new file mode 100644 index 000000000..56f8f56fa --- /dev/null +++ b/test/fixtures/mastodon/collections/featured.json @@ -0,0 +1,39 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://{{domain}}/schemas/litepub-0.1.jsonld", + { + "@language": "und" + } + ], + "id": "https://{{domain}}/users/{{nickname}}/collections/featured", + "orderedItems": [ + { + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://{{domain}}/schemas/litepub-0.1.jsonld", + { + "@language": "und" + } + ], + "actor": "https://{{domain}}/users/{{nickname}}", + "attachment": [], + "attributedTo": "https://{{domain}}/users/{{nickname}}", + "cc": [ + "https://{{domain}}/users/{{nickname}}/followers" + ], + "content": "", + "id": "https://{{domain}}/objects/{{object_id}}", + "published": "2021-02-12T15:13:43.915429Z", + "sensitive": false, + "source": "", + "summary": "", + "tag": [], + "to": [ + "https://www.w3.org/ns/activitystreams#Public" + ], + "type": 
"Note" + } + ], + "type": "OrderedCollection" +} diff --git a/test/fixtures/statuses/masto-note.json b/test/fixtures/statuses/masto-note.json new file mode 100644 index 000000000..6b96de473 --- /dev/null +++ b/test/fixtures/statuses/masto-note.json @@ -0,0 +1,47 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + { + "ostatus": "http://ostatus.org#", + "atomUri": "ostatus:atomUri", + "inReplyToAtomUri": "ostatus:inReplyToAtomUri", + "conversation": "ostatus:conversation", + "sensitive": "as:sensitive", + "toot": "http://joinmastodon.org/ns#", + "votersCount": "toot:votersCount" + } + ], + "id": "https://example.com/users/{{nickname}}/statuses/{{status_id}}", + "type": "Note", + "summary": null, + "inReplyTo": null, + "published": "2021-02-24T12:40:49Z", + "url": "https://example.com/@{{nickname}}/{{status_id}}", + "attributedTo": "https://example.com/users/{{nickname}}", + "to": [ + "https://www.w3.org/ns/activitystreams#Public" + ], + "cc": [ + "https://example.com/users/{{nickname}}/followers" + ], + "sensitive": false, + "atomUri": "https://example.com/users/{{nickname}}/statuses/{{status_id}}", + "inReplyToAtomUri": null, + "conversation": "tag:example.com,2021-02-24:objectId=15:objectType=Conversation", + "content": "

    ", + "contentMap": { + "en": "

    " + }, + "attachment": [], + "tag": [], + "replies": { + "id": "https://example.com/users/{{nickname}}/statuses/{{status_id}}/replies", + "type": "Collection", + "first": { + "type": "CollectionPage", + "next": "https://example.com/users/{{nickname}}/statuses/{{status_id}}/replies?only_other_accounts=true&page=true", + "partOf": "https://example.com/users/{{nickname}}/statuses/{{status_id}}/replies", + "items": [] + } + } +} diff --git a/test/fixtures/statuses/note.json b/test/fixtures/statuses/note.json new file mode 100644 index 000000000..41735cbc5 --- /dev/null +++ b/test/fixtures/statuses/note.json @@ -0,0 +1,27 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://example.com/schemas/litepub-0.1.jsonld", + { + "@language": "und" + } + ], + "actor": "https://example.com/users/{{nickname}}", + "attachment": [], + "attributedTo": "https://example.com/users/{{nickname}}", + "cc": [ + "https://example.com/users/{{nickname}}/followers" + ], + "content": "Content", + "context": "https://example.com/contexts/e4b180e1-7403-477f-aeb4-de57e7a3fe7f", + "conversation": "https://example.com/contexts/e4b180e1-7403-477f-aeb4-de57e7a3fe7f", + "id": "https://example.com/objects/{{object_id}}", + "published": "2019-12-15T22:00:05.279583Z", + "sensitive": false, + "summary": "", + "tag": [], + "to": [ + "https://www.w3.org/ns/activitystreams#Public" + ], + "type": "Note" +} diff --git a/test/fixtures/tesla_mock/emoji-in-summary.json b/test/fixtures/tesla_mock/emoji-in-summary.json new file mode 100644 index 000000000..f77c6e2e8 --- /dev/null +++ b/test/fixtures/tesla_mock/emoji-in-summary.json @@ -0,0 +1,49 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://patch.cx/schemas/litepub-0.1.jsonld", + { + "@language": "und" + } + ], + "actor": "https://patch.cx/users/rin", + "attachment": [], + "attributedTo": "https://patch.cx/users/rin", + "cc": [ + "https://patch.cx/users/rin/followers" + ], + "content": ":joker_disapprove:

    just grabbing a test fixture, nevermind me", + "context": "https://patch.cx/contexts/2c3ce4b4-18b1-4b1a-8965-3932027b5326", + "conversation": "https://patch.cx/contexts/2c3ce4b4-18b1-4b1a-8965-3932027b5326", + "id": "https://patch.cx/objects/a399c28e-c821-4820-bc3e-4afeb044c16f", + "published": "2021-03-22T16:54:46.461939Z", + "sensitive": null, + "source": ":joker_disapprove: \r\n\r\njust grabbing a test fixture, nevermind me", + "summary": ":joker_smile: ", + "tag": [ + { + "icon": { + "type": "Image", + "url": "https://patch.cx/emoji/custom/joker_disapprove.png" + }, + "id": "https://patch.cx/emoji/custom/joker_disapprove.png", + "name": ":joker_disapprove:", + "type": "Emoji", + "updated": "1970-01-01T00:00:00Z" + }, + { + "icon": { + "type": "Image", + "url": "https://patch.cx/emoji/custom/joker_smile.png" + }, + "id": "https://patch.cx/emoji/custom/joker_smile.png", + "name": ":joker_smile:", + "type": "Emoji", + "updated": "1970-01-01T00:00:00Z" + } + ], + "to": [ + "https://www.w3.org/ns/activitystreams#Public" + ], + "type": "Note" +} diff --git a/test/fixtures/tesla_mock/xn--q9jyb4c_host_meta b/test/fixtures/tesla_mock/xn--q9jyb4c_host_meta deleted file mode 100644 index 45d260e55..000000000 --- a/test/fixtures/tesla_mock/xn--q9jyb4c_host_meta +++ /dev/null @@ -1,4 +0,0 @@ - - - - diff --git a/test/fixtures/users_mock/masto_featured.json b/test/fixtures/users_mock/masto_featured.json new file mode 100644 index 000000000..646a343ad --- /dev/null +++ b/test/fixtures/users_mock/masto_featured.json @@ -0,0 +1,18 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + { + "ostatus": "http://ostatus.org#", + "atomUri": "ostatus:atomUri", + "inReplyToAtomUri": "ostatus:inReplyToAtomUri", + "conversation": "ostatus:conversation", + "sensitive": "as:sensitive", + "toot": "http://joinmastodon.org/ns#", + "votersCount": "toot:votersCount" + } + ], + "id": "https://{{domain}}/users/{{nickname}}/collections/featured", + "type": "OrderedCollection", + "totalItems": 0, + "orderedItems": [] +} diff --git a/test/fixtures/users_mock/user.json b/test/fixtures/users_mock/user.json new file mode 100644 index 000000000..c722a1145 --- /dev/null +++ b/test/fixtures/users_mock/user.json @@ -0,0 +1,42 @@ +{ + "@context": [ + "https://www.w3.org/ns/activitystreams", + "https://example.com/schemas/litepub-0.1.jsonld", + { + "@language": "und" + } + ], + "attachment": [], + "endpoints": { + "oauthAuthorizationEndpoint": "https://example.com/oauth/authorize", + "oauthRegistrationEndpoint": "https://example.com/api/v1/apps", + "oauthTokenEndpoint": "https://example.com/oauth/token", + "sharedInbox": "https://example.com/inbox" + }, + "followers": "https://example.com/users/{{nickname}}/followers", + "following": "https://example.com/users/{{nickname}}/following", + "icon": { + "type": "Image", + "url": "https://example.com/media/4e914f5b84e4a259a3f6c2d2edc9ab642f2ab05f3e3d9c52c81fc2d984b3d51e.jpg" + }, + "id": "https://example.com/users/{{nickname}}", + "image": { + "type": "Image", + "url": "https://example.com/media/f739efddefeee49c6e67e947c4811fdc911785c16ae43da4c3684051fbf8da6a.jpg?name=f739efddefeee49c6e67e947c4811fdc911785c16ae43da4c3684051fbf8da6a.jpg" + }, + "inbox": "https://example.com/users/{{nickname}}/inbox", + "manuallyApprovesFollowers": false, + "name": "{{nickname}}", + "outbox": "https://example.com/users/{{nickname}}/outbox", + "preferredUsername": "{{nickname}}", + "publicKey": { + "id": "https://example.com/users/{{nickname}}#main-key", + "owner": 
"https://example.com/users/{{nickname}}", + "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5DLtwGXNZElJyxFGfcVc\nXANhaMadj/iYYQwZjOJTV9QsbtiNBeIK54PJrYuU0/0YIdrvS1iqheX5IwXRhcwa\nhm3ZyLz7XeN9st7FBni4BmZMBtMpxAuYuu5p/jbWy13qAiYOhPreCx0wrWgm/lBD\n9mkgaxIxPooBE0S4ZWEJIDIV1Vft3AWcRUyWW1vIBK0uZzs6GYshbQZB952S0yo4\nFzI1hABGHncH8UvuFauh4EZ8tY7/X5I0pGRnDOcRN1dAht5w5yTA+6r5kebiFQjP\nIzN/eCO/a9Flrj9YGW7HDNtjSOH0A31PLRGlJtJO3yK57dnf5ppyCZGfL4emShQo\ncQIDAQAB\n-----END PUBLIC KEY-----\n\n" + }, + "featured": "https://example.com/users/{{nickname}}/collections/featured", + "summary": "your friendly neighborhood pleroma developer
    I like cute things and distributed systems, and really hate delete and redrafts", + "tag": [], + "type": "Person", + "url": "https://example.com/users/{{nickname}}" +} diff --git a/test/mix/tasks/pleroma/config_test.exs b/test/mix/tasks/pleroma/config_test.exs index 21f8f2286..3ed1e94b8 100644 --- a/test/mix/tasks/pleroma/config_test.exs +++ b/test/mix/tasks/pleroma/config_test.exs @@ -200,6 +200,44 @@ test "load a settings with large values and pass to file", %{temp_file: temp_fil end end + describe "migrate_from_db/1" do + setup do: clear_config(:configurable_from_database, true) + + setup do + insert_config_record(:pleroma, :setting_first, key: "value", key2: ["Activity"]) + insert_config_record(:pleroma, :setting_second, key: "value2", key2: [Repo]) + insert_config_record(:quack, :level, :info) + + path = "test/instance_static" + file_path = Path.join(path, "temp.exported_from_db.secret.exs") + + on_exit(fn -> File.rm!(file_path) end) + + [file_path: file_path] + end + + test "with path parameter", %{file_path: file_path} do + MixTask.run(["migrate_from_db", "--env", "temp", "--path", Path.dirname(file_path)]) + + file = File.read!(file_path) + assert file =~ "config :pleroma, :setting_first," + assert file =~ "config :pleroma, :setting_second," + assert file =~ "config :quack, :level, :info" + end + + test "release", %{file_path: file_path} do + clear_config(:release, true) + clear_config(:config_path, file_path) + + MixTask.run(["migrate_from_db", "--env", "temp"]) + + file = File.read!(file_path) + assert file =~ "config :pleroma, :setting_first," + assert file =~ "config :pleroma, :setting_second," + assert file =~ "config :quack, :level, :info" + end + end + describe "operations on database config" do setup do: clear_config(:configurable_from_database, true) diff --git a/test/pleroma/activity/ir/topics_test.exs b/test/pleroma/activity/ir/topics_test.exs index 6b848e04d..9c8e5d932 100644 --- a/test/pleroma/activity/ir/topics_test.exs +++ b/test/pleroma/activity/ir/topics_test.exs @@ -11,6 +11,8 @@ defmodule Pleroma.Activity.Ir.TopicsTest do require Pleroma.Constants + import Mock + describe "poll answer" do test "produce no topics" do activity = %Activity{object: %Object{data: %{"type" => "Answer"}}} @@ -77,14 +79,13 @@ test "with no attachments doesn't produce public:media topics", %{activity: acti refute Enum.member?(topics, "public:local:media") end - test "converts tags to hash tags", %{activity: %{object: %{data: data} = object} = activity} do - tagged_data = Map.put(data, "tag", ["foo", "bar"]) - activity = %{activity | object: %{object | data: tagged_data}} + test "converts tags to hash tags", %{activity: activity} do + with_mock(Object, [:passthrough], hashtags: fn _ -> ["foo", "bar"] end) do + topics = Topics.get_activity_topics(activity) - topics = Topics.get_activity_topics(activity) - - assert Enum.member?(topics, "hashtag:foo") - assert Enum.member?(topics, "hashtag:bar") + assert Enum.member?(topics, "hashtag:foo") + assert Enum.member?(topics, "hashtag:bar") + end end test "only converts strings to hash tags", %{ diff --git a/test/pleroma/activity_test.exs b/test/pleroma/activity_test.exs index 390a06344..962bc7e45 100644 --- a/test/pleroma/activity_test.exs +++ b/test/pleroma/activity_test.exs @@ -254,4 +254,26 @@ test "get_by_object_ap_id_with_object/1" do assert %{id: ^id} = Activity.get_by_object_ap_id_with_object(obj_id) end + + test "add_by_params_query/3" do + user = insert(:user) + + note = insert(:note_activity, user: user) + + insert(:add_activity, 
user: user, note: note) + insert(:add_activity, user: user, note: note) + insert(:add_activity, user: user) + + assert Repo.aggregate(Activity, :count, :id) == 4 + + add_query = + Activity.add_by_params_query(note.data["object"], user.ap_id, user.featured_address) + + assert Repo.aggregate(add_query, :count, :id) == 2 + + Repo.delete_all(add_query) + assert Repo.aggregate(add_query, :count, :id) == 0 + + assert Repo.aggregate(Activity, :count, :id) == 2 + end end diff --git a/test/pleroma/application_requirements_test.exs b/test/pleroma/application_requirements_test.exs index 683ac8c96..a54c37968 100644 --- a/test/pleroma/application_requirements_test.exs +++ b/test/pleroma/application_requirements_test.exs @@ -35,13 +35,13 @@ test "doesn't raise if the pool size is unexpected but the respective flag is se setup do: clear_config([:welcome]) setup do: clear_config([Pleroma.Emails.Mailer]) - test "raises if welcome email enabled but mail disabled" do + test "warns if welcome email enabled but mail disabled" do clear_config([:welcome, :email, :enabled], true) clear_config([Pleroma.Emails.Mailer, :enabled], false) - assert_raise Pleroma.ApplicationRequirements.VerifyError, "The mail disabled.", fn -> - capture_log(&Pleroma.ApplicationRequirements.verify!/0) - end + assert capture_log(fn -> + assert Pleroma.ApplicationRequirements.verify!() == :ok + end) =~ "Welcome emails will NOT be sent" end end @@ -57,15 +57,13 @@ test "raises if welcome email enabled but mail disabled" do setup do: clear_config([:instance, :account_activation_required]) - test "raises if account confirmation is required but mailer isn't enable" do + test "warns if account confirmation is required but mailer isn't enabled" do clear_config([:instance, :account_activation_required], true) clear_config([Pleroma.Emails.Mailer, :enabled], false) - assert_raise Pleroma.ApplicationRequirements.VerifyError, - "Account activation enabled, but Mailer is disabled. Cannot send confirmation emails.", - fn -> - capture_log(&Pleroma.ApplicationRequirements.verify!/0) - end + assert capture_log(fn -> + assert Pleroma.ApplicationRequirements.verify!() == :ok + end) =~ "Users will NOT be able to confirm their accounts" end test "doesn't do anything if account confirmation is disabled" do diff --git a/test/pleroma/config/release_runtime_provider_test.exs b/test/pleroma/config/release_runtime_provider_test.exs new file mode 100644 index 000000000..6578d3268 --- /dev/null +++ b/test/pleroma/config/release_runtime_provider_test.exs @@ -0,0 +1,45 @@ +defmodule Pleroma.Config.ReleaseRuntimeProviderTest do + use ExUnit.Case, async: true + + alias Pleroma.Config.ReleaseRuntimeProvider + + describe "load/2" do + test "loads release defaults config and warns about non-existent runtime config" do + ExUnit.CaptureIO.capture_io(fn -> + merged = ReleaseRuntimeProvider.load([], []) + assert merged == Pleroma.Config.Holder.release_defaults() + end) =~ + "!!! Config path is not declared! 
Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file" + end + + test "merged runtime config" do + merged = + ReleaseRuntimeProvider.load([], config_path: "test/fixtures/config/temp.secret.exs") + + assert merged[:pleroma][:first_setting] == [key: "value", key2: [Pleroma.Repo]] + assert merged[:pleroma][:second_setting] == [key: "value2", key2: ["Activity"]] + end + + test "merged exported config" do + ExUnit.CaptureIO.capture_io(fn -> + merged = + ReleaseRuntimeProvider.load([], + exported_config_path: "test/fixtures/config/temp.exported_from_db.secret.exs" + ) + + assert merged[:pleroma][:exported_config_merged] + end) =~ + "!!! Config path is not declared! Please ensure it exists and that PLEROMA_CONFIG_PATH is unset or points to an existing file" + end + + test "runtime config is merged with exported config" do + merged = + ReleaseRuntimeProvider.load([], + config_path: "test/fixtures/config/temp.secret.exs", + exported_config_path: "test/fixtures/config/temp.exported_from_db.secret.exs" + ) + + assert merged[:pleroma][:first_setting] == [key2: [Pleroma.Repo], key: "new value"] + end + end +end diff --git a/test/pleroma/earmark_renderer_test.exs b/test/pleroma/earmark_renderer_test.exs deleted file mode 100644 index 776bc496a..000000000 --- a/test/pleroma/earmark_renderer_test.exs +++ /dev/null @@ -1,79 +0,0 @@ -# Pleroma: A lightweight social networking server -# Copyright © 2017-2021 Pleroma Authors -# SPDX-License-Identifier: AGPL-3.0-only -defmodule Pleroma.EarmarkRendererTest do - use Pleroma.DataCase, async: true - - test "Paragraph" do - code = ~s[Hello\n\nWorld!] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) - assert result == "

    <p>Hello</p><p>World!</p>" - end
    - - test "raw HTML" do - code = ~s[<a href="http://example.org/">OwO</a><img src="http://example.org/art.png" />] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) - assert result == "<p>#{code}</p>" - end
    - - test "rulers" do - code = ~s[before\n\n-----\n\nafter] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) - assert result == "<p>before</p><hr/><p>after</p>" - end
    - - test "headings" do - code = ~s[# h1\n## h2\n### h3\n] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) - assert result == ~s[<h1>h1</h1><h2>h2</h2><h3>h3</h3>] - end
    - - test "blockquote" do - code = ~s[> whoms't are you quoting?] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) - assert result == "<blockquote><p>whoms’t are you quoting?</p></blockquote>" - end
    - - test "code" do - code = ~s[`mix`] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) - assert result == ~s[<p><code class="inline">mix</code></p>]
    - - code = ~s[``mix``] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) - assert result == ~s[<p><code class="inline">mix</code></p>]
    - - code = ~s[```\nputs "Hello World"\n```] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) - assert result == ~s[<pre><code class="">puts "Hello World"</code></pre>] - end
    - - test "lists" do - code = ~s[- one\n- two\n- three\n- four] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) - assert result == "<ul><li>one</li><li>two</li><li>three</li><li>four</li></ul>"
    - - code = ~s[1. one\n2. two\n3. three\n4. four\n] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) - assert result == "<ol><li>one</li><li>two</li><li>three</li><li>four</li></ol>" - end
    - - test "delegated renderers" do - code = ~s[a<br/>b] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) - assert result == "<p>#{code}</p>"
    - - code = ~s[*aaaa~*] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) - assert result == ~s[<p><em>aaaa~</em></p>]
    - - code = ~s[**aaaa~**] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) - assert result == ~s[<p><strong>aaaa~</strong></p>]
    - - # strikethrought - code = ~s[<del>aaaa~</del>] - result = Earmark.as_html!(code, %Earmark.Options{renderer: Pleroma.EarmarkRenderer}) - assert result == ~s[<p><del>aaaa~</del></p>
    ] - end -end diff --git a/test/pleroma/hashtag_test.exs b/test/pleroma/hashtag_test.exs new file mode 100644 index 000000000..0264dea0b --- /dev/null +++ b/test/pleroma/hashtag_test.exs @@ -0,0 +1,17 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.HashtagTest do + use Pleroma.DataCase + + alias Pleroma.Hashtag + + describe "changeset validations" do + test "ensure non-blank :name" do + changeset = Hashtag.changeset(%Hashtag{}, %{name: ""}) + + assert {:name, {"can't be blank", [validation: :required]}} in changeset.errors + end + end +end diff --git a/test/pleroma/object_test.exs b/test/pleroma/object_test.exs index db7678d5d..8320660a5 100644 --- a/test/pleroma/object_test.exs +++ b/test/pleroma/object_test.exs @@ -5,10 +5,13 @@ defmodule Pleroma.ObjectTest do use Pleroma.DataCase use Oban.Testing, repo: Pleroma.Repo + import ExUnit.CaptureLog import Pleroma.Factory import Tesla.Mock + alias Pleroma.Activity + alias Pleroma.Hashtag alias Pleroma.Object alias Pleroma.Repo alias Pleroma.Tests.ObanHelpers @@ -417,4 +420,28 @@ test "preserves internal fields on refetch", %{mock_modified: mock_modified} do assert updated_object.data["like_count"] == 1 end end + + describe ":hashtags association" do + test "Hashtag records are created with Object record and updated on its change" do + user = insert(:user) + + {:ok, %{object: object}} = + CommonAPI.post(user, %{status: "some text #hashtag1 #hashtag2 ..."}) + + assert [%Hashtag{name: "hashtag1"}, %Hashtag{name: "hashtag2"}] = + Enum.sort_by(object.hashtags, & &1.name) + + {:ok, object} = Object.update_data(object, %{"tag" => []}) + + assert [] = object.hashtags + + object = Object.get_by_id(object.id) |> Repo.preload(:hashtags) + assert [] = object.hashtags + + {:ok, object} = Object.update_data(object, %{"tag" => ["abc", "def"]}) + + assert [%Hashtag{name: "abc"}, %Hashtag{name: "def"}] = + Enum.sort_by(object.hashtags, & &1.name) + end + end end diff --git a/test/pleroma/user_test.exs b/test/pleroma/user_test.exs index 6f5bcab57..c6b631499 100644 --- a/test/pleroma/user_test.exs +++ b/test/pleroma/user_test.exs @@ -572,6 +572,24 @@ test "it sends a registration confirmed email if no others will be sent" do ) end + test "it fails gracefully with invalid email config" do + cng = User.register_changeset(%User{}, @full_user_data) + + # Disable the mailer but enable all the things that want to send emails + clear_config([Pleroma.Emails.Mailer, :enabled], false) + clear_config([:instance, :account_activation_required], true) + clear_config([:instance, :account_approval_required], true) + clear_config([:welcome, :email, :enabled], true) + clear_config([:welcome, :email, :sender], "lain@lain.com") + + # The user is still created + assert {:ok, %User{nickname: "nick"}} = User.register(cng) + + # No emails are sent + ObanHelpers.perform_all() + refute_email_sent() + end + test "it requires an email, name, nickname and password, bio is optional when account_activation_required is enabled" do clear_config([:instance, :account_activation_required], true) @@ -2338,4 +2356,49 @@ test "active_user_count/1" do assert User.active_user_count(6) == 3 assert User.active_user_count(1) == 1 end + + describe "pins" do + setup do + user = insert(:user) + + [user: user, object_id: object_id_from_created_activity(user)] + end + + test "unique pins", %{user: user, object_id: object_id} do + assert {:ok, %{pinned_objects: %{^object_id => pinned_at1} = pins} = 
updated_user} = + User.add_pinned_object_id(user, object_id) + + assert Enum.count(pins) == 1 + + assert {:ok, %{pinned_objects: %{^object_id => pinned_at2} = pins}} = + User.add_pinned_object_id(updated_user, object_id) + + assert pinned_at1 == pinned_at2 + + assert Enum.count(pins) == 1 + end + + test "respects max_pinned_statuses limit", %{user: user, object_id: object_id} do + clear_config([:instance, :max_pinned_statuses], 1) + {:ok, updated} = User.add_pinned_object_id(user, object_id) + + object_id2 = object_id_from_created_activity(user) + + {:error, %{errors: errors}} = User.add_pinned_object_id(updated, object_id2) + assert Keyword.has_key?(errors, :pinned_objects) + end + + test "remove_pinned_object_id/2", %{user: user, object_id: object_id} do + assert {:ok, updated} = User.add_pinned_object_id(user, object_id) + + {:ok, after_remove} = User.remove_pinned_object_id(updated, object_id) + assert after_remove.pinned_objects == %{} + end + end + + defp object_id_from_created_activity(user) do + %{id: id} = insert(:note_activity, user: user) + %{object: %{data: %{"id" => object_id}}} = Activity.get_by_id_with_object(id) + object_id + end end diff --git a/test/pleroma/web/activity_pub/activity_pub_controller_test.exs b/test/pleroma/web/activity_pub/activity_pub_controller_test.exs index 19e04d472..cea4b3a97 100644 --- a/test/pleroma/web/activity_pub/activity_pub_controller_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_controller_test.exs @@ -636,6 +636,186 @@ test "without valid signature, " <> |> post("/inbox", non_create_data) |> json_response(400) end + + test "accepts Add/Remove activities", %{conn: conn} do + object_id = "c61d6733-e256-4fe1-ab13-1e369789423f" + + status = + File.read!("test/fixtures/statuses/note.json") + |> String.replace("{{nickname}}", "lain") + |> String.replace("{{object_id}}", object_id) + + object_url = "https://example.com/objects/#{object_id}" + + user = + File.read!("test/fixtures/users_mock/user.json") + |> String.replace("{{nickname}}", "lain") + + actor = "https://example.com/users/lain" + + Tesla.Mock.mock(fn + %{ + method: :get, + url: ^object_url + } -> + %Tesla.Env{ + status: 200, + body: status, + headers: [{"content-type", "application/activity+json"}] + } + + %{ + method: :get, + url: ^actor + } -> + %Tesla.Env{ + status: 200, + body: user, + headers: [{"content-type", "application/activity+json"}] + } + + %{method: :get, url: "https://example.com/users/lain/collections/featured"} -> + %Tesla.Env{ + status: 200, + body: + "test/fixtures/users_mock/masto_featured.json" + |> File.read!() + |> String.replace("{{domain}}", "example.com") + |> String.replace("{{nickname}}", "lain"), + headers: [{"content-type", "application/activity+json"}] + } + end) + + data = %{ + "id" => "https://example.com/objects/d61d6733-e256-4fe1-ab13-1e369789423f", + "actor" => actor, + "object" => object_url, + "target" => "https://example.com/users/lain/collections/featured", + "type" => "Add", + "to" => [Pleroma.Constants.as_public()] + } + + assert "ok" == + conn + |> assign(:valid_signature, true) + |> put_req_header("content-type", "application/activity+json") + |> post("/inbox", data) + |> json_response(200) + + ObanHelpers.perform(all_enqueued(worker: ReceiverWorker)) + assert Activity.get_by_ap_id(data["id"]) + user = User.get_cached_by_ap_id(data["actor"]) + assert user.pinned_objects[data["object"]] + + data = %{ + "id" => "https://example.com/objects/d61d6733-e256-4fe1-ab13-1e369789423d", + "actor" => actor, + "object" => object_url, + "target" 
=> "https://example.com/users/lain/collections/featured", + "type" => "Remove", + "to" => [Pleroma.Constants.as_public()] + } + + assert "ok" == + conn + |> assign(:valid_signature, true) + |> put_req_header("content-type", "application/activity+json") + |> post("/inbox", data) + |> json_response(200) + + ObanHelpers.perform(all_enqueued(worker: ReceiverWorker)) + user = refresh_record(user) + refute user.pinned_objects[data["object"]] + end + + test "mastodon pin/unpin", %{conn: conn} do + status_id = "105786274556060421" + + status = + File.read!("test/fixtures/statuses/masto-note.json") + |> String.replace("{{nickname}}", "lain") + |> String.replace("{{status_id}}", status_id) + + status_url = "https://example.com/users/lain/statuses/#{status_id}" + + user = + File.read!("test/fixtures/users_mock/user.json") + |> String.replace("{{nickname}}", "lain") + + actor = "https://example.com/users/lain" + + Tesla.Mock.mock(fn + %{ + method: :get, + url: ^status_url + } -> + %Tesla.Env{ + status: 200, + body: status, + headers: [{"content-type", "application/activity+json"}] + } + + %{ + method: :get, + url: ^actor + } -> + %Tesla.Env{ + status: 200, + body: user, + headers: [{"content-type", "application/activity+json"}] + } + + %{method: :get, url: "https://example.com/users/lain/collections/featured"} -> + %Tesla.Env{ + status: 200, + body: + "test/fixtures/users_mock/masto_featured.json" + |> File.read!() + |> String.replace("{{domain}}", "example.com") + |> String.replace("{{nickname}}", "lain"), + headers: [{"content-type", "application/activity+json"}] + } + end) + + data = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "actor" => actor, + "object" => status_url, + "target" => "https://example.com/users/lain/collections/featured", + "type" => "Add" + } + + assert "ok" == + conn + |> assign(:valid_signature, true) + |> put_req_header("content-type", "application/activity+json") + |> post("/inbox", data) + |> json_response(200) + + ObanHelpers.perform(all_enqueued(worker: ReceiverWorker)) + assert Activity.get_by_object_ap_id_with_object(data["object"]) + user = User.get_cached_by_ap_id(data["actor"]) + assert user.pinned_objects[data["object"]] + + data = %{ + "actor" => actor, + "object" => status_url, + "target" => "https://example.com/users/lain/collections/featured", + "type" => "Remove" + } + + assert "ok" == + conn + |> assign(:valid_signature, true) + |> put_req_header("content-type", "application/activity+json") + |> post("/inbox", data) + |> json_response(200) + + ObanHelpers.perform(all_enqueued(worker: ReceiverWorker)) + assert Activity.get_by_object_ap_id_with_object(data["object"]) + user = refresh_record(user) + refute user.pinned_objects[data["object"]] + end end describe "/users/:nickname/inbox" do @@ -1772,4 +1952,29 @@ test "POST /api/ap/upload_media", %{conn: conn} do |> json_response(403) end end + + test "pinned collection", %{conn: conn} do + clear_config([:instance, :max_pinned_statuses], 2) + user = insert(:user) + objects = insert_list(2, :note, user: user) + + Enum.reduce(objects, user, fn %{data: %{"id" => object_id}}, user -> + {:ok, updated} = User.add_pinned_object_id(user, object_id) + updated + end) + + %{nickname: nickname, featured_address: featured_address, pinned_objects: pinned_objects} = + refresh_record(user) + + %{"id" => ^featured_address, "orderedItems" => items} = + conn + |> get("/users/#{nickname}/collections/featured") + |> json_response(200) + + object_ids = Enum.map(items, & &1["id"]) + + assert Enum.all?(pinned_objects, fn 
{obj_id, _} -> + obj_id in object_ids + end) + end end diff --git a/test/pleroma/web/activity_pub/activity_pub_test.exs b/test/pleroma/web/activity_pub/activity_pub_test.exs index f4023856c..64e12066e 100644 --- a/test/pleroma/web/activity_pub/activity_pub_test.exs +++ b/test/pleroma/web/activity_pub/activity_pub_test.exs @@ -208,37 +208,173 @@ test "works for guppe actors" do assert user.name == "Bernie2020 group" assert user.actor_type == "Group" end + + test "works for bridgy actors" do + user_id = "https://fed.brid.gy/jk.nipponalba.scot" + + Tesla.Mock.mock(fn + %{method: :get, url: ^user_id} -> + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/bridgy/actor.json"), + headers: [{"content-type", "application/activity+json"}] + } + end) + + {:ok, user} = ActivityPub.make_user_from_ap_id(user_id) + + assert user.actor_type == "Person" + + assert user.avatar == %{ + "type" => "Image", + "url" => [%{"href" => "https://jk.nipponalba.scot/images/profile.jpg"}] + } + + assert user.banner == %{ + "type" => "Image", + "url" => [%{"href" => "https://jk.nipponalba.scot/images/profile.jpg"}] + } + end + + test "fetches user featured collection" do + ap_id = "https://example.com/users/lain" + + featured_url = "https://example.com/users/lain/collections/featured" + + user_data = + "test/fixtures/users_mock/user.json" + |> File.read!() + |> String.replace("{{nickname}}", "lain") + |> Jason.decode!() + |> Map.put("featured", featured_url) + |> Jason.encode!() + + object_id = Ecto.UUID.generate() + + featured_data = + "test/fixtures/mastodon/collections/featured.json" + |> File.read!() + |> String.replace("{{domain}}", "example.com") + |> String.replace("{{nickname}}", "lain") + |> String.replace("{{object_id}}", object_id) + + object_url = "https://example.com/objects/#{object_id}" + + object_data = + "test/fixtures/statuses/note.json" + |> File.read!() + |> String.replace("{{object_id}}", object_id) + |> String.replace("{{nickname}}", "lain") + + Tesla.Mock.mock(fn + %{ + method: :get, + url: ^ap_id + } -> + %Tesla.Env{ + status: 200, + body: user_data, + headers: [{"content-type", "application/activity+json"}] + } + + %{ + method: :get, + url: ^featured_url + } -> + %Tesla.Env{ + status: 200, + body: featured_data, + headers: [{"content-type", "application/activity+json"}] + } + end) + + Tesla.Mock.mock_global(fn + %{ + method: :get, + url: ^object_url + } -> + %Tesla.Env{ + status: 200, + body: object_data, + headers: [{"content-type", "application/activity+json"}] + } + end) + + {:ok, user} = ActivityPub.make_user_from_ap_id(ap_id) + Process.sleep(50) + + assert user.featured_address == featured_url + assert Map.has_key?(user.pinned_objects, object_url) + + in_db = Pleroma.User.get_by_ap_id(ap_id) + assert in_db.featured_address == featured_url + assert Map.has_key?(user.pinned_objects, object_url) + + assert %{data: %{"id" => ^object_url}} = Object.get_by_ap_id(object_url) + end end test "it fetches the appropriate tag-restricted posts" do user = insert(:user) - {:ok, status_one} = CommonAPI.post(user, %{status: ". #test"}) + {:ok, status_one} = CommonAPI.post(user, %{status: ". #TEST"}) {:ok, status_two} = CommonAPI.post(user, %{status: ". #essais"}) - {:ok, status_three} = CommonAPI.post(user, %{status: ". #test #reject"}) + {:ok, status_three} = CommonAPI.post(user, %{status: ". #test #Reject"}) - fetch_one = ActivityPub.fetch_activities([], %{type: "Create", tag: "test"}) + {:ok, status_four} = CommonAPI.post(user, %{status: ". 
#Any1 #any2"}) + {:ok, status_five} = CommonAPI.post(user, %{status: ". #Any2 #any1"}) - fetch_two = ActivityPub.fetch_activities([], %{type: "Create", tag: ["test", "essais"]}) + for hashtag_timeline_strategy <- [:enabled, :disabled] do + clear_config([:features, :improved_hashtag_timeline], hashtag_timeline_strategy) - fetch_three = - ActivityPub.fetch_activities([], %{ - type: "Create", - tag: ["test", "essais"], - tag_reject: ["reject"] - }) + fetch_one = ActivityPub.fetch_activities([], %{type: "Create", tag: "test"}) - fetch_four = - ActivityPub.fetch_activities([], %{ - type: "Create", - tag: ["test"], - tag_all: ["test", "reject"] - }) + fetch_two = ActivityPub.fetch_activities([], %{type: "Create", tag: ["TEST", "essais"]}) - assert fetch_one == [status_one, status_three] - assert fetch_two == [status_one, status_two, status_three] - assert fetch_three == [status_one, status_two] - assert fetch_four == [status_three] + fetch_three = + ActivityPub.fetch_activities([], %{ + type: "Create", + tag: ["test", "Essais"], + tag_reject: ["reject"] + }) + + fetch_four = + ActivityPub.fetch_activities([], %{ + type: "Create", + tag: ["test"], + tag_all: ["test", "REJECT"] + }) + + # Testing that deduplication (if needed) is done on DB (not Ecto) level; :limit is important + fetch_five = + ActivityPub.fetch_activities([], %{ + type: "Create", + tag: ["ANY1", "any2"], + limit: 2 + }) + + fetch_six = + ActivityPub.fetch_activities([], %{ + type: "Create", + tag: ["any1", "Any2"], + tag_all: [], + tag_reject: [] + }) + + # Regression test: passing empty lists as filter options shouldn't affect the results + assert fetch_five == fetch_six + + [fetch_one, fetch_two, fetch_three, fetch_four, fetch_five] = + Enum.map([fetch_one, fetch_two, fetch_three, fetch_four, fetch_five], fn statuses -> + Enum.map(statuses, fn s -> Repo.preload(s, object: :hashtags) end) + end) + + assert fetch_one == [status_one, status_three] + assert fetch_two == [status_one, status_two, status_three] + assert fetch_three == [status_one, status_two] + assert fetch_four == [status_three] + assert fetch_five == [status_four, status_five] + end end describe "insertion" do diff --git a/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs b/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs new file mode 100644 index 000000000..a61562558 --- /dev/null +++ b/test/pleroma/web/activity_pub/mrf/follow_bot_policy_test.exs @@ -0,0 +1,126 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.FollowBotPolicyTest do + use Pleroma.DataCase, async: true + + alias Pleroma.User + alias Pleroma.Web.ActivityPub.MRF.FollowBotPolicy + + import Pleroma.Factory + + describe "FollowBotPolicy" do + test "follows remote users" do + bot = insert(:user, actor_type: "Service") + remote_user = insert(:user, local: false) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [remote_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "Test post", + "type" => "Note", + "attributedTo" => remote_user.ap_id, + "inReplyTo" => nil + }, + "actor" => remote_user.ap_id + } + + refute User.following?(bot, remote_user) + + assert User.get_follow_requests(remote_user) |> length == 0 + + FollowBotPolicy.filter(message) + + assert 
User.get_follow_requests(remote_user) |> length == 1 + end + + test "does not follow users with #nobot in bio" do + bot = insert(:user, actor_type: "Service") + remote_user = insert(:user, %{local: false, bio: "go away bots! #nobot"}) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [remote_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "I don't like follow bots", + "type" => "Note", + "attributedTo" => remote_user.ap_id, + "inReplyTo" => nil + }, + "actor" => remote_user.ap_id + } + + refute User.following?(bot, remote_user) + + assert User.get_follow_requests(remote_user) |> length == 0 + + FollowBotPolicy.filter(message) + + assert User.get_follow_requests(remote_user) |> length == 0 + end + + test "does not follow local users" do + bot = insert(:user, actor_type: "Service") + local_user = insert(:user, local: true) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [local_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "Hi I'm a local user", + "type" => "Note", + "attributedTo" => local_user.ap_id, + "inReplyTo" => nil + }, + "actor" => local_user.ap_id + } + + refute User.following?(bot, local_user) + + assert User.get_follow_requests(local_user) |> length == 0 + + FollowBotPolicy.filter(message) + + assert User.get_follow_requests(local_user) |> length == 0 + end + + test "does not follow users requiring follower approval" do + bot = insert(:user, actor_type: "Service") + remote_user = insert(:user, %{local: false, is_locked: true}) + clear_config([:mrf_follow_bot, :follower_nickname], bot.nickname) + + message = %{ + "@context" => "https://www.w3.org/ns/activitystreams", + "to" => [remote_user.follower_address], + "cc" => ["https://www.w3.org/ns/activitystreams#Public"], + "type" => "Create", + "object" => %{ + "content" => "I don't like randos following me", + "type" => "Note", + "attributedTo" => remote_user.ap_id, + "inReplyTo" => nil + }, + "actor" => remote_user.ap_id + } + + refute User.following?(bot, remote_user) + + assert User.get_follow_requests(remote_user) |> length == 0 + + FollowBotPolicy.filter(message) + + assert User.get_follow_requests(remote_user) |> length == 0 + end + end +end diff --git a/test/pleroma/web/activity_pub/mrf/hashtag_policy_test.exs b/test/pleroma/web/activity_pub/mrf/hashtag_policy_test.exs new file mode 100644 index 000000000..13415bb79 --- /dev/null +++ b/test/pleroma/web/activity_pub/mrf/hashtag_policy_test.exs @@ -0,0 +1,31 @@ +# Pleroma: A lightweight social networking server +# Copyright © 2017-2021 Pleroma Authors +# SPDX-License-Identifier: AGPL-3.0-only + +defmodule Pleroma.Web.ActivityPub.MRF.HashtagPolicyTest do + use Oban.Testing, repo: Pleroma.Repo + use Pleroma.DataCase + + alias Pleroma.Web.ActivityPub.Transmogrifier + alias Pleroma.Web.CommonAPI + + import Pleroma.Factory + + test "it sets the sensitive property with relevant hashtags" do + user = insert(:user) + + {:ok, activity} = CommonAPI.post(user, %{status: "#nsfw hey"}) + {:ok, modified} = Transmogrifier.prepare_outgoing(activity.data) + + assert modified["object"]["sensitive"] + end + + test "it doesn't sets the sensitive property with irrelevant hashtags" do + user = insert(:user) + + {:ok, 
activity} = CommonAPI.post(user, %{status: "#cofe hey"}) + {:ok, modified} = Transmogrifier.prepare_outgoing(activity.data) + + refute modified["object"]["sensitive"] + end +end diff --git a/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs b/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs index f48e5b39b..5c0aff26e 100644 --- a/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs +++ b/test/pleroma/web/activity_pub/mrf/simple_policy_test.exs @@ -75,10 +75,7 @@ test "has a matching host" do local_message = build_local_message() assert SimplePolicy.filter(media_message) == - {:ok, - media_message - |> put_in(["object", "tag"], ["foo", "nsfw"]) - |> put_in(["object", "sensitive"], true)} + {:ok, put_in(media_message, ["object", "sensitive"], true)} assert SimplePolicy.filter(local_message) == {:ok, local_message} end @@ -89,10 +86,7 @@ test "match with wildcard domain" do local_message = build_local_message() assert SimplePolicy.filter(media_message) == - {:ok, - media_message - |> put_in(["object", "tag"], ["foo", "nsfw"]) - |> put_in(["object", "sensitive"], true)} + {:ok, put_in(media_message, ["object", "sensitive"], true)} assert SimplePolicy.filter(local_message) == {:ok, local_message} end diff --git a/test/pleroma/web/activity_pub/mrf/tag_policy_test.exs b/test/pleroma/web/activity_pub/mrf/tag_policy_test.exs index 66e98b7ee..faaadff79 100644 --- a/test/pleroma/web/activity_pub/mrf/tag_policy_test.exs +++ b/test/pleroma/web/activity_pub/mrf/tag_policy_test.exs @@ -114,7 +114,7 @@ test "Mark as sensitive on presence of attachments" do except_message = %{ "actor" => actor.ap_id, "type" => "Create", - "object" => %{"tag" => ["test", "nsfw"], "attachment" => ["file1"], "sensitive" => true} + "object" => %{"tag" => ["test"], "attachment" => ["file1"], "sensitive" => true} } assert TagPolicy.filter(message) == {:ok, except_message} diff --git a/test/pleroma/web/activity_pub/mrf_test.exs b/test/pleroma/web/activity_pub/mrf_test.exs index 7c1eef7e0..61d308b97 100644 --- a/test/pleroma/web/activity_pub/mrf_test.exs +++ b/test/pleroma/web/activity_pub/mrf_test.exs @@ -68,7 +68,12 @@ test "it works as expected with noop policy" do clear_config([:mrf, :policies], [Pleroma.Web.ActivityPub.MRF.NoOpPolicy]) expected = %{ - mrf_policies: ["NoOpPolicy"], + mrf_policies: ["NoOpPolicy", "HashtagPolicy"], + mrf_hashtag: %{ + federated_timeline_removal: [], + reject: [], + sensitive: ["nsfw"] + }, exclusions: false } @@ -79,8 +84,13 @@ test "it works as expected with mock policy" do clear_config([:mrf, :policies], [MRFModuleMock]) expected = %{ - mrf_policies: ["MRFModuleMock"], + mrf_policies: ["MRFModuleMock", "HashtagPolicy"], mrf_module_mock: "some config data", + mrf_hashtag: %{ + federated_timeline_removal: [], + reject: [], + sensitive: ["nsfw"] + }, exclusions: false } diff --git a/test/pleroma/web/activity_pub/pipeline_test.exs b/test/pleroma/web/activity_pub/pipeline_test.exs index 52fa933ee..e606fa3d1 100644 --- a/test/pleroma/web/activity_pub/pipeline_test.exs +++ b/test/pleroma/web/activity_pub/pipeline_test.exs @@ -25,9 +25,6 @@ defmodule Pleroma.Web.ActivityPub.PipelineTest do MRFMock |> expect(:pipeline_filter, fn o, m -> {:ok, o, m} end) - ActivityPubMock - |> expect(:persist, fn o, m -> {:ok, o, m} end) - SideEffectsMock |> expect(:handle, fn o, m -> {:ok, o, m} end) |> expect(:handle_after_transaction, fn m -> m end) @@ -42,6 +39,9 @@ test "when given an `object_data` in meta, Federation will receive a the origina activity_with_object = %{activity | data: 
Map.put(activity.data, "object", object)} + ActivityPubMock + |> expect(:persist, fn _, m -> {:ok, activity, m} end) + FederatorMock |> expect(:publish, fn ^activity_with_object -> :ok end) @@ -50,7 +50,7 @@ test "when given an `object_data` in meta, Federation will receive a the origina assert {:ok, ^activity, ^meta} = Pleroma.Web.ActivityPub.Pipeline.common_pipeline( - activity, + activity.data, meta ) end @@ -59,6 +59,9 @@ test "it goes through validation, filtering, persisting, side effects and federa activity = insert(:note_activity) meta = [local: true] + ActivityPubMock + |> expect(:persist, fn _, m -> {:ok, activity, m} end) + FederatorMock |> expect(:publish, fn ^activity -> :ok end) @@ -66,29 +69,35 @@ test "it goes through validation, filtering, persisting, side effects and federa |> expect(:get, fn [:instance, :federating] -> true end) assert {:ok, ^activity, ^meta} = - Pleroma.Web.ActivityPub.Pipeline.common_pipeline(activity, meta) + Pleroma.Web.ActivityPub.Pipeline.common_pipeline(activity.data, meta) end test "it goes through validation, filtering, persisting, side effects without federation for remote activities" do activity = insert(:note_activity) meta = [local: false] + ActivityPubMock + |> expect(:persist, fn _, m -> {:ok, activity, m} end) + ConfigMock |> expect(:get, fn [:instance, :federating] -> true end) assert {:ok, ^activity, ^meta} = - Pleroma.Web.ActivityPub.Pipeline.common_pipeline(activity, meta) + Pleroma.Web.ActivityPub.Pipeline.common_pipeline(activity.data, meta) end test "it goes through validation, filtering, persisting, side effects without federation for local activities if federation is deactivated" do activity = insert(:note_activity) meta = [local: true] + ActivityPubMock + |> expect(:persist, fn _, m -> {:ok, activity, m} end) + ConfigMock |> expect(:get, fn [:instance, :federating] -> false end) assert {:ok, ^activity, ^meta} = - Pleroma.Web.ActivityPub.Pipeline.common_pipeline(activity, meta) + Pleroma.Web.ActivityPub.Pipeline.common_pipeline(activity.data, meta) end end end diff --git a/test/pleroma/web/activity_pub/transmogrifier/add_remove_handling_test.exs b/test/pleroma/web/activity_pub/transmogrifier/add_remove_handling_test.exs new file mode 100644 index 000000000..fc7757125 --- /dev/null +++ b/test/pleroma/web/activity_pub/transmogrifier/add_remove_handling_test.exs @@ -0,0 +1,172 @@ +defmodule Pleroma.Web.ActivityPub.Transmogrifier.AddRemoveHandlingTest do + use Oban.Testing, repo: Pleroma.Repo + use Pleroma.DataCase, async: true + + require Pleroma.Constants + + import Pleroma.Factory + + alias Pleroma.User + alias Pleroma.Web.ActivityPub.Transmogrifier + + test "it accepts Add/Remove activities" do + user = + "test/fixtures/users_mock/user.json" + |> File.read!() + |> String.replace("{{nickname}}", "lain") + + object_id = "c61d6733-e256-4fe1-ab13-1e369789423f" + + object = + "test/fixtures/statuses/note.json" + |> File.read!() + |> String.replace("{{nickname}}", "lain") + |> String.replace("{{object_id}}", object_id) + + object_url = "https://example.com/objects/#{object_id}" + + actor = "https://example.com/users/lain" + + Tesla.Mock.mock(fn + %{ + method: :get, + url: ^actor + } -> + %Tesla.Env{ + status: 200, + body: user, + headers: [{"content-type", "application/activity+json"}] + } + + %{ + method: :get, + url: ^object_url + } -> + %Tesla.Env{ + status: 200, + body: object, + headers: [{"content-type", "application/activity+json"}] + } + + %{method: :get, url: "https://example.com/users/lain/collections/featured"} -> + 
%Tesla.Env{ + status: 200, + body: + "test/fixtures/users_mock/masto_featured.json" + |> File.read!() + |> String.replace("{{domain}}", "example.com") + |> String.replace("{{nickname}}", "lain"), + headers: [{"content-type", "application/activity+json"}] + } + end) + + message = %{ + "id" => "https://example.com/objects/d61d6733-e256-4fe1-ab13-1e369789423f", + "actor" => actor, + "object" => object_url, + "target" => "https://example.com/users/lain/collections/featured", + "type" => "Add", + "to" => [Pleroma.Constants.as_public()], + "cc" => ["https://example.com/users/lain/followers"] + } + + assert {:ok, activity} = Transmogrifier.handle_incoming(message) + assert activity.data == message + user = User.get_cached_by_ap_id(actor) + assert user.pinned_objects[object_url] + + remove = %{ + "id" => "http://localhost:400/objects/d61d6733-e256-4fe1-ab13-1e369789423d", + "actor" => actor, + "object" => object_url, + "target" => "https://example.com/users/lain/collections/featured", + "type" => "Remove", + "to" => [Pleroma.Constants.as_public()], + "cc" => ["https://example.com/users/lain/followers"] + } + + assert {:ok, activity} = Transmogrifier.handle_incoming(remove) + assert activity.data == remove + + user = refresh_record(user) + refute user.pinned_objects[object_url] + end + + test "Add/Remove activities for remote users without featured address" do + user = insert(:user, local: false, domain: "example.com") + + user = + user + |> Ecto.Changeset.change(featured_address: nil) + |> Repo.update!() + + %{host: host} = URI.parse(user.ap_id) + + user_data = + "test/fixtures/users_mock/user.json" + |> File.read!() + |> String.replace("{{nickname}}", user.nickname) + + object_id = "c61d6733-e256-4fe1-ab13-1e369789423f" + + object = + "test/fixtures/statuses/note.json" + |> File.read!() + |> String.replace("{{nickname}}", user.nickname) + |> String.replace("{{object_id}}", object_id) + + object_url = "https://#{host}/objects/#{object_id}" + + actor = "https://#{host}/users/#{user.nickname}" + + featured = "https://#{host}/users/#{user.nickname}/collections/featured" + + Tesla.Mock.mock(fn + %{ + method: :get, + url: ^actor + } -> + %Tesla.Env{ + status: 200, + body: user_data, + headers: [{"content-type", "application/activity+json"}] + } + + %{ + method: :get, + url: ^object_url + } -> + %Tesla.Env{ + status: 200, + body: object, + headers: [{"content-type", "application/activity+json"}] + } + + %{method: :get, url: ^featured} -> + %Tesla.Env{ + status: 200, + body: + "test/fixtures/users_mock/masto_featured.json" + |> File.read!() + |> String.replace("{{domain}}", "#{host}") + |> String.replace("{{nickname}}", user.nickname), + headers: [{"content-type", "application/activity+json"}] + } + end) + + message = %{ + "id" => "https://#{host}/objects/d61d6733-e256-4fe1-ab13-1e369789423f", + "actor" => actor, + "object" => object_url, + "target" => "https://#{host}/users/#{user.nickname}/collections/featured", + "type" => "Add", + "to" => [Pleroma.Constants.as_public()], + "cc" => ["https://#{host}/users/#{user.nickname}/followers"] + } + + assert {:ok, activity} = Transmogrifier.handle_incoming(message) + assert activity.data == message + user = User.get_cached_by_ap_id(actor) + assert user.pinned_objects[object_url] + end +end diff --git a/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs b/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs index 31586abc9..deb956410 100644 --- a/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs +++ 
b/test/pleroma/web/activity_pub/transmogrifier/note_handling_test.exs @@ -39,7 +39,8 @@ test "it works for incoming notices with tag not being an array (kroeg)" do {:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(data) object = Object.normalize(data["object"], fetch: false) - assert "test" in object.data["tag"] + assert "test" in Object.tags(object) + assert Object.hashtags(object) == ["test"] end test "it cleans up incoming notices which are not really DMs" do @@ -220,7 +221,8 @@ test "it works for incoming notices with hashtags" do {:ok, %Activity{data: data, local: false}} = Transmogrifier.handle_incoming(data) object = Object.normalize(data["object"], fetch: false) - assert Enum.at(object.data["tag"], 2) == "moo" + assert Enum.at(Object.tags(object), 2) == "moo" + assert Object.hashtags(object) == ["moo"] end test "it works for incoming notices with contentMap" do diff --git a/test/pleroma/web/activity_pub/transmogrifier_test.exs b/test/pleroma/web/activity_pub/transmogrifier_test.exs index 211e535a5..4c3fcb44a 100644 --- a/test/pleroma/web/activity_pub/transmogrifier_test.exs +++ b/test/pleroma/web/activity_pub/transmogrifier_test.exs @@ -153,15 +153,6 @@ test "it turns mentions into tags" do end end - test "it adds the sensitive property" do - user = insert(:user) - - {:ok, activity} = CommonAPI.post(user, %{status: "#nsfw hey"}) - {:ok, modified} = Transmogrifier.prepare_outgoing(activity.data) - - assert modified["object"]["sensitive"] - end - test "it adds the json-ld context and the conversation property" do user = insert(:user) diff --git a/test/pleroma/web/admin_api/controllers/config_controller_test.exs b/test/pleroma/web/admin_api/controllers/config_controller_test.exs index 578a4c914..c39c1b1e1 100644 --- a/test/pleroma/web/admin_api/controllers/config_controller_test.exs +++ b/test/pleroma/web/admin_api/controllers/config_controller_test.exs @@ -1410,6 +1410,82 @@ test "enables the welcome messages", %{conn: conn} do "need_reboot" => false } end + + test "custom instance thumbnail", %{conn: conn} do + clear_config([:instance]) + + params = %{ + "group" => ":pleroma", + "key" => ":instance", + "value" => [ + %{ + "tuple" => [ + ":instance_thumbnail", + "https://example.com/media/new_thumbnail.jpg" + ] + } + ] + } + + res = + assert conn + |> put_req_header("content-type", "application/json") + |> post("/api/pleroma/admin/config", %{"configs" => [params]}) + |> json_response_and_validate_schema(200) + + assert res == %{ + "configs" => [ + %{ + "db" => [":instance_thumbnail"], + "group" => ":pleroma", + "key" => ":instance", + "value" => params["value"] + } + ], + "need_reboot" => false + } + + _res = + assert conn + |> get("/api/v1/instance") + |> json_response_and_validate_schema(200) + + assert res = %{"thumbnail" => "https://example.com/media/new_thumbnail.jpg"} + end + + test "Concurrent Limiter", %{conn: conn} do + clear_config([ConcurrentLimiter]) + + params = %{ + "group" => ":pleroma", + "key" => "ConcurrentLimiter", + "value" => [ + %{ + "tuple" => [ + "Pleroma.Web.RichMedia.Helpers", + [ + %{"tuple" => [":max_running", 6]}, + %{"tuple" => [":max_waiting", 6]} + ] + ] + }, + %{ + "tuple" => [ + "Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy", + [ + %{"tuple" => [":max_running", 7]}, + %{"tuple" => [":max_waiting", 7]} + ] + ] + } + ] + } + + assert conn + |> put_req_header("content-type", "application/json") + |> post("/api/pleroma/admin/config", %{"configs" => [params]}) + |> json_response_and_validate_schema(200) + end end describe 
"GET /api/pleroma/admin/config/descriptions" do diff --git a/test/pleroma/web/admin_api/controllers/user_controller_test.exs b/test/pleroma/web/admin_api/controllers/user_controller_test.exs index beb8a5d58..31319b5e5 100644 --- a/test/pleroma/web/admin_api/controllers/user_controller_test.exs +++ b/test/pleroma/web/admin_api/controllers/user_controller_test.exs @@ -44,7 +44,7 @@ test "with valid `admin_token` query parameter, skips OAuth scopes check" do conn = get(build_conn(), "/api/pleroma/admin/users/#{user.nickname}?admin_token=password123") - assert json_response(conn, 200) + assert json_response_and_validate_schema(conn, 200) end test "GET /api/pleroma/admin/users/:nickname requires admin:read:accounts or broader scope", @@ -67,7 +67,7 @@ test "GET /api/pleroma/admin/users/:nickname requires admin:read:accounts or bro |> assign(:token, good_token) |> get(url) - assert json_response(conn, 200) + assert json_response_and_validate_schema(conn, 200) end for good_token <- [good_token1, good_token2, good_token3] do @@ -87,7 +87,7 @@ test "GET /api/pleroma/admin/users/:nickname requires admin:read:accounts or bro |> assign(:token, bad_token) |> get(url) - assert json_response(conn, :forbidden) + assert json_response_and_validate_schema(conn, :forbidden) end end @@ -131,7 +131,7 @@ test "single user", %{admin: admin, conn: conn} do assert ModerationLog.get_log_entry_message(log_entry) == "@#{admin.nickname} deleted users: @#{user.nickname}" - assert json_response(conn, 200) == [user.nickname] + assert json_response_and_validate_schema(conn, 200) == [user.nickname] user = Repo.get(User, user.id) refute user.is_active @@ -152,28 +152,30 @@ test "multiple users", %{admin: admin, conn: conn} do user_one = insert(:user) user_two = insert(:user) - conn = + response = conn |> put_req_header("accept", "application/json") + |> put_req_header("content-type", "application/json") |> delete("/api/pleroma/admin/users", %{ nicknames: [user_one.nickname, user_two.nickname] }) + |> json_response_and_validate_schema(200) log_entry = Repo.one(ModerationLog) assert ModerationLog.get_log_entry_message(log_entry) == "@#{admin.nickname} deleted users: @#{user_one.nickname}, @#{user_two.nickname}" - response = json_response(conn, 200) assert response -- [user_one.nickname, user_two.nickname] == [] end end describe "/api/pleroma/admin/users" do test "Create", %{conn: conn} do - conn = + response = conn |> put_req_header("accept", "application/json") + |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/users", %{ "users" => [ %{ @@ -188,8 +190,9 @@ test "Create", %{conn: conn} do } ] }) + |> json_response_and_validate_schema(200) + |> Enum.map(&Map.get(&1, "type")) - response = json_response(conn, 200) |> Enum.map(&Map.get(&1, "type")) assert response == ["success", "success"] log_entry = Repo.one(ModerationLog) @@ -203,6 +206,7 @@ test "Cannot create user with existing email", %{conn: conn} do conn = conn |> put_req_header("accept", "application/json") + |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/users", %{ "users" => [ %{ @@ -213,7 +217,7 @@ test "Cannot create user with existing email", %{conn: conn} do ] }) - assert json_response(conn, 409) == [ + assert json_response_and_validate_schema(conn, 409) == [ %{ "code" => 409, "data" => %{ @@ -232,6 +236,7 @@ test "Cannot create user with existing nickname", %{conn: conn} do conn = conn |> put_req_header("accept", "application/json") + |> put_req_header("content-type", "application/json") |> 
post("/api/pleroma/admin/users", %{ "users" => [ %{ @@ -242,7 +247,7 @@ test "Cannot create user with existing nickname", %{conn: conn} do ] }) - assert json_response(conn, 409) == [ + assert json_response_and_validate_schema(conn, 409) == [ %{ "code" => 409, "data" => %{ @@ -261,6 +266,7 @@ test "Multiple user creation works in transaction", %{conn: conn} do conn = conn |> put_req_header("accept", "application/json") + |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/users", %{ "users" => [ %{ @@ -276,7 +282,7 @@ test "Multiple user creation works in transaction", %{conn: conn} do ] }) - assert json_response(conn, 409) == [ + assert json_response_and_validate_schema(conn, 409) == [ %{ "code" => 409, "data" => %{ @@ -307,7 +313,7 @@ test "Show", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users/#{user.nickname}") - assert user_response(user) == json_response(conn, 200) + assert user_response(user) == json_response_and_validate_schema(conn, 200) end test "when the user doesn't exist", %{conn: conn} do @@ -315,7 +321,7 @@ test "when the user doesn't exist", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users/#{user.nickname}") - assert %{"error" => "Not found"} == json_response(conn, 404) + assert %{"error" => "Not found"} == json_response_and_validate_schema(conn, 404) end end @@ -326,6 +332,7 @@ test "allows to force-follow another user", %{admin: admin, conn: conn} do conn |> put_req_header("accept", "application/json") + |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/users/follow", %{ "follower" => follower.nickname, "followed" => user.nickname @@ -352,6 +359,7 @@ test "allows to force-unfollow another user", %{admin: admin, conn: conn} do conn |> put_req_header("accept", "application/json") + |> put_req_header("content-type", "application/json") |> post("/api/pleroma/admin/users/unfollow", %{ "follower" => follower.nickname, "followed" => user.nickname @@ -395,7 +403,7 @@ test "renders users array for the first page", %{conn: conn, admin: admin} do ] |> Enum.sort_by(& &1["nickname"]) - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 3, "page_size" => 50, "users" => users @@ -410,7 +418,7 @@ test "pagination works correctly with service users", %{conn: conn} do assert %{"count" => 26, "page_size" => 10, "users" => users1} = conn |> get("/api/pleroma/admin/users?page=1&filters=", %{page_size: "10"}) - |> json_response(200) + |> json_response_and_validate_schema(200) assert Enum.count(users1) == 10 assert service1 not in users1 @@ -418,7 +426,7 @@ test "pagination works correctly with service users", %{conn: conn} do assert %{"count" => 26, "page_size" => 10, "users" => users2} = conn |> get("/api/pleroma/admin/users?page=2&filters=", %{page_size: "10"}) - |> json_response(200) + |> json_response_and_validate_schema(200) assert Enum.count(users2) == 10 assert service1 not in users2 @@ -426,7 +434,7 @@ test "pagination works correctly with service users", %{conn: conn} do assert %{"count" => 26, "page_size" => 10, "users" => users3} = conn |> get("/api/pleroma/admin/users?page=3&filters=", %{page_size: "10"}) - |> json_response(200) + |> json_response_and_validate_schema(200) assert Enum.count(users3) == 6 assert service1 not in users3 @@ -437,7 +445,7 @@ test "renders empty array for the second page", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users?page=2") - assert json_response(conn, 200) == %{ + assert 
json_response_and_validate_schema(conn, 200) == %{ "count" => 2, "page_size" => 50, "users" => [] @@ -449,7 +457,7 @@ test "regular search", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users?query=bo") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [user_response(user, %{"local" => true})] @@ -462,7 +470,7 @@ test "search by domain", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users?query=domain.com") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [user_response(user)] @@ -475,7 +483,7 @@ test "search by full nickname", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users?query=nickname@domain.com") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [user_response(user)] @@ -488,7 +496,7 @@ test "search by display name", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users?name=display") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [user_response(user)] @@ -501,7 +509,7 @@ test "search by email", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users?email=email@example.com") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [user_response(user)] @@ -514,7 +522,7 @@ test "regular search with page size", %{conn: conn} do conn1 = get(conn, "/api/pleroma/admin/users?query=a&page_size=1&page=1") - assert json_response(conn1, 200) == %{ + assert json_response_and_validate_schema(conn1, 200) == %{ "count" => 2, "page_size" => 1, "users" => [user_response(user)] @@ -522,7 +530,7 @@ test "regular search with page size", %{conn: conn} do conn2 = get(conn, "/api/pleroma/admin/users?query=a&page_size=1&page=2") - assert json_response(conn2, 200) == %{ + assert json_response_and_validate_schema(conn2, 200) == %{ "count" => 2, "page_size" => 1, "users" => [user_response(user2)] @@ -542,7 +550,7 @@ test "only local users" do |> assign(:token, token) |> get("/api/pleroma/admin/users?query=bo&filters=local") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [user_response(user)] @@ -570,7 +578,7 @@ test "only local users with no query", %{conn: conn, admin: old_admin} do ] |> Enum.sort_by(& &1["nickname"]) - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 3, "page_size" => 50, "users" => users @@ -587,7 +595,7 @@ test "only unconfirmed users", %{conn: conn} do result = conn |> get("/api/pleroma/admin/users?filters=unconfirmed") - |> json_response(200) + |> json_response_and_validate_schema(200) users = Enum.map([old_user, sad_user], fn user -> @@ -620,7 +628,7 @@ test "only unapproved users", %{conn: conn} do ) ] - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => users @@ -647,7 +655,7 @@ test "load only admins", %{conn: conn, admin: admin} do ] |> Enum.sort_by(& &1["nickname"]) - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 2, "page_size" => 50, "users" => users @@ -661,7 +669,7 @@ test 
"load only moderators", %{conn: conn} do conn = get(conn, "/api/pleroma/admin/users?filters=is_moderator") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [ @@ -682,8 +690,8 @@ test "load users with actor_type is Person", %{admin: admin, conn: conn} do response = conn - |> get(user_path(conn, :list), %{actor_types: ["Person"]}) - |> json_response(200) + |> get(user_path(conn, :index), %{actor_types: ["Person"]}) + |> json_response_and_validate_schema(200) users = [ @@ -705,8 +713,8 @@ test "load users with actor_type is Person and Service", %{admin: admin, conn: c response = conn - |> get(user_path(conn, :list), %{actor_types: ["Person", "Service"]}) - |> json_response(200) + |> get(user_path(conn, :index), %{actor_types: ["Person", "Service"]}) + |> json_response_and_validate_schema(200) users = [ @@ -728,8 +736,8 @@ test "load users with actor_type is Service", %{conn: conn} do response = conn - |> get(user_path(conn, :list), %{actor_types: ["Service"]}) - |> json_response(200) + |> get(user_path(conn, :index), %{actor_types: ["Service"]}) + |> json_response_and_validate_schema(200) users = [user_response(user_service, %{"actor_type" => "Service"})] @@ -751,7 +759,7 @@ test "load users with tags list", %{conn: conn} do ] |> Enum.sort_by(& &1["nickname"]) - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 2, "page_size" => 50, "users" => users @@ -776,7 +784,7 @@ test "`active` filters out users pending approval", %{token: token} do %{"id" => ^admin_id}, %{"id" => ^user_id} ] - } = json_response(conn, 200) + } = json_response_and_validate_schema(conn, 200) end test "it works with multiple filters" do @@ -793,7 +801,7 @@ test "it works with multiple filters" do |> assign(:token, token) |> get("/api/pleroma/admin/users?filters=deactivated,external") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [user_response(user)] @@ -805,7 +813,7 @@ test "it omits relay user", %{admin: admin, conn: conn} do conn = get(conn, "/api/pleroma/admin/users") - assert json_response(conn, 200) == %{ + assert json_response_and_validate_schema(conn, 200) == %{ "count" => 1, "page_size" => 50, "users" => [ @@ -820,13 +828,14 @@ test "PATCH /api/pleroma/admin/users/activate", %{admin: admin, conn: conn} do user_two = insert(:user, is_active: false) conn = - patch( - conn, + conn + |> put_req_header("content-type", "application/json") + |> patch( "/api/pleroma/admin/users/activate", %{nicknames: [user_one.nickname, user_two.nickname]} ) - response = json_response(conn, 200) + response = json_response_and_validate_schema(conn, 200) assert Enum.map(response["users"], & &1["is_active"]) == [true, true] log_entry = Repo.one(ModerationLog) @@ -840,13 +849,14 @@ test "PATCH /api/pleroma/admin/users/deactivate", %{admin: admin, conn: conn} do user_two = insert(:user, is_active: true) conn = - patch( - conn, + conn + |> put_req_header("content-type", "application/json") + |> patch( "/api/pleroma/admin/users/deactivate", %{nicknames: [user_one.nickname, user_two.nickname]} ) - response = json_response(conn, 200) + response = json_response_and_validate_schema(conn, 200) assert Enum.map(response["users"], & &1["is_active"]) == [false, false] log_entry = Repo.one(ModerationLog) @@ -860,13 +870,14 @@ test "PATCH /api/pleroma/admin/users/approve", %{admin: admin, conn: conn} do 
user_two = insert(:user, is_approved: false) conn = - patch( - conn, + conn + |> put_req_header("content-type", "application/json") + |> patch( "/api/pleroma/admin/users/approve", %{nicknames: [user_one.nickname, user_two.nickname]} ) - response = json_response(conn, 200) + response = json_response_and_validate_schema(conn, 200) assert Enum.map(response["users"], & &1["is_approved"]) == [true, true] log_entry = Repo.one(ModerationLog) @@ -878,9 +889,12 @@ test "PATCH /api/pleroma/admin/users/approve", %{admin: admin, conn: conn} do test "PATCH /api/pleroma/admin/users/:nickname/toggle_activation", %{admin: admin, conn: conn} do user = insert(:user) - conn = patch(conn, "/api/pleroma/admin/users/#{user.nickname}/toggle_activation") + conn = + conn + |> put_req_header("content-type", "application/json") + |> patch("/api/pleroma/admin/users/#{user.nickname}/toggle_activation") - assert json_response(conn, 200) == + assert json_response_and_validate_schema(conn, 200) == user_response( user, %{"is_active" => !user.is_active} diff --git a/test/pleroma/web/common_api/utils_test.exs b/test/pleroma/web/common_api/utils_test.exs index f2043e152..b0e567ff0 100644 --- a/test/pleroma/web/common_api/utils_test.exs +++ b/test/pleroma/web/common_api/utils_test.exs @@ -168,6 +168,123 @@ test "works for text/markdown with mentions" do end end + describe "format_input/3 with markdown" do + test "Paragraph" do + code = ~s[Hello\n\nWorld!] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == "

    <p>Hello</p><p>World!</p>" + end
    + + test "links" do + code = "https://en.wikipedia.org/wiki/Animal_Crossing_(video_game)" + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<p><a href="#{code}">#{code}</a></p>]
    + + code = "https://github.com/pragdave/earmark/" + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<p><a href="#{code}">#{code}</a></p>] + end
    + + test "link with local mention" do + insert(:user, %{nickname: "lain"}) + + code = "https://example.com/@lain" + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<p><a href="#{code}">#{code}</a></p>
    ] + end
    + + test "local mentions" do + mario = insert(:user, %{nickname: "mario"}) + luigi = insert(:user, %{nickname: "luigi"}) + + code = "@mario @luigi yo what's up?" + {result, _, []} = Utils.format_input(code, "text/markdown") + + assert result == + ~s[<p><span class="h-card"><a class="u-url mention" data-user="#{mario.id}" href="#{mario.ap_id}" rel="ugc">@<span>mario</span></a></span> <span class="h-card"><a class="u-url mention" data-user="#{luigi.id}" href="#{luigi.ap_id}" rel="ugc">@<span>luigi</span></a></span> yo what’s up?</p>] + end
    + + test "remote mentions" do + mario = insert(:user, %{nickname: "mario@mushroom.world", local: false}) + luigi = insert(:user, %{nickname: "luigi@mushroom.world", local: false}) + + code = "@mario@mushroom.world @luigi@mushroom.world yo what's up?" + {result, _, []} = Utils.format_input(code, "text/markdown") + + assert result == + ~s[<p><span class="h-card"><a class="u-url mention" data-user="#{mario.id}" href="#{mario.ap_id}" rel="ugc">@<span>mario</span></a></span> <span class="h-card"><a class="u-url mention" data-user="#{luigi.id}" href="#{luigi.ap_id}" rel="ugc">@<span>luigi</span></a></span> yo what’s up?</p>
    ] + end + + test "raw HTML" do + code = ~s[OwO] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[OwO] + end + + test "rulers" do + code = ~s[before\n\n-----\n\nafter] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == "

<p>before</p><hr/><p>after</p>" + end + + test "blockquote" do + code = ~s[> whoms't are you quoting?] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == "<blockquote><p>whoms’t are you quoting?</p></blockquote>" + end + + test "code" do + code = ~s[`mix`] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[

<p><code class="inline">mix</code></p>] + + code = ~s[``mix``] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<p><code class="inline">mix</code></p>] + + code = ~s[```\nputs "Hello World"\n```] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<pre><code>puts "Hello World"</code></pre>] + + code = ~s[    <div>\n    </div>] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<pre><code>&lt;div&gt;\n&lt;/div&gt;</code></pre>] + end + + test "lists" do + code = ~s[- one\n- two\n- three\n- four] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == "
<ul><li>one</li><li>two</li><li>three</li><li>four</li></ul>" + + code = ~s[1. one\n2. two\n3. three\n4. four\n] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == "<ol><li>one</li><li>two</li><li>three</li><li>four</li></ol>" + end + + test "delegated renderers" do + code = ~s[*aaaa~*] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[

<p><em>aaaa~</em></p>] + + code = ~s[**aaaa~**] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<p><strong>aaaa~</strong></p>] + + # strikethrough + code = ~s[~~aaaa~~~] + {result, [], []} = Utils.format_input(code, "text/markdown") + assert result == ~s[<p><del>aaaa</del>~</p>

    ] + end + end + describe "context_to_conversation_id" do test "creates a mapping object" do conversation_id = Utils.context_to_conversation_id("random context") diff --git a/test/pleroma/web/common_api_test.exs b/test/pleroma/web/common_api_test.exs index adfe58def..be94c93c2 100644 --- a/test/pleroma/web/common_api_test.exs +++ b/test/pleroma/web/common_api_test.exs @@ -25,6 +25,11 @@ defmodule Pleroma.Web.CommonAPITest do require Pleroma.Constants + setup_all do + Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end) + :ok + end + setup do: clear_config([:instance, :safe_dm_mentions]) setup do: clear_config([:instance, :limit]) setup do: clear_config([:instance, :max_pinned_statuses]) @@ -493,7 +498,7 @@ test "it de-duplicates tags" do object = Object.normalize(activity, fetch: false) - assert object.data["tag"] == ["2hu"] + assert Object.tags(object) == ["2hu"] end test "it adds emoji in the object" do @@ -517,6 +522,27 @@ test "it adds an emoji on an external site" do assert url == "#{Pleroma.Web.base_url()}/emoji/blank.png" end + test "it copies emoji from the subject of the parent post" do + %Object{} = + object = + Object.normalize("https://patch.cx/objects/a399c28e-c821-4820-bc3e-4afeb044c16f", + fetch: true + ) + + activity = Activity.get_create_by_object_ap_id(object.data["id"]) + user = insert(:user) + + {:ok, reply_activity} = + CommonAPI.post(user, %{ + in_reply_to_id: activity.id, + status: ":joker_disapprove:", + spoiler_text: ":joker_smile:" + }) + + assert Object.normalize(reply_activity).data["emoji"][":joker_smile:"] + refute Object.normalize(reply_activity).data["emoji"][":joker_disapprove:"] + end + test "deactivated users can't post" do user = insert(:user, is_active: false) assert {:error, _} = CommonAPI.post(user, %{status: "ye"}) @@ -571,7 +597,7 @@ test "it filters out obviously bad tags when accepting a post as Markdown" do object = Object.normalize(activity, fetch: false) - assert object.data["content"] == "

<p><b>2hu</b></p>alert('xss')" + assert object.data["content"] == "<p><b>2hu</b></p>

    " assert object.data["source"] == post end @@ -801,13 +827,17 @@ test "favoriting a status twice returns ok, but without the like activity" do [user: user, activity: activity] end + test "activity not found error", %{user: user} do + assert {:error, :not_found} = CommonAPI.pin("id", user) + end + test "pin status", %{user: user, activity: activity} do assert {:ok, ^activity} = CommonAPI.pin(activity.id, user) - id = activity.id + %{data: %{"id" => object_id}} = Object.normalize(activity) user = refresh_record(user) - assert %User{pinned_activities: [^id]} = user + assert user.pinned_objects |> Map.keys() == [object_id] end test "pin poll", %{user: user} do @@ -819,10 +849,11 @@ test "pin poll", %{user: user} do assert {:ok, ^activity} = CommonAPI.pin(activity.id, user) - id = activity.id + %{data: %{"id" => object_id}} = Object.normalize(activity) + user = refresh_record(user) - assert %User{pinned_activities: [^id]} = user + assert user.pinned_objects |> Map.keys() == [object_id] end test "unlisted statuses can be pinned", %{user: user} do @@ -833,7 +864,7 @@ test "unlisted statuses can be pinned", %{user: user} do test "only self-authored can be pinned", %{activity: activity} do user = insert(:user) - assert {:error, "Could not pin"} = CommonAPI.pin(activity.id, user) + assert {:error, :ownership_error} = CommonAPI.pin(activity.id, user) end test "max pinned statuses", %{user: user, activity: activity_one} do @@ -843,8 +874,12 @@ test "max pinned statuses", %{user: user, activity: activity_one} do user = refresh_record(user) - assert {:error, "You have already pinned the maximum number of statuses"} = - CommonAPI.pin(activity_two.id, user) + assert {:error, :pinned_statuses_limit_reached} = CommonAPI.pin(activity_two.id, user) + end + + test "only public can be pinned", %{user: user} do + {:ok, activity} = CommonAPI.post(user, %{status: "private status", visibility: "private"}) + {:error, :visibility_error} = CommonAPI.pin(activity.id, user) end test "unpin status", %{user: user, activity: activity} do @@ -858,7 +893,7 @@ test "unpin status", %{user: user, activity: activity} do user = refresh_record(user) - assert %User{pinned_activities: []} = user + assert user.pinned_objects == %{} end test "should unpin when deleting a status", %{user: user, activity: activity} do @@ -870,7 +905,40 @@ test "should unpin when deleting a status", %{user: user, activity: activity} do user = refresh_record(user) - assert %User{pinned_activities: []} = user + assert user.pinned_objects == %{} + end + + test "ephemeral activity won't be deleted if was pinned", %{user: user} do + {:ok, activity} = CommonAPI.post(user, %{status: "Hello!", expires_in: 601}) + + assert Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity.id) + + {:ok, _activity} = CommonAPI.pin(activity.id, user) + refute Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity.id) + + user = refresh_record(user) + {:ok, _} = CommonAPI.unpin(activity.id, user) + + # recreates expiration job on unpin + assert Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity.id) + end + + test "ephemeral activity deletion job won't be deleted on pinning error", %{ + user: user, + activity: activity + } do + clear_config([:instance, :max_pinned_statuses], 1) + + {:ok, _activity} = CommonAPI.pin(activity.id, user) + + {:ok, activity2} = CommonAPI.post(user, %{status: "another status", expires_in: 601}) + + assert Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity2.id) + + user = refresh_record(user) + {:error, 
:pinned_statuses_limit_reached} = CommonAPI.pin(activity2.id, user) + + assert Pleroma.Workers.PurgeExpiredActivity.get_expiration(activity2.id) end end diff --git a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs index e76c2760d..99ad87d05 100644 --- a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs +++ b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs @@ -358,7 +358,6 @@ test "posting a direct status", %{conn: conn} do assert activity.data["cc"] == [] end - @tag :skip test "discloses application metadata when enabled" do user = insert(:user, disclose_client: true) %{user: _user, token: token, conn: conn} = oauth_access(["write:statuses"], user: user) @@ -377,6 +376,16 @@ test "discloses application metadata when enabled" do "status" => "cofe is my copilot" }) + assert %{ + "content" => "cofe is my copilot" + } = json_response_and_validate_schema(result, 200) + + activity = result.assigns.activity.id + + result = + conn + |> get("api/v1/statuses/#{activity}") + assert %{ "content" => "cofe is my copilot", "application" => %{ @@ -397,6 +406,15 @@ test "hides application metadata when disabled" do "status" => "club mate is my wingman" }) + assert %{"content" => "club mate is my wingman"} = + json_response_and_validate_schema(result, 200) + + activity = result.assigns.activity.id + + result = + conn + |> get("api/v1/statuses/#{activity}") + assert %{ "content" => "club mate is my wingman", "application" => nil @@ -1191,20 +1209,27 @@ test "returns 404 error for a wrong id", %{conn: conn} do setup do: clear_config([:instance, :max_pinned_statuses], 1) test "pin status", %{conn: conn, user: user, activity: activity} do - id_str = to_string(activity.id) + id = activity.id - assert %{"id" => ^id_str, "pinned" => true} = + assert %{"id" => ^id, "pinned" => true} = conn |> put_req_header("content-type", "application/json") |> post("/api/v1/statuses/#{activity.id}/pin") |> json_response_and_validate_schema(200) - assert [%{"id" => ^id_str, "pinned" => true}] = + assert [%{"id" => ^id, "pinned" => true}] = conn |> get("/api/v1/accounts/#{user.id}/statuses?pinned=true") |> json_response_and_validate_schema(200) end + test "non authenticated user", %{activity: activity} do + assert build_conn() + |> put_req_header("content-type", "application/json") + |> post("/api/v1/statuses/#{activity.id}/pin") + |> json_response(403) == %{"error" => "Invalid credentials."} + end + test "/pin: returns 400 error when activity is not public", %{conn: conn, user: user} do {:ok, dm} = CommonAPI.post(user, %{status: "test", visibility: "direct"}) @@ -1213,7 +1238,18 @@ test "/pin: returns 400 error when activity is not public", %{conn: conn, user: |> put_req_header("content-type", "application/json") |> post("/api/v1/statuses/#{dm.id}/pin") - assert json_response_and_validate_schema(conn, 400) == %{"error" => "Could not pin"} + assert json_response_and_validate_schema(conn, 422) == %{ + "error" => "Non-public status cannot be pinned" + } + end + + test "pin by another user", %{activity: activity} do + %{conn: conn} = oauth_access(["write:accounts"]) + + assert conn + |> put_req_header("content-type", "application/json") + |> post("/api/v1/statuses/#{activity.id}/pin") + |> json_response(422) == %{"error" => "Someone else's status cannot be pinned"} end test "unpin status", %{conn: conn, user: user, activity: activity} do @@ -1234,13 +1270,11 @@ test "unpin status", %{conn: conn, user: 
user, activity: activity} do |> json_response_and_validate_schema(200) end - test "/unpin: returns 400 error when activity is not exist", %{conn: conn} do - conn = - conn - |> put_req_header("content-type", "application/json") - |> post("/api/v1/statuses/1/unpin") - - assert json_response_and_validate_schema(conn, 400) == %{"error" => "Could not unpin"} + test "/unpin: returns 404 error when activity doesn't exist", %{conn: conn} do + assert conn + |> put_req_header("content-type", "application/json") + |> post("/api/v1/statuses/1/unpin") + |> json_response_and_validate_schema(404) == %{"error" => "Record not found"} end test "max pinned statuses", %{conn: conn, user: user, activity: activity_one} do diff --git a/test/pleroma/web/mastodon_api/views/status_view_test.exs b/test/pleroma/web/mastodon_api/views/status_view_test.exs index e6c37e782..9dfdf8bf0 100644 --- a/test/pleroma/web/mastodon_api/views/status_view_test.exs +++ b/test/pleroma/web/mastodon_api/views/status_view_test.exs @@ -262,8 +262,8 @@ test "a note activity" do mentions: [], tags: [ %{ - name: "#{object_data["tag"]}", - url: "http://localhost:4001/tag/#{object_data["tag"]}" + name: "#{hd(object_data["tag"])}", + url: "http://localhost:4001/tag/#{hd(object_data["tag"])}" } ], application: nil, @@ -286,7 +286,8 @@ test "a note activity" do direct_conversation_id: nil, thread_muted: false, emoji_reactions: [], - parent_visible: false + parent_visible: false, + pinned_at: nil } } diff --git a/test/pleroma/web/media_proxy_test.exs b/test/pleroma/web/media_proxy_test.exs index 7411d0a7a..b5ee6328d 100644 --- a/test/pleroma/web/media_proxy_test.exs +++ b/test/pleroma/web/media_proxy_test.exs @@ -11,8 +11,7 @@ defmodule Pleroma.Web.MediaProxyTest do alias Pleroma.Web.MediaProxy defp decode_result(encoded) do - [_, "proxy", sig, base64 | _] = URI.parse(encoded).path |> String.split("/") - {:ok, decoded} = MediaProxy.decode_url(sig, base64) + {:ok, decoded} = MediaProxy.decode_url(encoded) decoded end diff --git a/test/pleroma/web/o_auth/o_auth_controller_test.exs b/test/pleroma/web/o_auth/o_auth_controller_test.exs index 312500feb..0fdd5b8e9 100644 --- a/test/pleroma/web/o_auth/o_auth_controller_test.exs +++ b/test/pleroma/web/o_auth/o_auth_controller_test.exs @@ -805,10 +805,12 @@ test "issues a token for `password` grant_type with valid credentials, with full "client_secret" => app.client_secret }) - assert %{"access_token" => token} = json_response(conn, 200) + assert %{"id" => id, "access_token" => access_token} = json_response(conn, 200) - token = Repo.get_by(Token, token: token) + token = Repo.get_by(Token, token: access_token) assert token + assert token.id == id + assert token.token == access_token assert token.scopes == app.scopes end diff --git a/test/pleroma/web/twitter_api/remote_follow_controller_test.exs b/test/pleroma/web/twitter_api/remote_follow_controller_test.exs index f389c272b..fa3b29006 100644 --- a/test/pleroma/web/twitter_api/remote_follow_controller_test.exs +++ b/test/pleroma/web/twitter_api/remote_follow_controller_test.exs @@ -27,6 +27,16 @@ test "adds status to pleroma instance if the `acct` is a status", %{conn: conn} body: File.read!("test/fixtures/tesla_mock/status.emelie.json") } + %{method: :get, url: "https://mastodon.social/users/emelie/collections/featured"} -> + %Tesla.Env{ + status: 200, + headers: [{"content-type", "application/activity+json"}], + body: + File.read!("test/fixtures/users_mock/masto_featured.json") + |> String.replace("{{domain}}", "mastodon.social") + |> 
String.replace("{{nickname}}", "emelie") + } + %{method: :get, url: "https://mastodon.social/users/emelie"} -> %Tesla.Env{ status: 200, @@ -52,6 +62,16 @@ test "show follow account page if the `acct` is a account link", %{conn: conn} d headers: [{"content-type", "application/activity+json"}], body: File.read!("test/fixtures/tesla_mock/emelie.json") } + + %{method: :get, url: "https://mastodon.social/users/emelie/collections/featured"} -> + %Tesla.Env{ + status: 200, + headers: [{"content-type", "application/activity+json"}], + body: + File.read!("test/fixtures/users_mock/masto_featured.json") + |> String.replace("{{domain}}", "mastodon.social") + |> String.replace("{{nickname}}", "emelie") + } end) response = @@ -70,6 +90,16 @@ test "show follow page if the `acct` is a account link", %{conn: conn} do headers: [{"content-type", "application/activity+json"}], body: File.read!("test/fixtures/tesla_mock/emelie.json") } + + %{method: :get, url: "https://mastodon.social/users/emelie/collections/featured"} -> + %Tesla.Env{ + status: 200, + headers: [{"content-type", "application/activity+json"}], + body: + File.read!("test/fixtures/users_mock/masto_featured.json") + |> String.replace("{{domain}}", "mastodon.social") + |> String.replace("{{nickname}}", "emelie") + } end) user = insert(:user) diff --git a/test/pleroma/web/web_finger_test.exs b/test/pleroma/web/web_finger_test.exs index 84477d5a1..2d7b4a40b 100644 --- a/test/pleroma/web/web_finger_test.exs +++ b/test/pleroma/web/web_finger_test.exs @@ -45,6 +45,26 @@ test "returns error for nonsensical input" do assert {:error, _} = WebFinger.finger("pleroma.social") end + test "returns error when there is no content-type header" do + Tesla.Mock.mock(fn + %{url: "http://social.heldscal.la/.well-known/host-meta"} -> + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/tesla_mock/social.heldscal.la_host_meta") + }} + + %{ + url: + "https://social.heldscal.la/.well-known/webfinger?resource=acct:invalid_content@social.heldscal.la" + } -> + {:ok, %Tesla.Env{status: 200, body: ""}} + end) + + user = "invalid_content@social.heldscal.la" + assert {:error, {:content_type, nil}} = WebFinger.finger(user) + end + test "returns error when fails parse xml or json" do user = "invalid_content@social.heldscal.la" assert {:error, %Jason.DecodeError{}} = WebFinger.finger(user) @@ -113,5 +133,52 @@ test "it works with idna domains as link" do ap_id = "https://" <> to_string(:idna.encode("zetsubou.みんな")) <> "/users/lain" {:ok, _data} = WebFinger.finger(ap_id) end + + test "respects json content-type" do + Tesla.Mock.mock(fn + %{ + url: + "https://mastodon.social/.well-known/webfinger?resource=acct:emelie@mastodon.social" + } -> + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/tesla_mock/webfinger_emelie.json"), + headers: [{"content-type", "application/jrd+json"}] + }} + + %{url: "http://mastodon.social/.well-known/host-meta"} -> + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/tesla_mock/mastodon.social_host_meta") + }} + end) + + {:ok, _data} = WebFinger.finger("emelie@mastodon.social") + end + + test "respects xml content-type" do + Tesla.Mock.mock(fn + %{ + url: "https://pawoo.net/.well-known/webfinger?resource=acct:pekorino@pawoo.net" + } -> + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/tesla_mock/https___pawoo.net_users_pekorino.xml"), + headers: [{"content-type", "application/xrd+xml"}] + }} + + %{url: "http://pawoo.net/.well-known/host-meta"} -> + {:ok, + %Tesla.Env{ + status: 
200, + body: File.read!("test/fixtures/tesla_mock/pawoo.net_host_meta") + }} + end) + + {:ok, _data} = WebFinger.finger("pekorino@pawoo.net") + end end end diff --git a/test/support/conn_case.ex b/test/support/conn_case.ex index 953aa010a..deee98599 100644 --- a/test/support/conn_case.ex +++ b/test/support/conn_case.ex @@ -67,13 +67,11 @@ defp empty_json_response(conn) do end defp json_response_and_validate_schema( - %{ - private: %{ - open_api_spex: %{operation_id: op_id, operation_lookup: lookup, spec: spec} - } - } = conn, + %{private: %{operation_id: op_id}} = conn, status ) do + {spec, lookup} = OpenApiSpex.Plug.PutApiSpec.get_spec_and_operation_lookup(conn) + content_type = conn |> Plug.Conn.get_resp_header("content-type") diff --git a/test/support/factory.ex b/test/support/factory.ex index af4fff45b..5c4e65c81 100644 --- a/test/support/factory.ex +++ b/test/support/factory.ex @@ -4,6 +4,9 @@ defmodule Pleroma.Factory do use ExMachina.Ecto, repo: Pleroma.Repo + + require Pleroma.Constants + alias Pleroma.Object alias Pleroma.User @@ -41,23 +44,27 @@ def user_factory(attrs \\ %{}) do urls = if attrs[:local] == false do - base_domain = Enum.random(["domain1.com", "domain2.com", "domain3.com"]) + base_domain = attrs[:domain] || Enum.random(["domain1.com", "domain2.com", "domain3.com"]) ap_id = "https://#{base_domain}/users/#{user.nickname}" %{ ap_id: ap_id, follower_address: ap_id <> "/followers", - following_address: ap_id <> "/following" + following_address: ap_id <> "/following", + featured_address: ap_id <> "/collections/featured" } else %{ ap_id: User.ap_id(user), follower_address: User.ap_followers(user), - following_address: User.ap_following(user) + following_address: User.ap_following(user), + featured_address: User.ap_featured_collection(user) } end + attrs = Map.delete(attrs, :domain) + user |> Map.put(:raw_bio, user.bio) |> Map.merge(urls) @@ -221,6 +228,45 @@ def direct_note_activity_factory do } end + def add_activity_factory(attrs \\ %{}) do + featured_collection_activity(attrs, "Add") + end + + def remove_activity_factor(attrs \\ %{}) do + featured_collection_activity(attrs, "Remove") + end + + defp featured_collection_activity(attrs, type) do + user = attrs[:user] || insert(:user) + note = attrs[:note] || insert(:note, user: user) + + data_attrs = + attrs + |> Map.get(:data_attrs, %{}) + |> Map.put(:type, type) + + attrs = Map.drop(attrs, [:user, :note, :data_attrs]) + + data = + %{ + "id" => Pleroma.Web.ActivityPub.Utils.generate_activity_id(), + "target" => user.featured_address, + "object" => note.data["object"], + "actor" => note.data["actor"], + "type" => "Add", + "to" => [Pleroma.Constants.as_public()], + "cc" => [user.follower_address] + } + |> Map.merge(data_attrs) + + %Pleroma.Activity{ + data: data, + actor: data["actor"], + recipients: data["to"] + } + |> Map.merge(attrs) + end + def note_activity_factory(attrs \\ %{}) do user = attrs[:user] || insert(:user) note = attrs[:note] || insert(:note, user: user) diff --git a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex index 1328d6225..8807c2d14 100644 --- a/test/support/http_request_mock.ex +++ b/test/support/http_request_mock.ex @@ -89,6 +89,18 @@ def get("https://mastodon.sdf.org/users/rinpatch", _, _, _) do }} end + def get("https://mastodon.sdf.org/users/rinpatch/collections/featured", _, _, _) do + {:ok, + %Tesla.Env{ + status: 200, + body: + File.read!("test/fixtures/users_mock/masto_featured.json") + |> String.replace("{{domain}}", "mastodon.sdf.org") + |> 
String.replace("{{nickname}}", "rinpatch"), + headers: [{"content-type", "application/activity+json"}] + }} + end + def get("https://patch.cx/objects/tesla_mock/poll_attachment", _, _, _) do {:ok, %Tesla.Env{ @@ -122,7 +134,7 @@ def get( %Tesla.Env{ status: 200, body: File.read!("test/fixtures/tesla_mock/mike@osada.macgirvin.com.json"), - headers: activitypub_object_headers() + headers: [{"content-type", "application/jrd+json"}] }} end @@ -187,7 +199,8 @@ def get( {:ok, %Tesla.Env{ status: 200, - body: File.read!("test/fixtures/tesla_mock/lain_squeet.me_webfinger.xml") + body: File.read!("test/fixtures/tesla_mock/lain_squeet.me_webfinger.xml"), + headers: [{"content-type", "application/xrd+xml"}] }} end @@ -526,22 +539,6 @@ def get( }} end - def get("http://zetsubou.xn--q9jyb4c/.well-known/host-meta", _, _, _) do - {:ok, - %Tesla.Env{ - status: 200, - body: File.read!("test/fixtures/tesla_mock/xn--q9jyb4c_host_meta") - }} - end - - def get("https://zetsubou.xn--q9jyb4c/.well-known/host-meta", _, _, _) do - {:ok, - %Tesla.Env{ - status: 200, - body: File.read!("test/fixtures/tesla_mock/xn--q9jyb4c_host_meta") - }} - end - def get("http://pleroma.soykaf.com/.well-known/host-meta", _, _, _) do {:ok, %Tesla.Env{ @@ -786,7 +783,8 @@ def get( {:ok, %Tesla.Env{ status: 200, - body: File.read!("test/fixtures/tesla_mock/shp@social.heldscal.la.xml") + body: File.read!("test/fixtures/tesla_mock/shp@social.heldscal.la.xml"), + headers: [{"content-type", "application/xrd+xml"}] }} end @@ -796,7 +794,7 @@ def get( _, [{"accept", "application/xrd+xml,application/jrd+json"}] ) do - {:ok, %Tesla.Env{status: 200, body: ""}} + {:ok, %Tesla.Env{status: 200, body: "", headers: [{"content-type", "application/jrd+json"}]}} end def get("http://framatube.org/.well-known/host-meta", _, _, _) do @@ -816,7 +814,7 @@ def get( {:ok, %Tesla.Env{ status: 200, - headers: [{"content-type", "application/json"}], + headers: [{"content-type", "application/jrd+json"}], body: File.read!("test/fixtures/tesla_mock/framasoft@framatube.org.json") }} end @@ -876,7 +874,7 @@ def get( {:ok, %Tesla.Env{ status: 200, - headers: [{"content-type", "application/json"}], + headers: [{"content-type", "application/jrd+json"}], body: File.read!("test/fixtures/tesla_mock/kaniini@gerzilla.de.json") }} end @@ -919,6 +917,18 @@ def get("https://mastodon.social/users/lambadalambda", _, _, _) do }} end + def get("https://mastodon.social/users/lambadalambda/collections/featured", _, _, _) do + {:ok, + %Tesla.Env{ + status: 200, + body: + File.read!("test/fixtures/users_mock/masto_featured.json") + |> String.replace("{{domain}}", "mastodon.social") + |> String.replace("{{nickname}}", "lambadalambda"), + headers: activitypub_object_headers() + }} + end + def get("https://apfed.club/channel/indio", _, _, _) do {:ok, %Tesla.Env{ @@ -1074,7 +1084,8 @@ def get( {:ok, %Tesla.Env{ status: 200, - body: File.read!("test/fixtures/lain.xml") + body: File.read!("test/fixtures/lain.xml"), + headers: [{"content-type", "application/xrd+xml"}] }} end @@ -1087,7 +1098,16 @@ def get( {:ok, %Tesla.Env{ status: 200, - body: File.read!("test/fixtures/lain.xml") + body: File.read!("test/fixtures/lain.xml"), + headers: [{"content-type", "application/xrd+xml"}] + }} + end + + def get("http://zetsubou.xn--q9jyb4c/.well-known/host-meta", _, _, _) do + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/host-meta-zetsubou.xn--q9jyb4c.xml") }} end @@ -1153,7 +1173,8 @@ def get("https://mstdn.jp/.well-known/webfinger?resource=acct:kpherox@mstdn.jp", {:ok, 
%Tesla.Env{ status: 200, - body: File.read!("test/fixtures/tesla_mock/kpherox@mstdn.jp.xml") + body: File.read!("test/fixtures/tesla_mock/kpherox@mstdn.jp.xml"), + headers: [{"content-type", "application/xrd+xml"}] }} end @@ -1281,6 +1302,15 @@ def get("https://osada.macgirvin.com/", _, "", [{"accept", "text/html"}]) do }} end + def get("https://patch.cx/objects/a399c28e-c821-4820-bc3e-4afeb044c16f", _, _, _) do + {:ok, + %Tesla.Env{ + status: 200, + body: File.read!("test/fixtures/tesla_mock/emoji-in-summary.json"), + headers: activitypub_object_headers() + }} + end + def get(url, query, body, headers) do {:error, "Mock response not implemented for GET #{inspect(url)}, #{query}, #{inspect(body)}, #{