diff --git a/.buildpacks b/.buildpacks
deleted file mode 100644
index 31dd57096..000000000
--- a/.buildpacks
+++ /dev/null
@@ -1 +0,0 @@
-https://github.com/hashnuke/heroku-buildpack-elixir
diff --git a/CHANGELOG.md b/CHANGELOG.md
index c94383649..832f9e2b6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,7 +4,12 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
-## Unreleased
+## 2024.04.1 (Security)
+
+## Fixed
+- Issue allowing non-owners to use media objects in posts
+- Issue allowing use of non-media objects as attachments and crashing timeline rendering
+- Issue allowing webfinger spoofing in certain situations
## 2024.04
@@ -37,6 +42,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Issue leading to Mastodon bot accounts being rejected
- Scope misdetection of remote posts resulting from not recognising
JSON-LD-compacted forms of public scope; affected e.g. federation with bovine
+- Rate limits encountered when fetching objects are now respected; a 429 or 503 response now triggers a temporary backoff for that host
## Removed
- ActivityPub Client-To-Server write API endpoints have been disabled;
diff --git a/config/dokku.exs b/config/dokku.exs
deleted file mode 100644
index 1cc396c3d..000000000
--- a/config/dokku.exs
+++ /dev/null
@@ -1,25 +0,0 @@
-import Config
-
-config :pleroma, Pleroma.Web.Endpoint,
- http: [
- port: String.to_integer(System.get_env("PORT") || "4000"),
- protocol_options: [max_request_line_length: 8192, max_header_value_length: 8192]
- ],
- protocol: "http",
- secure_cookie_flag: false,
- url: [host: System.get_env("APP_HOST"), scheme: "https", port: 443],
- secret_key_base: "+S+ULgf7+N37c/lc9K66SMphnjQIRGklTu0BRr2vLm2ZzvK0Z6OH/PE77wlUNtvP"
-
-database_url =
- System.get_env("DATABASE_URL") ||
- raise """
- environment variable DATABASE_URL is missing.
- For example: ecto://USER:PASS@HOST/DATABASE
- """
-
-config :pleroma, Pleroma.Repo,
- # ssl: true,
- url: database_url,
- pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")
-
-config :pleroma, :instance, name: "#{System.get_env("APP_NAME")} CI Instance"
diff --git a/docs/docs/clients.md b/docs/docs/clients.md
index 1452b9505..b5ec500c9 100644
--- a/docs/docs/clients.md
+++ b/docs/docs/clients.md
@@ -1,12 +1,15 @@
# Akkoma Clients
-Note: Additional clients may work, but these are known to work with Akkoma.
-Apps listed here might not support all of Akkoma's features.
+This is a list of clients that are known to work with Akkoma.
+
+!!! warning
+ **Clients listed here are not officially supported by the Akkoma project.**
+ Some Akkoma features may be unsupported by these clients.
## Multiplatform
### Kaiteki
- Homepage:
- Source Code:
-- Contact: [@kaiteki@fedi.software](https://fedi.software/@Kaiteki)
+- Contact: [@kaiteki@social.kaiteki.app](https://social.kaiteki.app/@kaiteki)
- Platforms: Web, Windows, Linux, Android
- Features: MastoAPI, Supports multiple backends
@@ -38,12 +41,6 @@ Apps listed here might not support all of Akkoma's features.
- Platforms: Android
- Features: MastoAPI, No Streaming, Emoji Reactions, Text Formatting, FE Stickers
-### Fedi
-- Homepage:
-- Source Code: Proprietary, but gratis
-- Platforms: iOS, Android
-- Features: MastoAPI, Pleroma-specific features like Reactions
-
### Tusky
- Homepage:
- Source Code:
@@ -51,12 +48,18 @@ Apps listed here might not support all of Akkoma's features.
- Platforms: Android
- Features: MastoAPI, No Streaming
+### Subway Tooter
+- Source Code:
+- Contact: [@SubwayTooter@mastodon.juggler.jp](https://mastodon.juggler.jp/@SubwayTooter)
+- Platforms: Android
+- Features: MastoAPI, Editing, Emoji Reactions (including custom emoji)
+
## Alternative Web Interfaces
-### Pinafore
-- Note: Pinafore is unmaintained (See [the author's original article](https://nolanlawson.com/2023/01/09/retiring-pinafore/) for details)
-- Homepage:
-- Source Code:
-- Contact: [@pinafore@mastodon.technology](https://mastodon.technology/users/pinafore)
+### Enafore
+- An actively developed fork of Pinafore with improved Akkoma support
+- Homepage:
+- Source Code:
+- Contact: [@enafore@enafore.social](https://meta.enafore.social/@enafore)
- Features: MastoAPI, No Streaming
### Sengi
diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex
index 28a86d0aa..e0fff6dd7 100644
--- a/lib/pleroma/application.ex
+++ b/lib/pleroma/application.ex
@@ -179,7 +179,9 @@ defp cachex_children do
build_cachex("translations", default_ttl: :timer.hours(24 * 30), limit: 2500),
build_cachex("instances", default_ttl: :timer.hours(24), ttl_interval: 1000, limit: 2500),
build_cachex("request_signatures", default_ttl: :timer.hours(24 * 30), limit: 3000),
- build_cachex("rel_me", default_ttl: :timer.hours(24 * 30), limit: 300)
+ build_cachex("rel_me", default_ttl: :timer.hours(24 * 30), limit: 300),
+ build_cachex("host_meta", default_ttl: :timer.minutes(120), limit: 5000),
+ build_cachex("http_backoff", default_ttl: :timer.hours(24 * 30), limit: 10000)
]
end
diff --git a/lib/pleroma/constants.ex b/lib/pleroma/constants.ex
index 94608a99b..2681d7671 100644
--- a/lib/pleroma/constants.ex
+++ b/lib/pleroma/constants.ex
@@ -64,4 +64,7 @@ defmodule Pleroma.Constants do
"Service"
]
)
+
+ # Internally used as top-level types for media attachments and user images
+ const(attachment_types, do: ["Document", "Image"])
end
diff --git a/lib/pleroma/http/backoff.ex b/lib/pleroma/http/backoff.ex
new file mode 100644
index 000000000..b3f734a92
--- /dev/null
+++ b/lib/pleroma/http/backoff.ex
@@ -0,0 +1,121 @@
+defmodule Pleroma.HTTP.Backoff do
+ alias Pleroma.HTTP
+ require Logger
+
+ @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
+ @backoff_cache :http_backoff_cache
+
+ # attempt to parse a timestamp from a header
+ # returns nil if it can't parse the timestamp
+ @spec timestamp_or_nil(binary) :: DateTime.t() | nil
+ defp timestamp_or_nil(header) do
+ case DateTime.from_iso8601(header) do
+ {:ok, stamp, _} ->
+ stamp
+
+ _ ->
+ nil
+ end
+ end
+
+ # attempt to parse the x-ratelimit-reset header from the headers
+ @spec x_ratelimit_reset(headers :: list) :: DateTime.t() | nil
+ defp x_ratelimit_reset(headers) do
+ with {_header, value} <- List.keyfind(headers, "x-ratelimit-reset", 0),
+ true <- is_binary(value) do
+ timestamp_or_nil(value)
+ else
+ _ ->
+ nil
+ end
+ end
+
+ # attempt to parse the Retry-After header from the headers
+ # this can be either a timestamp _or_ a number of seconds to wait!
+ # we'll return a datetime if we can parse it, or nil if we can't
+ @spec retry_after(headers :: list) :: DateTime.t() | nil
+ defp retry_after(headers) do
+ with {_header, value} <- List.keyfind(headers, "retry-after", 0),
+ true <- is_binary(value) do
+ # first, see if it's an integer
+ case Integer.parse(value) do
+ {seconds, ""} ->
+ Logger.debug("Parsed Retry-After header: #{seconds} seconds")
+ DateTime.utc_now() |> Timex.shift(seconds: seconds)
+
+ _ ->
+ # if it's not an integer, try to parse it as a timestamp
+ timestamp_or_nil(value)
+ end
+ else
+ _ ->
+ nil
+ end
+ end
+
+ # given a set of headers, will attempt to find the next backoff timestamp
+ # if it can't find one, it will default to 5 minutes from now
+ @spec next_backoff_timestamp(%{headers: list}) :: DateTime.t()
+ defp next_backoff_timestamp(%{headers: headers}) when is_list(headers) do
+ default_5_minute_backoff =
+ DateTime.utc_now()
+ |> Timex.shift(seconds: 5 * 60)
+
+ backoff =
+ [&x_ratelimit_reset/1, &retry_after/1]
+ |> Enum.map(& &1.(headers))
+ |> Enum.find(&(&1 != nil))
+
+ if is_nil(backoff) do
+ Logger.debug("No backoff headers found, defaulting to 5 minutes from now")
+ default_5_minute_backoff
+ else
+ Logger.debug("Found backoff header, will back off until: #{backoff}")
+ backoff
+ end
+ end
+
+ defp next_backoff_timestamp(_), do: DateTime.utc_now() |> Timex.shift(seconds: 5 * 60)
+
+ # utility function to check the HTTP response for potential backoff headers
+ # will check if we get a 429 or 503 response, and if we do, will back off for a bit
+ @spec check_backoff({:ok | :error, HTTP.Env.t()}, binary()) ::
+ {:ok | :error, HTTP.Env.t()} | {:error, :ratelimit}
+ defp check_backoff({:ok, env}, host) do
+ case env.status do
+ status when status in [429, 503] ->
+ Logger.error("Rate limited on #{host}! Backing off...")
+ timestamp = next_backoff_timestamp(env)
+ ttl = Timex.diff(timestamp, DateTime.utc_now(), :seconds)
+ # cache the host until the backoff timestamp computed from the response headers (default: 5 minutes)
+ @cachex.put(@backoff_cache, host, true, ttl: ttl)
+ {:error, :ratelimit}
+
+ _ ->
+ {:ok, env}
+ end
+ end
+
+ defp check_backoff(env, _), do: env
+
+ @doc """
+ this acts as a single chokepoint for all GET requests
+ we will check if the host is in the cache, and if it is, we will automatically fail the request
+ this ensures that we don't hammer the server with requests, and instead wait for the backoff to expire
+ this is a very simple implementation, and can be improved upon!
+ """
+ @spec get(binary, list, list) :: {:ok | :error, HTTP.Env.t()} | {:error, :ratelimit}
+ def get(url, headers \\ [], options \\ []) do
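+ # backoff state is keyed per host, not per full URL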
+ %{host: host} = URI.parse(url)
+
+ case @cachex.get(@backoff_cache, host) do
+ {:ok, nil} ->
+ url
+ |> HTTP.get(headers, options)
+ |> check_backoff(host)
+
+ _ ->
+ {:error, :ratelimit}
+ end
+ end
+end
diff --git a/lib/pleroma/object/fetcher.ex b/lib/pleroma/object/fetcher.ex
index b9d8dbaaa..937026e04 100644
--- a/lib/pleroma/object/fetcher.ex
+++ b/lib/pleroma/object/fetcher.ex
@@ -354,7 +354,7 @@ def get_object(id) do
with {:ok, %{body: body, status: code, headers: headers, url: final_url}}
when code in 200..299 <-
- HTTP.get(id, headers),
+ HTTP.Backoff.get(id, headers),
remote_host <-
URI.parse(final_url).host,
{:cross_domain_redirect, false} <-
diff --git a/lib/pleroma/scheduled_activity.ex b/lib/pleroma/scheduled_activity.ex
index 2b156341f..5292b1491 100644
--- a/lib/pleroma/scheduled_activity.ex
+++ b/lib/pleroma/scheduled_activity.ex
@@ -28,7 +28,7 @@ defmodule Pleroma.ScheduledActivity do
timestamps()
end
- def changeset(%ScheduledActivity{} = scheduled_activity, attrs) do
+ defp changeset(%ScheduledActivity{} = scheduled_activity, attrs) do
scheduled_activity
|> cast(attrs, [:scheduled_at, :params])
|> validate_required([:scheduled_at, :params])
@@ -40,26 +40,36 @@ defp with_media_attachments(
%{changes: %{params: %{"media_ids" => media_ids} = params}} = changeset
)
when is_list(media_ids) do
- media_attachments = Utils.attachments_from_ids(%{media_ids: media_ids})
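+ # resolve attachments as the scheduling user so ownership is enforced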
+ user = User.get_by_id(changeset.data.user_id)
- params =
- params
- |> Map.put("media_attachments", media_attachments)
- |> Map.put("media_ids", media_ids)
+ case Utils.attachments_from_ids(user, %{media_ids: media_ids}) do
+ media_attachments when is_list(media_attachments) ->
+ params =
+ params
+ |> Map.put("media_attachments", media_attachments)
+ |> Map.put("media_ids", media_ids)
- put_change(changeset, :params, params)
+ put_change(changeset, :params, params)
+
+ {:error, _} = e ->
+ e
+
+ e ->
+ {:error, e}
+ end
end
defp with_media_attachments(changeset), do: changeset
- def update_changeset(%ScheduledActivity{} = scheduled_activity, attrs) do
+ defp update_changeset(%ScheduledActivity{} = scheduled_activity, attrs) do
+ # note: should this ever allow swapping media attachments, make sure ownership is checked
scheduled_activity
|> cast(attrs, [:scheduled_at])
|> validate_required([:scheduled_at])
|> validate_scheduled_at()
end
- def validate_scheduled_at(changeset) do
+ defp validate_scheduled_at(changeset) do
validate_change(changeset, :scheduled_at, fn _, scheduled_at ->
cond do
not far_enough?(scheduled_at) ->
@@ -77,7 +87,7 @@ def validate_scheduled_at(changeset) do
end)
end
- def exceeds_daily_user_limit?(user_id, scheduled_at) do
+ defp exceeds_daily_user_limit?(user_id, scheduled_at) do
ScheduledActivity
|> where(user_id: ^user_id)
|> where([sa], type(sa.scheduled_at, :date) == type(^scheduled_at, :date))
@@ -86,7 +96,7 @@ def exceeds_daily_user_limit?(user_id, scheduled_at) do
|> Kernel.>=(Config.get([ScheduledActivity, :daily_user_limit]))
end
- def exceeds_total_user_limit?(user_id) do
+ defp exceeds_total_user_limit?(user_id) do
ScheduledActivity
|> where(user_id: ^user_id)
|> select([sa], count(sa.id))
@@ -108,20 +118,29 @@ def far_enough?(scheduled_at) do
diff > @min_offset
end
- def new(%User{} = user, attrs) do
+ defp new(%User{} = user, attrs) do
changeset(%ScheduledActivity{user_id: user.id}, attrs)
end
@doc """
Creates ScheduledActivity and add to queue to perform at scheduled_at date
"""
- @spec create(User.t(), map()) :: {:ok, ScheduledActivity.t()} | {:error, Ecto.Changeset.t()}
+ @spec create(User.t(), map()) :: {:ok, ScheduledActivity.t()} | {:error, any()}
def create(%User{} = user, attrs) do
- Multi.new()
- |> Multi.insert(:scheduled_activity, new(user, attrs))
- |> maybe_add_jobs(Config.get([ScheduledActivity, :enabled]))
- |> Repo.transaction()
- |> transaction_response
+ case new(user, attrs) do
+ %Ecto.Changeset{} = sched_data ->
+ Multi.new()
+ |> Multi.insert(:scheduled_activity, sched_data)
+ |> maybe_add_jobs(Config.get([ScheduledActivity, :enabled]))
+ |> Repo.transaction()
+ |> transaction_response
+
+ {:error, _} = e ->
+ e
+
+ e ->
+ {:error, e}
+ end
end
defp maybe_add_jobs(multi, true) do
@@ -187,17 +206,7 @@ def for_user_query(%User{} = user) do
|> where(user_id: ^user.id)
end
- def due_activities(offset \\ 0) do
- naive_datetime =
- NaiveDateTime.utc_now()
- |> NaiveDateTime.add(offset, :millisecond)
-
- ScheduledActivity
- |> where([sa], sa.scheduled_at < ^naive_datetime)
- |> Repo.all()
- end
-
- def job_query(scheduled_activity_id) do
+ defp job_query(scheduled_activity_id) do
from(j in Oban.Job,
where: j.queue == "scheduled_activities",
where: fragment("args ->> 'activity_id' = ?::text", ^to_string(scheduled_activity_id))
diff --git a/lib/pleroma/signature.ex b/lib/pleroma/signature.ex
index 3d33fcd62..c4ac2c87e 100644
--- a/lib/pleroma/signature.ex
+++ b/lib/pleroma/signature.ex
@@ -10,7 +10,7 @@ defmodule Pleroma.Signature do
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
- @known_suffixes ["/publickey", "/main-key"]
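+ # "#key" is a fragment-style key id, as used by e.g. Bridgy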
+ @known_suffixes ["/publickey", "/main-key", "#key"]
def key_id_to_actor_id(key_id) do
uri =
diff --git a/lib/pleroma/upload.ex b/lib/pleroma/upload.ex
index ad5c2c655..426fe9f1b 100644
--- a/lib/pleroma/upload.ex
+++ b/lib/pleroma/upload.ex
@@ -13,7 +13,6 @@ defmodule Pleroma.Upload do
* `:uploader`: override uploader
* `:filters`: override filters
* `:size_limit`: override size limit
- * `:activity_type`: override activity type
The `%Pleroma.Upload{}` struct: all documented fields are meant to be overwritten in filters:
@@ -48,7 +47,6 @@ defmodule Pleroma.Upload do
@type option ::
{:type, :avatar | :banner | :background}
| {:description, String.t()}
- | {:activity_type, String.t()}
| {:size_limit, nil | non_neg_integer()}
| {:uploader, module()}
| {:filters, [module()]}
@@ -143,7 +141,7 @@ defp get_opts(opts) do
end
%{
- activity_type: Keyword.get(opts, :activity_type, activity_type),
+ activity_type: activity_type,
size_limit: Keyword.get(opts, :size_limit, size_limit),
uploader: Keyword.get(opts, :uploader, Pleroma.Config.get([__MODULE__, :uploader])),
filters:
diff --git a/lib/pleroma/web/common_api/activity_draft.ex b/lib/pleroma/web/common_api/activity_draft.ex
index ced6371d6..2d24edec4 100644
--- a/lib/pleroma/web/common_api/activity_draft.ex
+++ b/lib/pleroma/web/common_api/activity_draft.ex
@@ -41,7 +41,7 @@ defmodule Pleroma.Web.CommonAPI.ActivityDraft do
preview?: false,
changes: %{}
- def new(user, params) do
+ defp new(user, params) do
%__MODULE__{user: user}
|> put_params(params)
end
@@ -92,9 +92,14 @@ defp full_payload(%{status: status, summary: summary} = draft) do
end
end
- defp attachments(%{params: params} = draft) do
- attachments = Utils.attachments_from_ids(params)
- %__MODULE__{draft | attachments: attachments}
+ defp attachments(%{params: params, user: user} = draft) do
+ case Utils.attachments_from_ids(user, params) do
+ attachments when is_list(attachments) ->
+ %__MODULE__{draft | attachments: attachments}
+
+ {:error, reason} ->
+ add_error(draft, reason)
+ end
end
defp in_reply_to(%{params: %{in_reply_to_status_id: ""}} = draft), do: draft
diff --git a/lib/pleroma/web/common_api/utils.ex b/lib/pleroma/web/common_api/utils.ex
index e79b12fc9..635143621 100644
--- a/lib/pleroma/web/common_api/utils.ex
+++ b/lib/pleroma/web/common_api/utils.ex
@@ -22,43 +22,31 @@ defmodule Pleroma.Web.CommonAPI.Utils do
require Logger
require Pleroma.Constants
- def attachments_from_ids(%{media_ids: ids, descriptions: desc}) do
- attachments_from_ids_descs(ids, desc)
+ def attachments_from_ids(user, %{media_ids: ids}) do
+ attachments_from_ids(user, ids, [])
end
- def attachments_from_ids(%{media_ids: ids}) do
- attachments_from_ids_no_descs(ids)
+ def attachments_from_ids(_, _), do: []
+
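+ # Walk the given media ids, fetching each attachment and checking that the
+ # requesting user may use it; unknown ids are skipped, while an authorization
+ # failure aborts with {:error, reason}.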
+ defp attachments_from_ids(_user, [], acc), do: Enum.reverse(acc)
+
+ defp attachments_from_ids(user, [media_id | ids], acc) do
+ with {_, %Object{} = object} <- {:get, get_attachment(media_id)},
+ :ok <- Object.authorize_access(object, user) do
+ attachments_from_ids(user, ids, [object.data | acc])
+ else
+ {:get, _} -> attachments_from_ids(user, ids, acc)
+ {:error, reason} -> {:error, reason}
+ end
end
- def attachments_from_ids(_), do: []
-
- def attachments_from_ids_no_descs([]), do: []
-
- def attachments_from_ids_no_descs(ids) do
- Enum.map(ids, fn media_id ->
- case get_attachment(media_id) do
- %Object{data: data} -> data
- _ -> nil
- end
- end)
- |> Enum.reject(&is_nil/1)
- end
-
- def attachments_from_ids_descs([], _), do: []
-
- def attachments_from_ids_descs(ids, descs_str) do
- {_, descs} = Jason.decode(descs_str)
-
- Enum.map(ids, fn media_id ->
- with %Object{data: data} <- get_attachment(media_id) do
- Map.put(data, "name", descs[media_id])
- end
- end)
- |> Enum.reject(&is_nil/1)
- end
-
- defp get_attachment(media_id) do
- Repo.get(Object, media_id)
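+ # Only return objects whose type is a valid attachment type (see
+ # Pleroma.Constants.attachment_types/0), so arbitrary objects such as Notes
+ # cannot be used as attachments.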
+ def get_attachment(media_id) do
+ with %Object{} = object <- Repo.get(Object, media_id),
+ true <- object.data["type"] in Pleroma.Constants.attachment_types() do
+ object
+ else
+ _ -> nil
+ end
end
@spec get_to_and_cc(ActivityDraft.t()) :: {list(String.t()), list(String.t())}
diff --git a/lib/pleroma/web/mastodon_api/controllers/media_controller.ex b/lib/pleroma/web/mastodon_api/controllers/media_controller.ex
index 5918b288d..3e5a64b51 100644
--- a/lib/pleroma/web/mastodon_api/controllers/media_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/media_controller.ex
@@ -8,6 +8,7 @@ defmodule Pleroma.Web.MastodonAPI.MediaController do
alias Pleroma.Object
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
+ alias Pleroma.Web.CommonAPI.Utils
alias Pleroma.Web.Plugs.OAuthScopesPlug
action_fallback(Pleroma.Web.MastodonAPI.FallbackController)
@@ -55,12 +56,15 @@ def create2(_conn, _data), do: {:error, :bad_request}
@doc "PUT /api/v1/media/:id"
def update(%{assigns: %{user: user}, body_params: %{description: description}} = conn, %{id: id}) do
- with %Object{} = object <- Object.get_by_id(id),
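+ # Utils.get_attachment/1 only returns media objects, so non-media ids fall
+ # through to :not_found below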
+ with {_, %Object{} = object} <- {:get, Utils.get_attachment(id)},
:ok <- Object.authorize_access(object, user),
{:ok, %Object{data: data}} <- Object.update_data(object, %{"name" => description}) do
attachment_data = Map.put(data, "id", object.id)
render(conn, "attachment.json", %{attachment: attachment_data})
+ else
+ {:get, _} -> {:error, :not_found}
+ e -> e
end
end
@@ -68,11 +72,14 @@ def update(conn, data), do: show(conn, data)
@doc "GET /api/v1/media/:id"
def show(%{assigns: %{user: user}} = conn, %{id: id}) do
- with %Object{data: data, id: object_id} = object <- Object.get_by_id(id),
+ with {_, %Object{data: data, id: object_id} = object} <- {:get, Utils.get_attachment(id)},
:ok <- Object.authorize_access(object, user) do
attachment_data = Map.put(data, "id", object_id)
render(conn, "attachment.json", %{attachment: attachment_data})
+ else
+ {:get, _} -> {:error, :not_found}
+ e -> e
end
end
diff --git a/lib/pleroma/web/mastodon_api/controllers/status_controller.ex b/lib/pleroma/web/mastodon_api/controllers/status_controller.ex
index 338a35052..acb5f15a0 100644
--- a/lib/pleroma/web/mastodon_api/controllers/status_controller.ex
+++ b/lib/pleroma/web/mastodon_api/controllers/status_controller.ex
@@ -87,7 +87,7 @@ defmodule Pleroma.Web.MastodonAPI.StatusController do
%{scopes: ["write:bookmarks"]} when action in [:bookmark, :unbookmark]
)
- @rate_limited_status_actions ~w(reblog unreblog favourite unfavourite create delete)a
+ @rate_limited_status_actions ~w(reblog unreblog favourite unfavourite create delete update)a
plug(
RateLimiter,
diff --git a/lib/pleroma/web/web_finger.ex b/lib/pleroma/web/web_finger.ex
index 9d5efbb3e..6a8c86a29 100644
--- a/lib/pleroma/web/web_finger.ex
+++ b/lib/pleroma/web/web_finger.ex
@@ -156,11 +156,21 @@ def get_template_from_xml(body) do
end
end
+ @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
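+ # host-meta lookups are cached (see the host_meta cachex child in
+ # Pleroma.Application) so repeated webfinger requests for the same domain
+ # don't refetch /.well-known/host-meta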
def find_lrdd_template(domain) do
+ @cachex.fetch!(:host_meta_cache, domain, fn _ ->
+ {:commit, fetch_lrdd_template(domain)}
+ end)
+ rescue
+ e -> {:error, "Cachex error: #{inspect(e)}"}
+ end
+
+ defp fetch_lrdd_template(domain) do
# WebFinger is restricted to HTTPS - https://tools.ietf.org/html/rfc7033#section-9.1
meta_url = "https://#{domain}/.well-known/host-meta"
- with {:ok, %{status: status, body: body}} when status in 200..299 <- HTTP.get(meta_url) do
+ with {:ok, %{status: status, body: body}} when status in 200..299 <-
+ HTTP.Backoff.get(meta_url) do
get_template_from_xml(body)
else
error ->
@@ -169,7 +179,7 @@ def find_lrdd_template(domain) do
end
end
- defp get_address_from_domain(domain, encoded_account) when is_binary(domain) do
+ defp get_address_from_domain(domain, "acct:" <> _ = encoded_account) when is_binary(domain) do
case find_lrdd_template(domain) do
{:ok, template} ->
String.replace(template, "{uri}", encoded_account)
@@ -179,6 +189,11 @@ defp get_address_from_domain(domain, encoded_account) when is_binary(domain) do
end
end
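+ # bare accounts are wrapped into an acct: URI before building the address,
+ # so the clause above always receives an acct:-prefixed account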
+ defp get_address_from_domain(domain, account) when is_binary(domain) do
+ encoded_account = URI.encode("acct:#{account}")
+ get_address_from_domain(domain, encoded_account)
+ end
+
defp get_address_from_domain(_, _), do: {:error, :webfinger_no_domain}
@spec finger(String.t()) :: {:ok, map()} | {:error, any()}
@@ -193,11 +208,9 @@ def finger(account) do
URI.parse(account).host
end
- encoded_account = URI.encode("acct:#{account}")
-
- with address when is_binary(address) <- get_address_from_domain(domain, encoded_account),
+ with address when is_binary(address) <- get_address_from_domain(domain, account),
{:ok, %{status: status, body: body, headers: headers}} when status in 200..299 <-
- HTTP.get(
+ HTTP.Backoff.get(
address,
[{"accept", "application/xrd+xml,application/jrd+json"}]
) do
@@ -217,10 +230,28 @@ def finger(account) do
_ ->
{:error, {:content_type, nil}}
end
+ |> case do
+ {:ok, data} -> validate_webfinger(address, data)
+ error -> error
+ end
else
error ->
Logger.debug("Couldn't finger #{account}: #{inspect(error)}")
error
end
end
+
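+ # Require the webfinger subject to belong to the host we actually queried:
+ # resolve the subject's domain to its webfinger address and compare hosts,
+ # which stops a server from answering for accounts on another domain.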
+ defp validate_webfinger(request_url, %{"subject" => "acct:" <> acct = subject} = data) do
+ with [_name, acct_host] <- String.split(acct, "@"),
+ {_, url} <- {:address, get_address_from_domain(acct_host, subject)},
+ %URI{host: request_host} <- URI.parse(request_url),
+ %URI{host: acct_host} <- URI.parse(url),
+ {_, true} <- {:hosts_match, acct_host == request_host} do
+ {:ok, data}
+ else
+ _ -> {:error, {:webfinger_invalid, request_url, data}}
+ end
+ end
+
+ defp validate_webfinger(url, data), do: {:error, {:webfinger_invalid, url, data}}
end
diff --git a/mix.exs b/mix.exs
index 4d58821bc..872174174 100644
--- a/mix.exs
+++ b/mix.exs
@@ -4,7 +4,7 @@ defmodule Pleroma.Mixfile do
def project do
[
app: :pleroma,
- version: version("3.13.1"),
+ version: version("3.13.2"),
elixir: "~> 1.14",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: Mix.compilers(),
diff --git a/priv/repo/migrations/20220220135625_upload_filter_exiftool_to_exiftool_strip_location.exs b/priv/repo/migrations/20220220135625_upload_filter_exiftool_to_exiftool_strip_location.exs
index 0d68a0787..86e02df01 100644
--- a/priv/repo/migrations/20220220135625_upload_filter_exiftool_to_exiftool_strip_location.exs
+++ b/priv/repo/migrations/20220220135625_upload_filter_exiftool_to_exiftool_strip_location.exs
@@ -1,37 +1,10 @@
defmodule Pleroma.Repo.Migrations.UploadFilterExiftoolToExiftoolStripMetadata do
use Ecto.Migration
- alias Pleroma.ConfigDB
-
- def up,
- do:
- ConfigDB.get_by_params(%{group: :pleroma, key: Pleroma.Upload})
- |> update_filtername(
- Pleroma.Upload.Filter.Exiftool,
- Pleroma.Upload.Filter.Exiftool.StripMetadata
- )
-
- def down,
- do:
- ConfigDB.get_by_params(%{group: :pleroma, key: Pleroma.Upload})
- |> update_filtername(
- Pleroma.Upload.Filter.Exiftool.StripMetadata,
- Pleroma.Upload.Filter.Exiftool
- )
-
- defp update_filtername(%{value: value}, from_filtername, to_filtername) do
- new_value =
- value
- |> Keyword.update(:filters, [], fn filters ->
- filters
- |> Enum.map(fn
- ^from_filtername -> to_filtername
- filter -> filter
- end)
- end)
-
- ConfigDB.update_or_create(%{group: :pleroma, key: Pleroma.Upload, value: new_value})
- end
-
- defp update_filtername(_, _, _), do: nil
+ # This migration was originally committed with the id used in this file, but
+ # that breaks rollback order. It was therefore moved to 20240425120000
+ # (upload_filter_exiftool_to_exiftool_strip_location_real.exs) and this stub
+ # just prevents errors during large-scale rollbacks for anyone who already
+ # applied the old id.
+ def up, do: :ok
+ def down, do: :ok
end
diff --git a/priv/repo/migrations/20240425120000_upload_filter_exiftool_to_exiftool_strip_location_real.exs b/priv/repo/migrations/20240425120000_upload_filter_exiftool_to_exiftool_strip_location_real.exs
new file mode 100644
index 000000000..5d2b880db
--- /dev/null
+++ b/priv/repo/migrations/20240425120000_upload_filter_exiftool_to_exiftool_strip_location_real.exs
@@ -0,0 +1,37 @@
+defmodule Pleroma.Repo.Migrations.UploadFilterExiftoolToExiftoolStripMetadataReal do
+ use Ecto.Migration
+
+ alias Pleroma.ConfigDB
+
+ def up,
+ do:
+ ConfigDB.get_by_params(%{group: :pleroma, key: Pleroma.Upload})
+ |> update_filtername(
+ Pleroma.Upload.Filter.Exiftool,
+ Pleroma.Upload.Filter.Exiftool.StripMetadata
+ )
+
+ def down,
+ do:
+ ConfigDB.get_by_params(%{group: :pleroma, key: Pleroma.Upload})
+ |> update_filtername(
+ Pleroma.Upload.Filter.Exiftool.StripMetadata,
+ Pleroma.Upload.Filter.Exiftool
+ )
+
+ defp update_filtername(%{value: value}, from_filtername, to_filtername) do
+ new_value =
+ value
+ |> Keyword.update(:filters, [], fn filters ->
+ filters
+ |> Enum.map(fn
+ ^from_filtername -> to_filtername
+ filter -> filter
+ end)
+ end)
+
+ ConfigDB.update_or_create(%{group: :pleroma, key: Pleroma.Upload, value: new_value})
+ end
+
+ defp update_filtername(_, _, _), do: nil
+end
diff --git a/test/fixtures/tesla_mock/bad.com_host_meta b/test/fixtures/tesla_mock/bad.com_host_meta
new file mode 100644
index 000000000..14cf3fa24
--- /dev/null
+++ b/test/fixtures/tesla_mock/bad.com_host_meta
@@ -0,0 +1,3 @@
+<XRD xmlns="http://docs.oasis-open.org/ns/xri/xrd-1.0">
+  <Link rel="lrdd" template="https://bad.com/.well-known/webfinger?resource={uri}"/>
+</XRD>
diff --git a/test/fixtures/tesla_mock/webfinger_spoof.json b/test/fixtures/tesla_mock/webfinger_spoof.json
new file mode 100644
index 000000000..ebe2958ce
--- /dev/null
+++ b/test/fixtures/tesla_mock/webfinger_spoof.json
@@ -0,0 +1,28 @@
+{
+ "aliases": [
+ "https://bad.com/users/meanie",
+ "https://anotherbad.social/users/meanie"
+ ],
+ "links": [
+ {
+ "href": "https://bad.com/users/meanie",
+ "rel": "http://webfinger.net/rel/profile-page",
+ "type": "text/html"
+ },
+ {
+ "href": "https://bad.com/users/meanie",
+ "rel": "self",
+ "type": "application/activity+json"
+ },
+ {
+ "href": "https://bad.com/users/meanie",
+ "rel": "self",
+ "type": "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""
+ },
+ {
+ "rel": "http://ostatus.org/schema/1.0/subscribe",
+ "template": "https://bad.com/ostatus_subscribe?acct={uri}"
+ }
+ ],
+ "subject": "acct:oopsie@notwhereitshouldbe.org"
+}
diff --git a/test/fixtures/webfinger/imposter-webfinger.json b/test/fixtures/webfinger/imposter-webfinger.json
new file mode 100644
index 000000000..e3d21a083
--- /dev/null
+++ b/test/fixtures/webfinger/imposter-webfinger.json
@@ -0,0 +1,41 @@
+{
+ "subject": "acct:oopsie@notwhereitshouldbe.com",
+ "aliases": [
+ "https://bad.com/webfingertest"
+ ],
+ "links": [
+ {
+ "rel": "http://webfinger.net/rel/profile-page",
+ "type": "text/html",
+ "href": "https://bad.com/webfingertest"
+ },
+ {
+ "rel": "self",
+ "type": "application/activity+json",
+ "href": "https://bad.com/webfingertest"
+ },
+ {
+ "rel": "http://ostatus.org/schema/1.0/subscribe",
+ "template": "https://bad.com/contact/follow?url={uri}"
+ },
+ {
+ "rel": "http://schemas.google.com/g/2010#updates-from",
+ "type": "application/atom+xml",
+ "href": ""
+ },
+ {
+ "rel": "salmon",
+ "href": "https://bad.com/salmon/friendica"
+ },
+ {
+ "rel": "http://microformats.org/profile/hcard",
+ "type": "text/html",
+ "href": "https://bad.com/hcard/friendica"
+ },
+ {
+ "rel": "http://joindiaspora.com/seed_location",
+ "type": "text/html",
+ "href": "https://bad.com"
+ }
+ ]
+}
diff --git a/test/pleroma/http/backoff_test.exs b/test/pleroma/http/backoff_test.exs
new file mode 100644
index 000000000..f1b27f5b5
--- /dev/null
+++ b/test/pleroma/http/backoff_test.exs
@@ -0,0 +1,105 @@
+defmodule Pleroma.HTTP.BackoffTest do
+ @backoff_cache :http_backoff_cache
+ use Pleroma.DataCase, async: false
+ alias Pleroma.HTTP.Backoff
+
+ defp within_tolerance?(ttl, expected) do
+ ttl > expected - 10 and ttl < expected + 10
+ end
+
+ describe "get/3" do
+ test "should return {:ok, env} when not rate limited" do
+ Tesla.Mock.mock_global(fn
+ %Tesla.Env{url: "https://akkoma.dev/api/v1/instance"} ->
+ {:ok, %Tesla.Env{status: 200, body: "ok"}}
+ end)
+
+ assert {:ok, env} = Backoff.get("https://akkoma.dev/api/v1/instance")
+ assert env.status == 200
+ end
+
+ test "should return {:error, env} when rate limited" do
+ # Shove a value into the cache to simulate a rate limit
+ Cachex.put(@backoff_cache, "akkoma.dev", true)
+ assert {:error, :ratelimit} = Backoff.get("https://akkoma.dev/api/v1/instance")
+ end
+
+ test "should insert a value into the cache when rate limited" do
+ Tesla.Mock.mock_global(fn
+ %Tesla.Env{url: "https://ratelimited.dev/api/v1/instance"} ->
+ {:ok, %Tesla.Env{status: 429, body: "Rate limited"}}
+ end)
+
+ assert {:error, :ratelimit} = Backoff.get("https://ratelimited.dev/api/v1/instance")
+ assert {:ok, true} = Cachex.get(@backoff_cache, "ratelimited.dev")
+ end
+
+ test "should insert a value into the cache when rate limited with a 503 response" do
+ Tesla.Mock.mock_global(fn
+ %Tesla.Env{url: "https://ratelimited.dev/api/v1/instance"} ->
+ {:ok, %Tesla.Env{status: 503, body: "Rate limited"}}
+ end)
+
+ assert {:error, :ratelimit} = Backoff.get("https://ratelimited.dev/api/v1/instance")
+ assert {:ok, true} = Cachex.get(@backoff_cache, "ratelimited.dev")
+ end
+
+ test "should parse the value of x-ratelimit-reset, if present" do
+ ten_minutes_from_now =
+ DateTime.utc_now() |> Timex.shift(minutes: 10) |> DateTime.to_iso8601()
+
+ Tesla.Mock.mock_global(fn
+ %Tesla.Env{url: "https://ratelimited.dev/api/v1/instance"} ->
+ {:ok,
+ %Tesla.Env{
+ status: 429,
+ body: "Rate limited",
+ headers: [{"x-ratelimit-reset", ten_minutes_from_now}]
+ }}
+ end)
+
+ assert {:error, :ratelimit} = Backoff.get("https://ratelimited.dev/api/v1/instance")
+ assert {:ok, true} = Cachex.get(@backoff_cache, "ratelimited.dev")
+ {:ok, ttl} = Cachex.ttl(@backoff_cache, "ratelimited.dev")
+ assert within_tolerance?(ttl, 600)
+ end
+
+ test "should parse the value of retry-after when it's a timestamp" do
+ ten_minutes_from_now =
+ DateTime.utc_now() |> Timex.shift(minutes: 10) |> DateTime.to_iso8601()
+
+ Tesla.Mock.mock_global(fn
+ %Tesla.Env{url: "https://ratelimited.dev/api/v1/instance"} ->
+ {:ok,
+ %Tesla.Env{
+ status: 429,
+ body: "Rate limited",
+ headers: [{"retry-after", ten_minutes_from_now}]
+ }}
+ end)
+
+ assert {:error, :ratelimit} = Backoff.get("https://ratelimited.dev/api/v1/instance")
+ assert {:ok, true} = Cachex.get(@backoff_cache, "ratelimited.dev")
+ {:ok, ttl} = Cachex.ttl(@backoff_cache, "ratelimited.dev")
+ assert within_tolerance?(ttl, 600)
+ end
+
+ test "should parse the value of retry-after when it's a number of seconds" do
+ Tesla.Mock.mock_global(fn
+ %Tesla.Env{url: "https://ratelimited.dev/api/v1/instance"} ->
+ {:ok,
+ %Tesla.Env{
+ status: 429,
+ body: "Rate limited",
+ headers: [{"retry-after", "600"}]
+ }}
+ end)
+
+ assert {:error, :ratelimit} = Backoff.get("https://ratelimited.dev/api/v1/instance")
+ assert {:ok, true} = Cachex.get(@backoff_cache, "ratelimited.dev")
+ # assert that the value is 10 minutes from now
+ {:ok, ttl} = Cachex.ttl(@backoff_cache, "ratelimited.dev")
+ assert within_tolerance?(ttl, 600)
+ end
+ end
+end
diff --git a/test/pleroma/signature_test.exs b/test/pleroma/signature_test.exs
index e2d02fe4c..b7c9bbb62 100644
--- a/test/pleroma/signature_test.exs
+++ b/test/pleroma/signature_test.exs
@@ -153,6 +153,11 @@ test "it deduces the actor ID for streams" do
{:ok, "https://example.com/users/1234"}
end
+ test "it deduces the actor ID for bridgy" do
+ assert Signature.key_id_to_actor_id("https://example.com/1234#key") ==
+ {:ok, "https://example.com/1234"}
+ end
+
test "it calls webfinger for 'acct:' accounts" do
with_mock(Pleroma.Web.WebFinger,
finger: fn _ -> {:ok, %{"ap_id" => "https://gensokyo.2hu/users/raymoo"}} end
diff --git a/test/pleroma/user_test.exs b/test/pleroma/user_test.exs
index 96ca8d0fd..de71f4b95 100644
--- a/test/pleroma/user_test.exs
+++ b/test/pleroma/user_test.exs
@@ -765,109 +765,19 @@ test "gets an existing user by nickname starting with http" do
setup do: clear_config([Pleroma.Web.WebFinger, :update_nickname_on_user_fetch], true)
test "for mastodon" do
- Tesla.Mock.mock(fn
- %{url: "https://example.com/.well-known/host-meta"} ->
- %Tesla.Env{
- status: 302,
- headers: [{"location", "https://sub.example.com/.well-known/host-meta"}]
- }
-
- %{url: "https://sub.example.com/.well-known/host-meta"} ->
- %Tesla.Env{
- status: 200,
- body:
- "test/fixtures/webfinger/masto-host-meta.xml"
- |> File.read!()
- |> String.replace("{{domain}}", "sub.example.com")
- }
-
- %{url: "https://sub.example.com/.well-known/webfinger?resource=acct:a@example.com"} ->
- %Tesla.Env{
- status: 200,
- body:
- "test/fixtures/webfinger/masto-webfinger.json"
- |> File.read!()
- |> String.replace("{{nickname}}", "a")
- |> String.replace("{{domain}}", "example.com")
- |> String.replace("{{subdomain}}", "sub.example.com"),
- headers: [{"content-type", "application/jrd+json"}]
- }
-
- %{url: "https://sub.example.com/users/a"} ->
- %Tesla.Env{
- status: 200,
- body:
- "test/fixtures/webfinger/masto-user.json"
- |> File.read!()
- |> String.replace("{{nickname}}", "a")
- |> String.replace("{{domain}}", "sub.example.com"),
- headers: [{"content-type", "application/activity+json"}]
- }
-
- %{url: "https://sub.example.com/users/a/collections/featured"} ->
- %Tesla.Env{
- status: 200,
- body:
- File.read!("test/fixtures/users_mock/masto_featured.json")
- |> String.replace("{{domain}}", "sub.example.com")
- |> String.replace("{{nickname}}", "a"),
- headers: [{"content-type", "application/activity+json"}]
- }
- end)
-
- ap_id = "a@example.com"
+ ap_id = "a@mastodon.example"
{:ok, fetched_user} = User.get_or_fetch(ap_id)
- assert fetched_user.ap_id == "https://sub.example.com/users/a"
- assert fetched_user.nickname == "a@example.com"
+ assert fetched_user.ap_id == "https://sub.mastodon.example/users/a"
+ assert fetched_user.nickname == "a@mastodon.example"
end
test "for pleroma" do
- Tesla.Mock.mock(fn
- %{url: "https://example.com/.well-known/host-meta"} ->
- %Tesla.Env{
- status: 302,
- headers: [{"location", "https://sub.example.com/.well-known/host-meta"}]
- }
-
- %{url: "https://sub.example.com/.well-known/host-meta"} ->
- %Tesla.Env{
- status: 200,
- body:
- "test/fixtures/webfinger/pleroma-host-meta.xml"
- |> File.read!()
- |> String.replace("{{domain}}", "sub.example.com")
- }
-
- %{url: "https://sub.example.com/.well-known/webfinger?resource=acct:a@example.com"} ->
- %Tesla.Env{
- status: 200,
- body:
- "test/fixtures/webfinger/pleroma-webfinger.json"
- |> File.read!()
- |> String.replace("{{nickname}}", "a")
- |> String.replace("{{domain}}", "example.com")
- |> String.replace("{{subdomain}}", "sub.example.com"),
- headers: [{"content-type", "application/jrd+json"}]
- }
-
- %{url: "https://sub.example.com/users/a"} ->
- %Tesla.Env{
- status: 200,
- body:
- "test/fixtures/webfinger/pleroma-user.json"
- |> File.read!()
- |> String.replace("{{nickname}}", "a")
- |> String.replace("{{domain}}", "sub.example.com"),
- headers: [{"content-type", "application/activity+json"}]
- }
- end)
-
- ap_id = "a@example.com"
+ ap_id = "a@pleroma.example"
{:ok, fetched_user} = User.get_or_fetch(ap_id)
- assert fetched_user.ap_id == "https://sub.example.com/users/a"
- assert fetched_user.nickname == "a@example.com"
+ assert fetched_user.ap_id == "https://sub.pleroma.example/users/a"
+ assert fetched_user.nickname == "a@pleroma.example"
end
end
diff --git a/test/pleroma/web/common_api/utils_test.exs b/test/pleroma/web/common_api/utils_test.exs
index f56d21c70..3d639d389 100644
--- a/test/pleroma/web/common_api/utils_test.exs
+++ b/test/pleroma/web/common_api/utils_test.exs
@@ -590,41 +590,22 @@ test "returns recipients when object not found" do
end
end
- describe "attachments_from_ids_descs/2" do
- test "returns [] when attachment ids is empty" do
- assert Utils.attachments_from_ids_descs([], "{}") == []
- end
-
- test "returns list attachments with desc" do
- object = insert(:note)
- desc = Jason.encode!(%{object.id => "test-desc"})
-
- assert Utils.attachments_from_ids_descs(["#{object.id}", "34"], desc) == [
- Map.merge(object.data, %{"name" => "test-desc"})
- ]
- end
- end
-
describe "attachments_from_ids/1" do
- test "returns attachments with descs" do
- object = insert(:note)
- desc = Jason.encode!(%{object.id => "test-desc"})
-
- assert Utils.attachments_from_ids(%{
- media_ids: ["#{object.id}"],
- descriptions: desc
- }) == [
- Map.merge(object.data, %{"name" => "test-desc"})
- ]
+ test "returns attachments without descs" do
+ user = insert(:user)
+ object = insert(:attachment, user: user)
+ assert Utils.attachments_from_ids(user, %{media_ids: ["#{object.id}"]}) == [object.data]
end
- test "returns attachments without descs" do
- object = insert(:note)
- assert Utils.attachments_from_ids(%{media_ids: ["#{object.id}"]}) == [object.data]
+ test "returns [] when passed non-media object ids" do
+ user = insert(:user)
+ object = insert(:note, user: user)
+ assert Utils.attachments_from_ids(user, %{media_ids: ["#{object.id}"]}) == []
end
test "returns [] when not pass media_ids" do
- assert Utils.attachments_from_ids(%{}) == []
+ user = insert(:user)
+ assert Utils.attachments_from_ids(user, %{}) == []
end
end
diff --git a/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs
index 7b5f5850d..a224f063b 100644
--- a/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs
+++ b/test/pleroma/web/mastodon_api/controllers/media_controller_test.exs
@@ -6,6 +6,7 @@ defmodule Pleroma.Web.MastodonAPI.MediaControllerTest do
use Pleroma.Web.ConnCase, async: false
import ExUnit.CaptureLog
+ import Pleroma.Factory
alias Pleroma.Object
alias Pleroma.User
@@ -174,6 +175,18 @@ test "/api/v1/media/:id good request", %{conn: conn, object: object} do
assert media["description"] == "test-media"
assert refresh_record(object).data["name"] == "test-media"
end
+
+ test "won't update non-media", %{conn: conn, user: user} do
+ object = insert(:note, user: user)
+
+ response =
+ conn
+ |> put_req_header("content-type", "multipart/form-data")
+ |> put("/api/v1/media/#{object.id}", %{"description" => "test-media"})
+ |> json_response(404)
+
+ assert response == %{"error" => "Record not found"}
+ end
end
describe "Get media by id (/api/v1/media/:id)" do
@@ -207,6 +220,17 @@ test "it returns media object when requested by owner", %{conn: conn, object: ob
assert media["id"]
end
+ test "it returns 404 when requesting non-media object", %{conn: conn, user: user} do
+ object = insert(:note, user: user)
+
+ response =
+ conn
+ |> get("/api/v1/media/#{object.id}")
+ |> json_response(404)
+
+ assert response == %{"error" => "Record not found"}
+ end
+
test "it returns 403 if media object requested by non-owner", %{object: object, user: user} do
%{conn: conn, user: other_user} = oauth_access(["read:media"])
diff --git a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs
index f58045640..a15fd42fc 100644
--- a/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs
+++ b/test/pleroma/web/mastodon_api/controllers/status_controller_test.exs
@@ -220,6 +220,28 @@ test "posting an undefined status with an attachment", %{user: user, conn: conn}
assert json_response_and_validate_schema(conn, 200)
end
+ test "refuses to post non-owned media", %{conn: conn} do
+ other_user = insert(:user)
+
+ file = %Plug.Upload{
+ content_type: "image/jpeg",
+ path: Path.absname("test/fixtures/image.jpg"),
+ filename: "an_image.jpg"
+ }
+
+ {:ok, upload} = ActivityPub.upload(file, actor: other_user.ap_id)
+
+ conn =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> post("/api/v1/statuses", %{
+ "status" => "mew",
+ "media_ids" => [to_string(upload.id)]
+ })
+
+ assert json_response(conn, 422) == %{"error" => "forbidden"}
+ end
+
test "posting a status with an invalid language", %{conn: conn} do
conn =
conn
@@ -569,6 +591,29 @@ test "creates a scheduled activity with a media attachment", %{user: user, conn:
assert %{"type" => "image"} = media_attachment
end
+ test "refuses to schedule post with non-owned media", %{conn: conn} do
+ other_user = insert(:user)
+
+ file = %Plug.Upload{
+ content_type: "image/jpeg",
+ path: Path.absname("test/fixtures/image.jpg"),
+ filename: "an_image.jpg"
+ }
+
+ {:ok, upload} = ActivityPub.upload(file, actor: other_user.ap_id)
+
+ conn =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> post("/api/v1/statuses", %{
+ "status" => "mew",
+ "scheduled_at" => DateTime.add(DateTime.utc_now(), 6, :minute),
+ "media_ids" => [to_string(upload.id)]
+ })
+
+ assert json_response(conn, 403) == %{"error" => "Access denied"}
+ end
+
test "skips the scheduling and creates the activity if scheduled_at is earlier than 5 minutes from now",
%{conn: conn} do
scheduled_at =
@@ -2406,6 +2451,25 @@ test "it updates the attachments", %{conn: conn, user: user} do
assert [%{"id" => ^attachment_id}] = response["media_attachments"]
end
+ test "it does not update to non-owned attachments", %{conn: conn, user: user} do
+ other_user = insert(:user)
+ attachment = insert(:attachment, user: other_user)
+ attachment_id = to_string(attachment.id)
+
+ {:ok, activity} = CommonAPI.post(user, %{status: "mew mew #abc", spoiler_text: "#def"})
+
+ conn =
+ conn
+ |> put_req_header("content-type", "application/json")
+ |> put("/api/v1/statuses/#{activity.id}", %{
+ "status" => "mew mew #abc",
+ "spoiler_text" => "#def",
+ "media_ids" => [attachment_id]
+ })
+
+ assert json_response(conn, 400) == %{"error" => "internal_server_error"}
+ end
+
test "it does not update visibility", %{conn: conn, user: user} do
{:ok, activity} =
CommonAPI.post(user, %{
diff --git a/test/pleroma/web/mastodon_api/views/scheduled_activity_view_test.exs b/test/pleroma/web/mastodon_api/views/scheduled_activity_view_test.exs
index e323f3a1f..cc893f94e 100644
--- a/test/pleroma/web/mastodon_api/views/scheduled_activity_view_test.exs
+++ b/test/pleroma/web/mastodon_api/views/scheduled_activity_view_test.exs
@@ -47,8 +47,7 @@ test "A scheduled activity with a media attachment" do
expected = %{
id: to_string(scheduled_activity.id),
media_attachments:
- %{media_ids: [upload.id]}
- |> Utils.attachments_from_ids()
+ Utils.attachments_from_ids(user, %{media_ids: [upload.id]})
|> Enum.map(&StatusView.render("attachment.json", %{attachment: &1})),
params: %{
in_reply_to_id: to_string(activity.id),
diff --git a/test/pleroma/web/web_finger/web_finger_controller_test.exs b/test/pleroma/web/web_finger/web_finger_controller_test.exs
index f792f20e6..afb4ce92d 100644
--- a/test/pleroma/web/web_finger/web_finger_controller_test.exs
+++ b/test/pleroma/web/web_finger/web_finger_controller_test.exs
@@ -50,12 +50,7 @@ test "Webfinger JRD" do
]
end
- test "reach user on tld, while pleroma is runned on subdomain" do
- Pleroma.Web.Endpoint.config_change(
- [{Pleroma.Web.Endpoint, url: [host: "sub.example.com"]}],
- []
- )
-
+ test "reach user on tld, while pleroma is running on subdomain" do
clear_config([Pleroma.Web.Endpoint, :url, :host], "sub.example.com")
clear_config([Pleroma.Web.WebFinger, :domain], "example.com")
diff --git a/test/pleroma/web/web_finger_test.exs b/test/pleroma/web/web_finger_test.exs
index b47eabe80..2af084090 100644
--- a/test/pleroma/web/web_finger_test.exs
+++ b/test/pleroma/web/web_finger_test.exs
@@ -9,7 +9,7 @@ defmodule Pleroma.Web.WebFingerTest do
import Tesla.Mock
setup do
- mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
:ok
end
@@ -76,15 +76,6 @@ test "returns the ActivityPub actor URI for an ActivityPub user" do
{:ok, _data} = WebFinger.finger(user)
end
- test "returns the ActivityPub actor URI and subscribe address for an ActivityPub user with the ld+json mimetype" do
- user = "kaniini@gerzilla.de"
-
- {:ok, data} = WebFinger.finger(user)
-
- assert data["ap_id"] == "https://gerzilla.de/channel/kaniini"
- assert data["subscribe_address"] == "https://gerzilla.de/follow?f=&url={uri}"
- end
-
test "it work for AP-only user" do
user = "kpherox@mstdn.jp"
@@ -99,12 +90,6 @@ test "it work for AP-only user" do
assert data["subscribe_address"] == "https://mstdn.jp/authorize_interaction?acct={uri}"
end
- test "it works for friendica" do
- user = "lain@squeet.me"
-
- {:ok, _data} = WebFinger.finger(user)
- end
-
test "it gets the xrd endpoint" do
{:ok, template} = WebFinger.find_lrdd_template("social.heldscal.la")
@@ -180,5 +165,44 @@ test "respects xml content-type" do
{:ok, _data} = WebFinger.finger("pekorino@pawoo.net")
end
+
+ test "prevents spoofing" do
+ Tesla.Mock.mock(fn
+ %{
+ url: "https://bad.com/.well-known/webfinger?resource=acct:meanie@bad.com"
+ } ->
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/tesla_mock/webfinger_spoof.json"),
+ headers: [{"content-type", "application/jrd+json"}]
+ }}
+
+ %{url: "https://bad.com/.well-known/host-meta"} ->
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/tesla_mock/bad.com_host_meta")
+ }}
+ end)
+
+ {:error, _data} = WebFinger.finger("meanie@bad.com")
+ end
+ end
+
+ @tag capture_log: true
+ test "prevents forgeries" do
+ Tesla.Mock.mock(fn
+ %{url: "https://bad.com/.well-known/webfinger?resource=acct:meanie@bad.com"} ->
+ fake_webfinger =
+ File.read!("test/fixtures/webfinger/imposter-webfinger.json") |> Jason.decode!()
+
+ Tesla.Mock.json(fake_webfinger)
+
+ %{url: "https://bad.com/.well-known/host-meta"} ->
+ {:ok, %Tesla.Env{status: 404}}
+ end)
+
+ assert {:error, {:webfinger_invalid, _, _}} = WebFinger.finger("meanie@bad.com")
end
end
diff --git a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex
index 042e4110e..7bfcfaa0e 100644
--- a/test/support/http_request_mock.ex
+++ b/test/support/http_request_mock.ex
@@ -1464,6 +1464,177 @@ def get("https://p.helene.moe/objects/fd5910ac-d9dc-412e-8d1d-914b203296c4", _,
}}
end
+ def get("https://google.com/", _, _, _) do
+ {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/google.html")}}
+ end
+
+ def get("https://yahoo.com/", _, _, _) do
+ {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/yahoo.html")}}
+ end
+
+ def get("https://example.com/error", _, _, _), do: {:error, :overload}
+
+ def get("https://example.com/ogp-missing-title", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/rich_media/ogp-missing-title.html")
+ }}
+ end
+
+ def get("https://example.com/oembed", _, _, _) do
+ {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/oembed.html")}}
+ end
+
+ def get("https://example.com/oembed.json", _, _, _) do
+ {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/oembed.json")}}
+ end
+
+ def get("https://example.com/twitter-card", _, _, _) do
+ {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/twitter_card.html")}}
+ end
+
+ def get("https://example.com/non-ogp", _, _, _) do
+ {:ok,
+ %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/non_ogp_embed.html")}}
+ end
+
+ def get("https://example.com/empty", _, _, _) do
+ {:ok, %Tesla.Env{status: 200, body: "hello"}}
+ end
+
+ def get("https://friends.grishka.me/posts/54642", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/tesla_mock/smithereen_non_anonymous_poll.json"),
+ headers: activitypub_object_headers()
+ }}
+ end
+
+ def get("https://friends.grishka.me/users/1", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/tesla_mock/smithereen_user.json"),
+ headers: activitypub_object_headers()
+ }}
+ end
+
+ def get("https://mastodon.example/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 302,
+ headers: [{"location", "https://sub.mastodon.example/.well-known/host-meta"}]
+ }}
+ end
+
+ def get("https://sub.mastodon.example/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ "test/fixtures/webfinger/masto-host-meta.xml"
+ |> File.read!()
+ |> String.replace("{{domain}}", "sub.mastodon.example")
+ }}
+ end
+
+ def get(
+ "https://sub.mastodon.example/.well-known/webfinger?resource=acct:a@mastodon.example",
+ _,
+ _,
+ _
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ "test/fixtures/webfinger/masto-webfinger.json"
+ |> File.read!()
+ |> String.replace("{{nickname}}", "a")
+ |> String.replace("{{domain}}", "mastodon.example")
+ |> String.replace("{{subdomain}}", "sub.mastodon.example"),
+ headers: [{"content-type", "application/jrd+json"}]
+ }}
+ end
+
+ def get("https://sub.mastodon.example/users/a", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ "test/fixtures/webfinger/masto-user.json"
+ |> File.read!()
+ |> String.replace("{{nickname}}", "a")
+ |> String.replace("{{domain}}", "sub.mastodon.example"),
+ headers: [{"content-type", "application/activity+json"}]
+ }}
+ end
+
+ def get("https://sub.mastodon.example/users/a/collections/featured", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ File.read!("test/fixtures/users_mock/masto_featured.json")
+ |> String.replace("{{domain}}", "sub.mastodon.example")
+ |> String.replace("{{nickname}}", "a"),
+ headers: [{"content-type", "application/activity+json"}]
+ }}
+ end
+
+ def get("https://pleroma.example/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 302,
+ headers: [{"location", "https://sub.pleroma.example/.well-known/host-meta"}]
+ }}
+ end
+
+ def get("https://sub.pleroma.example/.well-known/host-meta", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ "test/fixtures/webfinger/pleroma-host-meta.xml"
+ |> File.read!()
+ |> String.replace("{{domain}}", "sub.pleroma.example")
+ }}
+ end
+
+ def get(
+ "https://sub.pleroma.example/.well-known/webfinger?resource=acct:a@pleroma.example",
+ _,
+ _,
+ _
+ ) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ "test/fixtures/webfinger/pleroma-webfinger.json"
+ |> File.read!()
+ |> String.replace("{{nickname}}", "a")
+ |> String.replace("{{domain}}", "pleroma.example")
+ |> String.replace("{{subdomain}}", "sub.pleroma.example"),
+ headers: [{"content-type", "application/jrd+json"}]
+ }}
+ end
+
+ def get("https://sub.pleroma.example/users/a", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body:
+ "test/fixtures/webfinger/pleroma-user.json"
+ |> File.read!()
+ |> String.replace("{{nickname}}", "a")
+ |> String.replace("{{domain}}", "sub.pleroma.example"),
+ headers: [{"content-type", "application/activity+json"}]
+ }}
+ end
+
def get(url, query, body, headers) do
{:error,
"Mock response not implemented for GET #{inspect(url)}, #{query}, #{inspect(body)}, #{inspect(headers)}"}