Merge remote-tracking branch 'oneric-sec/sec-2024-12' into develop
commit a47b02cb69
36 changed files with 1299 additions and 720 deletions

@@ -10,6 +10,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Oban (worker) dashboard at `/akkoma/oban`

## Fixed
- fixed some holes in SigningKey verification potentially allowing the key-user mapping to be poisoned
- frontend ZIP files can no longer traverse to paths outside their install dir
- fixed user updates trying but failing to renew signing key information
- fixed signing key refresh on key rotation
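
All of the ZIP-related fixes funnel through one idea: an archive entry is only accepted if its path stays inside the extraction directory. A minimal standalone sketch of that guard, using `Path.safe_relative/1` the same way the new `Pleroma.SafeZip` module later in this diff does (the entry names are made up):

```elixir
# Reject archive entries whose paths would escape the target directory.
safe_entry? = fn entry ->
  match?({:ok, _}, Path.safe_relative(entry))
end

safe_entry?.("static/index.html")  #=> true
safe_entry?.("../../etc/passwd")   #=> false (relative traversal)
safe_entry?.("/etc/passwd")        #=> false (absolute path)
```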

## Changed
- Dropped obsolete `ap_enabled` indicator from user table and associated buggy logic

@@ -17,6 +21,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
  since the latter proved unreasonably costly to obtain for a merely nice-to-have statistic
- Various other tweaks improving stat query performance and avoiding unnecessary work on received AP documents
- The HTML content for new posts (both Client-to-Server as well as Server-to-Server communication) will now use a different formatting to represent MFM. See [FEP-c16b](https://codeberg.org/fediverse/fep/src/branch/main/fep/c16b/fep-c16b.md) for more details.
- HTTP signatures now test the most likely request-target alias first, cutting down on overhead

## 2025.01.01

@@ -54,9 +54,6 @@ If your platform is not supported, or you just want to be able to edit the sourc
### Docker
Docker installation is supported via [this setup](https://docs.akkoma.dev/stable/installation/docker_en/)

### Packages
Akkoma is packaged for [YunoHost](https://yunohost.org) and can be found and installed from the [YunoHost app catalogue](https://yunohost.org/#/apps).

### Compilation Troubleshooting
If you ever encounter compilation issues during the updating of Akkoma, you can try these commands and see if they fix things:

@@ -166,10 +166,7 @@
  "application/xrd+xml" => ["xrd+xml"],
  "application/jrd+json" => ["jrd+json"],
  "application/activity+json" => ["activity+json"],
  "application/ld+json" => ["activity+json"],
  # Can be removed when bumping MIME past 2.0.5
  # see https://akkoma.dev/AkkomaGang/akkoma/issues/657
  "image/apng" => ["apng"]
  "application/ld+json" => ["activity+json"]
}

config :mime, :extensions, %{
@@ -373,6 +370,7 @@
  note_replies_output_limit: 5,
  sign_object_fetches: true,
  authorized_fetch_mode: false,
  min_key_refetch_interval: 86_400,
  max_collection_objects: 50

config :pleroma, :streamer,
@@ -1,9 +0,0 @@
# Installing on Yunohost

[YunoHost](https://yunohost.org) is a server operating system aimed at self-hosting. The YunoHost community maintains a package of Akkoma which allows you to install Akkoma on YunoHost. You can install it via the normal way through the admin web interface, or through the CLI. More information can be found at [the repo of the package](https://github.com/YunoHost-Apps/akkoma_ynh).

## Questions

Questions and problems related to the YunoHost parts can be done through the [YunoHost channels](https://yunohost.org/en/help).

For questions about Akkoma, check out the [Akkoma community channels](../../#community-channels).
@@ -93,6 +93,7 @@ def run(["get-packs" | args]) do
        )

        files = fetch_and_decode!(files_loc)
        files_to_unzip = for({_, f} <- files, do: f)

        shell_info(IO.ANSI.format(["Unpacking ", :bright, pack_name]))

@@ -103,17 +104,7 @@ def run(["get-packs" | args]) do
          pack_name
        ])

        files_to_unzip =
          Enum.map(
            files,
            fn {_, f} -> to_charlist(f) end
          )

        {:ok, _} =
          :zip.unzip(binary_archive,
            cwd: to_charlist(pack_path),
            file_list: files_to_unzip
          )
        {:ok, _} = Pleroma.SafeZip.unzip_data(binary_archive, pack_path, files_to_unzip)

        shell_info(IO.ANSI.format(["Writing pack.json for ", :bright, pack_name]))

@@ -202,7 +193,7 @@ def run(["gen-pack" | args]) do

      tmp_pack_dir = Path.join(System.tmp_dir!(), "emoji-pack-#{name}")

      {:ok, _} = :zip.unzip(binary_archive, cwd: String.to_charlist(tmp_pack_dir))
      {:ok, _} = Pleroma.SafeZip.unzip_data(binary_archive, tmp_pack_dir)

      emoji_map = Pleroma.Emoji.Loader.make_shortcode_to_file_map(tmp_pack_dir, exts)

@@ -25,6 +25,7 @@ defmodule Pleroma.Emoji.Pack do
  alias Pleroma.Emoji
  alias Pleroma.Emoji.Pack
  alias Pleroma.Utils
  alias Pleroma.SafeZip

  # Invalid/Malicious names are supposed to be filtered out before path joining,
  # but there are many entrypoints to affected functions so as the code changes

@@ -95,22 +96,20 @@ def delete(name) do
    end
  end

  @spec unpack_zip_emojies(list(tuple())) :: list(map())
  defp unpack_zip_emojies(zip_files) do
    Enum.reduce(zip_files, [], fn
      {_, path, s, _, _, _}, acc when elem(s, 2) == :regular ->
        with(
          filename <- Path.basename(path),
          shortcode <- Path.basename(filename, Path.extname(filename)),
          false <- Emoji.exist?(shortcode)
        ) do
          [%{path: path, filename: path, shortcode: shortcode} | acc]
        else
          _ -> acc
        end

      _, acc ->
        acc
  @spec map_zip_emojis(list(String.t())) :: list(map())
  defp map_zip_emojis(zip_files) do
    Enum.reduce(zip_files, [], fn path, acc ->
      with(
        filename <- Path.basename(path),
        shortcode <- Path.basename(filename, Path.extname(filename)),
        # note: this only checks the shortcode; if an emoji already exists on the same path, but
        # with a different shortcode, the existing one will be degraded to an alias of the new
        false <- Emoji.exist?(shortcode)
      ) do
        [%{path: path, filename: path, shortcode: shortcode} | acc]
      else
        _ -> acc
      end
    end)
  end

@@ -118,18 +117,15 @@ defp unpack_zip_emojies(zip_files) do
          {:ok, t()}
          | {:error, File.posix() | atom()}
  def add_file(%Pack{} = pack, _, _, %Plug.Upload{content_type: "application/zip"} = file) do
    with {:ok, zip_files} <- :zip.table(to_charlist(file.path)),
         [_ | _] = emojies <- unpack_zip_emojies(zip_files),
    with {:ok, zip_files} <- SafeZip.list_dir_file(file.path),
         [_ | _] = emojis <- map_zip_emojis(zip_files),
         {:ok, tmp_dir} <- Utils.tmp_dir("emoji") do
      try do
        {:ok, _emoji_files} =
          :zip.unzip(
            to_charlist(file.path),
            [{:file_list, Enum.map(emojies, & &1[:path])}, {:cwd, to_charlist(tmp_dir)}]
          )
        SafeZip.unzip_file(file.path, tmp_dir, Enum.map(emojis, & &1[:path]))

        {_, updated_pack} =
          Enum.map_reduce(emojies, pack, fn item, emoji_pack ->
          Enum.map_reduce(emojis, pack, fn item, emoji_pack ->
            emoji_file = %Plug.Upload{
              filename: item[:filename],
              path: path_join_safe(tmp_dir, item[:path])

@@ -446,16 +442,9 @@ defp downloadable?(pack) do
  end

  defp create_archive_and_cache(pack, hash) do
    files = [
      ~c"pack.json"
      | Enum.map(pack.files, fn {_, file} ->
          {:ok, file} = Path.safe_relative(file)
          to_charlist(file)
        end)
    ]

    {:ok, {_, result}} =
      :zip.zip(~c"#{pack.name}.zip", files, [:memory, cwd: to_charlist(pack.path)])
    pack_file_list = Enum.into(pack.files, [], fn {_, f} -> f end)
    files = ["pack.json" | pack_file_list]
    {:ok, {_, result}} = SafeZip.zip("#{pack.name}.zip", files, pack.path, true)

    ttl_per_file = Pleroma.Config.get!([:emoji, :shared_pack_cache_seconds_per_file])
    overall_ttl = :timer.seconds(ttl_per_file * Enum.count(files))

@@ -626,11 +615,10 @@ defp copy_as(remote_pack, local_name) do

  defp unzip(archive, pack_info, remote_pack, local_pack) do
    with :ok <- File.mkdir_p!(local_pack.path) do
      files = Enum.map(remote_pack["files"], fn {_, path} -> to_charlist(path) end)
      files = Enum.map(remote_pack["files"], fn {_, path} -> path end)
      # Fallback cannot contain a pack.json file
      files = if pack_info[:fallback], do: files, else: [~c"pack.json" | files]

      :zip.unzip(archive, cwd: to_charlist(local_pack.path), file_list: files)
      files = if pack_info[:fallback], do: files, else: ["pack.json" | files]
      SafeZip.unzip_data(archive, local_pack.path, files)
    end
  end

@@ -693,13 +681,14 @@ defp update_sha_and_save_metadata(pack, data) do
  end

  defp validate_has_all_files(pack, zip) do
    with {:ok, f_list} <- :zip.unzip(zip, [:memory]) do
      # Check if all files from the pack.json are in the archive
      pack.files
      |> Enum.all?(fn {_, from_manifest} ->
        List.keyfind(f_list, to_charlist(from_manifest), 0)
    # Check if all files from the pack.json are in the archive
    eset =
      Enum.reduce(pack.files, MapSet.new(), fn
        {_, file}, s -> MapSet.put(s, to_charlist(file))
      end)
      |> if(do: :ok, else: {:error, :incomplete})
    end

    if SafeZip.contains_all_data?(zip, eset),
      do: :ok,
      else: {:error, :incomplete}
  end
end
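
For intuition, the `SafeZip.contains_all_data?/2` helper used above takes the raw archive binary and a `MapSet` of charlist paths, and succeeds only when every path exists in the ZIP as a regular file. A hedged sketch with a throwaway in-memory archive:

```elixir
# Build a tiny ZIP in memory, then check it for required entries.
{:ok, {_name, archive}} =
  :zip.zip(~c"demo.zip", [{~c"pack.json", "{}"}, {~c"blob.png", <<0>>}], [:memory])

Pleroma.SafeZip.contains_all_data?(archive, MapSet.new([~c"pack.json", ~c"blob.png"]))
#=> true

Pleroma.SafeZip.contains_all_data?(archive, MapSet.new([~c"missing.png"]))
#=> false
```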

@@ -70,25 +70,12 @@ defp download_or_unzip(_frontend_info, temp_dir, file) do
  end

  def unzip(zip, dest) do
    with {:ok, unzipped} <- :zip.unzip(zip, [:memory]) do
      File.rm_rf!(dest)
      File.mkdir_p!(dest)
    File.rm_rf!(dest)
    File.mkdir_p!(dest)

      Enum.each(unzipped, fn {filename, data} ->
        path = filename

        new_file_path = Path.join(dest, path)

        new_file_path
        |> Path.dirname()
        |> File.rm()

        new_file_path
        |> Path.dirname()
        |> File.mkdir_p!()

        File.write!(new_file_path, data)
      end)
    case Pleroma.SafeZip.unzip_data(zip, dest) do
      {:ok, _} -> :ok
      error -> error
    end
  end
@@ -216,9 +216,9 @@ def get_cached_by_ap_id(ap_id) do
  end

  # Intentionally accepts non-Object arguments!
  @spec is_tombstone_object?(term()) :: boolean()
  def is_tombstone_object?(%Object{data: %{"type" => "Tombstone"}}), do: true
  def is_tombstone_object?(_), do: false
  @spec tombstone_object?(term()) :: boolean()
  def tombstone_object?(%Object{data: %{"type" => "Tombstone"}}), do: true
  def tombstone_object?(_), do: false

  def make_tombstone(%Object{data: %{"id" => id, "type" => type}}, deleted \\ DateTime.utc_now()) do
    %ObjectTombstone{
@@ -157,4 +157,16 @@ def same_origin(id1, id2) do

    compare_uris(uri1, uri2)
  end

  @doc """
  Checks whether a key_id - owner_id pair is acceptable.

  While in theory keys and actors on different domains could be verified
  by fetching both and checking the links on both ends (if different at all),
  this requires extra fetches and there are no known implementations with split
  actor and key domains; thus, for now, this simply requires the same domain.
  """
  def contain_key_user(key_id, user_id) do
    same_origin(key_id, user_id)
  end
end
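
To make the new containment rule concrete, a small illustrative sketch of `contain_key_user/2` (the URLs are hypothetical; `same_origin/2` compares the two URIs' origins and yields `:ok` or `:error`):

```elixir
# Key and claimed owner share an origin: accepted.
Containment.contain_key_user(
  "https://example.com/users/alice#main-key",
  "https://example.com/users/alice"
)
#=> :ok

# Key hosted on a different domain than its claimed owner: rejected.
Containment.contain_key_user(
  "https://evil.example/keys/1",
  "https://example.com/users/alice"
)
#=> :error
```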
|
||||
|
|
|
@ -265,6 +265,64 @@ def fetch_and_contain_remote_object_from_id(%{"id" => id}, is_ap_id),
|
|||
def fetch_and_contain_remote_object_from_id(id, is_ap_id) when is_binary(id) do
|
||||
Logger.debug("Fetching object #{id} via AP [ap_id=#{is_ap_id}]")
|
||||
|
||||
fetch_and_contain_remote_ap_doc(
|
||||
id,
|
||||
is_ap_id,
|
||||
fn final_uri, data -> {Containment.contain_id_to_fetch(final_uri, data), data["id"]} end
|
||||
)
|
||||
end
|
||||
|
||||
def fetch_and_contain_remote_object_from_id(_id, _is_ap_id),
|
||||
do: {:error, :invalid_id}
|
||||
|
||||
def fetch_and_contain_remote_key(id) do
|
||||
Logger.debug("Fetching remote actor key #{id}")
|
||||
|
||||
fetch_and_contain_remote_ap_doc(
|
||||
id,
|
||||
# key IDs are alwas AP IDs which should resolve directly and exactly
|
||||
true,
|
||||
fn
|
||||
final_uri, %{"id" => user_id, "publicKey" => %{"id" => key_id}} ->
|
||||
# For non-fragment keys as used e.g. by GTS, the "id" won't match the fetch URI,
|
||||
# but the key ID will. Thus do NOT strict check the top-lelve id, but byte-exact
|
||||
# check the key ID (since for later lookups we need byte-exact matches).
|
||||
# This relies on fetching already enforcing a domain match for toplevel id and host
|
||||
with :ok <- Containment.contain_key_user(key_id, user_id),
|
||||
true <- key_id == final_uri do
|
||||
{:ok, key_id}
|
||||
else
|
||||
_ -> {:error, key_id}
|
||||
end
|
||||
|
||||
final_uri,
|
||||
%{"type" => "CryptographicKey", "id" => key_id, "owner" => user_id, "publicKeyPem" => _} ->
|
||||
# XXX: refactor into separate function isntead of duplicating
|
||||
with :ok <- Containment.contain_key_user(key_id, user_id),
|
||||
true <- key_id == final_uri do
|
||||
{:ok, key_id}
|
||||
else
|
||||
_ -> {:error, nil}
|
||||
end
|
||||
|
||||
_, _ ->
|
||||
{:error, nil}
|
||||
end
|
||||
)
|
||||
end
|
||||
|
||||
# Fetches an AP document and performing variable security checks on it.
|
||||
#
|
||||
# Note that the received documents "id" matching the final host domain
|
||||
# is always enforced before the custom ID check runs.
|
||||
@spec fetch_and_contain_remote_ap_doc(
|
||||
String.t(),
|
||||
boolean(),
|
||||
(String.t(), Map.t() -> {:ok | :error, String.t() | term()})
|
||||
) :: {:ok, Map.t()} | {:reject, term()} | {:error, term()}
|
||||
defp fetch_and_contain_remote_ap_doc(id, is_ap_id, verify_id) do
|
||||
Logger.debug("Dereferencing AP doc #{}")
|
||||
|
||||
with {:valid_uri_scheme, true} <- {:valid_uri_scheme, String.starts_with?(id, "http")},
|
||||
%URI{} = uri <- URI.parse(id),
|
||||
{:mrf_reject_check, {:ok, nil}} <-
|
||||
|
@ -277,7 +335,7 @@ def fetch_and_contain_remote_object_from_id(id, is_ap_id) when is_binary(id) do
|
|||
true <- !is_ap_id || final_id == id,
|
||||
{:ok, data} <- safe_json_decode(body),
|
||||
{_, :ok} <- {:containment, Containment.contain_origin(final_id, data)},
|
||||
{_, _, :ok} <- {:strict_id, data["id"], Containment.contain_id_to_fetch(final_id, data)} do
|
||||
{_, {:ok, _}} <- {:strict_id, verify_id.(final_id, data)} do
|
||||
unless Instances.reachable?(final_id) do
|
||||
Instances.set_reachable(final_id)
|
||||
end
|
||||
|
@ -289,14 +347,12 @@ def fetch_and_contain_remote_object_from_id(id, is_ap_id) when is_binary(id) do
|
|||
# Similarly keys, either use a fragment ID and are a subobjects or a distinct ID
|
||||
# but for compatibility are still a subobject presenting their owning actors ID at the toplevel.
|
||||
# Refetching _once_ from the listed id, should yield a strict match afterwards.
|
||||
{:strict_id, ap_id, _} = e ->
|
||||
case is_ap_id do
|
||||
false ->
|
||||
fetch_and_contain_remote_object_from_id(ap_id, true)
|
||||
|
||||
true ->
|
||||
log_fetch_error(id, e)
|
||||
{:error, :id_mismatch}
|
||||
{:strict_id, {_error, ap_id}} = e ->
|
||||
if !is_ap_id and is_binary(ap_id) do
|
||||
fetch_and_contain_remote_ap_doc(ap_id, true, verify_id)
|
||||
else
|
||||
log_fetch_error(id, e)
|
||||
{:error, :id_mismatch}
|
||||
end
|
||||
|
||||
{:mrf_reject_check, _} = e ->
|
||||
|
@ -327,9 +383,6 @@ def fetch_and_contain_remote_object_from_id(id, is_ap_id) when is_binary(id) do
|
|||
end
|
||||
end
|
||||
|
||||
def fetch_and_contain_remote_object_from_id(_id, _is_ap_id),
|
||||
do: {:error, :invalid_id}
|
||||
|
||||
# HOPEFULLY TEMPORARY
|
||||
# Basically none of our Tesla mocks in tests set the (supposed to
|
||||
# exist for Tesla proper) url parameter for their responses
|
||||
|
|
lib/pleroma/safe_zip.ex (new file, 216 lines)
@@ -0,0 +1,216 @@
# Akkoma: Magically expressive social media
# Copyright © 2024 Akkoma Authors <https://akkoma.dev/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.SafeZip do
  @moduledoc """
  Wraps the subset of Erlang's zip module we’d like to use,
  but enforces path-traversal safety everywhere along with other checks.

  For convenience almost all functions accept both Elixir strings and charlists,
  but always output Elixir strings themselves. However, this means the input parameter type
  can no longer be used to distinguish archive file paths from archive binary data in memory,
  thus where needed both a _data and a _file variant are provided.
  """

  @type text() :: String.t() | [char()]

  defp is_safe_path?(path) do
    # Path accepts Elixir’s chardata()
    case Path.safe_relative(path) do
      {:ok, _} -> true
      _ -> false
    end
  end

  defp is_safe_type?(file_type) do
    if file_type in [:regular, :directory] do
      true
    else
      false
    end
  end

  defp maybe_add_file(_type, _path_charlist, nil), do: nil

  defp maybe_add_file(:regular, path_charlist, file_list),
    do: [to_string(path_charlist) | file_list]

  defp maybe_add_file(_type, _path_charlist, file_list), do: file_list

  @spec check_safe_archive_and_maybe_list_files(binary() | [char()], [term()], boolean()) ::
          {:ok, [String.t()]} | {:error, reason :: term()}
  defp check_safe_archive_and_maybe_list_files(archive, opts, list) do
    acc = if list, do: [], else: nil

    with {:ok, table} <- :zip.table(archive, opts) do
      Enum.reduce_while(table, {:ok, acc}, fn
        # ZIP comment
        {:zip_comment, _}, acc ->
          {:cont, acc}

        # File entry
        {:zip_file, path, info, _comment, _offset, _comp_size}, {:ok, fl} ->
          with {_, type} <- {:get_type, elem(info, 2)},
               {_, true} <- {:type, is_safe_type?(type)},
               {_, true} <- {:safe_path, is_safe_path?(path)} do
            {:cont, {:ok, maybe_add_file(type, path, fl)}}
          else
            {:get_type, e} ->
              {:halt,
               {:error, "Couldn't determine file type of ZIP entry at #{path} (#{inspect(e)})"}}

            {:type, _} ->
              {:halt, {:error, "Potentially unsafe file type in ZIP at: #{path}"}}

            {:safe_path, _} ->
              {:halt, {:error, "Unsafe path in ZIP: #{path}"}}
          end

        # new OTP version?
        _, _acc ->
          {:halt, {:error, "Unknown ZIP record type"}}
      end)
    end
  end

  @spec check_safe_archive_and_list_files(binary() | [char()], [term()]) ::
          {:ok, [String.t()]} | {:error, reason :: term()}
  defp check_safe_archive_and_list_files(archive, opts \\ []) do
    check_safe_archive_and_maybe_list_files(archive, opts, true)
  end

  @spec check_safe_archive(binary() | [char()], [term()]) :: :ok | {:error, reason :: term()}
  defp check_safe_archive(archive, opts \\ []) do
    case check_safe_archive_and_maybe_list_files(archive, opts, false) do
      {:ok, _} -> :ok
      error -> error
    end
  end

  @spec check_safe_file_list([text()], text()) :: :ok | {:error, term()}
  defp check_safe_file_list([], _), do: :ok

  defp check_safe_file_list([path | tail], cwd) do
    with {_, true} <- {:path, is_safe_path?(path)},
         {_, {:ok, fstat}} <- {:stat, File.stat(Path.expand(path, cwd))},
         {_, true} <- {:type, is_safe_type?(fstat.type)} do
      check_safe_file_list(tail, cwd)
    else
      {:path, _} ->
        {:error, "Unsafe path escaping cwd: #{path}"}

      {:stat, e} ->
        {:error, "Unable to check file type of #{path}: #{inspect(e)}"}

      {:type, _} ->
        {:error, "Unsafe type at #{path}"}
    end
  end

  defp check_safe_file_list(_, _), do: {:error, "Malformed file_list"}

  @doc """
  Checks whether the archive data contains file entries for all paths from fset.

  Note this really only accepts entries corresponding to regular _files_;
  if a path is contained as, for example, a directory, this does not count as a match.
  """
  @spec contains_all_data?(binary(), MapSet.t()) :: true | false
  def contains_all_data?(archive_data, fset) do
    with {:ok, table} <- :zip.table(archive_data) do
      remaining =
        Enum.reduce(table, fset, fn
          {:zip_file, path, info, _comment, _offset, _comp_size}, fset ->
            if elem(info, 2) == :regular do
              MapSet.delete(fset, path)
            else
              fset
            end

          _, fset ->
            fset
        end)
        |> MapSet.size()

      if remaining == 0, do: true, else: false
    else
      _ -> false
    end
  end

  @doc """
  List all file entries in ZIP, or error if invalid or unsafe.

  Note this really only lists regular files; no directories, ZIP comments or other types!
  """
  @spec list_dir_file(text()) :: {:ok, [String.t()]} | {:error, reason :: term()}
  def list_dir_file(archive) do
    path = to_charlist(archive)
    check_safe_archive_and_list_files(path)
  end

  defp stringify_zip({:ok, {fname, data}}), do: {:ok, {to_string(fname), data}}
  defp stringify_zip({:ok, fname}), do: {:ok, to_string(fname)}
  defp stringify_zip(ret), do: ret

  @spec zip(text(), [text()], text(), boolean()) ::
          {:ok, file_name :: String.t()}
          | {:ok, {file_name :: String.t(), file_data :: binary()}}
          | {:error, reason :: term()}
  def zip(name, file_list, cwd, memory \\ false) do
    opts = [{:cwd, to_charlist(cwd)}]
    opts = if memory, do: [:memory | opts], else: opts

    with :ok <- check_safe_file_list(file_list, cwd) do
      file_list = for f <- file_list, do: to_charlist(f)
      name = to_charlist(name)
      stringify_zip(:zip.zip(name, file_list, opts))
    end
  end

  @spec unzip_file(text(), text(), [text()] | nil) ::
          {:ok, [String.t()]}
          | {:error, reason :: term()}
          | {:error, {name :: text(), reason :: term()}}
  def unzip_file(archive, target_dir, file_list \\ nil) do
    do_unzip(to_charlist(archive), to_charlist(target_dir), file_list)
  end

  @spec unzip_data(binary(), text(), [text()] | nil) ::
          {:ok, [String.t()]}
          | {:error, reason :: term()}
          | {:error, {name :: text(), reason :: term()}}
  def unzip_data(archive, target_dir, file_list \\ nil) do
    do_unzip(archive, to_charlist(target_dir), file_list)
  end

  defp stringify_unzip({:ok, [{_fname, _data} | _] = filebinlist}),
    do: {:ok, Enum.map(filebinlist, fn {fname, data} -> {to_string(fname), data} end)}

  defp stringify_unzip({:ok, [_fname | _] = filelist}),
    do: {:ok, Enum.map(filelist, fn fname -> to_string(fname) end)}

  defp stringify_unzip({:error, {fname, term}}), do: {:error, {to_string(fname), term}}
  defp stringify_unzip(ret), do: ret

  @spec do_unzip(binary() | [char()], text(), [text()] | nil) ::
          {:ok, [String.t()]}
          | {:error, reason :: term()}
          | {:error, {name :: text(), reason :: term()}}
  defp do_unzip(archive, target_dir, file_list) do
    opts =
      if file_list != nil do
        [
          file_list: for(f <- file_list, do: to_charlist(f)),
          cwd: target_dir
        ]
      else
        [cwd: target_dir]
      end

    with :ok <- check_safe_archive(archive) do
      stringify_unzip(:zip.unzip(archive, opts))
    end
  end
end
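
Taken together, the module provides drop-in replacements for the raw `:zip` calls removed throughout this commit. A brief, hedged usage sketch (the paths are hypothetical):

```elixir
# Listing entries fails early on unsafe paths or entry types.
case Pleroma.SafeZip.list_dir_file("/tmp/pack.zip") do
  {:ok, files} -> files                # plain Elixir strings, regular files only
  {:error, reason} -> IO.puts(reason)  # e.g. "Unsafe path in ZIP: ../evil"
end

# Extraction validates the whole archive before touching the filesystem.
{:ok, _extracted} = Pleroma.SafeZip.unzip_file("/tmp/pack.zip", "/tmp/out")

# Passing true for the final argument creates the archive in memory,
# mirroring :zip.zip/3 with the :memory option.
{:ok, {_name, _data}} = Pleroma.SafeZip.zip("pack.zip", ["pack.json"], "/tmp/out", true)
```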

@@ -9,37 +9,40 @@ defmodule Pleroma.Signature do
  alias Pleroma.User.SigningKey
  require Logger

  def key_id_to_actor_id(key_id) do
    case SigningKey.key_id_to_ap_id(key_id) do
      nil ->
        # hm, we SHOULD have gotten this in the pipeline before we hit here!
        Logger.error("Could not figure out who owns the key #{key_id}")
        {:error, :key_owner_not_found}

      key ->
        {:ok, key}
    end
  end

  def fetch_public_key(conn) do
    with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn),
         {:ok, %SigningKey{}} <- SigningKey.get_or_fetch_by_key_id(kid),
         {:ok, actor_id} <- key_id_to_actor_id(kid),
         {:ok, public_key} <- User.get_public_key_for_ap_id(actor_id) do
      {:ok, public_key}
    with {_, %{"keyId" => kid}} <- {:keyid, HTTPSignatures.signature_for_conn(conn)},
         {_, {:ok, %SigningKey{} = sk}, _} <-
           {:fetch, SigningKey.get_or_fetch_by_key_id(kid), kid},
         {_, {:ok, decoded_key}} <- {:decode, SigningKey.public_key_decoded(sk)} do
      {:ok, decoded_key}
    else
      {:fetch, error, kid} ->
        Logger.error("Failed to acquire key from signature: #{kid} #{inspect(error)}")
        {:error, {:fetch, error}}

      e ->
        {:error, e}
    end
  end

  def refetch_public_key(conn) do
    with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn),
         {:ok, %SigningKey{}} <- SigningKey.get_or_fetch_by_key_id(kid),
         {:ok, actor_id} <- key_id_to_actor_id(kid),
         {:ok, public_key} <- User.get_public_key_for_ap_id(actor_id) do
      {:ok, public_key}
    with {_, %{"keyId" => kid}} <- {:keyid, HTTPSignatures.signature_for_conn(conn)},
         {_, {:ok, %SigningKey{} = sk}, _} <- {:fetch, SigningKey.refresh_by_key_id(kid), kid},
         {_, {:ok, decoded_key}} <- {:decode, SigningKey.public_key_decoded(sk)} do
      {:ok, decoded_key}
    else
      {:fetch, {:error, :too_young}, kid} ->
        Logger.debug("Refusing to refetch recently updated key: #{kid}")
        {:error, {:fetch, :too_young}}

      {:fetch, {:error, :unknown}, kid} ->
        Logger.warning("Attempted to refresh unknown key; this should not happen: #{kid}")
        {:error, {:fetch, :unknown}}

      {:fetch, error, kid} ->
        Logger.error("Failed to refresh stale key from signature: #{kid} #{inspect(error)}")
        {:error, {:fetch, error}}

      e ->
        {:error, e}
    end
  end

@@ -100,7 +100,6 @@ defmodule Pleroma.User do
    field(:password_hash, :string)
    field(:password, :string, virtual: true)
    field(:password_confirmation, :string, virtual: true)
    field(:keys, :string)
    field(:ap_id, :string)
    field(:avatar, :map, default: %{})
    field(:local, :boolean, default: true)

@@ -222,7 +221,9 @@ defmodule Pleroma.User do

    # FOR THE FUTURE: We might want to make this a one-to-many relationship
    # it's entirely possible right now, but we don't have a use case for it
    has_one(:signing_key, SigningKey, foreign_key: :user_id)
    # XXX: in the future we’d also like to parse and honour key expiration times
    # instead of blindly accepting any change in signing keys
    has_one(:signing_key, SigningKey, foreign_key: :user_id, on_replace: :update)

    timestamps()
  end

@@ -2067,17 +2068,6 @@ defp create_service_actor(uri, nickname) do

  defdelegate public_key(user), to: SigningKey

  def get_public_key_for_ap_id(ap_id) do
    with {:ok, %User{} = user} <- get_or_fetch_by_ap_id(ap_id),
         {:ok, public_key} <- SigningKey.public_key(user) do
      {:ok, public_key}
    else
      e ->
        Logger.error("Could not get public key for #{ap_id}.\n#{inspect(e)}")
        {:error, e}
    end
  end

  @doc "Gets or fetches a user by URI or nickname."
  @spec get_or_fetch(String.t()) :: {:ok, User.t()} | {:error, String.t()}
  def get_or_fetch("http://" <> _host = uri), do: get_or_fetch_by_ap_id(uri)
@@ -119,7 +119,7 @@ def process(%__MODULE__{} = backup) do
    end
  end

  @files [~c"actor.json", ~c"outbox.json", ~c"likes.json", ~c"bookmarks.json"]
  @files ["actor.json", "outbox.json", "likes.json", "bookmarks.json"]
  def export(%__MODULE__{} = backup) do
    backup = Repo.preload(backup, :user)
    name = String.trim_trailing(backup.file_name, ".zip")

@@ -130,10 +130,9 @@ def export(%__MODULE__{} = backup) do
         :ok <- statuses(dir, backup.user),
         :ok <- likes(dir, backup.user),
         :ok <- bookmarks(dir, backup.user),
         {:ok, zip_path} <-
           :zip.create(String.to_charlist(dir <> ".zip"), @files, cwd: String.to_charlist(dir)),
         {:ok, zip_path} <- Pleroma.SafeZip.zip(dir <> ".zip", @files, dir),
         {:ok, _} <- File.rm_rf(dir) do
      {:ok, to_string(zip_path)}
      {:ok, zip_path}
    end
  end
@@ -56,16 +56,10 @@ def key_id_to_user_id(key_id) do
  def key_id_to_ap_id(key_id) do
    Logger.debug("Looking up key ID: #{key_id}")

    result =
      from(sk in __MODULE__, where: sk.key_id == ^key_id)
      |> join(:inner, [sk], u in User, on: sk.user_id == u.id)
      |> select([sk, u], %{user: u})
      |> Repo.one()

    case result do
      %{user: %User{ap_id: ap_id}} -> ap_id
      _ -> nil
    end
    from(sk in __MODULE__, where: sk.key_id == ^key_id)
    |> join(:inner, [sk], u in User, on: sk.user_id == u.id)
    |> select([sk, u], u.ap_id)
    |> Repo.one()
  end

  @spec generate_rsa_pem() :: {:ok, binary()}

@@ -115,24 +109,18 @@ def private_pem_to_public_pem(private_pem) do
    {:ok, :public_key.pem_encode([public_key])}
  end

  @spec public_key(User.t()) :: {:ok, binary()} | {:error, String.t()}
  @spec public_key(__MODULE__) :: {:ok, binary()} | {:error, String.t()}
  @doc """
  Given a user, return the public key for that user in binary format.
  Return public key data in binary format.
  """
  def public_key(%User{} = user) do
    case Repo.preload(user, :signing_key) do
      %User{signing_key: %__MODULE__{public_key: public_key_pem}} ->
        key =
          public_key_pem
          |> :public_key.pem_decode()
          |> hd()
          |> :public_key.pem_entry_decode()
  def public_key_decoded(%__MODULE__{public_key: public_key_pem}) do
    decoded =
      public_key_pem
      |> :public_key.pem_decode()
      |> hd()
      |> :public_key.pem_entry_decode()

        {:ok, key}

      _ ->
        {:error, "key not found"}
    end
    {:ok, decoded}
  end

  def public_key(_), do: {:error, "key not found"}

@@ -174,12 +162,12 @@ def private_key(%User{} = user) do
  Will either return the key if it exists locally, or fetch it from the remote instance.
  """
  def get_or_fetch_by_key_id(key_id) do
    case key_id_to_user_id(key_id) do
    case Repo.get_by(__MODULE__, key_id: key_id) do
      nil ->
        fetch_remote_key(key_id)

      user_id ->
        {:ok, Repo.get_by(__MODULE__, user_id: user_id)}
      key ->
        {:ok, key}
    end
  end

@@ -195,20 +183,28 @@ def get_or_fetch_by_key_id(key_id) do
  def fetch_remote_key(key_id) do
    Logger.debug("Fetching remote key: #{key_id}")

    with {:ok, _body} = resp <-
           Pleroma.Object.Fetcher.fetch_and_contain_remote_object_from_id(key_id),
         {:ok, ap_id, public_key_pem} <- handle_signature_response(resp) do
    with {:ok, resp_body} <-
           Pleroma.Object.Fetcher.fetch_and_contain_remote_key(key_id),
         {:ok, ap_id, public_key_pem} <- handle_signature_response(resp_body),
         {:ok, user} <- User.get_or_fetch_by_ap_id(ap_id) do
      Logger.debug("Fetched remote key: #{ap_id}")
      # fetch the user
      {:ok, user} = User.get_or_fetch_by_ap_id(ap_id)
      # store the key
      key = %__MODULE__{
      key = %{
        user_id: user.id,
        public_key: public_key_pem,
        key_id: key_id
      }

      Repo.insert(key, on_conflict: :replace_all, conflict_target: :key_id)
      key_cs =
        cast(%__MODULE__{}, key, [:user_id, :public_key, :key_id])
        |> unique_constraint(:user_id)

      Repo.insert(key_cs,
        # while this should never run for local users anyway, make sure we really never lose privkey info!
        on_conflict: {:replace_all_except, [:inserted_at, :private_key]},
        # if the key owner overlaps with a distinct existing key entry, this intentionally still errors
        conflict_target: :key_id
      )
    else
      e ->
        Logger.debug("Failed to fetch remote key: #{inspect(e)}")

@@ -216,6 +212,23 @@ def fetch_remote_key(key_id) do
    end
  end

  defp refresh_key(%__MODULE__{} = key) do
    min_backoff = Pleroma.Config.get!([:activitypub, :min_key_refetch_interval])

    if Timex.diff(Timex.now(), key.updated_at, :seconds) >= min_backoff do
      fetch_remote_key(key.key_id)
    else
      {:error, :too_young}
    end
  end

  def refresh_by_key_id(key_id) do
    case Repo.get_by(__MODULE__, key_id: key_id) do
      nil -> {:error, :unknown}
      key -> refresh_key(key)
    end
  end
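
The refresh backoff is driven by the `min_key_refetch_interval` setting added to the config earlier in this diff (86_400 seconds, i.e. one day). A hedged sketch of the resulting behaviour (the key IDs are made up):

```elixir
# config :pleroma, :activitypub, min_key_refetch_interval: 86_400

# A known key updated less than min_key_refetch_interval ago is not refetched:
SigningKey.refresh_by_key_id("https://remote.example/users/bob#main-key")
#=> {:error, :too_young}

# A key that was never stored cannot be refreshed, only fetched:
SigningKey.refresh_by_key_id("https://remote.example/users/unknown#main-key")
#=> {:error, :unknown}
```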
|
||||
|
||||
# Take the response from the remote instance and extract the key details
|
||||
# will check if the key ID matches the owner of the key, if not, error
|
||||
defp extract_key_details(%{"id" => ap_id, "publicKey" => public_key}) do
|
||||
|
@ -227,7 +240,7 @@ defp extract_key_details(%{"id" => ap_id, "publicKey" => public_key}) do
|
|||
end
|
||||
end
|
||||
|
||||
defp handle_signature_response({:ok, body}) do
|
||||
defp handle_signature_response(body) do
|
||||
case body do
|
||||
%{
|
||||
"type" => "CryptographicKey",
|
||||
|
@ -247,9 +260,9 @@ defp handle_signature_response({:ok, body}) do
|
|||
|
||||
%{"error" => error} ->
|
||||
{:error, error}
|
||||
|
||||
other ->
|
||||
{:error, "Could not process key: #{inspect(other)}"}
|
||||
end
|
||||
end
|
||||
|
||||
defp handle_signature_response({:error, e}), do: {:error, e}
|
||||
defp handle_signature_response(other), do: {:error, "Could not fetch key: #{inspect(other)}"}
|
||||
end
|
||||
|
|
|
@@ -22,7 +22,8 @@ def validate(object, meta)

  def validate(%{"type" => type, "id" => _id} = data, meta)
      when type in Pleroma.Constants.actor_types() do
    with :ok <- validate_inbox(data),
    with :ok <- validate_pubkey(data),
         :ok <- validate_inbox(data),
         :ok <- contain_collection_origin(data) do
      {:ok, data, meta}
    else

@@ -33,6 +34,21 @@ def validate(%{"type" => type, "id" => _id} = data, meta)

  def validate(_, _), do: {:error, "Not a user object"}

  defp validate_pubkey(%{"id" => user_id, "publicKey" => %{"id" => pk_id, "publicKeyPem" => _key}}) do
    with {_, true} <- {:user, is_binary(user_id)},
         {_, true} <- {:key, is_binary(pk_id)},
         :ok <- Containment.contain_key_user(pk_id, user_id) do
      :ok
    else
      {:user, _} -> {:error, "Invalid user id: #{inspect(user_id)}"}
      {:key, _} -> {:error, "Invalid key id: #{inspect(pk_id)}"}
      :error -> {:error, "Problematic actor-key pairing: #{user_id} - #{pk_id}"}
    end
  end

  # pubkey is optional atm
  defp validate_pubkey(_data), do: :ok

  defp validate_inbox(%{"id" => id, "inbox" => inbox}) do
    case Containment.same_origin(id, inbox) do
      :ok -> :ok
@@ -29,6 +29,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do

  @doc """
  Modifies an incoming AP object (mastodon format) to our internal format.
  (This only deals with non-activity AP objects.)
  """
  def fix_object(object, options \\ []) do
    object

@@ -44,6 +45,38 @@ def fix_object(object, options \\ []) do
    |> fix_content_map()
    |> fix_addressing()
    |> fix_summary()
    |> fix_history(&fix_object/1)
  end

  defp maybe_fix_object(%{"attributedTo" => _} = object), do: fix_object(object)
  defp maybe_fix_object(object), do: object

  defp fix_history(%{"formerRepresentations" => %{"orderedItems" => list}} = obj, fix_fun)
       when is_list(list) do
    update_in(obj["formerRepresentations"]["orderedItems"], fn h -> Enum.map(h, fix_fun) end)
  end

  defp fix_history(obj, _), do: obj

  defp fix_recursive(obj, fun) do
    # unlike Erlang, Elixir does not support recursive inline functions,
    # which would allow us to avoid reconstructing this on every recursion
    rec_fun = fn
      obj when is_map(obj) -> fix_recursive(obj, fun)
      # there may be simple AP IDs in history (or the object field)
      obj -> obj
    end

    obj
    |> fun.()
    |> fix_history(rec_fun)
    |> then(fn
      %{"object" => object} = doc when is_map(object) ->
        update_in(doc["object"], rec_fun)

      apdoc ->
        apdoc
    end)
  end

  def fix_summary(%{"summary" => nil} = object) do

@@ -414,17 +447,10 @@ defp get_reported(objects) do
  end

  def handle_incoming(data, options \\ []) do
    data = normalise_addressing_public(data)

    data =
      if data["object"] != nil do
        object = normalise_addressing_public(data["object"])
        Map.put(data, "object", object)
      else
        data
      end

    handle_incoming_normalised(data, options)
    data
    |> fix_recursive(&normalise_addressing_public/1)
    |> fix_recursive(&strip_internal_fields/1)
    |> handle_incoming_normalised(options)
  end

  defp handle_incoming_normalised(data, options)

@@ -490,7 +516,6 @@ defp handle_incoming_normalised(

    object =
      data["object"]
      |> strip_internal_fields()
      |> fix_type(fetch_options)
      |> fix_in_reply_to(fetch_options)
      |> fix_quote_url(fetch_options)

@@ -545,6 +570,9 @@ defp handle_incoming_normalised(
         _options
       )
       when type in ~w{Update Block Follow Accept Reject} do
    fixed_obj = maybe_fix_object(data["object"])
    data = if fixed_obj != nil, do: %{data | "object" => fixed_obj}, else: data

    with {:ok, %User{}} <- ObjectValidator.fetch_actor(data),
         {:ok, activity, _} <-
           Pipeline.common_pipeline(data, local: false) do

@@ -560,7 +588,7 @@ defp handle_incoming_normalised(

    with {_, {:ok, object_id}} <- {:object_id, oid_result},
         object <- Object.get_cached_by_ap_id(object_id),
         {_, false} <- {:tombstone, Object.is_tombstone_object?(object) && !data["actor"]},
         {_, false} <- {:tombstone, Object.tombstone_object?(object) && !data["actor"]},
         {:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do
      {:ok, activity}
    else

@@ -1043,6 +1071,8 @@ def prepare_attachments(object) do
    Map.put(object, "attachment", attachments)
  end

  # for outgoing docs, immediately stripping internal fields recursively breaks later emoji transformations
  # (XXX: it would be better to reorder operations so we can always use recursive stripping)
  def strip_internal_fields(object) do
    Map.drop(object, Pleroma.Constants.object_internal_fields())
  end
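
To illustrate the new `fix_recursive/2` helper from this file, a hedged sketch with a made-up transformation function: the helper applies the function to the top-level document, recurses into a map-valued "object", and maps over any `formerRepresentations` history, while plain AP ID strings pass through untouched.

```elixir
drop_likes = fn obj -> Map.drop(obj, ["likes"]) end

doc = %{
  "type" => "Update",
  "likes" => [],
  "object" => %{
    "id" => "https://example.com/objects/1",
    "likes" => [],
    "formerRepresentations" => %{
      "orderedItems" => [%{"likes" => []}, "https://example.com/objects/0"]
    }
  }
}

# fix_recursive(doc, drop_likes) removes "likes" from the activity, from the
# embedded object, and from each map entry in the edit history; the bare AP ID
# string inside orderedItems is returned unchanged.
```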
@@ -94,6 +94,7 @@ def perform(:incoming_ap_doc, params) do
    with nil <- Activity.normalize(params["id"]),
         {_, :ok} <-
           {:correct_origin?, Containment.contain_origin_from_id(actor, params)},
         {_, :ok, _} <- {:local, Containment.contain_local_fetch(actor), actor},
         {:ok, activity} <- Transmogrifier.handle_incoming(params) do
      {:ok, activity}
    else

@@ -101,6 +102,13 @@ def perform(:incoming_ap_doc, params) do
        Logger.debug("Origin containment failure for #{params["id"]}")
        {:error, :origin_containment_failed}

      {:local, _, actor} ->
        Logger.alert(
          "Received incoming AP doc with valid signature for local actor #{actor}! Likely key leak!\n#{inspect(params)}"
        )

        {:error, :origin_containment_failed}

      %Activity{} ->
        Logger.debug("Already had #{params["id"]}")
        {:error, :already_present}
@@ -8,8 +8,8 @@ defmodule Pleroma.Web.Plugs.HTTPSignaturePlug do

  use Pleroma.Web, :verified_routes
  alias Pleroma.Activity
  alias Pleroma.Signature
  alias Pleroma.Instances
  alias Pleroma.User.SigningKey
  require Logger

  @cachex Pleroma.Config.get([:cachex, :provider], Cachex)

@@ -77,10 +77,6 @@ defp assign_valid_signature_on_route_aliases(conn, [path | rest]) do
    |> put_req_header("(request-target)", request_target)
    |> maybe_put_created_psudoheader()
    |> maybe_put_expires_psudoheader()
    |> case do
      %{assigns: %{digest: digest}} = conn -> put_req_header(conn, "digest", digest)
      conn -> conn
    end

    conn
    |> assign(:valid_signature, HTTPSignatures.validate_conn(conn))

@@ -93,7 +89,13 @@ defp maybe_assign_valid_signature(conn) do
      # set (request-target) header to the appropriate value
      # we also replace the digest header with the one we computed
      possible_paths =
        route_aliases(conn) ++ [conn.request_path, conn.request_path <> "?#{conn.query_string}"]
        [conn.request_path, conn.request_path <> "?#{conn.query_string}" | route_aliases(conn)]

      conn =
        case conn do
          %{assigns: %{digest: digest}} = conn -> put_req_header(conn, "digest", digest)
          conn -> conn
        end

      assign_valid_signature_on_route_aliases(conn, possible_paths)
    else

@@ -140,15 +142,17 @@ defp maybe_require_signature(conn), do: conn

  defp signature_host(conn) do
    with {:key_id, %{"keyId" => kid}} <- {:key_id, HTTPSignatures.signature_for_conn(conn)},
         {:actor_id, {:ok, actor_id}} <- {:actor_id, Signature.key_id_to_actor_id(kid)} do
         {:actor_id, actor_id, _} when actor_id != nil <-
           {:actor_id, SigningKey.key_id_to_ap_id(kid), kid} do
      actor_id
    else
      {:key_id, e} ->
        Logger.error("Failed to extract key_id from signature: #{inspect(e)}")
        nil

      {:actor_id, e} ->
        Logger.error("Failed to extract actor_id from signature: #{inspect(e)}")
      {:actor_id, _, kid} ->
        # SigningKeys SHOULD have been fetched before this gets called!
        Logger.error("Failed to extract actor_id from signature: signing key #{kid} not known")
        nil
    end
  end
@@ -62,10 +62,10 @@ defp call_static(%{request_path: request_path} = conn, opts, from) do
    opts =
      opts
      |> Map.put(:from, from)
      |> Map.put(:set_content_type, false)
      |> Map.put(:content_type, false)

    conn
    |> set_static_content_type(request_path)
    |> Pleroma.Web.Plugs.StaticNoCT.call(opts)
    |> Plug.Static.call(opts)
  end
end
@@ -1,469 +0,0 @@
# This is almost identical to Plug.Static from Plug 1.15.3 (2024-01-16)
# It being copied is a temporary measure to fix an urgent bug without
# needing to wait for merge of a suitable patch upstream
# The differences are:
# - this leading comment
# - renaming of the module from 'Plug.Static' to 'Pleroma.Web.Plugs.StaticNoCT'
# - addition of the set_content_type option

defmodule Pleroma.Web.Plugs.StaticNoCT do
  @moduledoc """
  A plug for serving static assets.

  It requires two options:

    * `:at` - the request path to reach for static assets.
      It must be a string.

    * `:from` - the file system path to read static assets from.
      It can be either: a string containing a file system path, an
      atom representing the application name (where assets will
      be served from `priv/static`), a tuple containing the
      application name and the directory to serve assets from (besides
      `priv/static`), or an MFA tuple.

  The preferred form is to use `:from` with an atom or tuple, since
  it will make your application independent from the starting directory.
  For example, if you pass:

      plug Plug.Static, from: "priv/app/path"

  Plug.Static will be unable to serve assets if you build releases
  or if you change the current directory. Instead do:

      plug Plug.Static, from: {:app_name, "priv/app/path"}

  If a static asset cannot be found, `Plug.Static` simply forwards
  the connection to the rest of the pipeline.

  ## Cache mechanisms

  `Plug.Static` uses etags for HTTP caching. This means browsers/clients
  should cache assets on the first request and validate the cache on
  following requests, not downloading the static asset once again if it
  has not changed. The cache-control for etags is specified by the
  `cache_control_for_etags` option and defaults to `"public"`.

  However, `Plug.Static` also supports direct cache control by using
  versioned query strings. If the request query string starts with
  "?vsn=", `Plug.Static` assumes the application is versioning assets
  and does not set the `ETag` header, meaning the cache behaviour will
  be specified solely by the `cache_control_for_vsn_requests` config,
  which defaults to `"public, max-age=31536000"`.

  ## Options

    * `:encodings` - list of 2-ary tuples where first value is value of
      the `Accept-Encoding` header and second is extension of the file to
      be served if given encoding is accepted by client. Entries will be tested
      in order in list, so entries higher in list will be preferred. Defaults
      to: `[]`.

      In addition to setting this value directly it supports 2 additional
      options for compatibility reasons:

        + `:brotli` - will append `{"br", ".br"}` to the encodings list.
        + `:gzip` - will append `{"gzip", ".gz"}` to the encodings list.

      Additional options will be added in the above order (Brotli takes
      preference over Gzip) to reflect older behaviour which was set due
      to fact that Brotli in general provides better compression ratio than
      Gzip.

    * `:cache_control_for_etags` - sets the cache header for requests
      that use etags. Defaults to `"public"`.

    * `:etag_generation` - specify a `{module, function, args}` to be used
      to generate an etag. The `path` of the resource will be passed to
      the function, as well as the `args`. If this option is not supplied,
      etags will be generated based off of file size and modification time.
      Note it is [recommended for the etag value to be quoted](https://tools.ietf.org/html/rfc7232#section-2.3),
      which Plug won't do automatically.

    * `:cache_control_for_vsn_requests` - sets the cache header for
      requests starting with "?vsn=" in the query string. Defaults to
      `"public, max-age=31536000"`.

    * `:only` - filters which requests to serve. This is useful to avoid
      file system access on every request when this plug is mounted
      at `"/"`. For example, if `only: ["images", "favicon.ico"]` is
      specified, only files in the "images" directory and the
      "favicon.ico" file will be served by `Plug.Static`.
      Note that `Plug.Static` matches these filters against request
      uri and not against the filesystem. When requesting
      a file with name containing non-ascii or special characters,
      you should use urlencoded form. For example, you should write
      `only: ["file%20name"]` instead of `only: ["file name"]`.
      Defaults to `nil` (no filtering).

    * `:only_matching` - a relaxed version of `:only` that will
      serve any request as long as one of the given values matches the
      given path. For example, `only_matching: ["images", "favicon"]`
      will match any request that starts at "images" or "favicon",
      be it "/images/foo.png", "/images-high/foo.png", "/favicon.ico"
      or "/favicon-high.ico". Such matches are useful when serving
      digested files at the root. Defaults to `nil` (no filtering).

    * `:headers` - other headers to be set when serving static assets. Specify either
      an enum of key-value pairs or a `{module, function, args}` to return an enum. The
      `conn` will be passed to the function, as well as the `args`.

    * `:content_types` - custom MIME type mapping. As a map with filename as key
      and content type as value. For example:
      `content_types: %{"apple-app-site-association" => "application/json"}`.

    * `:set_content_type` - by default Plug.Static (re)sets the content type header
      using auto-detection and the `:content_types` map. But when set to `false`
      no content-type header will be inserted, instead retaining the original
      value or lack thereof. This can be useful when custom logic for appropriate
      content types is needed which cannot be reasonably expressed as a static
      filename map.

  ## Examples

  This plug can be mounted in a `Plug.Builder` pipeline as follows:

      defmodule MyPlug do
        use Plug.Builder

        plug Plug.Static,
          at: "/public",
          from: :my_app,
          only: ~w(images robots.txt)
        plug :not_found

        def not_found(conn, _) do
          send_resp(conn, 404, "not found")
        end
      end

  """

  @behaviour Plug
  @allowed_methods ~w(GET HEAD)

  import Plug.Conn
  alias Plug.Conn

  # In this module, the `:prim_file` Erlang module along with the `:file_info`
  # record are used instead of the more common and Elixir-y `File` module and
  # `File.Stat` struct, respectively. The reason behind this is performance: all
  # the `File` operations pass through a single process in order to support node
  # operations that we simply don't need when serving assets.

  require Record
  Record.defrecordp(:file_info, Record.extract(:file_info, from_lib: "kernel/include/file.hrl"))

  defmodule InvalidPathError do
    defexception message: "invalid path for static asset", plug_status: 400
  end

  @impl true
  def init(opts) do
    from =
      case Keyword.fetch!(opts, :from) do
        {_, _} = from -> from
        {_, _, _} = from -> from
        from when is_atom(from) -> {from, "priv/static"}
        from when is_binary(from) -> from
        _ -> raise ArgumentError, ":from must be an atom, a binary or a tuple"
      end

    encodings =
      opts
      |> Keyword.get(:encodings, [])
      |> maybe_add("br", ".br", Keyword.get(opts, :brotli, false))
      |> maybe_add("gzip", ".gz", Keyword.get(opts, :gzip, false))

    %{
      encodings: encodings,
      only_rules: {Keyword.get(opts, :only, []), Keyword.get(opts, :only_matching, [])},
      qs_cache: Keyword.get(opts, :cache_control_for_vsn_requests, "public, max-age=31536000"),
      et_cache: Keyword.get(opts, :cache_control_for_etags, "public"),
      et_generation: Keyword.get(opts, :etag_generation, nil),
      headers: Keyword.get(opts, :headers, %{}),
      content_types: Keyword.get(opts, :content_types, %{}),
      set_content_type: Keyword.get(opts, :set_content_type, true),
      from: from,
      at: opts |> Keyword.fetch!(:at) |> Plug.Router.Utils.split()
    }
  end

  @impl true
  def call(
        conn = %Conn{method: meth},
        %{at: at, only_rules: only_rules, from: from, encodings: encodings} = options
      )
      when meth in @allowed_methods do
    segments = subset(at, conn.path_info)

    if allowed?(only_rules, segments) do
      segments = Enum.map(segments, &uri_decode/1)

      if invalid_path?(segments) do
        raise InvalidPathError, "invalid path for static asset: #{conn.request_path}"
      end

      path = path(from, segments)
      range = get_req_header(conn, "range")
      encoding = file_encoding(conn, path, range, encodings)
      serve_static(encoding, conn, segments, range, options)
    else
      conn
    end
  end

  def call(conn, _options) do
    conn
  end

  defp uri_decode(path) do
    # TODO: Remove rescue as this can't fail from Elixir v1.13
    try do
      URI.decode(path)
    rescue
      ArgumentError ->
        raise InvalidPathError
    end
  end

  defp allowed?(_only_rules, []), do: false
  defp allowed?({[], []}, _list), do: true

  defp allowed?({full, prefix}, [h | _]) do
    h in full or (prefix != [] and match?({0, _}, :binary.match(h, prefix)))
  end

  defp maybe_put_content_type(conn, false, _, _), do: conn

  defp maybe_put_content_type(conn, _, types, filename) do
    content_type = Map.get(types, filename) || MIME.from_path(filename)

    conn
    |> put_resp_header("content-type", content_type)
  end

  defp serve_static({content_encoding, file_info, path}, conn, segments, range, options) do
    %{
      qs_cache: qs_cache,
      et_cache: et_cache,
      et_generation: et_generation,
      headers: headers,
      content_types: types,
      set_content_type: set_content_type
    } = options

    case put_cache_header(conn, qs_cache, et_cache, et_generation, file_info, path) do
      {:stale, conn} ->
        filename = List.last(segments)

        conn
        |> maybe_put_content_type(set_content_type, types, filename)
        |> put_resp_header("accept-ranges", "bytes")
        |> maybe_add_encoding(content_encoding)
        |> merge_headers(headers)
        |> serve_range(file_info, path, range, options)

      {:fresh, conn} ->
        conn
        |> maybe_add_vary(options)
        |> send_resp(304, "")
        |> halt()
    end
  end

  defp serve_static(:error, conn, _segments, _range, _options) do
    conn
  end

  defp serve_range(conn, file_info, path, [range], options) do
    file_info(size: file_size) = file_info

    with %{"bytes" => bytes} <- Plug.Conn.Utils.params(range),
         {range_start, range_end} <- start_and_end(bytes, file_size) do
      send_range(conn, path, range_start, range_end, file_size, options)
    else
      _ -> send_entire_file(conn, path, options)
    end
  end

  defp serve_range(conn, _file_info, path, _range, options) do
    send_entire_file(conn, path, options)
  end

  defp start_and_end("-" <> rest, file_size) do
    case Integer.parse(rest) do
      {last, ""} when last > 0 and last <= file_size -> {file_size - last, file_size - 1}
      _ -> :error
    end
  end

  defp start_and_end(range, file_size) do
    case Integer.parse(range) do
      {first, "-"} when first >= 0 ->
        {first, file_size - 1}

      {first, "-" <> rest} when first >= 0 ->
        case Integer.parse(rest) do
          {last, ""} when last >= first -> {first, min(last, file_size - 1)}
          _ -> :error
        end

      _ ->
        :error
    end
  end

  defp send_range(conn, path, 0, range_end, file_size, options) when range_end == file_size - 1 do
    send_entire_file(conn, path, options)
  end

  defp send_range(conn, path, range_start, range_end, file_size, _options) do
    length = range_end - range_start + 1

    conn
    |> put_resp_header("content-range", "bytes #{range_start}-#{range_end}/#{file_size}")
    |> send_file(206, path, range_start, length)
    |> halt()
  end

  defp send_entire_file(conn, path, options) do
    conn
    |> maybe_add_vary(options)
    |> send_file(200, path)
    |> halt()
  end

  defp maybe_add_encoding(conn, nil), do: conn
  defp maybe_add_encoding(conn, ce), do: put_resp_header(conn, "content-encoding", ce)

  defp maybe_add_vary(conn, %{encodings: encodings}) do
    # If we serve gzip or brotli at any moment, we need to set the proper vary
    # header regardless of whether we are serving gzip content right now.
    # See: http://www.fastly.com/blog/best-practices-for-using-the-vary-header/
    if encodings != [] do
      update_in(conn.resp_headers, &[{"vary", "Accept-Encoding"} | &1])
    else
      conn
    end
  end

  defp put_cache_header(
         %Conn{query_string: "vsn=" <> _} = conn,
         qs_cache,
         _et_cache,
         _et_generation,
         _file_info,
         _path
       )
       when is_binary(qs_cache) do
    {:stale, put_resp_header(conn, "cache-control", qs_cache)}
  end

  defp put_cache_header(conn, _qs_cache, et_cache, et_generation, file_info, path)
       when is_binary(et_cache) do
    etag = etag_for_path(file_info, et_generation, path)

    conn =
      conn
      |> put_resp_header("cache-control", et_cache)
      |> put_resp_header("etag", etag)

    if etag in get_req_header(conn, "if-none-match") do
      {:fresh, conn}
    else
      {:stale, conn}
    end
  end

  defp put_cache_header(conn, _, _, _, _, _) do
    {:stale, conn}
  end

  defp etag_for_path(file_info, et_generation, path) do
    case et_generation do
      {module, function, args} ->
        apply(module, function, [path | args])

      nil ->
        file_info(size: size, mtime: mtime) = file_info
        <<?", {size, mtime} |> :erlang.phash2() |> Integer.to_string(16)::binary, ?">>
    end
  end

  defp file_encoding(conn, path, [_range], _encodings) do
    # We do not support compression for range queries.
    file_encoding(conn, path, nil, [])
  end

  defp file_encoding(conn, path, _range, encodings) do
    encoded =
      Enum.find_value(encodings, fn {encoding, ext} ->
        if file_info = accept_encoding?(conn, encoding) && regular_file_info(path <> ext) do
          {encoding, file_info, path <> ext}
        end
      end)

    cond do
      not is_nil(encoded) ->
        encoded

      file_info = regular_file_info(path) ->
        {nil, file_info, path}

      true ->
        :error
    end
  end

  defp regular_file_info(path) do
    case :prim_file.read_file_info(path) do
      {:ok, file_info(type: :regular) = file_info} ->
        file_info

      _ ->
        nil
    end
  end

  defp accept_encoding?(conn, encoding) do
    encoding? = &String.contains?(&1, [encoding, "*"])

    Enum.any?(get_req_header(conn, "accept-encoding"), fn accept ->
      accept |> Plug.Conn.Utils.list() |> Enum.any?(encoding?)
    end)
  end

  defp maybe_add(list, key, value, true), do: list ++ [{key, value}]
  defp maybe_add(list, _key, _value, false), do: list

  defp path({module, function, arguments}, segments)
       when is_atom(module) and is_atom(function) and is_list(arguments),
       do: Enum.join([apply(module, function, arguments) | segments], "/")

  defp path({app, from}, segments) when is_atom(app) and is_binary(from),
    do: Enum.join([Application.app_dir(app), from | segments], "/")

  defp path(from, segments),
    do: Enum.join([from | segments], "/")

  defp subset([h | expected], [h | actual]), do: subset(expected, actual)
  defp subset([], actual), do: actual
  defp subset(_, _), do: []
defp invalid_path?(list) do
|
||||
invalid_path?(list, :binary.compile_pattern(["/", "\\", ":", "\0"]))
|
||||
end
|
||||
|
||||
defp invalid_path?([h | _], _match) when h in [".", "..", ""], do: true
|
||||
defp invalid_path?([h | t], match), do: String.contains?(h, match) or invalid_path?(t)
|
||||
defp invalid_path?([], _match), do: false
|
||||
|
||||
defp merge_headers(conn, {module, function, args}) do
|
||||
merge_headers(conn, apply(module, function, [conn | args]))
|
||||
end
|
||||
|
||||
defp merge_headers(conn, headers) do
|
||||
merge_resp_headers(conn, headers)
|
||||
end
|
||||
end
|
|
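As an illustrative aside: `start_and_end/2` above maps the byte spec of a `Range` header onto inclusive file offsets, clamping the end to the file size; malformed specs make `serve_range/5` fall back to serving the whole file with a plain 200. Expected values for a hypothetical 1200-byte file:

```elixir
start_and_end("100-499", 1200)  #=> {100, 499}
start_and_end("100-", 1200)     #=> {100, 1199}   open-ended range runs to EOF
start_and_end("-300", 1200)     #=> {900, 1199}   suffix range: the final 300 bytes
start_and_end("100-9999", 1200) #=> {100, 1199}   end is clamped to file_size - 1
start_and_end("oops", 1200)     #=> :error        malformed spec, full file is sent instead
```
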
@@ -88,12 +88,12 @@ defp get_media(conn, {:static_dir, directory}, opts) do
      Map.get(opts, :static_plug_opts)
      |> Map.put(:at, [@path])
      |> Map.put(:from, directory)
      |> Map.put(:set_content_type, false)
      |> Map.put(:content_type, false)

    conn =
      conn
      |> set_content_type(opts, conn.request_path)
      |> Pleroma.Web.Plugs.StaticNoCT.call(static_opts)
      |> Plug.Static.call(static_opts)

    if conn.halted do
      conn

@@ -96,7 +96,7 @@ defp parse_uri(true, url) do
    |> validate_page_url
  end

  defp parse_uri(_, _), do: {:error, "not an URL"}
  defp parse_uri(_, _), do: {:error, "not a URL"}

  defp get_tld(host) do
    host

@@ -11,7 +11,7 @@ def up do
    # Also this MUST use select, else the migration will fail in future installs with new user fields!
    from(u in Pleroma.User,
      where: u.local == true,
      select: {u.id, u.keys, u.ap_id}
      select: {u.id, fragment("?.keys", u), u.ap_id}
    )
    |> Repo.stream(timeout: :infinity)
    |> Enum.each(fn

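An aside on the changed `select`: a later migration in this branch drops `keys` from the `users` schema, after which `u.keys` would no longer compile on fresh installs replaying this old migration; `fragment("?.keys", u)` reads the raw database column without consulting the Elixir schema. A sketch of the schema-independent form:

```elixir
# Illustrative only: schema-independent column access in an Ecto query.
from(u in Pleroma.User,
  where: u.local == true,
  # fragment/1 addresses the DB column directly, so the query still compiles
  # after `keys` is removed from the Elixir schema
  select: {u.id, fragment("?.keys", u), u.ap_id}
)
```
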
@@ -0,0 +1,24 @@
defmodule Pleroma.Repo.Migrations.SigningKeyNullability do
  use Ecto.Migration

  import Ecto.Query

  def up() do
    # Delete existing NULL entries; they are useless
    Pleroma.User.SigningKey
    |> where([s], is_nil(s.user_id) or is_nil(s.public_key))
    |> Pleroma.Repo.delete_all()

    alter table(:signing_keys) do
      modify :user_id, :uuid, null: false
      modify :public_key, :text, null: false
    end
  end

  def down() do
    alter table(:signing_keys) do
      modify :user_id, :uuid, null: true
      modify :public_key, :text, null: true
    end
  end
end

@@ -0,0 +1,36 @@
defmodule Pleroma.Repo.Migrations.SigningKeyUniqueUserId do
  use Ecto.Migration

  import Ecto.Query

  def up() do
    # If dupes exist for any local user we do NOT want to delete the genuine privkey alongside the fake.
    # Instead just filter out anything pertaining to local users; if dupes exist, manual intervention
    # is required anyway and index creation will just fail later (check against legacy field in users table)
    dupes =
      Pleroma.User.SigningKey
      |> join(:inner, [s], u in Pleroma.User, on: s.user_id == u.id)
      |> group_by([s], s.user_id)
      |> having([], count() > 1)
      |> having([_s, u], not fragment("bool_or(?)", u.local))
      |> select([s], s.user_id)

    # Delete existing remote duplicates;
    # they’ll be reinserted on the next user update
    # or proactively fetched when receiving a signature from it
    Pleroma.User.SigningKey
    |> where([s], s.user_id in subquery(dupes))
    |> Pleroma.Repo.delete_all()

    drop_if_exists(index(:signing_keys, [:user_id]))

    create_if_not_exists(
      index(:signing_keys, [:user_id], name: :signing_keys_user_id_index, unique: true)
    )
  end

  def down() do
    drop_if_exists(index(:signing_keys, [:user_id]))
    create_if_not_exists(index(:signing_keys, [:user_id], name: :signing_keys_user_id_index))
  end
end

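Reading the `dupes` query above: it groups `signing_keys` rows per `user_id`, keeps only users owning more than one row, and then excludes any group involving a local user via `bool_or(u.local)`, since a local user's genuine private key must never be deleted automatically. As a sketch, the SQL it roughly compiles to:

```elixir
# Illustrative approximation of the SQL generated for `dupes`:
#
#   SELECT s0."user_id"
#     FROM "signing_keys" AS s0
#     INNER JOIN "users" AS u1 ON s0."user_id" = u1."id"
#    GROUP BY s0."user_id"
#   HAVING count(*) > 1 AND NOT bool_or(u1."local")
```
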
@@ -0,0 +1,31 @@
defmodule Pleroma.Repo.Migrations.UsersDropLegacyKeyFields do
  use Ecto.Migration

  def up() do
    alter table(:users) do
      remove :keys, :text
      remove :public_key, :text
    end
  end

  def down() do
    # Using raw queries since the "keys" field may not exist in the Elixir Ecto schema,
    # which causes issues when migrating data back; this requires the column adds to be raw queries too
    """
    ALTER TABLE public.users
    ADD COLUMN keys text,
    ADD COLUMN public_key text;
    """
    |> Pleroma.Repo.query!([], timeout: :infinity)

    """
    UPDATE public.users AS u
    SET keys = s.private_key
    FROM public.signing_keys AS s
    WHERE s.user_id = u.id AND
          u.local AND
          s.private_key IS NOT NULL;
    """
    |> Pleroma.Repo.query!([], timeout: :infinity)
  end
end

test/fixtures/tesla_mock/evil@remote.example_with_keyid_from_admin@mastdon.example.org.json (new file, 54 lines, vendored)
@@ -0,0 +1,54 @@
{
  "@context": [
    "https://www.w3.org/ns/activitystreams",
    "https://w3id.org/security/v1",
    {
      "manuallyApprovesFollowers": "as:manuallyApprovesFollowers",
      "sensitive": "as:sensitive",
      "movedTo": "as:movedTo",
      "Hashtag": "as:Hashtag",
      "ostatus": "http://ostatus.org#",
      "atomUri": "ostatus:atomUri",
      "inReplyToAtomUri": "ostatus:inReplyToAtomUri",
      "conversation": "ostatus:conversation",
      "toot": "http://joinmastodon.org/ns#",
      "Emoji": "toot:Emoji",
      "alsoKnownAs": {
        "@id": "as:alsoKnownAs",
        "@type": "@id"
      }
    }
  ],
  "id": "http://remote.example/users/with_key_id_of_admin-mastodon.example.org",
  "type": "Person",
  "following": "http://remote.example/users/evil/following",
  "followers": "http://remote.example/users/evil/followers",
  "inbox": "http://remote.example/users/evil/inbox",
  "outbox": "http://remote.example/users/evil/outbox",
  "preferredUsername": "evil",
  "name": null,
  "discoverable": "true",
  "summary": "hii",
  "url": "http://remote.example/@evil",
  "manuallyApprovesFollowers": false,
  "publicKey": {
    "id": "http://mastodon.example.org/users/admin#main-key",
    "owner": "http://remote.example/users/evil",
    "publicKeyPem": "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtc4Tir+3ADhSNF6VKrtW\nOU32T01w7V0yshmQei38YyiVwVvFu8XOP6ACchkdxbJ+C9mZud8qWaRJKVbFTMUG\nNX4+6Q+FobyuKrwN7CEwhDALZtaN2IPbaPd6uG1B7QhWorrY+yFa8f2TBM3BxnUy\nI4T+bMIZIEYG7KtljCBoQXuTQmGtuffO0UwJksidg2ffCF5Q+K//JfQagJ3UzrR+\nZXbKMJdAw4bCVJYs4Z5EhHYBwQWiXCyMGTd7BGlmMkY6Av7ZqHKC/owp3/0EWDNz\nNqF09Wcpr3y3e8nA10X40MJqp/wR+1xtxp+YGbq/Cj5hZGBG7etFOmIpVBrDOhry\nBwIDAQAB\n-----END PUBLIC KEY-----\n"
  },
  "attachment": [],
  "endpoints": {
    "sharedInbox": "http://remote.example/inbox"
  },
  "icon": {
    "type": "Image",
    "mediaType": "image/jpeg",
    "url": "https://cdn.niu.moe/accounts/avatars/000/033/323/original/fd7f8ae0b3ffedc9.jpeg"
  },
  "image": {
    "type": "Image",
    "mediaType": "image/png",
    "url": "https://cdn.niu.moe/accounts/headers/000/033/323/original/850b3448fa5fd477.png"
  },
  "alsoKnownAs": []
}

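This fixture is the payload for the key-poisoning tests below: the actor itself lives on `remote.example`, while its `publicKey.id` points at `admin`'s key on `mastodon.example.org`. Accepting it would bind `admin`'s key id to the attacker's account. The essence of the rejected condition is a cross-host actor-key pairing; a minimal sketch (hypothetical helper, not the actual implementation):

```elixir
defp same_origin?(actor_id, key_id) do
  # a key may only be registered by an actor on the key's own host
  URI.parse(actor_id).host == URI.parse(key_id).host
end
```
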
@ -56,8 +56,19 @@ test "it returns error if public key is nil" do
|
|||
|
||||
describe "refetch_public_key/1" do
|
||||
test "it returns key" do
|
||||
clear_config([:activitypub, :min_key_refetch_interval], 0)
|
||||
ap_id = "https://mastodon.social/users/lambadalambda"
|
||||
|
||||
%Pleroma.User{signing_key: sk} =
|
||||
Pleroma.User.get_or_fetch_by_ap_id(ap_id)
|
||||
|> then(fn {:ok, u} -> u end)
|
||||
|> Pleroma.User.SigningKey.load_key()
|
||||
|
||||
{:ok, _} =
|
||||
%{sk | public_key: "-----BEGIN PUBLIC KEY-----\nasdfghjkl"}
|
||||
|> Ecto.Changeset.change()
|
||||
|> Pleroma.Repo.update()
|
||||
|
||||
assert Signature.refetch_public_key(make_fake_conn(ap_id)) == {:ok, @rsa_public_key}
|
||||
end
|
||||
end
|
||||
|
@ -114,16 +125,6 @@ test "it returns signature headers" do
|
|||
end
|
||||
end
|
||||
|
||||
describe "key_id_to_actor_id/1" do
|
||||
test "it reverses the key id to actor id" do
|
||||
user =
|
||||
insert(:user)
|
||||
|> with_signing_key()
|
||||
|
||||
assert Signature.key_id_to_actor_id(user.signing_key.key_id) == {:ok, user.ap_id}
|
||||
end
|
||||
end
|
||||
|
||||
describe "signed_date" do
|
||||
test "it returns formatted current date" do
|
||||
with_mock(NaiveDateTime, utc_now: fn -> ~N[2019-08-23 18:11:24.822233] end) do
|
||||
|
|
test/pleroma/user/signing_key_tests.ex (new file, 305 lines)
@ -0,0 +1,305 @@
|
|||
# Akkoma: Magically expressive social media
|
||||
# Copyright © 2024 Akkoma Authors <https://akkoma.dev/>
|
||||
# SPDX-License-Identifier: AGPL-3.0-only
|
||||
|
||||
defmodule Pleroma.User.SigningKeyTests do
|
||||
alias Pleroma.User
|
||||
alias Pleroma.User.SigningKey
|
||||
alias Pleroma.Repo
|
||||
|
||||
use Pleroma.DataCase
|
||||
use Oban.Testing, repo: Pleroma.Repo
|
||||
|
||||
import Pleroma.Factory
|
||||
|
||||
defp maybe_put(map, _, nil), do: map
|
||||
defp maybe_put(map, key, val), do: Kernel.put_in(map, key, val)
|
||||
|
||||
defp get_body_actor(key_id \\ nil, user_id \\ nil, owner_id \\ nil) do
|
||||
owner_id = owner_id || user_id
|
||||
|
||||
File.read!("test/fixtures/tesla_mock/admin@mastdon.example.org.json")
|
||||
|> Jason.decode!()
|
||||
|> maybe_put(["id"], user_id)
|
||||
|> maybe_put(["publicKey", "id"], key_id)
|
||||
|> maybe_put(["publicKey", "owner"], owner_id)
|
||||
|> Jason.encode!()
|
||||
end
|
||||
|
||||
defp get_body_rawkey(key_id, owner, pem \\ "RSA begin buplic key") do
|
||||
%{
|
||||
"type" => "CryptographicKey",
|
||||
"id" => key_id,
|
||||
"owner" => owner,
|
||||
"publicKeyPem" => pem
|
||||
}
|
||||
|> Jason.encode!()
|
||||
end
|
||||
|
||||
defmacro mock_tesla(
|
||||
url,
|
||||
get_body,
|
||||
status \\ 200,
|
||||
headers \\ []
|
||||
) do
|
||||
quote do
|
||||
Tesla.Mock.mock(fn
|
||||
%{method: :get, url: unquote(url)} ->
|
||||
%Tesla.Env{
|
||||
status: unquote(status),
|
||||
body: unquote(get_body),
|
||||
url: unquote(url),
|
||||
headers: [
|
||||
{"content-type",
|
||||
"application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""}
|
||||
| unquote(headers)
|
||||
]
|
||||
}
|
||||
end)
|
||||
end
|
||||
end
|
||||
|
||||
describe "succesfully" do
|
||||
test "inserts key and new user on fetch" do
|
||||
ap_id_actor = "https://mastodon.example.org/signing-key-test/actor"
|
||||
ap_id_key = ap_id_actor <> "#main-key"
|
||||
ap_doc = get_body_actor(ap_id_key, ap_id_actor)
|
||||
mock_tesla(ap_id_actor, ap_doc)
|
||||
|
||||
{:ok, %SigningKey{} = key} = SigningKey.fetch_remote_key(ap_id_key)
|
||||
user = User.get_by_id(key.user_id)
|
||||
|
||||
assert match?(%User{}, user)
|
||||
user = SigningKey.load_key(user)
|
||||
|
||||
assert user.ap_id == ap_id_actor
|
||||
assert user.signing_key.key_id == ap_id_key
|
||||
assert user.signing_key.key_id == key.key_id
|
||||
assert user.signing_key.private_key == nil
|
||||
end
|
||||
|
||||
test "updates existing key" do
|
||||
user =
|
||||
insert(:user, local: false, domain: "mastodon.example.org")
|
||||
|> with_signing_key()
|
||||
|
||||
ap_id_actor = user.ap_id
|
||||
ap_doc = get_body_actor(user.signing_key.key_id, ap_id_actor)
|
||||
mock_tesla(ap_id_actor, ap_doc)
|
||||
|
||||
old_pem = user.signing_key.public_key
|
||||
old_priv = user.signing_key.private_key
|
||||
|
||||
# note: the returned value does not fully match the value stored in the database
|
||||
# since inserted_at isn't changed on upserts
|
||||
{:ok, %SigningKey{} = key} = SigningKey.fetch_remote_key(user.signing_key.key_id)
|
||||
|
||||
refreshed_key = Repo.get_by(SigningKey, key_id: key.key_id)
|
||||
assert match?(%SigningKey{}, refreshed_key)
|
||||
refute refreshed_key.public_key == old_pem
|
||||
assert refreshed_key.private_key == old_priv
|
||||
assert refreshed_key.user_id == user.id
|
||||
assert key.public_key == refreshed_key.public_key
|
||||
end
|
||||
|
||||
test "finds known key by key_id" do
|
||||
sk = insert(:signing_key, key_id: "https://remote.example/signing-key-test/some-kown-key")
|
||||
{:ok, key} = SigningKey.get_or_fetch_by_key_id(sk.key_id)
|
||||
assert sk == key
|
||||
end
|
||||
|
||||
test "finds key for remote user" do
|
||||
user_with_preload =
|
||||
insert(:user, local: false)
|
||||
|> with_signing_key()
|
||||
|
||||
user = User.get_by_id(user_with_preload.id)
|
||||
assert !match?(%SigningKey{}, user.signing_key)
|
||||
|
||||
user = SigningKey.load_key(user)
|
||||
assert match?(%SigningKey{}, user.signing_key)
|
||||
|
||||
# the initial "with_signing_key" doesn't set timestamps, and meta differs (loaded vs built)
|
||||
# thus clear affected fields before comparison
|
||||
found_sk = %{user.signing_key | inserted_at: nil, updated_at: nil, __meta__: nil}
|
||||
ref_sk = %{user_with_preload.signing_key | __meta__: nil}
|
||||
assert found_sk == ref_sk
|
||||
end
|
||||
|
||||
test "finds remote user id by key id" do
|
||||
user =
|
||||
insert(:user, local: false)
|
||||
|> with_signing_key()
|
||||
|
||||
uid = SigningKey.key_id_to_user_id(user.signing_key.key_id)
|
||||
assert uid == user.id
|
||||
end
|
||||
|
||||
test "finds remote user ap id by key id" do
|
||||
user =
|
||||
insert(:user, local: false)
|
||||
|> with_signing_key()
|
||||
|
||||
uapid = SigningKey.key_id_to_ap_id(user.signing_key.key_id)
|
||||
assert uapid == user.ap_id
|
||||
end
|
||||
end
|
||||
|
||||
test "won't fetch keys for local users" do
|
||||
user =
|
||||
insert(:user, local: true)
|
||||
|> with_signing_key()
|
||||
|
||||
{:error, _} = SigningKey.fetch_remote_key(user.signing_key.key_id)
|
||||
end
|
||||
|
||||
test "fails insert with overlapping key owner" do
|
||||
user =
|
||||
insert(:user, local: false)
|
||||
|> with_signing_key()
|
||||
|
||||
second_key_id =
|
||||
user.signing_key.key_id
|
||||
|> URI.parse()
|
||||
|> Map.put(:fragment, nil)
|
||||
|> Map.put(:query, nil)
|
||||
|> URI.to_string()
|
||||
|> then(fn id -> id <> "/second_key" end)
|
||||
|
||||
ap_doc = get_body_rawkey(second_key_id, user.ap_id)
|
||||
mock_tesla(second_key_id, ap_doc)
|
||||
|
||||
res = SigningKey.fetch_remote_key(second_key_id)
|
||||
|
||||
assert match?({:error, %{errors: _}}, res)
|
||||
{:error, cs} = res
|
||||
assert Keyword.has_key?(cs.errors, :user_id)
|
||||
end
|
||||
|
||||
test "Fetched raw SigningKeys cannot take over arbitrary users" do
|
||||
# in theory cross-domain key and actor are fine, IF and ONLY IF
|
||||
# the actor also links back to this key, but this isn’t supported atm anyway
|
||||
user =
|
||||
insert(:user, local: false)
|
||||
|> with_signing_key()
|
||||
|
||||
remote_key_id = "https://remote.example/keys/for_local"
|
||||
keydoc = get_body_rawkey(remote_key_id, user.ap_id)
|
||||
mock_tesla(remote_key_id, keydoc)
|
||||
|
||||
{:error, _} = SigningKey.fetch_remote_key(remote_key_id)
|
||||
|
||||
refreshed_org_key = Repo.get_by(SigningKey, key_id: user.signing_key.key_id)
|
||||
refreshed_user_key = Repo.get_by(SigningKey, user_id: user.id)
|
||||
assert match?(%SigningKey{}, refreshed_org_key)
|
||||
assert match?(%SigningKey{}, refreshed_user_key)
|
||||
|
||||
actor_host = URI.parse(user.ap_id).host
|
||||
org_key_host = URI.parse(refreshed_org_key.key_id).host
|
||||
usr_key_host = URI.parse(refreshed_user_key.key_id).host
|
||||
assert actor_host == org_key_host
|
||||
assert actor_host == usr_key_host
|
||||
refute usr_key_host == "remote.example"
|
||||
|
||||
assert refreshed_user_key == refreshed_org_key
|
||||
assert user.signing_key.key_id == refreshed_org_key.key_id
|
||||
end
|
||||
|
||||
test "Fetched non-raw SigningKey cannot take over arbitrary users" do
|
||||
# this actually comes free with our fetch ID checks, but lets verify it here too for good measure
|
||||
user =
|
||||
insert(:user, local: false)
|
||||
|> with_signing_key()
|
||||
|
||||
remote_key_id = "https://remote.example/keys#for_local"
|
||||
keydoc = get_body_actor(remote_key_id, user.ap_id, user.ap_id)
|
||||
mock_tesla(remote_key_id, keydoc)
|
||||
|
||||
{:error, _} = SigningKey.fetch_remote_key(remote_key_id)
|
||||
|
||||
refreshed_org_key = Repo.get_by(SigningKey, key_id: user.signing_key.key_id)
|
||||
refreshed_user_key = Repo.get_by(SigningKey, user_id: user.id)
|
||||
assert match?(%SigningKey{}, refreshed_org_key)
|
||||
assert match?(%SigningKey{}, refreshed_user_key)
|
||||
|
||||
actor_host = URI.parse(user.ap_id).host
|
||||
org_key_host = URI.parse(refreshed_org_key.key_id).host
|
||||
usr_key_host = URI.parse(refreshed_user_key.key_id).host
|
||||
assert actor_host == org_key_host
|
||||
assert actor_host == usr_key_host
|
||||
refute usr_key_host == "remote.example"
|
||||
|
||||
assert refreshed_user_key == refreshed_org_key
|
||||
assert user.signing_key.key_id == refreshed_org_key.key_id
|
||||
end
|
||||
|
||||
test "remote users sharing signing key ID don't break our database" do
|
||||
# in principle a valid setup using this can be cosntructed,
|
||||
# but so far not observed in practice and our db scheme cannot handle it.
|
||||
# Thus make sure it doesn't break our db anything but gets rejected
|
||||
key_id = "https://mastodon.example.org/the_one_key"
|
||||
|
||||
user1 =
|
||||
insert(:user, local: false, domain: "mastodon.example.org")
|
||||
|> with_signing_key(%{key_id: key_id})
|
||||
|
||||
key_owner = "https://mastodon.example.org/#"
|
||||
|
||||
user2_ap_id = user1.ap_id <> "22"
|
||||
user2_doc = get_body_actor(user1.signing_key.key_id, user2_ap_id, key_owner)
|
||||
|
||||
user3_ap_id = user1.ap_id <> "333"
|
||||
user3_doc = get_body_actor(user1.signing_key.key_id, user2_ap_id)
|
||||
|
||||
standalone_key_doc =
|
||||
get_body_rawkey(key_id, "https://mastodon.example.org/#", user1.signing_key.public_key)
|
||||
|
||||
ap_headers = [
|
||||
{"content-type", "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\""}
|
||||
]
|
||||
|
||||
Tesla.Mock.mock(fn
|
||||
%{method: :get, url: ^key_id} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
body: standalone_key_doc,
|
||||
url: key_id,
|
||||
headers: ap_headers
|
||||
}
|
||||
|
||||
%{method: :get, url: ^user2_ap_id} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
body: user2_doc,
|
||||
url: user2_ap_id,
|
||||
headers: ap_headers
|
||||
}
|
||||
|
||||
%{method: :get, url: ^user3_ap_id} ->
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
body: user3_doc,
|
||||
url: user3_ap_id,
|
||||
headers: ap_headers
|
||||
}
|
||||
end)
|
||||
|
||||
{:error, _} = SigningKey.fetch_remote_key(key_id)
|
||||
|
||||
{:ok, user2} = User.get_or_fetch_by_ap_id(user2_ap_id)
|
||||
{:ok, user3} = User.get_or_fetch_by_ap_id(user3_ap_id)
|
||||
|
||||
{:ok, db_key} = SigningKey.get_or_fetch_by_key_id(key_id)
|
||||
|
||||
keys =
|
||||
from(s in SigningKey, where: s.key_id == ^key_id)
|
||||
|> Repo.all()
|
||||
|
||||
assert match?([%SigningKey{}], keys)
|
||||
assert [db_key] == keys
|
||||
assert db_key.user_id == user1.id
|
||||
assert match?({:ok, _}, SigningKey.public_key(user1))
|
||||
assert {:error, "key not found"} == SigningKey.public_key(user2)
|
||||
assert {:error, "key not found"} == SigningKey.public_key(user3)
|
||||
end
|
||||
end
|
|
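A note on the fixture helpers at the top of this file: `get_body_actor/3` rewrites the stock `admin@mastdon.example.org.json` document so a test can deliberately misalign actor id, key id, and key owner, while `get_body_rawkey/3` fabricates a standalone `CryptographicKey` document. For instance (illustrative values only):

```elixir
# An actor document whose declared key lives on a foreign host:
get_body_actor("https://evil.example/keys#main-key", "https://mastodon.example.org/users/admin")
```
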
@@ -10,6 +10,7 @@ defmodule Pleroma.UserTest do
  alias Pleroma.Repo
  alias Pleroma.Tests.ObanHelpers
  alias Pleroma.User
  alias Pleroma.User.SigningKey
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.CommonAPI

@ -460,12 +461,7 @@ test "it sends a welcome message if it is set" do
|
|||
}
|
||||
)
|
||||
|
||||
setup do:
|
||||
clear_config(:mrf,
|
||||
policies: [
|
||||
Pleroma.Web.ActivityPub.MRF.SimplePolicy
|
||||
]
|
||||
)
|
||||
setup do: clear_config([:mrf, :policies], [Pleroma.Web.ActivityPub.MRF.SimplePolicy])
|
||||
|
||||
test "it sends a welcome email message if it is set" do
|
||||
welcome_user = insert(:user)
|
||||
|
@ -908,6 +904,35 @@ test "it doesn't fail on invalid alsoKnownAs entries" do
|
|||
assert {:ok, %User{also_known_as: []}} =
|
||||
User.get_or_fetch_by_ap_id("https://mbp.example.com/")
|
||||
end
|
||||
|
||||
test "doesn't allow key_id poisoning" do
|
||||
{:error, {:validate, {:error, "Problematic actor-key pairing:" <> _}}} =
|
||||
User.fetch_by_ap_id(
|
||||
"http://remote.example/users/with_key_id_of_admin-mastodon.example.org"
|
||||
)
|
||||
|
||||
used_key_id = "http://mastodon.example.org/users/admin#main-key"
|
||||
refute Repo.get_by(SigningKey, key_id: used_key_id)
|
||||
|
||||
{:ok, user} = User.fetch_by_ap_id("http://mastodon.example.org/users/admin")
|
||||
user = SigningKey.load_key(user)
|
||||
|
||||
# ensure we checked for the right key before
|
||||
assert user.signing_key.key_id == used_key_id
|
||||
end
|
||||
|
||||
test "doesn't allow key_id takeovers" do
|
||||
{:ok, user} = User.fetch_by_ap_id("http://mastodon.example.org/users/admin")
|
||||
user = SigningKey.load_key(user)
|
||||
|
||||
{:error, {:validate, {:error, "Problematic actor-key pairing:" <> _}}} =
|
||||
User.fetch_by_ap_id(
|
||||
"http://remote.example/users/with_key_id_of_admin-mastodon.example.org"
|
||||
)
|
||||
|
||||
refreshed_sk = Repo.get_by(SigningKey, key_id: user.signing_key.key_id)
|
||||
assert refreshed_sk.user_id == user.id
|
||||
end
|
||||
end
|
||||
|
||||
test "returns an ap_id for a user" do
|
||||
|
@ -1678,7 +1703,6 @@ test "delete/1 purges a user when they wouldn't be fully deleted" do
|
|||
bio: "eyy lmao",
|
||||
name: "qqqqqqq",
|
||||
password_hash: "pdfk2$1b3n159001",
|
||||
keys: "RSA begin buplic key",
|
||||
avatar: %{"a" => "b"},
|
||||
tags: ["qqqqq"],
|
||||
banner: %{"a" => "b"},
|
||||
|
@ -1796,10 +1820,6 @@ test "unsuggests a user" do
|
|||
end
|
||||
end
|
||||
|
||||
test "get_public_key_for_ap_id fetches a user that's not in the db" do
|
||||
assert {:ok, _key} = User.get_public_key_for_ap_id("http://mastodon.example.org/users/admin")
|
||||
end
|
||||
|
||||
describe "per-user rich-text filtering" do
|
||||
test "html_filter_policy returns default policies, when rich-text is enabled" do
|
||||
user = insert(:user)
|
||||
|
@ -2202,6 +2222,56 @@ test "removes report notifs when user isn't superuser any more" do
|
|||
# is not a superuser any more
|
||||
assert [] = Notification.for_user(user)
|
||||
end
|
||||
|
||||
test "updates public key pem" do
|
||||
# note: in the future key updates might be limited to announced expirations
|
||||
user_org =
|
||||
insert(:user, local: false)
|
||||
|> with_signing_key()
|
||||
|
||||
old_pub = user_org.signing_key.public_key
|
||||
new_pub = "BEGIN RSA public key"
|
||||
refute old_pub == new_pub
|
||||
|
||||
{:ok, %User{} = user} =
|
||||
User.update_and_set_cache(user_org, %{signing_key: %{public_key: new_pub}})
|
||||
|
||||
user = SigningKey.load_key(user)
|
||||
|
||||
assert user.signing_key.public_key == new_pub
|
||||
end
|
||||
|
||||
test "updates public key id if valid" do
|
||||
# note: in the future key updates might be limited to announced expirations
|
||||
user_org =
|
||||
insert(:user, local: false)
|
||||
|> with_signing_key()
|
||||
|
||||
old_kid = user_org.signing_key.key_id
|
||||
new_kid = old_kid <> "_v2"
|
||||
|
||||
{:ok, %User{} = user} =
|
||||
User.update_and_set_cache(user_org, %{signing_key: %{key_id: new_kid}})
|
||||
|
||||
user = SigningKey.load_key(user)
|
||||
|
||||
assert user.signing_key.key_id == new_kid
|
||||
refute Repo.get_by(SigningKey, key_id: old_kid)
|
||||
end
|
||||
|
||||
test "refuses to sever existing key-user mappings" do
|
||||
user1 =
|
||||
insert(:user, local: false)
|
||||
|> with_signing_key()
|
||||
|
||||
user2 =
|
||||
insert(:user, local: false)
|
||||
|> with_signing_key()
|
||||
|
||||
assert_raise Ecto.ConstraintError, fn ->
|
||||
User.update_and_set_cache(user2, %{signing_key: %{key_id: user1.signing_key.key_id}})
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
describe "following/followers synchronization" do
|
||||
|
|
|
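The two "updates public key …" tests exercise the key-refresh-on-rotation path this branch fixes: nested `signing_key` params on a user update upsert the existing row, and a changed `key_id` replaces the old row outright, hence the `refute Repo.get_by(SigningKey, key_id: old_kid)`. A combined usage sketch (illustrative, assuming both fields may change in one update):

```elixir
# rotated_key_id and rotated_pem are hypothetical values for a rotated remote key
{:ok, user} =
  User.update_and_set_cache(user, %{
    signing_key: %{key_id: rotated_key_id, public_key: rotated_pem}
  })
```
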
@@ -16,7 +16,6 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
  alias Pleroma.Web.ActivityPub.ObjectView
  alias Pleroma.Web.ActivityPub.Relay
  alias Pleroma.Web.ActivityPub.UserView
  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Web.CommonAPI
  alias Pleroma.Web.Endpoint
  alias Pleroma.Workers.ReceiverWorker

@@ -562,7 +561,7 @@ test "it inserts an incoming activity into the database", %{conn: conn} do
        |> assign(:valid_signature, true)
        |> put_req_header(
          "signature",
          "keyId=\"http://mastodon.example.org/users/admin/main-key\""
          "keyId=\"http://mastodon.example.org/users/admin#main-key\""
        )
        |> put_req_header("content-type", "application/activity+json")
        |> post("/inbox", data)

@@ -680,7 +679,7 @@ test "accepts Add/Remove activities", %{conn: conn} do
        |> String.replace("{{nickname}}", "lain")

      actor = "https://example.com/users/lain"
      key_id = "#{actor}/main-key"
      key_id = "#{actor}#main-key"

      insert(:user,
        ap_id: actor,

@@ -742,7 +741,7 @@ test "accepts Add/Remove activities", %{conn: conn} do
      assert "ok" ==
               conn
               |> assign(:valid_signature, true)
               |> put_req_header("signature", "keyId=\"#{actor}/main-key\"")
               |> put_req_header("signature", "keyId=\"#{actor}#main-key\"")
               |> put_req_header("content-type", "application/activity+json")
               |> post("/inbox", data)
               |> json_response(200)

@@ -765,7 +764,7 @@ test "accepts Add/Remove activities", %{conn: conn} do
      assert "ok" ==
               conn
               |> assign(:valid_signature, true)
               |> put_req_header("signature", "keyId=\"#{actor}/main-key\"")
               |> put_req_header("signature", "keyId=\"#{actor}#main-key\"")
               |> put_req_header("content-type", "application/activity+json")
               |> post("/inbox", data)
               |> json_response(200)

@@ -791,7 +790,7 @@ test "mastodon pin/unpin", %{conn: conn} do
        |> String.replace("{{nickname}}", "lain")

      actor = "https://example.com/users/lain"
      key_id = "#{actor}/main-key"
      key_id = "#{actor}#main-key"

      sender =
        insert(:user,

@@ -884,7 +883,7 @@ test "mastodon pin/unpin", %{conn: conn} do
      assert "ok" ==
               conn
               |> assign(:valid_signature, true)
               |> put_req_header("signature", "keyId=\"#{actor}/main-key\"")
               |> put_req_header("signature", "keyId=\"#{actor}#main-key\"")
               |> put_req_header("content-type", "application/activity+json")
               |> post("/inbox", data)
               |> json_response(200)

@@ -916,7 +915,7 @@ test "it inserts an incoming activity into the database", %{conn: conn, data: da
      conn =
        conn
        |> assign(:valid_signature, true)
        |> put_req_header("signature", "keyId=\"#{data["actor"]}/main-key\"")
        |> put_req_header("signature", "keyId=\"#{data["actor"]}#main-key\"")
        |> put_req_header("content-type", "application/activity+json")
        |> post("/users/#{user.nickname}/inbox", data)

@@ -940,7 +939,7 @@ test "it accepts messages with to as string instead of array", %{conn: conn, dat
      conn =
        conn
        |> assign(:valid_signature, true)
        |> put_req_header("signature", "keyId=\"#{data["actor"]}/main-key\"")
        |> put_req_header("signature", "keyId=\"#{data["actor"]}#main-key\"")
        |> put_req_header("content-type", "application/activity+json")
        |> post("/users/#{user.nickname}/inbox", data)

@@ -962,7 +961,7 @@ test "it accepts messages with cc as string instead of array", %{conn: conn, dat
      conn =
        conn
        |> assign(:valid_signature, true)
        |> put_req_header("signature", "keyId=\"#{data["actor"]}/main-key\"")
        |> put_req_header("signature", "keyId=\"#{data["actor"]}#main-key\"")
        |> put_req_header("content-type", "application/activity+json")
        |> post("/users/#{user.nickname}/inbox", data)

@@ -989,7 +988,7 @@ test "it accepts messages with bcc as string instead of array", %{conn: conn, da
      conn =
        conn
        |> assign(:valid_signature, true)
        |> put_req_header("signature", "keyId=\"#{data["actor"]}/main-key\"")
        |> put_req_header("signature", "keyId=\"#{data["actor"]}#main-key\"")
        |> put_req_header("content-type", "application/activity+json")
        |> post("/users/#{user.nickname}/inbox", data)

@@ -1113,7 +1112,8 @@ test "it clears `unreachable` federation status of the sender", %{conn: conn, da
    end

    test "it removes all follower collections but actor's", %{conn: conn} do
      [actor, recipient] = insert_pair(:user)
      actor = insert(:user, local: false)
      recipient = insert(:user, local: true)
      actor = with_signing_key(actor)

      to = [

@@ -1127,7 +1127,7 @@ test "it removes all follower collections but actor's", %{conn: conn} do
      data = %{
        "@context" => ["https://www.w3.org/ns/activitystreams"],
        "type" => "Create",
        "id" => Utils.generate_activity_id(),
        "id" => actor.ap_id <> "/create/12345",
        "to" => to,
        "cc" => cc,
        "actor" => actor.ap_id,

@@ -1137,7 +1137,7 @@ test "it removes all follower collections but actor's", %{conn: conn} do
          "cc" => cc,
          "content" => "It's a note",
          "attributedTo" => actor.ap_id,
          "id" => Utils.generate_object_id()
          "id" => actor.ap_id <> "/note/12345"
        }
      }

@ -164,6 +164,183 @@ test "it accepts quote posts" do
|
|||
# It fetched the quoted post
|
||||
assert Object.normalize("https://misskey.io/notes/8vs6wxufd0")
|
||||
end
|
||||
|
||||
test "doesn't allow remote edits to fake local likes" do
|
||||
# as a spot check for no internal fields getting injected
|
||||
now = DateTime.utc_now()
|
||||
pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
|
||||
edit_date = DateTime.to_iso8601(now)
|
||||
|
||||
local_user = insert(:user)
|
||||
|
||||
create_data = %{
|
||||
"type" => "Create",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2619539638/activity",
|
||||
"actor" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["as:Public"],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2619539638",
|
||||
"attributedTo" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["as:Public"],
|
||||
"cc" => [],
|
||||
"published" => pub_date,
|
||||
"content" => "miaow",
|
||||
"likes" => [local_user.ap_id]
|
||||
}
|
||||
}
|
||||
|
||||
update_data =
|
||||
create_data
|
||||
|> Map.put("type", "Update")
|
||||
|> Map.put("id", create_data["object"]["id"] <> "/update/1")
|
||||
|> put_in(["object", "content"], "miaow :3")
|
||||
|> put_in(["object", "updated"], edit_date)
|
||||
|> put_in(["object", "formerRepresentations"], %{
|
||||
"type" => "OrderedCollection",
|
||||
"totalItems" => 1,
|
||||
"orderedItems" => [create_data["object"]]
|
||||
})
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
|
||||
assert object.data["content"] == "miaow"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
|
||||
assert object.data["content"] == "miaow :3"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
end
|
||||
|
||||
test "doesn't trip over remote likes in notes" do
|
||||
now = DateTime.utc_now()
|
||||
pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
|
||||
edit_date = DateTime.to_iso8601(now)
|
||||
|
||||
create_data = %{
|
||||
"type" => "Create",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/3409297097/activity",
|
||||
"actor" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["as:Public"],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Note",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/3409297097",
|
||||
"attributedTo" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["as:Public"],
|
||||
"cc" => [],
|
||||
"published" => pub_date,
|
||||
"content" => "miaow",
|
||||
"likes" => %{
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/3409297097/likes",
|
||||
"totalItems" => 0,
|
||||
"type" => "Collection"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
update_data =
|
||||
create_data
|
||||
|> Map.put("type", "Update")
|
||||
|> Map.put("id", create_data["object"]["id"] <> "/update/1")
|
||||
|> put_in(["object", "content"], "miaow :3")
|
||||
|> put_in(["object", "updated"], edit_date)
|
||||
|> put_in(["object", "likes", "totalItems"], 666)
|
||||
|> put_in(["object", "formerRepresentations"], %{
|
||||
"type" => "OrderedCollection",
|
||||
"totalItems" => 1,
|
||||
"orderedItems" => [create_data["object"]]
|
||||
})
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
|
||||
assert object.data["content"] == "miaow"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
|
||||
assert object.data["content"] == "miaow :3"
|
||||
assert object.data["likes"] == []
|
||||
# in the future this should retain remote likes, but for now:
|
||||
assert object.data["like_count"] == 0
|
||||
end
|
||||
|
||||
test "doesn't trip over remote likes in polls" do
|
||||
now = DateTime.utc_now()
|
||||
pub_date = DateTime.to_iso8601(Timex.subtract(now, Timex.Duration.from_minutes(3)))
|
||||
edit_date = DateTime.to_iso8601(now)
|
||||
|
||||
create_data = %{
|
||||
"type" => "Create",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2471790073/activity",
|
||||
"actor" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["as:Public"],
|
||||
"cc" => [],
|
||||
"object" => %{
|
||||
"type" => "Question",
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2471790073",
|
||||
"attributedTo" => "http://mastodon.example.org/users/admin",
|
||||
"to" => ["as:Public"],
|
||||
"cc" => [],
|
||||
"published" => pub_date,
|
||||
"content" => "vote!",
|
||||
"anyOf" => [
|
||||
%{
|
||||
"type" => "Note",
|
||||
"name" => "a",
|
||||
"replies" => %{
|
||||
"type" => "Collection",
|
||||
"totalItems" => 3
|
||||
}
|
||||
},
|
||||
%{
|
||||
"type" => "Note",
|
||||
"name" => "b",
|
||||
"replies" => %{
|
||||
"type" => "Collection",
|
||||
"totalItems" => 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"likes" => %{
|
||||
"id" => "http://mastodon.example.org/users/admin/statuses/2471790073/likes",
|
||||
"totalItems" => 0,
|
||||
"type" => "Collection"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
update_data =
|
||||
create_data
|
||||
|> Map.put("type", "Update")
|
||||
|> Map.put("id", create_data["object"]["id"] <> "/update/1")
|
||||
|> put_in(["object", "content"], "vote now!")
|
||||
|> put_in(["object", "updated"], edit_date)
|
||||
|> put_in(["object", "likes", "totalItems"], 666)
|
||||
|> put_in(["object", "formerRepresentations"], %{
|
||||
"type" => "OrderedCollection",
|
||||
"totalItems" => 1,
|
||||
"orderedItems" => [create_data["object"]]
|
||||
})
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(create_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"])
|
||||
assert object.data["content"] == "vote!"
|
||||
assert object.data["likes"] == []
|
||||
assert object.data["like_count"] == 0
|
||||
|
||||
{:ok, %Pleroma.Activity{} = activity} = Transmogrifier.handle_incoming(update_data)
|
||||
%Pleroma.Object{} = object = Object.get_by_ap_id(activity.data["object"]["id"])
|
||||
assert object.data["content"] == "vote now!"
|
||||
assert object.data["likes"] == []
|
||||
# in the future this should retain remote likes, but for now:
|
||||
assert object.data["like_count"] == 0
|
||||
end
|
||||
end
|
||||
|
||||
describe "prepare outgoing" do
|
||||
|
|
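All three new tests target the same invariant: `likes` and the derived `like_count` are internal fields Akkoma maintains itself, so whatever a remote `Create` or `Update` claims for them must be discarded rather than persisted. Roughly the sanitisation being pinned down (hypothetical helper name, not the actual implementation):

```elixir
defp strip_remote_like_state(object) do
  # remote-supplied like state is dropped; local counters are rebuilt internally
  Map.drop(object, ["likes", "like_count"])
end
```
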
@@ -121,13 +121,13 @@ test "refuses to crawl plain HTTP and other scheme URL" do
      res = Parser.parse(url)

      assert {:error, {:url, "scheme mismatch"}} == res or
               {:error, {:url, "not an URL"}} == res
               {:error, {:url, "not a URL"}} == res
    end)
  end

  test "refuses to crawl malformed URLs" do
    url = "example.com[]/ogp"
    assert {:error, {:url, "not an URL"}} == Parser.parse(url)
    assert {:error, {:url, "not a URL"}} == Parser.parse(url)
  end

  test "refuses to crawl URLs of private network from posts" do

@@ -7,13 +7,16 @@ defmodule Pleroma.Workers.ReceiverWorkerTest do
  use Oban.Testing, repo: Pleroma.Repo
  @moduletag :mocked

  import ExUnit.CaptureLog
  import Mock
  import Pleroma.Factory

  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Workers.ReceiverWorker

  test "it ignores MRF reject" do
    params = insert(:note).data
    user = insert(:user, local: false)
    params = insert(:note, user: user, data: %{"id" => user.ap_id <> "/note/1"}).data

    with_mock Pleroma.Web.ActivityPub.Transmogrifier,
      handle_incoming: fn _ -> {:reject, "MRF"} end do

@@ -23,4 +26,36 @@ test "it ignores MRF reject" do
      })
    end
  end

  test "it errors on receiving local documents" do
    actor = insert(:user, local: true)
    recipient = insert(:user, local: true)

    to = [recipient.ap_id]
    cc = []

    params = %{
      "@context" => ["https://www.w3.org/ns/activitystreams"],
      "type" => "Create",
      "id" => Utils.generate_activity_id(),
      "to" => to,
      "cc" => cc,
      "actor" => actor.ap_id,
      "object" => %{
        "type" => "Note",
        "to" => to,
        "cc" => cc,
        "content" => "It's a note",
        "attributedTo" => actor.ap_id,
        "id" => Utils.generate_object_id()
      }
    }

    assert capture_log(fn ->
             assert {:discard, :origin_containment_failed} ==
                      ReceiverWorker.perform(%Oban.Job{
                        args: %{"op" => "incoming_ap_doc", "params" => params}
                      })
           end) =~ "[alert] Received incoming AP doc with valid signature for local actor"
  end
end

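The new test documents origin containment at the worker level: an inbound federation document whose actor resolves to a local user is discarded with an alert, since genuinely local activity never arrives through the inbox, and returning `{:discard, reason}` tells Oban not to retry the job. A sketch of the guard's shape (hypothetical, not the actual implementation):

```elixir
case User.get_cached_by_ap_id(params["actor"]) do
  %User{local: true} ->
    # matches the alert asserted via capture_log in the test above
    {:discard, :origin_containment_failed}

  _ ->
    # hypothetical continuation into normal processing
    process_remote_doc(params)
end
```
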
@ -424,7 +424,7 @@ def get("http://mastodon.example.org/users/admin", _, _, _) do
|
|||
}}
|
||||
end
|
||||
|
||||
def get("http://mastodon.example.org/users/admin/main-key", _, _, _) do
|
||||
def get("http://mastodon.example.org/users/admin#main-key", _, _, _) do
|
||||
{:ok,
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
|
@ -433,6 +433,19 @@ def get("http://mastodon.example.org/users/admin/main-key", _, _, _) do
|
|||
}}
|
||||
end
|
||||
|
||||
def get("http://remote.example/users/with_key_id_of_admin-mastodon.example.org" = url, _, _, _) do
|
||||
{:ok,
|
||||
%Tesla.Env{
|
||||
status: 200,
|
||||
url: url,
|
||||
body:
|
||||
File.read!(
|
||||
"test/fixtures/tesla_mock/evil@remote.example_with_keyid_from_admin@mastdon.example.org.json"
|
||||
),
|
||||
headers: activitypub_object_headers()
|
||||
}}
|
||||
end
|
||||
|
||||
def get(
|
||||
"http://mastodon.example.org/users/admin/statuses/99512778738411822/replies?min_id=99512778738411824&page=true",
|
||||
_,
|
||||
|
|