Merge branch 'develop' into backoff-http

Floatingghost 2024-04-26 19:06:18 +01:00
commit f531484063
14 changed files with 331 additions and 111 deletions

View file

@ -10,9 +10,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Support for [FEP-fffd](https://codeberg.org/fediverse/fep/src/branch/main/fep/fffd/fep-fffd.md) (proxy objects)
- Verified support for Elixir 1.16
- Uploadfilter `Pleroma.Upload.Filter.Exiftool.ReadDescription` returns description values to the FE so they can pre-fill the image description field
NOTE: this filter MUST be placed before `Exiftool.StripMetadata` to work
## Changed
- Inbound pipeline error handling was modified somewhat, which should lead to less incomprehensible log spam. Hopefully.
- Uploadfilter `Pleroma.Upload.Filter.Exiftool` was replaced by `Pleroma.Upload.Filter.Exiftool.StripMetadata`;
the latter strips all non-essential metadata by default but can be configured.
To regain the old behaviour of only stripping GPS data, set `purge: ["gps:all"]`.
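A rough configuration sketch (assuming the usual `config :pleroma, Pleroma.Upload` filter list; adapt paths and filters to your own `config/prod.secret.exs`):

```elixir
# Sketch only: the filter order and purge list here are illustrative.
config :pleroma, Pleroma.Upload,
  filters: [
    # ReadDescription must run before StripMetadata (see the note above)
    Pleroma.Upload.Filter.Exiftool.ReadDescription,
    Pleroma.Upload.Filter.Exiftool.StripMetadata
  ]

# Restores the old behaviour of only stripping GPS data
config :pleroma, Pleroma.Upload.Filter.Exiftool.StripMetadata,
  purge: ["gps:all"]
```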
- Uploadfilter `Pleroma.Upload.Filter.Exiftool` has been renamed to `Pleroma.Upload.Filter.Exiftool.StripMetadata`
- MRF.InlineQuotePolicy now prefers to insert display URLs instead of ActivityPub IDs
- Old accounts are no longer listed in WebFinger as aliases; this was breaking spec
@ -23,12 +27,14 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Move activities no longer operate on stale user data
- Missing definitions in our JSON-LD context
- Issue mangling newlines in code blocks for RSS/Atom feeds
- static\_fe squeezing non-square avatars and emoji
- Issue leading to properly JSON-LD compacted emoji reactions being rejected
- We now use a standard-compliant Accept header when fetching ActivityPub objects
- /api/pleroma/notification\_settings was rejecting body parameters;
this also broke changing this setting via akkoma-fe
- Issue leading to Mastodon bot accounts being rejected
- Scope misdetection of remote posts resulting from not recognising
JSON-LD-compacted forms of public scope; affected e.g. federation with bovine
## Removed
- ActivityPub Client-To-Server write API endpoints have been disabled;

View file

@ -1,2 +0,0 @@
web: mix phx.server
release: mix ecto.migrate

View file

@ -222,6 +222,26 @@
}
]
},
%{
group: :pleroma,
key: Pleroma.Upload.Filter.Exiftool.StripMetadata,
type: :group,
description: "Strip specified metadata from image uploads",
children: [
%{
key: :purge,
description: "Metadata fields or groups to strip",
type: {:list, :string},
suggestions: ["all", "CommonIFD0"]
},
%{
key: :preserve,
description: "Metadata fields or groups to preserve (takes precedence over stripping)",
type: {:list, :string},
suggestions: ["ColorSpaces", "Orientation"]
}
]
},
%{
group: :pleroma,
key: Pleroma.Emails.Mailer,

View file

@ -1,7 +0,0 @@
{
"skip_files": [
"test/support",
"lib/mix/tasks/pleroma/benchmark.ex",
"lib/credo/check/consistency/file_location.ex"
]
}

View file

@ -37,7 +37,7 @@ If any of the options are left unspecified, you will be prompted interactively.
- `--static-dir <path>` - the directory custom public files should be read from (custom emojis, frontend bundle overrides, robots.txt, etc.)
- `--listen-ip <ip>` - the IP address the app should listen on, defaults to 127.0.0.1
- `--listen-port <port>` - the port the app should listen on, defaults to 4000
- `--strip-uploads-metadata <Y|N>` - use ExifTool to strip uploads of metadata when possible
- `--read-uploads-description <Y|N>` - use ExifTool to read image descriptions from uploads
- `--anonymize-uploads <Y|N>` - randomize uploaded filenames
- `--dedupe-uploads <Y|N>` - store files based on their hash to reduce data storage requirements if duplicates are uploaded with different filenames

View file

@ -656,9 +656,10 @@ This filter replaces the declared filename (not the path) of an upload.
#### Pleroma.Upload.Filter.Exiftool.StripMetadata
This filter strips metadata with Exiftool, leaving color profiles and orientation intact.
* `purge`: List of Exiftool tag names or tag group names to purge
* `preserve`: List of Exiftool tag names or tag group names to preserve even if they occur in the purge list
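For example, a minimal sketch mirroring the built-in defaults (the values are illustrative; any Exiftool tag or group name can be listed):

```elixir
config :pleroma, Pleroma.Upload.Filter.Exiftool.StripMetadata,
  purge: ["all", "CommonIFD0"],
  preserve: ["ColorSpaceTags", "Orientation"]
```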
#### Pleroma.Upload.Filter.Exiftool.ReadDescription

View file

@ -14,7 +14,7 @@ Note: the packages are not required with the current default settings of Akkoma.
`ImageMagick` is a set of tools to create, edit, compose, or convert bitmap images.
It is required for the following Akkoma features:
* `Pleroma.Upload.Filters.Mogrify`, `Pleroma.Upload.Filters.Mogrifun` upload filters (related config: `Pleroma.Upload/filters` in `config/config.exs`)
* Media preview proxy for still images (related config: `media_preview_proxy/enabled` in `config/config.exs`)
## `ffmpeg`
@ -29,5 +29,5 @@ It is required for the following Akkoma features:
`exiftool` is a media file metadata reader/writer.
It is required for the following Akkoma features:
* `Pleroma.Upload.Filters.Exiftool.StripMetadata` upload filter (related config: `Pleroma.Upload/filters` in `config/config.exs`)
* `Pleroma.Upload.Filters.Exiftool.ReadDescription` upload filter (related config: `Pleroma.Upload/filters` in `config/config.exs`)

View file

@ -1,2 +0,0 @@
elixir_version=1.14.3
erlang_version=25.3

View file

@ -3,7 +3,7 @@
# See the documentation at docs.akkoma.dev for your particular distro/OS for
# installation instructions.
proxy_cache_path /tmp/akkoma-media-cache levels=1:2 keys_zone=akkoma_media_cache:10m max_size=1g
inactive=720m use_temp_path=off;
# this is explicitly IPv4 since Pleroma.Web.Endpoint binds on IPv4 only

View file

@ -172,10 +172,10 @@ def run(["gen" | rest]) do
{strip_uploads_metadata_message, strip_uploads_metadata_default} =
if Pleroma.Utils.command_available?("exiftool") do
{"Do you want to strip metadata from uploaded images? This requires exiftool, it was detected as installed. (y/n)",
"y"}
else
{"Do you want to strip metadata from uploaded images? This requires exiftool, it was detected as not installed, please install it if you answer yes. (y/n)",
"n"}
end

View file

@ -4,22 +4,40 @@
defmodule Pleroma.Upload.Filter.Exiftool.StripMetadata do
@moduledoc """
Tries to strip all image metadata except colorspace and orientation, overwriting the file in place.
Also strips or replaces filesystem metadata, e.g. timestamps.
"""
@behaviour Pleroma.Upload.Filter
alias Pleroma.Config
@purge_default ["all", "CommonIFD0"]
@preserve_default ["ColorSpaceTags", "Orientation"]
@spec filter(Pleroma.Upload.t()) :: {:ok, :noop} | {:ok, :filtered} | {:error, String.t()}
# Formats not compatible with exiftool at this time
def filter(%Pleroma.Upload{content_type: "image/heic"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{content_type: "image/webp"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{content_type: "image/svg+xml"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{content_type: "image/jxl"}), do: {:ok, :noop}
def filter(%Pleroma.Upload{tempfile: file, content_type: "image" <> _}) do
purge_args =
Config.get([__MODULE__, :purge], @purge_default)
|> Enum.map(fn mgroup -> "-" <> mgroup <> "=" end)
preserve_args =
Config.get([__MODULE__, :preserve], @preserve_default)
|> Enum.map(fn mgroup -> "-" <> mgroup end)
|> then(fn
# If -TagsFromFile is not followed by tag selectors, it will copy most available tags
[] -> []
args -> ["-TagsFromFile", "@" | args]
end)
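# With the default purge/preserve lists this builds a command roughly like
#   exiftool -ignoreMinorErrors -overwrite_original -all= -CommonIFD0= \
#     -TagsFromFile @ -ColorSpaceTags -Orientation <tempfile>
# (illustrative only; the exact arguments come from the configured lists)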
args = ["-ignoreMinorErrors", "-overwrite_original" | purge_args] ++ preserve_args ++ [file]
try do
case System.cmd("exiftool", args, parallelism: true) do
{_response, 0} -> {:ok, :filtered}
{error, 1} -> {:error, error}
end

View file

@ -58,21 +58,48 @@ def fix_summary(%{"summary" => _} = object) do
def fix_summary(object), do: Map.put(object, "summary", "")
defp fix_addressing_list(addrs) do
cond do
is_list(addrs) -> Enum.filter(addrs, &is_binary/1)
is_binary(addrs) -> [addrs]
true -> []
end
end
# Due to JSON-LD, simply "Public" and "as:Public" are equivalent to the full URI,
# but to simplify later checks we only want to deal with one representation internally
defp normalise_addressing_public_list(map, all_fields)
defp normalise_addressing_public_list(%{} = map, [field | fields]) do
full_uri = Pleroma.Constants.as_public()
map =
if map[field] != nil do
new_fval =
map[field]
|> fix_addressing_list()
|> Enum.map(fn
"Public" -> full_uri
"as:Public" -> full_uri
x -> x
end)
Map.put(map, field, new_fval)
else
map
end
normalise_addressing_public_list(map, fields)
end
defp normalise_addressing_public_list(map, _) do
map
end
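# Illustration with hypothetical input:
#   %{"to" => ["Public"], "cc" => ["as:Public"]}
# becomes
#   %{"to" => ["https://www.w3.org/ns/activitystreams#Public"],
#     "cc" => ["https://www.w3.org/ns/activitystreams#Public"]}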
defp normalise_addressing_public(map) do
normalise_addressing_public_list(map, ["to", "cc", "bto", "bcc"])
end
# if directMessage flag is set to true, leave the addressing alone
def fix_explicit_addressing(%{"directMessage" => true} = object, _follower_collection),
do: object
@ -96,6 +123,10 @@ def fix_explicit_addressing(%{"to" => to, "cc" => cc} = object, follower_collect
|> Map.put("cc", final_cc) |> Map.put("cc", final_cc)
end end
def fix_addressing_list_key(map, field) do
Map.put(map, field, fix_addressing_list(map[field]))
end
def fix_addressing(object) do
{:ok, %User{follower_address: follower_collection}} =
object
@ -103,10 +134,10 @@ def fix_addressing(object) do
|> User.get_or_fetch_by_ap_id()
object
|> fix_addressing_list_key("to")
|> fix_addressing_list_key("cc")
|> fix_addressing_list_key("bto")
|> fix_addressing_list_key("bcc")
|> fix_explicit_addressing(follower_collection)
|> CommonFixes.fix_implicit_addressing(follower_collection)
end
@ -383,11 +414,28 @@ defp get_reported(objects) do
end)
end
def handle_incoming(data, options \\ []) do
data = normalise_addressing_public(data)
data =
if data["object"] != nil do
object = normalise_addressing_public(data["object"])
Map.put(data, "object", object)
else
data
end
handle_incoming_normalised(data, options)
end
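# handle_incoming/2 above only normalises public addressing;
# all per-type handling now lives in handle_incoming_normalised/2 below.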
defp handle_incoming_normalised(data, options)
# Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them
# with nil ID.
defp handle_incoming_normalised(
%{"type" => "Flag", "object" => objects, "actor" => actor} = data,
_options
) do
with context <- data["context"] || Utils.generate_context_id(), with context <- data["context"] || Utils.generate_context_id(),
content <- data["content"] || "", content <- data["content"] || "",
%User{} = actor <- User.get_cached_by_ap_id(actor), %User{} = actor <- User.get_cached_by_ap_id(actor),
@ -408,20 +456,21 @@ def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} =
end
# disallow objects with bogus IDs
defp handle_incoming_normalised(%{"id" => nil}, _options), do: :error
defp handle_incoming_normalised(%{"id" => ""}, _options), do: :error
# length of https:// = 8, should validate better, but good enough for now.
defp handle_incoming_normalised(%{"id" => id}, _options)
when is_binary(id) and byte_size(id) < 8,
do: :error
@doc "Rewrite misskey likes into EmojiReacts" # Rewrite misskey likes into EmojiReacts
def handle_incoming( defp handle_incoming_normalised(
%{ %{
"type" => "Like", "type" => "Like",
"content" => reaction "content" => reaction
} = data, } = data,
options options
) do ) do
if Pleroma.Emoji.is_unicode_emoji?(reaction) || Pleroma.Emoji.matches_shortcode?(reaction) do if Pleroma.Emoji.is_unicode_emoji?(reaction) || Pleroma.Emoji.matches_shortcode?(reaction) do
data data
|> Map.put("type", "EmojiReact") |> Map.put("type", "EmojiReact")
@ -433,11 +482,11 @@ def handle_incoming(
end
end
defp handle_incoming_normalised(
%{"type" => "Create", "object" => %{"type" => objtype, "id" => obj_id}} = data,
options
)
when objtype in ~w{Question Answer Audio Video Event Article Note Page} do
fetch_options = Keyword.put(options, :depth, (options[:depth] || 0) + 1)
object =
@ -469,8 +518,8 @@ def handle_incoming(
end
end
defp handle_incoming_normalised(%{"type" => type} = data, _options)
when type in ~w{Like EmojiReact Announce Add Remove} do
with :ok <- ObjectValidator.fetch_actor_and_object(data),
{:ok, activity, _meta} <- Pipeline.common_pipeline(data, local: false) do
{:ok, activity}
@ -480,11 +529,11 @@ def handle_incoming(%{"type" => type} = data, _options)
end
end
defp handle_incoming_normalised(
%{"type" => type} = data,
_options
)
when type in ~w{Update Block Follow Accept Reject} do
with {:ok, %User{}} <- ObjectValidator.fetch_actor(data),
{:ok, activity, _} <-
Pipeline.common_pipeline(data, local: false) do
@ -492,10 +541,10 @@ def handle_incoming(
end
end
defp handle_incoming_normalised(
%{"type" => "Delete"} = data,
_options
) do
with {:ok, activity, _} <-
Pipeline.common_pipeline(data, local: false) do
{:ok, activity}
@ -515,15 +564,15 @@ def handle_incoming(
end
end
defp handle_incoming_normalised(
%{
"type" => "Undo",
"object" => %{"type" => "Follow", "object" => followed},
"actor" => follower,
"id" => id
} = _data,
_options
) do
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
{:ok, activity} <- ActivityPub.unfollow(follower, followed, id, false) do
@ -534,28 +583,28 @@ def handle_incoming(
end
end
defp handle_incoming_normalised(
%{
"type" => "Undo",
"object" => %{"type" => type}
} = data,
_options
)
when type in ["Like", "EmojiReact", "Announce", "Block"] do
with {:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do
{:ok, activity}
end
end
# For Undos that don't have the complete object attached, try to find it in our database.
defp handle_incoming_normalised(
%{
"type" => "Undo",
"object" => object
} = activity,
options
)
when is_binary(object) do
with %Activity{data: data} <- Activity.get_by_ap_id(object) do
activity
|> Map.put("object", data)
@ -565,15 +614,15 @@ def handle_incoming(
end
end
defp handle_incoming_normalised(
%{
"type" => "Move",
"actor" => origin_actor,
"object" => origin_actor,
"target" => target_actor
},
_options
) do
with %User{} = origin_user <- User.get_cached_by_ap_id(origin_actor),
# Use a dramatically shortened maximum age before refresh here because it is reasonable
# for a user to
@ -588,7 +637,7 @@ def handle_incoming(
end
end
defp handle_incoming_normalised(_, _), do: :error
@spec get_obj_helper(String.t(), Keyword.t()) :: {:ok, Object.t()} | nil
def get_obj_helper(id, options \\ []) do

View file

@ -6,29 +6,104 @@ defmodule Pleroma.Upload.Filter.Exiftool.StripMetadataTest do
use Pleroma.DataCase
alias Pleroma.Upload.Filter
@tag :tmp_dir
test "exiftool strip metadata strips GPS etc but preserves Orientation and ColorSpace by default",
%{tmp_dir: tmp_dir} do
assert Pleroma.Utils.command_available?("exiftool") assert Pleroma.Utils.command_available?("exiftool")
tmpfile = Path.join(tmp_dir, "tmp.jpg")
File.cp!(
"test/fixtures/DSCN0010.jpg",
tmpfile
)
upload = %Pleroma.Upload{
name: "image_with_GPS_data.jpg",
content_type: "image/jpeg",
path: Path.absname("test/fixtures/DSCN0010.jpg"),
tempfile: Path.absname(tmpfile)
}
assert Filter.Exiftool.StripMetadata.filter(upload) == {:ok, :filtered}
exif_original = read_exif("test/fixtures/DSCN0010.jpg")
exif_filtered = read_exif(tmpfile)
refute exif_original == exif_filtered
assert String.match?(exif_original, ~r/GPS/)
refute String.match?(exif_filtered, ~r/GPS/)
assert String.match?(exif_original, ~r/Camera Model Name/)
refute String.match?(exif_filtered, ~r/Camera Model Name/)
assert String.match?(exif_original, ~r/Orientation/)
assert String.match?(exif_filtered, ~r/Orientation/)
assert String.match?(exif_original, ~r/Color Space/)
assert String.match?(exif_filtered, ~r/Color Space/)
end
# this is a nonsensical configuration, but it shouldn't explode
@tag :tmp_dir
test "exiftool strip metadata is a noop with empty purge list", %{tmp_dir: tmp_dir} do
assert Pleroma.Utils.command_available?("exiftool")
clear_config([Pleroma.Upload.Filter.Exiftool.StripMetadata, :purge], [])
tmpfile = Path.join(tmp_dir, "tmp.jpg")
File.cp!(
"test/fixtures/DSCN0010.jpg",
tmpfile
)
upload = %Pleroma.Upload{
name: "image_with_GPS_data.jpg",
content_type: "image/jpeg",
path: Path.absname("test/fixtures/DSCN0010.jpg"),
tempfile: Path.absname(tmpfile)
}
assert Filter.Exiftool.StripMetadata.filter(upload) == {:ok, :filtered}
exif_original = read_exif("test/fixtures/DSCN0010.jpg")
exif_filtered = read_exif(tmpfile)
assert exif_original == exif_filtered
end
@tag :tmp_dir
test "exiftool strip metadata works with empty preserve list", %{tmp_dir: tmp_dir} do
assert Pleroma.Utils.command_available?("exiftool")
clear_config([Pleroma.Upload.Filter.Exiftool.StripMetadata, :preserve], [])
tmpfile = Path.join(tmp_dir, "tmp.jpg")
File.cp!(
"test/fixtures/DSCN0010.jpg",
tmpfile
)
upload = %Pleroma.Upload{
name: "image_with_GPS_data.jpg",
content_type: "image/jpeg",
path: Path.absname("test/fixtures/DSCN0010.jpg"),
tempfile: Path.absname(tmpfile)
}
write_exif(["-ImageDescription=Trees and Houses", "-Orientation=1", tmpfile])
exif_extended = read_exif(tmpfile)
assert String.match?(exif_extended, ~r/Image Description[ \t]*:[ \t]*Trees and Houses/)
assert String.match?(exif_extended, ~r/Orientation/)
assert Filter.Exiftool.StripMetadata.filter(upload) == {:ok, :filtered}
exif_original = read_exif("test/fixtures/DSCN0010.jpg")
exif_filtered = read_exif(tmpfile)
refute exif_original == exif_filtered
refute exif_extended == exif_filtered
assert String.match?(exif_original, ~r/GPS/)
refute String.match?(exif_filtered, ~r/GPS/)
refute String.match?(exif_filtered, ~r/Image Description/)
refute String.match?(exif_filtered, ~r/Orientation/)
end
test "verify webp files are skipped" do
@ -39,4 +114,35 @@ test "verify webp files are skipped" do
assert Filter.Exiftool.StripMetadata.filter(upload) == {:ok, :noop}
end
test "verify svg files are skipped" do
upload = %Pleroma.Upload{
name: "sample.svg",
content_type: "image/svg+xml"
}
assert Filter.Exiftool.StripMetadata.filter(upload) == {:ok, :noop}
end
defp read_exif(file) do
# time and file path tags cause mismatches even for byte-identical files
{exif_data, 0} =
System.cmd("exiftool", [
"-x",
"Time:All",
"-x",
"Directory",
"-x",
"FileName",
"-x",
"FileSize",
file
])
exif_data
end
defp write_exif(args) do
{_response, 0} = System.cmd("exiftool", ["-ignoreMinorErrors", "-overwrite_original" | args])
end
end

View file

@ -137,6 +137,37 @@ test "successfully processes incoming AP docs with correct origin" do
assert {:error, :already_present} = ObanHelpers.perform(job)
end
test "successfully normalises public scope descriptors" do
params = %{
"@context" => "https://www.w3.org/ns/activitystreams",
"actor" => "http://mastodon.example.org/users/admin",
"type" => "Create",
"id" => "http://mastodon.example.org/users/admin/activities/1",
"object" => %{
"type" => "Note",
"content" => "hi world!",
"id" => "http://mastodon.example.org/users/admin/objects/1",
"attributedTo" => "http://mastodon.example.org/users/admin",
"to" => ["Public"]
},
"to" => ["as:Public"]
}
assert {:ok, job} = Federator.incoming_ap_doc(params)
assert {:ok, activity} = ObanHelpers.perform(job)
assert activity.data["to"] == ["https://www.w3.org/ns/activitystreams#Public"]
object =
from(
object in Pleroma.Object,
where: fragment("(?)->>'id' = ?", object.data, ^activity.data["object"]),
limit: 1
)
|> Repo.one()
assert object.data["to"] == ["https://www.w3.org/ns/activitystreams#Public"]
end
test "rejects incoming AP docs with incorrect origin" do test "rejects incoming AP docs with incorrect origin" do
params = %{ params = %{
"@context" => "https://www.w3.org/ns/activitystreams", "@context" => "https://www.w3.org/ns/activitystreams",