diff --git a/CHANGELOG.md b/CHANGELOG.md
index 86991efe9..3473e8acb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,16 +6,22 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [Unreleased]
### Added
- MRF: Support for priming the mediaproxy cache (`Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy`)
+- Configuration: `federation_incoming_replies_max_depth` option
- Mastodon API: Support for the [`tagged` filter](https://github.com/tootsuite/mastodon/pull/9755) in [`GET /api/v1/accounts/:id/statuses`](https://docs.joinmastodon.org/api/rest/accounts/#get-api-v1-accounts-id-statuses)
+- Mastodon API, streaming: Add support for passing the token in the `Sec-WebSocket-Protocol` header
- Admin API: Return users' tags when querying reports
- Admin API: Return avatar and display name when querying users
- Admin API: Allow querying user by ID
+- Synchronization of following/followers counters for external users
### Fixed
- Not being able to pin unlisted posts
+- Metadata rendering errors resulting in the entire page being inaccessible
- Mastodon API: Handling of search timeouts (`/api/v1/search` and `/api/v2/search`)
+- Mastodon API: Embedded relationships not being properly rendered in the Account entity of the Status entity
### Changed
+- Configuration: OpenGraph and TwitterCard providers enabled by default
- Configuration: Filter.AnonymizeFilename added ability to retain file extension with custom text
### Changed
diff --git a/config/config.exs b/config/config.exs
index e337f00aa..09681f122 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -218,6 +218,7 @@
},
registrations_open: true,
federating: true,
+ federation_incoming_replies_max_depth: 100,
federation_reachability_timeout_days: 7,
federation_publisher_modules: [
Pleroma.Web.ActivityPub.Publisher,
@@ -248,7 +249,14 @@
remote_post_retention_days: 90,
skip_thread_containment: true,
limit_to_local_content: :unauthenticated,
- dynamic_configuration: false
+ dynamic_configuration: false,
+ external_user_synchronization: [
+ enabled: false,
+ # every 2 hours
+ interval: 60 * 60 * 2,
+ max_retries: 3,
+ limit: 500
+ ]
config :pleroma, :markup,
# XXX - unfortunately, inline images must be enabled by default right now, because
@@ -358,7 +366,11 @@
port: 9999
config :pleroma, Pleroma.Web.Metadata,
- providers: [Pleroma.Web.Metadata.Providers.RelMe],
+ providers: [
+ Pleroma.Web.Metadata.Providers.OpenGraph,
+ Pleroma.Web.Metadata.Providers.TwitterCard,
+ Pleroma.Web.Metadata.Providers.RelMe
+ ],
unfurl_nsfw: false
config :pleroma, :suggestions,
diff --git a/config/test.exs b/config/test.exs
index 9d441a7f5..63443dde0 100644
--- a/config/test.exs
+++ b/config/test.exs
@@ -28,7 +28,8 @@
config :pleroma, :instance,
email: "admin@example.com",
notify_email: "noreply@example.com",
- skip_thread_containment: false
+ skip_thread_containment: false,
+ federating: false
# Configure your database
config :pleroma, Pleroma.Repo,
@@ -74,6 +75,8 @@
config :pleroma, :database, rum_enabled: rum_enabled
IO.puts("RUM enabled: #{rum_enabled}")
+config :pleroma, Pleroma.ReverseProxy.Client, Pleroma.ReverseProxy.ClientMock
+
try do
import_config "test.secret.exs"
rescue
diff --git a/docs/config.md b/docs/config.md
index 8afccb228..931155fe9 100644
--- a/docs/config.md
+++ b/docs/config.md
@@ -87,6 +87,7 @@ config :pleroma, Pleroma.Emails.Mailer,
* `invites_enabled`: Enable user invitations for admins (depends on `registrations_open: false`).
* `account_activation_required`: Require users to confirm their emails before signing in.
* `federating`: Enable federation with other instances
+* `federation_incoming_replies_max_depth`: Max. depth of reply-to activities fetching on incoming federation, to prevent out-of-memory situations while fetching very long threads. If set to `nil`, threads of any depth will be fetched. Lower this value if you experience out-of-memory crashes.
* `federation_reachability_timeout_days`: Timeout (in days) of each external federation target being unreachable prior to pausing federating to it.
* `allow_relay`: Enable Pleroma’s Relay, which makes it possible to follow a whole instance
* `rewrite_policy`: Message Rewrite Policy, either one or a list. Here are the ones available by default:
@@ -124,6 +125,12 @@ config :pleroma, Pleroma.Emails.Mailer,
* `skip_thread_containment`: Skip filtering out broken threads. The default is `false`.
* `limit_to_local_content`: Limit unauthenticated users to searching for local statuses and users only. Possible values: `:unauthenticated`, `:all` and `false`. The default is `:unauthenticated`.
* `dynamic_configuration`: Allow transferring configuration to DB with the subsequent customization from Admin api.
+* `external_user_synchronization`: Settings for synchronizing following/followers counters of remote users.
+  * `enabled`: Enables the synchronization.
+  * `interval`: Interval between synchronization runs.
+  * `max_retries`: Max retries per host. Once the limit is exceeded, users from that host are no longer checked.
+  * `limit`: Number of users to process in one batch.
+
## :logger
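
For reference, a sketch of how the two options documented above could be combined in an instance's config; the values simply mirror the defaults added in `config/config.exs` in this diff (except `enabled`, flipped on here for illustration):

```elixir
# Illustrative only: cap reply-chain fetching and turn on counter synchronization.
config :pleroma, :instance,
  federation_incoming_replies_max_depth: 100,
  external_user_synchronization: [
    enabled: true,
    # the in-tree comment labels this "every 2 hours"
    interval: 60 * 60 * 2,
    max_retries: 3,
    limit: 500
  ]
```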
diff --git a/lib/mix/tasks/pleroma/ecto/ecto.ex b/lib/mix/tasks/pleroma/ecto/ecto.ex
index 324f57fdd..b66f63376 100644
--- a/lib/mix/tasks/pleroma/ecto/ecto.ex
+++ b/lib/mix/tasks/pleroma/ecto/ecto.ex
@@ -1,6 +1,7 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors
# SPDX-License-Identifier: AGPL-3.0-only
+
defmodule Mix.Tasks.Pleroma.Ecto do
@doc """
Ensures the given repository's migrations path exists on the file system.
diff --git a/lib/mix/tasks/pleroma/instance.ex b/lib/mix/tasks/pleroma/instance.ex
index a27c4b897..2ae16adc0 100644
--- a/lib/mix/tasks/pleroma/instance.ex
+++ b/lib/mix/tasks/pleroma/instance.ex
@@ -149,7 +149,7 @@ def run(["gen" | rest]) do
uploads_dir =
get_option(
options,
- :upload_dir,
+ :uploads_dir,
"What directory should media uploads go in (when using the local uploader)?",
Pleroma.Config.get([Pleroma.Uploaders.Local, :uploads])
)
diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex
index ba4cf8486..86c348a0d 100644
--- a/lib/pleroma/application.ex
+++ b/lib/pleroma/application.ex
@@ -151,7 +151,11 @@ def start(_type, _args) do
start: {Pleroma.Web.Endpoint, :start_link, []},
type: :supervisor
},
- %{id: Pleroma.Gopher.Server, start: {Pleroma.Gopher.Server, :start_link, []}}
+ %{id: Pleroma.Gopher.Server, start: {Pleroma.Gopher.Server, :start_link, []}},
+ %{
+ id: Pleroma.User.SynchronizationWorker,
+ start: {Pleroma.User.SynchronizationWorker, :start_link, []}
+ }
]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
diff --git a/lib/pleroma/object.ex b/lib/pleroma/object.ex
index 4b181ec59..b8647dd26 100644
--- a/lib/pleroma/object.ex
+++ b/lib/pleroma/object.ex
@@ -44,20 +44,20 @@ def get_by_ap_id(ap_id) do
Repo.one(from(object in Object, where: fragment("(?)->>'id' = ?", object.data, ^ap_id)))
end
- def normalize(_, fetch_remote \\ true)
+ def normalize(_, fetch_remote \\ true, options \\ [])
# If we pass an Activity to Object.normalize(), we can try to use the preloaded object.
# Use this whenever possible, especially when walking graphs in an O(N) loop!
- def normalize(%Object{} = object, _), do: object
- def normalize(%Activity{object: %Object{} = object}, _), do: object
+ def normalize(%Object{} = object, _, _), do: object
+ def normalize(%Activity{object: %Object{} = object}, _, _), do: object
# A hack for fake activities
- def normalize(%Activity{data: %{"object" => %{"fake" => true} = data}}, _) do
+ def normalize(%Activity{data: %{"object" => %{"fake" => true} = data}}, _, _) do
%Object{id: "pleroma:fake_object_id", data: data}
end
# Catch and log Object.normalize() calls where the Activity's child object is not
# preloaded.
- def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote) do
+ def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote, _) do
Logger.debug(
"Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object!"
)
@@ -67,7 +67,7 @@ def normalize(%Activity{data: %{"object" => %{"id" => ap_id}}}, fetch_remote) do
normalize(ap_id, fetch_remote)
end
- def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote) do
+ def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote, _) do
Logger.debug(
"Object.normalize() called without preloaded object (#{ap_id}). Consider preloading the object!"
)
@@ -78,10 +78,14 @@ def normalize(%Activity{data: %{"object" => ap_id}}, fetch_remote) do
end
# Old way, try fetching the object through cache.
- def normalize(%{"id" => ap_id}, fetch_remote), do: normalize(ap_id, fetch_remote)
- def normalize(ap_id, false) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id)
- def normalize(ap_id, true) when is_binary(ap_id), do: Fetcher.fetch_object_from_id!(ap_id)
- def normalize(_, _), do: nil
+ def normalize(%{"id" => ap_id}, fetch_remote, _), do: normalize(ap_id, fetch_remote)
+ def normalize(ap_id, false, _) when is_binary(ap_id), do: get_cached_by_ap_id(ap_id)
+
+ def normalize(ap_id, true, options) when is_binary(ap_id) do
+ Fetcher.fetch_object_from_id!(ap_id, options)
+ end
+
+ def normalize(_, _, _), do: nil
# Owned objects can only be mutated by their owner
def authorize_mutation(%Object{data: %{"actor" => actor}}, %User{ap_id: ap_id}),
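
`Object.normalize/3` now accepts an options keyword list and forwards it to the fetcher; a usage sketch (the object ID is hypothetical, and `:depth` is the key that `Transmogrifier.get_obj_helper/2` threads through in this diff):

```elixir
ap_id = "https://example.org/objects/1"  # hypothetical object ID

# Cache-only lookup; options are irrelevant here.
object = Pleroma.Object.normalize(ap_id, false)

# Remote fetch, forwarding the current reply depth so Transmogrifier.handle_incoming/2
# can enforce the federation_incoming_replies_max_depth limit further down.
object = Pleroma.Object.normalize(ap_id, true, depth: 1)
```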
diff --git a/lib/pleroma/object/fetcher.ex b/lib/pleroma/object/fetcher.ex
index c422490ac..fffbf2bbb 100644
--- a/lib/pleroma/object/fetcher.ex
+++ b/lib/pleroma/object/fetcher.ex
@@ -22,7 +22,7 @@ defp reinject_object(data) do
# TODO:
# This will create a Create activity, which we need internally at the moment.
- def fetch_object_from_id(id) do
+ def fetch_object_from_id(id, options \\ []) do
if object = Object.get_cached_by_ap_id(id) do
{:ok, object}
else
@@ -38,7 +38,7 @@ def fetch_object_from_id(id) do
"object" => data
},
:ok <- Containment.contain_origin(id, params),
- {:ok, activity} <- Transmogrifier.handle_incoming(params),
+ {:ok, activity} <- Transmogrifier.handle_incoming(params, options),
{:object, _data, %Object{} = object} <-
{:object, data, Object.normalize(activity, false)} do
{:ok, object}
@@ -63,8 +63,8 @@ def fetch_object_from_id(id) do
end
end
- def fetch_object_from_id!(id) do
- with {:ok, object} <- fetch_object_from_id(id) do
+ def fetch_object_from_id!(id, options \\ []) do
+ with {:ok, object} <- fetch_object_from_id(id, options) do
object
else
_e ->
diff --git a/lib/pleroma/reverse_proxy/client.ex b/lib/pleroma/reverse_proxy/client.ex
new file mode 100644
index 000000000..57c2d2cfd
--- /dev/null
+++ b/lib/pleroma/reverse_proxy/client.ex
@@ -0,0 +1,24 @@
+defmodule Pleroma.ReverseProxy.Client do
+ @callback request(atom(), String.t(), [tuple()], String.t(), list()) ::
+ {:ok, pos_integer(), [tuple()], reference() | map()}
+ | {:ok, pos_integer(), [tuple()]}
+ | {:ok, reference()}
+ | {:error, term()}
+
+ @callback stream_body(reference() | pid() | map()) ::
+ {:ok, binary()} | :done | {:error, String.t()}
+
+ @callback close(reference() | pid() | map()) :: :ok
+
+ def request(method, url, headers, "", opts \\ []) do
+ client().request(method, url, headers, "", opts)
+ end
+
+ def stream_body(ref), do: client().stream_body(ref)
+
+ def close(ref), do: client().close(ref)
+
+ defp client do
+ Pleroma.Config.get([Pleroma.ReverseProxy.Client], :hackney)
+ end
+end
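
Because the HTTP client is now resolved through `Pleroma.Config`, tests can substitute a Mox mock (the `mox` dependency is added in `mix.exs` below, and `config/test.exs` above points `Pleroma.ReverseProxy.Client` at `Pleroma.ReverseProxy.ClientMock`). The mock definition itself is not part of this diff; a sketch of what it might look like:

```elixir
# Hypothetical test support code: define the mock for the behaviour...
Mox.defmock(Pleroma.ReverseProxy.ClientMock, for: Pleroma.ReverseProxy.Client)

# ...and, inside a test, stub a minimal successful response.
Mox.expect(Pleroma.ReverseProxy.ClientMock, :request, fn :get, _url, _headers, "", _opts ->
  {:ok, 200, [{"content-type", "image/png"}], make_ref()}
end)
```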
diff --git a/lib/pleroma/reverse_proxy.ex b/lib/pleroma/reverse_proxy/reverse_proxy.ex
similarity index 97%
rename from lib/pleroma/reverse_proxy.ex
rename to lib/pleroma/reverse_proxy/reverse_proxy.ex
index de0f6e1bc..bf31e9cba 100644
--- a/lib/pleroma/reverse_proxy.ex
+++ b/lib/pleroma/reverse_proxy/reverse_proxy.ex
@@ -146,7 +146,7 @@ defp request(method, url, headers, hackney_opts) do
Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
method = method |> String.downcase() |> String.to_existing_atom()
- case hackney().request(method, url, headers, "", hackney_opts) do
+ case client().request(method, url, headers, "", hackney_opts) do
{:ok, code, headers, client} when code in @valid_resp_codes ->
{:ok, code, downcase_headers(headers), client}
@@ -173,7 +173,7 @@ defp response(conn, client, url, status, headers, opts) do
halt(conn)
{:error, :closed, conn} ->
- :hackney.close(client)
+ client().close(client)
halt(conn)
{:error, error, conn} ->
@@ -181,7 +181,7 @@ defp response(conn, client, url, status, headers, opts) do
"#{__MODULE__} request to #{url} failed while reading/chunking: #{inspect(error)}"
)
- :hackney.close(client)
+ client().close(client)
halt(conn)
end
end
@@ -196,7 +196,7 @@ defp chunk_reply(conn, client, opts, sent_so_far, duration) do
duration,
Keyword.get(opts, :max_read_duration, @max_read_duration)
),
- {:ok, data} <- hackney().stream_body(client),
+ {:ok, data} <- client().stream_body(client),
{:ok, duration} <- increase_read_duration(duration),
sent_so_far = sent_so_far + byte_size(data),
:ok <- body_size_constraint(sent_so_far, Keyword.get(opts, :max_body_size)),
@@ -378,5 +378,5 @@ defp increase_read_duration(_) do
{:ok, :no_duration_limit, :no_duration_limit}
end
- defp hackney, do: Pleroma.Config.get(:hackney, :hackney)
+ defp client, do: Pleroma.ReverseProxy.Client
end
diff --git a/lib/pleroma/uploaders/swift/keystone.ex b/lib/pleroma/uploaders/swift/keystone.ex
deleted file mode 100644
index dd44c7561..000000000
--- a/lib/pleroma/uploaders/swift/keystone.ex
+++ /dev/null
@@ -1,51 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Uploaders.Swift.Keystone do
- use HTTPoison.Base
-
- def process_url(url) do
- Enum.join(
- [Pleroma.Config.get!([Pleroma.Uploaders.Swift, :auth_url]), url],
- "/"
- )
- end
-
- def process_response_body(body) do
- body
- |> Jason.decode!()
- end
-
- def get_token do
- settings = Pleroma.Config.get(Pleroma.Uploaders.Swift)
- username = Keyword.fetch!(settings, :username)
- password = Keyword.fetch!(settings, :password)
- tenant_id = Keyword.fetch!(settings, :tenant_id)
-
- case post(
- "/tokens",
- make_auth_body(username, password, tenant_id),
- ["Content-Type": "application/json"],
- hackney: [:insecure]
- ) do
- {:ok, %Tesla.Env{status: 200, body: body}} ->
- body["access"]["token"]["id"]
-
- {:ok, %Tesla.Env{status: _}} ->
- ""
- end
- end
-
- def make_auth_body(username, password, tenant) do
- Jason.encode!(%{
- :auth => %{
- :passwordCredentials => %{
- :username => username,
- :password => password
- },
- :tenantId => tenant
- }
- })
- end
-end
diff --git a/lib/pleroma/uploaders/swift/swift.ex b/lib/pleroma/uploaders/swift/swift.ex
deleted file mode 100644
index 2b0f2ad04..000000000
--- a/lib/pleroma/uploaders/swift/swift.ex
+++ /dev/null
@@ -1,29 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Uploaders.Swift.Client do
- use HTTPoison.Base
-
- def process_url(url) do
- Enum.join(
- [Pleroma.Config.get!([Pleroma.Uploaders.Swift, :storage_url]), url],
- "/"
- )
- end
-
- def upload_file(filename, body, content_type) do
- token = Pleroma.Uploaders.Swift.Keystone.get_token()
-
- case put("#{filename}", body, "X-Auth-Token": token, "Content-Type": content_type) do
- {:ok, %Tesla.Env{status: 201}} ->
- {:ok, {:file, filename}}
-
- {:ok, %Tesla.Env{status: 401}} ->
- {:error, "Unauthorized, Bad Token"}
-
- {:error, _} ->
- {:error, "Swift Upload Error"}
- end
- end
-end
diff --git a/lib/pleroma/uploaders/swift/uploader.ex b/lib/pleroma/uploaders/swift/uploader.ex
deleted file mode 100644
index d122b09e7..000000000
--- a/lib/pleroma/uploaders/swift/uploader.ex
+++ /dev/null
@@ -1,19 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Uploaders.Swift do
- @behaviour Pleroma.Uploaders.Uploader
-
- def get_file(name) do
- {:ok, {:url, Path.join([Pleroma.Config.get!([__MODULE__, :object_url]), name])}}
- end
-
- def put_file(upload) do
- Pleroma.Uploaders.Swift.Client.upload_file(
- upload.path,
- File.read!(upload.tmpfile),
- upload.content_type
- )
- end
-end
diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex
index 09f86aaa2..d03810d1a 100644
--- a/lib/pleroma/user.ex
+++ b/lib/pleroma/user.ex
@@ -107,15 +107,25 @@ def ap_id(%User{nickname: nickname}) do
def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"
- def user_info(%User{} = user) do
+ def user_info(%User{} = user, args \\ %{}) do
+ following_count =
+ if args[:following_count], do: args[:following_count], else: following_count(user)
+
+ follower_count =
+ if args[:follower_count], do: args[:follower_count], else: user.info.follower_count
+
%{
- following_count: following_count(user),
note_count: user.info.note_count,
- follower_count: user.info.follower_count,
locked: user.info.locked,
confirmation_pending: user.info.confirmation_pending,
default_scope: user.info.default_scope
}
+ |> Map.put(:following_count, following_count)
+ |> Map.put(:follower_count, follower_count)
+ end
+
+ def set_info_cache(user, args) do
+ Cachex.put(:user_cache, "user_info:#{user.id}", user_info(user, args))
end
def restrict_deactivated(query) do
@@ -1000,6 +1010,56 @@ def perform(:follow_import, %User{} = follower, followed_identifiers)
)
end
+ @spec sync_follow_counter() :: :ok
+ def sync_follow_counter,
+ do: PleromaJobQueue.enqueue(:background, __MODULE__, [:sync_follow_counters])
+
+ @spec perform(:sync_follow_counters) :: :ok
+ def perform(:sync_follow_counters) do
+ {:ok, _pid} = Agent.start_link(fn -> %{} end, name: :domain_errors)
+ config = Pleroma.Config.get([:instance, :external_user_synchronization])
+
+ :ok = sync_follow_counters(config)
+ Agent.stop(:domain_errors)
+ end
+
+ @spec sync_follow_counters(keyword()) :: :ok
+ def sync_follow_counters(opts \\ []) do
+ users = external_users(opts)
+
+ if length(users) > 0 do
+ errors = Agent.get(:domain_errors, fn state -> state end)
+ {last, updated_errors} = User.Synchronization.call(users, errors, opts)
+ Agent.update(:domain_errors, fn _state -> updated_errors end)
+ sync_follow_counters(max_id: last.id, limit: opts[:limit])
+ else
+ :ok
+ end
+ end
+
+ @spec external_users(keyword()) :: [User.t()]
+ def external_users(opts \\ []) do
+ query =
+ User.Query.build(%{
+ external: true,
+ active: true,
+ order_by: :id,
+ select: [:id, :ap_id, :info]
+ })
+
+ query =
+ if opts[:max_id],
+ do: where(query, [u], u.id > ^opts[:max_id]),
+ else: query
+
+ query =
+ if opts[:limit],
+ do: limit(query, ^opts[:limit]),
+ else: query
+
+ Repo.all(query)
+ end
+
def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers),
do:
PleromaJobQueue.enqueue(:background, __MODULE__, [
diff --git a/lib/pleroma/user/query.ex b/lib/pleroma/user/query.ex
index ace9c05f2..f9bcc9e19 100644
--- a/lib/pleroma/user/query.ex
+++ b/lib/pleroma/user/query.ex
@@ -7,7 +7,7 @@ defmodule Pleroma.User.Query do
User query builder module. Builds query from new query or another user query.
## Example:
- query = Pleroma.User.Query(%{nickname: "nickname"})
+ query = Pleroma.User.Query.build(%{nickname: "nickname"})
another_query = Pleroma.User.Query.build(query, %{email: "email@example.com"})
Pleroma.Repo.all(query)
Pleroma.Repo.all(another_query)
@@ -47,7 +47,10 @@ defmodule Pleroma.User.Query do
friends: User.t(),
recipients_from_activity: [String.t()],
nickname: [String.t()],
- ap_id: [String.t()]
+ ap_id: [String.t()],
+ order_by: term(),
+ select: term(),
+ limit: pos_integer()
}
| %{}
@@ -141,6 +144,18 @@ defp compose_query({:recipients_from_activity, to}, query) do
where(query, [u], u.ap_id in ^to or fragment("? && ?", u.following, ^to))
end
+ defp compose_query({:order_by, key}, query) do
+ order_by(query, [u], field(u, ^key))
+ end
+
+ defp compose_query({:select, keys}, query) do
+ select(query, [u], ^keys)
+ end
+
+ defp compose_query({:limit, limit}, query) do
+ limit(query, ^limit)
+ end
+
defp compose_query(_unsupported_param, query), do: query
defp prepare_tag_criteria(tag, query) do
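
The new `:order_by`, `:select` and `:limit` options are what `User.external_users/1` (in the `user.ex` hunk above) relies on for batched processing; driven directly, the builder looks like this (a sketch mirroring that call site):

```elixir
# Active remote users, ordered by id and trimmed to the fields the sync job needs.
query =
  Pleroma.User.Query.build(%{
    external: true,
    active: true,
    order_by: :id,
    select: [:id, :ap_id, :info],
    limit: 500
  })

users = Pleroma.Repo.all(query)
```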
diff --git a/lib/pleroma/user/search.ex b/lib/pleroma/user/search.ex
index 7680c2afd..64eb6d2bc 100644
--- a/lib/pleroma/user/search.ex
+++ b/lib/pleroma/user/search.ex
@@ -150,7 +150,7 @@ defp boost_search_rank_query(query, for_user) do
@spec fts_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t()
defp fts_search_subquery(query, term) do
processed_query =
- term
+ String.trim_trailing(term, "@" <> local_domain())
|> String.replace(~r/\W+/, " ")
|> String.trim()
|> String.split()
@@ -192,6 +192,8 @@ defp fts_search_subquery(query, term) do
@spec trigram_search_subquery(User.t() | Ecto.Query.t(), String.t()) :: Ecto.Query.t()
defp trigram_search_subquery(query, term) do
+ term = String.trim_trailing(term, "@" <> local_domain())
+
from(
u in query,
select_merge: %{
@@ -209,4 +211,6 @@ defp trigram_search_subquery(query, term) do
)
|> User.restrict_deactivated()
end
+
+ defp local_domain, do: Pleroma.Config.get([Pleroma.Web.Endpoint, :url, :host])
end
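
The effect of the search change is that a query ending in `@<local domain>` is reduced to the bare nickname before ranking; a quick illustration (the domain value is hypothetical):

```elixir
local_domain = "example.org"

String.trim_trailing("lain@example.org", "@" <> local_domain)
#=> "lain"

# Handles on other domains are left untouched.
String.trim_trailing("lain@remote.example", "@" <> local_domain)
#=> "lain@remote.example"
```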
diff --git a/lib/pleroma/user/synchronization.ex b/lib/pleroma/user/synchronization.ex
new file mode 100644
index 000000000..93660e08c
--- /dev/null
+++ b/lib/pleroma/user/synchronization.ex
@@ -0,0 +1,60 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2018 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.User.Synchronization do
+ alias Pleroma.HTTP
+ alias Pleroma.User
+
+ @spec call([User.t()], map(), keyword()) :: {User.t(), map()}
+ def call(users, errors, opts \\ []) do
+ do_call(users, errors, opts)
+ end
+
+ defp do_call([user | []], errors, opts) do
+ updated = fetch_counters(user, errors, opts)
+ {user, updated}
+ end
+
+ defp do_call([user | others], errors, opts) do
+ updated = fetch_counters(user, errors, opts)
+ do_call(others, updated, opts)
+ end
+
+ defp fetch_counters(user, errors, opts) do
+ %{host: host} = URI.parse(user.ap_id)
+
+ info = %{}
+ {following, errors} = fetch_counter(user.ap_id <> "/following", host, errors, opts)
+ info = if following, do: Map.put(info, :following_count, following), else: info
+
+ {followers, errors} = fetch_counter(user.ap_id <> "/followers", host, errors, opts)
+ info = if followers, do: Map.put(info, :follower_count, followers), else: info
+
+ User.set_info_cache(user, info)
+ errors
+ end
+
+ defp available_domain?(domain, errors, opts) do
+ max_retries = Keyword.get(opts, :max_retries, 3)
+ not (Map.has_key?(errors, domain) && errors[domain] >= max_retries)
+ end
+
+ defp fetch_counter(url, host, errors, opts) do
+ with true <- available_domain?(host, errors, opts),
+ {:ok, %{body: body, status: code}} when code in 200..299 <-
+ HTTP.get(
+ url,
+ [{:Accept, "application/activity+json"}]
+ ),
+ {:ok, data} <- Jason.decode(body) do
+ {data["totalItems"], errors}
+ else
+ false ->
+ {nil, errors}
+
+ _ ->
+ {nil, Map.update(errors, host, 1, &(&1 + 1))}
+ end
+ end
+end
diff --git a/lib/pleroma/user/synchronization_worker.ex b/lib/pleroma/user/synchronization_worker.ex
new file mode 100644
index 000000000..ba9cc3556
--- /dev/null
+++ b/lib/pleroma/user/synchronization_worker.ex
@@ -0,0 +1,32 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2018 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.User.SynchronizationWorker do
+ use GenServer
+
+ def start_link do
+ config = Pleroma.Config.get([:instance, :external_user_synchronization])
+
+ if config[:enabled] do
+ GenServer.start_link(__MODULE__, interval: config[:interval])
+ else
+ :ignore
+ end
+ end
+
+ def init(opts) do
+ schedule_next(opts)
+ {:ok, opts}
+ end
+
+ def handle_info(:sync_follow_counters, opts) do
+ Pleroma.User.sync_follow_counter()
+ schedule_next(opts)
+ {:noreply, opts}
+ end
+
+ defp schedule_next(opts) do
+ Process.send_after(self(), :sync_follow_counters, opts[:interval])
+ end
+end
diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex
index 3bb8b40b5..543d4bb7d 100644
--- a/lib/pleroma/web/activity_pub/transmogrifier.ex
+++ b/lib/pleroma/web/activity_pub/transmogrifier.ex
@@ -14,6 +14,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.Visibility
+ alias Pleroma.Web.Federator
import Ecto.Query
@@ -22,20 +23,20 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
@doc """
Modifies an incoming AP object (mastodon format) to our internal format.
"""
- def fix_object(object) do
+ def fix_object(object, options \\ []) do
object
|> fix_actor
|> fix_url
|> fix_attachments
|> fix_context
- |> fix_in_reply_to
+ |> fix_in_reply_to(options)
|> fix_emoji
|> fix_tag
|> fix_content_map
|> fix_likes
|> fix_addressing
|> fix_summary
- |> fix_type
+ |> fix_type(options)
end
def fix_summary(%{"summary" => nil} = object) do
@@ -164,7 +165,9 @@ def fix_likes(object) do
object
end
- def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object)
+ def fix_in_reply_to(object, options \\ [])
+
+ def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options)
when not is_nil(in_reply_to) do
in_reply_to_id =
cond do
@@ -182,28 +185,34 @@ def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object)
""
end
- case get_obj_helper(in_reply_to_id) do
- {:ok, replied_object} ->
- with %Activity{} = _activity <-
- Activity.get_create_by_object_ap_id(replied_object.data["id"]) do
- object
- |> Map.put("inReplyTo", replied_object.data["id"])
- |> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id)
- |> Map.put("conversation", replied_object.data["context"] || object["conversation"])
- |> Map.put("context", replied_object.data["context"] || object["conversation"])
- else
- e ->
- Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
- object
- end
+ object = Map.put(object, "inReplyToAtomUri", in_reply_to_id)
- e ->
- Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
- object
+ if Federator.allowed_incoming_reply_depth?(options[:depth]) do
+ case get_obj_helper(in_reply_to_id, options) do
+ {:ok, replied_object} ->
+ with %Activity{} = _activity <-
+ Activity.get_create_by_object_ap_id(replied_object.data["id"]) do
+ object
+ |> Map.put("inReplyTo", replied_object.data["id"])
+ |> Map.put("inReplyToAtomUri", object["inReplyToAtomUri"] || in_reply_to_id)
+ |> Map.put("conversation", replied_object.data["context"] || object["conversation"])
+ |> Map.put("context", replied_object.data["context"] || object["conversation"])
+ else
+ e ->
+ Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
+ object
+ end
+
+ e ->
+ Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
+ object
+ end
+ else
+ object
end
end
- def fix_in_reply_to(object), do: object
+ def fix_in_reply_to(object, _options), do: object
def fix_context(object) do
context = object["context"] || object["conversation"] || Utils.generate_context_id()
@@ -336,8 +345,13 @@ def fix_content_map(%{"contentMap" => content_map} = object) do
def fix_content_map(object), do: object
- def fix_type(%{"inReplyTo" => reply_id} = object) when is_binary(reply_id) do
- reply = Object.normalize(reply_id)
+ def fix_type(object, options \\ [])
+
+ def fix_type(%{"inReplyTo" => reply_id} = object, options) when is_binary(reply_id) do
+ reply =
+ if Federator.allowed_incoming_reply_depth?(options[:depth]) do
+ Object.normalize(reply_id, true)
+ end
if reply && (reply.data["type"] == "Question" and object["name"]) do
Map.put(object, "type", "Answer")
@@ -346,7 +360,7 @@ def fix_type(%{"inReplyTo" => reply_id} = object) when is_binary(reply_id) do
end
end
- def fix_type(object), do: object
+ def fix_type(object, _), do: object
defp mastodon_follow_hack(%{"id" => id, "actor" => follower_id}, followed) do
with true <- id =~ "follows",
@@ -374,9 +388,11 @@ defp get_follow_activity(follow_object, followed) do
end
end
+ def handle_incoming(data, options \\ [])
+
# Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them
# with nil ID.
- def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data) do
+ def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data, _options) do
with context <- data["context"] || Utils.generate_context_id(),
content <- data["content"] || "",
%User{} = actor <- User.get_cached_by_ap_id(actor),
@@ -409,15 +425,19 @@ def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} =
end
# disallow objects with bogus IDs
- def handle_incoming(%{"id" => nil}), do: :error
- def handle_incoming(%{"id" => ""}), do: :error
+ def handle_incoming(%{"id" => nil}, _options), do: :error
+ def handle_incoming(%{"id" => ""}, _options), do: :error
# length of https:// = 8, should validate better, but good enough for now.
- def handle_incoming(%{"id" => id}) when not (is_binary(id) and length(id) > 8), do: :error
+ def handle_incoming(%{"id" => id}, _options) when not (is_binary(id) and length(id) > 8),
+ do: :error
# TODO: validate those with a Ecto scheme
# - tags
# - emoji
- def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = object} = data)
+ def handle_incoming(
+ %{"type" => "Create", "object" => %{"type" => objtype} = object} = data,
+ options
+ )
when objtype in ["Article", "Note", "Video", "Page", "Question", "Answer"] do
actor = Containment.get_actor(data)
@@ -427,7 +447,8 @@ def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = obj
with nil <- Activity.get_create_by_object_ap_id(object["id"]),
{:ok, %User{} = user} <- User.get_or_fetch_by_ap_id(data["actor"]) do
- object = fix_object(data["object"])
+ options = Keyword.put(options, :depth, (options[:depth] || 0) + 1)
+ object = fix_object(data["object"], options)
params = %{
to: data["to"],
@@ -452,7 +473,8 @@ def handle_incoming(%{"type" => "Create", "object" => %{"type" => objtype} = obj
end
def handle_incoming(
- %{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data
+ %{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = data,
+ _options
) do
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
@@ -503,7 +525,8 @@ def handle_incoming(
end
def handle_incoming(
- %{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => _id} = data
+ %{"type" => "Accept", "object" => follow_object, "actor" => _actor, "id" => _id} = data,
+ _options
) do
with actor <- Containment.get_actor(data),
{:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
@@ -524,7 +547,8 @@ def handle_incoming(
end
def handle_incoming(
- %{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => _id} = data
+ %{"type" => "Reject", "object" => follow_object, "actor" => _actor, "id" => _id} = data,
+ _options
) do
with actor <- Containment.get_actor(data),
{:ok, %User{} = followed} <- User.get_or_fetch_by_ap_id(actor),
@@ -548,7 +572,8 @@ def handle_incoming(
end
def handle_incoming(
- %{"type" => "Like", "object" => object_id, "actor" => _actor, "id" => id} = data
+ %{"type" => "Like", "object" => object_id, "actor" => _actor, "id" => id} = data,
+ _options
) do
with actor <- Containment.get_actor(data),
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
@@ -561,7 +586,8 @@ def handle_incoming(
end
def handle_incoming(
- %{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data
+ %{"type" => "Announce", "object" => object_id, "actor" => _actor, "id" => id} = data,
+ _options
) do
with actor <- Containment.get_actor(data),
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
@@ -576,7 +602,8 @@ def handle_incoming(
def handle_incoming(
%{"type" => "Update", "object" => %{"type" => object_type} = object, "actor" => actor_id} =
- data
+ data,
+ _options
)
when object_type in ["Person", "Application", "Service", "Organization"] do
with %User{ap_id: ^actor_id} = actor <- User.get_cached_by_ap_id(object["id"]) do
@@ -614,7 +641,8 @@ def handle_incoming(
# an error or a tombstone. This would allow us to verify that a deletion actually took
# place.
def handle_incoming(
- %{"type" => "Delete", "object" => object_id, "actor" => _actor, "id" => _id} = data
+ %{"type" => "Delete", "object" => object_id, "actor" => _actor, "id" => _id} = data,
+ _options
) do
object_id = Utils.get_ap_id(object_id)
@@ -635,7 +663,8 @@ def handle_incoming(
"object" => %{"type" => "Announce", "object" => object_id},
"actor" => _actor,
"id" => id
- } = data
+ } = data,
+ _options
) do
with actor <- Containment.get_actor(data),
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
@@ -653,7 +682,8 @@ def handle_incoming(
"object" => %{"type" => "Follow", "object" => followed},
"actor" => follower,
"id" => id
- } = _data
+ } = _data,
+ _options
) do
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
@@ -671,7 +701,8 @@ def handle_incoming(
"object" => %{"type" => "Block", "object" => blocked},
"actor" => blocker,
"id" => id
- } = _data
+ } = _data,
+ _options
) do
with true <- Pleroma.Config.get([:activitypub, :accept_blocks]),
%User{local: true} = blocked <- User.get_cached_by_ap_id(blocked),
@@ -685,7 +716,8 @@ def handle_incoming(
end
def handle_incoming(
- %{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data
+ %{"type" => "Block", "object" => blocked, "actor" => blocker, "id" => id} = _data,
+ _options
) do
with true <- Pleroma.Config.get([:activitypub, :accept_blocks]),
%User{local: true} = blocked = User.get_cached_by_ap_id(blocked),
@@ -705,7 +737,8 @@ def handle_incoming(
"object" => %{"type" => "Like", "object" => object_id},
"actor" => _actor,
"id" => id
- } = data
+ } = data,
+ _options
) do
with actor <- Containment.get_actor(data),
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
@@ -717,10 +750,10 @@ def handle_incoming(
end
end
- def handle_incoming(_), do: :error
+ def handle_incoming(_, _), do: :error
- def get_obj_helper(id) do
- if object = Object.normalize(id), do: {:ok, object}, else: nil
+ def get_obj_helper(id, options \\ []) do
+ if object = Object.normalize(id, true, options), do: {:ok, object}, else: nil
end
def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) when is_binary(in_reply_to) do
diff --git a/lib/pleroma/web/activity_pub/utils.ex b/lib/pleroma/web/activity_pub/utils.ex
index 514266cee..4288ea4c8 100644
--- a/lib/pleroma/web/activity_pub/utils.ex
+++ b/lib/pleroma/web/activity_pub/utils.ex
@@ -170,14 +170,17 @@ def create_context(context) do
Enqueues an activity for federation if it's local
"""
def maybe_federate(%Activity{local: true} = activity) do
- priority =
- case activity.data["type"] do
- "Delete" -> 10
- "Create" -> 1
- _ -> 5
- end
+ if Pleroma.Config.get!([:instance, :federating]) do
+ priority =
+ case activity.data["type"] do
+ "Delete" -> 10
+ "Create" -> 1
+ _ -> 5
+ end
+
+ Pleroma.Web.Federator.publish(activity, priority)
+ end
- Pleroma.Web.Federator.publish(activity, priority)
:ok
end
diff --git a/lib/pleroma/web/federator/federator.ex b/lib/pleroma/web/federator/federator.ex
index f4c9fe284..f4f9e83e0 100644
--- a/lib/pleroma/web/federator/federator.ex
+++ b/lib/pleroma/web/federator/federator.ex
@@ -22,6 +22,18 @@ def init do
refresh_subscriptions()
end
+ @doc "Addresses [memory leaks on recursive replies fetching](https://git.pleroma.social/pleroma/pleroma/issues/161)"
+ # credo:disable-for-previous-line Credo.Check.Readability.MaxLineLength
+ def allowed_incoming_reply_depth?(depth) do
+ max_replies_depth = Pleroma.Config.get([:instance, :federation_incoming_replies_max_depth])
+
+ if max_replies_depth do
+ (depth || 1) <= max_replies_depth
+ else
+ true
+ end
+ end
+
# Client API
def incoming_doc(doc) do
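
`allowed_incoming_reply_depth?/1` is the single gate used by the AP transmogrifier and the OStatus note handler further down; with the default `federation_incoming_replies_max_depth: 100` added in this diff it behaves as follows (sketch):

```elixir
alias Pleroma.Web.Federator

Federator.allowed_incoming_reply_depth?(1)    #=> true
Federator.allowed_incoming_reply_depth?(100)  #=> true
Federator.allowed_incoming_reply_depth?(101)  #=> false

# A nil depth counts as depth 1, so top-level objects are always allowed.
Federator.allowed_incoming_reply_depth?(nil)  #=> true
```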
diff --git a/lib/pleroma/web/mastodon_api/views/status_view.ex b/lib/pleroma/web/mastodon_api/views/status_view.ex
index 6836d331a..ec582b919 100644
--- a/lib/pleroma/web/mastodon_api/views/status_view.ex
+++ b/lib/pleroma/web/mastodon_api/views/status_view.ex
@@ -104,7 +104,7 @@ def render(
id: to_string(activity.id),
uri: activity_object.data["id"],
url: activity_object.data["id"],
- account: AccountView.render("account.json", %{user: user}),
+ account: AccountView.render("account.json", %{user: user, for: opts[:for]}),
in_reply_to_id: nil,
in_reply_to_account_id: nil,
reblog: reblogged,
@@ -221,7 +221,7 @@ def render("status.json", %{activity: %{data: %{"object" => _object}} = activity
id: to_string(activity.id),
uri: object.data["id"],
url: url,
- account: AccountView.render("account.json", %{user: user}),
+ account: AccountView.render("account.json", %{user: user, for: opts[:for]}),
in_reply_to_id: reply_to && to_string(reply_to.id),
in_reply_to_account_id: reply_to_user && to_string(reply_to_user.id),
reblog: nil,
diff --git a/lib/pleroma/web/mastodon_api/websocket_handler.ex b/lib/pleroma/web/mastodon_api/websocket_handler.ex
index 3299e1721..dbd3542ea 100644
--- a/lib/pleroma/web/mastodon_api/websocket_handler.ex
+++ b/lib/pleroma/web/mastodon_api/websocket_handler.ex
@@ -29,9 +29,10 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
def init(%{qs: qs} = req, state) do
with params <- :cow_qs.parse_qs(qs),
+ sec_websocket <- :cowboy_req.header("sec-websocket-protocol", req, nil),
access_token <- List.keyfind(params, "access_token", 0),
{_, stream} <- List.keyfind(params, "stream", 0),
- {:ok, user} <- allow_request(stream, access_token),
+ {:ok, user} <- allow_request(stream, [access_token, sec_websocket]),
topic when is_binary(topic) <- expand_topic(stream, params) do
{:cowboy_websocket, req, %{user: user, topic: topic}, %{idle_timeout: @timeout}}
else
@@ -84,13 +85,21 @@ def terminate(reason, _req, state) do
end
# Public streams without authentication.
- defp allow_request(stream, nil) when stream in @anonymous_streams do
+ defp allow_request(stream, [nil, nil]) when stream in @anonymous_streams do
{:ok, nil}
end
# Authenticated streams.
- defp allow_request(stream, {"access_token", access_token}) when stream in @streams do
- with %Token{user_id: user_id} <- Repo.get_by(Token, token: access_token),
+ defp allow_request(stream, [access_token, sec_websocket]) when stream in @streams do
+ token =
+ with {"access_token", token} <- access_token do
+ token
+ else
+ _ -> sec_websocket
+ end
+
+ with true <- is_bitstring(token),
+ %Token{user_id: user_id} <- Repo.get_by(Token, token: token),
user = %User{} <- User.get_cached_by_id(user_id) do
{:ok, user}
else
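
The streaming endpoint now accepts the OAuth token either from the `access_token` query parameter or from the `Sec-WebSocket-Protocol` header, preferring the former. A standalone sketch of that fallback (module and function names are hypothetical, not part of the diff):

```elixir
defmodule TokenFallbackSketch do
  # access_token is {"access_token", token} from the query string, or nil;
  # sec_websocket is the raw Sec-WebSocket-Protocol header value, or nil.
  def resolve(access_token, sec_websocket) do
    case access_token do
      {"access_token", token} -> token
      _ -> sec_websocket
    end
  end
end

TokenFallbackSketch.resolve({"access_token", "abc"}, nil)  #=> "abc"
TokenFallbackSketch.resolve(nil, "abc")                    #=> "abc"
```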
diff --git a/lib/pleroma/web/media_proxy/media_proxy.ex b/lib/pleroma/web/media_proxy/media_proxy.ex
index cee6d8481..dd8888a02 100644
--- a/lib/pleroma/web/media_proxy/media_proxy.ex
+++ b/lib/pleroma/web/media_proxy/media_proxy.ex
@@ -33,20 +33,7 @@ defp whitelisted?(url) do
def encode_url(url) do
secret = Pleroma.Config.get([Pleroma.Web.Endpoint, :secret_key_base])
-
- # Must preserve `%2F` for compatibility with S3
- # https://git.pleroma.social/pleroma/pleroma/issues/580
- replacement = get_replacement(url, ":2F:")
-
- # The URL is url-decoded and encoded again to ensure it is correctly encoded and not twice.
- base64 =
- url
- |> String.replace("%2F", replacement)
- |> URI.decode()
- |> URI.encode()
- |> String.replace(replacement, "%2F")
- |> Base.url_encode64(@base64_opts)
-
+ base64 = Base.url_encode64(url, @base64_opts)
sig = :crypto.hmac(:sha, secret, base64)
sig64 = sig |> Base.url_encode64(@base64_opts)
@@ -80,12 +67,4 @@ def build_url(sig_base64, url_base64, filename \\ nil) do
|> Enum.filter(fn value -> value end)
|> Path.join()
end
-
- defp get_replacement(url, replacement) do
- if String.contains?(url, replacement) do
- get_replacement(url, replacement <> replacement)
- else
- replacement
- end
- end
end
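
With the `%2F` workaround gone, `encode_url/1` simply base64url-encodes the URL and signs that encoding; a sketch of the scheme (the secret is hypothetical, and `padding: false` mirrors what `@base64_opts` is expected to hold):

```elixir
url = "https://example.org/media/some%2Fpath.png"
secret = "hypothetical secret_key_base"

base64 = Base.url_encode64(url, padding: false)
sig64 = :crypto.hmac(:sha, secret, base64) |> Base.url_encode64(padding: false)

# build_url/3 then assembles /proxy/<sig64>/<base64>(/<filename>), and decoding
# verifies the URL by recomputing the HMAC over base64 and comparing it to sig64.
```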
diff --git a/lib/pleroma/web/metadata/opengraph.ex b/lib/pleroma/web/metadata/opengraph.ex
index 357b80a2d..4033ec38f 100644
--- a/lib/pleroma/web/metadata/opengraph.ex
+++ b/lib/pleroma/web/metadata/opengraph.ex
@@ -121,4 +121,6 @@ defp build_attachments(%{data: %{"attachment" => attachments}}) do
acc ++ rendered_tags
end)
end
+
+ defp build_attachments(_), do: []
end
diff --git a/lib/pleroma/web/metadata/twitter_card.ex b/lib/pleroma/web/metadata/twitter_card.ex
index 040b872e7..8dd01e0d5 100644
--- a/lib/pleroma/web/metadata/twitter_card.ex
+++ b/lib/pleroma/web/metadata/twitter_card.ex
@@ -117,6 +117,8 @@ defp build_attachments(id, %{data: %{"attachment" => attachments}}) do
end)
end
+ defp build_attachments(_id, _object), do: []
+
defp player_url(id) do
Pleroma.Web.Router.Helpers.o_status_url(Pleroma.Web.Endpoint, :notice_player, id)
end
diff --git a/lib/pleroma/web/ostatus/handlers/note_handler.ex b/lib/pleroma/web/ostatus/handlers/note_handler.ex
index ec6e5cfaf..8e0adad91 100644
--- a/lib/pleroma/web/ostatus/handlers/note_handler.ex
+++ b/lib/pleroma/web/ostatus/handlers/note_handler.ex
@@ -10,6 +10,7 @@ defmodule Pleroma.Web.OStatus.NoteHandler do
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.CommonAPI
+ alias Pleroma.Web.Federator
alias Pleroma.Web.OStatus
alias Pleroma.Web.XML
@@ -88,14 +89,15 @@ def add_external_url(note, entry) do
Map.put(note, "external_url", url)
end
- def fetch_replied_to_activity(entry, in_reply_to) do
+ def fetch_replied_to_activity(entry, in_reply_to, options \\ []) do
with %Activity{} = activity <- Activity.get_create_by_object_ap_id(in_reply_to) do
activity
else
_e ->
- with in_reply_to_href when not is_nil(in_reply_to_href) <-
+ with true <- Federator.allowed_incoming_reply_depth?(options[:depth]),
+ in_reply_to_href when not is_nil(in_reply_to_href) <-
XML.string_from_xpath("//thr:in-reply-to[1]/@href", entry),
- {:ok, [activity | _]} <- OStatus.fetch_activity_from_url(in_reply_to_href) do
+ {:ok, [activity | _]} <- OStatus.fetch_activity_from_url(in_reply_to_href, options) do
activity
else
_e -> nil
@@ -104,7 +106,7 @@ def fetch_replied_to_activity(entry, in_reply_to) do
end
# TODO: Clean this up a bit.
- def handle_note(entry, doc \\ nil) do
+ def handle_note(entry, doc \\ nil, options \\ []) do
with id <- XML.string_from_xpath("//id", entry),
activity when is_nil(activity) <- Activity.get_create_by_object_ap_id_with_object(id),
[author] <- :xmerl_xpath.string('//author[1]', doc),
@@ -112,7 +114,8 @@ def handle_note(entry, doc \\ nil) do
content_html <- OStatus.get_content(entry),
cw <- OStatus.get_cw(entry),
in_reply_to <- XML.string_from_xpath("//thr:in-reply-to[1]/@ref", entry),
- in_reply_to_activity <- fetch_replied_to_activity(entry, in_reply_to),
+ options <- Keyword.put(options, :depth, (options[:depth] || 0) + 1),
+ in_reply_to_activity <- fetch_replied_to_activity(entry, in_reply_to, options),
in_reply_to_object <-
(in_reply_to_activity && Object.normalize(in_reply_to_activity)) || nil,
in_reply_to <- (in_reply_to_object && in_reply_to_object.data["id"]) || in_reply_to,
diff --git a/lib/pleroma/web/ostatus/ostatus.ex b/lib/pleroma/web/ostatus/ostatus.ex
index 6ed089d84..502410c83 100644
--- a/lib/pleroma/web/ostatus/ostatus.ex
+++ b/lib/pleroma/web/ostatus/ostatus.ex
@@ -54,7 +54,7 @@ def remote_follow_path do
"#{Web.base_url()}/ostatus_subscribe?acct={uri}"
end
- def handle_incoming(xml_string) do
+ def handle_incoming(xml_string, options \\ []) do
with doc when doc != :error <- parse_document(xml_string) do
with {:ok, actor_user} <- find_make_or_update_user(doc),
do: Pleroma.Instances.set_reachable(actor_user.ap_id)
@@ -91,10 +91,12 @@ def handle_incoming(xml_string) do
_ ->
case object_type do
'http://activitystrea.ms/schema/1.0/note' ->
- with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity
+ with {:ok, activity} <- NoteHandler.handle_note(entry, doc, options),
+ do: activity
'http://activitystrea.ms/schema/1.0/comment' ->
- with {:ok, activity} <- NoteHandler.handle_note(entry, doc), do: activity
+ with {:ok, activity} <- NoteHandler.handle_note(entry, doc, options),
+ do: activity
_ ->
Logger.error("Couldn't parse incoming document")
@@ -359,7 +361,7 @@ def get_atom_url(body) do
end
end
- def fetch_activity_from_atom_url(url) do
+ def fetch_activity_from_atom_url(url, options \\ []) do
with true <- String.starts_with?(url, "http"),
{:ok, %{body: body, status: code}} when code in 200..299 <-
HTTP.get(
@@ -367,7 +369,7 @@ def fetch_activity_from_atom_url(url) do
[{:Accept, "application/atom+xml"}]
) do
Logger.debug("Got document from #{url}, handling...")
- handle_incoming(body)
+ handle_incoming(body, options)
else
e ->
Logger.debug("Couldn't get #{url}: #{inspect(e)}")
@@ -375,13 +377,13 @@ def fetch_activity_from_atom_url(url) do
end
end
- def fetch_activity_from_html_url(url) do
+ def fetch_activity_from_html_url(url, options \\ []) do
Logger.debug("Trying to fetch #{url}")
with true <- String.starts_with?(url, "http"),
{:ok, %{body: body}} <- HTTP.get(url, []),
{:ok, atom_url} <- get_atom_url(body) do
- fetch_activity_from_atom_url(atom_url)
+ fetch_activity_from_atom_url(atom_url, options)
else
e ->
Logger.debug("Couldn't get #{url}: #{inspect(e)}")
@@ -389,11 +391,11 @@ def fetch_activity_from_html_url(url) do
end
end
- def fetch_activity_from_url(url) do
- with {:ok, [_ | _] = activities} <- fetch_activity_from_atom_url(url) do
+ def fetch_activity_from_url(url, options \\ []) do
+ with {:ok, [_ | _] = activities} <- fetch_activity_from_atom_url(url, options) do
{:ok, activities}
else
- _e -> fetch_activity_from_html_url(url)
+ _e -> fetch_activity_from_html_url(url, options)
end
rescue
e ->
diff --git a/lib/pleroma/web/router.ex b/lib/pleroma/web/router.ex
index 055289dc5..ff9ed1640 100644
--- a/lib/pleroma/web/router.ex
+++ b/lib/pleroma/web/router.ex
@@ -724,6 +724,7 @@ defmodule Pleroma.Web.Router do
defmodule Fallback.RedirectController do
use Pleroma.Web, :controller
+ require Logger
alias Pleroma.User
alias Pleroma.Web.Metadata
@@ -750,7 +751,20 @@ def redirector_with_meta(conn, %{"maybe_nickname_or_id" => maybe_nickname_or_id}
def redirector_with_meta(conn, params) do
{:ok, index_content} = File.read(index_file_path())
- tags = Metadata.build_tags(params)
+
+ tags =
+ try do
+ Metadata.build_tags(params)
+ rescue
+ e ->
+ Logger.error(
+ "Metadata rendering for #{conn.request_path} failed.\n" <>
+ Exception.format(:error, e, __STACKTRACE__)
+ )
+
+ ""
+ end
+
response = String.replace(index_content, "<!--server-generated-meta-->", tags)
conn
diff --git a/mix.exs b/mix.exs
index 22d3d50df..8f64562ef 100644
--- a/mix.exs
+++ b/mix.exs
@@ -109,7 +109,6 @@ defp deps do
{:phoenix_html, "~> 2.10"},
{:calendar, "~> 0.17.4"},
{:cachex, "~> 3.0.2"},
- {:httpoison, "~> 1.2.0"},
{:poison, "~> 3.0", override: true},
{:tesla, "~> 1.2"},
{:jason, "~> 1.0"},
@@ -151,7 +150,8 @@ defp deps do
{:esshd, "~> 0.1.0", runtime: Application.get_env(:esshd, :enabled, false)},
{:ex_rated, "~> 1.3"},
{:plug_static_index_html, "~> 1.0.0"},
- {:excoveralls, "~> 0.11.1", only: :test}
+ {:excoveralls, "~> 0.11.1", only: :test},
+ {:mox, "~> 0.5", only: :test}
] ++ oauth_deps()
end
diff --git a/mix.lock b/mix.lock
index cae8d7d84..e711be635 100644
--- a/mix.lock
+++ b/mix.lock
@@ -52,6 +52,7 @@
"mochiweb": {:hex, :mochiweb, "2.15.0", "e1daac474df07651e5d17cc1e642c4069c7850dc4508d3db7263a0651330aacc", [:rebar3], [], "hexpm"},
"mock": {:hex, :mock, "0.3.3", "42a433794b1291a9cf1525c6d26b38e039e0d3a360732b5e467bfc77ef26c914", [:mix], [{:meck, "~> 0.8.13", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm"},
"mogrify": {:hex, :mogrify, "0.6.1", "de1b527514f2d95a7bbe9642eb556061afb337e220cf97adbf3a4e6438ed70af", [:mix], [], "hexpm"},
+ "mox": {:hex, :mox, "0.5.1", "f86bb36026aac1e6f924a4b6d024b05e9adbed5c63e8daa069bd66fb3292165b", [:mix], [], "hexpm"},
"nimble_parsec": {:hex, :nimble_parsec, "0.5.0", "90e2eca3d0266e5c53f8fbe0079694740b9c91b6747f2b7e3c5d21966bba8300", [:mix], [], "hexpm"},
"parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm"},
"pbkdf2_elixir": {:hex, :pbkdf2_elixir, "0.12.3", "6706a148809a29c306062862c803406e88f048277f6e85b68faf73291e820b84", [:mix], [], "hexpm"},
@@ -65,14 +66,12 @@
"plug_crypto": {:hex, :plug_crypto, "1.0.0", "18e49317d3fa343f24620ed22795ec29d4a5e602d52d1513ccea0b07d8ea7d4d", [:mix], [], "hexpm"},
"plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
"poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm"},
- "poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm"},
"postgrex": {:hex, :postgrex, "0.14.3", "5754dee2fdf6e9e508cbf49ab138df964278700b764177e8f3871e658b345a1e", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.0", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"},
"prometheus": {:hex, :prometheus, "4.2.2", "a830e77b79dc6d28183f4db050a7cac926a6c58f1872f9ef94a35cd989aceef8", [:mix, :rebar3], [], "hexpm"},
"prometheus_ecto": {:hex, :prometheus_ecto, "1.4.1", "6c768ea9654de871e5b32fab2eac348467b3021604ebebbcbd8bcbe806a65ed5", [:mix], [{:ecto, "~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
"prometheus_ex": {:hex, :prometheus_ex, "3.0.5", "fa58cfd983487fc5ead331e9a3e0aa622c67232b3ec71710ced122c4c453a02f", [:mix], [{:prometheus, "~> 4.0", [hex: :prometheus, repo: "hexpm", optional: false]}], "hexpm"},
"prometheus_phoenix": {:hex, :prometheus_phoenix, "1.2.1", "964a74dfbc055f781d3a75631e06ce3816a2913976d1df7830283aa3118a797a", [:mix], [{:phoenix, "~> 1.3", [hex: :phoenix, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.3 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
"prometheus_plugs": {:hex, :prometheus_plugs, "1.1.5", "25933d48f8af3a5941dd7b621c889749894d8a1082a6ff7c67cc99dec26377c5", [:mix], [{:accept, "~> 0.1", [hex: :accept, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}, {:prometheus_process_collector, "~> 1.1", [hex: :prometheus_process_collector, repo: "hexpm", optional: true]}], "hexpm"},
- "prometheus_process_collector": {:hex, :prometheus_process_collector, "1.4.0", "6dbd39e3165b9ef1c94a7a820e9ffe08479f949dcdd431ed4aaea7b250eebfde", [:rebar3], [{:prometheus, "~> 4.0", [hex: :prometheus, repo: "hexpm", optional: false]}], "hexpm"},
"quack": {:hex, :quack, "0.1.1", "cca7b4da1a233757fdb44b3334fce80c94785b3ad5a602053b7a002b5a8967bf", [:mix], [{:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: false]}, {:tesla, "~> 1.2.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm"},
"ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"},
"recon": {:git, "https://github.com/ferd/recon.git", "75d70c7c08926d2f24f1ee6de14ee50fe8a52763", [tag: "2.4.0"]},
diff --git a/priv/templates/sample_config.eex b/priv/templates/sample_config.eex
index 2d4a49328..5cc31c604 100644
--- a/priv/templates/sample_config.eex
+++ b/priv/templates/sample_config.eex
@@ -67,20 +67,3 @@ config :pleroma, Pleroma.Uploaders.Local, uploads: "<%= uploads_dir %>"
# For using third-party S3 clones like wasabi, also do:
# config :ex_aws, :s3,
# host: "s3.wasabisys.com"
-
-
-# Configure Openstack Swift support if desired.
-#
-# Many openstack deployments are different, so config is left very open with
-# no assumptions made on which provider you're using. This should allow very
-# wide support without needing separate handlers for OVH, Rackspace, etc.
-#
-# config :pleroma, Pleroma.Uploaders.Swift,
-# container: "some-container",
-# username: "api-username-yyyy",
-# password: "api-key-xxxx",
-# tenant_id: "",
-# auth_url: "https://keystone-endpoint.provider.com",
-# storage_url: "https://swift-endpoint.prodider.com/v1/AUTH_/",
-# object_url: "https://cdn-endpoint.provider.com/"
-#
diff --git a/test/conversation_test.exs b/test/conversation_test.exs
index 5903d10ff..aa193e0d4 100644
--- a/test/conversation_test.exs
+++ b/test/conversation_test.exs
@@ -11,6 +11,16 @@ defmodule Pleroma.ConversationTest do
import Pleroma.Factory
+ setup_all do
+ config_path = [:instance, :federating]
+ initial_setting = Pleroma.Config.get(config_path)
+
+ Pleroma.Config.put(config_path, true)
+ on_exit(fn -> Pleroma.Config.put(config_path, initial_setting) end)
+
+ :ok
+ end
+
test "it goes through old direct conversations" do
user = insert(:user)
other_user = insert(:user)
diff --git a/test/fixtures/httpoison_mock/7369654.atom b/test/fixtures/tesla_mock/7369654.atom
similarity index 100%
rename from test/fixtures/httpoison_mock/7369654.atom
rename to test/fixtures/tesla_mock/7369654.atom
diff --git a/test/fixtures/httpoison_mock/7369654.html b/test/fixtures/tesla_mock/7369654.html
similarity index 100%
rename from test/fixtures/httpoison_mock/7369654.html
rename to test/fixtures/tesla_mock/7369654.html
diff --git a/test/fixtures/httpoison_mock/7even.json b/test/fixtures/tesla_mock/7even.json
similarity index 100%
rename from test/fixtures/httpoison_mock/7even.json
rename to test/fixtures/tesla_mock/7even.json
diff --git a/test/fixtures/httpoison_mock/admin@mastdon.example.org.json b/test/fixtures/tesla_mock/admin@mastdon.example.org.json
similarity index 100%
rename from test/fixtures/httpoison_mock/admin@mastdon.example.org.json
rename to test/fixtures/tesla_mock/admin@mastdon.example.org.json
diff --git a/test/fixtures/httpoison_mock/atarifrosch_feed.xml b/test/fixtures/tesla_mock/atarifrosch_feed.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/atarifrosch_feed.xml
rename to test/fixtures/tesla_mock/atarifrosch_feed.xml
diff --git a/test/fixtures/httpoison_mock/atarifrosch_webfinger.xml b/test/fixtures/tesla_mock/atarifrosch_webfinger.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/atarifrosch_webfinger.xml
rename to test/fixtures/tesla_mock/atarifrosch_webfinger.xml
diff --git a/test/fixtures/httpoison_mock/baptiste.gelex.xyz-article.json b/test/fixtures/tesla_mock/baptiste.gelex.xyz-article.json
similarity index 100%
rename from test/fixtures/httpoison_mock/baptiste.gelex.xyz-article.json
rename to test/fixtures/tesla_mock/baptiste.gelex.xyz-article.json
diff --git a/test/fixtures/httpoison_mock/baptiste.gelex.xyz-user.json b/test/fixtures/tesla_mock/baptiste.gelex.xyz-user.json
similarity index 100%
rename from test/fixtures/httpoison_mock/baptiste.gelex.xyz-user.json
rename to test/fixtures/tesla_mock/baptiste.gelex.xyz-user.json
diff --git a/test/fixtures/httpoison_mock/eal_sakamoto.xml b/test/fixtures/tesla_mock/eal_sakamoto.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/eal_sakamoto.xml
rename to test/fixtures/tesla_mock/eal_sakamoto.xml
diff --git a/test/fixtures/httpoison_mock/emelie.atom b/test/fixtures/tesla_mock/emelie.atom
similarity index 100%
rename from test/fixtures/httpoison_mock/emelie.atom
rename to test/fixtures/tesla_mock/emelie.atom
diff --git a/test/fixtures/httpoison_mock/emelie.json b/test/fixtures/tesla_mock/emelie.json
similarity index 100%
rename from test/fixtures/httpoison_mock/emelie.json
rename to test/fixtures/tesla_mock/emelie.json
diff --git a/test/fixtures/httpoison_mock/framasoft@framatube.org.json b/test/fixtures/tesla_mock/framasoft@framatube.org.json
similarity index 100%
rename from test/fixtures/httpoison_mock/framasoft@framatube.org.json
rename to test/fixtures/tesla_mock/framasoft@framatube.org.json
diff --git a/test/fixtures/httpoison_mock/framatube.org_host_meta b/test/fixtures/tesla_mock/framatube.org_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/framatube.org_host_meta
rename to test/fixtures/tesla_mock/framatube.org_host_meta
diff --git a/test/fixtures/httpoison_mock/gerzilla.de_host_meta b/test/fixtures/tesla_mock/gerzilla.de_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/gerzilla.de_host_meta
rename to test/fixtures/tesla_mock/gerzilla.de_host_meta
diff --git a/test/fixtures/httpoison_mock/gnusocial.de_host_meta b/test/fixtures/tesla_mock/gnusocial.de_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/gnusocial.de_host_meta
rename to test/fixtures/tesla_mock/gnusocial.de_host_meta
diff --git a/test/fixtures/httpoison_mock/gs.example.org_host_meta b/test/fixtures/tesla_mock/gs.example.org_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/gs.example.org_host_meta
rename to test/fixtures/tesla_mock/gs.example.org_host_meta
diff --git a/test/fixtures/httpoison_mock/hellpie.json b/test/fixtures/tesla_mock/hellpie.json
similarity index 100%
rename from test/fixtures/httpoison_mock/hellpie.json
rename to test/fixtures/tesla_mock/hellpie.json
diff --git a/test/fixtures/httpoison_mock/http___gs.example.org_4040_index.php_user_1.xml b/test/fixtures/tesla_mock/http___gs.example.org_4040_index.php_user_1.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/http___gs.example.org_4040_index.php_user_1.xml
rename to test/fixtures/tesla_mock/http___gs.example.org_4040_index.php_user_1.xml
diff --git a/test/fixtures/httpoison_mock/http___mastodon.example.org_users_admin_status_1234.json b/test/fixtures/tesla_mock/http___mastodon.example.org_users_admin_status_1234.json
similarity index 100%
rename from test/fixtures/httpoison_mock/http___mastodon.example.org_users_admin_status_1234.json
rename to test/fixtures/tesla_mock/http___mastodon.example.org_users_admin_status_1234.json
diff --git a/test/fixtures/httpoison_mock/http__gs.example.org_index.php_api_statuses_user_timeline_1.atom.xml b/test/fixtures/tesla_mock/http__gs.example.org_index.php_api_statuses_user_timeline_1.atom.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/http__gs.example.org_index.php_api_statuses_user_timeline_1.atom.xml
rename to test/fixtures/tesla_mock/http__gs.example.org_index.php_api_statuses_user_timeline_1.atom.xml
diff --git a/test/fixtures/httpoison_mock/https___info.pleroma.site_actor.json b/test/fixtures/tesla_mock/https___info.pleroma.site_actor.json
similarity index 100%
rename from test/fixtures/httpoison_mock/https___info.pleroma.site_actor.json
rename to test/fixtures/tesla_mock/https___info.pleroma.site_actor.json
diff --git a/test/fixtures/httpoison_mock/https___mamot.fr_users_Skruyb.atom b/test/fixtures/tesla_mock/https___mamot.fr_users_Skruyb.atom
similarity index 100%
rename from test/fixtures/httpoison_mock/https___mamot.fr_users_Skruyb.atom
rename to test/fixtures/tesla_mock/https___mamot.fr_users_Skruyb.atom
diff --git a/test/fixtures/httpoison_mock/https___mastodon.social_users_lambadalambda.atom b/test/fixtures/tesla_mock/https___mastodon.social_users_lambadalambda.atom
similarity index 100%
rename from test/fixtures/httpoison_mock/https___mastodon.social_users_lambadalambda.atom
rename to test/fixtures/tesla_mock/https___mastodon.social_users_lambadalambda.atom
diff --git a/test/fixtures/httpoison_mock/https___mastodon.social_users_lambadalambda.xml b/test/fixtures/tesla_mock/https___mastodon.social_users_lambadalambda.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/https___mastodon.social_users_lambadalambda.xml
rename to test/fixtures/tesla_mock/https___mastodon.social_users_lambadalambda.xml
diff --git a/test/fixtures/httpoison_mock/https___osada.macgirvin.com_channel_mike.json b/test/fixtures/tesla_mock/https___osada.macgirvin.com_channel_mike.json
similarity index 100%
rename from test/fixtures/httpoison_mock/https___osada.macgirvin.com_channel_mike.json
rename to test/fixtures/tesla_mock/https___osada.macgirvin.com_channel_mike.json
diff --git a/test/fixtures/httpoison_mock/https___pawoo.net_users_aqidaqidaqid.xml b/test/fixtures/tesla_mock/https___pawoo.net_users_aqidaqidaqid.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/https___pawoo.net_users_aqidaqidaqid.xml
rename to test/fixtures/tesla_mock/https___pawoo.net_users_aqidaqidaqid.xml
diff --git a/test/fixtures/httpoison_mock/https___pawoo.net_users_pekorino.atom b/test/fixtures/tesla_mock/https___pawoo.net_users_pekorino.atom
similarity index 100%
rename from test/fixtures/httpoison_mock/https___pawoo.net_users_pekorino.atom
rename to test/fixtures/tesla_mock/https___pawoo.net_users_pekorino.atom
diff --git a/test/fixtures/httpoison_mock/https___pawoo.net_users_pekorino.xml b/test/fixtures/tesla_mock/https___pawoo.net_users_pekorino.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/https___pawoo.net_users_pekorino.xml
rename to test/fixtures/tesla_mock/https___pawoo.net_users_pekorino.xml
diff --git a/test/fixtures/httpoison_mock/https___pleroma.soykaf.com_users_lain.xml b/test/fixtures/tesla_mock/https___pleroma.soykaf.com_users_lain.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/https___pleroma.soykaf.com_users_lain.xml
rename to test/fixtures/tesla_mock/https___pleroma.soykaf.com_users_lain.xml
diff --git a/test/fixtures/httpoison_mock/https___pleroma.soykaf.com_users_lain_feed.atom.xml b/test/fixtures/tesla_mock/https___pleroma.soykaf.com_users_lain_feed.atom.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/https___pleroma.soykaf.com_users_lain_feed.atom.xml
rename to test/fixtures/tesla_mock/https___pleroma.soykaf.com_users_lain_feed.atom.xml
diff --git a/test/fixtures/httpoison_mock/https___prismo.news__mxb.json b/test/fixtures/tesla_mock/https___prismo.news__mxb.json
similarity index 100%
rename from test/fixtures/httpoison_mock/https___prismo.news__mxb.json
rename to test/fixtures/tesla_mock/https___prismo.news__mxb.json
diff --git a/test/fixtures/httpoison_mock/https___shitposter.club_api_statuses_show_2827873.atom.xml b/test/fixtures/tesla_mock/https___shitposter.club_api_statuses_show_2827873.atom.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/https___shitposter.club_api_statuses_show_2827873.atom.xml
rename to test/fixtures/tesla_mock/https___shitposter.club_api_statuses_show_2827873.atom.xml
diff --git a/test/fixtures/httpoison_mock/https___shitposter.club_api_statuses_user_timeline_1.atom.xml b/test/fixtures/tesla_mock/https___shitposter.club_api_statuses_user_timeline_1.atom.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/https___shitposter.club_api_statuses_user_timeline_1.atom.xml
rename to test/fixtures/tesla_mock/https___shitposter.club_api_statuses_user_timeline_1.atom.xml
diff --git a/test/fixtures/httpoison_mock/https___shitposter.club_notice_2827873.html b/test/fixtures/tesla_mock/https___shitposter.club_notice_2827873.html
similarity index 100%
rename from test/fixtures/httpoison_mock/https___shitposter.club_notice_2827873.html
rename to test/fixtures/tesla_mock/https___shitposter.club_notice_2827873.html
diff --git a/test/fixtures/httpoison_mock/https___shitposter.club_user_1.xml b/test/fixtures/tesla_mock/https___shitposter.club_user_1.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/https___shitposter.club_user_1.xml
rename to test/fixtures/tesla_mock/https___shitposter.club_user_1.xml
diff --git a/test/fixtures/httpoison_mock/https___social.heldscal.la_api_statuses_user_timeline_23211.atom.xml b/test/fixtures/tesla_mock/https___social.heldscal.la_api_statuses_user_timeline_23211.atom.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/https___social.heldscal.la_api_statuses_user_timeline_23211.atom.xml
rename to test/fixtures/tesla_mock/https___social.heldscal.la_api_statuses_user_timeline_23211.atom.xml
diff --git a/test/fixtures/httpoison_mock/https___social.heldscal.la_api_statuses_user_timeline_29191.atom.xml b/test/fixtures/tesla_mock/https___social.heldscal.la_api_statuses_user_timeline_29191.atom.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/https___social.heldscal.la_api_statuses_user_timeline_29191.atom.xml
rename to test/fixtures/tesla_mock/https___social.heldscal.la_api_statuses_user_timeline_29191.atom.xml
diff --git a/test/fixtures/httpoison_mock/https___social.heldscal.la_user_23211.xml b/test/fixtures/tesla_mock/https___social.heldscal.la_user_23211.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/https___social.heldscal.la_user_23211.xml
rename to test/fixtures/tesla_mock/https___social.heldscal.la_user_23211.xml
diff --git a/test/fixtures/httpoison_mock/https___social.heldscal.la_user_29191.xml b/test/fixtures/tesla_mock/https___social.heldscal.la_user_29191.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/https___social.heldscal.la_user_29191.xml
rename to test/fixtures/tesla_mock/https___social.heldscal.la_user_29191.xml
diff --git a/test/fixtures/httpoison_mock/https__info.pleroma.site_activity.json b/test/fixtures/tesla_mock/https__info.pleroma.site_activity.json
similarity index 100%
rename from test/fixtures/httpoison_mock/https__info.pleroma.site_activity.json
rename to test/fixtures/tesla_mock/https__info.pleroma.site_activity.json
diff --git a/test/fixtures/httpoison_mock/https__info.pleroma.site_activity2.json b/test/fixtures/tesla_mock/https__info.pleroma.site_activity2.json
similarity index 100%
rename from test/fixtures/httpoison_mock/https__info.pleroma.site_activity2.json
rename to test/fixtures/tesla_mock/https__info.pleroma.site_activity2.json
diff --git a/test/fixtures/httpoison_mock/https__info.pleroma.site_activity3.json b/test/fixtures/tesla_mock/https__info.pleroma.site_activity3.json
similarity index 100%
rename from test/fixtures/httpoison_mock/https__info.pleroma.site_activity3.json
rename to test/fixtures/tesla_mock/https__info.pleroma.site_activity3.json
diff --git a/test/fixtures/httpoison_mock/https__info.pleroma.site_activity4.json b/test/fixtures/tesla_mock/https__info.pleroma.site_activity4.json
similarity index 100%
rename from test/fixtures/httpoison_mock/https__info.pleroma.site_activity4.json
rename to test/fixtures/tesla_mock/https__info.pleroma.site_activity4.json
diff --git a/test/fixtures/httpoison_mock/kaniini@gerzilla.de.json b/test/fixtures/tesla_mock/kaniini@gerzilla.de.json
similarity index 100%
rename from test/fixtures/httpoison_mock/kaniini@gerzilla.de.json
rename to test/fixtures/tesla_mock/kaniini@gerzilla.de.json
diff --git a/test/fixtures/httpoison_mock/kaniini@hubzilla.example.org.json b/test/fixtures/tesla_mock/kaniini@hubzilla.example.org.json
similarity index 100%
rename from test/fixtures/httpoison_mock/kaniini@hubzilla.example.org.json
rename to test/fixtures/tesla_mock/kaniini@hubzilla.example.org.json
diff --git a/test/fixtures/httpoison_mock/lain_squeet.me_webfinger.xml b/test/fixtures/tesla_mock/lain_squeet.me_webfinger.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/lain_squeet.me_webfinger.xml
rename to test/fixtures/tesla_mock/lain_squeet.me_webfinger.xml
diff --git a/test/fixtures/httpoison_mock/lucifermysticus.json b/test/fixtures/tesla_mock/lucifermysticus.json
similarity index 100%
rename from test/fixtures/httpoison_mock/lucifermysticus.json
rename to test/fixtures/tesla_mock/lucifermysticus.json
diff --git a/test/fixtures/httpoison_mock/macgirvin.com_host_meta b/test/fixtures/tesla_mock/macgirvin.com_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/macgirvin.com_host_meta
rename to test/fixtures/tesla_mock/macgirvin.com_host_meta
diff --git a/test/fixtures/httpoison_mock/mamot.fr_host_meta b/test/fixtures/tesla_mock/mamot.fr_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/mamot.fr_host_meta
rename to test/fixtures/tesla_mock/mamot.fr_host_meta
diff --git a/test/fixtures/httpoison_mock/mastodon.social_host_meta b/test/fixtures/tesla_mock/mastodon.social_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/mastodon.social_host_meta
rename to test/fixtures/tesla_mock/mastodon.social_host_meta
diff --git a/test/fixtures/httpoison_mock/mastodon.xyz_host_meta b/test/fixtures/tesla_mock/mastodon.xyz_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/mastodon.xyz_host_meta
rename to test/fixtures/tesla_mock/mastodon.xyz_host_meta
diff --git a/test/fixtures/httpoison_mock/mayumayu.json b/test/fixtures/tesla_mock/mayumayu.json
similarity index 100%
rename from test/fixtures/httpoison_mock/mayumayu.json
rename to test/fixtures/tesla_mock/mayumayu.json
diff --git a/test/fixtures/httpoison_mock/mayumayupost.json b/test/fixtures/tesla_mock/mayumayupost.json
similarity index 100%
rename from test/fixtures/httpoison_mock/mayumayupost.json
rename to test/fixtures/tesla_mock/mayumayupost.json
diff --git a/test/fixtures/httpoison_mock/mike@osada.macgirvin.com.json b/test/fixtures/tesla_mock/mike@osada.macgirvin.com.json
similarity index 100%
rename from test/fixtures/httpoison_mock/mike@osada.macgirvin.com.json
rename to test/fixtures/tesla_mock/mike@osada.macgirvin.com.json
diff --git a/test/fixtures/httpoison_mock/nonexistant@social.heldscal.la.xml b/test/fixtures/tesla_mock/nonexistant@social.heldscal.la.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/nonexistant@social.heldscal.la.xml
rename to test/fixtures/tesla_mock/nonexistant@social.heldscal.la.xml
diff --git a/test/fixtures/httpoison_mock/pawoo.net_host_meta b/test/fixtures/tesla_mock/pawoo.net_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/pawoo.net_host_meta
rename to test/fixtures/tesla_mock/pawoo.net_host_meta
diff --git a/test/fixtures/httpoison_mock/peertube.moe-vid.json b/test/fixtures/tesla_mock/peertube.moe-vid.json
similarity index 100%
rename from test/fixtures/httpoison_mock/peertube.moe-vid.json
rename to test/fixtures/tesla_mock/peertube.moe-vid.json
diff --git a/test/fixtures/httpoison_mock/pleroma.soykaf.com_host_meta b/test/fixtures/tesla_mock/pleroma.soykaf.com_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/pleroma.soykaf.com_host_meta
rename to test/fixtures/tesla_mock/pleroma.soykaf.com_host_meta
diff --git a/test/fixtures/httpoison_mock/puckipedia.com.json b/test/fixtures/tesla_mock/puckipedia.com.json
similarity index 100%
rename from test/fixtures/httpoison_mock/puckipedia.com.json
rename to test/fixtures/tesla_mock/puckipedia.com.json
diff --git a/test/fixtures/httpoison_mock/rinpatch.json b/test/fixtures/tesla_mock/rinpatch.json
similarity index 100%
rename from test/fixtures/httpoison_mock/rinpatch.json
rename to test/fixtures/tesla_mock/rinpatch.json
diff --git a/test/fixtures/httpoison_mock/rye.json b/test/fixtures/tesla_mock/rye.json
similarity index 100%
rename from test/fixtures/httpoison_mock/rye.json
rename to test/fixtures/tesla_mock/rye.json
diff --git a/test/fixtures/httpoison_mock/sakamoto.atom b/test/fixtures/tesla_mock/sakamoto.atom
similarity index 100%
rename from test/fixtures/httpoison_mock/sakamoto.atom
rename to test/fixtures/tesla_mock/sakamoto.atom
diff --git a/test/fixtures/httpoison_mock/sakamoto_eal_feed.atom b/test/fixtures/tesla_mock/sakamoto_eal_feed.atom
similarity index 100%
rename from test/fixtures/httpoison_mock/sakamoto_eal_feed.atom
rename to test/fixtures/tesla_mock/sakamoto_eal_feed.atom
diff --git a/test/fixtures/httpoison_mock/shitposter.club_host_meta b/test/fixtures/tesla_mock/shitposter.club_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/shitposter.club_host_meta
rename to test/fixtures/tesla_mock/shitposter.club_host_meta
diff --git a/test/fixtures/httpoison_mock/shp@pleroma.soykaf.com.feed b/test/fixtures/tesla_mock/shp@pleroma.soykaf.com.feed
similarity index 100%
rename from test/fixtures/httpoison_mock/shp@pleroma.soykaf.com.feed
rename to test/fixtures/tesla_mock/shp@pleroma.soykaf.com.feed
diff --git a/test/fixtures/httpoison_mock/shp@pleroma.soykaf.com.webfigner b/test/fixtures/tesla_mock/shp@pleroma.soykaf.com.webfigner
similarity index 100%
rename from test/fixtures/httpoison_mock/shp@pleroma.soykaf.com.webfigner
rename to test/fixtures/tesla_mock/shp@pleroma.soykaf.com.webfigner
diff --git a/test/fixtures/httpoison_mock/shp@social.heldscal.la.xml b/test/fixtures/tesla_mock/shp@social.heldscal.la.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/shp@social.heldscal.la.xml
rename to test/fixtures/tesla_mock/shp@social.heldscal.la.xml
diff --git a/test/fixtures/httpoison_mock/skruyb@mamot.fr.atom b/test/fixtures/tesla_mock/skruyb@mamot.fr.atom
similarity index 100%
rename from test/fixtures/httpoison_mock/skruyb@mamot.fr.atom
rename to test/fixtures/tesla_mock/skruyb@mamot.fr.atom
diff --git a/test/fixtures/httpoison_mock/social.heldscal.la_host_meta b/test/fixtures/tesla_mock/social.heldscal.la_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/social.heldscal.la_host_meta
rename to test/fixtures/tesla_mock/social.heldscal.la_host_meta
diff --git a/test/fixtures/httpoison_mock/social.sakamoto.gq_host_meta b/test/fixtures/tesla_mock/social.sakamoto.gq_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/social.sakamoto.gq_host_meta
rename to test/fixtures/tesla_mock/social.sakamoto.gq_host_meta
diff --git a/test/fixtures/httpoison_mock/social.stopwatchingus-heidelberg.de_host_meta b/test/fixtures/tesla_mock/social.stopwatchingus-heidelberg.de_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/social.stopwatchingus-heidelberg.de_host_meta
rename to test/fixtures/tesla_mock/social.stopwatchingus-heidelberg.de_host_meta
diff --git a/test/fixtures/httpoison_mock/social.wxcafe.net_host_meta b/test/fixtures/tesla_mock/social.wxcafe.net_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/social.wxcafe.net_host_meta
rename to test/fixtures/tesla_mock/social.wxcafe.net_host_meta
diff --git a/test/fixtures/httpoison_mock/spc_5381.atom b/test/fixtures/tesla_mock/spc_5381.atom
similarity index 100%
rename from test/fixtures/httpoison_mock/spc_5381.atom
rename to test/fixtures/tesla_mock/spc_5381.atom
diff --git a/test/fixtures/httpoison_mock/spc_5381_xrd.xml b/test/fixtures/tesla_mock/spc_5381_xrd.xml
similarity index 100%
rename from test/fixtures/httpoison_mock/spc_5381_xrd.xml
rename to test/fixtures/tesla_mock/spc_5381_xrd.xml
diff --git a/test/fixtures/httpoison_mock/squeet.me_host_meta b/test/fixtures/tesla_mock/squeet.me_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/squeet.me_host_meta
rename to test/fixtures/tesla_mock/squeet.me_host_meta
diff --git a/test/fixtures/httpoison_mock/status.alpicola.com_host_meta b/test/fixtures/tesla_mock/status.alpicola.com_host_meta
similarity index 100%
rename from test/fixtures/httpoison_mock/status.alpicola.com_host_meta
rename to test/fixtures/tesla_mock/status.alpicola.com_host_meta
diff --git a/test/fixtures/httpoison_mock/status.emelie.json b/test/fixtures/tesla_mock/status.emelie.json
similarity index 100%
rename from test/fixtures/httpoison_mock/status.emelie.json
rename to test/fixtures/tesla_mock/status.emelie.json
diff --git a/test/fixtures/httpoison_mock/webfinger_emelie.json b/test/fixtures/tesla_mock/webfinger_emelie.json
similarity index 100%
rename from test/fixtures/httpoison_mock/webfinger_emelie.json
rename to test/fixtures/tesla_mock/webfinger_emelie.json
diff --git a/test/fixtures/httpoison_mock/winterdienst_webfinger.json b/test/fixtures/tesla_mock/winterdienst_webfinger.json
similarity index 100%
rename from test/fixtures/httpoison_mock/winterdienst_webfinger.json
rename to test/fixtures/tesla_mock/winterdienst_webfinger.json
diff --git a/test/fixtures/users_mock/masto_closed_followers.json b/test/fixtures/users_mock/masto_closed_followers.json
new file mode 100644
index 000000000..da296892d
--- /dev/null
+++ b/test/fixtures/users_mock/masto_closed_followers.json
@@ -0,0 +1,7 @@
+{
+ "@context": "https://www.w3.org/ns/activitystreams",
+ "id": "http://localhost:4001/users/masto_closed/followers",
+ "type": "OrderedCollection",
+ "totalItems": 437,
+ "first": "http://localhost:4001/users/masto_closed/followers?page=1"
+}
diff --git a/test/fixtures/users_mock/masto_closed_following.json b/test/fixtures/users_mock/masto_closed_following.json
new file mode 100644
index 000000000..146d49f9c
--- /dev/null
+++ b/test/fixtures/users_mock/masto_closed_following.json
@@ -0,0 +1,7 @@
+{
+ "@context": "https://www.w3.org/ns/activitystreams",
+ "id": "http://localhost:4001/users/masto_closed/following",
+ "type": "OrderedCollection",
+ "totalItems": 152,
+ "first": "http://localhost:4001/users/masto_closed/following?page=1"
+}
diff --git a/test/fixtures/users_mock/pleroma_followers.json b/test/fixtures/users_mock/pleroma_followers.json
new file mode 100644
index 000000000..db71d084b
--- /dev/null
+++ b/test/fixtures/users_mock/pleroma_followers.json
@@ -0,0 +1,20 @@
+{
+ "type": "OrderedCollection",
+ "totalItems": 527,
+ "id": "http://localhost:4001/users/fuser2/followers",
+ "first": {
+ "type": "OrderedCollectionPage",
+ "totalItems": 527,
+ "partOf": "http://localhost:4001/users/fuser2/followers",
+ "orderedItems": [],
+ "next": "http://localhost:4001/users/fuser2/followers?page=2",
+ "id": "http://localhost:4001/users/fuser2/followers?page=1"
+ },
+ "@context": [
+ "https://www.w3.org/ns/activitystreams",
+ "http://localhost:4001/schemas/litepub-0.1.jsonld",
+ {
+ "@language": "und"
+ }
+ ]
+}
diff --git a/test/fixtures/users_mock/pleroma_following.json b/test/fixtures/users_mock/pleroma_following.json
new file mode 100644
index 000000000..33d087703
--- /dev/null
+++ b/test/fixtures/users_mock/pleroma_following.json
@@ -0,0 +1,20 @@
+{
+ "type": "OrderedCollection",
+ "totalItems": 267,
+ "id": "http://localhost:4001/users/fuser2/following",
+ "first": {
+ "type": "OrderedCollectionPage",
+ "totalItems": 267,
+ "partOf": "http://localhost:4001/users/fuser2/following",
+ "orderedItems": [],
+ "next": "http://localhost:4001/users/fuser2/following?page=2",
+ "id": "http://localhost:4001/users/fuser2/following?page=1"
+ },
+ "@context": [
+ "https://www.w3.org/ns/activitystreams",
+ "http://localhost:4001/schemas/litepub-0.1.jsonld",
+ {
+ "@language": "und"
+ }
+ ]
+}
diff --git a/test/http/request_builder_test.exs b/test/http/request_builder_test.exs
new file mode 100644
index 000000000..a368999ff
--- /dev/null
+++ b/test/http/request_builder_test.exs
@@ -0,0 +1,91 @@
+defmodule Pleroma.HTTP.RequestBuilderTest do
+ use ExUnit.Case, async: true
+ alias Pleroma.HTTP.RequestBuilder
+
+ describe "headers/2" do
+ test "don't send pleroma user agent" do
+ assert RequestBuilder.headers(%{}, []) == %{headers: []}
+ end
+
+ test "send pleroma user agent" do
+ send = Pleroma.Config.get([:http, :send_user_agent])
+ Pleroma.Config.put([:http, :send_user_agent], true)
+
+ on_exit(fn ->
+ Pleroma.Config.put([:http, :send_user_agent], send)
+ end)
+
+ assert RequestBuilder.headers(%{}, []) == %{
+ headers: [{"User-Agent", Pleroma.Application.user_agent()}]
+ }
+ end
+ end
+
+ describe "add_optional_params/3" do
+ test "don't add if keyword is empty" do
+ assert RequestBuilder.add_optional_params(%{}, %{}, []) == %{}
+ end
+
+ test "add query parameter" do
+ assert RequestBuilder.add_optional_params(
+ %{},
+ %{query: :query, body: :body, another: :val},
+ [
+          {:query, "param1=val1&param2=val2"},
+ {:body, "some body"}
+ ]
+        ) == %{query: "param1=val1&param2=val2", body: "some body"}
+ end
+ end
+
+ describe "add_param/4" do
+ test "add file parameter" do
+ %{
+ body: %Tesla.Multipart{
+ boundary: _,
+ content_type_params: [],
+ parts: [
+ %Tesla.Multipart.Part{
+ body: %File.Stream{
+ line_or_bytes: 2048,
+ modes: [:raw, :read_ahead, :read, :binary],
+ path: "some-path/filename.png",
+ raw: true
+ },
+ dispositions: [name: "filename.png", filename: "filename.png"],
+ headers: []
+ }
+ ]
+ }
+ } = RequestBuilder.add_param(%{}, :file, "filename.png", "some-path/filename.png")
+ end
+
+ test "add key to body" do
+ %{
+ body: %Tesla.Multipart{
+ boundary: _,
+ content_type_params: [],
+ parts: [
+ %Tesla.Multipart.Part{
+ body: "\"someval\"",
+ dispositions: [name: "somekey"],
+ headers: ["Content-Type": "application/json"]
+ }
+ ]
+ }
+ } = RequestBuilder.add_param(%{}, :body, "somekey", "someval")
+ end
+
+ test "add form parameter" do
+ assert RequestBuilder.add_param(%{}, :form, "somename", "someval") == %{
+ body: %{"somename" => "someval"}
+ }
+ end
+
+    test "add parameter to a custom location" do
+ assert RequestBuilder.add_param(%{}, :some_location, "somekey", "someval") == %{
+ some_location: [{"somekey", "someval"}]
+ }
+ end
+ end
+end
diff --git a/test/integration/mastodon_websocket_test.exs b/test/integration/mastodon_websocket_test.exs
index a604713d8..3975cdcd6 100644
--- a/test/integration/mastodon_websocket_test.exs
+++ b/test/integration/mastodon_websocket_test.exs
@@ -107,5 +107,12 @@ test "accepts the 'user:notification' stream", %{token: token} = _state do
assert {:ok, _} = start_socket("?stream=user:notification&access_token=#{token.token}")
assert {:error, {403, "Forbidden"}} = start_socket("?stream=user:notification")
end
+
+ test "accepts valid token on Sec-WebSocket-Protocol header", %{token: token} do
+ assert {:ok, _} = start_socket("?stream=user", [{"Sec-WebSocket-Protocol", token.token}])
+
+ assert {:error, {403, "Forbidden"}} =
+ start_socket("?stream=user", [{"Sec-WebSocket-Protocol", "I am a friend"}])
+ end
end
end
diff --git a/test/media_proxy_test.exs b/test/media_proxy_test.exs
index b23aeb88b..1d6d170b7 100644
--- a/test/media_proxy_test.exs
+++ b/test/media_proxy_test.exs
@@ -70,14 +70,6 @@ test "encodes and decodes URL and ignores query params for the path" do
assert decode_result(encoded) == url
end
- test "ensures urls are url-encoded" do
- assert decode_result(url("https://pleroma.social/Hello world.jpg")) ==
- "https://pleroma.social/Hello%20world.jpg"
-
- assert decode_result(url("https://pleroma.social/Hello%20world.jpg")) ==
- "https://pleroma.social/Hello%20world.jpg"
- end
-
test "validates signature" do
secret_key_base = Pleroma.Config.get([Pleroma.Web.Endpoint, :secret_key_base])
@@ -141,10 +133,31 @@ test "uses the configured base_url" do
assert String.starts_with?(encoded, Pleroma.Config.get([:media_proxy, :base_url]))
end
- # https://git.pleroma.social/pleroma/pleroma/issues/580
- test "encoding S3 links (must preserve `%2F`)" do
+  # Some sites expect ASCII-encoded characters in the URL to be preserved even when
+  # the encoding is unnecessary.
+ # Issues: https://git.pleroma.social/pleroma/pleroma/issues/580
+ # https://git.pleroma.social/pleroma/pleroma/issues/1055
+ test "preserve ASCII encoding" do
url =
- "https://s3.amazonaws.com/example/test.png?X-Amz-Credential=your-access-key-id%2F20130721%2Fus-east-1%2Fs3%2Faws4_request"
+ "https://pleroma.com/%20/%21/%22/%23/%24/%25/%26/%27/%28/%29/%2A/%2B/%2C/%2D/%2E/%2F/%30/%31/%32/%33/%34/%35/%36/%37/%38/%39/%3A/%3B/%3C/%3D/%3E/%3F/%40/%41/%42/%43/%44/%45/%46/%47/%48/%49/%4A/%4B/%4C/%4D/%4E/%4F/%50/%51/%52/%53/%54/%55/%56/%57/%58/%59/%5A/%5B/%5C/%5D/%5E/%5F/%60/%61/%62/%63/%64/%65/%66/%67/%68/%69/%6A/%6B/%6C/%6D/%6E/%6F/%70/%71/%72/%73/%74/%75/%76/%77/%78/%79/%7A/%7B/%7C/%7D/%7E/%7F/%80/%81/%82/%83/%84/%85/%86/%87/%88/%89/%8A/%8B/%8C/%8D/%8E/%8F/%90/%91/%92/%93/%94/%95/%96/%97/%98/%99/%9A/%9B/%9C/%9D/%9E/%9F/%C2%A0/%A1/%A2/%A3/%A4/%A5/%A6/%A7/%A8/%A9/%AA/%AB/%AC/%C2%AD/%AE/%AF/%B0/%B1/%B2/%B3/%B4/%B5/%B6/%B7/%B8/%B9/%BA/%BB/%BC/%BD/%BE/%BF/%C0/%C1/%C2/%C3/%C4/%C5/%C6/%C7/%C8/%C9/%CA/%CB/%CC/%CD/%CE/%CF/%D0/%D1/%D2/%D3/%D4/%D5/%D6/%D7/%D8/%D9/%DA/%DB/%DC/%DD/%DE/%DF/%E0/%E1/%E2/%E3/%E4/%E5/%E6/%E7/%E8/%E9/%EA/%EB/%EC/%ED/%EE/%EF/%F0/%F1/%F2/%F3/%F4/%F5/%F6/%F7/%F8/%F9/%FA/%FB/%FC/%FD/%FE/%FF"
+
+ encoded = url(url)
+ assert decode_result(encoded) == url
+ end
+
+  # This includes unsafe/reserved characters that are not interpreted as part of the URL
+  # and would otherwise have to be ASCII-encoded. The proxy must leave the URL
+  # unmodified, so we test these characters as well.
+ test "preserve non-unicode characters per RFC3986" do
+ url =
+ "https://pleroma.com/ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz1234567890-._~:/?#[]@!$&'()*+,;=|^`{}"
+
+ encoded = url(url)
+ assert decode_result(encoded) == url
+ end
+
+ test "preserve unicode characters" do
+ url = "https://ko.wikipedia.org/wiki/위키백과:대문"
encoded = url(url)
assert decode_result(encoded) == url
diff --git a/test/reverse_proxy_test.exs b/test/reverse_proxy_test.exs
new file mode 100644
index 000000000..75a61445a
--- /dev/null
+++ b/test/reverse_proxy_test.exs
@@ -0,0 +1,296 @@
+defmodule Pleroma.ReverseProxyTest do
+ use Pleroma.Web.ConnCase, async: true
+ import ExUnit.CaptureLog
+ import Mox
+ alias Pleroma.ReverseProxy
+ alias Pleroma.ReverseProxy.ClientMock
+
+ setup_all do
+ {:ok, _} = Registry.start_link(keys: :unique, name: Pleroma.ReverseProxy.ClientMock)
+ :ok
+ end
+
+ setup :verify_on_exit!
+
+ defp user_agent_mock(user_agent, invokes) do
+ json = Jason.encode!(%{"user-agent": user_agent})
+
+ ClientMock
+ |> expect(:request, fn :get, url, _, _, _ ->
+ Registry.register(Pleroma.ReverseProxy.ClientMock, url, 0)
+
+ {:ok, 200,
+ [
+ {"content-type", "application/json"},
+ {"content-length", byte_size(json) |> to_string()}
+ ], %{url: url}}
+ end)
+ |> expect(:stream_body, invokes, fn %{url: url} ->
+ case Registry.lookup(Pleroma.ReverseProxy.ClientMock, url) do
+ [{_, 0}] ->
+ Registry.update_value(Pleroma.ReverseProxy.ClientMock, url, &(&1 + 1))
+ {:ok, json}
+
+ [{_, 1}] ->
+ Registry.unregister(Pleroma.ReverseProxy.ClientMock, url)
+ :done
+ end
+ end)
+ end
+
+ describe "user-agent" do
+ test "don't keep", %{conn: conn} do
+ user_agent_mock("hackney/1.15.1", 2)
+ conn = ReverseProxy.call(conn, "/user-agent")
+ assert json_response(conn, 200) == %{"user-agent" => "hackney/1.15.1"}
+ end
+
+ test "keep", %{conn: conn} do
+ user_agent_mock(Pleroma.Application.user_agent(), 2)
+ conn = ReverseProxy.call(conn, "/user-agent-keep", keep_user_agent: true)
+ assert json_response(conn, 200) == %{"user-agent" => Pleroma.Application.user_agent()}
+ end
+ end
+
+ test "closed connection", %{conn: conn} do
+ ClientMock
+ |> expect(:request, fn :get, "/closed", _, _, _ -> {:ok, 200, [], %{}} end)
+ |> expect(:stream_body, fn _ -> {:error, :closed} end)
+ |> expect(:close, fn _ -> :ok end)
+
+ conn = ReverseProxy.call(conn, "/closed")
+ assert conn.halted
+ end
+
+  describe "max_body" do
+    test "length returns error if content-length exceeds the option", %{conn: conn} do
+ user_agent_mock("hackney/1.15.1", 0)
+
+ assert capture_log(fn ->
+ ReverseProxy.call(conn, "/user-agent", max_body_length: 4)
+ end) =~
+ "[error] Elixir.Pleroma.ReverseProxy: request to \"/user-agent\" failed: :body_too_large"
+ end
+
+ defp stream_mock(invokes, with_close? \\ false) do
+ ClientMock
+ |> expect(:request, fn :get, "/stream-bytes/" <> length, _, _, _ ->
+ Registry.register(Pleroma.ReverseProxy.ClientMock, "/stream-bytes/" <> length, 0)
+
+ {:ok, 200, [{"content-type", "application/octet-stream"}],
+ %{url: "/stream-bytes/" <> length}}
+ end)
+ |> expect(:stream_body, invokes, fn %{url: "/stream-bytes/" <> length} ->
+ max = String.to_integer(length)
+
+ case Registry.lookup(Pleroma.ReverseProxy.ClientMock, "/stream-bytes/" <> length) do
+ [{_, current}] when current < max ->
+ Registry.update_value(
+ Pleroma.ReverseProxy.ClientMock,
+ "/stream-bytes/" <> length,
+ &(&1 + 10)
+ )
+
+ {:ok, "0123456789"}
+
+ [{_, ^max}] ->
+ Registry.unregister(Pleroma.ReverseProxy.ClientMock, "/stream-bytes/" <> length)
+ :done
+ end
+ end)
+
+ if with_close? do
+ expect(ClientMock, :close, fn _ -> :ok end)
+ end
+ end
+
+    test "max_body_size returns error if the streamed body exceeds the option", %{conn: conn} do
+ stream_mock(3, true)
+
+ assert capture_log(fn ->
+ ReverseProxy.call(conn, "/stream-bytes/50", max_body_size: 30)
+ end) =~
+ "[warn] Elixir.Pleroma.ReverseProxy request to /stream-bytes/50 failed while reading/chunking: :body_too_large"
+ end
+ end
+
+ describe "HEAD requests" do
+ test "common", %{conn: conn} do
+ ClientMock
+ |> expect(:request, fn :head, "/head", _, _, _ ->
+ {:ok, 200, [{"content-type", "text/html; charset=utf-8"}]}
+ end)
+
+ conn = ReverseProxy.call(Map.put(conn, :method, "HEAD"), "/head")
+ assert html_response(conn, 200) == ""
+ end
+ end
+
+ defp error_mock(status) when is_integer(status) do
+ ClientMock
+ |> expect(:request, fn :get, "/status/" <> _, _, _, _ ->
+ {:error, status}
+ end)
+ end
+
+ describe "returns error on" do
+ test "500", %{conn: conn} do
+ error_mock(500)
+
+ capture_log(fn -> ReverseProxy.call(conn, "/status/500") end) =~
+ "[error] Elixir.Pleroma.ReverseProxy: request to /status/500 failed with HTTP status 500"
+ end
+
+ test "400", %{conn: conn} do
+ error_mock(400)
+
+ capture_log(fn -> ReverseProxy.call(conn, "/status/400") end) =~
+ "[error] Elixir.Pleroma.ReverseProxy: request to /status/400 failed with HTTP status 400"
+ end
+
+ test "204", %{conn: conn} do
+ ClientMock
+ |> expect(:request, fn :get, "/status/204", _, _, _ -> {:ok, 204, [], %{}} end)
+
+ capture_log(fn ->
+ conn = ReverseProxy.call(conn, "/status/204")
+ assert conn.resp_body == "Request failed: No Content"
+ assert conn.halted
+ end) =~
+ "[error] Elixir.Pleroma.ReverseProxy: request to \"/status/204\" failed with HTTP status 204"
+ end
+ end
+
+ test "streaming", %{conn: conn} do
+ stream_mock(21)
+ conn = ReverseProxy.call(conn, "/stream-bytes/200")
+ assert conn.state == :chunked
+ assert byte_size(conn.resp_body) == 200
+ assert Plug.Conn.get_resp_header(conn, "content-type") == ["application/octet-stream"]
+ end
+
+ defp headers_mock(_) do
+ ClientMock
+ |> expect(:request, fn :get, "/headers", headers, _, _ ->
+ Registry.register(Pleroma.ReverseProxy.ClientMock, "/headers", 0)
+ {:ok, 200, [{"content-type", "application/json"}], %{url: "/headers", headers: headers}}
+ end)
+ |> expect(:stream_body, 2, fn %{url: url, headers: headers} ->
+ case Registry.lookup(Pleroma.ReverseProxy.ClientMock, url) do
+ [{_, 0}] ->
+ Registry.update_value(Pleroma.ReverseProxy.ClientMock, url, &(&1 + 1))
+ headers = for {k, v} <- headers, into: %{}, do: {String.capitalize(k), v}
+ {:ok, Jason.encode!(%{headers: headers})}
+
+ [{_, 1}] ->
+ Registry.unregister(Pleroma.ReverseProxy.ClientMock, url)
+ :done
+ end
+ end)
+
+ :ok
+ end
+
+ describe "keep request headers" do
+ setup [:headers_mock]
+
+ test "header passes", %{conn: conn} do
+ conn =
+ Plug.Conn.put_req_header(
+ conn,
+ "accept",
+ "text/html"
+ )
+ |> ReverseProxy.call("/headers")
+
+ %{"headers" => headers} = json_response(conn, 200)
+ assert headers["Accept"] == "text/html"
+ end
+
+ test "header is filtered", %{conn: conn} do
+ conn =
+ Plug.Conn.put_req_header(
+ conn,
+ "accept-language",
+ "en-US"
+ )
+ |> ReverseProxy.call("/headers")
+
+ %{"headers" => headers} = json_response(conn, 200)
+ refute headers["Accept-Language"]
+ end
+ end
+
+ test "returns 400 on non GET, HEAD requests", %{conn: conn} do
+ conn = ReverseProxy.call(Map.put(conn, :method, "POST"), "/ip")
+ assert conn.status == 400
+ end
+
+ describe "cache resp headers" do
+ test "returns headers", %{conn: conn} do
+ ClientMock
+ |> expect(:request, fn :get, "/cache/" <> ttl, _, _, _ ->
+ {:ok, 200, [{"cache-control", "public, max-age=" <> ttl}], %{}}
+ end)
+ |> expect(:stream_body, fn _ -> :done end)
+
+ conn = ReverseProxy.call(conn, "/cache/10")
+ assert {"cache-control", "public, max-age=10"} in conn.resp_headers
+ end
+
+ test "add cache-control", %{conn: conn} do
+ ClientMock
+ |> expect(:request, fn :get, "/cache", _, _, _ ->
+ {:ok, 200, [{"ETag", "some ETag"}], %{}}
+ end)
+ |> expect(:stream_body, fn _ -> :done end)
+
+ conn = ReverseProxy.call(conn, "/cache")
+ assert {"cache-control", "public"} in conn.resp_headers
+ end
+ end
+
+ defp disposition_headers_mock(headers) do
+ ClientMock
+ |> expect(:request, fn :get, "/disposition", _, _, _ ->
+ Registry.register(Pleroma.ReverseProxy.ClientMock, "/disposition", 0)
+
+ {:ok, 200, headers, %{url: "/disposition"}}
+ end)
+ |> expect(:stream_body, 2, fn %{url: "/disposition"} ->
+ case Registry.lookup(Pleroma.ReverseProxy.ClientMock, "/disposition") do
+ [{_, 0}] ->
+ Registry.update_value(Pleroma.ReverseProxy.ClientMock, "/disposition", &(&1 + 1))
+ {:ok, ""}
+
+ [{_, 1}] ->
+ Registry.unregister(Pleroma.ReverseProxy.ClientMock, "/disposition")
+ :done
+ end
+ end)
+ end
+
+ describe "response content disposition header" do
+    test "not attachment", %{conn: conn} do
+ disposition_headers_mock([
+ {"content-type", "image/gif"},
+ {"content-length", 0}
+ ])
+
+ conn = ReverseProxy.call(conn, "/disposition")
+
+ assert {"content-type", "image/gif"} in conn.resp_headers
+ end
+
+ test "with content-disposition header", %{conn: conn} do
+ disposition_headers_mock([
+ {"content-disposition", "attachment; filename=\"filename.jpg\""},
+ {"content-length", 0}
+ ])
+
+ conn = ReverseProxy.call(conn, "/disposition")
+
+ assert {"content-disposition", "attachment; filename=\"filename.jpg\""} in conn.resp_headers
+ end
+ end
+end
diff --git a/test/support/http_request_mock.ex b/test/support/http_request_mock.ex
index 30169edb0..c593a5e4a 100644
--- a/test/support/http_request_mock.ex
+++ b/test/support/http_request_mock.ex
@@ -31,8 +31,7 @@ def get("https://osada.macgirvin.com/channel/mike", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body:
- File.read!("test/fixtures/httpoison_mock/https___osada.macgirvin.com_channel_mike.json")
+ body: File.read!("test/fixtures/tesla_mock/https___osada.macgirvin.com_channel_mike.json")
}}
end
@@ -40,7 +39,7 @@ def get("https://mastodon.social/users/emelie/statuses/101849165031453009", _, _
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/status.emelie.json")
+ body: File.read!("test/fixtures/tesla_mock/status.emelie.json")
}}
end
@@ -48,7 +47,7 @@ def get("https://mastodon.social/users/emelie", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/emelie.json")
+ body: File.read!("test/fixtures/tesla_mock/emelie.json")
}}
end
@@ -56,7 +55,7 @@ def get("https://mastodon.sdf.org/users/rinpatch", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/rinpatch.json")
+ body: File.read!("test/fixtures/tesla_mock/rinpatch.json")
}}
end
@@ -69,7 +68,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/webfinger_emelie.json")
+ body: File.read!("test/fixtures/tesla_mock/webfinger_emelie.json")
}}
end
@@ -77,7 +76,7 @@ def get("https://mastodon.social/users/emelie.atom", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/emelie.atom")
+ body: File.read!("test/fixtures/tesla_mock/emelie.atom")
}}
end
@@ -90,7 +89,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/mike@osada.macgirvin.com.json")
+ body: File.read!("test/fixtures/tesla_mock/mike@osada.macgirvin.com.json")
}}
end
@@ -103,7 +102,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___social.heldscal.la_user_29191.xml")
+ body: File.read!("test/fixtures/tesla_mock/https___social.heldscal.la_user_29191.xml")
}}
end
@@ -111,7 +110,7 @@ def get("https://pawoo.net/users/pekorino.atom", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___pawoo.net_users_pekorino.atom")
+ body: File.read!("test/fixtures/tesla_mock/https___pawoo.net_users_pekorino.atom")
}}
end
@@ -124,7 +123,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___pawoo.net_users_pekorino.xml")
+ body: File.read!("test/fixtures/tesla_mock/https___pawoo.net_users_pekorino.xml")
}}
end
@@ -137,7 +136,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/atarifrosch_feed.xml")
+ body: File.read!("test/fixtures/tesla_mock/atarifrosch_feed.xml")
}}
end
@@ -150,7 +149,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/atarifrosch_webfinger.xml")
+ body: File.read!("test/fixtures/tesla_mock/atarifrosch_webfinger.xml")
}}
end
@@ -158,7 +157,7 @@ def get("https://mamot.fr/users/Skruyb.atom", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___mamot.fr_users_Skruyb.atom")
+ body: File.read!("test/fixtures/tesla_mock/https___mamot.fr_users_Skruyb.atom")
}}
end
@@ -171,7 +170,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/skruyb@mamot.fr.atom")
+ body: File.read!("test/fixtures/tesla_mock/skruyb@mamot.fr.atom")
}}
end
@@ -184,7 +183,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/nonexistant@social.heldscal.la.xml")
+ body: File.read!("test/fixtures/tesla_mock/nonexistant@social.heldscal.la.xml")
}}
end
@@ -197,7 +196,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/lain_squeet.me_webfinger.xml")
+ body: File.read!("test/fixtures/tesla_mock/lain_squeet.me_webfinger.xml")
}}
end
@@ -210,7 +209,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/lucifermysticus.json")
+ body: File.read!("test/fixtures/tesla_mock/lucifermysticus.json")
}}
end
@@ -218,7 +217,7 @@ def get("https://prismo.news/@mxb", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___prismo.news__mxb.json")
+ body: File.read!("test/fixtures/tesla_mock/https___prismo.news__mxb.json")
}}
end
@@ -231,7 +230,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/kaniini@hubzilla.example.org.json")
+ body: File.read!("test/fixtures/tesla_mock/kaniini@hubzilla.example.org.json")
}}
end
@@ -239,7 +238,7 @@ def get("https://niu.moe/users/rye", _, _, Accept: "application/activity+json")
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/rye.json")
+ body: File.read!("test/fixtures/tesla_mock/rye.json")
}}
end
@@ -247,7 +246,7 @@ def get("https://n1u.moe/users/rye", _, _, Accept: "application/activity+json")
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/rye.json")
+ body: File.read!("test/fixtures/tesla_mock/rye.json")
}}
end
@@ -257,7 +256,7 @@ def get("http://mastodon.example.org/users/admin/statuses/100787282858396771", _
status: 200,
body:
File.read!(
- "test/fixtures/httpoison_mock/http___mastodon.example.org_users_admin_status_1234.json"
+ "test/fixtures/tesla_mock/http___mastodon.example.org_users_admin_status_1234.json"
)
}}
end
@@ -266,7 +265,7 @@ def get("https://puckipedia.com/", _, _, Accept: "application/activity+json") do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/puckipedia.com.json")
+ body: File.read!("test/fixtures/tesla_mock/puckipedia.com.json")
}}
end
@@ -274,7 +273,7 @@ def get("https://peertube.moe/accounts/7even", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/7even.json")
+ body: File.read!("test/fixtures/tesla_mock/7even.json")
}}
end
@@ -282,7 +281,7 @@ def get("https://peertube.moe/videos/watch/df5f464b-be8d-46fb-ad81-2d4c2d1630e3"
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/peertube.moe-vid.json")
+ body: File.read!("test/fixtures/tesla_mock/peertube.moe-vid.json")
}}
end
@@ -290,7 +289,7 @@ def get("https://baptiste.gelez.xyz/@/BaptisteGelez", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/baptiste.gelex.xyz-user.json")
+ body: File.read!("test/fixtures/tesla_mock/baptiste.gelex.xyz-user.json")
}}
end
@@ -298,7 +297,7 @@ def get("https://baptiste.gelez.xyz/~/PlumeDevelopment/this-month-in-plume-june-
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/baptiste.gelex.xyz-article.json")
+ body: File.read!("test/fixtures/tesla_mock/baptiste.gelex.xyz-article.json")
}}
end
@@ -306,7 +305,7 @@ def get("http://mastodon.example.org/users/admin", _, _, Accept: "application/ac
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/admin@mastdon.example.org.json")
+ body: File.read!("test/fixtures/tesla_mock/admin@mastdon.example.org.json")
}}
end
@@ -331,7 +330,7 @@ def get("https://shitposter.club/notice/7369654", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/7369654.html")
+ body: File.read!("test/fixtures/tesla_mock/7369654.html")
}}
end
@@ -339,7 +338,7 @@ def get("https://mstdn.io/users/mayuutann", _, _, Accept: "application/activity+
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/mayumayu.json")
+ body: File.read!("test/fixtures/tesla_mock/mayumayu.json")
}}
end
@@ -352,7 +351,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/mayumayupost.json")
+ body: File.read!("test/fixtures/tesla_mock/mayumayupost.json")
}}
end
@@ -362,7 +361,7 @@ def get("https://pleroma.soykaf.com/users/lain/feed.atom", _, _, _) do
status: 200,
body:
File.read!(
- "test/fixtures/httpoison_mock/https___pleroma.soykaf.com_users_lain_feed.atom.xml"
+ "test/fixtures/tesla_mock/https___pleroma.soykaf.com_users_lain_feed.atom.xml"
)
}}
end
@@ -375,7 +374,7 @@ def get(url, _, _, Accept: "application/xrd+xml,application/jrd+json")
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___pleroma.soykaf.com_users_lain.xml")
+ body: File.read!("test/fixtures/tesla_mock/https___pleroma.soykaf.com_users_lain.xml")
}}
end
@@ -385,7 +384,7 @@ def get("https://shitposter.club/api/statuses/user_timeline/1.atom", _, _, _) do
status: 200,
body:
File.read!(
- "test/fixtures/httpoison_mock/https___shitposter.club_api_statuses_user_timeline_1.atom.xml"
+ "test/fixtures/tesla_mock/https___shitposter.club_api_statuses_user_timeline_1.atom.xml"
)
}}
end
@@ -399,7 +398,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___shitposter.club_user_1.xml")
+ body: File.read!("test/fixtures/tesla_mock/https___shitposter.club_user_1.xml")
}}
end
@@ -407,8 +406,7 @@ def get("https://shitposter.club/notice/2827873", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body:
- File.read!("test/fixtures/httpoison_mock/https___shitposter.club_notice_2827873.html")
+ body: File.read!("test/fixtures/tesla_mock/https___shitposter.club_notice_2827873.html")
}}
end
@@ -418,7 +416,7 @@ def get("https://shitposter.club/api/statuses/show/2827873.atom", _, _, _) do
status: 200,
body:
File.read!(
- "test/fixtures/httpoison_mock/https___shitposter.club_api_statuses_show_2827873.atom.xml"
+ "test/fixtures/tesla_mock/https___shitposter.club_api_statuses_show_2827873.atom.xml"
)
}}
end
@@ -431,7 +429,7 @@ def get("https://shitposter.club/api/statuses/user_timeline/5381.atom", _, _, _)
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/spc_5381.atom")
+ body: File.read!("test/fixtures/tesla_mock/spc_5381.atom")
}}
end
@@ -444,7 +442,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/spc_5381_xrd.xml")
+ body: File.read!("test/fixtures/tesla_mock/spc_5381_xrd.xml")
}}
end
@@ -452,7 +450,7 @@ def get("http://shitposter.club/.well-known/host-meta", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/shitposter.club_host_meta")
+ body: File.read!("test/fixtures/tesla_mock/shitposter.club_host_meta")
}}
end
@@ -460,7 +458,7 @@ def get("https://shitposter.club/api/statuses/show/7369654.atom", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/7369654.atom")
+ body: File.read!("test/fixtures/tesla_mock/7369654.atom")
}}
end
@@ -468,7 +466,7 @@ def get("https://shitposter.club/notice/4027863", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/7369654.html")
+ body: File.read!("test/fixtures/tesla_mock/7369654.html")
}}
end
@@ -476,7 +474,7 @@ def get("https://social.sakamoto.gq/users/eal/feed.atom", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/sakamoto_eal_feed.atom")
+ body: File.read!("test/fixtures/tesla_mock/sakamoto_eal_feed.atom")
}}
end
@@ -484,7 +482,7 @@ def get("http://social.sakamoto.gq/.well-known/host-meta", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/social.sakamoto.gq_host_meta")
+ body: File.read!("test/fixtures/tesla_mock/social.sakamoto.gq_host_meta")
}}
end
@@ -497,7 +495,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/eal_sakamoto.xml")
+ body: File.read!("test/fixtures/tesla_mock/eal_sakamoto.xml")
}}
end
@@ -507,14 +505,14 @@ def get(
_,
Accept: "application/atom+xml"
) do
- {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/httpoison_mock/sakamoto.atom")}}
+ {:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/tesla_mock/sakamoto.atom")}}
end
def get("http://mastodon.social/.well-known/host-meta", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/mastodon.social_host_meta")
+ body: File.read!("test/fixtures/tesla_mock/mastodon.social_host_meta")
}}
end
@@ -528,9 +526,7 @@ def get(
%Tesla.Env{
status: 200,
body:
- File.read!(
- "test/fixtures/httpoison_mock/https___mastodon.social_users_lambadalambda.xml"
- )
+ File.read!("test/fixtures/tesla_mock/https___mastodon.social_users_lambadalambda.xml")
}}
end
@@ -538,7 +534,7 @@ def get("http://gs.example.org/.well-known/host-meta", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/gs.example.org_host_meta")
+ body: File.read!("test/fixtures/tesla_mock/gs.example.org_host_meta")
}}
end
@@ -552,9 +548,7 @@ def get(
%Tesla.Env{
status: 200,
body:
- File.read!(
- "test/fixtures/httpoison_mock/http___gs.example.org_4040_index.php_user_1.xml"
- )
+ File.read!("test/fixtures/tesla_mock/http___gs.example.org_4040_index.php_user_1.xml")
}}
end
@@ -573,7 +567,7 @@ def get("http://gs.example.org/index.php/api/statuses/user_timeline/1.atom", _,
status: 200,
body:
File.read!(
- "test/fixtures/httpoison_mock/http__gs.example.org_index.php_api_statuses_user_timeline_1.atom.xml"
+ "test/fixtures/tesla_mock/http__gs.example.org_index.php_api_statuses_user_timeline_1.atom.xml"
)
}}
end
@@ -584,14 +578,14 @@ def get("https://social.heldscal.la/api/statuses/user_timeline/29191.atom", _, _
status: 200,
body:
File.read!(
- "test/fixtures/httpoison_mock/https___social.heldscal.la_api_statuses_user_timeline_29191.atom.xml"
+ "test/fixtures/tesla_mock/https___social.heldscal.la_api_statuses_user_timeline_29191.atom.xml"
)
}}
end
def get("http://squeet.me/.well-known/host-meta", _, _, _) do
{:ok,
- %Tesla.Env{status: 200, body: File.read!("test/fixtures/httpoison_mock/squeet.me_host_meta")}}
+ %Tesla.Env{status: 200, body: File.read!("test/fixtures/tesla_mock/squeet.me_host_meta")}}
end
def get(
@@ -603,7 +597,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/lain_squeet.me_webfinger.xml")
+ body: File.read!("test/fixtures/tesla_mock/lain_squeet.me_webfinger.xml")
}}
end
@@ -616,7 +610,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/shp@social.heldscal.la.xml")
+ body: File.read!("test/fixtures/tesla_mock/shp@social.heldscal.la.xml")
}}
end
@@ -624,7 +618,7 @@ def get("http://framatube.org/.well-known/host-meta", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/framatube.org_host_meta")
+ body: File.read!("test/fixtures/tesla_mock/framatube.org_host_meta")
}}
end
@@ -638,7 +632,7 @@ def get(
%Tesla.Env{
status: 200,
headers: [{"content-type", "application/json"}],
- body: File.read!("test/fixtures/httpoison_mock/framasoft@framatube.org.json")
+ body: File.read!("test/fixtures/tesla_mock/framasoft@framatube.org.json")
}}
end
@@ -646,7 +640,7 @@ def get("http://gnusocial.de/.well-known/host-meta", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/gnusocial.de_host_meta")
+ body: File.read!("test/fixtures/tesla_mock/gnusocial.de_host_meta")
}}
end
@@ -659,7 +653,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/winterdienst_webfinger.json")
+ body: File.read!("test/fixtures/tesla_mock/winterdienst_webfinger.json")
}}
end
@@ -667,7 +661,7 @@ def get("http://status.alpicola.com/.well-known/host-meta", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/status.alpicola.com_host_meta")
+ body: File.read!("test/fixtures/tesla_mock/status.alpicola.com_host_meta")
}}
end
@@ -675,7 +669,7 @@ def get("http://macgirvin.com/.well-known/host-meta", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/macgirvin.com_host_meta")
+ body: File.read!("test/fixtures/tesla_mock/macgirvin.com_host_meta")
}}
end
@@ -683,7 +677,7 @@ def get("http://gerzilla.de/.well-known/host-meta", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/gerzilla.de_host_meta")
+ body: File.read!("test/fixtures/tesla_mock/gerzilla.de_host_meta")
}}
end
@@ -697,7 +691,7 @@ def get(
%Tesla.Env{
status: 200,
headers: [{"content-type", "application/json"}],
- body: File.read!("test/fixtures/httpoison_mock/kaniini@gerzilla.de.json")
+ body: File.read!("test/fixtures/tesla_mock/kaniini@gerzilla.de.json")
}}
end
@@ -707,7 +701,7 @@ def get("https://social.heldscal.la/api/statuses/user_timeline/23211.atom", _, _
status: 200,
body:
File.read!(
- "test/fixtures/httpoison_mock/https___social.heldscal.la_api_statuses_user_timeline_23211.atom.xml"
+ "test/fixtures/tesla_mock/https___social.heldscal.la_api_statuses_user_timeline_23211.atom.xml"
)
}}
end
@@ -721,7 +715,7 @@ def get(
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/https___social.heldscal.la_user_23211.xml")
+ body: File.read!("test/fixtures/tesla_mock/https___social.heldscal.la_user_23211.xml")
}}
end
@@ -729,7 +723,7 @@ def get("http://social.heldscal.la/.well-known/host-meta", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/social.heldscal.la_host_meta")
+ body: File.read!("test/fixtures/tesla_mock/social.heldscal.la_host_meta")
}}
end
@@ -737,7 +731,7 @@ def get("https://social.heldscal.la/.well-known/host-meta", _, _, _) do
{:ok,
%Tesla.Env{
status: 200,
- body: File.read!("test/fixtures/httpoison_mock/social.heldscal.la_host_meta")
+ body: File.read!("test/fixtures/tesla_mock/social.heldscal.la_host_meta")
}}
end
@@ -765,6 +759,54 @@ def get("https://pleroma.local/notice/9kCP7V", _, _, _) do
{:ok, %Tesla.Env{status: 200, body: File.read!("test/fixtures/rich_media/ogp.html")}}
end
+ def get("http://localhost:4001/users/masto_closed/followers", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/users_mock/masto_closed_followers.json")
+ }}
+ end
+
+ def get("http://localhost:4001/users/masto_closed/following", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/users_mock/masto_closed_following.json")
+ }}
+ end
+
+ def get("http://localhost:4001/users/fuser2/followers", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/users_mock/pleroma_followers.json")
+ }}
+ end
+
+ def get("http://localhost:4001/users/fuser2/following", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 200,
+ body: File.read!("test/fixtures/users_mock/pleroma_following.json")
+ }}
+ end
+
+ def get("http://domain-with-errors:4001/users/fuser1/followers", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 504,
+ body: ""
+ }}
+ end
+
+ def get("http://domain-with-errors:4001/users/fuser1/following", _, _, _) do
+ {:ok,
+ %Tesla.Env{
+ status: 504,
+ body: ""
+ }}
+ end
+
def get("http://example.com/ogp-missing-data", _, _, _) do
{:ok,
%Tesla.Env{
diff --git a/test/tasks/ecto/ecto_test.exs b/test/tasks/ecto/ecto_test.exs
new file mode 100644
index 000000000..b48662c88
--- /dev/null
+++ b/test/tasks/ecto/ecto_test.exs
@@ -0,0 +1,11 @@
+defmodule Mix.Tasks.Pleroma.EctoTest do
+ use ExUnit.Case, async: true
+
+ test "raise on bad path" do
+ assert_raise RuntimeError, ~r/Could not find migrations directory/, fn ->
+ Mix.Tasks.Pleroma.Ecto.ensure_migrations_path(Pleroma.Repo,
+ migrations_path: "some-path"
+ )
+ end
+ end
+end
diff --git a/test/tasks/pleroma_test.exs b/test/tasks/pleroma_test.exs
new file mode 100644
index 000000000..e236ccbbb
--- /dev/null
+++ b/test/tasks/pleroma_test.exs
@@ -0,0 +1,46 @@
+defmodule Mix.PleromaTest do
+ use ExUnit.Case, async: true
+ import Mix.Pleroma
+
+ setup_all do
+ Mix.shell(Mix.Shell.Process)
+
+ on_exit(fn ->
+ Mix.shell(Mix.Shell.IO)
+ end)
+
+ :ok
+ end
+
+ describe "shell_prompt/1" do
+ test "input" do
+ send(self(), {:mix_shell_input, :prompt, "Yes"})
+
+ answer = shell_prompt("Do you want this?")
+ assert_received {:mix_shell, :prompt, [message]}
+ assert message =~ "Do you want this?"
+ assert answer == "Yes"
+ end
+
+ test "with defval" do
+ send(self(), {:mix_shell_input, :prompt, "\n"})
+
+ answer = shell_prompt("Do you want this?", "defval")
+
+ assert_received {:mix_shell, :prompt, [message]}
+ assert message =~ "Do you want this? [defval]"
+ assert answer == "defval"
+ end
+ end
+
+ describe "get_option/3" do
+ test "get from options" do
+ assert get_option([domain: "some-domain.com"], :domain, "Prompt") == "some-domain.com"
+ end
+
+ test "get from prompt" do
+ send(self(), {:mix_shell_input, :prompt, "another-domain.com"})
+ assert get_option([], :domain, "Prompt") == "another-domain.com"
+ end
+ end
+end
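
For context, the helpers under test behave like thin wrappers around `Mix.shell()`. The sketch below is inferred from the assertions; the trimming and default handling are assumptions, not a copy of `Mix.Pleroma`:

defmodule MixPleromaSketch do
  # shell_prompt/2: ask via the current Mix shell; an empty answer falls back
  # to the default value, which is also echoed in the prompt text.
  def shell_prompt(prompt, defval \\ nil) do
    message = if defval, do: "#{prompt} [#{defval}]", else: prompt

    case Mix.shell().prompt(message) |> String.trim() do
      "" -> defval
      answer -> answer
    end
  end

  # get_option/3: prefer the value passed as an option, otherwise ask the
  # user (an optional default value is assumed here).
  def get_option(options, opt, prompt, defval \\ nil) do
    Keyword.get(options, opt) || shell_prompt(prompt, defval)
  end
end
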
diff --git a/test/tasks/robots_txt_test.exs b/test/tasks/robots_txt_test.exs
new file mode 100644
index 000000000..539193f73
--- /dev/null
+++ b/test/tasks/robots_txt_test.exs
@@ -0,0 +1,43 @@
+defmodule Mix.Tasks.Pleroma.RobotsTxtTest do
+ use ExUnit.Case, async: true
+ alias Mix.Tasks.Pleroma.RobotsTxt
+
+ test "creates new dir" do
+ path = "test/fixtures/new_dir/"
+ file_path = path <> "robots.txt"
+
+ static_dir = Pleroma.Config.get([:instance, :static_dir])
+ Pleroma.Config.put([:instance, :static_dir], path)
+
+ on_exit(fn ->
+ Pleroma.Config.put([:instance, :static_dir], static_dir)
+ {:ok, ["test/fixtures/new_dir/", "test/fixtures/new_dir/robots.txt"]} = File.rm_rf(path)
+ end)
+
+ RobotsTxt.run(["disallow_all"])
+
+ assert File.exists?(file_path)
+ {:ok, file} = File.read(file_path)
+
+ assert file == "User-Agent: *\nDisallow: /\n"
+ end
+
+ test "to existance folder" do
+ path = "test/fixtures/"
+ file_path = path <> "robots.txt"
+ static_dir = Pleroma.Config.get([:instance, :static_dir])
+ Pleroma.Config.put([:instance, :static_dir], path)
+
+ on_exit(fn ->
+ Pleroma.Config.put([:instance, :static_dir], static_dir)
+ :ok = File.rm(file_path)
+ end)
+
+ RobotsTxt.run(["disallow_all"])
+
+ assert File.exists?(file_path)
+ {:ok, file} = File.read(file_path)
+
+ assert file == "User-Agent: *\nDisallow: /\n"
+ end
+end
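
Both tests pin the same output, so the task boils down to writing a fixed disallow-all file into the configured static directory, creating the directory if needed. A rough sketch of that behaviour (assumed, not the actual `Mix.Tasks.Pleroma.RobotsTxt` implementation):

defmodule RobotsTxtSketch do
  # Write a "deny everything" robots.txt under [:instance, :static_dir],
  # creating the directory first so the new-directory case passes.
  def run(["disallow_all"]) do
    static_dir = Pleroma.Config.get([:instance, :static_dir])
    :ok = File.mkdir_p(static_dir)

    static_dir
    |> Path.join("robots.txt")
    |> File.write!("User-Agent: *\nDisallow: /\n")
  end
end
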
diff --git a/test/test_helper.exs b/test/test_helper.exs
index f604ba63d..3e33f0335 100644
--- a/test/test_helper.exs
+++ b/test/test_helper.exs
@@ -3,6 +3,6 @@
# SPDX-License-Identifier: AGPL-3.0-only
ExUnit.start()
-
Ecto.Adapters.SQL.Sandbox.mode(Pleroma.Repo, :manual)
+Mox.defmock(Pleroma.ReverseProxy.ClientMock, for: Pleroma.ReverseProxy.Client)
{:ok, _} = Application.ensure_all_started(:ex_machina)
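
The new `Mox.defmock/2` line defines `Pleroma.ReverseProxy.ClientMock` against the `Pleroma.ReverseProxy.Client` behaviour, so reverse-proxy tests can stub upstream HTTP calls per test. A usage sketch; the callback name, arity, and return shape are assumptions about that behaviour, not its documented API:

defmodule ReverseProxyMockUsageSketch do
  use ExUnit.Case, async: true
  import Mox

  setup :verify_on_exit!

  test "stubs the upstream client" do
    Pleroma.ReverseProxy.ClientMock
    |> expect(:request, fn :get, _url, _headers, _body, _opts ->
      # Pretend the upstream answered with an image response.
      {:ok, 200, [{"content-type", "image/png"}], %{}}
    end)

    # ... exercise Pleroma.ReverseProxy here and assert on the returned conn ...
  end
end
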
diff --git a/test/user/synchronization_test.exs b/test/user/synchronization_test.exs
new file mode 100644
index 000000000..67b669431
--- /dev/null
+++ b/test/user/synchronization_test.exs
@@ -0,0 +1,104 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2018 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.User.SynchronizationTest do
+ use Pleroma.DataCase
+ import Pleroma.Factory
+ alias Pleroma.User
+ alias Pleroma.User.Synchronization
+
+ setup do
+ Tesla.Mock.mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
+ :ok
+ end
+
+ test "update following/followers counters" do
+ user1 =
+ insert(:user,
+ local: false,
+ ap_id: "http://localhost:4001/users/masto_closed"
+ )
+
+ user2 = insert(:user, local: false, ap_id: "http://localhost:4001/users/fuser2")
+
+ users = User.external_users()
+ assert length(users) == 2
+ {user, %{}} = Synchronization.call(users, %{})
+ assert user == List.last(users)
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user1)
+ assert followers == 437
+ assert following == 152
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user2)
+
+ assert followers == 527
+ assert following == 267
+ end
+
+ test "don't check host if errors exist" do
+ user1 = insert(:user, local: false, ap_id: "http://domain-with-errors:4001/users/fuser1")
+
+ user2 = insert(:user, local: false, ap_id: "http://domain-with-errors:4001/users/fuser2")
+
+ users = User.external_users()
+ assert length(users) == 2
+
+ {user, %{"domain-with-errors" => 2}} =
+ Synchronization.call(users, %{"domain-with-errors" => 2}, max_retries: 2)
+
+ assert user == List.last(users)
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user1)
+ assert followers == 0
+ assert following == 0
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user2)
+
+ assert followers == 0
+ assert following == 0
+ end
+
+ test "don't check host if errors appeared" do
+ user1 = insert(:user, local: false, ap_id: "http://domain-with-errors:4001/users/fuser1")
+
+ user2 = insert(:user, local: false, ap_id: "http://domain-with-errors:4001/users/fuser2")
+
+ users = User.external_users()
+ assert length(users) == 2
+
+ {user, %{"domain-with-errors" => 2}} = Synchronization.call(users, %{}, max_retries: 2)
+
+ assert user == List.last(users)
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user1)
+ assert followers == 0
+ assert following == 0
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user2)
+
+ assert followers == 0
+ assert following == 0
+ end
+
+ test "other users after error appeared" do
+ user1 = insert(:user, local: false, ap_id: "http://domain-with-errors:4001/users/fuser1")
+ user2 = insert(:user, local: false, ap_id: "http://localhost:4001/users/fuser2")
+
+ users = User.external_users()
+ assert length(users) == 2
+
+ {user, %{"domain-with-errors" => 2}} = Synchronization.call(users, %{}, max_retries: 2)
+ assert user == List.last(users)
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user1)
+ assert followers == 0
+ assert following == 0
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user2)
+
+ assert followers == 527
+ assert following == 267
+ end
+end
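
Taken together, these cases pin down the contract of `Synchronization.call/3`: walk the list of external users, skip any host whose accumulated error count has reached `:max_retries`, refresh the follower/following counters for the rest, and return the last processed user together with the per-host error map. A hedged sketch of that flow; the helper names, the HTTP plumbing, the `totalItems` fixture shape, and the per-request error bookkeeping are all assumptions, not the actual module:

defmodule SynchronizationSketch do
  alias Pleroma.User

  def call(users, errors, opts \\ []) do
    max_retries = Keyword.get(opts, :max_retries, 3)

    Enum.reduce(users, {nil, errors}, fn user, {_last, errors} ->
      host = URI.parse(user.ap_id).host

      errors =
        if Map.get(errors, host, 0) >= max_retries do
          # This host already failed too often in this run: skip the user.
          errors
        else
          sync_counters(user, host, errors)
        end

      {user, errors}
    end)
  end

  defp sync_counters(user, host, errors) do
    {followers, errors} = fetch_total(user.ap_id <> "/followers", host, errors)
    {following, errors} = fetch_total(user.ap_id <> "/following", host, errors)

    User.set_info_cache(user, %{follower_count: followers, following_count: following})
    errors
  end

  # Each endpoint is assumed to return an ActivityPub collection whose
  # "totalItems" carries the counter (e.g. 437 followers for masto_closed);
  # every failed request bumps the host's error count by one.
  defp fetch_total(url, host, errors) do
    with {:ok, %Tesla.Env{status: 200, body: body}} <- Pleroma.HTTP.get(url),
         {:ok, %{"totalItems" => total}} <- Poison.decode(body) do
      {total, errors}
    else
      _ -> {0, Map.update(errors, host, 1, &(&1 + 1))}
    end
  end
end
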
diff --git a/test/user/synchronization_worker_test.exs b/test/user/synchronization_worker_test.exs
new file mode 100644
index 000000000..835c5327f
--- /dev/null
+++ b/test/user/synchronization_worker_test.exs
@@ -0,0 +1,49 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2018 Pleroma Authors
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.User.SynchronizationWorkerTest do
+ use Pleroma.DataCase
+ import Pleroma.Factory
+
+ setup do
+ Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
+
+ config = Pleroma.Config.get([:instance, :external_user_synchronization])
+
+ for_update = [enabled: true, interval: 1000]
+
+ Pleroma.Config.put([:instance, :external_user_synchronization], for_update)
+
+ on_exit(fn ->
+ Pleroma.Config.put([:instance, :external_user_synchronization], config)
+ end)
+
+ :ok
+ end
+
+ test "sync follow counters" do
+ user1 =
+ insert(:user,
+ local: false,
+ ap_id: "http://localhost:4001/users/masto_closed"
+ )
+
+ user2 = insert(:user, local: false, ap_id: "http://localhost:4001/users/fuser2")
+
+ {:ok, _} = Pleroma.User.SynchronizationWorker.start_link()
+ :timer.sleep(1500)
+
+ %{follower_count: followers, following_count: following} =
+ Pleroma.User.get_cached_user_info(user1)
+
+ assert followers == 437
+ assert following == 152
+
+ %{follower_count: followers, following_count: following} =
+ Pleroma.User.get_cached_user_info(user2)
+
+ assert followers == 527
+ assert following == 267
+ end
+end
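
The worker test relies on the sync being re-scheduled on a timer: with `interval: 1000` and a 1.5 second sleep, exactly one pass should have run. A rough sketch of such a worker; the shape and the unit of the configured interval are assumptions, not the real `Pleroma.User.SynchronizationWorker`:

defmodule SynchronizationWorkerSketch do
  use GenServer

  def start_link do
    config = Pleroma.Config.get([:instance, :external_user_synchronization])

    if config[:enabled] do
      GenServer.start_link(__MODULE__, interval: config[:interval])
    else
      :ignore
    end
  end

  def init(opts) do
    schedule(opts[:interval])
    {:ok, opts}
  end

  def handle_info(:sync_follow_counters, state) do
    Pleroma.User.sync_follow_counters()
    schedule(state[:interval])
    {:noreply, state}
  end

  # The interval is treated as milliseconds here for brevity, matching the
  # test's `interval: 1000`; how the real worker interprets the setting is
  # not shown in this diff.
  defp schedule(interval), do: Process.send_after(self(), :sync_follow_counters, interval)
end
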
diff --git a/test/user_search_test.exs b/test/user_search_test.exs
index 8f8472aae..1f0162486 100644
--- a/test/user_search_test.exs
+++ b/test/user_search_test.exs
@@ -217,5 +217,36 @@ test "excludes a blocked users from search result" do
refute Enum.member?(account_ids, blocked_user2.id)
assert length(account_ids) == 3
end
+
+ test "local user has the same search_rank as for users with the same nickname, but another domain" do
+ user = insert(:user)
+ insert(:user, nickname: "lain@mastodon.social")
+ insert(:user, nickname: "lain")
+ insert(:user, nickname: "lain@pleroma.social")
+
+ assert User.search("lain@localhost", resolve: true, for_user: user)
+ |> Enum.each(fn u -> u.search_rank == 0.5 end)
+ end
+
+ test "localhost is the part of the domain" do
+ user = insert(:user)
+ insert(:user, nickname: "another@somedomain")
+ insert(:user, nickname: "lain")
+ insert(:user, nickname: "lain@examplelocalhost")
+
+ result = User.search("lain@examplelocalhost", resolve: true, for_user: user)
+ assert Enum.each(result, fn u -> u.search_rank == 0.5 end)
+ assert length(result) == 2
+ end
+
+ test "local user search with users" do
+ user = insert(:user)
+ local_user = insert(:user, nickname: "lain")
+ insert(:user, nickname: "another@localhost.com")
+ insert(:user, nickname: "localhost@localhost.com")
+
+ [result] = User.search("lain@localhost", resolve: true, for_user: user)
+ assert Map.put(result, :search_rank, nil) |> Map.put(:search_type, nil) == local_user
+ end
end
end
diff --git a/test/user_test.exs b/test/user_test.exs
index fb497843c..0f27d73f7 100644
--- a/test/user_test.exs
+++ b/test/user_test.exs
@@ -1183,4 +1183,121 @@ test "it returns a list of AP ids for a given set of nicknames" do
assert user_two.ap_id in ap_ids
end
end
+
+ describe "sync followers count" do
+ setup do
+ user1 = insert(:user, local: false, ap_id: "http://localhost:4001/users/masto_closed")
+ user2 = insert(:user, local: false, ap_id: "http://localhost:4001/users/fuser2")
+ insert(:user, local: true)
+ insert(:user, local: false, info: %{deactivated: true})
+ {:ok, user1: user1, user2: user2}
+ end
+
+ test "external_users/1 external active users with limit", %{user1: user1, user2: user2} do
+ [fdb_user1] = User.external_users(limit: 1)
+
+ assert fdb_user1.ap_id
+ assert fdb_user1.ap_id == user1.ap_id
+ assert fdb_user1.id == user1.id
+
+ [fdb_user2] = User.external_users(max_id: fdb_user1.id, limit: 1)
+
+ assert fdb_user2.ap_id
+ assert fdb_user2.ap_id == user2.ap_id
+ assert fdb_user2.id == user2.id
+
+ assert User.external_users(max_id: fdb_user2.id, limit: 1) == []
+ end
+
+ test "sync_follow_counters/1", %{user1: user1, user2: user2} do
+ {:ok, _pid} = Agent.start_link(fn -> %{} end, name: :domain_errors)
+
+ :ok = User.sync_follow_counters()
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user1)
+ assert followers == 437
+ assert following == 152
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user2)
+
+ assert followers == 527
+ assert following == 267
+
+ Agent.stop(:domain_errors)
+ end
+
+ test "sync_follow_counters/1 in separate batches", %{user1: user1, user2: user2} do
+ {:ok, _pid} = Agent.start_link(fn -> %{} end, name: :domain_errors)
+
+ :ok = User.sync_follow_counters(limit: 1)
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user1)
+ assert followers == 437
+ assert following == 152
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user2)
+
+ assert followers == 527
+ assert following == 267
+
+ Agent.stop(:domain_errors)
+ end
+
+ test "perform/1 with :sync_follow_counters", %{user1: user1, user2: user2} do
+ :ok = User.perform(:sync_follow_counters)
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user1)
+ assert followers == 437
+ assert following == 152
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user2)
+
+ assert followers == 527
+ assert following == 267
+ end
+ end
+
+ describe "set_info_cache/2" do
+ setup do
+ user = insert(:user)
+ {:ok, user: user}
+ end
+
+ test "update from args", %{user: user} do
+ User.set_info_cache(user, %{following_count: 15, follower_count: 18})
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user)
+ assert followers == 18
+ assert following == 15
+ end
+
+ test "without args", %{user: user} do
+ User.set_info_cache(user, %{})
+
+ %{follower_count: followers, following_count: following} = User.get_cached_user_info(user)
+ assert followers == 0
+ assert following == 0
+ end
+ end
+
+ describe "user_info/2" do
+ setup do
+ user = insert(:user)
+ {:ok, user: user}
+ end
+
+ test "update from args", %{user: user} do
+ %{follower_count: followers, following_count: following} =
+ User.user_info(user, %{following_count: 15, follower_count: 18})
+
+ assert followers == 18
+ assert following == 15
+ end
+
+ test "without args", %{user: user} do
+ %{follower_count: followers, following_count: following} = User.user_info(user)
+
+ assert followers == 0
+ assert following == 0
+ end
+ end
end
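
The last two describes pin down how the counters reach the cache: `user_info/2` merges explicit counter overrides over zero-valued defaults, and `set_info_cache/2` stores the result where `get_cached_user_info/1` will find it. A minimal sketch of that pair; the cache name and key are assumptions:

defmodule UserInfoCacheSketch do
  # Counters default to 0 for a freshly inserted user and can be overridden by
  # the args map, which is how the sync code pushes fresh remote counts in.
  def user_info(_user, args \\ %{}) do
    %{
      follower_count: Map.get(args, :follower_count, 0),
      following_count: Map.get(args, :following_count, 0)
      # the real user_info/2 also derives note_count, locked, etc.
    }
  end

  def set_info_cache(user, args) do
    Cachex.put(:user_cache, "user_info:#{user.id}", user_info(user, args))
  end
end
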
diff --git a/test/web/activity_pub/activity_pub_controller_test.exs b/test/web/activity_pub/activity_pub_controller_test.exs
index 8b3233729..5a8a67155 100644
--- a/test/web/activity_pub/activity_pub_controller_test.exs
+++ b/test/web/activity_pub/activity_pub_controller_test.exs
@@ -15,6 +15,13 @@ defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
setup_all do
Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
+
+ config_path = [:instance, :federating]
+ initial_setting = Pleroma.Config.get(config_path)
+
+ Pleroma.Config.put(config_path, true)
+ on_exit(fn -> Pleroma.Config.put(config_path, initial_setting) end)
+
:ok
end
diff --git a/test/web/activity_pub/transmogrifier_test.exs b/test/web/activity_pub/transmogrifier_test.exs
index 68ec03c33..a914d3c4c 100644
--- a/test/web/activity_pub/transmogrifier_test.exs
+++ b/test/web/activity_pub/transmogrifier_test.exs
@@ -11,12 +11,13 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Transmogrifier
+ alias Pleroma.Web.CommonAPI
alias Pleroma.Web.OStatus
alias Pleroma.Web.Websub.WebsubClientSubscription
+ import Mock
import Pleroma.Factory
import ExUnit.CaptureLog
- alias Pleroma.Web.CommonAPI
setup_all do
Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
@@ -46,12 +47,10 @@ test "it fetches replied-to activities if we don't have them" do
data["object"]
|> Map.put("inReplyTo", "https://shitposter.club/notice/2827873")
- data =
- data
- |> Map.put("object", object)
-
+ data = Map.put(data, "object", object)
{:ok, returned_activity} = Transmogrifier.handle_incoming(data)
- returned_object = Object.normalize(returned_activity.data["object"])
+
+ returned_object = Object.normalize(returned_activity.data["object"], false)
assert activity =
Activity.get_create_by_object_ap_id(
@@ -61,6 +60,32 @@ test "it fetches replied-to activities if we don't have them" do
assert returned_object.data["inReplyToAtomUri"] == "https://shitposter.club/notice/2827873"
end
+ test "it does not fetch replied-to activities beyond max_replies_depth" do
+ data =
+ File.read!("test/fixtures/mastodon-post-activity.json")
+ |> Poison.decode!()
+
+ object =
+ data["object"]
+ |> Map.put("inReplyTo", "https://shitposter.club/notice/2827873")
+
+ data = Map.put(data, "object", object)
+
+ with_mock Pleroma.Web.Federator,
+ allowed_incoming_reply_depth?: fn _ -> false end do
+ {:ok, returned_activity} = Transmogrifier.handle_incoming(data)
+
+ returned_object = Object.normalize(returned_activity.data["object"], false)
+
+ refute Activity.get_create_by_object_ap_id(
+ "tag:shitposter.club,2017-05-05:noticeId=2827873:objectType=comment"
+ )
+
+ assert returned_object.data["inReplyToAtomUri"] ==
+ "https://shitposter.club/notice/2827873"
+ end
+ end
+
test "it does not crash if the object in inReplyTo can't be fetched" do
data =
File.read!("test/fixtures/mastodon-post-activity.json")
diff --git a/test/web/federator_test.exs b/test/web/federator_test.exs
index 0f43bc8f2..69dd4d747 100644
--- a/test/web/federator_test.exs
+++ b/test/web/federator_test.exs
@@ -12,6 +12,13 @@ defmodule Pleroma.Web.FederatorTest do
setup_all do
Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
+
+ config_path = [:instance, :federating]
+ initial_setting = Pleroma.Config.get(config_path)
+
+ Pleroma.Config.put(config_path, true)
+ on_exit(fn -> Pleroma.Config.put(config_path, initial_setting) end)
+
:ok
end
diff --git a/test/web/mastodon_api/status_view_test.exs b/test/web/mastodon_api/status_view_test.exs
index ec75150ab..73791a95b 100644
--- a/test/web/mastodon_api/status_view_test.exs
+++ b/test/web/mastodon_api/status_view_test.exs
@@ -444,4 +444,39 @@ test "detects vote status" do
assert Enum.at(result[:options], 2)[:votes_count] == 1
end
end
+
+ test "embeds a relationship in the account" do
+ user = insert(:user)
+ other_user = insert(:user)
+
+ {:ok, activity} =
+ CommonAPI.post(user, %{
+ "status" => "drink more water"
+ })
+
+ result = StatusView.render("status.json", %{activity: activity, for: other_user})
+
+ assert result[:account][:pleroma][:relationship] ==
+ AccountView.render("relationship.json", %{user: other_user, target: user})
+ end
+
+ test "embeds a relationship in the account in reposts" do
+ user = insert(:user)
+ other_user = insert(:user)
+
+ {:ok, activity} =
+ CommonAPI.post(user, %{
+ "status" => "˙˙ɐʎns"
+ })
+
+ {:ok, activity, _object} = CommonAPI.repeat(activity.id, other_user)
+
+ result = StatusView.render("status.json", %{activity: activity, for: user})
+
+ assert result[:account][:pleroma][:relationship] ==
+ AccountView.render("relationship.json", %{user: user, target: other_user})
+
+ assert result[:reblog][:account][:pleroma][:relationship] ==
+ AccountView.render("relationship.json", %{user: user, target: user})
+ end
end
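
These assertions mean the Account entity rendered inside a status (and inside its reblog) now embeds `pleroma.relationship` relative to the requesting user. Sketched in outline; this is not the actual AccountView, only the nesting the tests check:

defmodule AccountViewSketch do
  # The account rendered for `for_user` embeds the relationship between the
  # viewer and the account owner under the :pleroma key.
  def render("account.json", %{user: user, for: for_user}) do
    %{
      id: to_string(user.id),
      acct: user.nickname,
      pleroma: %{
        relationship: render("relationship.json", %{user: for_user, target: user})
      }
    }
  end

  def render("relationship.json", %{user: _user, target: _target}) do
    # following / followed_by / blocking flags etc. would be derived here
    %{}
  end
end
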
diff --git a/test/web/ostatus/ostatus_controller_test.exs b/test/web/ostatus/ostatus_controller_test.exs
index 7441e5fce..eae44dba5 100644
--- a/test/web/ostatus/ostatus_controller_test.exs
+++ b/test/web/ostatus/ostatus_controller_test.exs
@@ -12,6 +12,13 @@ defmodule Pleroma.Web.OStatus.OStatusControllerTest do
setup_all do
Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
+
+ config_path = [:instance, :federating]
+ initial_setting = Pleroma.Config.get(config_path)
+
+ Pleroma.Config.put(config_path, true)
+ on_exit(fn -> Pleroma.Config.put(config_path, initial_setting) end)
+
:ok
end
diff --git a/test/web/ostatus/ostatus_test.exs b/test/web/ostatus/ostatus_test.exs
index f6be16862..acce33008 100644
--- a/test/web/ostatus/ostatus_test.exs
+++ b/test/web/ostatus/ostatus_test.exs
@@ -11,8 +11,10 @@ defmodule Pleroma.Web.OStatusTest do
alias Pleroma.User
alias Pleroma.Web.OStatus
alias Pleroma.Web.XML
- import Pleroma.Factory
+
import ExUnit.CaptureLog
+ import Mock
+ import Pleroma.Factory
setup_all do
Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
@@ -266,10 +268,13 @@ test "handle incoming favorites with locally available object - GS, websub" do
assert favorited_activity.local
end
- test "handle incoming replies" do
+ test_with_mock "handle incoming replies, fetching replied-to activities if we don't have them",
+ OStatus,
+ [:passthrough],
+ [] do
incoming = File.read!("test/fixtures/incoming_note_activity_answer.xml")
{:ok, [activity]} = OStatus.handle_incoming(incoming)
- object = Object.normalize(activity.data["object"])
+ object = Object.normalize(activity.data["object"], false)
assert activity.data["type"] == "Create"
assert object.data["type"] == "Note"
@@ -282,6 +287,23 @@ test "handle incoming replies" do
assert object.data["id"] == "tag:gs.example.org:4040,2017-04-25:noticeId=55:objectType=note"
assert "https://www.w3.org/ns/activitystreams#Public" in activity.data["to"]
+
+ assert called(OStatus.fetch_activity_from_url(object.data["inReplyTo"], :_))
+ end
+
+ test_with_mock "handle incoming replies, not fetching replied-to activities beyond max_replies_depth",
+ OStatus,
+ [:passthrough],
+ [] do
+ incoming = File.read!("test/fixtures/incoming_note_activity_answer.xml")
+
+ with_mock Pleroma.Web.Federator,
+ allowed_incoming_reply_depth?: fn _ -> false end do
+ {:ok, [activity]} = OStatus.handle_incoming(incoming)
+ object = Object.normalize(activity.data["object"], false)
+
+ refute called(OStatus.fetch_activity_from_url(object.data["inReplyTo"], :_))
+ end
end
test "handle incoming follows" do
diff --git a/test/web/plugs/federating_plug_test.exs b/test/web/plugs/federating_plug_test.exs
index 530562325..c01e01124 100644
--- a/test/web/plugs/federating_plug_test.exs
+++ b/test/web/plugs/federating_plug_test.exs
@@ -5,6 +5,15 @@
defmodule Pleroma.Web.FederatingPlugTest do
use Pleroma.Web.ConnCase
+ setup_all do
+ config_path = [:instance, :federating]
+ initial_setting = Pleroma.Config.get(config_path)
+
+ on_exit(fn -> Pleroma.Config.put(config_path, initial_setting) end)
+
+ :ok
+ end
+
test "returns and halt the conn when federating is disabled" do
Pleroma.Config.put([:instance, :federating], false)
@@ -14,11 +23,11 @@ test "returns and halt the conn when federating is disabled" do
assert conn.status == 404
assert conn.halted
-
- Pleroma.Config.put([:instance, :federating], true)
end
test "does nothing when federating is enabled" do
+ Pleroma.Config.put([:instance, :federating], true)
+
conn =
build_conn()
|> Pleroma.Web.FederatingPlug.call(%{})
diff --git a/test/web/web_finger/web_finger_controller_test.exs b/test/web/web_finger/web_finger_controller_test.exs
index 43fccfc7a..a14ed3126 100644
--- a/test/web/web_finger/web_finger_controller_test.exs
+++ b/test/web/web_finger/web_finger_controller_test.exs
@@ -10,6 +10,12 @@ defmodule Pleroma.Web.WebFinger.WebFingerControllerTest do
setup do
mock(fn env -> apply(HttpRequestMock, :request, [env]) end)
+
+ config_path = [:instance, :federating]
+ initial_setting = Pleroma.Config.get(config_path)
+
+ Pleroma.Config.put(config_path, true)
+ on_exit(fn -> Pleroma.Config.put(config_path, initial_setting) end)
:ok
end
diff --git a/test/web/websub/websub_controller_test.exs b/test/web/websub/websub_controller_test.exs
index f79745d58..aa7262beb 100644
--- a/test/web/websub/websub_controller_test.exs
+++ b/test/web/websub/websub_controller_test.exs
@@ -9,6 +9,16 @@ defmodule Pleroma.Web.Websub.WebsubControllerTest do
alias Pleroma.Web.Websub
alias Pleroma.Web.Websub.WebsubClientSubscription
+ setup_all do
+ config_path = [:instance, :federating]
+ initial_setting = Pleroma.Config.get(config_path)
+
+ Pleroma.Config.put(config_path, true)
+ on_exit(fn -> Pleroma.Config.put(config_path, initial_setting) end)
+
+ :ok
+ end
+
test "websub subscription request", %{conn: conn} do
user = insert(:user)