Forked from AkkomaGang/akkoma

Commit 8cec5796ae: Merge branch 'develop' of https://git.pleroma.social/pleroma/pleroma into develop

193 changed files with 6033 additions and 1827 deletions
CHANGELOG.md
@@ -12,7 +12,9 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 ### Added
 - NodeInfo: `pleroma:api/v1/notifications:include_types_filter` to the `features` list.
+- NodeInfo: `pleroma_emoji_reactions` to the `features` list.
 - Configuration: `:restrict_unauthenticated` setting, restrict access for unauthenticated users to timelines (public and federate), user profiles and statuses.
+- New HTTP adapter [gun](https://github.com/ninenines/gun). Gun adapter requires minimum OTP version of 22.2 otherwise Pleroma won’t start. For hackney OTP update is not required.
 <details>
 <summary>API Changes</summary>
 - Mastodon API: Support for `include_types` in `/api/v1/notifications`.
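The two configuration-related entries above can be illustrated with a small config sketch. This is not part of the diff, and the exact option shapes should be verified against the shipped config/description.exs; treat the keys below as assumptions.

    # Sketch only, not from this commit.
    # Assumed shape of the new :restrict_unauthenticated setting: hide timelines,
    # profiles and statuses from clients that are not logged in.
    config :pleroma, :restrict_unauthenticated,
      timelines: %{local: true, federated: true},
      profiles: %{local: true, remote: true},
      activities: %{local: true, remote: true}

    # Assumed way to opt into the new gun HTTP adapter (requires OTP 22.2 or newer);
    # hackney remains the default if this is left unset.
    config :tesla, adapter: Tesla.Adapter.Gun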
COPYING (4 changed lines)
@@ -1,4 +1,4 @@
-Unless otherwise stated this repository is copyright © 2017-2019
+Unless otherwise stated this repository is copyright © 2017-2020
 Pleroma Authors <https://pleroma.social/>, and is distributed under
 The GNU Affero General Public License Version 3, you should have received a
 copy of the license file as AGPL-3.
@@ -23,7 +23,7 @@ priv/static/images/pleroma-fox-tan-shy.png
 
 ---
 
-The following files are copyright © 2017-2019 Pleroma Authors
+The following files are copyright © 2017-2020 Pleroma Authors
 <https://pleroma.social/>, and are distributed under the Creative Commons
 Attribution-ShareAlike 4.0 International license, you should have received
 a copy of the license file as CC-BY-SA-4.0.
Dockerfile
@@ -12,7 +12,7 @@ RUN apk add git gcc g++ musl-dev make &&\
 	mkdir release &&\
 	mix release --path release
 
-FROM alpine:3.9
+FROM alpine:3.11
 
 ARG BUILD_DATE
 ARG VCS_REF
@@ -33,7 +33,7 @@ ARG DATA=/var/lib/pleroma
 
 RUN echo "http://nl.alpinelinux.org/alpine/latest-stable/community" >> /etc/apk/repositories &&\
 	apk update &&\
-	apk add ncurses postgresql-client &&\
+	apk add imagemagick ncurses postgresql-client &&\
 	adduser --system --shell /bin/false --home ${HOME} pleroma &&\
 	mkdir -p ${DATA}/uploads &&\
 	mkdir -p ${DATA}/static &&\
benchmarks/load_testing/activities.ex (new file, 557 lines)
@@ -0,0 +1,557 @@
defmodule Pleroma.LoadTesting.Activities do
  @moduledoc """
  Module for generating different activities.
  """
  import Ecto.Query
  import Pleroma.LoadTesting.Helper, only: [to_sec: 1]

  alias Ecto.UUID
  alias Pleroma.Constants
  alias Pleroma.LoadTesting.Users
  alias Pleroma.Repo
  alias Pleroma.Web.CommonAPI

  require Constants

  @defaults [
    iterations: 170,
    friends_used: 20,
    non_friends_used: 20
  ]

  @max_concurrency 10

  @visibility ~w(public private direct unlisted)
  @types ~w(simple emoji mentions hell_thread attachment tag like reblog simple_thread remote)
  @groups ~w(user friends non_friends)

  @spec generate(User.t(), keyword()) :: :ok
  def generate(user, opts \\ []) do
    {:ok, _} =
      Agent.start_link(fn -> %{} end,
        name: :benchmark_state
      )

    opts = Keyword.merge(@defaults, opts)

    friends =
      user
      |> Users.get_users(limit: opts[:friends_used], local: :local, friends?: true)
      |> Enum.shuffle()

    non_friends =
      user
      |> Users.get_users(limit: opts[:non_friends_used], local: :local, friends?: false)
      |> Enum.shuffle()

    task_data =
      for visibility <- @visibility,
          type <- @types,
          group <- @groups,
          do: {visibility, type, group}

    IO.puts("Starting generating #{opts[:iterations]} iterations of activities...")

    friends_thread = Enum.take(friends, 5)
    non_friends_thread = Enum.take(friends, 5)

    public_long_thread = fn ->
      generate_long_thread("public", user, friends_thread, non_friends_thread, opts)
    end

    private_long_thread = fn ->
      generate_long_thread("private", user, friends_thread, non_friends_thread, opts)
    end

    iterations = opts[:iterations]

    {time, _} =
      :timer.tc(fn ->
        Enum.each(
          1..iterations,
          fn
            i when i == iterations - 2 ->
              spawn(public_long_thread)
              spawn(private_long_thread)
              generate_activities(user, friends, non_friends, Enum.shuffle(task_data), opts)

            _ ->
              generate_activities(user, friends, non_friends, Enum.shuffle(task_data), opts)
          end
        )
      end)

    IO.puts("Generating iterations of activities took #{to_sec(time)} sec.\n")
    :ok
  end

  def generate_power_intervals(opts \\ []) do
    count = Keyword.get(opts, :count, 20)
    power = Keyword.get(opts, :power, 2)
    IO.puts("Generating #{count} intervals for a power #{power} series...")
    counts = Enum.map(1..count, fn n -> :math.pow(n, power) end)
    sum = Enum.sum(counts)

    densities =
      Enum.map(counts, fn c ->
        c / sum
      end)

    densities
    |> Enum.reduce(0, fn density, acc ->
      if acc == 0 do
        [{0, density}]
      else
        [{_, lower} | _] = acc
        [{lower, lower + density} | acc]
      end
    end)
    |> Enum.reverse()
  end

  def generate_tagged_activities(opts \\ []) do
    tag_count = Keyword.get(opts, :tag_count, 20)
    users = Keyword.get(opts, :users, Repo.all(Pleroma.User))
    activity_count = Keyword.get(opts, :count, 200_000)

    intervals = generate_power_intervals(count: tag_count)

    IO.puts(
      "Generating #{activity_count} activities using #{tag_count} different tags of format `tag_n`, starting at tag_0"
    )

    Enum.each(1..activity_count, fn _ ->
      random = :rand.uniform()
      i = Enum.find_index(intervals, fn {lower, upper} -> lower <= random && upper > random end)
      CommonAPI.post(Enum.random(users), %{"status" => "a post with the tag #tag_#{i}"})
    end)
  end

  defp generate_long_thread(visibility, user, friends, non_friends, _opts) do
    group =
      if visibility == "public",
        do: "friends",
        else: "user"

    tasks = get_reply_tasks(visibility, group) |> Stream.cycle() |> Enum.take(50)

    {:ok, activity} =
      CommonAPI.post(user, %{
        "status" => "Start of #{visibility} long thread",
        "visibility" => visibility
      })

    Agent.update(:benchmark_state, fn state ->
      key =
        if visibility == "public",
          do: :public_thread,
          else: :private_thread

      Map.put(state, key, activity)
    end)

    acc = {activity.id, ["@" <> user.nickname, "reply to long thread"]}
    insert_replies_for_long_thread(tasks, visibility, user, friends, non_friends, acc)
    IO.puts("Generating #{visibility} long thread ended\n")
  end

  defp insert_replies_for_long_thread(tasks, visibility, user, friends, non_friends, acc) do
    Enum.reduce(tasks, acc, fn
      "friend", {id, data} ->
        friend = Enum.random(friends)
        insert_reply(friend, List.delete(data, "@" <> friend.nickname), id, visibility)

      "non_friend", {id, data} ->
        non_friend = Enum.random(non_friends)
        insert_reply(non_friend, List.delete(data, "@" <> non_friend.nickname), id, visibility)

      "user", {id, data} ->
        insert_reply(user, List.delete(data, "@" <> user.nickname), id, visibility)
    end)
  end

  defp generate_activities(user, friends, non_friends, task_data, opts) do
    Task.async_stream(
      task_data,
      fn {visibility, type, group} ->
        insert_activity(type, visibility, group, user, friends, non_friends, opts)
      end,
      max_concurrency: @max_concurrency,
      timeout: 30_000
    )
    |> Stream.run()
  end

  defp insert_activity("simple", visibility, group, user, friends, non_friends, _opts) do
    {:ok, _activity} =
      group
      |> get_actor(user, friends, non_friends)
      |> CommonAPI.post(%{"status" => "Simple status", "visibility" => visibility})
  end

  defp insert_activity("emoji", visibility, group, user, friends, non_friends, _opts) do
    {:ok, _activity} =
      group
      |> get_actor(user, friends, non_friends)
      |> CommonAPI.post(%{
        "status" => "Simple status with emoji :firefox:",
        "visibility" => visibility
      })
  end

  defp insert_activity("mentions", visibility, group, user, friends, non_friends, _opts) do
    user_mentions =
      get_random_mentions(friends, Enum.random(0..3)) ++
        get_random_mentions(non_friends, Enum.random(0..3))

    user_mentions =
      if Enum.random([true, false]),
        do: ["@" <> user.nickname | user_mentions],
        else: user_mentions

    {:ok, _activity} =
      group
      |> get_actor(user, friends, non_friends)
      |> CommonAPI.post(%{
        "status" => Enum.join(user_mentions, ", ") <> " simple status with mentions",
        "visibility" => visibility
      })
  end

  defp insert_activity("hell_thread", visibility, group, user, friends, non_friends, _opts) do
    mentions =
      with {:ok, nil} <- Cachex.get(:user_cache, "hell_thread_mentions") do
        cached =
          ([user | Enum.take(friends, 10)] ++ Enum.take(non_friends, 10))
          |> Enum.map(&"@#{&1.nickname}")
          |> Enum.join(", ")

        Cachex.put(:user_cache, "hell_thread_mentions", cached)
        cached
      else
        {:ok, cached} -> cached
      end

    {:ok, _activity} =
      group
      |> get_actor(user, friends, non_friends)
      |> CommonAPI.post(%{
        "status" => mentions <> " hell thread status",
        "visibility" => visibility
      })
  end

  defp insert_activity("attachment", visibility, group, user, friends, non_friends, _opts) do
    actor = get_actor(group, user, friends, non_friends)

    obj_data = %{
      "actor" => actor.ap_id,
      "name" => "4467-11.jpg",
      "type" => "Document",
      "url" => [
        %{
          "href" =>
            "#{Pleroma.Web.base_url()}/media/b1b873552422a07bf53af01f3c231c841db4dfc42c35efde681abaf0f2a4eab7.jpg",
          "mediaType" => "image/jpeg",
          "type" => "Link"
        }
      ]
    }

    object = Repo.insert!(%Pleroma.Object{data: obj_data})

    {:ok, _activity} =
      CommonAPI.post(actor, %{
        "status" => "Post with attachment",
        "visibility" => visibility,
        "media_ids" => [object.id]
      })
  end

  defp insert_activity("tag", visibility, group, user, friends, non_friends, _opts) do
    {:ok, _activity} =
      group
      |> get_actor(user, friends, non_friends)
      |> CommonAPI.post(%{"status" => "Status with #tag", "visibility" => visibility})
  end

  defp insert_activity("like", visibility, group, user, friends, non_friends, opts) do
    actor = get_actor(group, user, friends, non_friends)

    with activity_id when not is_nil(activity_id) <- get_random_create_activity_id(),
         {:ok, _activity, _object} <- CommonAPI.favorite(activity_id, actor) do
      :ok
    else
      {:error, _} ->
        insert_activity("like", visibility, group, user, friends, non_friends, opts)

      nil ->
        Process.sleep(15)
        insert_activity("like", visibility, group, user, friends, non_friends, opts)
    end
  end

  defp insert_activity("reblog", visibility, group, user, friends, non_friends, opts) do
    actor = get_actor(group, user, friends, non_friends)

    with activity_id when not is_nil(activity_id) <- get_random_create_activity_id(),
         {:ok, _activity, _object} <- CommonAPI.repeat(activity_id, actor) do
      :ok
    else
      {:error, _} ->
        insert_activity("reblog", visibility, group, user, friends, non_friends, opts)

      nil ->
        Process.sleep(15)
        insert_activity("reblog", visibility, group, user, friends, non_friends, opts)
    end
  end

  defp insert_activity("simple_thread", visibility, group, user, friends, non_friends, _opts)
       when visibility in ["public", "unlisted", "private"] do
    actor = get_actor(group, user, friends, non_friends)
    tasks = get_reply_tasks(visibility, group)

    {:ok, activity} =
      CommonAPI.post(user, %{"status" => "Simple status", "visibility" => "unlisted"})

    acc = {activity.id, ["@" <> actor.nickname, "reply to status"]}
    insert_replies(tasks, visibility, user, friends, non_friends, acc)
  end

  defp insert_activity("simple_thread", "direct", group, user, friends, non_friends, _opts) do
    actor = get_actor(group, user, friends, non_friends)
    tasks = get_reply_tasks("direct", group)

    list =
      case group do
        "non_friends" ->
          Enum.take(non_friends, 3)

        _ ->
          Enum.take(friends, 3)
      end

    data = Enum.map(list, &("@" <> &1.nickname))

    {:ok, activity} =
      CommonAPI.post(actor, %{
        "status" => Enum.join(data, ", ") <> "simple status",
        "visibility" => "direct"
      })

    acc = {activity.id, ["@" <> user.nickname | data] ++ ["reply to status"]}
    insert_direct_replies(tasks, user, list, acc)
  end

  defp insert_activity("remote", _, "user", _, _, _, _), do: :ok

  defp insert_activity("remote", visibility, group, user, _friends, _non_friends, opts) do
    remote_friends =
      Users.get_users(user, limit: opts[:friends_used], local: :external, friends?: true)

    remote_non_friends =
      Users.get_users(user, limit: opts[:non_friends_used], local: :external, friends?: false)

    actor = get_actor(group, user, remote_friends, remote_non_friends)

    {act_data, obj_data} = prepare_activity_data(actor, visibility, user)
    {activity_data, object_data} = other_data(actor)

    activity_data
    |> Map.merge(act_data)
    |> Map.put("object", Map.merge(object_data, obj_data))
    |> Pleroma.Web.ActivityPub.ActivityPub.insert(false)
  end

  defp get_actor("user", user, _friends, _non_friends), do: user
  defp get_actor("friends", _user, friends, _non_friends), do: Enum.random(friends)
  defp get_actor("non_friends", _user, _friends, non_friends), do: Enum.random(non_friends)

  defp other_data(actor) do
    %{host: host} = URI.parse(actor.ap_id)
    datetime = DateTime.utc_now()
    context_id = "http://#{host}:4000/contexts/#{UUID.generate()}"
    activity_id = "http://#{host}:4000/activities/#{UUID.generate()}"
    object_id = "http://#{host}:4000/objects/#{UUID.generate()}"

    activity_data = %{
      "actor" => actor.ap_id,
      "context" => context_id,
      "id" => activity_id,
      "published" => datetime,
      "type" => "Create",
      "directMessage" => false
    }

    object_data = %{
      "actor" => actor.ap_id,
      "attachment" => [],
      "attributedTo" => actor.ap_id,
      "bcc" => [],
      "bto" => [],
      "content" => "Remote post",
      "context" => context_id,
      "conversation" => context_id,
      "emoji" => %{},
      "id" => object_id,
      "published" => datetime,
      "sensitive" => false,
      "summary" => "",
      "tag" => [],
      "to" => ["https://www.w3.org/ns/activitystreams#Public"],
      "type" => "Note"
    }

    {activity_data, object_data}
  end

  defp prepare_activity_data(actor, "public", _mention) do
    obj_data = %{
      "cc" => [actor.follower_address],
      "to" => [Constants.as_public()]
    }

    act_data = %{
      "cc" => [actor.follower_address],
      "to" => [Constants.as_public()]
    }

    {act_data, obj_data}
  end

  defp prepare_activity_data(actor, "private", _mention) do
    obj_data = %{
      "cc" => [],
      "to" => [actor.follower_address]
    }

    act_data = %{
      "cc" => [],
      "to" => [actor.follower_address]
    }

    {act_data, obj_data}
  end

  defp prepare_activity_data(actor, "unlisted", _mention) do
    obj_data = %{
      "cc" => [Constants.as_public()],
      "to" => [actor.follower_address]
    }

    act_data = %{
      "cc" => [Constants.as_public()],
      "to" => [actor.follower_address]
    }

    {act_data, obj_data}
  end

  defp prepare_activity_data(_actor, "direct", mention) do
    %{host: mentioned_host} = URI.parse(mention.ap_id)

    obj_data = %{
      "cc" => [],
      "content" =>
        "<span class=\"h-card\"><a class=\"u-url mention\" href=\"#{mention.ap_id}\" rel=\"ugc\">@<span>#{
          mention.nickname
        }</span></a></span> direct message",
      "tag" => [
        %{
          "href" => mention.ap_id,
          "name" => "@#{mention.nickname}@#{mentioned_host}",
          "type" => "Mention"
        }
      ],
      "to" => [mention.ap_id]
    }

    act_data = %{
      "cc" => [],
      "directMessage" => true,
      "to" => [mention.ap_id]
    }

    {act_data, obj_data}
  end

  defp get_reply_tasks("public", "user"), do: ~w(friend non_friend user)
  defp get_reply_tasks("public", "friends"), do: ~w(non_friend user friend)
  defp get_reply_tasks("public", "non_friends"), do: ~w(user friend non_friend)

  defp get_reply_tasks(visibility, "user") when visibility in ["unlisted", "private"],
    do: ~w(friend user friend)

  defp get_reply_tasks(visibility, "friends") when visibility in ["unlisted", "private"],
    do: ~w(user friend user)

  defp get_reply_tasks(visibility, "non_friends") when visibility in ["unlisted", "private"],
    do: []

  defp get_reply_tasks("direct", "user"), do: ~w(friend user friend)
  defp get_reply_tasks("direct", "friends"), do: ~w(user friend user)
  defp get_reply_tasks("direct", "non_friends"), do: ~w(user non_friend user)

  defp insert_replies(tasks, visibility, user, friends, non_friends, acc) do
    Enum.reduce(tasks, acc, fn
      "friend", {id, data} ->
        friend = Enum.random(friends)
        insert_reply(friend, data, id, visibility)

      "non_friend", {id, data} ->
        non_friend = Enum.random(non_friends)
        insert_reply(non_friend, data, id, visibility)

      "user", {id, data} ->
        insert_reply(user, data, id, visibility)
    end)
  end

  defp insert_direct_replies(tasks, user, list, acc) do
    Enum.reduce(tasks, acc, fn
      group, {id, data} when group in ["friend", "non_friend"] ->
        actor = Enum.random(list)

        {reply_id, _} =
          insert_reply(actor, List.delete(data, "@" <> actor.nickname), id, "direct")

        {reply_id, data}

      "user", {id, data} ->
        {reply_id, _} = insert_reply(user, List.delete(data, "@" <> user.nickname), id, "direct")
        {reply_id, data}
    end)
  end

  defp insert_reply(actor, data, activity_id, visibility) do
    {:ok, reply} =
      CommonAPI.post(actor, %{
        "status" => Enum.join(data, ", "),
        "visibility" => visibility,
        "in_reply_to_status_id" => activity_id
      })

    {reply.id, ["@" <> actor.nickname | data]}
  end

  defp get_random_mentions(_users, count) when count == 0, do: []

  defp get_random_mentions(users, count) do
    users
    |> Enum.shuffle()
    |> Enum.take(count)
    |> Enum.map(&"@#{&1.nickname}")
  end

  defp get_random_create_activity_id do
    Repo.one(
      from(a in Pleroma.Activity,
        where: fragment("(?)->>'type' = ?", a.data, ^"Create"),
        order_by: fragment("RANDOM()"),
        limit: 1,
        select: a.id
      )
    )
  end
end
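A minimal usage sketch for the module above, assuming benchmark users have already been seeded on a local instance (the nickname and option values here are only illustrative, not part of the diff):

    # Run inside IEx on a benchmark instance.
    user = Pleroma.Repo.get_by(Pleroma.User, nickname: "nick1")

    # Generates iterations of public/private/direct/unlisted activities of every
    # configured type for the given user, plus one public and one private long thread
    # near the end of the run.
    Pleroma.LoadTesting.Activities.generate(user,
      iterations: 10,
      friends_used: 5,
      non_friends_used: 5
    )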
benchmarks/load_testing/fetcher.ex
@@ -1,260 +1,489 @@
 defmodule Pleroma.LoadTesting.Fetcher do
-  use Pleroma.LoadTesting.Helper
+  alias Pleroma.Activity
+  alias Pleroma.Pagination
+  alias Pleroma.Repo
+  alias Pleroma.User
+  alias Pleroma.Web.ActivityPub.ActivityPub
+  alias Pleroma.Web.MastodonAPI.MastodonAPI
+  alias Pleroma.Web.MastodonAPI.StatusView
 
-  def fetch_user(user) do
+  @spec run_benchmarks(User.t()) :: any()
-    Benchee.run(%{
+  def run_benchmarks(user) do
+    fetch_user(user)
+    fetch_timelines(user)
+    render_views(user)
+  end
+
+  defp formatters do
+    [
+      Benchee.Formatters.Console
+    ]
+  end
+
+  defp fetch_user(user) do
+    Benchee.run(
+      %{
       "By id" => fn -> Repo.get_by(User, id: user.id) end,
       "By ap_id" => fn -> Repo.get_by(User, ap_id: user.ap_id) end,
       "By email" => fn -> Repo.get_by(User, email: user.email) end,
       "By nickname" => fn -> Repo.get_by(User, nickname: user.nickname) end
-    })
+      },
+      formatters: formatters()
+    )
   end
 
-  def query_timelines(user) do
+  defp fetch_timelines(user) do
-    home_timeline_params = %{
+    fetch_home_timeline(user)
-      "count" => 20,
+    fetch_direct_timeline(user)
-      "with_muted" => true,
+    fetch_public_timeline(user)
-      "type" => ["Create", "Announce"],
+    fetch_public_timeline(user, :local)
+    fetch_public_timeline(user, :tag)
+    fetch_notifications(user)
+    fetch_favourites(user)
+    fetch_long_thread(user)
+  end
+
+  defp render_views(user) do
+    render_timelines(user)
+    render_long_thread(user)
+  end
+
+  defp opts_for_home_timeline(user) do
+    %{
       "blocking_user" => user,
+      "count" => "20",
       "muting_user" => user,
+      "type" => ["Create", "Announce"],
+      "user" => user,
+      "with_muted" => "true"
+    }
+  end
+
+  defp fetch_home_timeline(user) do
+    opts = opts_for_home_timeline(user)
+
+    recipients = [user.ap_id | User.following(user)]
+
+    first_page_last =
+      ActivityPub.fetch_activities(recipients, opts) |> Enum.reverse() |> List.last()
+
+    second_page_last =
+      ActivityPub.fetch_activities(recipients, Map.put(opts, "max_id", first_page_last.id))
+      |> Enum.reverse()
+      |> List.last()
+
+    third_page_last =
+      ActivityPub.fetch_activities(recipients, Map.put(opts, "max_id", second_page_last.id))
+      |> Enum.reverse()
+      |> List.last()
+
+    forth_page_last =
+      ActivityPub.fetch_activities(recipients, Map.put(opts, "max_id", third_page_last.id))
+      |> Enum.reverse()
+      |> List.last()
+
+    Benchee.run(
+      %{
+        "home timeline" => fn opts -> ActivityPub.fetch_activities(recipients, opts) end
+      },
+      inputs: %{
+        "1 page" => opts,
+        "2 page" => Map.put(opts, "max_id", first_page_last.id),
+        "3 page" => Map.put(opts, "max_id", second_page_last.id),
+        "4 page" => Map.put(opts, "max_id", third_page_last.id),
+        "5 page" => Map.put(opts, "max_id", forth_page_last.id),
+        "1 page only media" => Map.put(opts, "only_media", "true"),
+        "2 page only media" =>
+          Map.put(opts, "max_id", first_page_last.id) |> Map.put("only_media", "true"),
+        "3 page only media" =>
+          Map.put(opts, "max_id", second_page_last.id) |> Map.put("only_media", "true"),
+        "4 page only media" =>
+          Map.put(opts, "max_id", third_page_last.id) |> Map.put("only_media", "true"),
+        "5 page only media" =>
+          Map.put(opts, "max_id", forth_page_last.id) |> Map.put("only_media", "true")
+      },
+      formatters: formatters()
+    )
+  end
+
+  defp opts_for_direct_timeline(user) do
+    %{
+      :visibility => "direct",
+      "blocking_user" => user,
+      "count" => "20",
+      "type" => "Create",
+      "user" => user,
+      "with_muted" => "true"
+    }
+  end
+
+  defp fetch_direct_timeline(user) do
+    recipients = [user.ap_id]
+
+    opts = opts_for_direct_timeline(user)
+
+    first_page_last =
+      recipients
+      |> ActivityPub.fetch_activities_query(opts)
+      |> Pagination.fetch_paginated(opts)
+      |> List.last()
+
+    opts2 = Map.put(opts, "max_id", first_page_last.id)
+
+    second_page_last =
+      recipients
+      |> ActivityPub.fetch_activities_query(opts2)
+      |> Pagination.fetch_paginated(opts2)
+      |> List.last()
+
+    opts3 = Map.put(opts, "max_id", second_page_last.id)
+
+    third_page_last =
+      recipients
+      |> ActivityPub.fetch_activities_query(opts3)
+      |> Pagination.fetch_paginated(opts3)
+      |> List.last()
+
+    opts4 = Map.put(opts, "max_id", third_page_last.id)
+
+    forth_page_last =
+      recipients
+      |> ActivityPub.fetch_activities_query(opts4)
+      |> Pagination.fetch_paginated(opts4)
+      |> List.last()
+
+    Benchee.run(
+      %{
+        "direct timeline" => fn opts ->
+          ActivityPub.fetch_activities_query(recipients, opts) |> Pagination.fetch_paginated(opts)
+        end
+      },
+      inputs: %{
+        "1 page" => opts,
+        "2 page" => opts2,
+        "3 page" => opts3,
+        "4 page" => opts4,
+        "5 page" => Map.put(opts4, "max_id", forth_page_last.id)
+      },
+      formatters: formatters()
+    )
+  end
+
+  defp opts_for_public_timeline(user) do
+    %{
+      "type" => ["Create", "Announce"],
+      "local_only" => false,
+      "blocking_user" => user,
+      "muting_user" => user
+    }
+  end
+
+  defp opts_for_public_timeline(user, :local) do
+    %{
+      "type" => ["Create", "Announce"],
+      "local_only" => true,
+      "blocking_user" => user,
+      "muting_user" => user
+    }
+  end
+
+  defp opts_for_public_timeline(user, :tag) do
+    %{
+      "blocking_user" => user,
+      "count" => "20",
+      "local_only" => nil,
+      "muting_user" => user,
+      "tag" => ["tag"],
+      "tag_all" => [],
+      "tag_reject" => [],
+      "type" => "Create",
+      "user" => user,
+      "with_muted" => "true"
+    }
+  end
+
+  defp fetch_public_timeline(user) do
+    opts = opts_for_public_timeline(user)
+
+    fetch_public_timeline(opts, "public timeline")
+  end
+
+  defp fetch_public_timeline(user, :local) do
+    opts = opts_for_public_timeline(user, :local)
+
+    fetch_public_timeline(opts, "public timeline only local")
+  end
+
+  defp fetch_public_timeline(user, :tag) do
+    opts = opts_for_public_timeline(user, :tag)
+
+    fetch_public_timeline(opts, "hashtag timeline")
+  end
+
+  defp fetch_public_timeline(user, :only_media) do
+    opts = opts_for_public_timeline(user) |> Map.put("only_media", "true")
+
+    fetch_public_timeline(opts, "public timeline only media")
+  end
+
+  defp fetch_public_timeline(opts, title) when is_binary(title) do
+    first_page_last = ActivityPub.fetch_public_activities(opts) |> List.last()
+
+    second_page_last =
+      ActivityPub.fetch_public_activities(Map.put(opts, "max_id", first_page_last.id))
+      |> List.last()
+
+    third_page_last =
+      ActivityPub.fetch_public_activities(Map.put(opts, "max_id", second_page_last.id))
+      |> List.last()
+
+    forth_page_last =
+      ActivityPub.fetch_public_activities(Map.put(opts, "max_id", third_page_last.id))
+      |> List.last()
+
+    Benchee.run(
+      %{
+        title => fn opts ->
+          ActivityPub.fetch_public_activities(opts)
+        end
+      },
+      inputs: %{
+        "1 page" => opts,
+        "2 page" => Map.put(opts, "max_id", first_page_last.id),
+        "3 page" => Map.put(opts, "max_id", second_page_last.id),
+        "4 page" => Map.put(opts, "max_id", third_page_last.id),
+        "5 page" => Map.put(opts, "max_id", forth_page_last.id)
+      },
+      formatters: formatters()
+    )
+  end
+
+  defp opts_for_notifications do
+    %{"count" => "20", "with_muted" => "true"}
+  end
+
+  defp fetch_notifications(user) do
+    opts = opts_for_notifications()
+
+    first_page_last = MastodonAPI.get_notifications(user, opts) |> List.last()
+
+    second_page_last =
+      MastodonAPI.get_notifications(user, Map.put(opts, "max_id", first_page_last.id))
+      |> List.last()
+
+    third_page_last =
+      MastodonAPI.get_notifications(user, Map.put(opts, "max_id", second_page_last.id))
+      |> List.last()
+
+    forth_page_last =
+      MastodonAPI.get_notifications(user, Map.put(opts, "max_id", third_page_last.id))
+      |> List.last()
+
+    Benchee.run(
+      %{
+        "Notifications" => fn opts ->
+          MastodonAPI.get_notifications(user, opts)
+        end
+      },
+      inputs: %{
+        "1 page" => opts,
+        "2 page" => Map.put(opts, "max_id", first_page_last.id),
+        "3 page" => Map.put(opts, "max_id", second_page_last.id),
+        "4 page" => Map.put(opts, "max_id", third_page_last.id),
+        "5 page" => Map.put(opts, "max_id", forth_page_last.id)
+      },
+      formatters: formatters()
+    )
+  end
+
+  defp fetch_favourites(user) do
+    first_page_last = ActivityPub.fetch_favourites(user) |> List.last()
+
+    second_page_last =
+      ActivityPub.fetch_favourites(user, %{"max_id" => first_page_last.id}) |> List.last()
+
+    third_page_last =
+      ActivityPub.fetch_favourites(user, %{"max_id" => second_page_last.id}) |> List.last()
+
+    forth_page_last =
+      ActivityPub.fetch_favourites(user, %{"max_id" => third_page_last.id}) |> List.last()
+
+    Benchee.run(
+      %{
+        "Favourites" => fn opts ->
+          ActivityPub.fetch_favourites(user, opts)
+        end
+      },
+      inputs: %{
+        "1 page" => %{},
+        "2 page" => %{"max_id" => first_page_last.id},
+        "3 page" => %{"max_id" => second_page_last.id},
+        "4 page" => %{"max_id" => third_page_last.id},
+        "5 page" => %{"max_id" => forth_page_last.id}
+      },
+      formatters: formatters()
+    )
+  end
+
+  defp opts_for_long_thread(user) do
+    %{
+      "blocking_user" => user,
       "user" => user
     }
+  end
+
-    mastodon_public_timeline_params = %{
+  defp fetch_long_thread(user) do
-      "count" => 20,
+    %{public_thread: public, private_thread: private} =
-      "local_only" => true,
+      Agent.get(:benchmark_state, fn state -> state end)
-      "only_media" => "false",
-      "type" => ["Create", "Announce"],
-      "with_muted" => "true",
-      "blocking_user" => user,
-      "muting_user" => user
-    }
 
-    mastodon_federated_timeline_params = %{
+    opts = opts_for_long_thread(user)
-      "count" => 20,
-      "only_media" => "false",
-      "type" => ["Create", "Announce"],
-      "with_muted" => "true",
-      "blocking_user" => user,
-      "muting_user" => user
-    }
 
-    following = User.following(user)
+    private_input = {private.data["context"], Map.put(opts, "exclude_id", private.id)}
 
-    Benchee.run(%{
+    public_input = {public.data["context"], Map.put(opts, "exclude_id", public.id)}
-      "User home timeline" => fn ->
-        Pleroma.Web.ActivityPub.ActivityPub.fetch_activities(
+    Benchee.run(
-          following,
+      %{
-          home_timeline_params
+        "fetch context" => fn {context, opts} ->
-        )
+          ActivityPub.fetch_activities_for_context(context, opts)
-      end,
+        end
-      "User mastodon public timeline" => fn ->
+      },
-        Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities(
+      inputs: %{
-          mastodon_public_timeline_params
+        "Private long thread" => private_input,
-        )
+        "Public long thread" => public_input
-      end,
+      },
-      "User mastodon federated public timeline" => fn ->
+      formatters: formatters()
-        Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities(
-          mastodon_federated_timeline_params
     )
   end
-    })
 
-    home_activities =
+  defp render_timelines(user) do
-      Pleroma.Web.ActivityPub.ActivityPub.fetch_activities(
+    opts = opts_for_home_timeline(user)
-        following,
-        home_timeline_params
-      )
 
-    public_activities =
+    recipients = [user.ap_id | User.following(user)]
-      Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities(mastodon_public_timeline_params)
 
-    public_federated_activities =
+    home_activities = ActivityPub.fetch_activities(recipients, opts) |> Enum.reverse()
-      Pleroma.Web.ActivityPub.ActivityPub.fetch_public_activities(
-        mastodon_federated_timeline_params
-      )
 
-    Benchee.run(%{
+    recipients = [user.ap_id]
 
+    opts = opts_for_direct_timeline(user)
+
+    direct_activities =
+      recipients
+      |> ActivityPub.fetch_activities_query(opts)
+      |> Pagination.fetch_paginated(opts)
+
+    opts = opts_for_public_timeline(user)
+
+    public_activities = ActivityPub.fetch_public_activities(opts)
+
+    opts = opts_for_public_timeline(user, :tag)
+
+    tag_activities = ActivityPub.fetch_public_activities(opts)
+
+    opts = opts_for_notifications()
+
+    notifications = MastodonAPI.get_notifications(user, opts)
+
+    favourites = ActivityPub.fetch_favourites(user)
+
+    Benchee.run(
+      %{
         "Rendering home timeline" => fn ->
-          Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
+          StatusView.render("index.json", %{
            activities: home_activities,
            for: user,
            as: :activity
          })
        end,
+        "Rendering direct timeline" => fn ->
+          StatusView.render("index.json", %{
+            activities: direct_activities,
+            for: user,
+            as: :activity
+          })
+        end,
        "Rendering public timeline" => fn ->
-          Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
+          StatusView.render("index.json", %{
            activities: public_activities,
            for: user,
            as: :activity
          })
        end,
-        "Rendering public federated timeline" => fn ->
+        "Rendering tag timeline" => fn ->
-          Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
+          StatusView.render("index.json", %{
-            activities: public_federated_activities,
+            activities: tag_activities,
            for: user,
            as: :activity
          })
        end,
-        "Rendering favorites timeline" => fn ->
+        "Rendering notifications" => fn ->
-          conn = Phoenix.ConnTest.build_conn(:get, "http://localhost:4001/api/v1/favourites", nil)
+          Pleroma.Web.MastodonAPI.NotificationView.render("index.json", %{
-          Pleroma.Web.MastodonAPI.StatusController.favourites(
+            notifications: notifications,
-            %Plug.Conn{conn |
+            for: user
-              assigns: %{user: user},
+          })
-              query_params: %{"limit" => "0"},
+        end,
-              body_params: %{},
+        "Rendering favourites timeline" => fn ->
-              cookies: %{},
+          StatusView.render("index.json", %{
-              params: %{},
+            activities: favourites,
-              path_params: %{},
+            for: user,
-              private: %{
+            as: :activity
-                Pleroma.Web.Router => {[], %{}},
+          })
-                phoenix_router: Pleroma.Web.Router,
+        end
-                phoenix_action: :favourites,
+      },
-                phoenix_controller: Pleroma.Web.MastodonAPI.StatusController,
+      formatters: formatters()
-                phoenix_endpoint: Pleroma.Web.Endpoint,
+    )
-                phoenix_format: "json",
+  end
-                phoenix_layout: {Pleroma.Web.LayoutView, "app.html"},
-                phoenix_recycled: true,
 
-                phoenix_view: Pleroma.Web.MastodonAPI.StatusView,
+  defp render_long_thread(user) do
-                plug_session: %{"user_id" => user.id},
+    %{public_thread: public, private_thread: private} =
-                plug_session_fetch: :done,
+      Agent.get(:benchmark_state, fn state -> state end)
-                plug_session_info: :write,
-                plug_skip_csrf_protection: true
+    opts = %{for: user}
+    public_activity = Activity.get_by_id_with_object(public.id)
+    private_activity = Activity.get_by_id_with_object(private.id)
+
+    Benchee.run(
+      %{
+        "render" => fn opts ->
+          StatusView.render("show.json", opts)
+        end
+      },
+      inputs: %{
+        "Public root" => Map.put(opts, :activity, public_activity),
+        "Private root" => Map.put(opts, :activity, private_activity)
+      },
+      formatters: formatters()
+    )
+
+    fetch_opts = opts_for_long_thread(user)
+
+    public_context =
+      ActivityPub.fetch_activities_for_context(
+        public.data["context"],
+        Map.put(fetch_opts, "exclude_id", public.id)
+      )
+
+    private_context =
+      ActivityPub.fetch_activities_for_context(
+        private.data["context"],
+        Map.put(fetch_opts, "exclude_id", private.id)
+      )
+
+    Benchee.run(
+      %{
+        "render" => fn opts ->
+          StatusView.render("context.json", opts)
+        end
+      },
+      inputs: %{
+        "Public context" => %{user: user, activity: public_activity, activities: public_context},
+        "Private context" => %{
+          user: user,
+          activity: private_activity,
+          activities: private_context
         }
       },
-      %{})
+      formatters: formatters()
-    end,
-    })
-  end
 
-  def query_notifications(user) do
-    without_muted_params = %{"count" => "20", "with_muted" => "false"}
-    with_muted_params = %{"count" => "20", "with_muted" => "true"}
 
-    Benchee.run(%{
-      "Notifications without muted" => fn ->
-        Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(user, without_muted_params)
-      end,
-      "Notifications with muted" => fn ->
-        Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(user, with_muted_params)
-      end
-    })
 
-    without_muted_notifications =
-      Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(user, without_muted_params)
 
-    with_muted_notifications =
-      Pleroma.Web.MastodonAPI.MastodonAPI.get_notifications(user, with_muted_params)
 
-    Benchee.run(%{
-      "Render notifications without muted" => fn ->
-        Pleroma.Web.MastodonAPI.NotificationView.render("index.json", %{
-          notifications: without_muted_notifications,
-          for: user
-        })
-      end,
-      "Render notifications with muted" => fn ->
-        Pleroma.Web.MastodonAPI.NotificationView.render("index.json", %{
-          notifications: with_muted_notifications,
-          for: user
-        })
-      end
-    })
-  end
 
-  def query_dms(user) do
-    params = %{
-      "count" => "20",
-      "with_muted" => "true",
-      "type" => "Create",
-      "blocking_user" => user,
-      "user" => user,
-      visibility: "direct"
-    }
 
-    Benchee.run(%{
-      "Direct messages with muted" => fn ->
-        Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_query([user.ap_id], params)
-        |> Pleroma.Pagination.fetch_paginated(params)
-      end,
-      "Direct messages without muted" => fn ->
-        Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_query([user.ap_id], params)
-        |> Pleroma.Pagination.fetch_paginated(Map.put(params, "with_muted", false))
-      end
-    })
 
-    dms_with_muted =
-      Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_query([user.ap_id], params)
-      |> Pleroma.Pagination.fetch_paginated(params)
 
-    dms_without_muted =
-      Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_query([user.ap_id], params)
-      |> Pleroma.Pagination.fetch_paginated(Map.put(params, "with_muted", false))
 
-    Benchee.run(%{
-      "Rendering dms with muted" => fn ->
-        Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
-          activities: dms_with_muted,
-          for: user,
-          as: :activity
-        })
-      end,
-      "Rendering dms without muted" => fn ->
-        Pleroma.Web.MastodonAPI.StatusView.render("index.json", %{
-          activities: dms_without_muted,
-          for: user,
-          as: :activity
-        })
-      end
-    })
-  end
 
-  def query_long_thread(user, activity) do
-    Benchee.run(%{
-      "Fetch main post" => fn ->
-        Pleroma.Activity.get_by_id_with_object(activity.id)
-      end,
-      "Fetch context of main post" => fn ->
-        Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_for_context(
-          activity.data["context"],
-          %{
-            "blocking_user" => user,
-            "user" => user,
-            "exclude_id" => activity.id
-          }
     )
   end
-    })
 
-    activity = Pleroma.Activity.get_by_id_with_object(activity.id)
 
-    context =
-      Pleroma.Web.ActivityPub.ActivityPub.fetch_activities_for_context(
-        activity.data["context"],
-        %{
-          "blocking_user" => user,
-          "user" => user,
-          "exclude_id" => activity.id
-        }
-      )
 
-    Benchee.run(%{
-      "Render status" => fn ->
-        Pleroma.Web.MastodonAPI.StatusView.render("show.json", %{
-          activity: activity,
-          for: user
-        })
-      end,
-      "Render context" => fn ->
-        Pleroma.Web.MastodonAPI.StatusView.render(
-          "index.json",
-          for: user,
-          activities: context,
-          as: :activity
-        )
-        |> Enum.reverse()
-      end
-    })
-  end
 end
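A usage sketch for the reworked fetcher above, assuming benchmark data has already been generated for the user; run_benchmarks/1 then runs the fetch and render benchmarks in sequence (the nickname below is illustrative, not part of the diff):

    # Illustrative only; run inside IEx on a benchmark instance.
    user = Pleroma.Repo.get_by(Pleroma.User, nickname: "nick1")
    Pleroma.LoadTesting.Fetcher.run_benchmarks(user)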
@ -1,410 +0,0 @@
|
||||||
defmodule Pleroma.LoadTesting.Generator do
|
|
||||||
use Pleroma.LoadTesting.Helper
|
|
||||||
alias Pleroma.Web.CommonAPI
|
|
||||||
|
|
||||||
def generate_like_activities(user, posts) do
|
|
||||||
count_likes = Kernel.trunc(length(posts) / 4)
|
|
||||||
IO.puts("Starting generating #{count_likes} like activities...")
|
|
||||||
|
|
||||||
{time, _} =
|
|
||||||
:timer.tc(fn ->
|
|
||||||
Task.async_stream(
|
|
||||||
Enum.take_random(posts, count_likes),
|
|
||||||
fn post -> {:ok, _, _} = CommonAPI.favorite(post.id, user) end,
|
|
||||||
max_concurrency: 10,
|
|
||||||
timeout: 30_000
|
|
||||||
)
|
|
||||||
|> Stream.run()
|
|
||||||
end)
|
|
||||||
|
|
||||||
IO.puts("Inserting like activities take #{to_sec(time)} sec.\n")
|
|
||||||
end
|
|
||||||
|
|
||||||
def generate_users(opts) do
|
|
||||||
IO.puts("Starting generating #{opts[:users_max]} users...")
|
|
||||||
{time, users} = :timer.tc(fn -> do_generate_users(opts) end)
|
|
||||||
|
|
||||||
IO.puts("Inserting users took #{to_sec(time)} sec.\n")
|
|
||||||
users
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_generate_users(opts) do
|
|
||||||
max = Keyword.get(opts, :users_max)
|
|
||||||
|
|
||||||
Task.async_stream(
|
|
||||||
1..max,
|
|
||||||
&generate_user_data(&1),
|
|
||||||
max_concurrency: 10,
|
|
||||||
timeout: 30_000
|
|
||||||
)
|
|
||||||
|> Enum.to_list()
|
|
||||||
end
|
|
||||||
|
|
||||||
defp generate_user_data(i) do
|
|
||||||
remote = Enum.random([true, false])
|
|
||||||
|
|
||||||
user = %User{
|
|
||||||
name: "Test テスト User #{i}",
|
|
||||||
email: "user#{i}@example.com",
|
|
||||||
nickname: "nick#{i}",
|
|
||||||
password_hash:
|
|
||||||
"$pbkdf2-sha512$160000$bU.OSFI7H/yqWb5DPEqyjw$uKp/2rmXw12QqnRRTqTtuk2DTwZfF8VR4MYW2xMeIlqPR/UX1nT1CEKVUx2CowFMZ5JON8aDvURrZpJjSgqXrg",
|
|
||||||
bio: "Tester Number #{i}",
|
|
||||||
local: remote
|
|
||||||
}
|
|
||||||
|
|
||||||
user_urls =
|
|
||||||
if remote do
|
|
||||||
base_url =
|
|
||||||
Enum.random(["https://domain1.com", "https://domain2.com", "https://domain3.com"])
|
|
||||||
|
|
||||||
ap_id = "#{base_url}/users/#{user.nickname}"
|
|
||||||
|
|
||||||
%{
|
|
||||||
ap_id: ap_id,
|
|
||||||
follower_address: ap_id <> "/followers",
|
|
||||||
following_address: ap_id <> "/following"
|
|
||||||
}
|
|
||||||
else
|
|
||||||
%{
|
|
||||||
ap_id: User.ap_id(user),
|
|
||||||
follower_address: User.ap_followers(user),
|
|
||||||
following_address: User.ap_following(user)
|
|
||||||
}
|
|
||||||
end
|
|
||||||
|
|
||||||
user = Map.merge(user, user_urls)
|
|
||||||
|
|
||||||
Repo.insert!(user)
|
|
||||||
end
|
|
||||||
|
|
||||||
def generate_activities(user, users) do
|
|
||||||
do_generate_activities(user, users)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_generate_activities(user, users) do
|
|
||||||
IO.puts("Starting generating 20000 common activities...")
|
|
||||||
|
|
||||||
{time, _} =
|
|
||||||
:timer.tc(fn ->
|
|
||||||
Task.async_stream(
|
|
||||||
1..20_000,
|
|
||||||
fn _ ->
|
|
||||||
do_generate_activity([user | users])
|
|
||||||
end,
|
|
||||||
max_concurrency: 10,
|
|
||||||
timeout: 30_000
|
|
||||||
)
|
|
||||||
|> Stream.run()
|
|
||||||
end)
|
|
||||||
|
|
||||||
IO.puts("Inserting common activities take #{to_sec(time)} sec.\n")
|
|
||||||
|
|
||||||
IO.puts("Starting generating 20000 activities with mentions...")
|
|
||||||
|
|
||||||
{time, _} =
|
|
||||||
:timer.tc(fn ->
|
|
||||||
Task.async_stream(
|
|
||||||
1..20_000,
|
|
||||||
fn _ ->
|
|
||||||
do_generate_activity_with_mention(user, users)
|
|
||||||
end,
|
|
||||||
max_concurrency: 10,
|
|
||||||
timeout: 30_000
|
|
||||||
)
|
|
||||||
|> Stream.run()
|
|
||||||
end)
|
|
||||||
|
|
||||||
IO.puts("Inserting activities with menthions take #{to_sec(time)} sec.\n")
|
|
||||||
|
|
||||||
IO.puts("Starting generating 10000 activities with threads...")
|
|
||||||
|
|
||||||
{time, _} =
|
|
||||||
:timer.tc(fn ->
|
|
||||||
Task.async_stream(
|
|
||||||
1..10_000,
|
|
||||||
fn _ ->
|
|
||||||
do_generate_threads([user | users])
|
|
||||||
end,
|
|
||||||
max_concurrency: 10,
|
|
||||||
timeout: 30_000
|
|
||||||
)
|
|
||||||
|> Stream.run()
|
|
||||||
end)
|
|
||||||
|
|
||||||
IO.puts("Inserting activities with threads take #{to_sec(time)} sec.\n")
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_generate_activity(users) do
|
|
||||||
post = %{
|
|
||||||
"status" => "Some status without mention with random user"
|
|
||||||
}
|
|
||||||
|
|
||||||
CommonAPI.post(Enum.random(users), post)
|
|
||||||
end
|
|
||||||
|
|
||||||
def generate_power_intervals(opts \\ []) do
|
|
||||||
count = Keyword.get(opts, :count, 20)
|
|
||||||
power = Keyword.get(opts, :power, 2)
|
|
||||||
IO.puts("Generating #{count} intervals for a power #{power} series...")
|
|
||||||
counts = Enum.map(1..count, fn n -> :math.pow(n, power) end)
|
|
||||||
sum = Enum.sum(counts)
|
|
||||||
|
|
||||||
densities =
|
|
||||||
Enum.map(counts, fn c ->
|
|
||||||
c / sum
|
|
||||||
end)
|
|
||||||
|
|
||||||
densities
|
|
||||||
|> Enum.reduce(0, fn density, acc ->
|
|
||||||
if acc == 0 do
|
|
||||||
[{0, density}]
|
|
||||||
else
|
|
||||||
[{_, lower} | _] = acc
|
|
||||||
[{lower, lower + density} | acc]
|
|
||||||
end
|
|
||||||
end)
|
|
||||||
|> Enum.reverse()
|
|
||||||
end
|
|
||||||
|
|
||||||
def generate_tagged_activities(opts \\ []) do
|
|
||||||
tag_count = Keyword.get(opts, :tag_count, 20)
|
|
||||||
users = Keyword.get(opts, :users, Repo.all(User))
|
|
||||||
activity_count = Keyword.get(opts, :count, 200_000)
|
|
||||||
|
|
||||||
intervals = generate_power_intervals(count: tag_count)
|
|
||||||
|
|
||||||
IO.puts(
|
|
||||||
"Generating #{activity_count} activities using #{tag_count} different tags of format `tag_n`, starting at tag_0"
|
|
||||||
)
|
|
||||||
|
|
||||||
Enum.each(1..activity_count, fn _ ->
|
|
||||||
random = :rand.uniform()
|
|
||||||
i = Enum.find_index(intervals, fn {lower, upper} -> lower <= random && upper > random end)
|
|
||||||
CommonAPI.post(Enum.random(users), %{"status" => "a post with the tag #tag_#{i}"})
|
|
||||||
end)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_generate_activity_with_mention(user, users) do
|
|
||||||
mentions_cnt = Enum.random([2, 3, 4, 5])
|
|
||||||
with_user = Enum.random([true, false])
|
|
||||||
users = Enum.shuffle(users)
|
|
||||||
mentions_users = Enum.take(users, mentions_cnt)
|
|
||||||
mentions_users = if with_user, do: [user | mentions_users], else: mentions_users
|
|
||||||
|
|
||||||
mentions_str =
|
|
||||||
Enum.map(mentions_users, fn user -> "@" <> user.nickname end) |> Enum.join(", ")
|
|
||||||
|
|
||||||
post = %{
|
|
||||||
"status" => mentions_str <> "some status with mentions random users"
|
|
||||||
}
|
|
||||||
|
|
||||||
CommonAPI.post(Enum.random(users), post)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_generate_threads(users) do
|
|
||||||
thread_length = Enum.random([2, 3, 4, 5])
|
|
||||||
actor = Enum.random(users)
|
|
||||||
|
|
||||||
post = %{
|
|
||||||
"status" => "Start of the thread"
|
|
||||||
}
|
|
||||||
|
|
||||||
{:ok, activity} = CommonAPI.post(actor, post)
|
|
||||||
|
|
||||||
Enum.each(1..thread_length, fn _ ->
|
|
||||||
user = Enum.random(users)
|
|
||||||
|
|
||||||
post = %{
|
|
||||||
"status" => "@#{actor.nickname} reply to thread",
|
|
||||||
"in_reply_to_status_id" => activity.id
|
|
||||||
}
|
|
||||||
|
|
||||||
CommonAPI.post(user, post)
|
|
||||||
end)
|
|
||||||
end
|
|
||||||
|
|
||||||
def generate_remote_activities(user, users) do
|
|
||||||
do_generate_remote_activities(user, users)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_generate_remote_activities(user, users) do
|
|
||||||
IO.puts("Starting generating 10000 remote activities...")
|
|
||||||
|
|
||||||
{time, _} =
|
|
||||||
:timer.tc(fn ->
|
|
||||||
Task.async_stream(
|
|
||||||
1..10_000,
|
|
||||||
fn i ->
|
|
||||||
do_generate_remote_activity(i, user, users)
|
|
||||||
end,
|
|
||||||
max_concurrency: 10,
|
|
||||||
timeout: 30_000
|
|
||||||
)
|
|
||||||
|> Stream.run()
|
|
||||||
end)
|
|
||||||
|
|
||||||
IO.puts("Inserting remote activities take #{to_sec(time)} sec.\n")
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_generate_remote_activity(i, user, users) do
|
|
||||||
actor = Enum.random(users)
|
|
||||||
%{host: host} = URI.parse(actor.ap_id)
|
|
||||||
date = Date.utc_today()
|
|
||||||
datetime = DateTime.utc_now()
|
|
||||||
|
|
||||||
map = %{
|
|
||||||
"actor" => actor.ap_id,
|
|
||||||
"cc" => [actor.follower_address, user.ap_id],
|
|
||||||
"context" => "tag:mastodon.example.org,#{date}:objectId=#{i}:objectType=Conversation",
|
|
||||||
"id" => actor.ap_id <> "/statuses/#{i}/activity",
|
|
||||||
"object" => %{
|
|
||||||
"actor" => actor.ap_id,
|
|
||||||
"atomUri" => actor.ap_id <> "/statuses/#{i}",
|
|
||||||
"attachment" => [],
|
|
||||||
"attributedTo" => actor.ap_id,
|
|
||||||
"bcc" => [],
|
|
||||||
"bto" => [],
|
|
||||||
"cc" => [actor.follower_address, user.ap_id],
|
|
||||||
"content" =>
|
|
||||||
"<p><span class=\"h-card\"><a href=\"" <>
|
|
||||||
user.ap_id <>
|
|
||||||
"\" class=\"u-url mention\">@<span>" <> user.nickname <> "</span></a></span></p>",
|
|
||||||
"context" => "tag:mastodon.example.org,#{date}:objectId=#{i}:objectType=Conversation",
|
|
||||||
"conversation" =>
|
|
||||||
"tag:mastodon.example.org,#{date}:objectId=#{i}:objectType=Conversation",
|
|
||||||
"emoji" => %{},
|
|
||||||
"id" => actor.ap_id <> "/statuses/#{i}",
|
|
||||||
"inReplyTo" => nil,
|
|
||||||
"inReplyToAtomUri" => nil,
|
|
||||||
"published" => datetime,
|
|
||||||
"sensitive" => true,
|
|
||||||
"summary" => "cw",
|
|
||||||
"tag" => [
|
|
||||||
%{
|
|
||||||
"href" => user.ap_id,
|
|
||||||
"name" => "@#{user.nickname}@#{host}",
|
|
||||||
"type" => "Mention"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
|
||||||
"type" => "Note",
|
|
||||||
"url" => "http://#{host}/@#{actor.nickname}/#{i}"
|
|
||||||
},
|
|
||||||
"published" => datetime,
|
|
||||||
"to" => ["https://www.w3.org/ns/activitystreams#Public"],
|
|
||||||
"type" => "Create"
|
|
||||||
}
|
|
||||||
|
|
||||||
Pleroma.Web.ActivityPub.ActivityPub.insert(map, false)
|
|
||||||
end
|
|
||||||
|
|
||||||
def generate_dms(user, users, opts) do
|
|
||||||
IO.puts("Starting generating #{opts[:dms_max]} DMs")
|
|
||||||
{time, _} = :timer.tc(fn -> do_generate_dms(user, users, opts) end)
|
|
||||||
IO.puts("Inserting dms take #{to_sec(time)} sec.\n")
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_generate_dms(user, users, opts) do
|
|
||||||
Task.async_stream(
|
|
||||||
1..opts[:dms_max],
|
|
||||||
fn _ ->
|
|
||||||
do_generate_dm(user, users)
|
|
||||||
end,
|
|
||||||
max_concurrency: 10,
|
|
||||||
timeout: 30_000
|
|
||||||
)
|
|
||||||
|> Stream.run()
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_generate_dm(user, users) do
|
|
||||||
post = %{
|
|
||||||
"status" => "@#{user.nickname} some direct message",
|
|
||||||
"visibility" => "direct"
|
|
||||||
}
|
|
||||||
|
|
||||||
CommonAPI.post(Enum.random(users), post)
|
|
||||||
end
|
|
||||||
|
|
||||||
def generate_long_thread(user, users, opts) do
|
|
||||||
IO.puts("Starting generating long thread with #{opts[:thread_length]} replies")
|
|
||||||
{time, activity} = :timer.tc(fn -> do_generate_long_thread(user, users, opts) end)
|
|
||||||
IO.puts("Inserting long thread replies take #{to_sec(time)} sec.\n")
|
|
||||||
{:ok, activity}
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_generate_long_thread(user, users, opts) do
|
|
||||||
{:ok, %{id: id} = activity} = CommonAPI.post(user, %{"status" => "Start of long thread"})
|
|
||||||
|
|
||||||
Task.async_stream(
|
|
||||||
1..opts[:thread_length],
|
|
||||||
fn _ -> do_generate_thread(users, id) end,
|
|
||||||
max_concurrency: 10,
|
|
||||||
timeout: 30_000
|
|
||||||
)
|
|
||||||
|> Stream.run()
|
|
||||||
|
|
||||||
activity
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_generate_thread(users, activity_id) do
|
|
||||||
CommonAPI.post(Enum.random(users), %{
|
|
||||||
"status" => "reply to main post",
|
|
||||||
"in_reply_to_status_id" => activity_id
|
|
||||||
})
|
|
||||||
end
|
|
||||||
|
|
||||||
def generate_non_visible_message(user, users) do
|
|
||||||
IO.puts("Starting generating 1000 non visible posts")
|
|
||||||
|
|
||||||
{time, _} =
|
|
||||||
:timer.tc(fn ->
|
|
||||||
do_generate_non_visible_posts(user, users)
|
|
||||||
end)
|
|
||||||
|
|
||||||
IO.puts("Inserting non visible posts take #{to_sec(time)} sec.\n")
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_generate_non_visible_posts(user, users) do
|
|
||||||
[not_friend | users] = users
|
|
||||||
|
|
||||||
make_friends(user, users)
|
|
||||||
|
|
||||||
Task.async_stream(1..1000, fn _ -> do_generate_non_visible_post(not_friend, users) end,
|
|
||||||
max_concurrency: 10,
|
|
||||||
timeout: 30_000
|
|
||||||
)
|
|
||||||
|> Stream.run()
|
|
||||||
end
|
|
||||||
|
|
||||||
defp make_friends(_user, []), do: nil
|
|
||||||
|
|
||||||
defp make_friends(user, [friend | users]) do
|
|
||||||
{:ok, _} = User.follow(user, friend)
|
|
||||||
{:ok, _} = User.follow(friend, user)
|
|
||||||
make_friends(user, users)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_generate_non_visible_post(not_friend, users) do
|
|
||||||
post = %{
|
|
||||||
"status" => "some non visible post",
|
|
||||||
"visibility" => "private"
|
|
||||||
}
|
|
||||||
|
|
||||||
{:ok, activity} = CommonAPI.post(not_friend, post)
|
|
||||||
|
|
||||||
thread_length = Enum.random([2, 3, 4, 5])
|
|
||||||
|
|
||||||
Enum.each(1..thread_length, fn _ ->
|
|
||||||
user = Enum.random(users)
|
|
||||||
|
|
||||||
post = %{
|
|
||||||
"status" => "@#{not_friend.nickname} reply to non visible post",
|
|
||||||
"in_reply_to_status_id" => activity.id,
|
|
||||||
"visibility" => "private"
|
|
||||||
}
|
|
||||||
|
|
||||||
CommonAPI.post(user, post)
|
|
||||||
end)
|
|
||||||
end
|
|
||||||
end
|
|
|
@ -1,11 +1,14 @@
|
||||||
defmodule Pleroma.LoadTesting.Helper do
|
defmodule Pleroma.LoadTesting.Helper do
|
||||||
defmacro __using__(_) do
|
alias Ecto.Adapters.SQL
|
||||||
quote do
|
|
||||||
import Ecto.Query
|
|
||||||
alias Pleroma.Repo
|
alias Pleroma.Repo
|
||||||
alias Pleroma.User
|
|
||||||
|
|
||||||
defp to_sec(microseconds), do: microseconds / 1_000_000
|
def to_sec(microseconds), do: microseconds / 1_000_000
|
||||||
end
|
|
||||||
|
def clean_tables do
|
||||||
|
IO.puts("Deleting old data...\n")
|
||||||
|
SQL.query!(Repo, "TRUNCATE users CASCADE;")
|
||||||
|
SQL.query!(Repo, "TRUNCATE activities CASCADE;")
|
||||||
|
SQL.query!(Repo, "TRUNCATE objects CASCADE;")
|
||||||
|
SQL.query!(Repo, "TRUNCATE oban_jobs CASCADE;")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
169 benchmarks/load_testing/users.ex (new file)
|
@ -0,0 +1,169 @@
|
||||||
|
defmodule Pleroma.LoadTesting.Users do
|
||||||
|
@moduledoc """
|
||||||
|
Module for generating users with friends.
|
||||||
|
"""
|
||||||
|
import Ecto.Query
|
||||||
|
import Pleroma.LoadTesting.Helper, only: [to_sec: 1]
|
||||||
|
|
||||||
|
alias Pleroma.Repo
|
||||||
|
alias Pleroma.User
|
||||||
|
alias Pleroma.User.Query
|
||||||
|
|
||||||
|
@defaults [
|
||||||
|
users: 20_000,
|
||||||
|
friends: 100
|
||||||
|
]
|
||||||
|
|
||||||
|
@max_concurrency 10
|
||||||
|
|
||||||
|
@spec generate(keyword()) :: User.t()
|
||||||
|
def generate(opts \\ []) do
|
||||||
|
opts = Keyword.merge(@defaults, opts)
|
||||||
|
|
||||||
|
generate_users(opts[:users])
|
||||||
|
|
||||||
|
main_user =
|
||||||
|
Repo.one(from(u in User, where: u.local == true, order_by: fragment("RANDOM()"), limit: 1))
|
||||||
|
|
||||||
|
make_friends(main_user, opts[:friends])
|
||||||
|
|
||||||
|
Repo.get(User, main_user.id)
|
||||||
|
end
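# Editor's note (added for clarity, not part of the change): typical usage is
# Pleroma.LoadTesting.Users.generate(users: 1_000, friends: 50). This inserts the
# users, picks a random local user as the "main" account, makes the requested
# number of local and remote friends for it, and returns the reloaded main user.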
|
||||||
|
|
||||||
|
def generate_users(max) do
|
||||||
|
IO.puts("Starting generating #{max} users...")
|
||||||
|
|
||||||
|
{time, users} =
|
||||||
|
:timer.tc(fn ->
|
||||||
|
Task.async_stream(
|
||||||
|
1..max,
|
||||||
|
&generate_user(&1),
|
||||||
|
max_concurrency: @max_concurrency,
|
||||||
|
timeout: 30_000
|
||||||
|
)
|
||||||
|
|> Enum.to_list()
|
||||||
|
end)
|
||||||
|
|
||||||
|
IO.puts("Generating users took #{to_sec(time)} sec.\n")
|
||||||
|
users
|
||||||
|
end
|
||||||
|
|
||||||
|
defp generate_user(i) do
|
||||||
|
remote = Enum.random([true, false])
|
||||||
|
|
||||||
|
%User{
|
||||||
|
name: "Test テスト User #{i}",
|
||||||
|
email: "user#{i}@example.com",
|
||||||
|
nickname: "nick#{i}",
|
||||||
|
password_hash: Comeonin.Pbkdf2.hashpwsalt("test"),
|
||||||
|
bio: "Tester Number #{i}",
|
||||||
|
local: !remote
|
||||||
|
}
|
||||||
|
|> user_urls()
|
||||||
|
|> Repo.insert!()
|
||||||
|
end
|
||||||
|
|
||||||
|
defp user_urls(%{local: true} = user) do
|
||||||
|
urls = %{
|
||||||
|
ap_id: User.ap_id(user),
|
||||||
|
follower_address: User.ap_followers(user),
|
||||||
|
following_address: User.ap_following(user)
|
||||||
|
}
|
||||||
|
|
||||||
|
Map.merge(user, urls)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp user_urls(%{local: false} = user) do
|
||||||
|
base_domain = Enum.random(["domain1.com", "domain2.com", "domain3.com"])
|
||||||
|
|
||||||
|
ap_id = "https://#{base_domain}/users/#{user.nickname}"
|
||||||
|
|
||||||
|
urls = %{
|
||||||
|
ap_id: ap_id,
|
||||||
|
follower_address: ap_id <> "/followers",
|
||||||
|
following_address: ap_id <> "/following"
|
||||||
|
}
|
||||||
|
|
||||||
|
Map.merge(user, urls)
|
||||||
|
end
|
||||||
|
|
||||||
|
def make_friends(main_user, max) when is_integer(max) do
|
||||||
|
IO.puts("Starting making friends for #{max} users...")
|
||||||
|
|
||||||
|
{time, _} =
|
||||||
|
:timer.tc(fn ->
|
||||||
|
number_of_users =
|
||||||
|
(max / 2)
|
||||||
|
|> Kernel.trunc()
|
||||||
|
|
||||||
|
main_user
|
||||||
|
|> get_users(%{limit: number_of_users, local: :local})
|
||||||
|
|> run_stream(main_user)
|
||||||
|
|
||||||
|
main_user
|
||||||
|
|> get_users(%{limit: number_of_users, local: :external})
|
||||||
|
|> run_stream(main_user)
|
||||||
|
end)
|
||||||
|
|
||||||
|
IO.puts("Making friends took #{to_sec(time)} sec.\n")
|
||||||
|
end
|
||||||
|
|
||||||
|
def make_friends(%User{} = main_user, %User{} = user) do
|
||||||
|
{:ok, _} = User.follow(main_user, user)
|
||||||
|
{:ok, _} = User.follow(user, main_user)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec get_users(User.t(), keyword()) :: [User.t()]
|
||||||
|
def get_users(user, opts) do
|
||||||
|
criteria = %{limit: opts[:limit]}
|
||||||
|
|
||||||
|
criteria =
|
||||||
|
if opts[:local] do
|
||||||
|
Map.put(criteria, opts[:local], true)
|
||||||
|
else
|
||||||
|
criteria
|
||||||
|
end
|
||||||
|
|
||||||
|
criteria =
|
||||||
|
if opts[:friends?] do
|
||||||
|
Map.put(criteria, :friends, user)
|
||||||
|
else
|
||||||
|
criteria
|
||||||
|
end
|
||||||
|
|
||||||
|
query =
|
||||||
|
criteria
|
||||||
|
|> Query.build()
|
||||||
|
|> random_without_user(user)
|
||||||
|
|
||||||
|
query =
|
||||||
|
if opts[:friends?] == false do
|
||||||
|
friends_ids =
|
||||||
|
%{friends: user}
|
||||||
|
|> Query.build()
|
||||||
|
|> Repo.all()
|
||||||
|
|> Enum.map(& &1.id)
|
||||||
|
|
||||||
|
from(u in query, where: u.id not in ^friends_ids)
|
||||||
|
else
|
||||||
|
query
|
||||||
|
end
|
||||||
|
|
||||||
|
Repo.all(query)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp random_without_user(query, user) do
|
||||||
|
from(u in query,
|
||||||
|
where: u.id != ^user.id,
|
||||||
|
order_by: fragment("RANDOM()")
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp run_stream(users, main_user) do
|
||||||
|
Task.async_stream(users, &make_friends(main_user, &1),
|
||||||
|
max_concurrency: @max_concurrency,
|
||||||
|
timeout: 30_000
|
||||||
|
)
|
||||||
|
|> Stream.run()
|
||||||
|
end
|
||||||
|
end
|
|
@ -1,9 +1,12 @@
|
||||||
defmodule Mix.Tasks.Pleroma.Benchmarks.Tags do
|
defmodule Mix.Tasks.Pleroma.Benchmarks.Tags do
|
||||||
use Mix.Task
|
use Mix.Task
|
||||||
alias Pleroma.Repo
|
|
||||||
alias Pleroma.LoadTesting.Generator
|
import Pleroma.LoadTesting.Helper, only: [clean_tables: 0]
|
||||||
import Ecto.Query
|
import Ecto.Query
|
||||||
|
|
||||||
|
alias Pleroma.Repo
|
||||||
|
alias Pleroma.Web.MastodonAPI.TimelineController
|
||||||
|
|
||||||
def run(_args) do
|
def run(_args) do
|
||||||
Mix.Pleroma.start_pleroma()
|
Mix.Pleroma.start_pleroma()
|
||||||
activities_count = Repo.aggregate(from(a in Pleroma.Activity), :count, :id)
|
activities_count = Repo.aggregate(from(a in Pleroma.Activity), :count, :id)
|
||||||
|
@ -11,8 +14,8 @@ def run(_args) do
|
||||||
if activities_count == 0 do
|
if activities_count == 0 do
|
||||||
IO.puts("Did not find any activities, cleaning and generating")
|
IO.puts("Did not find any activities, cleaning and generating")
|
||||||
clean_tables()
|
clean_tables()
|
||||||
Generator.generate_users(users_max: 10)
|
Pleroma.LoadTesting.Users.generate_users(10)
|
||||||
Generator.generate_tagged_activities()
|
Pleroma.LoadTesting.Activities.generate_tagged_activities()
|
||||||
else
|
else
|
||||||
IO.puts("Found #{activities_count} activities, won't generate new ones")
|
IO.puts("Found #{activities_count} activities, won't generate new ones")
|
||||||
end
|
end
|
||||||
|
@ -34,7 +37,7 @@ def run(_args) do
|
||||||
Benchee.run(
|
Benchee.run(
|
||||||
%{
|
%{
|
||||||
"Hashtag fetching, any" => fn tags ->
|
"Hashtag fetching, any" => fn tags ->
|
||||||
Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
|
TimelineController.hashtag_fetching(
|
||||||
%{
|
%{
|
||||||
"any" => tags
|
"any" => tags
|
||||||
},
|
},
|
||||||
|
@ -44,7 +47,7 @@ def run(_args) do
|
||||||
end,
|
end,
|
||||||
# Will always return zero results because no overlapping hashtags are generated.
|
# Will always return zero results because no overlapping hashtags are generated.
|
||||||
"Hashtag fetching, all" => fn tags ->
|
"Hashtag fetching, all" => fn tags ->
|
||||||
Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
|
TimelineController.hashtag_fetching(
|
||||||
%{
|
%{
|
||||||
"all" => tags
|
"all" => tags
|
||||||
},
|
},
|
||||||
|
@ -64,7 +67,7 @@ def run(_args) do
|
||||||
Benchee.run(
|
Benchee.run(
|
||||||
%{
|
%{
|
||||||
"Hashtag fetching" => fn tag ->
|
"Hashtag fetching" => fn tag ->
|
||||||
Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
|
TimelineController.hashtag_fetching(
|
||||||
%{
|
%{
|
||||||
"tag" => tag
|
"tag" => tag
|
||||||
},
|
},
|
||||||
|
@ -77,11 +80,4 @@ def run(_args) do
|
||||||
time: 5
|
time: 5
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp clean_tables do
|
|
||||||
IO.puts("Deleting old data...\n")
|
|
||||||
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE users CASCADE;")
|
|
||||||
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE activities CASCADE;")
|
|
||||||
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE objects CASCADE;")
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,9 +1,10 @@
|
||||||
defmodule Mix.Tasks.Pleroma.Benchmarks.Timelines do
|
defmodule Mix.Tasks.Pleroma.Benchmarks.Timelines do
|
||||||
use Mix.Task
|
use Mix.Task
|
||||||
alias Pleroma.Repo
|
|
||||||
alias Pleroma.LoadTesting.Generator
|
import Pleroma.LoadTesting.Helper, only: [clean_tables: 0]
|
||||||
|
|
||||||
alias Pleroma.Web.CommonAPI
|
alias Pleroma.Web.CommonAPI
|
||||||
|
alias Plug.Conn
|
||||||
|
|
||||||
def run(_args) do
|
def run(_args) do
|
||||||
Mix.Pleroma.start_pleroma()
|
Mix.Pleroma.start_pleroma()
|
||||||
|
@ -11,7 +12,7 @@ def run(_args) do
|
||||||
# Cleaning tables
|
# Cleaning tables
|
||||||
clean_tables()
|
clean_tables()
|
||||||
|
|
||||||
[{:ok, user} | users] = Generator.generate_users(users_max: 1000)
|
[{:ok, user} | users] = Pleroma.LoadTesting.Users.generate_users(1000)
|
||||||
|
|
||||||
# Let the user make 100 posts
|
# Let the user make 100 posts
|
||||||
|
|
||||||
|
@ -38,8 +39,8 @@ def run(_args) do
|
||||||
"user timeline, no followers" => fn reading_user ->
|
"user timeline, no followers" => fn reading_user ->
|
||||||
conn =
|
conn =
|
||||||
Phoenix.ConnTest.build_conn()
|
Phoenix.ConnTest.build_conn()
|
||||||
|> Plug.Conn.assign(:user, reading_user)
|
|> Conn.assign(:user, reading_user)
|
||||||
|> Plug.Conn.assign(:skip_link_headers, true)
|
|> Conn.assign(:skip_link_headers, true)
|
||||||
|
|
||||||
Pleroma.Web.MastodonAPI.AccountController.statuses(conn, %{"id" => user.id})
|
Pleroma.Web.MastodonAPI.AccountController.statuses(conn, %{"id" => user.id})
|
||||||
end
|
end
|
||||||
|
@ -56,8 +57,8 @@ def run(_args) do
|
||||||
"user timeline, all following" => fn reading_user ->
|
"user timeline, all following" => fn reading_user ->
|
||||||
conn =
|
conn =
|
||||||
Phoenix.ConnTest.build_conn()
|
Phoenix.ConnTest.build_conn()
|
||||||
|> Plug.Conn.assign(:user, reading_user)
|
|> Conn.assign(:user, reading_user)
|
||||||
|> Plug.Conn.assign(:skip_link_headers, true)
|
|> Conn.assign(:skip_link_headers, true)
|
||||||
|
|
||||||
Pleroma.Web.MastodonAPI.AccountController.statuses(conn, %{"id" => user.id})
|
Pleroma.Web.MastodonAPI.AccountController.statuses(conn, %{"id" => user.id})
|
||||||
end
|
end
|
||||||
|
@ -66,11 +67,4 @@ def run(_args) do
|
||||||
time: 60
|
time: 60
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
defp clean_tables do
|
|
||||||
IO.puts("Deleting old data...\n")
|
|
||||||
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE users CASCADE;")
|
|
||||||
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE activities CASCADE;")
|
|
||||||
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE objects CASCADE;")
|
|
||||||
end
|
|
||||||
end
|
end
|
||||||
|
|
|
@ -1,114 +1,55 @@
|
||||||
defmodule Mix.Tasks.Pleroma.LoadTesting do
|
defmodule Mix.Tasks.Pleroma.LoadTesting do
|
||||||
use Mix.Task
|
use Mix.Task
|
||||||
use Pleroma.LoadTesting.Helper
|
import Ecto.Query
|
||||||
import Mix.Pleroma
|
import Pleroma.LoadTesting.Helper, only: [clean_tables: 0]
|
||||||
import Pleroma.LoadTesting.Generator
|
|
||||||
import Pleroma.LoadTesting.Fetcher
|
alias Pleroma.Repo
|
||||||
|
alias Pleroma.User
|
||||||
|
|
||||||
@shortdoc "Factory for generation data"
|
@shortdoc "Factory for generation data"
|
||||||
@moduledoc """
|
@moduledoc """
|
||||||
Generates data like:
|
Generates data like:
|
||||||
- local/remote users
|
- local/remote users
|
||||||
- local/remote activities with notifications
|
- local/remote activities with different visibility:
|
||||||
- direct messages
|
- simple activities
|
||||||
- long thread
|
- with emoji
|
||||||
- non visible posts
|
- with mentions
|
||||||
|
- hellthreads
|
||||||
|
- with attachments
|
||||||
|
- with tags
|
||||||
|
- likes
|
||||||
|
- reblogs
|
||||||
|
- simple threads
|
||||||
|
- long threads
|
||||||
|
|
||||||
## Generate data
|
## Generate data
|
||||||
MIX_ENV=benchmark mix pleroma.load_testing --users 20000 --dms 20000 --thread_length 2000
|
MIX_ENV=benchmark mix pleroma.load_testing --users 20000 --friends 1000 --iterations 170 --friends_used 20 --non_friends_used 20
|
||||||
MIX_ENV=benchmark mix pleroma.load_testing -u 20000 -d 20000 -t 2000
|
MIX_ENV=benchmark mix pleroma.load_testing -u 20000 -f 1000 -i 170 -fu 20 -nfu 20
|
||||||
|
|
||||||
Options:
|
Options:
|
||||||
- `--users NUMBER` - number of users to generate. Defaults to: 20000. Alias: `-u`
|
- `--users NUMBER` - number of users to generate. Defaults to: 20000. Alias: `-u`
|
||||||
- `--dms NUMBER` - number of direct messages to generate. Defaults to: 20000. Alias `-d`
|
- `--friends NUMBER` - number of friends for main user. Defaults to: 1000. Alias: `-f`
|
||||||
- `--thread_length` - number of messages in thread. Defaults to: 2000. ALias `-t`
|
- `--iterations NUMBER` - number of iterations used to generate activities. Each iteration inserts about 120+ activities with different visibility, actors and types into the database. Defaults to: 170. Alias: `-i`
|
||||||
|
- `--friends_used NUMBER` - number of main user friends used in activity generation. Defaults to: 20. Alias: `-fu`
|
||||||
|
- `--non_friends_used NUMBER` - number of non-friends used in activity generation. Defaults to: 20. Alias: `-nfu`
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@aliases [u: :users, d: :dms, t: :thread_length]
|
@aliases [u: :users, f: :friends, i: :iterations, fu: :friends_used, nfu: :non_friends_used]
|
||||||
@switches [
|
@switches [
|
||||||
users: :integer,
|
users: :integer,
|
||||||
dms: :integer,
|
friends: :integer,
|
||||||
thread_length: :integer
|
iterations: :integer,
|
||||||
|
friends_used: :integer,
|
||||||
|
non_friends_used: :integer
|
||||||
]
|
]
|
||||||
@users_default 20_000
|
|
||||||
@dms_default 1_000
|
|
||||||
@thread_length_default 2_000
|
|
||||||
|
|
||||||
def run(args) do
|
def run(args) do
|
||||||
start_pleroma()
|
Mix.Pleroma.start_pleroma()
|
||||||
Pleroma.Config.put([:instance, :skip_thread_containment], true)
|
clean_tables()
|
||||||
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
|
{opts, _} = OptionParser.parse!(args, strict: @switches, aliases: @aliases)
|
||||||
|
|
||||||
users_max = Keyword.get(opts, :users, @users_default)
|
user = Pleroma.LoadTesting.Users.generate(opts)
|
||||||
dms_max = Keyword.get(opts, :dms, @dms_default)
|
Pleroma.LoadTesting.Activities.generate(user, opts)
|
||||||
thread_length = Keyword.get(opts, :thread_length, @thread_length_default)
|
|
||||||
|
|
||||||
clean_tables()
|
|
||||||
|
|
||||||
opts =
|
|
||||||
Keyword.put(opts, :users_max, users_max)
|
|
||||||
|> Keyword.put(:dms_max, dms_max)
|
|
||||||
|> Keyword.put(:thread_length, thread_length)
|
|
||||||
|
|
||||||
generate_users(opts)
|
|
||||||
|
|
||||||
# main user for queries
|
|
||||||
IO.puts("Fetching local main user...")
|
|
||||||
|
|
||||||
{time, user} =
|
|
||||||
:timer.tc(fn ->
|
|
||||||
Repo.one(
|
|
||||||
from(u in User, where: u.local == true, order_by: fragment("RANDOM()"), limit: 1)
|
|
||||||
)
|
|
||||||
end)
|
|
||||||
|
|
||||||
IO.puts("Fetching main user take #{to_sec(time)} sec.\n")
|
|
||||||
|
|
||||||
IO.puts("Fetching local users...")
|
|
||||||
|
|
||||||
{time, users} =
|
|
||||||
:timer.tc(fn ->
|
|
||||||
Repo.all(
|
|
||||||
from(u in User,
|
|
||||||
where: u.id != ^user.id,
|
|
||||||
where: u.local == true,
|
|
||||||
order_by: fragment("RANDOM()"),
|
|
||||||
limit: 10
|
|
||||||
)
|
|
||||||
)
|
|
||||||
end)
|
|
||||||
|
|
||||||
IO.puts("Fetching local users take #{to_sec(time)} sec.\n")
|
|
||||||
|
|
||||||
IO.puts("Fetching remote users...")
|
|
||||||
|
|
||||||
{time, remote_users} =
|
|
||||||
:timer.tc(fn ->
|
|
||||||
Repo.all(
|
|
||||||
from(u in User,
|
|
||||||
where: u.id != ^user.id,
|
|
||||||
where: u.local == false,
|
|
||||||
order_by: fragment("RANDOM()"),
|
|
||||||
limit: 10
|
|
||||||
)
|
|
||||||
)
|
|
||||||
end)
|
|
||||||
|
|
||||||
IO.puts("Fetching remote users take #{to_sec(time)} sec.\n")
|
|
||||||
|
|
||||||
generate_activities(user, users)
|
|
||||||
|
|
||||||
generate_remote_activities(user, remote_users)
|
|
||||||
|
|
||||||
generate_like_activities(
|
|
||||||
user, Pleroma.Repo.all(Pleroma.Activity.Queries.by_type("Create"))
|
|
||||||
)
|
|
||||||
|
|
||||||
generate_dms(user, users, opts)
|
|
||||||
|
|
||||||
{:ok, activity} = generate_long_thread(user, users, opts)
|
|
||||||
|
|
||||||
generate_non_visible_message(user, users)
|
|
||||||
|
|
||||||
IO.puts("Users in DB: #{Repo.aggregate(from(u in User), :count, :id)}")
|
IO.puts("Users in DB: #{Repo.aggregate(from(u in User), :count, :id)}")
|
||||||
|
|
||||||
|
@ -120,19 +61,6 @@ def run(args) do
|
||||||
"Notifications in DB: #{Repo.aggregate(from(n in Pleroma.Notification), :count, :id)}"
|
"Notifications in DB: #{Repo.aggregate(from(n in Pleroma.Notification), :count, :id)}"
|
||||||
)
|
)
|
||||||
|
|
||||||
fetch_user(user)
|
Pleroma.LoadTesting.Fetcher.run_benchmarks(user)
|
||||||
query_timelines(user)
|
|
||||||
query_notifications(user)
|
|
||||||
query_dms(user)
|
|
||||||
query_long_thread(user, activity)
|
|
||||||
Pleroma.Config.put([:instance, :skip_thread_containment], false)
|
|
||||||
query_timelines(user)
|
|
||||||
end
|
|
||||||
|
|
||||||
defp clean_tables do
|
|
||||||
IO.puts("Deleting old data...\n")
|
|
||||||
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE users CASCADE;")
|
|
||||||
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE activities CASCADE;")
|
|
||||||
Ecto.Adapters.SQL.query!(Repo, "TRUNCATE objects CASCADE;")
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -39,7 +39,7 @@
|
||||||
adapter: Ecto.Adapters.Postgres,
|
adapter: Ecto.Adapters.Postgres,
|
||||||
username: "postgres",
|
username: "postgres",
|
||||||
password: "postgres",
|
password: "postgres",
|
||||||
database: "pleroma_test",
|
database: "pleroma_benchmark",
|
||||||
hostname: System.get_env("DB_HOST") || "localhost",
|
hostname: System.get_env("DB_HOST") || "localhost",
|
||||||
pool_size: 10
|
pool_size: 10
|
||||||
|
|
||||||
|
|
|
@ -58,20 +58,6 @@
|
||||||
|
|
||||||
config :pleroma, Pleroma.Captcha.Kocaptcha, endpoint: "https://captcha.kotobank.ch"
|
config :pleroma, Pleroma.Captcha.Kocaptcha, endpoint: "https://captcha.kotobank.ch"
|
||||||
|
|
||||||
config :pleroma, :hackney_pools,
|
|
||||||
federation: [
|
|
||||||
max_connections: 50,
|
|
||||||
timeout: 150_000
|
|
||||||
],
|
|
||||||
media: [
|
|
||||||
max_connections: 50,
|
|
||||||
timeout: 150_000
|
|
||||||
],
|
|
||||||
upload: [
|
|
||||||
max_connections: 25,
|
|
||||||
timeout: 300_000
|
|
||||||
]
|
|
||||||
|
|
||||||
# Upload configuration
|
# Upload configuration
|
||||||
config :pleroma, Pleroma.Upload,
|
config :pleroma, Pleroma.Upload,
|
||||||
uploader: Pleroma.Uploaders.Local,
|
uploader: Pleroma.Uploaders.Local,
|
||||||
|
@ -184,21 +170,13 @@
|
||||||
"application/ld+json" => ["activity+json"]
|
"application/ld+json" => ["activity+json"]
|
||||||
}
|
}
|
||||||
|
|
||||||
config :tesla, adapter: Tesla.Adapter.Hackney
|
config :tesla, adapter: Tesla.Adapter.Gun
|
||||||
|
|
||||||
# Configures http settings, upstream proxy etc.
|
# Configures http settings, upstream proxy etc.
|
||||||
config :pleroma, :http,
|
config :pleroma, :http,
|
||||||
proxy_url: nil,
|
proxy_url: nil,
|
||||||
send_user_agent: true,
|
send_user_agent: true,
|
||||||
user_agent: :default,
|
user_agent: :default,
|
||||||
adapter: [
|
adapter: []
|
||||||
ssl_options: [
|
|
||||||
# Workaround for remote server certificate chain issues
|
|
||||||
partial_chain: &:hackney_connect.partial_chain/1,
|
|
||||||
# We don't support TLS v1.3 yet
|
|
||||||
versions: [:tlsv1, :"tlsv1.1", :"tlsv1.2"]
|
|
||||||
]
|
|
||||||
]
|
|
||||||
|
|
||||||
config :pleroma, :instance,
|
config :pleroma, :instance,
|
||||||
name: "Pleroma",
|
name: "Pleroma",
|
||||||
|
@ -625,6 +603,49 @@
|
||||||
parameters: [gin_fuzzy_search_limit: "500"],
|
parameters: [gin_fuzzy_search_limit: "500"],
|
||||||
prepare: :unnamed
|
prepare: :unnamed
|
||||||
|
|
||||||
|
config :pleroma, :connections_pool,
|
||||||
|
checkin_timeout: 250,
|
||||||
|
max_connections: 250,
|
||||||
|
retry: 1,
|
||||||
|
retry_timeout: 1000,
|
||||||
|
await_up_timeout: 5_000
|
||||||
|
|
||||||
|
config :pleroma, :pools,
|
||||||
|
federation: [
|
||||||
|
size: 50,
|
||||||
|
max_overflow: 10,
|
||||||
|
timeout: 150_000
|
||||||
|
],
|
||||||
|
media: [
|
||||||
|
size: 50,
|
||||||
|
max_overflow: 10,
|
||||||
|
timeout: 150_000
|
||||||
|
],
|
||||||
|
upload: [
|
||||||
|
size: 25,
|
||||||
|
max_overflow: 5,
|
||||||
|
timeout: 300_000
|
||||||
|
],
|
||||||
|
default: [
|
||||||
|
size: 10,
|
||||||
|
max_overflow: 2,
|
||||||
|
timeout: 10_000
|
||||||
|
]
|
||||||
|
|
||||||
|
config :pleroma, :hackney_pools,
|
||||||
|
federation: [
|
||||||
|
max_connections: 50,
|
||||||
|
timeout: 150_000
|
||||||
|
],
|
||||||
|
media: [
|
||||||
|
max_connections: 50,
|
||||||
|
timeout: 150_000
|
||||||
|
],
|
||||||
|
upload: [
|
||||||
|
max_connections: 25,
|
||||||
|
timeout: 300_000
|
||||||
|
]
|
||||||
|
|
||||||
config :pleroma, :restrict_unauthenticated,
|
config :pleroma, :restrict_unauthenticated,
|
||||||
timelines: %{local: false, federated: false},
|
timelines: %{local: false, federated: false},
|
||||||
profiles: %{local: false, remote: false},
|
profiles: %{local: false, remote: false},
|
||||||
|
|
|
@ -2916,6 +2916,219 @@
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
%{
|
||||||
|
group: :pleroma,
|
||||||
|
key: :connections_pool,
|
||||||
|
type: :group,
|
||||||
|
description: "Advanced settings for `gun` connections pool",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :checkin_timeout,
|
||||||
|
type: :integer,
|
||||||
|
description: "Timeout to checkin connection from pool. Default: 250ms.",
|
||||||
|
suggestions: [250]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :max_connections,
|
||||||
|
type: :integer,
|
||||||
|
description: "Maximum number of connections in the pool. Default: 250 connections.",
|
||||||
|
suggestions: [250]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :retry,
|
||||||
|
type: :integer,
|
||||||
|
description:
|
||||||
|
"Number of retries, while `gun` will try to reconnect if connection goes down. Default: 1.",
|
||||||
|
suggestions: [1]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :retry_timeout,
|
||||||
|
type: :integer,
|
||||||
|
description:
|
||||||
|
"Time between retries when `gun` will try to reconnect in milliseconds. Default: 1000ms.",
|
||||||
|
suggestions: [1000]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :await_up_timeout,
|
||||||
|
type: :integer,
|
||||||
|
description: "Timeout while `gun` will wait until connection is up. Default: 5000ms.",
|
||||||
|
suggestions: [5000]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
group: :pleroma,
|
||||||
|
key: :pools,
|
||||||
|
type: :group,
|
||||||
|
description: "Advanced settings for `gun` workers pools",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :federation,
|
||||||
|
type: :keyword,
|
||||||
|
description: "Settings for federation pool.",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :size,
|
||||||
|
type: :integer,
|
||||||
|
description: "Number workers in the pool.",
|
||||||
|
suggestions: [50]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :max_overflow,
|
||||||
|
type: :integer,
|
||||||
|
description: "Number of additional workers if pool is under load.",
|
||||||
|
suggestions: [10]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :timeout,
|
||||||
|
type: :integer,
|
||||||
|
description: "Timeout while `gun` will wait for response.",
|
||||||
|
suggestions: [150_000]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :media,
|
||||||
|
type: :keyword,
|
||||||
|
description: "Settings for media pool.",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :size,
|
||||||
|
type: :integer,
|
||||||
|
description: "Number workers in the pool.",
|
||||||
|
suggestions: [50]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :max_overflow,
|
||||||
|
type: :integer,
|
||||||
|
description: "Number of additional workers if pool is under load.",
|
||||||
|
suggestions: [10]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :timeout,
|
||||||
|
type: :integer,
|
||||||
|
description: "Timeout while `gun` will wait for response.",
|
||||||
|
suggestions: [150_000]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :upload,
|
||||||
|
type: :keyword,
|
||||||
|
description: "Settings for upload pool.",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :size,
|
||||||
|
type: :integer,
|
||||||
|
description: "Number workers in the pool.",
|
||||||
|
suggestions: [25]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :max_overflow,
|
||||||
|
type: :integer,
|
||||||
|
description: "Number of additional workers if pool is under load.",
|
||||||
|
suggestions: [5]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :timeout,
|
||||||
|
type: :integer,
|
||||||
|
description: "Timeout while `gun` will wait for response.",
|
||||||
|
suggestions: [300_000]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :default,
|
||||||
|
type: :keyword,
|
||||||
|
description: "Settings for default pool.",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :size,
|
||||||
|
type: :integer,
|
||||||
|
description: "Number workers in the pool.",
|
||||||
|
suggestions: [10]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :max_overflow,
|
||||||
|
type: :integer,
|
||||||
|
description: "Number of additional workers if pool is under load.",
|
||||||
|
suggestions: [2]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :timeout,
|
||||||
|
type: :integer,
|
||||||
|
description: "Timeout while `gun` will wait for response.",
|
||||||
|
suggestions: [10_000]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
group: :pleroma,
|
||||||
|
key: :hackney_pools,
|
||||||
|
type: :group,
|
||||||
|
description: "Advanced settings for `hackney` connections pools",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :federation,
|
||||||
|
type: :keyword,
|
||||||
|
description: "Settings for federation pool.",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :max_connections,
|
||||||
|
type: :integer,
|
||||||
|
description: "Number workers in the pool.",
|
||||||
|
suggestions: [50]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :timeout,
|
||||||
|
type: :integer,
|
||||||
|
description: "Timeout while `hackney` will wait for response.",
|
||||||
|
suggestions: [150_000]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :media,
|
||||||
|
type: :keyword,
|
||||||
|
description: "Settings for media pool.",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :max_connections,
|
||||||
|
type: :integer,
|
||||||
|
description: "Number workers in the pool.",
|
||||||
|
suggestions: [50]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :timeout,
|
||||||
|
type: :integer,
|
||||||
|
description: "Timeout while `hackney` will wait for response.",
|
||||||
|
suggestions: [150_000]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :upload,
|
||||||
|
type: :keyword,
|
||||||
|
description: "Settings for upload pool.",
|
||||||
|
children: [
|
||||||
|
%{
|
||||||
|
key: :max_connections,
|
||||||
|
type: :integer,
|
||||||
|
description: "Number workers in the pool.",
|
||||||
|
suggestions: [25]
|
||||||
|
},
|
||||||
|
%{
|
||||||
|
key: :timeout,
|
||||||
|
type: :integer,
|
||||||
|
description: "Timeout while `hackney` will wait for response.",
|
||||||
|
suggestions: [300_000]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
%{
|
%{
|
||||||
group: :pleroma,
|
group: :pleroma,
|
||||||
key: :restrict_unauthenticated,
|
key: :restrict_unauthenticated,
|
||||||
|
|
|
@ -90,6 +90,8 @@
|
||||||
|
|
||||||
config :pleroma, :modules, runtime_dir: "test/fixtures/modules"
|
config :pleroma, :modules, runtime_dir: "test/fixtures/modules"
|
||||||
|
|
||||||
|
config :pleroma, Pleroma.Gun, Pleroma.GunMock
|
||||||
|
|
||||||
config :pleroma, Pleroma.Emails.NewUsersDigestEmail, enabled: true
|
config :pleroma, Pleroma.Emails.NewUsersDigestEmail, enabled: true
|
||||||
|
|
||||||
config :pleroma, Pleroma.Plugs.RemoteIp, enabled: false
|
config :pleroma, Pleroma.Plugs.RemoteIp, enabled: false
|
||||||
|
|
6 coveralls.json (new file)
|
@ -0,0 +1,6 @@
|
||||||
|
{
|
||||||
|
"skip_files": [
|
||||||
|
"test/support",
|
||||||
|
"lib/mix/tasks/pleroma/benchmark.ex"
|
||||||
|
]
|
||||||
|
}
|
|
@ -841,6 +841,8 @@ Some modifications are necessary to save the config settings correctly:
|
||||||
Most of the settings will be applied in `runtime`, which means that you don't need to restart the instance. But some settings are applied in `compile time` and require a reboot of the instance, such as:
|
Most of the settings will be applied in `runtime`, which means that you don't need to restart the instance. But some settings are applied in `compile time` and require a reboot of the instance, such as:
|
||||||
- all settings inside these keys:
|
- all settings inside these keys:
|
||||||
- `:hackney_pools`
|
- `:hackney_pools`
|
||||||
|
- `:connections_pool`
|
||||||
|
- `:pools`
|
||||||
- `:chat`
|
- `:chat`
|
||||||
- partially settings inside these keys:
|
- partially settings inside these keys:
|
||||||
- `:seconds_valid` in `Pleroma.Captcha`
|
- `:seconds_valid` in `Pleroma.Captcha`
|
||||||
|
|
|
@ -164,6 +164,7 @@ Additional parameters can be added to the JSON body/Form data:
|
||||||
- `actor_type` - the type of this account.
|
- `actor_type` - the type of this account.
|
||||||
|
|
||||||
### Pleroma Settings Store
|
### Pleroma Settings Store
|
||||||
|
|
||||||
Pleroma has a mechanism that allows frontends to save blobs of JSON for each user on the backend. This can be used to save frontend-specific settings for a user that the backend does not need to know about.
|
Pleroma has a mechanism that allows frontends to save blobs of JSON for each user on the backend. This can be used to save frontend-specific settings for a user that the backend does not need to know about.
|
||||||
|
|
||||||
The parameter should have a form of `{frontend_name: {...}}`, with `frontend_name` identifying your type of client, e.g. `pleroma_fe`. It will overwrite everything under this property, but will not overwrite other frontend's settings.
|
The parameter should have a form of `{frontend_name: {...}}`, with `frontend_name` identifying your type of client, e.g. `pleroma_fe`. It will overwrite everything under this property, but will not overwrite other frontend's settings.
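As a hypothetical illustration (the inner keys below are made up, and the blob is shown as an Elixir map for brevity), a payload stored for `pleroma_fe` could look like this; only the `pleroma_fe` subtree is replaced on update:

```elixir
%{
  "pleroma_fe" => %{
    "theme" => "pleroma-dark",
    "collapseNsfw" => true
  }
}
```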
|
||||||
|
@ -172,17 +173,20 @@ This information is returned in the `verify_credentials` endpoint.
|
||||||
|
|
||||||
## Authentication
|
## Authentication
|
||||||
|
|
||||||
*Pleroma supports refreshing tokens.
|
*Pleroma supports refreshing tokens.*
|
||||||
|
|
||||||
`POST /oauth/token`
|
`POST /oauth/token`
|
||||||
Post here request with grant_type=refresh_token to obtain new access token. Returns an access token.
|
|
||||||
|
Post a request with `grant_type=refresh_token` here to obtain a new access token. Returns an access token.
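A rough Elixir sketch of such a refresh request (assuming Tesla and Jason are available; the instance URL and credentials are placeholders, and the parameter names follow standard OAuth2):

```elixir
client =
  Tesla.client([
    {Tesla.Middleware.BaseUrl, "https://pleroma.example.org"},
    Tesla.Middleware.FormUrlencoded
  ])

{:ok, %Tesla.Env{status: 200, body: body}} =
  Tesla.post(client, "/oauth/token", %{
    "grant_type" => "refresh_token",
    "refresh_token" => "OLD_REFRESH_TOKEN",
    "client_id" => "CLIENT_ID",
    "client_secret" => "CLIENT_SECRET"
  })

# The JSON response carries the new access token (and usually a new refresh token).
%{"access_token" => _new_access_token} = Jason.decode!(body)
```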
|
||||||
|
|
||||||
## Account Registration
|
## Account Registration
|
||||||
|
|
||||||
`POST /api/v1/accounts`
|
`POST /api/v1/accounts`
|
||||||
|
|
||||||
Has these additional parameters (which are the same as in the Pleroma API):
|
Has these additional parameters (which are the same as in the Pleroma API):
|
||||||
* `fullname`: optional
|
|
||||||
* `bio`: optional
|
- `fullname`: optional
|
||||||
* `captcha_solution`: optional, contains provider-specific captcha solution,
|
- `bio`: optional
|
||||||
* `captcha_token`: optional, contains provider-specific captcha token
|
- `captcha_solution`: optional, contains provider-specific captcha solution,
|
||||||
* `token`: invite token required when the registerations aren't public.
|
- `captcha_token`: optional, contains provider-specific captcha token
|
||||||
|
- `token`: invite token required when the registrations aren't public.
|
||||||
|
|
|
@ -431,7 +431,7 @@ The status posting endpoint takes an additional parameter, `in_reply_to_conversa
|
||||||
|
|
||||||
# Emoji Reactions
|
# Emoji Reactions
|
||||||
|
|
||||||
Emoji reactions work a lot like favourites do. They make it possible to react to a post with a single emoji character.
|
Emoji reactions work a lot like favourites do. They make it possible to react to a post with a single emoji character. To detect the presence of this feature, you can check for the `pleroma_emoji_reactions` entry in the features list of nodeinfo.
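A minimal detection sketch (assuming Tesla and Jason; the instance URL is a placeholder, and Pleroma lists features under `metadata.features` in its nodeinfo 2.0 document):

```elixir
{:ok, %Tesla.Env{status: 200, body: body}} =
  Tesla.get("https://pleroma.example.org/nodeinfo/2.0.json")

features =
  body
  |> Jason.decode!()
  |> get_in(["metadata", "features"])

# true when the instance supports emoji reactions
"pleroma_emoji_reactions" in (features || [])
```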
|
||||||
|
|
||||||
## `PUT /api/v1/pleroma/statuses/:id/reactions/:emoji`
|
## `PUT /api/v1/pleroma/statuses/:id/reactions/:emoji`
|
||||||
### React to a post with a unicode emoji
|
### React to a post with a unicode emoji
|
||||||
|
|
|
@ -41,6 +41,6 @@ mix pleroma.emoji gen-pack PACK-URL
|
||||||
|
|
||||||
Currently, only .zip archives are recognized as remote pack files and packs are therefore assumed to be zip archives. This command is intended to run interactively and will first ask you some basic questions about the pack, then download the remote file and generate an SHA256 checksum for it, then generate an emoji file list for you.
|
Currently, only .zip archives are recognized as remote pack files and packs are therefore assumed to be zip archives. This command is intended to run interactively and will first ask you some basic questions about the pack, then download the remote file and generate an SHA256 checksum for it, then generate an emoji file list for you.
|
||||||
|
|
||||||
The manifest entry will either be written to a newly created `index.json` file or appended to the existing one, *replacing* the old pack with the same name if it was in the file previously.
|
The manifest entry will either be written to a newly created `pack_name.json` file (pack name is asked in questions) or appended to the existing one, *replacing* the old pack with the same name if it was in the file previously.
|
||||||
|
|
||||||
The file list will be written to the file specified previously, *replacing* that file. You _should_ check that the file list doesn't contain anything you don't need in the pack, that is, anything that is not an emoji (the whole pack is downloaded, but only emoji files are extracted).
|
The file list will be written to the file specified previously, *replacing* that file. You _should_ check that the file list doesn't contain anything you don't need in the pack, that is, anything that is not an emoji (the whole pack is downloaded, but only emoji files are extracted).
|
||||||
|
|
|
@ -369,8 +369,7 @@ Available caches:
|
||||||
* `proxy_url`: an upstream proxy to fetch posts and/or media with, (default: `nil`)
|
* `proxy_url`: an upstream proxy to fetch posts and/or media with, (default: `nil`)
|
||||||
* `send_user_agent`: should we include a user agent with HTTP requests? (default: `true`)
|
* `send_user_agent`: should we include a user agent with HTTP requests? (default: `true`)
|
||||||
* `user_agent`: what user agent should we use? (default: `:default`), must be string or `:default`
|
* `user_agent`: what user agent should we use? (default: `:default`), must be string or `:default`
|
||||||
* `adapter`: array of hackney options
|
* `adapter`: array of adapter options
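Taken together, a minimal `:http` block might look like the sketch below (it mirrors the defaults used elsewhere in this changeset; the adapter options depend on whether gun or hackney is in use):

```elixir
config :pleroma, :http,
  proxy_url: nil,
  send_user_agent: true,
  user_agent: :default,
  adapter: []
```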
|
||||||
|
|
||||||
|
|
||||||
### :hackney_pools
|
### :hackney_pools
|
||||||
|
|
||||||
|
@ -389,6 +388,42 @@ For each pool, the options are:
|
||||||
* `timeout` - retention duration for connections
|
* `timeout` - retention duration for connections
|
||||||
|
|
||||||
|
|
||||||
|
### :connections_pool
|
||||||
|
|
||||||
|
*For `gun` adapter*
|
||||||
|
|
||||||
|
Advanced settings for the connections pool, a pool of open connections that can be reused by the worker pools.
|
||||||
|
|
||||||
|
For big instances it is recommended to increase `max_connections` in `config :pleroma, :connections_pool` to 500-1000 (see the configuration sketch after the option list below).
|
||||||
|
This will increase memory usage, but federation will work faster.
|
||||||
|
|
||||||
|
* `:checkin_timeout` - timeout to checkin connection from pool. Default: 250ms.
|
||||||
|
* `:max_connections` - maximum number of connections in the pool. Default: 250 connections.
|
||||||
|
* `:retry` - number of times `gun` will try to reconnect if the connection goes down. Default: 1.
|
||||||
|
* `:retry_timeout` - time in milliseconds between reconnection attempts. Default: 1000ms.
|
||||||
|
* `:await_up_timeout` - timeout while `gun` will wait until connection is up. Default: 5000ms.
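A configuration sketch for these options (the values mirror the defaults added in this changeset, with `max_connections` raised as suggested above; treat the numbers as a starting point, not a recommendation):

```elixir
config :pleroma, :connections_pool,
  checkin_timeout: 250,
  max_connections: 500,
  retry: 1,
  retry_timeout: 1000,
  await_up_timeout: 5_000
```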
|
||||||
|
|
||||||
|
### :pools
|
||||||
|
|
||||||
|
*For `gun` adapter*
|
||||||
|
|
||||||
|
Advanced settings for the worker pools (a configuration sketch follows the lists below).
|
||||||
|
|
||||||
|
There are four pools used:
|
||||||
|
|
||||||
|
* `:federation` for the federation jobs.
|
||||||
|
You may want this pool's `size` to be at least equal to the number of federator jobs plus retry queue jobs.
|
||||||
|
* `:media` for rich media, media proxy
|
||||||
|
* `:upload` for uploaded media (if using a remote uploader and `proxy_remote: true`)
|
||||||
|
* `:default` for other requests
|
||||||
|
|
||||||
|
For each pool, the options are:
|
||||||
|
|
||||||
|
* `:size` - how many workers the pool can hold
|
||||||
|
* `:timeout` - timeout while `gun` will wait for response
|
||||||
|
* `:max_overflow` - additional workers if pool is under load
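A configuration sketch for the four pools (the sizes mirror the defaults added in this changeset; tune them per instance):

```elixir
config :pleroma, :pools,
  federation: [size: 50, max_overflow: 10, timeout: 150_000],
  media: [size: 50, max_overflow: 10, timeout: 150_000],
  upload: [size: 25, max_overflow: 5, timeout: 300_000],
  default: [size: 10, max_overflow: 2, timeout: 10_000]
```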
|
||||||
|
|
||||||
|
|
||||||
## Captcha
|
## Captcha
|
||||||
|
|
||||||
### Pleroma.Captcha
|
### Pleroma.Captcha
|
||||||
|
|
|
@ -5,6 +5,7 @@
|
||||||
defmodule Mix.Pleroma do
|
defmodule Mix.Pleroma do
|
||||||
@doc "Common functions to be reused in mix tasks"
|
@doc "Common functions to be reused in mix tasks"
|
||||||
def start_pleroma do
|
def start_pleroma do
|
||||||
|
Mix.Task.run("app.start")
|
||||||
Application.put_env(:phoenix, :serve_endpoints, false, persistent: true)
|
Application.put_env(:phoenix, :serve_endpoints, false, persistent: true)
|
||||||
|
|
||||||
if Pleroma.Config.get(:env) != :test do
|
if Pleroma.Config.get(:env) != :test do
|
||||||
|
|
|
@ -74,4 +74,43 @@ def run(["render_timeline", nickname | _] = args) do
|
||||||
inputs: inputs
|
inputs: inputs
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def run(["adapters"]) do
|
||||||
|
start_pleroma()
|
||||||
|
|
||||||
|
:ok =
|
||||||
|
Pleroma.Gun.Conn.open(
|
||||||
|
"https://httpbin.org/stream-bytes/1500",
|
||||||
|
:gun_connections
|
||||||
|
)
|
||||||
|
|
||||||
|
Process.sleep(1_500)
|
||||||
|
|
||||||
|
Benchee.run(
|
||||||
|
%{
|
||||||
|
"Without conn and without pool" => fn ->
|
||||||
|
{:ok, %Tesla.Env{}} =
|
||||||
|
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
|
||||||
|
adapter: [pool: :no_pool, receive_conn: false]
|
||||||
|
)
|
||||||
|
end,
|
||||||
|
"Without conn and with pool" => fn ->
|
||||||
|
{:ok, %Tesla.Env{}} =
|
||||||
|
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
|
||||||
|
adapter: [receive_conn: false]
|
||||||
|
)
|
||||||
|
end,
|
||||||
|
"With reused conn and without pool" => fn ->
|
||||||
|
{:ok, %Tesla.Env{}} =
|
||||||
|
Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500", [],
|
||||||
|
adapter: [pool: :no_pool]
|
||||||
|
)
|
||||||
|
end,
|
||||||
|
"With reused conn and with pool" => fn ->
|
||||||
|
{:ok, %Tesla.Env{}} = Pleroma.HTTP.get("https://httpbin.org/stream-bytes/1500")
|
||||||
|
end
|
||||||
|
},
|
||||||
|
parallel: 10
|
||||||
|
)
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -4,18 +4,18 @@
|
||||||
|
|
||||||
defmodule Mix.Tasks.Pleroma.Emoji do
|
defmodule Mix.Tasks.Pleroma.Emoji do
|
||||||
use Mix.Task
|
use Mix.Task
|
||||||
|
import Mix.Pleroma
|
||||||
|
|
||||||
@shortdoc "Manages emoji packs"
|
@shortdoc "Manages emoji packs"
|
||||||
@moduledoc File.read!("docs/administration/CLI_tasks/emoji.md")
|
@moduledoc File.read!("docs/administration/CLI_tasks/emoji.md")
|
||||||
|
|
||||||
def run(["ls-packs" | args]) do
|
def run(["ls-packs" | args]) do
|
||||||
Mix.Pleroma.start_pleroma()
|
start_pleroma()
|
||||||
Application.ensure_all_started(:hackney)
|
|
||||||
|
|
||||||
{options, [], []} = parse_global_opts(args)
|
{options, [], []} = parse_global_opts(args)
|
||||||
|
|
||||||
manifest =
|
url_or_path = options[:manifest] || default_manifest()
|
||||||
fetch_manifest(if options[:manifest], do: options[:manifest], else: default_manifest())
|
manifest = fetch_manifest(url_or_path)
|
||||||
|
|
||||||
Enum.each(manifest, fn {name, info} ->
|
Enum.each(manifest, fn {name, info} ->
|
||||||
to_print = [
|
to_print = [
|
||||||
|
@ -36,14 +36,13 @@ def run(["ls-packs" | args]) do
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["get-packs" | args]) do
|
def run(["get-packs" | args]) do
|
||||||
Mix.Pleroma.start_pleroma()
|
start_pleroma()
|
||||||
Application.ensure_all_started(:hackney)
|
|
||||||
|
|
||||||
{options, pack_names, []} = parse_global_opts(args)
|
{options, pack_names, []} = parse_global_opts(args)
|
||||||
|
|
||||||
manifest_url = if options[:manifest], do: options[:manifest], else: default_manifest()
|
url_or_path = options[:manifest] || default_manifest()
|
||||||
|
|
||||||
manifest = fetch_manifest(manifest_url)
|
manifest = fetch_manifest(url_or_path)
|
||||||
|
|
||||||
for pack_name <- pack_names do
|
for pack_name <- pack_names do
|
||||||
if Map.has_key?(manifest, pack_name) do
|
if Map.has_key?(manifest, pack_name) do
|
||||||
|
@ -76,7 +75,10 @@ def run(["get-packs" | args]) do
|
||||||
end
|
end
|
||||||
|
|
||||||
# The url specified in files should be in the same directory
|
# The url specified in files should be in the same directory
|
||||||
files_url = Path.join(Path.dirname(manifest_url), pack["files"])
|
files_url =
|
||||||
|
url_or_path
|
||||||
|
|> Path.dirname()
|
||||||
|
|> Path.join(pack["files"])
|
||||||
|
|
||||||
IO.puts(
|
IO.puts(
|
||||||
IO.ANSI.format([
|
IO.ANSI.format([
|
||||||
|
@ -134,38 +136,51 @@ def run(["get-packs" | args]) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def run(["gen-pack", src]) do
|
def run(["gen-pack" | args]) do
|
||||||
Application.ensure_all_started(:hackney)
|
start_pleroma()
|
||||||
|
|
||||||
proposed_name = Path.basename(src) |> Path.rootname()
|
{opts, [src], []} =
|
||||||
name = String.trim(IO.gets("Pack name [#{proposed_name}]: "))
|
OptionParser.parse(
|
||||||
# If there's no name, use the default one
|
args,
|
||||||
name = if String.length(name) > 0, do: name, else: proposed_name
|
strict: [
|
||||||
|
name: :string,
|
||||||
license = String.trim(IO.gets("License: "))
|
license: :string,
|
||||||
homepage = String.trim(IO.gets("Homepage: "))
|
homepage: :string,
|
||||||
description = String.trim(IO.gets("Description: "))
|
description: :string,
|
||||||
|
files: :string,
|
||||||
proposed_files_name = "#{name}.json"
|
extensions: :string
|
||||||
files_name = String.trim(IO.gets("Save file list to [#{proposed_files_name}]: "))
|
]
|
||||||
files_name = if String.length(files_name) > 0, do: files_name, else: proposed_files_name
|
|
||||||
|
|
||||||
default_exts = [".png", ".gif"]
|
|
||||||
default_exts_str = Enum.join(default_exts, " ")
|
|
||||||
|
|
||||||
exts =
|
|
||||||
String.trim(
|
|
||||||
IO.gets("Emoji file extensions (separated with spaces) [#{default_exts_str}]: ")
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
proposed_name = Path.basename(src) |> Path.rootname()
|
||||||
|
name = get_option(opts, :name, "Pack name:", proposed_name)
|
||||||
|
license = get_option(opts, :license, "License:")
|
||||||
|
homepage = get_option(opts, :homepage, "Homepage:")
|
||||||
|
description = get_option(opts, :description, "Description:")
|
||||||
|
|
||||||
|
proposed_files_name = "#{name}_files.json"
|
||||||
|
files_name = get_option(opts, :files, "Save file list to:", proposed_files_name)
|
||||||
|
|
||||||
|
default_exts = [".png", ".gif"]
|
||||||
|
|
||||||
|
custom_exts =
|
||||||
|
get_option(
|
||||||
|
opts,
|
||||||
|
:extensions,
|
||||||
|
"Emoji file extensions (separated with spaces):",
|
||||||
|
Enum.join(default_exts, " ")
|
||||||
|
)
|
||||||
|
|> String.split(" ", trim: true)
|
||||||
|
|
||||||
exts =
|
exts =
|
||||||
if String.length(exts) > 0 do
|
if MapSet.equal?(MapSet.new(default_exts), MapSet.new(custom_exts)) do
|
||||||
String.split(exts, " ")
|
|
||||||
|> Enum.filter(fn e -> e |> String.trim() |> String.length() > 0 end)
|
|
||||||
else
|
|
||||||
default_exts
|
default_exts
|
||||||
|
else
|
||||||
|
custom_exts
|
||||||
end
|
end
|
||||||
|
|
||||||
|
IO.puts("Using #{Enum.join(exts, " ")} extensions")
|
||||||
|
|
||||||
IO.puts("Downloading the pack and generating SHA256")
|
IO.puts("Downloading the pack and generating SHA256")
|
||||||
|
|
||||||
binary_archive = Tesla.get!(client(), src).body
|
binary_archive = Tesla.get!(client(), src).body
|
||||||
|
@ -195,14 +210,16 @@ def run(["gen-pack", src]) do
|
||||||
IO.puts("""
|
IO.puts("""
|
||||||
|
|
||||||
#{files_name} has been created and contains the list of all found emojis in the pack.
|
#{files_name} has been created and contains the list of all found emojis in the pack.
|
||||||
Please review the files in the remove those not needed.
|
Please review the files in the pack and remove those not needed.
|
||||||
""")
|
""")
|
||||||
|
|
||||||
if File.exists?("index.json") do
|
pack_file = "#{name}.json"
|
||||||
existing_data = File.read!("index.json") |> Jason.decode!()
|
|
||||||
|
if File.exists?(pack_file) do
|
||||||
|
existing_data = File.read!(pack_file) |> Jason.decode!()
|
||||||
|
|
||||||
File.write!(
|
File.write!(
|
||||||
"index.json",
|
pack_file,
|
||||||
Jason.encode!(
|
Jason.encode!(
|
||||||
Map.merge(
|
Map.merge(
|
||||||
existing_data,
|
existing_data,
|
||||||
|
@ -212,11 +229,11 @@ def run(["gen-pack", src]) do
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
IO.puts("index.json file has been update with the #{name} pack")
|
IO.puts("#{pack_file} has been updated with the #{name} pack")
|
||||||
else
|
else
|
||||||
File.write!("index.json", Jason.encode!(pack_json, pretty: true))
|
File.write!(pack_file, Jason.encode!(pack_json, pretty: true))
|
||||||
|
|
||||||
IO.puts("index.json has been created with the #{name} pack")
|
IO.puts("#{pack_file} has been created with the #{name} pack")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -3,8 +3,12 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.Application do
|
defmodule Pleroma.Application do
|
||||||
import Cachex.Spec
|
|
||||||
use Application
|
use Application
|
||||||
|
|
||||||
|
import Cachex.Spec
|
||||||
|
|
||||||
|
alias Pleroma.Config
|
||||||
|
|
||||||
require Logger
|
require Logger
|
||||||
|
|
||||||
@name Mix.Project.config()[:name]
|
@name Mix.Project.config()[:name]
|
||||||
|
@ -18,9 +22,9 @@ def named_version, do: @name <> " " <> @version
|
||||||
def repository, do: @repository
|
def repository, do: @repository
|
||||||
|
|
||||||
def user_agent do
|
def user_agent do
|
||||||
case Pleroma.Config.get([:http, :user_agent], :default) do
|
case Config.get([:http, :user_agent], :default) do
|
||||||
:default ->
|
:default ->
|
||||||
info = "#{Pleroma.Web.base_url()} <#{Pleroma.Config.get([:instance, :email], "")}>"
|
info = "#{Pleroma.Web.base_url()} <#{Config.get([:instance, :email], "")}>"
|
||||||
named_version() <> "; " <> info
|
named_version() <> "; " <> info
|
||||||
|
|
||||||
custom ->
|
custom ->
|
||||||
|
@ -33,27 +37,51 @@ def user_agent do
|
||||||
def start(_type, _args) do
|
def start(_type, _args) do
|
||||||
Pleroma.Config.Holder.save_default()
|
Pleroma.Config.Holder.save_default()
|
||||||
Pleroma.HTML.compile_scrubbers()
|
Pleroma.HTML.compile_scrubbers()
|
||||||
Pleroma.Config.DeprecationWarnings.warn()
|
Config.DeprecationWarnings.warn()
|
||||||
Pleroma.Plugs.HTTPSecurityPlug.warn_if_disabled()
|
Pleroma.Plugs.HTTPSecurityPlug.warn_if_disabled()
|
||||||
Pleroma.Repo.check_migrations_applied!()
|
Pleroma.Repo.check_migrations_applied!()
|
||||||
setup_instrumenters()
|
setup_instrumenters()
|
||||||
load_custom_modules()
|
load_custom_modules()
|
||||||
|
|
||||||
|
adapter = Application.get_env(:tesla, :adapter)
|
||||||
|
|
||||||
|
if adapter == Tesla.Adapter.Gun do
|
||||||
|
if version = Pleroma.OTPVersion.version() do
|
||||||
|
[major, minor] =
|
||||||
|
version
|
||||||
|
|> String.split(".")
|
||||||
|
|> Enum.map(&String.to_integer/1)
|
||||||
|
|> Enum.take(2)
|
||||||
|
|
||||||
|
if (major == 22 and minor < 2) or major < 22 do
|
||||||
|
raise "
|
||||||
|
!!!OTP VERSION WARNING!!!
|
||||||
|
You are using the gun adapter with OTP version #{version}, which doesn't support correct handling of unordered certificate chains.
|
||||||
|
"
|
||||||
|
end
|
||||||
|
else
|
||||||
|
raise "
|
||||||
|
!!!OTP VERSION WARNING!!!
|
||||||
|
To support correct handling of unordered certificate chains, the OTP version must be at least 22.2.
|
||||||
|
"
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
# Define workers and child supervisors to be supervised
|
# Define workers and child supervisors to be supervised
|
||||||
children =
|
children =
|
||||||
[
|
[
|
||||||
Pleroma.Repo,
|
Pleroma.Repo,
|
||||||
Pleroma.Config.TransferTask,
|
Config.TransferTask,
|
||||||
Pleroma.Emoji,
|
Pleroma.Emoji,
|
||||||
Pleroma.Captcha,
|
Pleroma.Captcha,
|
||||||
Pleroma.Plugs.RateLimiter.Supervisor
|
Pleroma.Plugs.RateLimiter.Supervisor
|
||||||
] ++
|
] ++
|
||||||
cachex_children() ++
|
cachex_children() ++
|
||||||
hackney_pool_children() ++
|
http_children(adapter, @env) ++
|
||||||
[
|
[
|
||||||
Pleroma.Stats,
|
Pleroma.Stats,
|
||||||
Pleroma.JobQueueMonitor,
|
Pleroma.JobQueueMonitor,
|
||||||
{Oban, Pleroma.Config.get(Oban)}
|
{Oban, Config.get(Oban)}
|
||||||
] ++
|
] ++
|
||||||
task_children(@env) ++
|
task_children(@env) ++
|
||||||
streamer_child(@env) ++
|
streamer_child(@env) ++
|
||||||
|
@ -70,7 +98,7 @@ def start(_type, _args) do
|
||||||
end
|
end
|
||||||
|
|
||||||
def load_custom_modules do
|
def load_custom_modules do
|
||||||
dir = Pleroma.Config.get([:modules, :runtime_dir])
|
dir = Config.get([:modules, :runtime_dir])
|
||||||
|
|
||||||
if dir && File.exists?(dir) do
|
if dir && File.exists?(dir) do
|
||||||
dir
|
dir
|
||||||
|
@@ -111,20 +139,6 @@ defp setup_instrumenters do
|
||||||
Pleroma.Web.Endpoint.Instrumenter.setup()
|
Pleroma.Web.Endpoint.Instrumenter.setup()
|
||||||
end
|
end
|
||||||
|
|
||||||
def enabled_hackney_pools do
|
|
||||||
[:media] ++
|
|
||||||
if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
|
|
||||||
[:federation]
|
|
||||||
else
|
|
||||||
[]
|
|
||||||
end ++
|
|
||||||
if Pleroma.Config.get([Pleroma.Upload, :proxy_remote]) do
|
|
||||||
[:upload]
|
|
||||||
else
|
|
||||||
[]
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defp cachex_children do
|
defp cachex_children do
|
||||||
[
|
[
|
||||||
build_cachex("used_captcha", ttl_interval: seconds_valid_interval()),
|
build_cachex("used_captcha", ttl_interval: seconds_valid_interval()),
|
||||||
|
@@ -146,7 +160,7 @@ defp idempotency_expiration,
|
||||||
do: expiration(default: :timer.seconds(6 * 60 * 60), interval: :timer.seconds(60))
|
do: expiration(default: :timer.seconds(6 * 60 * 60), interval: :timer.seconds(60))
|
||||||
|
|
||||||
defp seconds_valid_interval,
|
defp seconds_valid_interval,
|
||||||
do: :timer.seconds(Pleroma.Config.get!([Pleroma.Captcha, :seconds_valid]))
|
do: :timer.seconds(Config.get!([Pleroma.Captcha, :seconds_valid]))
|
||||||
|
|
||||||
defp build_cachex(type, opts),
|
defp build_cachex(type, opts),
|
||||||
do: %{
|
do: %{
|
||||||
|
@@ -155,9 +169,9 @@ defp build_cachex(type, opts),
|
||||||
type: :worker
|
type: :worker
|
||||||
}
|
}
|
||||||
|
|
||||||
defp chat_enabled?, do: Pleroma.Config.get([:chat, :enabled])
|
defp chat_enabled?, do: Config.get([:chat, :enabled])
|
||||||
|
|
||||||
defp streamer_child(:test), do: []
|
defp streamer_child(env) when env in [:test, :benchmark], do: []
|
||||||
|
|
||||||
defp streamer_child(_) do
|
defp streamer_child(_) do
|
||||||
[Pleroma.Web.Streamer.supervisor()]
|
[Pleroma.Web.Streamer.supervisor()]
|
||||||
|
@@ -169,13 +183,6 @@ defp chat_child(_env, true) do
|
||||||
|
|
||||||
defp chat_child(_, _), do: []
|
defp chat_child(_, _), do: []
|
||||||
|
|
||||||
defp hackney_pool_children do
|
|
||||||
for pool <- enabled_hackney_pools() do
|
|
||||||
options = Pleroma.Config.get([:hackney_pools, pool])
|
|
||||||
:hackney_pool.child_spec(pool, options)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
defp task_children(:test) do
|
defp task_children(:test) do
|
||||||
[
|
[
|
||||||
%{
|
%{
|
||||||
|
@@ -200,4 +207,31 @@ defp task_children(_) do
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
end
|
end
|
||||||
|
|
||||||
|
# start hackney and gun pools in tests
|
||||||
|
defp http_children(_, :test) do
|
||||||
|
hackney_options = Config.get([:hackney_pools, :federation])
|
||||||
|
hackney_pool = :hackney_pool.child_spec(:federation, hackney_options)
|
||||||
|
[hackney_pool, Pleroma.Pool.Supervisor]
|
||||||
|
end
|
||||||
|
|
||||||
|
defp http_children(Tesla.Adapter.Hackney, _) do
|
||||||
|
pools = [:federation, :media]
|
||||||
|
|
||||||
|
pools =
|
||||||
|
if Config.get([Pleroma.Upload, :proxy_remote]) do
|
||||||
|
[:upload | pools]
|
||||||
|
else
|
||||||
|
pools
|
||||||
|
end
|
||||||
|
|
||||||
|
for pool <- pools do
|
||||||
|
options = Config.get([:hackney_pools, pool])
|
||||||
|
:hackney_pool.child_spec(pool, options)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp http_children(Tesla.Adapter.Gun, _), do: [Pleroma.Pool.Supervisor]
|
||||||
|
|
||||||
|
defp http_children(_, _), do: []
|
||||||
end
|
end
|
||||||
|
|
|
@@ -278,8 +278,6 @@ defp do_convert({:proxy_url, {type, host, port}}) do
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_convert({:partial_chain, entity}), do: %{"tuple" => [":partial_chain", inspect(entity)]}
|
|
||||||
|
|
||||||
defp do_convert(entity) when is_tuple(entity) do
|
defp do_convert(entity) when is_tuple(entity) do
|
||||||
value =
|
value =
|
||||||
entity
|
entity
|
||||||
|
@@ -323,15 +321,6 @@ defp do_transform(%{"tuple" => [":proxy_url", %{"tuple" => [type, host, port]}]}
|
||||||
{:proxy_url, {do_transform_string(type), parse_host(host), port}}
|
{:proxy_url, {do_transform_string(type), parse_host(host), port}}
|
||||||
end
|
end
|
||||||
|
|
||||||
defp do_transform(%{"tuple" => [":partial_chain", entity]}) do
|
|
||||||
{partial_chain, []} =
|
|
||||||
entity
|
|
||||||
|> String.replace(~r/[^\w|^{:,[|^,|^[|^\]^}|^\/|^\.|^"]^\s/, "")
|
|
||||||
|> Code.eval_string()
|
|
||||||
|
|
||||||
{:partial_chain, partial_chain}
|
|
||||||
end
|
|
||||||
|
|
||||||
defp do_transform(%{"tuple" => entity}) do
|
defp do_transform(%{"tuple" => entity}) do
|
||||||
Enum.reduce(entity, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end)
|
Enum.reduce(entity, {}, fn val, acc -> Tuple.append(acc, do_transform(val)) end)
|
||||||
end
|
end
|
||||||
|
|
|
@@ -5,6 +5,7 @@
|
||||||
defmodule Pleroma.Config.TransferTask do
|
defmodule Pleroma.Config.TransferTask do
|
||||||
use Task
|
use Task
|
||||||
|
|
||||||
|
alias Pleroma.Config
|
||||||
alias Pleroma.ConfigDB
|
alias Pleroma.ConfigDB
|
||||||
alias Pleroma.Repo
|
alias Pleroma.Repo
|
||||||
|
|
||||||
|
@@ -18,7 +19,9 @@ defmodule Pleroma.Config.TransferTask do
|
||||||
{:pleroma, Oban},
|
{:pleroma, Oban},
|
||||||
{:pleroma, :rate_limit},
|
{:pleroma, :rate_limit},
|
||||||
{:pleroma, :markup},
|
{:pleroma, :markup},
|
||||||
{:plerome, :streamer}
|
{:pleroma, :streamer},
|
||||||
|
{:pleroma, :pools},
|
||||||
|
{:pleroma, :connections_pool}
|
||||||
]
|
]
|
||||||
|
|
||||||
@reboot_time_subkeys [
|
@reboot_time_subkeys [
|
||||||
|
@@ -32,45 +35,33 @@ defmodule Pleroma.Config.TransferTask do
|
||||||
{:pleroma, :gopher, [:enabled]}
|
{:pleroma, :gopher, [:enabled]}
|
||||||
]
|
]
|
||||||
|
|
||||||
@reject [nil, :prometheus]
|
|
||||||
|
|
||||||
def start_link(_) do
|
def start_link(_) do
|
||||||
load_and_update_env()
|
load_and_update_env()
|
||||||
if Pleroma.Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Repo)
|
if Config.get(:env) == :test, do: Ecto.Adapters.SQL.Sandbox.checkin(Repo)
|
||||||
:ignore
|
:ignore
|
||||||
end
|
end
|
||||||
|
|
||||||
@spec load_and_update_env([ConfigDB.t()]) :: :ok | false
|
@spec load_and_update_env([ConfigDB.t()], boolean()) :: :ok
|
||||||
def load_and_update_env(deleted \\ [], restart_pleroma? \\ true) do
|
def load_and_update_env(deleted_settings \\ [], restart_pleroma? \\ true) do
|
||||||
with {:configurable, true} <-
|
with {_, true} <- {:configurable, Config.get(:configurable_from_database)} do
|
||||||
{:configurable, Pleroma.Config.get(:configurable_from_database)},
|
|
||||||
true <- Ecto.Adapters.SQL.table_exists?(Repo, "config"),
|
|
||||||
started_applications <- Application.started_applications() do
|
|
||||||
# We need to restart applications for the loaded settings to take effect
|
# We need to restart applications for the loaded settings to take effect
|
||||||
|
|
||||||
in_db = Repo.all(ConfigDB)
|
|
||||||
|
|
||||||
with_deleted = in_db ++ deleted
|
|
||||||
|
|
||||||
reject_for_restart = if restart_pleroma?, do: @reject, else: [:pleroma | @reject]
|
|
||||||
|
|
||||||
applications =
|
|
||||||
with_deleted
|
|
||||||
|> Enum.map(&merge_and_update(&1))
|
|
||||||
|> Enum.uniq()
|
|
||||||
# TODO: some problem with prometheus after restart!
|
# TODO: some problem with prometheus after restart!
|
||||||
|> Enum.reject(&(&1 in reject_for_restart))
|
reject_restart =
|
||||||
|
if restart_pleroma? do
|
||||||
# to be ensured that pleroma will be restarted last
|
[nil, :prometheus]
|
||||||
applications =
|
|
||||||
if :pleroma in applications do
|
|
||||||
List.delete(applications, :pleroma) ++ [:pleroma]
|
|
||||||
else
|
else
|
||||||
Restarter.Pleroma.rebooted()
|
[:pleroma, nil, :prometheus]
|
||||||
applications
|
|
||||||
end
|
end
|
||||||
|
|
||||||
Enum.each(applications, &restart(started_applications, &1, Pleroma.Config.get(:env)))
|
started_applications = Application.started_applications()
|
||||||
|
|
||||||
|
(Repo.all(ConfigDB) ++ deleted_settings)
|
||||||
|
|> Enum.map(&merge_and_update/1)
|
||||||
|
|> Enum.uniq()
|
||||||
|
|> Enum.reject(&(&1 in reject_restart))
|
||||||
|
|> maybe_set_pleroma_last()
|
||||||
|
|> Enum.each(&restart(started_applications, &1, Config.get(:env)))
|
||||||
|
|
||||||
:ok
|
:ok
|
||||||
else
|
else
|
||||||
|
@@ -78,32 +69,19 @@ def load_and_update_env(deleted \\ [], restart_pleroma? \\ true) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp merge_and_update(setting) do
|
defp maybe_set_pleroma_last(apps) do
|
||||||
try do
|
# ensure that pleroma is restarted last
|
||||||
key = ConfigDB.from_string(setting.key)
|
if :pleroma in apps do
|
||||||
group = ConfigDB.from_string(setting.group)
|
apps
|
||||||
|
|> List.delete(:pleroma)
|
||||||
default = Pleroma.Config.Holder.default_config(group, key)
|
|> List.insert_at(-1, :pleroma)
|
||||||
value = ConfigDB.from_binary(setting.value)
|
|
||||||
|
|
||||||
merged_value =
|
|
||||||
if Ecto.get_meta(setting, :state) == :deleted do
|
|
||||||
default
|
|
||||||
else
|
else
|
||||||
if can_be_merged?(default, value) do
|
Restarter.Pleroma.rebooted()
|
||||||
ConfigDB.merge_group(group, key, default, value)
|
apps
|
||||||
else
|
|
||||||
value
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
:ok = update_env(group, key, merged_value)
|
defp group_for_restart(:logger, key, _, merged_value) do
|
||||||
|
|
||||||
if group != :logger do
|
|
||||||
if group != :pleroma or pleroma_need_restart?(group, key, value) do
|
|
||||||
group
|
|
||||||
end
|
|
||||||
else
|
|
||||||
# change logger configuration in runtime, without restart
|
# change logger configuration in runtime, without restart
|
||||||
if Keyword.keyword?(merged_value) and
|
if Keyword.keyword?(merged_value) and
|
||||||
key not in [:compile_time_application, :backends, :compile_time_purge_matching] do
|
key not in [:compile_time_application, :backends, :compile_time_purge_matching] do
|
||||||
|
@@ -114,6 +92,31 @@ defp merge_and_update(setting) do
|
||||||
|
|
||||||
nil
|
nil
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp group_for_restart(group, _, _, _) when group != :pleroma, do: group
|
||||||
|
|
||||||
|
defp group_for_restart(group, key, value, _) do
|
||||||
|
if pleroma_need_restart?(group, key, value), do: group
|
||||||
|
end
|
||||||
|
|
||||||
|
defp merge_and_update(setting) do
|
||||||
|
try do
|
||||||
|
key = ConfigDB.from_string(setting.key)
|
||||||
|
group = ConfigDB.from_string(setting.group)
|
||||||
|
|
||||||
|
default = Config.Holder.default_config(group, key)
|
||||||
|
value = ConfigDB.from_binary(setting.value)
|
||||||
|
|
||||||
|
merged_value =
|
||||||
|
cond do
|
||||||
|
Ecto.get_meta(setting, :state) == :deleted -> default
|
||||||
|
can_be_merged?(default, value) -> ConfigDB.merge_group(group, key, default, value)
|
||||||
|
true -> value
|
||||||
|
end
|
||||||
|
|
||||||
|
:ok = update_env(group, key, merged_value)
|
||||||
|
|
||||||
|
group_for_restart(group, key, value, merged_value)
|
||||||
rescue
|
rescue
|
||||||
error ->
|
error ->
|
||||||
error_msg =
|
error_msg =
|
||||||
|
|
45
lib/pleroma/gun/api.ex
Normal file
|
@@ -0,0 +1,45 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Gun.API do
|
||||||
|
@behaviour Pleroma.Gun
|
||||||
|
|
||||||
|
alias Pleroma.Gun
|
||||||
|
|
||||||
|
@gun_keys [
|
||||||
|
:connect_timeout,
|
||||||
|
:http_opts,
|
||||||
|
:http2_opts,
|
||||||
|
:protocols,
|
||||||
|
:retry,
|
||||||
|
:retry_timeout,
|
||||||
|
:trace,
|
||||||
|
:transport,
|
||||||
|
:tls_opts,
|
||||||
|
:tcp_opts,
|
||||||
|
:socks_opts,
|
||||||
|
:ws_opts
|
||||||
|
]
|
||||||
|
|
||||||
|
@impl Gun
|
||||||
|
def open(host, port, opts \\ %{}), do: :gun.open(host, port, Map.take(opts, @gun_keys))
|
||||||
|
|
||||||
|
@impl Gun
|
||||||
|
defdelegate info(pid), to: :gun
|
||||||
|
|
||||||
|
@impl Gun
|
||||||
|
defdelegate close(pid), to: :gun
|
||||||
|
|
||||||
|
@impl Gun
|
||||||
|
defdelegate await_up(pid, timeout \\ 5_000), to: :gun
|
||||||
|
|
||||||
|
@impl Gun
|
||||||
|
defdelegate connect(pid, opts), to: :gun
|
||||||
|
|
||||||
|
@impl Gun
|
||||||
|
defdelegate await(pid, ref), to: :gun
|
||||||
|
|
||||||
|
@impl Gun
|
||||||
|
defdelegate set_owner(pid, owner), to: :gun
|
||||||
|
end
|
196
lib/pleroma/gun/conn.ex
Normal file
|
@@ -0,0 +1,196 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Gun.Conn do
|
||||||
|
@moduledoc """
|
||||||
|
Struct for gun connection data
|
||||||
|
"""
|
||||||
|
alias Pleroma.Gun
|
||||||
|
alias Pleroma.Pool.Connections
|
||||||
|
|
||||||
|
require Logger
|
||||||
|
|
||||||
|
@type gun_state :: :up | :down
|
||||||
|
@type conn_state :: :active | :idle
|
||||||
|
|
||||||
|
@type t :: %__MODULE__{
|
||||||
|
conn: pid(),
|
||||||
|
gun_state: gun_state(),
|
||||||
|
conn_state: conn_state(),
|
||||||
|
used_by: [pid()],
|
||||||
|
last_reference: pos_integer(),
|
||||||
|
crf: float(),
|
||||||
|
retries: pos_integer()
|
||||||
|
}
|
||||||
|
|
||||||
|
defstruct conn: nil,
|
||||||
|
gun_state: :open,
|
||||||
|
conn_state: :init,
|
||||||
|
used_by: [],
|
||||||
|
last_reference: 0,
|
||||||
|
crf: 1,
|
||||||
|
retries: 0
|
||||||
|
|
||||||
|
@spec open(String.t() | URI.t(), atom(), keyword()) :: :ok | nil
|
||||||
|
def open(url, name, opts \\ [])
|
||||||
|
def open(url, name, opts) when is_binary(url), do: open(URI.parse(url), name, opts)
|
||||||
|
|
||||||
|
def open(%URI{} = uri, name, opts) do
|
||||||
|
pool_opts = Pleroma.Config.get([:connections_pool], [])
|
||||||
|
|
||||||
|
opts =
|
||||||
|
opts
|
||||||
|
|> Enum.into(%{})
|
||||||
|
|> Map.put_new(:retry, pool_opts[:retry] || 1)
|
||||||
|
|> Map.put_new(:retry_timeout, pool_opts[:retry_timeout] || 1000)
|
||||||
|
|> Map.put_new(:await_up_timeout, pool_opts[:await_up_timeout] || 5_000)
|
||||||
|
|> maybe_add_tls_opts(uri)
|
||||||
|
|
||||||
|
key = "#{uri.scheme}:#{uri.host}:#{uri.port}"
|
||||||
|
|
||||||
|
conn_pid =
|
||||||
|
if Connections.count(name) < opts[:max_connection] do
|
||||||
|
do_open(uri, opts)
|
||||||
|
else
|
||||||
|
close_least_used_and_do_open(name, uri, opts)
|
||||||
|
end
|
||||||
|
|
||||||
|
if is_pid(conn_pid) do
|
||||||
|
conn = %Pleroma.Gun.Conn{
|
||||||
|
conn: conn_pid,
|
||||||
|
gun_state: :up,
|
||||||
|
conn_state: :active,
|
||||||
|
last_reference: :os.system_time(:second)
|
||||||
|
}
|
||||||
|
|
||||||
|
:ok = Gun.set_owner(conn_pid, Process.whereis(name))
|
||||||
|
Connections.add_conn(name, key, conn)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp maybe_add_tls_opts(opts, %URI{scheme: "http"}), do: opts
|
||||||
|
|
||||||
|
defp maybe_add_tls_opts(opts, %URI{scheme: "https", host: host}) do
|
||||||
|
tls_opts = [
|
||||||
|
verify: :verify_peer,
|
||||||
|
cacertfile: CAStore.file_path(),
|
||||||
|
depth: 20,
|
||||||
|
reuse_sessions: false,
|
||||||
|
verify_fun:
|
||||||
|
{&:ssl_verify_hostname.verify_fun/3,
|
||||||
|
[check_hostname: Pleroma.HTTP.Connection.format_host(host)]}
|
||||||
|
]
|
||||||
|
|
||||||
|
tls_opts =
|
||||||
|
if Keyword.keyword?(opts[:tls_opts]) do
|
||||||
|
Keyword.merge(tls_opts, opts[:tls_opts])
|
||||||
|
else
|
||||||
|
tls_opts
|
||||||
|
end
|
||||||
|
|
||||||
|
Map.put(opts, :tls_opts, tls_opts)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_open(uri, %{proxy: {proxy_host, proxy_port}} = opts) do
|
||||||
|
connect_opts =
|
||||||
|
uri
|
||||||
|
|> destination_opts()
|
||||||
|
|> add_http2_opts(uri.scheme, Map.get(opts, :tls_opts, []))
|
||||||
|
|
||||||
|
with open_opts <- Map.delete(opts, :tls_opts),
|
||||||
|
{:ok, conn} <- Gun.open(proxy_host, proxy_port, open_opts),
|
||||||
|
{:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]),
|
||||||
|
stream <- Gun.connect(conn, connect_opts),
|
||||||
|
{:response, :fin, 200, _} <- Gun.await(conn, stream) do
|
||||||
|
conn
|
||||||
|
else
|
||||||
|
error ->
|
||||||
|
Logger.warn(
|
||||||
|
"Opening proxied connection to #{compose_uri_log(uri)} failed with error #{
|
||||||
|
inspect(error)
|
||||||
|
}"
|
||||||
|
)
|
||||||
|
|
||||||
|
error
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_open(uri, %{proxy: {proxy_type, proxy_host, proxy_port}} = opts) do
|
||||||
|
version =
|
||||||
|
proxy_type
|
||||||
|
|> to_string()
|
||||||
|
|> String.last()
|
||||||
|
|> case do
|
||||||
|
"4" -> 4
|
||||||
|
_ -> 5
|
||||||
|
end
|
||||||
|
|
||||||
|
socks_opts =
|
||||||
|
uri
|
||||||
|
|> destination_opts()
|
||||||
|
|> add_http2_opts(uri.scheme, Map.get(opts, :tls_opts, []))
|
||||||
|
|> Map.put(:version, version)
|
||||||
|
|
||||||
|
opts =
|
||||||
|
opts
|
||||||
|
|> Map.put(:protocols, [:socks])
|
||||||
|
|> Map.put(:socks_opts, socks_opts)
|
||||||
|
|
||||||
|
with {:ok, conn} <- Gun.open(proxy_host, proxy_port, opts),
|
||||||
|
{:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]) do
|
||||||
|
conn
|
||||||
|
else
|
||||||
|
error ->
|
||||||
|
Logger.warn(
|
||||||
|
"Opening socks proxied connection to #{compose_uri_log(uri)} failed with error #{
|
||||||
|
inspect(error)
|
||||||
|
}"
|
||||||
|
)
|
||||||
|
|
||||||
|
error
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp do_open(%URI{host: host, port: port} = uri, opts) do
|
||||||
|
host = Pleroma.HTTP.Connection.parse_host(host)
|
||||||
|
|
||||||
|
with {:ok, conn} <- Gun.open(host, port, opts),
|
||||||
|
{:ok, _} <- Gun.await_up(conn, opts[:await_up_timeout]) do
|
||||||
|
conn
|
||||||
|
else
|
||||||
|
error ->
|
||||||
|
Logger.warn(
|
||||||
|
"Opening connection to #{compose_uri_log(uri)} failed with error #{inspect(error)}"
|
||||||
|
)
|
||||||
|
|
||||||
|
error
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp destination_opts(%URI{host: host, port: port}) do
|
||||||
|
host = Pleroma.HTTP.Connection.parse_host(host)
|
||||||
|
%{host: host, port: port}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp add_http2_opts(opts, "https", tls_opts) do
|
||||||
|
Map.merge(opts, %{protocols: [:http2], transport: :tls, tls_opts: tls_opts})
|
||||||
|
end
|
||||||
|
|
||||||
|
defp add_http2_opts(opts, _, _), do: opts
|
||||||
|
|
||||||
|
defp close_least_used_and_do_open(name, uri, opts) do
|
||||||
|
with [{key, conn} | _conns] <- Connections.get_unused_conns(name),
|
||||||
|
:ok <- Gun.close(conn.conn) do
|
||||||
|
Connections.remove_conn(name, key)
|
||||||
|
|
||||||
|
do_open(uri, opts)
|
||||||
|
else
|
||||||
|
[] -> {:error, :pool_overflowed}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def compose_uri_log(%URI{scheme: scheme, host: host, path: path}) do
|
||||||
|
"#{scheme}://#{host}#{path}"
|
||||||
|
end
|
||||||
|
end
|
31
lib/pleroma/gun/gun.ex
Normal file
|
@@ -0,0 +1,31 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Gun do
|
||||||
|
@callback open(charlist(), pos_integer(), map()) :: {:ok, pid()}
|
||||||
|
@callback info(pid()) :: map()
|
||||||
|
@callback close(pid()) :: :ok
|
||||||
|
@callback await_up(pid, pos_integer()) :: {:ok, atom()} | {:error, atom()}
|
||||||
|
@callback connect(pid(), map()) :: reference()
|
||||||
|
@callback await(pid(), reference()) :: {:response, :fin, 200, []}
|
||||||
|
@callback set_owner(pid(), pid()) :: :ok
|
||||||
|
|
||||||
|
@api Pleroma.Config.get([Pleroma.Gun], Pleroma.Gun.API)
|
||||||
|
|
||||||
|
defp api, do: @api
|
||||||
|
|
||||||
|
def open(host, port, opts), do: api().open(host, port, opts)
|
||||||
|
|
||||||
|
def info(pid), do: api().info(pid)
|
||||||
|
|
||||||
|
def close(pid), do: api().close(pid)
|
||||||
|
|
||||||
|
def await_up(pid, timeout \\ 5_000), do: api().await_up(pid, timeout)
|
||||||
|
|
||||||
|
def connect(pid, opts), do: api().connect(pid, opts)
|
||||||
|
|
||||||
|
def await(pid, ref), do: api().await(pid, ref)
|
||||||
|
|
||||||
|
def set_owner(pid, owner), do: api().set_owner(pid, owner)
|
||||||
|
end
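The `@api Pleroma.Config.get([Pleroma.Gun], Pleroma.Gun.API)` indirection above exists so the real `:gun` calls can be swapped out in tests. A minimal sketch of what that override could look like, assuming a hypothetical mock module:

    # config/test.exs (illustrative)
    config :pleroma, Pleroma.Gun, Pleroma.Gun.API.Mock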
|
41
lib/pleroma/http/adapter_helper.ex
Normal file
|
@@ -0,0 +1,41 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.HTTP.AdapterHelper do
|
||||||
|
alias Pleroma.HTTP.Connection
|
||||||
|
|
||||||
|
@type proxy ::
|
||||||
|
{Connection.host(), pos_integer()}
|
||||||
|
| {Connection.proxy_type(), Connection.host(), pos_integer()}
|
||||||
|
|
||||||
|
@callback options(keyword(), URI.t()) :: keyword()
|
||||||
|
@callback after_request(keyword()) :: :ok
|
||||||
|
|
||||||
|
@spec options(keyword(), URI.t()) :: keyword()
|
||||||
|
def options(opts, _uri) do
|
||||||
|
proxy = Pleroma.Config.get([:http, :proxy_url], nil)
|
||||||
|
maybe_add_proxy(opts, format_proxy(proxy))
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec maybe_get_conn(URI.t(), keyword()) :: keyword()
|
||||||
|
def maybe_get_conn(_uri, opts), do: opts
|
||||||
|
|
||||||
|
@spec after_request(keyword()) :: :ok
|
||||||
|
def after_request(_opts), do: :ok
|
||||||
|
|
||||||
|
@spec format_proxy(String.t() | tuple() | nil) :: proxy() | nil
|
||||||
|
def format_proxy(nil), do: nil
|
||||||
|
|
||||||
|
def format_proxy(proxy_url) do
|
||||||
|
case Connection.parse_proxy(proxy_url) do
|
||||||
|
{:ok, host, port} -> {host, port}
|
||||||
|
{:ok, type, host, port} -> {type, host, port}
|
||||||
|
_ -> nil
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec maybe_add_proxy(keyword(), proxy() | nil) :: keyword()
|
||||||
|
def maybe_add_proxy(opts, nil), do: opts
|
||||||
|
def maybe_add_proxy(opts, proxy), do: Keyword.put_new(opts, :proxy, proxy)
|
||||||
|
end
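For illustration only (example values, not taken from this change), `format_proxy/1` above and `Pleroma.HTTP.Connection.parse_proxy/1` behave roughly like this for the two supported string forms:

    Pleroma.HTTP.Connection.parse_proxy("127.0.0.1:8123")     #=> {:ok, {127, 0, 0, 1}, 8123}
    Pleroma.HTTP.Connection.parse_proxy("localhost:9050")     #=> {:ok, 'localhost', 9050}
    Pleroma.HTTP.AdapterHelper.format_proxy("127.0.0.1:8123") #=> {{127, 0, 0, 1}, 8123}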
|
77
lib/pleroma/http/adapter_helper/gun.ex
Normal file
|
@@ -0,0 +1,77 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.HTTP.AdapterHelper.Gun do
|
||||||
|
@behaviour Pleroma.HTTP.AdapterHelper
|
||||||
|
|
||||||
|
alias Pleroma.HTTP.AdapterHelper
|
||||||
|
alias Pleroma.Pool.Connections
|
||||||
|
|
||||||
|
require Logger
|
||||||
|
|
||||||
|
@defaults [
|
||||||
|
connect_timeout: 5_000,
|
||||||
|
domain_lookup_timeout: 5_000,
|
||||||
|
tls_handshake_timeout: 5_000,
|
||||||
|
retry: 1,
|
||||||
|
retry_timeout: 1000,
|
||||||
|
await_up_timeout: 5_000
|
||||||
|
]
|
||||||
|
|
||||||
|
@spec options(keyword(), URI.t()) :: keyword()
|
||||||
|
def options(incoming_opts \\ [], %URI{} = uri) do
|
||||||
|
proxy =
|
||||||
|
Pleroma.Config.get([:http, :proxy_url])
|
||||||
|
|> AdapterHelper.format_proxy()
|
||||||
|
|
||||||
|
config_opts = Pleroma.Config.get([:http, :adapter], [])
|
||||||
|
|
||||||
|
@defaults
|
||||||
|
|> Keyword.merge(config_opts)
|
||||||
|
|> add_scheme_opts(uri)
|
||||||
|
|> AdapterHelper.maybe_add_proxy(proxy)
|
||||||
|
|> maybe_get_conn(uri, incoming_opts)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec after_request(keyword()) :: :ok
|
||||||
|
def after_request(opts) do
|
||||||
|
if opts[:conn] && opts[:body_as] != :chunks do
|
||||||
|
Connections.checkout(opts[:conn], self(), :gun_connections)
|
||||||
|
end
|
||||||
|
|
||||||
|
:ok
|
||||||
|
end
|
||||||
|
|
||||||
|
defp add_scheme_opts(opts, %{scheme: "http"}), do: opts
|
||||||
|
|
||||||
|
defp add_scheme_opts(opts, %{scheme: "https"}) do
|
||||||
|
opts
|
||||||
|
|> Keyword.put(:certificates_verification, true)
|
||||||
|
|> Keyword.put(:tls_opts, log_level: :warning)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp maybe_get_conn(adapter_opts, uri, incoming_opts) do
|
||||||
|
{receive_conn?, opts} =
|
||||||
|
adapter_opts
|
||||||
|
|> Keyword.merge(incoming_opts)
|
||||||
|
|> Keyword.pop(:receive_conn, true)
|
||||||
|
|
||||||
|
if Connections.alive?(:gun_connections) and receive_conn? do
|
||||||
|
checkin_conn(uri, opts)
|
||||||
|
else
|
||||||
|
opts
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp checkin_conn(uri, opts) do
|
||||||
|
case Connections.checkin(uri, :gun_connections) do
|
||||||
|
nil ->
|
||||||
|
Task.start(Pleroma.Gun.Conn, :open, [uri, :gun_connections, opts])
|
||||||
|
opts
|
||||||
|
|
||||||
|
conn when is_pid(conn) ->
|
||||||
|
Keyword.merge(opts, conn: conn, close_conn: false)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
43
lib/pleroma/http/adapter_helper/hackney.ex
Normal file
|
@@ -0,0 +1,43 @@
|
||||||
|
defmodule Pleroma.HTTP.AdapterHelper.Hackney do
|
||||||
|
@behaviour Pleroma.HTTP.AdapterHelper
|
||||||
|
|
||||||
|
@defaults [
|
||||||
|
connect_timeout: 10_000,
|
||||||
|
recv_timeout: 20_000,
|
||||||
|
follow_redirect: true,
|
||||||
|
force_redirect: true,
|
||||||
|
pool: :federation
|
||||||
|
]
|
||||||
|
|
||||||
|
@spec options(keyword(), URI.t()) :: keyword()
|
||||||
|
def options(connection_opts \\ [], %URI{} = uri) do
|
||||||
|
proxy = Pleroma.Config.get([:http, :proxy_url])
|
||||||
|
|
||||||
|
config_opts = Pleroma.Config.get([:http, :adapter], [])
|
||||||
|
|
||||||
|
@defaults
|
||||||
|
|> Keyword.merge(config_opts)
|
||||||
|
|> Keyword.merge(connection_opts)
|
||||||
|
|> add_scheme_opts(uri)
|
||||||
|
|> Pleroma.HTTP.AdapterHelper.maybe_add_proxy(proxy)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp add_scheme_opts(opts, %URI{scheme: "http"}), do: opts
|
||||||
|
|
||||||
|
defp add_scheme_opts(opts, %URI{scheme: "https", host: host}) do
|
||||||
|
ssl_opts = [
|
||||||
|
ssl_options: [
|
||||||
|
# Workaround for remote server certificate chain issues
|
||||||
|
partial_chain: &:hackney_connect.partial_chain/1,
|
||||||
|
|
||||||
|
# We don't support TLS v1.3 yet
|
||||||
|
versions: [:tlsv1, :"tlsv1.1", :"tlsv1.2"],
|
||||||
|
server_name_indication: to_charlist(host)
|
||||||
|
]
|
||||||
|
]
|
||||||
|
|
||||||
|
Keyword.merge(opts, ssl_opts)
|
||||||
|
end
|
||||||
|
|
||||||
|
def after_request(_), do: :ok
|
||||||
|
end
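Both adapter helpers merge `Pleroma.Config.get([:http, :adapter], [])` over their defaults, so individual options can be overridden from configuration. A minimal sketch, reusing one of the default keys shown above as the example override:

    config :pleroma, :http, adapter: [recv_timeout: 30_000]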
|
|
@@ -4,40 +4,121 @@
|
||||||
|
|
||||||
defmodule Pleroma.HTTP.Connection do
|
defmodule Pleroma.HTTP.Connection do
|
||||||
@moduledoc """
|
@moduledoc """
|
||||||
Connection for http-requests.
|
Configure Tesla.Client with default and customized adapter options.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
@hackney_options [
|
alias Pleroma.Config
|
||||||
connect_timeout: 10_000,
|
alias Pleroma.HTTP.AdapterHelper
|
||||||
recv_timeout: 20_000,
|
|
||||||
follow_redirect: true,
|
require Logger
|
||||||
force_redirect: true,
|
|
||||||
pool: :federation
|
@defaults [pool: :federation]
|
||||||
]
|
|
||||||
@adapter Application.get_env(:tesla, :adapter)
|
@type ip_address :: ipv4_address() | ipv6_address()
|
||||||
|
@type ipv4_address :: {0..255, 0..255, 0..255, 0..255}
|
||||||
|
@type ipv6_address ::
|
||||||
|
{0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535, 0..65_535}
|
||||||
|
@type proxy_type() :: :socks4 | :socks5
|
||||||
|
@type host() :: charlist() | ip_address()
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Configure a client connection
|
Merge default connection & adapter options with received ones.
|
||||||
|
|
||||||
# Returns
|
|
||||||
|
|
||||||
Tesla.Env.client
|
|
||||||
"""
|
"""
|
||||||
@spec new(Keyword.t()) :: Tesla.Env.client()
|
|
||||||
def new(opts \\ []) do
|
@spec options(URI.t(), keyword()) :: keyword()
|
||||||
Tesla.client([], {@adapter, hackney_options(opts)})
|
def options(%URI{} = uri, opts \\ []) do
|
||||||
|
@defaults
|
||||||
|
|> pool_timeout()
|
||||||
|
|> Keyword.merge(opts)
|
||||||
|
|> adapter_helper().options(uri)
|
||||||
end
|
end
|
||||||
|
|
||||||
# fetch Hackney options
|
defp pool_timeout(opts) do
|
||||||
#
|
{config_key, default} =
|
||||||
def hackney_options(opts) do
|
if adapter() == Tesla.Adapter.Gun do
|
||||||
options = Keyword.get(opts, :adapter, [])
|
{:pools, Config.get([:pools, :default, :timeout])}
|
||||||
adapter_options = Pleroma.Config.get([:http, :adapter], [])
|
else
|
||||||
proxy_url = Pleroma.Config.get([:http, :proxy_url], nil)
|
{:hackney_pools, 10_000}
|
||||||
|
end
|
||||||
|
|
||||||
@hackney_options
|
timeout = Config.get([config_key, opts[:pool], :timeout], default)
|
||||||
|> Keyword.merge(adapter_options)
|
|
||||||
|> Keyword.merge(options)
|
Keyword.merge(opts, timeout: timeout)
|
||||||
|> Keyword.merge(proxy: proxy_url)
|
end
|
||||||
|
|
||||||
|
@spec after_request(keyword()) :: :ok
|
||||||
|
def after_request(opts), do: adapter_helper().after_request(opts)
|
||||||
|
|
||||||
|
defp adapter, do: Application.get_env(:tesla, :adapter)
|
||||||
|
|
||||||
|
defp adapter_helper do
|
||||||
|
case adapter() do
|
||||||
|
Tesla.Adapter.Gun -> AdapterHelper.Gun
|
||||||
|
Tesla.Adapter.Hackney -> AdapterHelper.Hackney
|
||||||
|
_ -> AdapterHelper
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec parse_proxy(String.t() | tuple() | nil) ::
|
||||||
|
{:ok, host(), pos_integer()}
|
||||||
|
| {:ok, proxy_type(), host(), pos_integer()}
|
||||||
|
| {:error, atom()}
|
||||||
|
| nil
|
||||||
|
|
||||||
|
def parse_proxy(nil), do: nil
|
||||||
|
|
||||||
|
def parse_proxy(proxy) when is_binary(proxy) do
|
||||||
|
with [host, port] <- String.split(proxy, ":"),
|
||||||
|
{port, ""} <- Integer.parse(port) do
|
||||||
|
{:ok, parse_host(host), port}
|
||||||
|
else
|
||||||
|
{_, _} ->
|
||||||
|
Logger.warn("Parsing port failed #{inspect(proxy)}")
|
||||||
|
{:error, :invalid_proxy_port}
|
||||||
|
|
||||||
|
:error ->
|
||||||
|
Logger.warn("Parsing port failed #{inspect(proxy)}")
|
||||||
|
{:error, :invalid_proxy_port}
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
Logger.warn("Parsing proxy failed #{inspect(proxy)}")
|
||||||
|
{:error, :invalid_proxy}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def parse_proxy(proxy) when is_tuple(proxy) do
|
||||||
|
with {type, host, port} <- proxy do
|
||||||
|
{:ok, type, parse_host(host), port}
|
||||||
|
else
|
||||||
|
_ ->
|
||||||
|
Logger.warn("Parsing proxy failed #{inspect(proxy)}")
|
||||||
|
{:error, :invalid_proxy}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec parse_host(String.t() | atom() | charlist()) :: charlist() | ip_address()
|
||||||
|
def parse_host(host) when is_list(host), do: host
|
||||||
|
def parse_host(host) when is_atom(host), do: to_charlist(host)
|
||||||
|
|
||||||
|
def parse_host(host) when is_binary(host) do
|
||||||
|
host = to_charlist(host)
|
||||||
|
|
||||||
|
case :inet.parse_address(host) do
|
||||||
|
{:error, :einval} -> host
|
||||||
|
{:ok, ip} -> ip
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec format_host(String.t()) :: charlist()
|
||||||
|
def format_host(host) do
|
||||||
|
host_charlist = to_charlist(host)
|
||||||
|
|
||||||
|
case :inet.parse_address(host_charlist) do
|
||||||
|
{:error, :einval} ->
|
||||||
|
:idna.encode(host_charlist)
|
||||||
|
|
||||||
|
{:ok, _ip} ->
|
||||||
|
host_charlist
|
||||||
|
end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@@ -4,21 +4,47 @@
|
||||||
|
|
||||||
defmodule Pleroma.HTTP do
|
defmodule Pleroma.HTTP do
|
||||||
@moduledoc """
|
@moduledoc """
|
||||||
|
Wrapper for `Tesla.request/2`.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
alias Pleroma.HTTP.Connection
|
alias Pleroma.HTTP.Connection
|
||||||
|
alias Pleroma.HTTP.Request
|
||||||
alias Pleroma.HTTP.RequestBuilder, as: Builder
|
alias Pleroma.HTTP.RequestBuilder, as: Builder
|
||||||
|
alias Tesla.Client
|
||||||
|
alias Tesla.Env
|
||||||
|
|
||||||
|
require Logger
|
||||||
|
|
||||||
@type t :: __MODULE__
|
@type t :: __MODULE__
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Builds and perform http request.
|
Performs GET request.
|
||||||
|
|
||||||
|
See `Pleroma.HTTP.request/5`
|
||||||
|
"""
|
||||||
|
@spec get(Request.url() | nil, Request.headers(), keyword()) ::
|
||||||
|
nil | {:ok, Env.t()} | {:error, any()}
|
||||||
|
def get(url, headers \\ [], options \\ [])
|
||||||
|
def get(nil, _, _), do: nil
|
||||||
|
def get(url, headers, options), do: request(:get, url, "", headers, options)
|
||||||
|
|
||||||
|
@doc """
|
||||||
|
Performs POST request.
|
||||||
|
|
||||||
|
See `Pleroma.HTTP.request/5`
|
||||||
|
"""
|
||||||
|
@spec post(Request.url(), String.t(), Request.headers(), keyword()) ::
|
||||||
|
{:ok, Env.t()} | {:error, any()}
|
||||||
|
def post(url, body, headers \\ [], options \\ []),
|
||||||
|
do: request(:post, url, body, headers, options)
|
||||||
|
|
||||||
|
@doc """
|
||||||
|
Builds and performs http request.
|
||||||
|
|
||||||
# Arguments:
|
# Arguments:
|
||||||
`method` - :get, :post, :put, :delete
|
`method` - :get, :post, :put, :delete
|
||||||
`url`
|
`url` - full url
|
||||||
`body`
|
`body` - request body
|
||||||
`headers` - a keyword list of headers, e.g. `[{"content-type", "text/plain"}]`
|
`headers` - a keyword list of headers, e.g. `[{"content-type", "text/plain"}]`
|
||||||
`options` - custom, per-request middleware or adapter options
|
`options` - custom, per-request middleware or adapter options
|
||||||
|
|
||||||
|
@@ -26,61 +52,66 @@ defmodule Pleroma.HTTP do
|
||||||
`{:ok, %Tesla.Env{}}` or `{:error, error}`
|
`{:ok, %Tesla.Env{}}` or `{:error, error}`
|
||||||
|
|
||||||
"""
|
"""
|
||||||
def request(method, url, body \\ "", headers \\ [], options \\ []) do
|
@spec request(atom(), Request.url(), String.t(), Request.headers(), keyword()) ::
|
||||||
try do
|
{:ok, Env.t()} | {:error, any()}
|
||||||
options =
|
def request(method, url, body, headers, options) when is_binary(url) do
|
||||||
process_request_options(options)
|
uri = URI.parse(url)
|
||||||
|> process_sni_options(url)
|
adapter_opts = Connection.options(uri, options[:adapter] || [])
|
||||||
|
options = put_in(options[:adapter], adapter_opts)
|
||||||
|
params = options[:params] || []
|
||||||
|
request = build_request(method, headers, options, url, body, params)
|
||||||
|
|
||||||
params = Keyword.get(options, :params, [])
|
adapter = Application.get_env(:tesla, :adapter)
|
||||||
|
client = Tesla.client([Tesla.Middleware.FollowRedirects], adapter)
|
||||||
|
|
||||||
%{}
|
pid = Process.whereis(adapter_opts[:pool])
|
||||||
|
|
||||||
|
pool_alive? =
|
||||||
|
if adapter == Tesla.Adapter.Gun && pid do
|
||||||
|
Process.alive?(pid)
|
||||||
|
else
|
||||||
|
false
|
||||||
|
end
|
||||||
|
|
||||||
|
request_opts =
|
||||||
|
adapter_opts
|
||||||
|
|> Enum.into(%{})
|
||||||
|
|> Map.put(:env, Pleroma.Config.get([:env]))
|
||||||
|
|> Map.put(:pool_alive?, pool_alive?)
|
||||||
|
|
||||||
|
response = request(client, request, request_opts)
|
||||||
|
|
||||||
|
Connection.after_request(adapter_opts)
|
||||||
|
|
||||||
|
response
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec request(Client.t(), keyword(), map()) :: {:ok, Env.t()} | {:error, any()}
|
||||||
|
def request(%Client{} = client, request, %{env: :test}), do: request(client, request)
|
||||||
|
|
||||||
|
def request(%Client{} = client, request, %{body_as: :chunks}), do: request(client, request)
|
||||||
|
|
||||||
|
def request(%Client{} = client, request, %{pool_alive?: false}), do: request(client, request)
|
||||||
|
|
||||||
|
def request(%Client{} = client, request, %{pool: pool, timeout: timeout}) do
|
||||||
|
:poolboy.transaction(
|
||||||
|
pool,
|
||||||
|
&Pleroma.Pool.Request.execute(&1, client, request, timeout),
|
||||||
|
timeout
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec request(Client.t(), keyword()) :: {:ok, Env.t()} | {:error, any()}
|
||||||
|
def request(client, request), do: Tesla.request(client, request)
|
||||||
|
|
||||||
|
defp build_request(method, headers, options, url, body, params) do
|
||||||
|
Builder.new()
|
||||||
|> Builder.method(method)
|
|> Builder.method(method)
|
||||||
|> Builder.headers(headers)
|
|> Builder.headers(headers)
|
||||||
|> Builder.opts(options)
|
|> Builder.opts(options)
|
||||||
|> Builder.url(url)
|
|> Builder.url(url)
|
||||||
|> Builder.add_param(:body, :body, body)
|
|> Builder.add_param(:body, :body, body)
|
||||||
|> Builder.add_param(:query, :query, params)
|
|> Builder.add_param(:query, :query, params)
|
||||||
|> Enum.into([])
|
|> Builder.convert_to_keyword()
|
||||||
|> (&Tesla.request(Connection.new(options), &1)).()
|
|
||||||
rescue
|
|
||||||
e ->
|
|
||||||
{:error, e}
|
|
||||||
catch
|
|
||||||
:exit, e ->
|
|
||||||
{:error, e}
|
|
||||||
end
|
end
|
||||||
end
|
|
||||||
|
|
||||||
defp process_sni_options(options, nil), do: options
|
|
||||||
|
|
||||||
defp process_sni_options(options, url) do
|
|
||||||
uri = URI.parse(url)
|
|
||||||
host = uri.host |> to_charlist()
|
|
||||||
|
|
||||||
case uri.scheme do
|
|
||||||
"https" -> options ++ [ssl: [server_name_indication: host]]
|
|
||||||
_ -> options
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def process_request_options(options) do
|
|
||||||
Keyword.merge(Pleroma.HTTP.Connection.hackney_options([]), options)
|
|
||||||
end
|
|
||||||
|
|
||||||
@doc """
|
|
||||||
Performs GET request.
|
|
||||||
|
|
||||||
See `Pleroma.HTTP.request/5`
|
|
||||||
"""
|
|
||||||
def get(url, headers \\ [], options \\ []),
|
|
||||||
do: request(:get, url, "", headers, options)
|
|
||||||
|
|
||||||
@doc """
|
|
||||||
Performs POST request.
|
|
||||||
|
|
||||||
See `Pleroma.HTTP.request/5`
|
|
||||||
"""
|
|
||||||
def post(url, body, headers \\ [], options \\ []),
|
|
||||||
do: request(:post, url, body, headers, options)
|
|
||||||
end
|
end
|
||||||
|
|
23
lib/pleroma/http/request.ex
Normal file
|
@@ -0,0 +1,23 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.HTTP.Request do
|
||||||
|
@moduledoc """
|
||||||
|
Request struct.
|
||||||
|
"""
|
||||||
|
defstruct method: :get, url: "", query: [], headers: [], body: "", opts: []
|
||||||
|
|
||||||
|
@type method :: :head | :get | :delete | :trace | :options | :post | :put | :patch
|
||||||
|
@type url :: String.t()
|
||||||
|
@type headers :: [{String.t(), String.t()}]
|
||||||
|
|
||||||
|
@type t :: %__MODULE__{
|
||||||
|
method: method(),
|
||||||
|
url: url(),
|
||||||
|
query: keyword(),
|
||||||
|
headers: headers(),
|
||||||
|
body: String.t(),
|
||||||
|
opts: keyword()
|
||||||
|
}
|
||||||
|
end
|
|
@@ -7,136 +7,87 @@ defmodule Pleroma.HTTP.RequestBuilder do
|
||||||
Helper functions for building Tesla requests
|
Helper functions for building Tesla requests
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
alias Pleroma.HTTP.Request
|
||||||
|
alias Tesla.Multipart
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Specify the request method when building a request
|
Creates a new request
|
||||||
|
|
||||||
## Parameters
|
|
||||||
|
|
||||||
- request (Map) - Collected request options
|
|
||||||
- m (atom) - Request method
|
|
||||||
|
|
||||||
## Returns
|
|
||||||
|
|
||||||
Map
|
|
||||||
"""
|
"""
|
||||||
@spec method(map(), atom) :: map()
|
@spec new(Request.t()) :: Request.t()
|
||||||
def method(request, m) do
|
def new(%Request{} = request \\ %Request{}), do: request
|
||||||
Map.put_new(request, :method, m)
|
|
||||||
end
|
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Specify the request method when building a request
|
Specify the request method when building a request
|
||||||
|
|
||||||
## Parameters
|
|
||||||
|
|
||||||
- request (Map) - Collected request options
|
|
||||||
- u (String) - Request URL
|
|
||||||
|
|
||||||
## Returns
|
|
||||||
|
|
||||||
Map
|
|
||||||
"""
|
"""
|
||||||
@spec url(map(), String.t()) :: map()
|
@spec method(Request.t(), Request.method()) :: Request.t()
|
||||||
def url(request, u) do
|
def method(request, m), do: %{request | method: m}
|
||||||
Map.put_new(request, :url, u)
|
|
||||||
end
|
@doc """
|
||||||
|
Specify the request URL when building a request
|
||||||
|
"""
|
||||||
|
@spec url(Request.t(), Request.url()) :: Request.t()
|
||||||
|
def url(request, u), do: %{request | url: u}
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Add headers to the request
|
Add headers to the request
|
||||||
"""
|
"""
|
||||||
@spec headers(map(), list(tuple)) :: map()
|
@spec headers(Request.t(), Request.headers()) :: Request.t()
|
||||||
def headers(request, header_list) do
|
def headers(request, headers) do
|
||||||
header_list =
|
headers_list =
|
||||||
if Pleroma.Config.get([:http, :send_user_agent]) do
|
if Pleroma.Config.get([:http, :send_user_agent]) do
|
||||||
header_list ++ [{"User-Agent", Pleroma.Application.user_agent()}]
|
[{"user-agent", Pleroma.Application.user_agent()} | headers]
|
||||||
else
|
else
|
||||||
header_list
|
headers
|
||||||
end
|
end
|
||||||
|
|
||||||
Map.put_new(request, :headers, header_list)
|
%{request | headers: headers_list}
|
||||||
end
|
end
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Add custom, per-request middleware or adapter options to the request
|
Add custom, per-request middleware or adapter options to the request
|
||||||
"""
|
"""
|
||||||
@spec opts(map(), Keyword.t()) :: map()
|
@spec opts(Request.t(), keyword()) :: Request.t()
|
||||||
def opts(request, options) do
|
def opts(request, options), do: %{request | opts: options}
|
||||||
Map.put_new(request, :opts, options)
|
|
||||||
end
|
|
||||||
|
|
||||||
@doc """
|
@doc """
|
||||||
Add optional parameters to the request
|
Add optional parameters to the request
|
||||||
|
|
||||||
## Parameters
|
|
||||||
|
|
||||||
- request (Map) - Collected request options
|
|
||||||
- definitions (Map) - Map of parameter name to parameter location.
|
|
||||||
- options (KeywordList) - The provided optional parameters
|
|
||||||
|
|
||||||
## Returns
|
|
||||||
|
|
||||||
Map
|
|
||||||
"""
|
"""
|
||||||
@spec add_optional_params(map(), %{optional(atom) => atom}, keyword()) :: map()
|
@spec add_param(Request.t(), atom(), atom(), any()) :: Request.t()
|
||||||
def add_optional_params(request, _, []), do: request
|
def add_param(request, :query, :query, values), do: %{request | query: values}
|
||||||
|
|
||||||
def add_optional_params(request, definitions, [{key, value} | tail]) do
|
def add_param(request, :body, :body, value), do: %{request | body: value}
|
||||||
case definitions do
|
|
||||||
%{^key => location} ->
|
|
||||||
request
|
|
||||||
|> add_param(location, key, value)
|
|
||||||
|> add_optional_params(definitions, tail)
|
|
||||||
|
|
||||||
_ ->
|
|
||||||
add_optional_params(request, definitions, tail)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
@doc """
|
|
||||||
Add optional parameters to the request
|
|
||||||
|
|
||||||
## Parameters
|
|
||||||
|
|
||||||
- request (Map) - Collected request options
|
|
||||||
- location (atom) - Where to put the parameter
|
|
||||||
- key (atom) - The name of the parameter
|
|
||||||
- value (any) - The value of the parameter
|
|
||||||
|
|
||||||
## Returns
|
|
||||||
|
|
||||||
Map
|
|
||||||
"""
|
|
||||||
@spec add_param(map(), atom, atom, any()) :: map()
|
|
||||||
def add_param(request, :query, :query, values), do: Map.put(request, :query, values)
|
|
||||||
|
|
||||||
def add_param(request, :body, :body, value), do: Map.put(request, :body, value)
|
|
||||||
|
|
||||||
def add_param(request, :body, key, value) do
|
def add_param(request, :body, key, value) do
|
||||||
request
|
request
|
||||||
|> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
|
|> Map.put(:body, Multipart.new())
|
||||||
|> Map.update!(
|
|> Map.update!(
|
||||||
:body,
|
:body,
|
||||||
&Tesla.Multipart.add_field(
|
&Multipart.add_field(
|
||||||
&1,
|
&1,
|
||||||
key,
|
key,
|
||||||
Jason.encode!(value),
|
Jason.encode!(value),
|
||||||
headers: [{:"Content-Type", "application/json"}]
|
headers: [{"content-type", "application/json"}]
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
def add_param(request, :file, name, path) do
|
def add_param(request, :file, name, path) do
|
||||||
request
|
request
|
||||||
|> Map.put_new_lazy(:body, &Tesla.Multipart.new/0)
|
|> Map.put(:body, Multipart.new())
|
||||||
|> Map.update!(:body, &Tesla.Multipart.add_file(&1, path, name: name))
|
|> Map.update!(:body, &Multipart.add_file(&1, path, name: name))
|
||||||
end
|
end
|
||||||
|
|
||||||
def add_param(request, :form, name, value) do
|
def add_param(request, :form, name, value) do
|
||||||
request
|
Map.update(request, :body, %{name => value}, &Map.put(&1, name, value))
|
||||||
|> Map.update(:body, %{name => value}, &Map.put(&1, name, value))
|
|
||||||
end
|
end
|
||||||
|
|
||||||
def add_param(request, location, key, value) do
|
def add_param(request, location, key, value) do
|
||||||
Map.update(request, location, [{key, value}], &(&1 ++ [{key, value}]))
|
Map.update(request, location, [{key, value}], &(&1 ++ [{key, value}]))
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def convert_to_keyword(request) do
|
||||||
|
request
|
||||||
|
|> Map.from_struct()
|
||||||
|
|> Enum.into([])
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@@ -32,6 +32,18 @@ def get_actor(%{"actor" => nil, "attributedTo" => actor}) when not is_nil(actor)
|
||||||
get_actor(%{"actor" => actor})
|
get_actor(%{"actor" => actor})
|
||||||
end
|
end
|
||||||
|
|
||||||
|
def get_object(%{"object" => id}) when is_binary(id) do
|
||||||
|
id
|
||||||
|
end
|
||||||
|
|
||||||
|
def get_object(%{"object" => %{"id" => id}}) when is_binary(id) do
|
||||||
|
id
|
||||||
|
end
|
||||||
|
|
||||||
|
def get_object(_) do
|
||||||
|
nil
|
||||||
|
end
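# Added for clarity, not part of the original change: these clauses normalise the "object"
# field to a bare id (the URL below is only an example):
#   get_object(%{"object" => "https://example.com/note/1"})            #=> "https://example.com/note/1"
#   get_object(%{"object" => %{"id" => "https://example.com/note/1"}}) #=> "https://example.com/note/1"
#   get_object(%{"object" => nil})                                     #=> nil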
|
||||||
|
|
||||||
# TODO: We explicitly allow 'tag' URIs through, due to references to legacy OStatus
|
# TODO: We explicitly allow 'tag' URIs through, due to references to legacy OStatus
|
||||||
# objects being present in the test suite environment. Once these objects are
|
# objects being present in the test suite environment. Once these objects are
|
||||||
# removed, please also remove this.
|
# removed, please also remove this.
|
||||||
|
|
|
@@ -141,7 +141,7 @@ defp make_signature(id, date) do
|
||||||
date: date
|
date: date
|
||||||
})
|
})
|
||||||
|
|
||||||
[{:Signature, signature}]
|
[{"signature", signature}]
|
||||||
end
|
end
|
||||||
|
|
||||||
defp sign_fetch(headers, id, date) do
|
defp sign_fetch(headers, id, date) do
|
||||||
|
@@ -154,7 +154,7 @@ defp sign_fetch(headers, id, date) do
|
||||||
|
|
||||||
defp maybe_date_fetch(headers, date) do
|
defp maybe_date_fetch(headers, date) do
|
||||||
if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
|
if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
|
||||||
headers ++ [{:Date, date}]
|
headers ++ [{"date", date}]
|
||||||
else
|
else
|
||||||
headers
|
headers
|
||||||
end
|
end
|
||||||
|
@@ -166,7 +166,7 @@ def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do
|
||||||
date = Pleroma.Signature.signed_date()
|
date = Pleroma.Signature.signed_date()
|
||||||
|
|
||||||
headers =
|
headers =
|
||||||
[{:Accept, "application/activity+json"}]
|
[{"accept", "application/activity+json"}]
|
||||||
|> maybe_date_fetch(date)
|
|> maybe_date_fetch(date)
|
||||||
|> sign_fetch(id, date)
|
|> sign_fetch(id, date)
|
||||||
|
|
||||||
|
|
28
lib/pleroma/otp_version.ex
Normal file
|
@@ -0,0 +1,28 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.OTPVersion do
|
||||||
|
@spec version() :: String.t() | nil
|
||||||
|
def version do
|
||||||
|
# OTP Version https://erlang.org/doc/system_principles/versions.html#otp-version
|
||||||
|
[
|
||||||
|
Path.join(:code.root_dir(), "OTP_VERSION"),
|
||||||
|
Path.join([:code.root_dir(), "releases", :erlang.system_info(:otp_release), "OTP_VERSION"])
|
||||||
|
]
|
||||||
|
|> get_version_from_files()
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec get_version_from_files([Path.t()]) :: String.t() | nil
|
||||||
|
def get_version_from_files([]), do: nil
|
||||||
|
|
||||||
|
def get_version_from_files([path | paths]) do
|
||||||
|
if File.exists?(path) do
|
||||||
|
path
|
||||||
|
|> File.read!()
|
||||||
|
|> String.replace(~r/\r|\n|\s/, "")
|
||||||
|
else
|
||||||
|
get_version_from_files(paths)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
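For reference, the OTP_VERSION file read above contains the bare release string plus a trailing newline, which is why the whitespace is stripped before the value is returned. An illustrative call (the path and version shown are examples only):

    "/usr/lib/erlang/releases/22/OTP_VERSION" |> File.read!() |> String.replace(~r/\r|\n|\s/, "")
    #=> "22.3.4"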
|
283
lib/pleroma/pool/connections.ex
Normal file
|
@@ -0,0 +1,283 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Pool.Connections do
|
||||||
|
use GenServer
|
||||||
|
|
||||||
|
alias Pleroma.Config
|
||||||
|
alias Pleroma.Gun
|
||||||
|
|
||||||
|
require Logger
|
||||||
|
|
||||||
|
@type domain :: String.t()
|
||||||
|
@type conn :: Pleroma.Gun.Conn.t()
|
||||||
|
|
||||||
|
@type t :: %__MODULE__{
|
||||||
|
conns: %{domain() => conn()},
|
||||||
|
opts: keyword()
|
||||||
|
}
|
||||||
|
|
||||||
|
defstruct conns: %{}, opts: []
|
||||||
|
|
||||||
|
@spec start_link({atom(), keyword()}) :: {:ok, pid()}
|
||||||
|
def start_link({name, opts}) do
|
||||||
|
GenServer.start_link(__MODULE__, opts, name: name)
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def init(opts), do: {:ok, %__MODULE__{conns: %{}, opts: opts}}
|
||||||
|
|
||||||
|
@spec checkin(String.t() | URI.t(), atom()) :: pid() | nil
|
||||||
|
def checkin(url, name)
|
||||||
|
def checkin(url, name) when is_binary(url), do: checkin(URI.parse(url), name)
|
||||||
|
|
||||||
|
def checkin(%URI{} = uri, name) do
|
||||||
|
timeout = Config.get([:connections_pool, :checkin_timeout], 250)
|
||||||
|
|
||||||
|
GenServer.call(name, {:checkin, uri}, timeout)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec alive?(atom()) :: boolean()
|
||||||
|
def alive?(name) do
|
||||||
|
if pid = Process.whereis(name) do
|
||||||
|
Process.alive?(pid)
|
||||||
|
else
|
||||||
|
false
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec get_state(atom()) :: t()
|
||||||
|
def get_state(name) do
|
||||||
|
GenServer.call(name, :state)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec count(atom()) :: pos_integer()
|
||||||
|
def count(name) do
|
||||||
|
GenServer.call(name, :count)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec get_unused_conns(atom()) :: [{domain(), conn()}]
|
||||||
|
def get_unused_conns(name) do
|
||||||
|
GenServer.call(name, :unused_conns)
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec checkout(pid(), pid(), atom()) :: :ok
|
||||||
|
def checkout(conn, pid, name) do
|
||||||
|
GenServer.cast(name, {:checkout, conn, pid})
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec add_conn(atom(), String.t(), Pleroma.Gun.Conn.t()) :: :ok
|
||||||
|
def add_conn(name, key, conn) do
|
||||||
|
GenServer.cast(name, {:add_conn, key, conn})
|
||||||
|
end
|
||||||
|
|
||||||
|
@spec remove_conn(atom(), String.t()) :: :ok
|
||||||
|
def remove_conn(name, key) do
|
||||||
|
GenServer.cast(name, {:remove_conn, key})
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_cast({:add_conn, key, conn}, state) do
|
||||||
|
state = put_in(state.conns[key], conn)
|
||||||
|
|
||||||
|
Process.monitor(conn.conn)
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_cast({:checkout, conn_pid, pid}, state) do
|
||||||
|
state =
|
||||||
|
with true <- Process.alive?(conn_pid),
|
||||||
|
{key, conn} <- find_conn(state.conns, conn_pid),
|
||||||
|
used_by <- List.keydelete(conn.used_by, pid, 0) do
|
||||||
|
conn_state = if used_by == [], do: :idle, else: conn.conn_state
|
||||||
|
|
||||||
|
put_in(state.conns[key], %{conn | conn_state: conn_state, used_by: used_by})
|
||||||
|
else
|
||||||
|
false ->
|
||||||
|
Logger.debug("checkout for closed conn #{inspect(conn_pid)}")
|
||||||
|
state
|
||||||
|
|
||||||
|
nil ->
|
||||||
|
Logger.debug("checkout for alive conn #{inspect(conn_pid)}, but is not in state")
|
||||||
|
state
|
||||||
|
end
|
||||||
|
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_cast({:remove_conn, key}, state) do
|
||||||
|
state = put_in(state.conns, Map.delete(state.conns, key))
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_call({:checkin, uri}, from, state) do
|
||||||
|
key = "#{uri.scheme}:#{uri.host}:#{uri.port}"
|
||||||
|
|
||||||
|
case state.conns[key] do
|
||||||
|
%{conn: pid, gun_state: :up} = conn ->
|
||||||
|
time = :os.system_time(:second)
|
||||||
|
last_reference = time - conn.last_reference
|
||||||
|
crf = crf(last_reference, 100, conn.crf)
|
||||||
|
|
||||||
|
state =
|
||||||
|
put_in(state.conns[key], %{
|
||||||
|
conn
|
||||||
|
| last_reference: time,
|
||||||
|
crf: crf,
|
||||||
|
conn_state: :active,
|
||||||
|
used_by: [from | conn.used_by]
|
||||||
|
})
|
||||||
|
|
||||||
|
{:reply, pid, state}
|
||||||
|
|
||||||
|
%{gun_state: :down} ->
|
||||||
|
{:reply, nil, state}
|
||||||
|
|
||||||
|
nil ->
|
||||||
|
{:reply, nil, state}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_call(:state, _from, state), do: {:reply, state, state}
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_call(:count, _from, state) do
|
||||||
|
{:reply, Enum.count(state.conns), state}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_call(:unused_conns, _from, state) do
|
||||||
|
unused_conns =
|
||||||
|
state.conns
|
||||||
|
|> Enum.filter(&filter_conns/1)
|
||||||
|
|> Enum.sort(&sort_conns/2)
|
||||||
|
|
||||||
|
{:reply, unused_conns, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp filter_conns({_, %{conn_state: :idle, used_by: []}}), do: true
|
||||||
|
defp filter_conns(_), do: false
|
||||||
|
|
||||||
|
defp sort_conns({_, c1}, {_, c2}) do
|
||||||
|
c1.crf <= c2.crf and c1.last_reference <= c2.last_reference
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_info({:gun_up, conn_pid, _protocol}, state) do
|
||||||
|
%{origin_host: host, origin_scheme: scheme, origin_port: port} = Gun.info(conn_pid)
|
||||||
|
|
||||||
|
host =
|
||||||
|
case :inet.ntoa(host) do
|
||||||
|
{:error, :einval} -> host
|
||||||
|
ip -> ip
|
||||||
|
end
|
||||||
|
|
||||||
|
key = "#{scheme}:#{host}:#{port}"
|
||||||
|
|
||||||
|
state =
|
||||||
|
with {key, conn} <- find_conn(state.conns, conn_pid, key),
|
||||||
|
{true, key} <- {Process.alive?(conn_pid), key} do
|
||||||
|
put_in(state.conns[key], %{
|
||||||
|
conn
|
||||||
|
| gun_state: :up,
|
||||||
|
conn_state: :active,
|
||||||
|
retries: 0
|
||||||
|
})
|
||||||
|
else
|
||||||
|
{false, key} ->
|
||||||
|
put_in(
|
||||||
|
state.conns,
|
||||||
|
Map.delete(state.conns, key)
|
||||||
|
)
|
||||||
|
|
||||||
|
nil ->
|
||||||
|
:ok = Gun.close(conn_pid)
|
||||||
|
|
||||||
|
state
|
||||||
|
end
|
||||||
|
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_info({:gun_down, conn_pid, _protocol, _reason, _killed}, state) do
|
||||||
|
retries = Config.get([:connections_pool, :retry], 1)
|
||||||
|
# we can't get info on this pid, because the pid is dead
|
||||||
|
state =
|
||||||
|
with {key, conn} <- find_conn(state.conns, conn_pid),
|
||||||
|
{true, key} <- {Process.alive?(conn_pid), key} do
|
||||||
|
if conn.retries == retries do
|
||||||
|
:ok = Gun.close(conn.conn)
|
||||||
|
|
||||||
|
put_in(
|
||||||
|
state.conns,
|
||||||
|
Map.delete(state.conns, key)
|
||||||
|
)
|
||||||
|
else
|
||||||
|
put_in(state.conns[key], %{
|
||||||
|
conn
|
||||||
|
| gun_state: :down,
|
||||||
|
retries: conn.retries + 1
|
||||||
|
})
|
||||||
|
end
|
||||||
|
else
|
||||||
|
{false, key} ->
|
||||||
|
put_in(
|
||||||
|
state.conns,
|
||||||
|
Map.delete(state.conns, key)
|
||||||
|
)
|
||||||
|
|
||||||
|
nil ->
|
||||||
|
Logger.debug(":gun_down for conn which isn't found in state")
|
||||||
|
|
||||||
|
state
|
||||||
|
end
|
||||||
|
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_info({:DOWN, _ref, :process, conn_pid, reason}, state) do
|
||||||
|
Logger.debug("received DOWM message for #{inspect(conn_pid)} reason -> #{inspect(reason)}")
|
||||||
|
|
||||||
|
state =
|
||||||
|
with {key, conn} <- find_conn(state.conns, conn_pid) do
|
||||||
|
Enum.each(conn.used_by, fn {pid, _ref} ->
|
||||||
|
Process.exit(pid, reason)
|
||||||
|
end)
|
||||||
|
|
||||||
|
put_in(
|
||||||
|
state.conns,
|
||||||
|
Map.delete(state.conns, key)
|
||||||
|
)
|
||||||
|
else
|
||||||
|
nil ->
|
||||||
|
Logger.debug(":DOWN for conn which isn't found in state")
|
||||||
|
|
||||||
|
state
|
||||||
|
end
|
||||||
|
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp find_conn(conns, conn_pid) do
|
||||||
|
Enum.find(conns, fn {_key, conn} ->
|
||||||
|
conn.conn == conn_pid
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp find_conn(conns, conn_pid, conn_key) do
|
||||||
|
Enum.find(conns, fn {key, conn} ->
|
||||||
|
key == conn_key and conn.conn == conn_pid
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
def crf(current, steps, crf) do
|
||||||
|
1 + :math.pow(0.5, current / steps) * crf
|
||||||
|
end
|
||||||
|
end
|
22
lib/pleroma/pool/pool.ex
Normal file
22
lib/pleroma/pool/pool.ex
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Pool do
|
||||||
|
def child_spec(opts) do
|
||||||
|
poolboy_opts =
|
||||||
|
opts
|
||||||
|
|> Keyword.put(:worker_module, Pleroma.Pool.Request)
|
||||||
|
|> Keyword.put(:name, {:local, opts[:name]})
|
||||||
|
|> Keyword.put(:size, opts[:size])
|
||||||
|
|> Keyword.put(:max_overflow, opts[:max_overflow])
|
||||||
|
|
||||||
|
%{
|
||||||
|
id: opts[:id] || {__MODULE__, make_ref()},
|
||||||
|
start: {:poolboy, :start_link, [poolboy_opts, [name: opts[:name]]]},
|
||||||
|
restart: :permanent,
|
||||||
|
shutdown: 5000,
|
||||||
|
type: :worker
|
||||||
|
}
|
||||||
|
end
|
||||||
|
end
|
65
lib/pleroma/pool/request.ex
Normal file
65
lib/pleroma/pool/request.ex
Normal file
|
@ -0,0 +1,65 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Pool.Request do
|
||||||
|
use GenServer
|
||||||
|
|
||||||
|
require Logger
|
||||||
|
|
||||||
|
def start_link(args) do
|
||||||
|
GenServer.start_link(__MODULE__, args)
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def init(_), do: {:ok, []}
|
||||||
|
|
||||||
|
@spec execute(pid() | atom(), Tesla.Client.t(), keyword(), pos_integer()) ::
|
||||||
|
{:ok, Tesla.Env.t()} | {:error, any()}
|
||||||
|
def execute(pid, client, request, timeout) do
|
||||||
|
GenServer.call(pid, {:execute, client, request}, timeout)
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_call({:execute, client, request}, _from, state) do
|
||||||
|
response = Pleroma.HTTP.request(client, request)
|
||||||
|
|
||||||
|
{:reply, response, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_info({:gun_data, _conn, _stream, _, _}, state) do
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_info({:gun_up, _conn, _protocol}, state) do
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_info({:gun_down, _conn, _protocol, _reason, _killed}, state) do
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_info({:gun_error, _conn, _stream, _error}, state) do
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_info({:gun_push, _conn, _stream, _new_stream, _method, _uri, _headers}, state) do
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_info({:gun_response, _conn, _stream, _, _status, _headers}, state) do
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def handle_info(msg, state) do
|
||||||
|
Logger.warn("Received unexpected message #{inspect(__MODULE__)} #{inspect(msg)}")
|
||||||
|
{:noreply, state}
|
||||||
|
end
|
||||||
|
end
|
42
lib/pleroma/pool/supervisor.ex
Normal file
42
lib/pleroma/pool/supervisor.ex
Normal file
|
@ -0,0 +1,42 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Pool.Supervisor do
|
||||||
|
use Supervisor
|
||||||
|
|
||||||
|
alias Pleroma.Config
|
||||||
|
alias Pleroma.Pool
|
||||||
|
|
||||||
|
def start_link(args) do
|
||||||
|
Supervisor.start_link(__MODULE__, args, name: __MODULE__)
|
||||||
|
end
|
||||||
|
|
||||||
|
def init(_) do
|
||||||
|
conns_child = %{
|
||||||
|
id: Pool.Connections,
|
||||||
|
start:
|
||||||
|
{Pool.Connections, :start_link, [{:gun_connections, Config.get([:connections_pool])}]}
|
||||||
|
}
|
||||||
|
|
||||||
|
Supervisor.init([conns_child | pools()], strategy: :one_for_one)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp pools do
|
||||||
|
pools = Config.get(:pools)
|
||||||
|
|
||||||
|
pools =
|
||||||
|
if Config.get([Pleroma.Upload, :proxy_remote]) == false do
|
||||||
|
Keyword.delete(pools, :upload)
|
||||||
|
else
|
||||||
|
pools
|
||||||
|
end
|
||||||
|
|
||||||
|
for {pool_name, pool_opts} <- pools do
|
||||||
|
pool_opts
|
||||||
|
|> Keyword.put(:id, {Pool, pool_name})
|
||||||
|
|> Keyword.put(:name, pool_name)
|
||||||
|
|> Pool.child_spec()
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
|
@ -3,19 +3,23 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.ReverseProxy.Client do
|
defmodule Pleroma.ReverseProxy.Client do
|
||||||
@callback request(atom(), String.t(), [tuple()], String.t(), list()) ::
|
@type status :: pos_integer()
|
||||||
{:ok, pos_integer(), [tuple()], reference() | map()}
|
@type header_name :: String.t()
|
||||||
| {:ok, pos_integer(), [tuple()]}
|
@type header_value :: String.t()
|
||||||
|
@type headers :: [{header_name(), header_value()}]
|
||||||
|
|
||||||
|
@callback request(atom(), String.t(), headers(), String.t(), list()) ::
|
||||||
|
{:ok, status(), headers(), reference() | map()}
|
||||||
|
| {:ok, status(), headers()}
|
||||||
| {:ok, reference()}
|
| {:ok, reference()}
|
||||||
| {:error, term()}
|
| {:error, term()}
|
||||||
|
|
||||||
@callback stream_body(reference() | pid() | map()) ::
|
@callback stream_body(map()) :: {:ok, binary(), map()} | :done | {:error, atom() | String.t()}
|
||||||
{:ok, binary()} | :done | {:error, String.t()}
|
|
||||||
|
|
||||||
@callback close(reference() | pid() | map()) :: :ok
|
@callback close(reference() | pid() | map()) :: :ok
|
||||||
|
|
||||||
def request(method, url, headers, "", opts \\ []) do
|
def request(method, url, headers, body \\ "", opts \\ []) do
|
||||||
client().request(method, url, headers, "", opts)
|
client().request(method, url, headers, body, opts)
|
||||||
end
|
end
|
||||||
|
|
||||||
def stream_body(ref), do: client().stream_body(ref)
|
def stream_body(ref), do: client().stream_body(ref)
|
||||||
|
@ -23,6 +27,12 @@ def stream_body(ref), do: client().stream_body(ref)
|
||||||
def close(ref), do: client().close(ref)
|
def close(ref), do: client().close(ref)
|
||||||
|
|
||||||
defp client do
|
defp client do
|
||||||
Pleroma.Config.get([Pleroma.ReverseProxy.Client], :hackney)
|
:tesla
|
||||||
|
|> Application.get_env(:adapter)
|
||||||
|
|> client()
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defp client(Tesla.Adapter.Hackney), do: Pleroma.ReverseProxy.Client.Hackney
|
||||||
|
defp client(Tesla.Adapter.Gun), do: Pleroma.ReverseProxy.Client.Tesla
|
||||||
|
defp client(_), do: Pleroma.Config.get!(Pleroma.ReverseProxy.Client)
|
||||||
end
|
end
|
||||||
|
|
24
lib/pleroma/reverse_proxy/client/hackney.ex
Normal file
24
lib/pleroma/reverse_proxy/client/hackney.ex
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.ReverseProxy.Client.Hackney do
|
||||||
|
@behaviour Pleroma.ReverseProxy.Client
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def request(method, url, headers, body, opts \\ []) do
|
||||||
|
:hackney.request(method, url, headers, body, opts)
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def stream_body(ref) do
|
||||||
|
case :hackney.stream_body(ref) do
|
||||||
|
:done -> :done
|
||||||
|
{:ok, data} -> {:ok, data, ref}
|
||||||
|
{:error, error} -> {:error, error}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def close(ref), do: :hackney.close(ref)
|
||||||
|
end
|
90
lib/pleroma/reverse_proxy/client/tesla.ex
Normal file
90
lib/pleroma/reverse_proxy/client/tesla.ex
Normal file
|
@ -0,0 +1,90 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.ReverseProxy.Client.Tesla do
|
||||||
|
@behaviour Pleroma.ReverseProxy.Client
|
||||||
|
|
||||||
|
@type headers() :: [{String.t(), String.t()}]
|
||||||
|
@type status() :: pos_integer()
|
||||||
|
|
||||||
|
@spec request(atom(), String.t(), headers(), String.t(), keyword()) ::
|
||||||
|
{:ok, status(), headers}
|
||||||
|
| {:ok, status(), headers, map()}
|
||||||
|
| {:error, atom() | String.t()}
|
||||||
|
| no_return()
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
def request(method, url, headers, body, opts \\ []) do
|
||||||
|
check_adapter()
|
||||||
|
|
||||||
|
opts = Keyword.put(opts, :body_as, :chunks)
|
||||||
|
|
||||||
|
with {:ok, response} <-
|
||||||
|
Pleroma.HTTP.request(
|
||||||
|
method,
|
||||||
|
url,
|
||||||
|
body,
|
||||||
|
headers,
|
||||||
|
Keyword.put(opts, :adapter, opts)
|
||||||
|
) do
|
||||||
|
if is_map(response.body) and method != :head do
|
||||||
|
{:ok, response.status, response.headers, response.body}
|
||||||
|
else
|
||||||
|
{:ok, response.status, response.headers}
|
||||||
|
end
|
||||||
|
else
|
||||||
|
{:error, error} -> {:error, error}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
@spec stream_body(map()) ::
|
||||||
|
{:ok, binary(), map()} | {:error, atom() | String.t()} | :done | no_return()
|
||||||
|
def stream_body(%{pid: pid, opts: opts, fin: true}) do
|
||||||
|
# if connection was reused, but in tesla were redirects,
|
||||||
|
# tesla returns new opened connection, which must be closed manually
|
||||||
|
if opts[:old_conn], do: Tesla.Adapter.Gun.close(pid)
|
||||||
|
# if there were redirects we need to checkout old conn
|
||||||
|
conn = opts[:old_conn] || opts[:conn]
|
||||||
|
|
||||||
|
if conn, do: :ok = Pleroma.Pool.Connections.checkout(conn, self(), :gun_connections)
|
||||||
|
|
||||||
|
:done
|
||||||
|
end
|
||||||
|
|
||||||
|
def stream_body(client) do
|
||||||
|
case read_chunk!(client) do
|
||||||
|
{:fin, body} ->
|
||||||
|
{:ok, body, Map.put(client, :fin, true)}
|
||||||
|
|
||||||
|
{:nofin, part} ->
|
||||||
|
{:ok, part, client}
|
||||||
|
|
||||||
|
{:error, error} ->
|
||||||
|
{:error, error}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp read_chunk!(%{pid: pid, stream: stream, opts: opts}) do
|
||||||
|
adapter = check_adapter()
|
||||||
|
adapter.read_chunk(pid, stream, opts)
|
||||||
|
end
|
||||||
|
|
||||||
|
@impl true
|
||||||
|
@spec close(map) :: :ok | no_return()
|
||||||
|
def close(%{pid: pid}) do
|
||||||
|
adapter = check_adapter()
|
||||||
|
adapter.close(pid)
|
||||||
|
end
|
||||||
|
|
||||||
|
defp check_adapter do
|
||||||
|
adapter = Application.get_env(:tesla, :adapter)
|
||||||
|
|
||||||
|
unless adapter == Tesla.Adapter.Gun do
|
||||||
|
raise "#{adapter} doesn't support reading body in chunks"
|
||||||
|
end
|
||||||
|
|
||||||
|
adapter
|
||||||
|
end
|
||||||
|
end
|
|
@ -3,8 +3,6 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.ReverseProxy do
|
defmodule Pleroma.ReverseProxy do
|
||||||
alias Pleroma.HTTP
|
|
||||||
|
|
||||||
@keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since) ++
|
@keep_req_headers ~w(accept user-agent accept-encoding cache-control if-modified-since) ++
|
||||||
~w(if-unmodified-since if-none-match if-range range)
|
~w(if-unmodified-since if-none-match if-range range)
|
||||||
@resp_cache_headers ~w(etag date last-modified)
|
@resp_cache_headers ~w(etag date last-modified)
|
||||||
|
@ -58,10 +56,10 @@ defmodule Pleroma.ReverseProxy do
|
||||||
|
|
||||||
* `req_headers`, `resp_headers` additional headers.
|
* `req_headers`, `resp_headers` additional headers.
|
||||||
|
|
||||||
* `http`: options for [hackney](https://github.com/benoitc/hackney).
|
* `http`: options for [hackney](https://github.com/benoitc/hackney) or [gun](https://github.com/ninenines/gun).
|
||||||
|
|
||||||
"""
|
"""
|
||||||
@default_hackney_options [pool: :media]
|
@default_options [pool: :media]
|
||||||
|
|
||||||
@inline_content_types [
|
@inline_content_types [
|
||||||
"image/gif",
|
"image/gif",
|
||||||
|
@ -94,11 +92,7 @@ defmodule Pleroma.ReverseProxy do
|
||||||
def call(_conn, _url, _opts \\ [])
|
def call(_conn, _url, _opts \\ [])
|
||||||
|
|
||||||
def call(conn = %{method: method}, url, opts) when method in @methods do
|
def call(conn = %{method: method}, url, opts) when method in @methods do
|
||||||
hackney_opts =
|
client_opts = Keyword.merge(@default_options, Keyword.get(opts, :http, []))
|
||||||
Pleroma.HTTP.Connection.hackney_options([])
|
|
||||||
|> Keyword.merge(@default_hackney_options)
|
|
||||||
|> Keyword.merge(Keyword.get(opts, :http, []))
|
|
||||||
|> HTTP.process_request_options()
|
|
||||||
|
|
||||||
req_headers = build_req_headers(conn.req_headers, opts)
|
req_headers = build_req_headers(conn.req_headers, opts)
|
||||||
|
|
||||||
|
@ -110,7 +104,7 @@ def call(conn = %{method: method}, url, opts) when method in @methods do
|
||||||
end
|
end
|
||||||
|
|
||||||
with {:ok, nil} <- Cachex.get(:failed_proxy_url_cache, url),
|
with {:ok, nil} <- Cachex.get(:failed_proxy_url_cache, url),
|
||||||
{:ok, code, headers, client} <- request(method, url, req_headers, hackney_opts),
|
{:ok, code, headers, client} <- request(method, url, req_headers, client_opts),
|
||||||
:ok <-
|
:ok <-
|
||||||
header_length_constraint(
|
header_length_constraint(
|
||||||
headers,
|
headers,
|
||||||
|
@ -156,11 +150,11 @@ def call(conn, _, _) do
|
||||||
|> halt()
|
|> halt()
|
||||||
end
|
end
|
||||||
|
|
||||||
defp request(method, url, headers, hackney_opts) do
|
defp request(method, url, headers, opts) do
|
||||||
Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
|
Logger.debug("#{__MODULE__} #{method} #{url} #{inspect(headers)}")
|
||||||
method = method |> String.downcase() |> String.to_existing_atom()
|
method = method |> String.downcase() |> String.to_existing_atom()
|
||||||
|
|
||||||
case client().request(method, url, headers, "", hackney_opts) do
|
case client().request(method, url, headers, "", opts) do
|
||||||
{:ok, code, headers, client} when code in @valid_resp_codes ->
|
{:ok, code, headers, client} when code in @valid_resp_codes ->
|
||||||
{:ok, code, downcase_headers(headers), client}
|
{:ok, code, downcase_headers(headers), client}
|
||||||
|
|
||||||
|
@ -210,7 +204,7 @@ defp chunk_reply(conn, client, opts, sent_so_far, duration) do
|
||||||
duration,
|
duration,
|
||||||
Keyword.get(opts, :max_read_duration, @max_read_duration)
|
Keyword.get(opts, :max_read_duration, @max_read_duration)
|
||||||
),
|
),
|
||||||
{:ok, data} <- client().stream_body(client),
|
{:ok, data, client} <- client().stream_body(client),
|
||||||
{:ok, duration} <- increase_read_duration(duration),
|
{:ok, duration} <- increase_read_duration(duration),
|
||||||
sent_so_far = sent_so_far + byte_size(data),
|
sent_so_far = sent_so_far + byte_size(data),
|
||||||
:ok <-
|
:ok <-
|
||||||
|
|
|
@ -305,16 +305,12 @@ def banner_url(user, options \\ []) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def profile_url(%User{source_data: %{"url" => url}}), do: url
|
|
||||||
def profile_url(%User{ap_id: ap_id}), do: ap_id
|
|
||||||
def profile_url(_), do: nil
|
|
||||||
|
|
||||||
def ap_id(%User{nickname: nickname}), do: "#{Web.base_url()}/users/#{nickname}"
|
def ap_id(%User{nickname: nickname}), do: "#{Web.base_url()}/users/#{nickname}"
|
||||||
|
|
||||||
def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
|
def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
|
||||||
def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"
|
def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"
|
||||||
|
|
||||||
@spec ap_following(User.t()) :: Sring.t()
|
@spec ap_following(User.t()) :: String.t()
|
||||||
def ap_following(%User{following_address: fa}) when is_binary(fa), do: fa
|
def ap_following(%User{following_address: fa}) when is_binary(fa), do: fa
|
||||||
def ap_following(%User{} = user), do: "#{ap_id(user)}/following"
|
def ap_following(%User{} = user), do: "#{ap_id(user)}/following"
|
||||||
|
|
||||||
|
|
|
@ -125,6 +125,21 @@ def increase_poll_votes_if_vote(%{
|
||||||
|
|
||||||
def increase_poll_votes_if_vote(_create_data), do: :noop
|
def increase_poll_votes_if_vote(_create_data), do: :noop
|
||||||
|
|
||||||
|
@spec persist(map(), keyword()) :: {:ok, Activity.t() | Object.t()}
|
||||||
|
def persist(object, meta) do
|
||||||
|
with local <- Keyword.fetch!(meta, :local),
|
||||||
|
{recipients, _, _} <- get_recipients(object),
|
||||||
|
{:ok, activity} <-
|
||||||
|
Repo.insert(%Activity{
|
||||||
|
data: object,
|
||||||
|
local: local,
|
||||||
|
recipients: recipients,
|
||||||
|
actor: object["actor"]
|
||||||
|
}) do
|
||||||
|
{:ok, activity, meta}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
@spec insert(map(), boolean(), boolean(), boolean()) :: {:ok, Activity.t()} | {:error, any()}
|
@spec insert(map(), boolean(), boolean(), boolean()) :: {:ok, Activity.t()} | {:error, any()}
|
||||||
def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when is_map(map) do
|
def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when is_map(map) do
|
||||||
with nil <- Activity.normalize(map),
|
with nil <- Activity.normalize(map),
|
||||||
|
@ -1379,6 +1394,18 @@ def upload(file, opts \\ []) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@spec get_actor_url(any()) :: binary() | nil
|
||||||
|
defp get_actor_url(url) when is_binary(url), do: url
|
||||||
|
defp get_actor_url(%{"href" => href}) when is_binary(href), do: href
|
||||||
|
|
||||||
|
defp get_actor_url(url) when is_list(url) do
|
||||||
|
url
|
||||||
|
|> List.first()
|
||||||
|
|> get_actor_url()
|
||||||
|
end
|
||||||
|
|
||||||
|
defp get_actor_url(_url), do: nil
|
||||||
|
|
||||||
defp object_to_user_data(data) do
|
defp object_to_user_data(data) do
|
||||||
avatar =
|
avatar =
|
||||||
data["icon"]["url"] &&
|
data["icon"]["url"] &&
|
||||||
|
@ -1408,6 +1435,7 @@ defp object_to_user_data(data) do
|
||||||
|
|
||||||
user_data = %{
|
user_data = %{
|
||||||
ap_id: data["id"],
|
ap_id: data["id"],
|
||||||
|
uri: get_actor_url(data["url"]),
|
||||||
ap_enabled: true,
|
ap_enabled: true,
|
||||||
source_data: data,
|
source_data: data,
|
||||||
banner: banner,
|
banner: banner,
|
||||||
|
|
43
lib/pleroma/web/activity_pub/builder.ex
Normal file
43
lib/pleroma/web/activity_pub/builder.ex
Normal file
|
@ -0,0 +1,43 @@
|
||||||
|
defmodule Pleroma.Web.ActivityPub.Builder do
|
||||||
|
@moduledoc """
|
||||||
|
This module builds the objects. Meant to be used for creating local objects.
|
||||||
|
|
||||||
|
This module encodes our addressing policies and general shape of our objects.
|
||||||
|
"""
|
||||||
|
|
||||||
|
alias Pleroma.Object
|
||||||
|
alias Pleroma.User
|
||||||
|
alias Pleroma.Web.ActivityPub.Utils
|
||||||
|
alias Pleroma.Web.ActivityPub.Visibility
|
||||||
|
|
||||||
|
@spec like(User.t(), Object.t()) :: {:ok, map(), keyword()}
|
||||||
|
def like(actor, object) do
|
||||||
|
object_actor = User.get_cached_by_ap_id(object.data["actor"])
|
||||||
|
|
||||||
|
# Address the actor of the object, and our actor's follower collection if the post is public.
|
||||||
|
to =
|
||||||
|
if Visibility.is_public?(object) do
|
||||||
|
[actor.follower_address, object.data["actor"]]
|
||||||
|
else
|
||||||
|
[object.data["actor"]]
|
||||||
|
end
|
||||||
|
|
||||||
|
# CC everyone who's been addressed in the object, except ourself and the object actor's
|
||||||
|
# follower collection
|
||||||
|
cc =
|
||||||
|
(object.data["to"] ++ (object.data["cc"] || []))
|
||||||
|
|> List.delete(actor.ap_id)
|
||||||
|
|> List.delete(object_actor.follower_address)
|
||||||
|
|
||||||
|
{:ok,
|
||||||
|
%{
|
||||||
|
"id" => Utils.generate_activity_id(),
|
||||||
|
"actor" => actor.ap_id,
|
||||||
|
"type" => "Like",
|
||||||
|
"object" => object.data["id"],
|
||||||
|
"to" => to,
|
||||||
|
"cc" => cc,
|
||||||
|
"context" => object.data["context"]
|
||||||
|
}, []}
|
||||||
|
end
|
||||||
|
end
|
|
@ -1,5 +1,5 @@
|
||||||
# Pleroma: A lightweight social networking server
|
# Pleroma: A lightweight social networking server
|
||||||
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.Web.ActivityPub.MRF.AntiFollowbotPolicy do
|
defmodule Pleroma.Web.ActivityPub.MRF.AntiFollowbotPolicy do
|
||||||
|
|
|
@ -12,17 +12,23 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
|
||||||
|
|
||||||
require Logger
|
require Logger
|
||||||
|
|
||||||
@hackney_options [
|
@options [
|
||||||
pool: :media,
|
pool: :media
|
||||||
recv_timeout: 10_000
|
|
||||||
]
|
]
|
||||||
|
|
||||||
def perform(:prefetch, url) do
|
def perform(:prefetch, url) do
|
||||||
Logger.debug("Prefetching #{inspect(url)}")
|
Logger.debug("Prefetching #{inspect(url)}")
|
||||||
|
|
||||||
|
opts =
|
||||||
|
if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
|
||||||
|
Keyword.put(@options, :recv_timeout, 10_000)
|
||||||
|
else
|
||||||
|
@options
|
||||||
|
end
|
||||||
|
|
||||||
url
|
url
|
||||||
|> MediaProxy.url()
|
|> MediaProxy.url()
|
||||||
|> HTTP.get([], adapter: @hackney_options)
|
|> HTTP.get([], adapter: opts)
|
||||||
end
|
end
|
||||||
|
|
||||||
def perform(:preload, %{"object" => %{"attachment" => attachments}} = _message) do
|
def perform(:preload, %{"object" => %{"attachment" => attachments}} = _message) do
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
# Pleroma: A lightweight social networking server
|
# Pleroma: A lightweight social networking server
|
||||||
# Copyright © 2019 Pleroma Authors <https://pleroma.social/>
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
# SPDX-License-Identifier: AGPL-3.0-only
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
defmodule Pleroma.Web.ActivityPub.MRF.NoPlaceholderTextPolicy do
|
defmodule Pleroma.Web.ActivityPub.MRF.NoPlaceholderTextPolicy do
|
||||||
|
|
37
lib/pleroma/web/activity_pub/object_validator.ex
Normal file
37
lib/pleroma/web/activity_pub/object_validator.ex
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Web.ActivityPub.ObjectValidator do
|
||||||
|
@moduledoc """
|
||||||
|
This module is responsible for validating an object (which can be an activity)
|
||||||
|
and checking if it is both well formed and also compatible with our view of
|
||||||
|
the system.
|
||||||
|
"""
|
||||||
|
|
||||||
|
alias Pleroma.Object
|
||||||
|
alias Pleroma.User
|
||||||
|
alias Pleroma.Web.ActivityPub.ObjectValidators.LikeValidator
|
||||||
|
|
||||||
|
@spec validate(map(), keyword()) :: {:ok, map(), keyword()} | {:error, any()}
|
||||||
|
def validate(object, meta)
|
||||||
|
|
||||||
|
def validate(%{"type" => "Like"} = object, meta) do
|
||||||
|
with {:ok, object} <-
|
||||||
|
object |> LikeValidator.cast_and_validate() |> Ecto.Changeset.apply_action(:insert) do
|
||||||
|
object = stringify_keys(object |> Map.from_struct())
|
||||||
|
{:ok, object, meta}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def stringify_keys(object) do
|
||||||
|
object
|
||||||
|
|> Map.new(fn {key, val} -> {to_string(key), val} end)
|
||||||
|
end
|
||||||
|
|
||||||
|
def fetch_actor_and_object(object) do
|
||||||
|
User.get_or_fetch_by_ap_id(object["actor"])
|
||||||
|
Object.normalize(object["object"])
|
||||||
|
:ok
|
||||||
|
end
|
||||||
|
end
|
|
@ -0,0 +1,32 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations do
|
||||||
|
import Ecto.Changeset
|
||||||
|
|
||||||
|
alias Pleroma.Object
|
||||||
|
alias Pleroma.User
|
||||||
|
|
||||||
|
def validate_actor_presence(cng, field_name \\ :actor) do
|
||||||
|
cng
|
||||||
|
|> validate_change(field_name, fn field_name, actor ->
|
||||||
|
if User.get_cached_by_ap_id(actor) do
|
||||||
|
[]
|
||||||
|
else
|
||||||
|
[{field_name, "can't find user"}]
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
|
||||||
|
def validate_object_presence(cng, field_name \\ :object) do
|
||||||
|
cng
|
||||||
|
|> validate_change(field_name, fn field_name, object ->
|
||||||
|
if Object.get_cached_by_ap_id(object) do
|
||||||
|
[]
|
||||||
|
else
|
||||||
|
[{field_name, "can't find object"}]
|
||||||
|
end
|
||||||
|
end)
|
||||||
|
end
|
||||||
|
end
|
|
@ -0,0 +1,30 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Web.ActivityPub.ObjectValidators.CreateNoteValidator do
|
||||||
|
use Ecto.Schema
|
||||||
|
|
||||||
|
alias Pleroma.Web.ActivityPub.ObjectValidators.NoteValidator
|
||||||
|
alias Pleroma.Web.ActivityPub.ObjectValidators.Types
|
||||||
|
|
||||||
|
import Ecto.Changeset
|
||||||
|
|
||||||
|
@primary_key false
|
||||||
|
|
||||||
|
embedded_schema do
|
||||||
|
field(:id, Types.ObjectID, primary_key: true)
|
||||||
|
field(:actor, Types.ObjectID)
|
||||||
|
field(:type, :string)
|
||||||
|
field(:to, {:array, :string})
|
||||||
|
field(:cc, {:array, :string})
|
||||||
|
field(:bto, {:array, :string}, default: [])
|
||||||
|
field(:bcc, {:array, :string}, default: [])
|
||||||
|
|
||||||
|
embeds_one(:object, NoteValidator)
|
||||||
|
end
|
||||||
|
|
||||||
|
def cast_data(data) do
|
||||||
|
cast(%__MODULE__{}, data, __schema__(:fields))
|
||||||
|
end
|
||||||
|
end
|
|
@ -0,0 +1,57 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Web.ActivityPub.ObjectValidators.LikeValidator do
|
||||||
|
use Ecto.Schema
|
||||||
|
|
||||||
|
alias Pleroma.Web.ActivityPub.ObjectValidators.Types
|
||||||
|
alias Pleroma.Web.ActivityPub.Utils
|
||||||
|
|
||||||
|
import Ecto.Changeset
|
||||||
|
import Pleroma.Web.ActivityPub.ObjectValidators.CommonValidations
|
||||||
|
|
||||||
|
@primary_key false
|
||||||
|
|
||||||
|
embedded_schema do
|
||||||
|
field(:id, Types.ObjectID, primary_key: true)
|
||||||
|
field(:type, :string)
|
||||||
|
field(:object, Types.ObjectID)
|
||||||
|
field(:actor, Types.ObjectID)
|
||||||
|
field(:context, :string)
|
||||||
|
field(:to, {:array, :string})
|
||||||
|
field(:cc, {:array, :string})
|
||||||
|
end
|
||||||
|
|
||||||
|
def cast_and_validate(data) do
|
||||||
|
data
|
||||||
|
|> cast_data()
|
||||||
|
|> validate_data()
|
||||||
|
end
|
||||||
|
|
||||||
|
def cast_data(data) do
|
||||||
|
%__MODULE__{}
|
||||||
|
|> cast(data, [:id, :type, :object, :actor, :context, :to, :cc])
|
||||||
|
end
|
||||||
|
|
||||||
|
def validate_data(data_cng) do
|
||||||
|
data_cng
|
||||||
|
|> validate_inclusion(:type, ["Like"])
|
||||||
|
|> validate_required([:id, :type, :object, :actor, :context, :to, :cc])
|
||||||
|
|> validate_actor_presence()
|
||||||
|
|> validate_object_presence()
|
||||||
|
|> validate_existing_like()
|
||||||
|
end
|
||||||
|
|
||||||
|
def validate_existing_like(%{changes: %{actor: actor, object: object}} = cng) do
|
||||||
|
if Utils.get_existing_like(actor, %{data: %{"id" => object}}) do
|
||||||
|
cng
|
||||||
|
|> add_error(:actor, "already liked this object")
|
||||||
|
|> add_error(:object, "already liked by this actor")
|
||||||
|
else
|
||||||
|
cng
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def validate_existing_like(cng), do: cng
|
||||||
|
end
|
|
@ -0,0 +1,63 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Web.ActivityPub.ObjectValidators.NoteValidator do
|
||||||
|
use Ecto.Schema
|
||||||
|
|
||||||
|
alias Pleroma.Web.ActivityPub.ObjectValidators.Types
|
||||||
|
|
||||||
|
import Ecto.Changeset
|
||||||
|
|
||||||
|
@primary_key false
|
||||||
|
|
||||||
|
embedded_schema do
|
||||||
|
field(:id, Types.ObjectID, primary_key: true)
|
||||||
|
field(:to, {:array, :string}, default: [])
|
||||||
|
field(:cc, {:array, :string}, default: [])
|
||||||
|
field(:bto, {:array, :string}, default: [])
|
||||||
|
field(:bcc, {:array, :string}, default: [])
|
||||||
|
# TODO: Write type
|
||||||
|
field(:tag, {:array, :map}, default: [])
|
||||||
|
field(:type, :string)
|
||||||
|
field(:content, :string)
|
||||||
|
field(:context, :string)
|
||||||
|
field(:actor, Types.ObjectID)
|
||||||
|
field(:attributedTo, Types.ObjectID)
|
||||||
|
field(:summary, :string)
|
||||||
|
field(:published, Types.DateTime)
|
||||||
|
# TODO: Write type
|
||||||
|
field(:emoji, :map, default: %{})
|
||||||
|
field(:sensitive, :boolean, default: false)
|
||||||
|
# TODO: Write type
|
||||||
|
field(:attachment, {:array, :map}, default: [])
|
||||||
|
field(:replies_count, :integer, default: 0)
|
||||||
|
field(:like_count, :integer, default: 0)
|
||||||
|
field(:announcement_count, :integer, default: 0)
|
||||||
|
field(:inRepyTo, :string)
|
||||||
|
|
||||||
|
field(:likes, {:array, :string}, default: [])
|
||||||
|
field(:announcements, {:array, :string}, default: [])
|
||||||
|
|
||||||
|
# see if needed
|
||||||
|
field(:conversation, :string)
|
||||||
|
field(:context_id, :string)
|
||||||
|
end
|
||||||
|
|
||||||
|
def cast_and_validate(data) do
|
||||||
|
data
|
||||||
|
|> cast_data()
|
||||||
|
|> validate_data()
|
||||||
|
end
|
||||||
|
|
||||||
|
def cast_data(data) do
|
||||||
|
%__MODULE__{}
|
||||||
|
|> cast(data, __schema__(:fields))
|
||||||
|
end
|
||||||
|
|
||||||
|
def validate_data(data_cng) do
|
||||||
|
data_cng
|
||||||
|
|> validate_inclusion(:type, ["Note"])
|
||||||
|
|> validate_required([:id, :actor, :to, :cc, :type, :content, :context])
|
||||||
|
end
|
||||||
|
end
|
|
@ -0,0 +1,34 @@
|
||||||
|
defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.DateTime do
|
||||||
|
@moduledoc """
|
||||||
|
The AP standard defines the date fields in AP as xsd:DateTime. Elixir's
|
||||||
|
DateTime can't parse this, but it can parse the related iso8601. This
|
||||||
|
module punches the date until it looks like iso8601 and normalizes to
|
||||||
|
it.
|
||||||
|
|
||||||
|
DateTimes without a timezone offset are treated as UTC.
|
||||||
|
|
||||||
|
Reference: https://www.w3.org/TR/activitystreams-vocabulary/#dfn-published
|
||||||
|
"""
|
||||||
|
use Ecto.Type
|
||||||
|
|
||||||
|
def type, do: :string
|
||||||
|
|
||||||
|
def cast(datetime) when is_binary(datetime) do
|
||||||
|
with {:ok, datetime, _} <- DateTime.from_iso8601(datetime) do
|
||||||
|
{:ok, DateTime.to_iso8601(datetime)}
|
||||||
|
else
|
||||||
|
{:error, :missing_offset} -> cast("#{datetime}Z")
|
||||||
|
_e -> :error
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def cast(_), do: :error
|
||||||
|
|
||||||
|
def dump(data) do
|
||||||
|
{:ok, data}
|
||||||
|
end
|
||||||
|
|
||||||
|
def load(data) do
|
||||||
|
{:ok, data}
|
||||||
|
end
|
||||||
|
end
|
|
@ -0,0 +1,33 @@
|
||||||
|
defmodule Pleroma.Web.ActivityPub.ObjectValidators.Types.ObjectID do
|
||||||
|
use Ecto.Type
|
||||||
|
|
||||||
|
def type, do: :string
|
||||||
|
|
||||||
|
def cast(object) when is_binary(object) do
|
||||||
|
# Host has to be present and scheme has to be an http scheme (for now)
|
||||||
|
case URI.parse(object) do
|
||||||
|
%URI{host: nil} ->
|
||||||
|
:error
|
||||||
|
|
||||||
|
%URI{scheme: scheme} when scheme in ["https", "http"] ->
|
||||||
|
{:ok, object}
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
:error
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def cast(%{"id" => object}), do: cast(object)
|
||||||
|
|
||||||
|
def cast(_) do
|
||||||
|
:error
|
||||||
|
end
|
||||||
|
|
||||||
|
def dump(data) do
|
||||||
|
{:ok, data}
|
||||||
|
end
|
||||||
|
|
||||||
|
def load(data) do
|
||||||
|
{:ok, data}
|
||||||
|
end
|
||||||
|
end
|
42
lib/pleroma/web/activity_pub/pipeline.ex
Normal file
42
lib/pleroma/web/activity_pub/pipeline.ex
Normal file
|
@ -0,0 +1,42 @@
|
||||||
|
# Pleroma: A lightweight social networking server
|
||||||
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
||||||
|
# SPDX-License-Identifier: AGPL-3.0-only
|
||||||
|
|
||||||
|
defmodule Pleroma.Web.ActivityPub.Pipeline do
|
||||||
|
alias Pleroma.Activity
|
||||||
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||||
|
alias Pleroma.Web.ActivityPub.MRF
|
||||||
|
alias Pleroma.Web.ActivityPub.ObjectValidator
|
||||||
|
alias Pleroma.Web.ActivityPub.SideEffects
|
||||||
|
alias Pleroma.Web.Federator
|
||||||
|
|
||||||
|
@spec common_pipeline(map(), keyword()) :: {:ok, Activity.t(), keyword()} | {:error, any()}
|
||||||
|
def common_pipeline(object, meta) do
|
||||||
|
with {_, {:ok, validated_object, meta}} <-
|
||||||
|
{:validate_object, ObjectValidator.validate(object, meta)},
|
||||||
|
{_, {:ok, mrfd_object}} <- {:mrf_object, MRF.filter(validated_object)},
|
||||||
|
{_, {:ok, %Activity{} = activity, meta}} <-
|
||||||
|
{:persist_object, ActivityPub.persist(mrfd_object, meta)},
|
||||||
|
{_, {:ok, %Activity{} = activity, meta}} <-
|
||||||
|
{:execute_side_effects, SideEffects.handle(activity, meta)},
|
||||||
|
{_, {:ok, _}} <- {:federation, maybe_federate(activity, meta)} do
|
||||||
|
{:ok, activity, meta}
|
||||||
|
else
|
||||||
|
{:mrf_object, {:reject, _}} -> {:ok, nil, meta}
|
||||||
|
e -> {:error, e}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp maybe_federate(activity, meta) do
|
||||||
|
with {:ok, local} <- Keyword.fetch(meta, :local) do
|
||||||
|
if local do
|
||||||
|
Federator.publish(activity)
|
||||||
|
{:ok, :federated}
|
||||||
|
else
|
||||||
|
{:ok, :not_federated}
|
||||||
|
end
|
||||||
|
else
|
||||||
|
_e -> {:error, :badarg}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
28
lib/pleroma/web/activity_pub/side_effects.ex
Normal file
28
lib/pleroma/web/activity_pub/side_effects.ex
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
defmodule Pleroma.Web.ActivityPub.SideEffects do
|
||||||
|
@moduledoc """
|
||||||
|
This module looks at an inserted object and executes the side effects that it
|
||||||
|
implies. For example, a `Like` activity will increase the like count on the
|
||||||
|
liked object, a `Follow` activity will add the user to the follower
|
||||||
|
collection, and so on.
|
||||||
|
"""
|
||||||
|
alias Pleroma.Notification
|
||||||
|
alias Pleroma.Object
|
||||||
|
alias Pleroma.Web.ActivityPub.Utils
|
||||||
|
|
||||||
|
def handle(object, meta \\ [])
|
||||||
|
|
||||||
|
# Tasks this handles:
|
||||||
|
# - Add like to object
|
||||||
|
# - Set up notification
|
||||||
|
def handle(%{data: %{"type" => "Like"}} = object, meta) do
|
||||||
|
liked_object = Object.get_by_ap_id(object.data["object"])
|
||||||
|
Utils.add_like_to_object(object, liked_object)
|
||||||
|
Notification.create_notifications(object)
|
||||||
|
{:ok, object, meta}
|
||||||
|
end
|
||||||
|
|
||||||
|
# Nothing to do
|
||||||
|
def handle(object, meta) do
|
||||||
|
{:ok, object, meta}
|
||||||
|
end
|
||||||
|
end
|
|
@ -13,6 +13,9 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
|
||||||
alias Pleroma.Repo
|
alias Pleroma.Repo
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
alias Pleroma.Web.ActivityPub.ActivityPub
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||||
|
alias Pleroma.Web.ActivityPub.ObjectValidator
|
||||||
|
alias Pleroma.Web.ActivityPub.ObjectValidators.LikeValidator
|
||||||
|
alias Pleroma.Web.ActivityPub.Pipeline
|
||||||
alias Pleroma.Web.ActivityPub.Utils
|
alias Pleroma.Web.ActivityPub.Utils
|
||||||
alias Pleroma.Web.ActivityPub.Visibility
|
alias Pleroma.Web.ActivityPub.Visibility
|
||||||
alias Pleroma.Web.Federator
|
alias Pleroma.Web.Federator
|
||||||
|
@ -609,17 +612,20 @@ def handle_incoming(
|
||||||
|> handle_incoming(options)
|
|> handle_incoming(options)
|
||||||
end
|
end
|
||||||
|
|
||||||
def handle_incoming(
|
def handle_incoming(%{"type" => "Like"} = data, _options) do
|
||||||
%{"type" => "Like", "object" => object_id, "actor" => _actor, "id" => id} = data,
|
with {_, {:ok, cast_data_sym}} <-
|
||||||
_options
|
{:casting_data,
|
||||||
) do
|
data |> LikeValidator.cast_data() |> Ecto.Changeset.apply_action(:insert)},
|
||||||
with actor <- Containment.get_actor(data),
|
cast_data = ObjectValidator.stringify_keys(Map.from_struct(cast_data_sym)),
|
||||||
{:ok, %User{} = actor} <- User.get_or_fetch_by_ap_id(actor),
|
:ok <- ObjectValidator.fetch_actor_and_object(cast_data),
|
||||||
{:ok, object} <- get_obj_helper(object_id),
|
{_, {:ok, cast_data}} <- {:ensure_context_presence, ensure_context_presence(cast_data)},
|
||||||
{:ok, activity, _object} <- ActivityPub.like(actor, object, id, false) do
|
{_, {:ok, cast_data}} <-
|
||||||
|
{:ensure_recipients_presence, ensure_recipients_presence(cast_data)},
|
||||||
|
{_, {:ok, activity, _meta}} <-
|
||||||
|
{:common_pipeline, Pipeline.common_pipeline(cast_data, local: false)} do
|
||||||
{:ok, activity}
|
{:ok, activity}
|
||||||
else
|
else
|
||||||
_e -> :error
|
e -> {:error, e}
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -1243,4 +1249,45 @@ def maybe_fix_user_url(%{"url" => url} = data) when is_map(url) do
|
||||||
def maybe_fix_user_url(data), do: data
|
def maybe_fix_user_url(data), do: data
|
||||||
|
|
||||||
def maybe_fix_user_object(data), do: maybe_fix_user_url(data)
|
def maybe_fix_user_object(data), do: maybe_fix_user_url(data)
|
||||||
|
|
||||||
|
defp ensure_context_presence(%{"context" => context} = data) when is_binary(context),
|
||||||
|
do: {:ok, data}
|
||||||
|
|
||||||
|
defp ensure_context_presence(%{"object" => object} = data) when is_binary(object) do
|
||||||
|
with %{data: %{"context" => context}} when is_binary(context) <- Object.normalize(object) do
|
||||||
|
{:ok, Map.put(data, "context", context)}
|
||||||
|
else
|
||||||
|
_ ->
|
||||||
|
{:error, :no_context}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp ensure_context_presence(_) do
|
||||||
|
{:error, :no_context}
|
||||||
|
end
|
||||||
|
|
||||||
|
defp ensure_recipients_presence(%{"to" => [_ | _], "cc" => [_ | _]} = data),
|
||||||
|
do: {:ok, data}
|
||||||
|
|
||||||
|
defp ensure_recipients_presence(%{"object" => object} = data) do
|
||||||
|
case Object.normalize(object) do
|
||||||
|
%{data: %{"actor" => actor}} ->
|
||||||
|
data =
|
||||||
|
data
|
||||||
|
|> Map.put("to", [actor])
|
||||||
|
|> Map.put("cc", data["cc"] || [])
|
||||||
|
|
||||||
|
{:ok, data}
|
||||||
|
|
||||||
|
nil ->
|
||||||
|
{:error, :no_object}
|
||||||
|
|
||||||
|
_ ->
|
||||||
|
{:error, :no_actor}
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defp ensure_recipients_presence(_) do
|
||||||
|
{:error, :no_object}
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -795,102 +795,6 @@ def get_reports(params, page, page_size) do
|
||||||
ActivityPub.fetch_activities([], params, :offset)
|
ActivityPub.fetch_activities([], params, :offset)
|
||||||
end
|
end
|
||||||
|
|
||||||
def parse_report_group(activity) do
|
|
||||||
reports = get_reports_by_status_id(activity["id"])
|
|
||||||
max_date = Enum.max_by(reports, &NaiveDateTime.from_iso8601!(&1.data["published"]))
|
|
||||||
actors = Enum.map(reports, & &1.user_actor)
|
|
||||||
[%{data: %{"object" => [account_id | _]}} | _] = reports
|
|
||||||
|
|
||||||
account =
|
|
||||||
AccountView.render("show.json", %{
|
|
||||||
user: User.get_by_ap_id(account_id)
|
|
||||||
})
|
|
||||||
|
|
||||||
status = get_status_data(activity)
|
|
||||||
|
|
||||||
%{
|
|
||||||
date: max_date.data["published"],
|
|
||||||
account: account,
|
|
||||||
status: status,
|
|
||||||
actors: Enum.uniq(actors),
|
|
||||||
reports: reports
|
|
||||||
}
|
|
||||||
end
|
|
||||||
|
|
||||||
defp get_status_data(status) do
|
|
||||||
case status["deleted"] do
|
|
||||||
true ->
|
|
||||||
%{
|
|
||||||
"id" => status["id"],
|
|
||||||
"deleted" => true
|
|
||||||
}
|
|
||||||
|
|
||||||
_ ->
|
|
||||||
Activity.get_by_ap_id(status["id"])
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def get_reports_by_status_id(ap_id) do
|
|
||||||
from(a in Activity,
|
|
||||||
where: fragment("(?)->>'type' = 'Flag'", a.data),
|
|
||||||
where: fragment("(?)->'object' @> ?", a.data, ^[%{id: ap_id}]),
|
|
||||||
or_where: fragment("(?)->'object' @> ?", a.data, ^[ap_id])
|
|
||||||
)
|
|
||||||
|> Activity.with_preloaded_user_actor()
|
|
||||||
|> Repo.all()
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec get_reports_grouped_by_status([String.t()]) :: %{
|
|
||||||
required(:groups) => [
|
|
||||||
%{
|
|
||||||
required(:date) => String.t(),
|
|
||||||
required(:account) => %{},
|
|
||||||
required(:status) => %{},
|
|
||||||
required(:actors) => [%User{}],
|
|
||||||
required(:reports) => [%Activity{}]
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
def get_reports_grouped_by_status(activity_ids) do
|
|
||||||
parsed_groups =
|
|
||||||
activity_ids
|
|
||||||
|> Enum.map(fn id ->
|
|
||||||
id
|
|
||||||
|> build_flag_object()
|
|
||||||
|> parse_report_group()
|
|
||||||
end)
|
|
||||||
|
|
||||||
%{
|
|
||||||
groups: parsed_groups
|
|
||||||
}
|
|
||||||
end
|
|
||||||
|
|
||||||
@spec get_reported_activities() :: [
|
|
||||||
%{
|
|
||||||
required(:activity) => String.t(),
|
|
||||||
required(:date) => String.t()
|
|
||||||
}
|
|
||||||
]
|
|
||||||
def get_reported_activities do
|
|
||||||
reported_activities_query =
|
|
||||||
from(a in Activity,
|
|
||||||
where: fragment("(?)->>'type' = 'Flag'", a.data),
|
|
||||||
select: %{
|
|
||||||
activity: fragment("jsonb_array_elements((? #- '{object,0}')->'object')", a.data)
|
|
||||||
},
|
|
||||||
group_by: fragment("activity")
|
|
||||||
)
|
|
||||||
|
|
||||||
from(a in subquery(reported_activities_query),
|
|
||||||
distinct: true,
|
|
||||||
select: %{
|
|
||||||
id: fragment("COALESCE(?->>'id'::text, ? #>> '{}')", a.activity, a.activity)
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|> Repo.all()
|
|
||||||
|> Enum.map(& &1.id)
|
|
||||||
end
|
|
||||||
|
|
||||||
def update_report_state(%Activity{} = activity, state)
|
def update_report_state(%Activity{} = activity, state)
|
||||||
when state in @strip_status_report_states do
|
when state in @strip_status_report_states do
|
||||||
{:ok, stripped_activity} = strip_report_status_data(activity)
|
{:ok, stripped_activity} = strip_report_status_data(activity)
|
||||||
|
|
|
@ -715,14 +715,6 @@ def list_reports(conn, params) do
|
||||||
|> render("index.json", %{reports: reports})
|
|> render("index.json", %{reports: reports})
|
||||||
end
|
end
|
||||||
|
|
||||||
def list_grouped_reports(conn, _params) do
|
|
||||||
statuses = Utils.get_reported_activities()
|
|
||||||
|
|
||||||
conn
|
|
||||||
|> put_view(ReportView)
|
|
||||||
|> render("index_grouped.json", Utils.get_reports_grouped_by_status(statuses))
|
|
||||||
end
|
|
||||||
|
|
||||||
def report_show(conn, %{"id" => id}) do
|
def report_show(conn, %{"id" => id}) do
|
||||||
with %Activity{} = report <- Activity.get_by_id(id) do
|
with %Activity{} = report <- Activity.get_by_id(id) do
|
||||||
conn
|
conn
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
|
|
||||||
defmodule Pleroma.Web.AdminAPI.ReportView do
|
defmodule Pleroma.Web.AdminAPI.ReportView do
|
||||||
use Pleroma.Web, :view
|
use Pleroma.Web, :view
|
||||||
alias Pleroma.Activity
|
|
||||||
alias Pleroma.HTML
|
alias Pleroma.HTML
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
alias Pleroma.Web.AdminAPI.Report
|
alias Pleroma.Web.AdminAPI.Report
|
||||||
|
@ -44,32 +44,6 @@ def render("show.json", %{report: report, user: user, account: account, statuses
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
def render("index_grouped.json", %{groups: groups}) do
|
|
||||||
reports =
|
|
||||||
Enum.map(groups, fn group ->
|
|
||||||
status =
|
|
||||||
case group.status do
|
|
||||||
%Activity{} = activity -> StatusView.render("show.json", %{activity: activity})
|
|
||||||
_ -> group.status
|
|
||||||
end
|
|
||||||
|
|
||||||
%{
|
|
||||||
date: group[:date],
|
|
||||||
account: group[:account],
|
|
||||||
status: Map.put_new(status, "deleted", false),
|
|
||||||
actors: Enum.map(group[:actors], &merge_account_views/1),
|
|
||||||
reports:
|
|
||||||
group[:reports]
|
|
||||||
|> Enum.map(&Report.extract_report_info(&1))
|
|
||||||
|> Enum.map(&render(__MODULE__, "show.json", &1))
|
|
||||||
}
|
|
||||||
end)
|
|
||||||
|
|
||||||
%{
|
|
||||||
reports: reports
|
|
||||||
}
|
|
||||||
end
|
|
||||||
|
|
||||||
def render("index_notes.json", %{notes: notes}) when is_list(notes) do
|
def render("index_notes.json", %{notes: notes}) when is_list(notes) do
|
||||||
Enum.map(notes, &render(__MODULE__, "show_note.json", &1))
|
Enum.map(notes, &render(__MODULE__, "show_note.json", &1))
|
||||||
end
|
end
|
||||||
|
|
|
@ -12,6 +12,8 @@ defmodule Pleroma.Web.CommonAPI do
|
||||||
alias Pleroma.User
|
alias Pleroma.User
|
||||||
alias Pleroma.UserRelationship
|
alias Pleroma.UserRelationship
|
||||||
alias Pleroma.Web.ActivityPub.ActivityPub
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
||||||
|
alias Pleroma.Web.ActivityPub.Builder
|
||||||
|
alias Pleroma.Web.ActivityPub.Pipeline
|
||||||
alias Pleroma.Web.ActivityPub.Utils
|
alias Pleroma.Web.ActivityPub.Utils
|
||||||
alias Pleroma.Web.ActivityPub.Visibility
|
alias Pleroma.Web.ActivityPub.Visibility
|
||||||
|
|
||||||
|
@ -19,6 +21,7 @@ defmodule Pleroma.Web.CommonAPI do
|
||||||
import Pleroma.Web.CommonAPI.Utils
|
import Pleroma.Web.CommonAPI.Utils
|
||||||
|
|
||||||
require Pleroma.Constants
|
require Pleroma.Constants
|
||||||
|
require Logger
|
||||||
|
|
||||||
def follow(follower, followed) do
|
def follow(follower, followed) do
|
||||||
timeout = Pleroma.Config.get([:activitypub, :follow_handshake_timeout])
|
timeout = Pleroma.Config.get([:activitypub, :follow_handshake_timeout])
|
||||||
|
@ -109,18 +112,51 @@ def unrepeat(id_or_ap_id, user) do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def favorite(id_or_ap_id, user) do
|
@spec favorite(User.t(), binary()) :: {:ok, Activity.t() | :already_liked} | {:error, any()}
|
||||||
with {_, %Activity{} = activity} <- {:find_activity, get_by_id_or_ap_id(id_or_ap_id)},
|
def favorite(%User{} = user, id) do
|
||||||
object <- Object.normalize(activity),
|
case favorite_helper(user, id) do
|
||||||
like_activity <- Utils.get_existing_like(user.ap_id, object) do
|
{:ok, _} = res ->
|
||||||
if like_activity do
|
res
|
||||||
{:ok, like_activity, object}
|
|
||||||
else
|
{:error, :not_found} = res ->
|
||||||
ActivityPub.like(user, object)
|
res
|
||||||
|
|
||||||
|
{:error, e} ->
|
||||||
|
Logger.error("Could not favorite #{id}. Error: #{inspect(e, pretty: true)}")
|
||||||
|
{:error, dgettext("errors", "Could not favorite")}
|
||||||
end
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def favorite_helper(user, id) do
|
||||||
|
with {_, %Activity{object: object}} <- {:find_object, Activity.get_by_id_with_object(id)},
|
||||||
|
{_, {:ok, like_object, meta}} <- {:build_object, Builder.like(user, object)},
|
||||||
|
{_, {:ok, %Activity{} = activity, _meta}} <-
|
||||||
|
{:common_pipeline,
|
||||||
|
Pipeline.common_pipeline(like_object, Keyword.put(meta, :local, true))} do
|
||||||
|
{:ok, activity}
|
||||||
else
|
else
|
||||||
{:find_activity, _} -> {:error, :not_found}
|
{:find_object, _} ->
|
||||||
_ -> {:error, dgettext("errors", "Could not favorite")}
|
{:error, :not_found}
|
||||||
|
|
||||||
|
{:common_pipeline,
|
||||||
|
{
|
||||||
|
:error,
|
||||||
|
{
|
||||||
|
:validate_object,
|
||||||
|
{
|
||||||
|
:error,
|
||||||
|
changeset
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}} = e ->
|
||||||
|
if {:object, {"already liked by this actor", []}} in changeset.errors do
|
||||||
|
{:ok, :already_liked}
|
||||||
|
else
|
||||||
|
{:error, e}
|
||||||
|
end
|
||||||
|
|
||||||
|
e ->
|
||||||
|
{:error, e}
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
|
@ -207,9 +207,9 @@ def unreblog(%{assigns: %{user: user}} = conn, %{"id" => ap_id_or_id}) do
|
||||||
end
|
end
|
||||||
|
|
||||||
@doc "POST /api/v1/statuses/:id/favourite"
|
@doc "POST /api/v1/statuses/:id/favourite"
|
||||||
def favourite(%{assigns: %{user: user}} = conn, %{"id" => ap_id_or_id}) do
|
def favourite(%{assigns: %{user: user}} = conn, %{"id" => activity_id}) do
|
||||||
with {:ok, _fav, %{data: %{"id" => id}}} <- CommonAPI.favorite(ap_id_or_id, user),
|
with {:ok, _fav} <- CommonAPI.favorite(user, activity_id),
|
||||||
%Activity{} = activity <- Activity.get_create_by_object_ap_id(id) do
|
%Activity{} = activity <- Activity.get_by_id(activity_id) do
|
||||||
try_render(conn, "show.json", activity: activity, for: user, as: :activity)
|
try_render(conn, "show.json", activity: activity, for: user, as: :activity)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -13,16 +13,18 @@ defmodule Pleroma.Web.MastodonAPI.AccountView do
|
||||||
alias Pleroma.Web.MediaProxy
|
alias Pleroma.Web.MediaProxy
|
||||||
|
|
||||||
def render("index.json", %{users: users} = opts) do
|
def render("index.json", %{users: users} = opts) do
|
||||||
|
reading_user = opts[:for]
|
||||||
|
|
||||||
relationships_opt =
|
relationships_opt =
|
||||||
cond do
|
cond do
|
||||||
Map.has_key?(opts, :relationships) ->
|
Map.has_key?(opts, :relationships) ->
|
||||||
opts[:relationships]
|
opts[:relationships]
|
||||||
|
|
||||||
is_nil(opts[:for]) ->
|
is_nil(reading_user) ->
|
||||||
UserRelationship.view_relationships_option(nil, [])
|
UserRelationship.view_relationships_option(nil, [])
|
||||||
|
|
||||||
true ->
|
true ->
|
||||||
UserRelationship.view_relationships_option(opts[:for], users)
|
UserRelationship.view_relationships_option(reading_user, users)
|
||||||
end
|
end
|
||||||
|
|
||||||
opts = Map.put(opts, :relationships, relationships_opt)
|
opts = Map.put(opts, :relationships, relationships_opt)
|
||||||
|
@ -43,7 +45,7 @@ def render("mention.json", %{user: user}) do
|
||||||
id: to_string(user.id),
|
id: to_string(user.id),
|
||||||
acct: user.nickname,
|
acct: user.nickname,
|
||||||
username: username_from_nickname(user.nickname),
|
username: username_from_nickname(user.nickname),
|
||||||
url: User.profile_url(user)
|
url: user.uri || user.ap_id
|
||||||
}
|
}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -143,7 +145,7 @@ def render("relationships.json", %{user: user, targets: targets} = opts) do
|
||||||
Map.has_key?(opts, :relationships) ->
|
Map.has_key?(opts, :relationships) ->
|
||||||
opts[:relationships]
|
opts[:relationships]
|
||||||
|
|
||||||
is_nil(opts[:for]) ->
|
is_nil(user) ->
|
||||||
UserRelationship.view_relationships_option(nil, [])
|
UserRelationship.view_relationships_option(nil, [])
|
||||||
|
|
||||||
true ->
|
true ->
|
||||||
|
@ -207,7 +209,7 @@ defp do_render("show.json", %{user: user} = opts) do
|
||||||
following_count: following_count,
|
following_count: following_count,
|
||||||
statuses_count: user.note_count,
|
statuses_count: user.note_count,
|
||||||
note: user.bio || "",
|
note: user.bio || "",
|
||||||
url: User.profile_url(user),
|
url: user.uri || user.ap_id,
|
||||||
avatar: image,
|
avatar: image,
|
||||||
avatar_static: image,
|
avatar_static: image,
|
||||||
header: header,
|
header: header,
|
||||||
|
|
|
@ -36,7 +36,7 @@ def render("index.json", %{notifications: notifications, for: reading_user} = op
|
||||||
Map.has_key?(opts, :relationships) ->
|
Map.has_key?(opts, :relationships) ->
|
||||||
opts[:relationships]
|
opts[:relationships]
|
||||||
|
|
||||||
is_nil(opts[:for]) ->
|
is_nil(reading_user) ->
|
||||||
UserRelationship.view_relationships_option(nil, [])
|
UserRelationship.view_relationships_option(nil, [])
|
||||||
|
|
||||||
true ->
|
true ->
|
||||||
|
|
|
@ -72,6 +72,8 @@ defp reblogged?(activity, user) do
|
||||||
end
|
end
|
||||||
|
|
||||||
def render("index.json", opts) do
|
def render("index.json", opts) do
|
||||||
|
reading_user = opts[:for]
|
||||||
|
|
||||||
# To do: check AdminAPIControllerTest on the reasons behind nil activities in the list
|
# To do: check AdminAPIControllerTest on the reasons behind nil activities in the list
|
||||||
activities = Enum.filter(opts.activities, & &1)
|
activities = Enum.filter(opts.activities, & &1)
|
||||||
replied_to_activities = get_replied_to_activities(activities)
|
replied_to_activities = get_replied_to_activities(activities)
|
||||||
|
@ -82,8 +84,8 @@ def render("index.json", opts) do
|
||||||
|> Enum.map(&Object.normalize(&1).data["id"])
|
|> Enum.map(&Object.normalize(&1).data["id"])
|
||||||
|> Activity.create_by_object_ap_id()
|
|> Activity.create_by_object_ap_id()
|
||||||
|> Activity.with_preloaded_object(:left)
|
|> Activity.with_preloaded_object(:left)
|
||||||
|> Activity.with_preloaded_bookmark(opts[:for])
|
|> Activity.with_preloaded_bookmark(reading_user)
|
||||||
|> Activity.with_set_thread_muted_field(opts[:for])
|
|> Activity.with_set_thread_muted_field(reading_user)
|
||||||
|> Repo.all()
|
|> Repo.all()
|
||||||
|
|
||||||
relationships_opt =
|
relationships_opt =
|
||||||
|
@ -91,13 +93,13 @@ def render("index.json", opts) do
|
||||||
Map.has_key?(opts, :relationships) ->
|
Map.has_key?(opts, :relationships) ->
|
||||||
opts[:relationships]
|
opts[:relationships]
|
||||||
|
|
||||||
is_nil(opts[:for]) ->
|
is_nil(reading_user) ->
|
||||||
UserRelationship.view_relationships_option(nil, [])
|
UserRelationship.view_relationships_option(nil, [])
|
||||||
|
|
||||||
true ->
|
true ->
|
||||||
actors = Enum.map(activities ++ parent_activities, &get_user(&1.data["actor"]))
|
actors = Enum.map(activities ++ parent_activities, &get_user(&1.data["actor"]))
|
||||||
|
|
||||||
UserRelationship.view_relationships_option(opts[:for], actors)
|
UserRelationship.view_relationships_option(reading_user, actors)
|
||||||
end
|
end
|
||||||
|
|
||||||
opts =
|
opts =
|
||||||
|
|
|
@ -6,7 +6,12 @@ defmodule Pleroma.Web.Metadata do
|
||||||
alias Phoenix.HTML
|
alias Phoenix.HTML
|
||||||
|
|
||||||
def build_tags(params) do
|
def build_tags(params) do
|
||||||
Enum.reduce(Pleroma.Config.get([__MODULE__, :providers], []), "", fn parser, acc ->
|
providers = [
|
||||||
|
Pleroma.Web.Metadata.Providers.RestrictIndexing
|
||||||
|
| Pleroma.Config.get([__MODULE__, :providers], [])
|
||||||
|
]
|
||||||
|
|
||||||
|
Enum.reduce(providers, "", fn parser, acc ->
|
||||||
rendered_html =
|
rendered_html =
|
||||||
params
|
params
|
||||||
|> parser.build_tags()
|
|> parser.build_tags()
|
||||||
|
|
|
@@ -68,7 +68,7 @@ def build_tags(%{user: user}) do
         property: "og:title",
         content: Utils.user_name_string(user)
       ], []},
-      {:meta, [property: "og:url", content: User.profile_url(user)], []},
+      {:meta, [property: "og:url", content: user.uri || user.ap_id], []},
       {:meta, [property: "og:description", content: truncated_bio], []},
       {:meta, [property: "og:type", content: "website"], []},
       {:meta, [property: "og:image", content: Utils.attachment_url(User.avatar_url(user))], []},
lib/pleroma/web/metadata/restrict_indexing.ex (new file, 25 lines)

@@ -0,0 +1,25 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Web.Metadata.Providers.RestrictIndexing do
+  @behaviour Pleroma.Web.Metadata.Providers.Provider
+
+  @moduledoc """
+  Restricts indexing of remote users.
+  """
+
+  @impl true
+  def build_tags(%{user: %{local: false}}) do
+    [
+      {:meta,
+       [
+         name: "robots",
+         content: "noindex, noarchive"
+       ], []}
+    ]
+  end
+
+  @impl true
+  def build_tags(%{user: %{local: true}}), do: []
+end
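A rough usage sketch of the new provider, derived directly from the pattern matches above: remote users get a `robots` meta tag, local users get nothing.

```elixir
iex> Pleroma.Web.Metadata.Providers.RestrictIndexing.build_tags(%{user: %{local: false}})
[{:meta, [name: "robots", content: "noindex, noarchive"], []}]

iex> Pleroma.Web.Metadata.Providers.RestrictIndexing.build_tags(%{user: %{local: true}})
[]
```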
@@ -79,7 +79,8 @@ def raw_nodeinfo do
         end,
         if Config.get([:instance, :safe_dm_mentions]) do
           "safe_dm_mentions"
-        end
+        end,
+        "pleroma_emoji_reactions"
       ]
       |> Enum.filter(& &1)

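A minimal sketch of the pattern used in this hunk (variable names invented): an `if` without an `else` yields `nil`, and the trailing `Enum.filter(& &1)` drops those entries, so `"pleroma_emoji_reactions"` is always advertised while `"safe_dm_mentions"` only appears when the setting is enabled.

```elixir
safe_dm_mentions? = true

features =
  [
    if(safe_dm_mentions?, do: "safe_dm_mentions"),
    "pleroma_emoji_reactions"
  ]
  |> Enum.filter(& &1)

# => ["safe_dm_mentions", "pleroma_emoji_reactions"]
```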
@@ -3,11 +3,9 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Web.RelMe do
-  @hackney_options [
+  @options [
     pool: :media,
-    recv_timeout: 2_000,
-    max_body: 2_000_000,
-    with_body: true
+    max_body: 2_000_000
   ]

   if Pleroma.Config.get(:env) == :test do
@@ -25,8 +23,18 @@ def parse(url) when is_binary(url) do
   def parse(_), do: {:error, "No URL provided"}

   defp parse_url(url) do
+    opts =
+      if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
+        Keyword.merge(@options,
+          recv_timeout: 2_000,
+          with_body: true
+        )
+      else
+        @options
+      end
+
     with {:ok, %Tesla.Env{body: html, status: status}} when status in 200..299 <-
-           Pleroma.HTTP.get(url, [], adapter: @hackney_options),
+           Pleroma.HTTP.get(url, [], adapter: opts),
          {:ok, html_tree} <- Floki.parse_document(html),
          data <-
            Floki.attribute(html_tree, "link[rel~=me]", "href") ++
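The `recv_timeout` and `with_body` options are hackney-specific, so they are now merged back in only when hackney is the active Tesla adapter; the gun adapter receives just the shared `pool`/`max_body` options. A sketch of the adapter selection this branch reads, using the standard Tesla configuration key (assumed, not shown in this diff):

```elixir
# config/config.exs (illustrative): Application.get_env(:tesla, :adapter)
# returns whichever adapter module is configured here.
config :tesla, adapter: Tesla.Adapter.Hackney
# or, when using the new gun-based adapter:
config :tesla, adapter: Tesla.Adapter.Gun
```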
@@ -3,11 +3,9 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Web.RichMedia.Parser do
-  @hackney_options [
+  @options [
     pool: :media,
-    recv_timeout: 2_000,
-    max_body: 2_000_000,
-    with_body: true
+    max_body: 2_000_000
   ]

   defp parsers do
@@ -77,8 +75,18 @@ defp get_ttl_from_image(data, url) do
   end

   defp parse_url(url) do
+    opts =
+      if Application.get_env(:tesla, :adapter) == Tesla.Adapter.Hackney do
+        Keyword.merge(@options,
+          recv_timeout: 2_000,
+          with_body: true
+        )
+      else
+        @options
+      end
+
     try do
-      {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: @hackney_options)
+      {:ok, %Tesla.Env{body: html}} = Pleroma.HTTP.get(url, [], adapter: opts)

       html
       |> parse_html()
@@ -186,7 +186,6 @@ defmodule Pleroma.Web.Router do
       patch("/users/resend_confirmation_email", AdminAPIController, :resend_confirmation_email)

       get("/reports", AdminAPIController, :list_reports)
-      get("/grouped_reports", AdminAPIController, :list_grouped_reports)
       get("/reports/:id", AdminAPIController, :report_show)
       patch("/reports", AdminAPIController, :reports_update)
       post("/reports/:id/notes", AdminAPIController, :report_notes_create)
@@ -1,5 +1,5 @@
 <div class="p-author h-card">
-  <a class="u-url" rel="author noopener" href="<%= User.profile_url(@user) %>">
+  <a class="u-url" rel="author noopener" href="<%= (@user.uri || @user.ap_id) %>">
     <div class="avatar">
       <img src="<%= User.avatar_url(@user) |> MediaProxy.url %>" width="48" height="48" alt="">
     </div>
@@ -8,7 +8,7 @@
     <button type="submit" class="collapse">Remote follow</button>
   </form>
   <%= raw Formatter.emojify(@user.name, emoji_for_user(@user)) %> |
-  <%= link "@#{@user.nickname}@#{Endpoint.host()}", to: User.profile_url(@user) %>
+  <%= link "@#{@user.nickname}@#{Endpoint.host()}", to: (@user.uri || @user.ap_id) %>
 </h3>
 <p><%= raw @user.bio %></p>
 </header>
@@ -173,7 +173,8 @@ def find_lrdd_template(domain) do
       get_template_from_xml(body)
     else
       _ ->
-        with {:ok, %{body: body}} <- HTTP.get("https://#{domain}/.well-known/host-meta", []) do
+        with {:ok, %{body: body, status: status}} when status in 200..299 <-
+               HTTP.get("https://#{domain}/.well-known/host-meta", []) do
           get_template_from_xml(body)
         else
           e -> {:error, "Can't find LRDD template: #{inspect(e)}"}
@@ -205,7 +206,7 @@ def finger(account) do
     with response <-
            HTTP.get(
              address,
-             Accept: "application/xrd+xml,application/jrd+json"
+             [{"accept", "application/xrd+xml,application/jrd+json"}]
            ),
          {:ok, %{status: status, body: body}} when status in 200..299 <- response do
       doc = XML.parse_document(body)
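The `finger/1` change swaps an Elixir keyword list for an explicit list of string tuples. The keyword form reaches the HTTP layer with an atom header name, while the tuple form uses a binary name, which is the shape Tesla adapters generally expect. An illustrative comparison of the two shapes (not taken from the diff):

```elixir
# Keyword-list form: sugar for [{:Accept, "..."}] — atom header name.
headers_old = [Accept: "application/xrd+xml,application/jrd+json"]

# Explicit tuple form: binary header name, consistent across adapters.
headers_new = [{"accept", "application/xrd+xml,application/jrd+json"}]
```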
mix.exs (10 lines changed)

@@ -119,7 +119,15 @@ defp deps do
       {:calendar, "~> 0.17.4"},
       {:cachex, "~> 3.2"},
       {:poison, "~> 3.0", override: true},
-      {:tesla, "~> 1.3", override: true},
+      # {:tesla, "~> 1.3", override: true},
+      {:tesla,
+       git: "https://git.pleroma.social/pleroma/elixir-libraries/tesla.git",
+       ref: "61b7503cef33f00834f78ddfafe0d5d9dec2270b",
+       override: true},
+      {:castore, "~> 0.1"},
+      {:cowlib, "~> 2.8", override: true},
+      {:gun,
+       github: "ninenines/gun", ref: "e1a69b36b180a574c0ac314ced9613fdd52312cc", override: true},
       {:jason, "~> 1.0"},
       {:mogrify, "~> 0.6.1"},
       {:ex_aws, "~> 2.1"},
mix.lock (4 lines changed)

@@ -10,6 +10,7 @@
   "cachex": {:hex, :cachex, "3.2.0", "a596476c781b0646e6cb5cd9751af2e2974c3e0d5498a8cab71807618b74fe2f", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "aef93694067a43697ae0531727e097754a9e992a1e7946296f5969d6dd9ac986"},
   "calendar": {:hex, :calendar, "0.17.6", "ec291cb2e4ba499c2e8c0ef5f4ace974e2f9d02ae9e807e711a9b0c7850b9aee", [:mix], [{:tzdata, "~> 0.5.20 or ~> 0.1.201603 or ~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "738d0e17a93c2ccfe4ddc707bdc8e672e9074c8569498483feb1c4530fb91b2b"},
   "captcha": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git", "e0f16822d578866e186a0974d65ad58cddc1e2ab", [ref: "e0f16822d578866e186a0974d65ad58cddc1e2ab"]},
+  "castore": {:hex, :castore, "0.1.5", "591c763a637af2cc468a72f006878584bc6c306f8d111ef8ba1d4c10e0684010", [:mix], [], "hexpm", "6db356b2bc6cc22561e051ff545c20ad064af57647e436650aa24d7d06cd941a"},
   "certifi": {:hex, :certifi, "2.5.1", "867ce347f7c7d78563450a18a6a28a8090331e77fa02380b4a21962a65d36ee5", [:rebar3], [{:parse_trans, "~>3.3", [hex: :parse_trans, repo: "hexpm", optional: false]}], "hexpm", "805abd97539caf89ec6d4732c91e62ba9da0cda51ac462380bbd28ee697a8c42"},
   "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
   "comeonin": {:hex, :comeonin, "4.1.2", "3eb5620fd8e35508991664b4c2b04dd41e52f1620b36957be837c1d7784b7592", [:mix], [{:argon2_elixir, "~> 1.2", [hex: :argon2_elixir, repo: "hexpm", optional: true]}, {:bcrypt_elixir, "~> 0.12.1 or ~> 1.0", [hex: :bcrypt_elixir, repo: "hexpm", optional: true]}, {:pbkdf2_elixir, "~> 0.12", [hex: :pbkdf2_elixir, repo: "hexpm", optional: true]}], "hexpm", "d8700a0ca4dbb616c22c9b3f6dd539d88deaafec3efe66869d6370c9a559b3e9"},
@@ -46,6 +47,7 @@
   "gen_stage": {:hex, :gen_stage, "0.14.3", "d0c66f1c87faa301c1a85a809a3ee9097a4264b2edf7644bf5c123237ef732bf", [:mix], [], "hexpm"},
   "gen_state_machine": {:hex, :gen_state_machine, "2.0.5", "9ac15ec6e66acac994cc442dcc2c6f9796cf380ec4b08267223014be1c728a95", [:mix], [], "hexpm"},
   "gettext": {:hex, :gettext, "0.17.4", "f13088e1ec10ce01665cf25f5ff779e7df3f2dc71b37084976cf89d1aa124d5c", [:mix], [], "hexpm", "3c75b5ea8288e2ee7ea503ff9e30dfe4d07ad3c054576a6e60040e79a801e14d"},
+  "gun": {:git, "https://github.com/ninenines/gun.git", "e1a69b36b180a574c0ac314ced9613fdd52312cc", [ref: "e1a69b36b180a574c0ac314ced9613fdd52312cc"]},
   "hackney": {:hex, :hackney, "1.15.2", "07e33c794f8f8964ee86cebec1a8ed88db5070e52e904b8f12209773c1036085", [:rebar3], [{:certifi, "2.5.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.5", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm", "e0100f8ef7d1124222c11ad362c857d3df7cb5f4204054f9f0f4a728666591fc"},
   "html_entities": {:hex, :html_entities, "0.5.1", "1c9715058b42c35a2ab65edc5b36d0ea66dd083767bef6e3edb57870ef556549", [:mix], [], "hexpm", "30efab070904eb897ff05cd52fa61c1025d7f8ef3a9ca250bc4e6513d16c32de"},
   "html_sanitize_ex": {:hex, :html_sanitize_ex, "1.3.0", "f005ad692b717691203f940c686208aa3d8ffd9dd4bb3699240096a51fa9564e", [:mix], [{:mochiweb, "~> 2.15", [hex: :mochiweb, repo: "hexpm", optional: false]}], "hexpm"},
@@ -101,7 +103,7 @@
   "swoosh": {:hex, :swoosh, "0.23.5", "bfd9404bbf5069b1be2ffd317923ce57e58b332e25dbca2a35dedd7820dfee5a", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}], "hexpm", "e3928e1d2889a308aaf3e42755809ac21cffd77cb58eef01cbfdab4ce2fd1e21"},
   "syslog": {:hex, :syslog, "1.0.6", "995970c9aa7feb380ac493302138e308d6e04fd57da95b439a6df5bb3bf75076", [:rebar3], [], "hexpm", "769ddfabd0d2a16f3f9c17eb7509951e0ca4f68363fb26f2ee51a8ec4a49881a"},
   "telemetry": {:hex, :telemetry, "0.4.1", "ae2718484892448a24470e6aa341bc847c3277bfb8d4e9289f7474d752c09c7f", [:rebar3], [], "hexpm", "4738382e36a0a9a2b6e25d67c960e40e1a2c95560b9f936d8e29de8cd858480f"},
-  "tesla": {:hex, :tesla, "1.3.2", "deb92c5c9ce35e747a395ba413ca78593a4f75bf0e1545630ee2e3d34264021e", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: true]}, {:exjsx, ">= 3.0.0", [hex: :exjsx, repo: "hexpm", optional: true]}, {:fuse, "~> 2.4", [hex: :fuse, repo: "hexpm", optional: true]}, {:gun, "~> 1.3", [hex: :gun, repo: "hexpm", optional: true]}, {:hackney, "~> 1.6", [hex: :hackney, repo: "hexpm", optional: true]}, {:ibrowse, "~> 4.4.0", [hex: :ibrowse, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.0", [hex: :mint, repo: "hexpm", optional: true]}, {:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.3", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "7567704c4790e21bd9a961b56d0b6a988ff68cc4dacfe6b2106e258da1d5cdda"},
+  "tesla": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/tesla.git", "61b7503cef33f00834f78ddfafe0d5d9dec2270b", [ref: "61b7503cef33f00834f78ddfafe0d5d9dec2270b"]},
   "timex": {:hex, :timex, "3.6.1", "efdf56d0e67a6b956cc57774353b0329c8ab7726766a11547e529357ffdc1d56", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 0.1.8 or ~> 0.5 or ~> 1.0.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "f354efb2400dd7a80fd9eb6c8419068c4f632da4ac47f3d8822d6e33f08bc852"},
   "trailing_format_plug": {:hex, :trailing_format_plug, "0.0.7", "64b877f912cf7273bed03379936df39894149e35137ac9509117e59866e10e45", [:mix], [{:plug, "> 0.12.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "bd4fde4c15f3e993a999e019d64347489b91b7a9096af68b2bdadd192afa693f"},
   "tzdata": {:hex, :tzdata, "0.5.22", "f2ba9105117ee0360eae2eca389783ef7db36d533899b2e84559404dbc77ebb8", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "cd66c8a1e6a9e121d1f538b01bef459334bb4029a1ffb4eeeb5e4eae0337e7b6"},
@@ -1,5 +1,5 @@
 # Pleroma: A lightweight social networking server
-# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Repo.Migrations.CreateConversations do
Admin FE static assets (binary files, contents not shown):
  priv/static/adminfe/chunk-13e9.98eaadba.css (new file)
  priv/static/adminfe/chunk-2b9c.feb61a2b.css (new file)
  priv/static/adminfe/chunk-87b3.3c6ede9c.css (new file)
  several other adminfe chunk and JS assets changed (binary, not shown)

Admin FE index page (updated asset hashes):

@@ -1 +1 @@
-<!DOCTYPE html><html><head><meta charset=utf-8><meta http-equiv=X-UA-Compatible content="IE=edge,chrome=1"><meta name=renderer content=webkit><meta name=viewport content="width=device-width,initial-scale=1,maximum-scale=1,user-scalable=no"><title>Admin FE</title><link rel="shortcut icon" href=favicon.ico><link href=chunk-elementUI.1abbc9b8.css rel=stylesheet><link href=chunk-libs.686b5876.css rel=stylesheet><link href=app.c836e084.css rel=stylesheet></head><body><div id=app></div><script type=text/javascript src=static/js/runtime.fa19e5d1.js></script><script type=text/javascript src=static/js/chunk-elementUI.fba0efec.js></script><script type=text/javascript src=static/js/chunk-libs.b8c453ab.js></script><script type=text/javascript src=static/js/app.d2c3c6b3.js></script></body></html>
+<!DOCTYPE html><html><head><meta charset=utf-8><meta http-equiv=X-UA-Compatible content="IE=edge,chrome=1"><meta name=renderer content=webkit><meta name=viewport content="width=device-width,initial-scale=1,maximum-scale=1,user-scalable=no"><title>Admin FE</title><link rel="shortcut icon" href=favicon.ico><link href=chunk-elementUI.1abbc9b8.css rel=stylesheet><link href=chunk-libs.686b5876.css rel=stylesheet><link href=app.85534e14.css rel=stylesheet></head><body><div id=app></div><script type=text/javascript src=static/js/runtime.cb26bbd1.js></script><script type=text/javascript src=static/js/chunk-elementUI.fba0efec.js></script><script type=text/javascript src=static/js/chunk-libs.b8c453ab.js></script><script type=text/javascript src=static/js/app.d898cc2b.js></script></body></html>
Some files were not shown because too many files have changed in this diff.