2018-12-23 20:04:54 +00:00
|
|
|
# Pleroma: A lightweight social networking server
|
2020-03-02 05:08:45 +00:00
|
|
|
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
|
2018-12-23 20:04:54 +00:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
|
|
|
|
2017-03-21 08:21:52 +00:00
|
|
|
defmodule Pleroma.Web.ActivityPub.ActivityPub do
|
2019-02-09 15:16:26 +00:00
|
|
|
alias Pleroma.Activity
|
2019-09-16 10:03:37 +00:00
|
|
|
alias Pleroma.Activity.Ir.Topics
|
2019-06-03 13:04:39 +00:00
|
|
|
alias Pleroma.Config
|
2019-12-30 08:30:20 +00:00
|
|
|
alias Pleroma.Constants
|
2019-04-10 14:33:45 +00:00
|
|
|
alias Pleroma.Conversation
|
2019-10-17 12:25:15 +00:00
|
|
|
alias Pleroma.Conversation.Participation
|
2019-03-05 02:52:23 +00:00
|
|
|
alias Pleroma.Notification
|
2019-02-09 15:16:26 +00:00
|
|
|
alias Pleroma.Object
|
2019-07-14 17:47:08 +00:00
|
|
|
alias Pleroma.Object.Containment
|
2018-12-01 22:53:10 +00:00
|
|
|
alias Pleroma.Object.Fetcher
|
2019-03-25 22:13:58 +00:00
|
|
|
alias Pleroma.Pagination
|
2019-03-05 02:52:23 +00:00
|
|
|
alias Pleroma.Repo
|
2019-02-09 15:16:26 +00:00
|
|
|
alias Pleroma.Upload
|
|
|
|
alias Pleroma.User
|
|
|
|
alias Pleroma.Web.ActivityPub.MRF
|
2019-03-05 02:52:23 +00:00
|
|
|
alias Pleroma.Web.ActivityPub.Transmogrifier
|
2019-10-05 12:49:45 +00:00
|
|
|
alias Pleroma.Web.ActivityPub.Utils
|
2019-09-16 10:03:37 +00:00
|
|
|
alias Pleroma.Web.Streamer
|
2019-03-05 02:52:23 +00:00
|
|
|
alias Pleroma.Web.WebFinger
|
2019-08-13 17:20:26 +00:00
|
|
|
alias Pleroma.Workers.BackgroundWorker
|
2019-02-06 19:20:02 +00:00
|
|
|
|
2017-03-21 16:53:20 +00:00
|
|
|
import Ecto.Query
|
2017-05-16 13:31:11 +00:00
|
|
|
import Pleroma.Web.ActivityPub.Utils
|
2019-02-22 12:29:52 +00:00
|
|
|
import Pleroma.Web.ActivityPub.Visibility
|
2019-02-06 19:20:02 +00:00
|
|
|
|
2017-05-07 18:16:07 +00:00
|
|
|
require Logger
|
2019-07-29 02:43:19 +00:00
|
|
|
require Pleroma.Constants
|
2017-03-21 08:21:52 +00:00
|
|
|
|
2018-06-18 04:33:41 +00:00
|
|
|
# For Announce activities, we filter the recipients based on following status for any actors
|
|
|
|
# that match actual users. See issue #164 for more information about why this is necessary.
|
2018-08-29 08:37:36 +00:00
|
|
|
defp get_recipients(%{"type" => "Announce"} = data) do
|
2019-05-01 09:11:17 +00:00
|
|
|
to = Map.get(data, "to", [])
|
|
|
|
cc = Map.get(data, "cc", [])
|
|
|
|
bcc = Map.get(data, "bcc", [])
|
2018-06-18 04:33:41 +00:00
|
|
|
actor = User.get_cached_by_ap_id(data["actor"])
|
|
|
|
|
2019-02-06 20:19:35 +00:00
|
|
|
recipients =
|
2019-05-01 09:11:17 +00:00
|
|
|
Enum.filter(Enum.concat([to, cc, bcc]), fn recipient ->
|
2019-02-06 20:19:35 +00:00
|
|
|
case User.get_cached_by_ap_id(recipient) do
|
2019-05-01 09:11:17 +00:00
|
|
|
nil -> true
|
|
|
|
user -> User.following?(user, actor)
|
2019-02-06 20:19:35 +00:00
|
|
|
end
|
|
|
|
end)
|
2018-08-29 08:37:36 +00:00
|
|
|
|
|
|
|
{recipients, to, cc}
|
2018-06-18 04:33:41 +00:00
|
|
|
end
|
|
|
|
|
2019-01-18 19:40:52 +00:00
|
|
|
defp get_recipients(%{"type" => "Create"} = data) do
|
2019-05-01 09:11:17 +00:00
|
|
|
to = Map.get(data, "to", [])
|
|
|
|
cc = Map.get(data, "cc", [])
|
|
|
|
bcc = Map.get(data, "bcc", [])
|
|
|
|
actor = Map.get(data, "actor", [])
|
|
|
|
recipients = [to, cc, bcc, [actor]] |> Enum.concat() |> Enum.uniq()
|
2019-01-18 19:40:52 +00:00
|
|
|
{recipients, to, cc}
|
|
|
|
end
|
|
|
|
|
2018-08-29 08:37:36 +00:00
|
|
|
defp get_recipients(data) do
|
2019-05-01 09:11:17 +00:00
|
|
|
to = Map.get(data, "to", [])
|
|
|
|
cc = Map.get(data, "cc", [])
|
|
|
|
bcc = Map.get(data, "bcc", [])
|
|
|
|
recipients = Enum.concat([to, cc, bcc])
|
2018-08-29 08:37:36 +00:00
|
|
|
{recipients, to, cc}
|
2017-12-12 17:07:14 +00:00
|
|
|
end
|
|
|
|
|
2018-05-13 23:28:56 +00:00
|
|
|
defp check_actor_is_active(actor) do
|
2018-05-19 03:17:56 +00:00
|
|
|
if not is_nil(actor) do
|
|
|
|
with user <- User.get_cached_by_ap_id(actor),
|
2019-10-16 18:59:21 +00:00
|
|
|
false <- user.deactivated do
|
2019-08-19 17:36:25 +00:00
|
|
|
true
|
2018-05-19 03:17:56 +00:00
|
|
|
else
|
2019-08-19 17:36:25 +00:00
|
|
|
_e -> false
|
2018-05-19 03:17:56 +00:00
|
|
|
end
|
2018-05-13 23:28:56 +00:00
|
|
|
else
|
2019-08-19 17:36:25 +00:00
|
|
|
true
|
2018-05-13 23:28:56 +00:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-01-26 02:50:49 +00:00
|
|
|
defp check_remote_limit(%{"object" => %{"content" => content}}) when not is_nil(content) do
|
2019-06-03 13:04:39 +00:00
|
|
|
limit = Config.get([:instance, :remote_limit])
|
2018-12-26 11:39:35 +00:00
|
|
|
String.length(content) <= limit
|
|
|
|
end
|
|
|
|
|
|
|
|
defp check_remote_limit(_), do: true
|
|
|
|
|
2019-03-03 10:21:03 +00:00
|
|
|
def increase_note_count_if_public(actor, object) do
|
|
|
|
if is_public?(object), do: User.increase_note_count(actor), else: {:ok, actor}
|
|
|
|
end
|
|
|
|
|
|
|
|
def decrease_note_count_if_public(actor, object) do
|
|
|
|
if is_public?(object), do: User.decrease_note_count(actor), else: {:ok, actor}
|
|
|
|
end
|
|
|
|
|
2019-03-25 17:21:48 +00:00
|
|
|
def increase_replies_count_if_reply(%{
|
2019-04-15 08:50:36 +00:00
|
|
|
"object" => %{"inReplyTo" => reply_ap_id} = object,
|
2019-03-25 17:21:48 +00:00
|
|
|
"type" => "Create"
|
|
|
|
}) do
|
|
|
|
if is_public?(object) do
|
|
|
|
Object.increase_replies_count(reply_ap_id)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def increase_replies_count_if_reply(_create_data), do: :noop
|
|
|
|
|
|
|
|
def decrease_replies_count_if_reply(%Object{
|
2019-04-15 08:50:36 +00:00
|
|
|
data: %{"inReplyTo" => reply_ap_id} = object
|
2019-03-25 17:21:48 +00:00
|
|
|
}) do
|
|
|
|
if is_public?(object) do
|
|
|
|
Object.decrease_replies_count(reply_ap_id)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def decrease_replies_count_if_reply(_object), do: :noop
|
|
|
|
|
2019-05-21 11:12:10 +00:00
|
|
|
def increase_poll_votes_if_vote(%{
|
|
|
|
"object" => %{"inReplyTo" => reply_ap_id, "name" => name},
|
|
|
|
"type" => "Create"
|
|
|
|
}) do
|
|
|
|
Object.increase_vote_count(reply_ap_id, name)
|
|
|
|
end
|
|
|
|
|
|
|
|
def increase_poll_votes_if_vote(_create_data), do: :noop
|
|
|
|
|
2019-10-16 14:16:39 +00:00
|
|
|
@spec persist(map(), keyword()) :: {:ok, Activity.t() | Object.t()}
|
|
|
|
def persist(object, meta) do
|
2020-03-26 14:16:54 +00:00
|
|
|
with local <- Keyword.fetch!(meta, :local),
|
|
|
|
{recipients, _, _} <- get_recipients(object),
|
|
|
|
{:ok, activity} <-
|
|
|
|
Repo.insert(%Activity{
|
|
|
|
data: object,
|
|
|
|
local: local,
|
|
|
|
recipients: recipients,
|
|
|
|
actor: object["actor"]
|
|
|
|
}) do
|
|
|
|
{:ok, activity, meta}
|
|
|
|
end
|
2019-10-16 14:16:39 +00:00
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec insert(map(), boolean(), boolean(), boolean()) :: {:ok, Activity.t()} | {:error, any()}
|
2019-08-14 22:35:29 +00:00
|
|
|
def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when is_map(map) do
|
2018-06-18 21:21:03 +00:00
|
|
|
with nil <- Activity.normalize(map),
|
2019-03-30 10:57:54 +00:00
|
|
|
map <- lazy_put_activity_defaults(map, fake),
|
2019-08-19 17:36:25 +00:00
|
|
|
true <- bypass_actor_check || check_actor_is_active(map["actor"]),
|
2018-12-26 11:39:35 +00:00
|
|
|
{_, true} <- {:remote_limit_error, check_remote_limit(map)},
|
2018-05-10 16:34:09 +00:00
|
|
|
{:ok, map} <- MRF.filter(map),
|
2019-03-29 18:59:04 +00:00
|
|
|
{recipients, _, _} = get_recipients(map),
|
|
|
|
{:fake, false, map, recipients} <- {:fake, fake, map, recipients},
|
2019-10-18 03:57:32 +00:00
|
|
|
{:containment, :ok} <- {:containment, Containment.contain_child(map)},
|
2019-04-17 09:22:32 +00:00
|
|
|
{:ok, map, object} <- insert_full_object(map) do
|
2018-03-30 13:01:53 +00:00
|
|
|
{:ok, activity} =
|
|
|
|
Repo.insert(%Activity{
|
|
|
|
data: map,
|
|
|
|
local: local,
|
|
|
|
actor: map["actor"],
|
2018-08-29 18:38:30 +00:00
|
|
|
recipients: recipients
|
2018-03-30 13:01:53 +00:00
|
|
|
})
|
|
|
|
|
2019-03-23 00:40:08 +00:00
|
|
|
# Splice in the child object if we have one.
|
|
|
|
activity =
|
2019-08-27 13:21:03 +00:00
|
|
|
if not is_nil(object) do
|
2019-03-23 00:40:08 +00:00
|
|
|
Map.put(activity, :object, object)
|
|
|
|
else
|
|
|
|
activity
|
|
|
|
end
|
|
|
|
|
2019-08-31 18:58:42 +00:00
|
|
|
BackgroundWorker.enqueue("fetch_data_for_activity", %{"activity_id" => activity.id})
|
2019-01-28 06:07:18 +00:00
|
|
|
|
2017-09-11 14:15:28 +00:00
|
|
|
Notification.create_notifications(activity)
|
2019-05-03 11:39:14 +00:00
|
|
|
|
2019-10-17 12:25:15 +00:00
|
|
|
conversation = create_or_bump_conversation(activity, map["actor"])
|
|
|
|
participations = get_participations(conversation)
|
2017-11-19 12:47:50 +00:00
|
|
|
stream_out(activity)
|
2019-05-03 11:39:14 +00:00
|
|
|
stream_out_participations(participations)
|
2017-09-11 14:15:28 +00:00
|
|
|
{:ok, activity}
|
2017-05-16 13:31:11 +00:00
|
|
|
else
|
2019-03-29 18:59:04 +00:00
|
|
|
%Activity{} = activity ->
|
|
|
|
{:ok, activity}
|
|
|
|
|
|
|
|
{:fake, true, map, recipients} ->
|
2019-04-01 08:55:59 +00:00
|
|
|
activity = %Activity{
|
|
|
|
data: map,
|
|
|
|
local: local,
|
|
|
|
actor: map["actor"],
|
|
|
|
recipients: recipients,
|
|
|
|
id: "pleroma:fakeid"
|
|
|
|
}
|
|
|
|
|
2019-04-01 08:58:08 +00:00
|
|
|
Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
|
2019-04-01 08:55:59 +00:00
|
|
|
{:ok, activity}
|
2019-03-29 18:59:04 +00:00
|
|
|
|
|
|
|
error ->
|
|
|
|
{:error, error}
|
2017-05-07 18:13:10 +00:00
|
|
|
end
|
2017-03-21 08:21:52 +00:00
|
|
|
end
|
2017-03-21 16:53:20 +00:00
|
|
|
|
2019-10-17 12:25:15 +00:00
|
|
|
defp create_or_bump_conversation(activity, actor) do
|
|
|
|
with {:ok, conversation} <- Conversation.create_or_bump_for(activity),
|
|
|
|
%User{} = user <- User.get_cached_by_ap_id(actor),
|
|
|
|
Participation.mark_as_read(user, conversation) do
|
|
|
|
{:ok, conversation}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp get_participations({:ok, conversation}) do
|
|
|
|
conversation
|
|
|
|
|> Repo.preload(:participations, force: true)
|
|
|
|
|> Map.get(:participations)
|
|
|
|
end
|
|
|
|
|
2019-05-03 11:39:14 +00:00
|
|
|
defp get_participations(_), do: []
|
|
|
|
|
|
|
|
def stream_out_participations(participations) do
|
|
|
|
participations =
|
|
|
|
participations
|
|
|
|
|> Repo.preload(:user)
|
|
|
|
|
2019-09-16 10:03:37 +00:00
|
|
|
Streamer.stream("participation", participations)
|
2019-05-03 11:39:14 +00:00
|
|
|
end
|
|
|
|
|
2019-06-24 07:14:04 +00:00
|
|
|
def stream_out_participations(%Object{data: %{"context" => context}}, user) do
|
|
|
|
with %Conversation{} = conversation <- Conversation.get_for_ap_id(context),
|
|
|
|
conversation = Repo.preload(conversation, :participations),
|
|
|
|
last_activity_id =
|
|
|
|
fetch_latest_activity_id_for_context(conversation.ap_id, %{
|
|
|
|
"user" => user,
|
|
|
|
"blocking_user" => user
|
|
|
|
}) do
|
|
|
|
if last_activity_id do
|
|
|
|
stream_out_participations(conversation.participations)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
def stream_out_participations(_, _), do: :noop
|
|
|
|
|
2019-09-16 10:03:37 +00:00
|
|
|
def stream_out(%Activity{data: %{"type" => data_type}} = activity)
|
|
|
|
when data_type in ["Create", "Announce", "Delete"] do
|
|
|
|
activity
|
|
|
|
|> Topics.get_activity_topics()
|
|
|
|
|> Streamer.stream(activity)
|
|
|
|
end
|
|
|
|
|
|
|
|
def stream_out(_activity) do
|
|
|
|
:noop
|
2017-11-19 12:47:50 +00:00
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec create(map(), boolean()) :: {:ok, Activity.t()} | {:error, any()}
|
|
|
|
def create(params, fake \\ false) do
|
|
|
|
with {:ok, result} <- Repo.transaction(fn -> do_create(params, fake) end) do
|
|
|
|
result
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp do_create(%{to: to, actor: actor, context: context, object: object} = params, fake) do
|
2018-02-15 18:59:03 +00:00
|
|
|
additional = params[:additional] || %{}
|
2018-03-30 13:01:53 +00:00
|
|
|
# only accept false as false value
|
|
|
|
local = !(params[:local] == false)
|
2018-02-15 18:59:03 +00:00
|
|
|
published = params[:published]
|
2019-12-30 08:30:20 +00:00
|
|
|
quick_insert? = Config.get([:env]) == :benchmark
|
2018-02-15 18:59:03 +00:00
|
|
|
|
2018-03-30 13:01:53 +00:00
|
|
|
with create_data <-
|
|
|
|
make_create_data(
|
|
|
|
%{to: to, actor: actor, published: published, context: context, object: object},
|
|
|
|
additional
|
|
|
|
),
|
2019-03-29 18:59:04 +00:00
|
|
|
{:ok, activity} <- insert(create_data, local, fake),
|
|
|
|
{:fake, false, activity} <- {:fake, fake, activity},
|
2019-03-25 17:21:48 +00:00
|
|
|
_ <- increase_replies_count_if_reply(create_data),
|
2019-05-21 11:12:10 +00:00
|
|
|
_ <- increase_poll_votes_if_vote(create_data),
|
2019-09-04 17:18:11 +00:00
|
|
|
{:quick_insert, false, activity} <- {:quick_insert, quick_insert?, activity},
|
2019-03-03 10:21:03 +00:00
|
|
|
{:ok, _actor} <- increase_note_count_if_public(actor, activity),
|
2019-01-21 11:16:51 +00:00
|
|
|
:ok <- maybe_federate(activity) do
|
2017-04-24 16:46:34 +00:00
|
|
|
{:ok, activity}
|
2019-03-29 18:59:04 +00:00
|
|
|
else
|
2019-09-04 17:18:11 +00:00
|
|
|
{:quick_insert, true, activity} ->
|
|
|
|
{:ok, activity}
|
|
|
|
|
2019-03-29 18:59:04 +00:00
|
|
|
{:fake, true, activity} ->
|
|
|
|
{:ok, activity}
|
2019-08-03 18:12:38 +00:00
|
|
|
|
|
|
|
{:error, message} ->
|
2019-12-30 08:30:20 +00:00
|
|
|
Repo.rollback(message)
|
2017-04-24 16:46:34 +00:00
|
|
|
end
|
2017-03-21 08:21:52 +00:00
|
|
|
end
|
2017-03-21 16:53:20 +00:00
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec listen(map()) :: {:ok, Activity.t()} | {:error, any()}
|
2019-09-27 12:22:35 +00:00
|
|
|
def listen(%{to: to, actor: actor, context: context, object: object} = params) do
|
|
|
|
additional = params[:additional] || %{}
|
|
|
|
# only accept false as false value
|
|
|
|
local = !(params[:local] == false)
|
|
|
|
published = params[:published]
|
|
|
|
|
|
|
|
with listen_data <-
|
|
|
|
make_listen_data(
|
|
|
|
%{to: to, actor: actor, published: published, context: context, object: object},
|
|
|
|
additional
|
|
|
|
),
|
|
|
|
{:ok, activity} <- insert(listen_data, local),
|
|
|
|
:ok <- maybe_federate(activity) do
|
|
|
|
{:ok, activity}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec accept(map()) :: {:ok, Activity.t()} | {:error, any()}
|
2019-10-11 09:48:58 +00:00
|
|
|
def accept(params) do
|
|
|
|
accept_or_reject("Accept", params)
|
|
|
|
end
|
2018-02-17 15:08:55 +00:00
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec reject(map()) :: {:ok, Activity.t()} | {:error, any()}
|
2019-10-11 09:48:58 +00:00
|
|
|
def reject(params) do
|
|
|
|
accept_or_reject("Reject", params)
|
2018-02-17 15:08:55 +00:00
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec accept_or_reject(String.t(), map()) :: {:ok, Activity.t()} | {:error, any()}
|
2019-10-11 09:48:58 +00:00
|
|
|
def accept_or_reject(type, %{to: to, actor: actor, object: object} = params) do
|
|
|
|
local = Map.get(params, :local, true)
|
|
|
|
activity_id = Map.get(params, :activity_id, nil)
|
2018-05-26 12:07:46 +00:00
|
|
|
|
2019-10-11 09:48:58 +00:00
|
|
|
with data <-
|
|
|
|
%{"to" => to, "type" => type, "actor" => actor.ap_id, "object" => object}
|
|
|
|
|> Utils.maybe_put("id", activity_id),
|
2018-05-26 12:07:46 +00:00
|
|
|
{:ok, activity} <- insert(data, local),
|
2019-03-03 15:39:37 +00:00
|
|
|
:ok <- maybe_federate(activity) do
|
2018-05-26 12:07:46 +00:00
|
|
|
{:ok, activity}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec update(map()) :: {:ok, Activity.t()} | {:error, any()}
|
2018-02-25 15:14:25 +00:00
|
|
|
def update(%{to: to, cc: cc, actor: actor, object: object} = params) do
|
2018-03-30 13:01:53 +00:00
|
|
|
local = !(params[:local] == false)
|
2019-10-05 12:49:45 +00:00
|
|
|
activity_id = params[:activity_id]
|
2018-03-30 13:01:53 +00:00
|
|
|
|
|
|
|
with data <- %{
|
|
|
|
"to" => to,
|
|
|
|
"cc" => cc,
|
|
|
|
"type" => "Update",
|
|
|
|
"actor" => actor,
|
|
|
|
"object" => object
|
|
|
|
},
|
2019-10-05 12:49:45 +00:00
|
|
|
data <- Utils.maybe_put(data, "id", activity_id),
|
2018-02-25 15:14:25 +00:00
|
|
|
{:ok, activity} <- insert(data, local),
|
|
|
|
:ok <- maybe_federate(activity) do
|
|
|
|
{:ok, activity}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec react_with_emoji(User.t(), Object.t(), String.t(), keyword()) ::
|
|
|
|
{:ok, Activity.t(), Object.t()} | {:error, any()}
|
2019-08-26 21:29:51 +00:00
|
|
|
def react_with_emoji(user, object, emoji, options \\ []) do
|
2019-12-30 08:30:20 +00:00
|
|
|
with {:ok, result} <-
|
|
|
|
Repo.transaction(fn -> do_react_with_emoji(user, object, emoji, options) end) do
|
|
|
|
result
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp do_react_with_emoji(user, object, emoji, options) do
|
2019-08-26 21:29:51 +00:00
|
|
|
with local <- Keyword.get(options, :local, true),
|
|
|
|
activity_id <- Keyword.get(options, :activity_id, nil),
|
2020-01-30 15:07:37 +00:00
|
|
|
true <- Pleroma.Emoji.is_unicode_emoji?(emoji),
|
2019-08-26 21:29:51 +00:00
|
|
|
reaction_data <- make_emoji_reaction_data(user, object, emoji, activity_id),
|
2019-09-03 21:50:04 +00:00
|
|
|
{:ok, activity} <- insert(reaction_data, local),
|
2019-09-30 13:51:09 +00:00
|
|
|
{:ok, object} <- add_emoji_reaction_to_object(activity, object),
|
|
|
|
:ok <- maybe_federate(activity) do
|
2019-08-26 21:29:51 +00:00
|
|
|
{:ok, activity, object}
|
2020-01-30 15:07:37 +00:00
|
|
|
else
|
2019-12-30 08:30:20 +00:00
|
|
|
false -> {:error, false}
|
|
|
|
{:error, error} -> Repo.rollback(error)
|
2019-08-26 21:29:51 +00:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec unreact_with_emoji(User.t(), String.t(), keyword()) ::
|
|
|
|
{:ok, Activity.t(), Object.t()} | {:error, any()}
|
2019-10-02 13:08:20 +00:00
|
|
|
def unreact_with_emoji(user, reaction_id, options \\ []) do
|
2019-12-30 08:30:20 +00:00
|
|
|
with {:ok, result} <-
|
|
|
|
Repo.transaction(fn -> do_unreact_with_emoji(user, reaction_id, options) end) do
|
|
|
|
result
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp do_unreact_with_emoji(user, reaction_id, options) do
|
2019-09-30 14:38:19 +00:00
|
|
|
with local <- Keyword.get(options, :local, true),
|
|
|
|
activity_id <- Keyword.get(options, :activity_id, nil),
|
2019-10-02 13:08:20 +00:00
|
|
|
user_ap_id <- user.ap_id,
|
|
|
|
%Activity{actor: ^user_ap_id} = reaction_activity <- Activity.get_by_ap_id(reaction_id),
|
|
|
|
object <- Object.normalize(reaction_activity),
|
|
|
|
unreact_data <- make_undo_data(user, reaction_activity, activity_id),
|
|
|
|
{:ok, activity} <- insert(unreact_data, local),
|
|
|
|
{:ok, object} <- remove_emoji_reaction_from_object(reaction_activity, object),
|
|
|
|
:ok <- maybe_federate(activity) do
|
|
|
|
{:ok, activity, object}
|
2020-01-30 15:07:37 +00:00
|
|
|
else
|
2019-12-30 08:30:20 +00:00
|
|
|
{:error, error} -> Repo.rollback(error)
|
2019-10-02 13:08:20 +00:00
|
|
|
end
|
2019-09-30 14:38:19 +00:00
|
|
|
end
|
|
|
|
|
2017-05-16 13:31:11 +00:00
|
|
|
# TODO: This is weird, maybe we shouldn't check here if we can make the activity.
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec like(User.t(), Object.t(), String.t() | nil, boolean()) ::
|
|
|
|
{:ok, Activity.t(), Object.t()} | {:error, any()}
|
|
|
|
def like(user, object, activity_id \\ nil, local \\ true) do
|
|
|
|
with {:ok, result} <- Repo.transaction(fn -> do_like(user, object, activity_id, local) end) do
|
|
|
|
result
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp do_like(
|
|
|
|
%User{ap_id: ap_id} = user,
|
|
|
|
%Object{data: %{"id" => _}} = object,
|
|
|
|
activity_id,
|
|
|
|
local
|
|
|
|
) do
|
2017-05-16 13:31:11 +00:00
|
|
|
with nil <- get_existing_like(ap_id, object),
|
|
|
|
like_data <- make_like_data(user, object, activity_id),
|
|
|
|
{:ok, activity} <- insert(like_data, local),
|
|
|
|
{:ok, object} <- add_like_to_object(activity, object),
|
|
|
|
:ok <- maybe_federate(activity) do
|
|
|
|
{:ok, activity, object}
|
|
|
|
else
|
2019-12-30 08:30:20 +00:00
|
|
|
%Activity{} = activity ->
|
|
|
|
{:ok, activity, object}
|
|
|
|
|
|
|
|
{:error, error} ->
|
|
|
|
Repo.rollback(error)
|
2017-04-14 13:07:24 +00:00
|
|
|
end
|
2017-04-13 13:50:05 +00:00
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec unlike(User.t(), Object.t(), String.t() | nil, boolean()) ::
|
|
|
|
{:ok, Activity.t(), Activity.t(), Object.t()} | {:ok, Object.t()} | {:error, any()}
|
2019-08-27 13:21:03 +00:00
|
|
|
def unlike(%User{} = actor, %Object{} = object, activity_id \\ nil, local \\ true) do
|
2019-12-30 08:30:20 +00:00
|
|
|
with {:ok, result} <-
|
|
|
|
Repo.transaction(fn -> do_unlike(actor, object, activity_id, local) end) do
|
|
|
|
result
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp do_unlike(actor, object, activity_id, local) do
|
2018-05-19 13:22:43 +00:00
|
|
|
with %Activity{} = like_activity <- get_existing_like(actor.ap_id, object),
|
|
|
|
unlike_data <- make_unlike_data(actor, like_activity, activity_id),
|
|
|
|
{:ok, unlike_activity} <- insert(unlike_data, local),
|
|
|
|
{:ok, _activity} <- Repo.delete(like_activity),
|
|
|
|
{:ok, object} <- remove_like_from_object(like_activity, object),
|
|
|
|
:ok <- maybe_federate(unlike_activity) do
|
|
|
|
{:ok, unlike_activity, like_activity, object}
|
2018-03-30 13:01:53 +00:00
|
|
|
else
|
2019-12-30 08:30:20 +00:00
|
|
|
nil -> {:ok, object}
|
|
|
|
{:error, error} -> Repo.rollback(error)
|
2017-04-14 16:08:47 +00:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec announce(User.t(), Object.t(), String.t() | nil, boolean(), boolean()) ::
|
|
|
|
{:ok, Activity.t(), Object.t()} | {:error, any()}
|
2018-03-30 13:01:53 +00:00
|
|
|
def announce(
|
|
|
|
%User{ap_id: _} = user,
|
|
|
|
%Object{data: %{"id" => _}} = object,
|
|
|
|
activity_id \\ nil,
|
2019-01-17 23:12:42 +00:00
|
|
|
local \\ true,
|
|
|
|
public \\ true
|
2018-03-30 13:01:53 +00:00
|
|
|
) do
|
2019-12-30 08:30:20 +00:00
|
|
|
with {:ok, result} <-
|
|
|
|
Repo.transaction(fn -> do_announce(user, object, activity_id, local, public) end) do
|
|
|
|
result
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp do_announce(user, object, activity_id, local, public) do
|
2019-10-01 15:49:52 +00:00
|
|
|
with true <- is_announceable?(object, user, public),
|
2019-01-17 23:12:42 +00:00
|
|
|
announce_data <- make_announce_data(user, object, activity_id, public),
|
2017-05-16 13:31:11 +00:00
|
|
|
{:ok, activity} <- insert(announce_data, local),
|
|
|
|
{:ok, object} <- add_announce_to_object(activity, object),
|
|
|
|
:ok <- maybe_federate(activity) do
|
|
|
|
{:ok, activity, object}
|
|
|
|
else
|
2019-12-30 08:30:20 +00:00
|
|
|
false -> {:error, false}
|
|
|
|
{:error, error} -> Repo.rollback(error)
|
2017-05-16 13:31:11 +00:00
|
|
|
end
|
2017-03-23 22:34:10 +00:00
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec unannounce(User.t(), Object.t(), String.t() | nil, boolean()) ::
|
|
|
|
{:ok, Activity.t(), Object.t()} | {:ok, Object.t()} | {:error, any()}
|
2018-04-23 01:28:51 +00:00
|
|
|
def unannounce(
|
|
|
|
%User{} = actor,
|
|
|
|
%Object{} = object,
|
2018-05-09 03:59:36 +00:00
|
|
|
activity_id \\ nil,
|
|
|
|
local \\ true
|
2018-04-23 01:28:51 +00:00
|
|
|
) do
|
2019-12-30 08:30:20 +00:00
|
|
|
with {:ok, result} <-
|
|
|
|
Repo.transaction(fn -> do_unannounce(actor, object, activity_id, local) end) do
|
|
|
|
result
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp do_unannounce(actor, object, activity_id, local) do
|
2018-05-09 01:52:21 +00:00
|
|
|
with %Activity{} = announce_activity <- get_existing_announce(actor.ap_id, object),
|
|
|
|
unannounce_data <- make_unannounce_data(actor, announce_activity, activity_id),
|
2018-04-18 07:39:42 +00:00
|
|
|
{:ok, unannounce_activity} <- insert(unannounce_data, local),
|
2018-05-09 01:52:21 +00:00
|
|
|
:ok <- maybe_federate(unannounce_activity),
|
|
|
|
{:ok, _activity} <- Repo.delete(announce_activity),
|
|
|
|
{:ok, object} <- remove_announce_from_object(announce_activity, object) do
|
2018-06-14 01:29:55 +00:00
|
|
|
{:ok, unannounce_activity, object}
|
2018-04-14 07:39:16 +00:00
|
|
|
else
|
2019-12-30 08:30:20 +00:00
|
|
|
nil -> {:ok, object}
|
|
|
|
{:error, error} -> Repo.rollback(error)
|
2018-04-14 07:39:16 +00:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec follow(User.t(), User.t(), String.t() | nil, boolean()) ::
|
|
|
|
{:ok, Activity.t()} | {:error, any()}
|
2017-05-16 13:31:11 +00:00
|
|
|
def follow(follower, followed, activity_id \\ nil, local \\ true) do
|
2019-12-30 08:30:20 +00:00
|
|
|
with {:ok, result} <-
|
|
|
|
Repo.transaction(fn -> do_follow(follower, followed, activity_id, local) end) do
|
|
|
|
result
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp do_follow(follower, followed, activity_id, local) do
|
2017-05-16 13:31:11 +00:00
|
|
|
with data <- make_follow_data(follower, followed, activity_id),
|
|
|
|
{:ok, activity} <- insert(data, local),
|
2020-03-22 14:10:37 +00:00
|
|
|
:ok <- maybe_federate(activity) do
|
2017-05-16 13:31:11 +00:00
|
|
|
{:ok, activity}
|
2019-12-30 08:30:20 +00:00
|
|
|
else
|
|
|
|
{:error, error} -> Repo.rollback(error)
|
2017-05-16 13:31:11 +00:00
|
|
|
end
|
2017-03-23 16:56:49 +00:00
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec unfollow(User.t(), User.t(), String.t() | nil, boolean()) ::
|
|
|
|
{:ok, Activity.t()} | nil | {:error, any()}
|
2018-05-21 01:01:14 +00:00
|
|
|
def unfollow(follower, followed, activity_id \\ nil, local \\ true) do
|
2019-12-30 08:30:20 +00:00
|
|
|
with {:ok, result} <-
|
|
|
|
Repo.transaction(fn -> do_unfollow(follower, followed, activity_id, local) end) do
|
|
|
|
result
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp do_unfollow(follower, followed, activity_id, local) do
|
2017-05-16 13:31:11 +00:00
|
|
|
with %Activity{} = follow_activity <- fetch_latest_follow(follower, followed),
|
2018-05-27 13:51:13 +00:00
|
|
|
{:ok, follow_activity} <- update_follow_state(follow_activity, "cancelled"),
|
2018-05-21 01:01:14 +00:00
|
|
|
unfollow_data <- make_unfollow_data(follower, followed, follow_activity, activity_id),
|
2017-05-16 13:31:11 +00:00
|
|
|
{:ok, activity} <- insert(unfollow_data, local),
|
2019-03-03 15:39:37 +00:00
|
|
|
:ok <- maybe_federate(activity) do
|
2017-05-16 13:31:11 +00:00
|
|
|
{:ok, activity}
|
2019-12-30 08:30:20 +00:00
|
|
|
else
|
|
|
|
nil -> nil
|
|
|
|
{:error, error} -> Repo.rollback(error)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
@spec delete(User.t() | Object.t(), keyword()) :: {:ok, User.t() | Object.t()} | {:error, any()}
|
|
|
|
def delete(entity, options \\ []) do
|
|
|
|
with {:ok, result} <- Repo.transaction(fn -> do_delete(entity, options) end) do
|
|
|
|
result
|
2017-05-16 13:31:11 +00:00
|
|
|
end
|
2017-03-23 22:34:10 +00:00
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
defp do_delete(%User{ap_id: ap_id, follower_address: follower_address} = user, _) do
|
2019-07-10 05:16:08 +00:00
|
|
|
with data <- %{
|
|
|
|
"to" => [follower_address],
|
|
|
|
"type" => "Delete",
|
|
|
|
"actor" => ap_id,
|
|
|
|
"object" => %{"type" => "Person", "id" => ap_id}
|
|
|
|
},
|
2019-08-14 22:35:29 +00:00
|
|
|
{:ok, activity} <- insert(data, true, true, true),
|
2019-07-10 05:16:08 +00:00
|
|
|
:ok <- maybe_federate(activity) do
|
|
|
|
{:ok, user}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
defp do_delete(%Object{data: %{"id" => id, "actor" => actor}} = object, options) do
|
2019-10-10 15:17:33 +00:00
|
|
|
local = Keyword.get(options, :local, true)
|
|
|
|
activity_id = Keyword.get(options, :activity_id, nil)
|
2019-10-11 09:25:45 +00:00
|
|
|
actor = Keyword.get(options, :actor, actor)
|
2019-10-10 15:17:33 +00:00
|
|
|
|
2017-09-04 16:47:33 +00:00
|
|
|
user = User.get_cached_by_ap_id(actor)
|
2019-03-14 17:43:14 +00:00
|
|
|
to = (object.data["to"] || []) ++ (object.data["cc"] || [])
|
2019-03-04 09:47:04 +00:00
|
|
|
|
2019-12-04 23:50:38 +00:00
|
|
|
with create_activity <- Activity.get_create_by_object_ap_id(id),
|
2019-10-11 09:25:45 +00:00
|
|
|
data <-
|
|
|
|
%{
|
|
|
|
"type" => "Delete",
|
|
|
|
"actor" => actor,
|
|
|
|
"object" => id,
|
|
|
|
"to" => to,
|
2019-12-04 23:50:38 +00:00
|
|
|
"deleted_activity_id" => create_activity && create_activity.id
|
2019-10-11 09:25:45 +00:00
|
|
|
}
|
|
|
|
|> maybe_put("id", activity_id),
|
2019-06-24 07:14:04 +00:00
|
|
|
{:ok, activity} <- insert(data, local, false),
|
2019-12-04 23:50:38 +00:00
|
|
|
{:ok, object, _create_activity} <- Object.delete(object),
|
2019-06-24 07:14:04 +00:00
|
|
|
stream_out_participations(object, user),
|
2019-03-25 17:21:48 +00:00
|
|
|
_ <- decrease_replies_count_if_reply(object),
|
2019-03-03 10:21:03 +00:00
|
|
|
{:ok, _actor} <- decrease_note_count_if_public(user, object),
|
2019-01-21 11:16:51 +00:00
|
|
|
:ok <- maybe_federate(activity) do
|
2017-09-04 16:47:33 +00:00
|
|
|
{:ok, activity}
|
2019-12-30 08:30:20 +00:00
|
|
|
else
|
|
|
|
{:error, error} ->
|
|
|
|
Repo.rollback(error)
|
2017-09-04 16:47:33 +00:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2020-03-24 13:38:18 +00:00
|
|
|
defp do_delete(%Object{data: %{"type" => "Tombstone", "id" => ap_id}}, _) do
|
|
|
|
activity =
|
|
|
|
ap_id
|
|
|
|
|> Activity.Queries.by_object_id()
|
|
|
|
|> Activity.Queries.by_type("Delete")
|
|
|
|
|> Repo.one()
|
|
|
|
|
|
|
|
{:ok, activity}
|
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec block(User.t(), User.t(), String.t() | nil, boolean()) ::
|
|
|
|
{:ok, Activity.t()} | {:error, any()}
|
2018-05-21 01:01:14 +00:00
|
|
|
def block(blocker, blocked, activity_id \\ nil, local \\ true) do
|
2019-12-30 08:30:20 +00:00
|
|
|
with {:ok, result} <-
|
|
|
|
Repo.transaction(fn -> do_block(blocker, blocked, activity_id, local) end) do
|
|
|
|
result
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp do_block(blocker, blocked, activity_id, local) do
|
2019-06-03 13:04:39 +00:00
|
|
|
outgoing_blocks = Config.get([:activitypub, :outgoing_blocks])
|
|
|
|
unfollow_blocked = Config.get([:activitypub, :unfollow_blocked])
|
2018-05-18 22:09:56 +00:00
|
|
|
|
2019-05-30 08:33:58 +00:00
|
|
|
if unfollow_blocked do
|
2018-06-09 00:12:16 +00:00
|
|
|
follow_activity = fetch_latest_follow(blocker, blocked)
|
2019-05-30 08:33:58 +00:00
|
|
|
if follow_activity, do: unfollow(blocker, blocked, nil, local)
|
2018-05-18 22:09:56 +00:00
|
|
|
end
|
|
|
|
|
2018-06-25 06:05:44 +00:00
|
|
|
with true <- outgoing_blocks,
|
|
|
|
block_data <- make_block_data(blocker, blocked, activity_id),
|
2018-05-18 22:09:56 +00:00
|
|
|
{:ok, activity} <- insert(block_data, local),
|
|
|
|
:ok <- maybe_federate(activity) do
|
|
|
|
{:ok, activity}
|
2018-06-09 00:12:16 +00:00
|
|
|
else
|
2019-12-30 08:30:20 +00:00
|
|
|
{:error, error} -> Repo.rollback(error)
|
2018-05-18 22:09:56 +00:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec unblock(User.t(), User.t(), String.t() | nil, boolean()) ::
|
2020-01-17 08:51:08 +00:00
|
|
|
{:ok, Activity.t()} | {:error, any()} | nil
|
2018-05-21 01:01:14 +00:00
|
|
|
def unblock(blocker, blocked, activity_id \\ nil, local \\ true) do
|
2019-12-30 08:30:20 +00:00
|
|
|
with {:ok, result} <-
|
|
|
|
Repo.transaction(fn -> do_unblock(blocker, blocked, activity_id, local) end) do
|
|
|
|
result
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
|
|
|
defp do_unblock(blocker, blocked, activity_id, local) do
|
2018-05-18 22:09:56 +00:00
|
|
|
with %Activity{} = block_activity <- fetch_latest_block(blocker, blocked),
|
2018-05-21 01:01:14 +00:00
|
|
|
unblock_data <- make_unblock_data(blocker, blocked, block_activity, activity_id),
|
2018-05-18 22:09:56 +00:00
|
|
|
{:ok, activity} <- insert(unblock_data, local),
|
|
|
|
:ok <- maybe_federate(activity) do
|
|
|
|
{:ok, activity}
|
2019-12-30 08:30:20 +00:00
|
|
|
else
|
|
|
|
nil -> nil
|
|
|
|
{:error, error} -> Repo.rollback(error)
|
2018-05-18 22:09:56 +00:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec flag(map()) :: {:ok, Activity.t()} | {:error, any()}
# Creates a "Flag" (report) activity against `account` (optionally including
# some of its statuses), federates it, and e-mails every local superuser.
def flag(
      %{
        actor: actor,
        context: _context,
        account: account,
        statuses: statuses,
        content: content
      } = params
    ) do
  # only accept false as false value
  local = !(params[:local] == false)
  forward = !(params[:forward] == false)

  additional = params[:additional] || %{}

  # When forwarding, cc the reported account so the report reaches its
  # server; otherwise keep the report addressed to no one (local-only).
  additional =
    if forward do
      Map.merge(additional, %{"to" => [], "cc" => [account.ap_id]})
    else
      Map.merge(additional, %{"to" => [], "cc" => []})
    end

  with flag_data <- make_flag_data(params, additional),
       {:ok, activity} <- insert(flag_data, local),
       # Strip embedded status objects before federating so only the
       # stripped form leaves this server; the full activity is returned.
       {:ok, stripped_activity} <- strip_report_status_data(activity),
       :ok <- maybe_federate(stripped_activity) do
    User.all_superusers()
    |> Enum.filter(fn user -> not is_nil(user.email) end)
    |> Enum.each(fn superuser ->
      superuser
      |> Pleroma.Emails.AdminEmail.report(actor, account, statuses, content)
      |> Pleroma.Emails.Mailer.deliver_async()
    end)

    {:ok, activity}
  end
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec move(User.t(), User.t(), boolean()) :: {:ok, Activity.t()} | {:error, any()}
# Emits a "Move" activity announcing that `origin` has moved to `target` and
# enqueues a background job that re-points origin's followers at target.
def move(%User{} = origin, %User{} = target, local \\ true) do
  params = %{
    "type" => "Move",
    "actor" => origin.ap_id,
    "object" => origin.ap_id,
    "target" => target.ap_id
  }

  # The target account must list the origin in `alsoKnownAs`, proving it
  # consents to receiving the moved followers.
  with true <- origin.ap_id in target.also_known_as,
       {:ok, activity} <- insert(params, local) do
    maybe_federate(activity)

    BackgroundWorker.enqueue("move_following", %{
      "origin_id" => origin.id,
      "target_id" => target.id
    })

    {:ok, activity}
  else
    false -> {:error, "Target account must have the origin in `alsoKnownAs`"}
    err -> err
  end
end
|
|
|
|
|
2020-03-24 16:18:27 +00:00
|
|
|
# Builds the query for all "Create" activities in a conversation context,
# restricted to what opts["user"] (if any) is allowed to see.
def fetch_activities_for_context_query(context, opts) do
  public = [Constants.as_public()]

  # An authenticated user additionally sees posts addressed to them or to
  # anyone they follow; anonymous callers only see public posts.
  recipients =
    if opts["user"],
      do: [opts["user"].ap_id | User.following(opts["user"])] ++ public,
      else: public

  from(activity in Activity)
  |> maybe_preload_objects(opts)
  |> maybe_preload_bookmarks(opts)
  |> maybe_set_thread_muted_field(opts)
  |> restrict_blocked(opts)
  |> restrict_recipients(recipients, opts["user"])
  |> where(
    [activity],
    fragment(
      "?->>'type' = ? and ?->>'context' = ?",
      activity.data,
      "Create",
      activity.data,
      ^context
    )
  )
  |> exclude_poll_votes(opts)
  |> exclude_id(opts)
  |> order_by([activity], desc: activity.id)
end
|
|
|
|
|
|
|
|
@spec fetch_activities_for_context(String.t(), keyword() | map()) :: [Activity.t()]
# Returns all activities in the given conversation context, newest first.
def fetch_activities_for_context(context, opts \\ %{}) do
  query = fetch_activities_for_context_query(context, opts)
  Repo.all(query)
end
|
2018-03-30 13:01:53 +00:00
|
|
|
|
2019-04-20 17:40:41 +00:00
|
|
|
@spec fetch_latest_activity_id_for_context(String.t(), keyword() | map()) ::
        FlakeId.Ecto.CompatType.t() | nil
# Returns only the id of the newest activity in the context, skipping all
# preloads since the full records are not needed.
def fetch_latest_activity_id_for_context(context, opts \\ %{}) do
  query_opts = Map.merge(%{"skip_preload" => true}, opts)

  context
  |> fetch_activities_for_context_query(query_opts)
  |> limit(1)
  |> select([a], a.id)
  |> Repo.one()
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec fetch_public_activities(map(), Pagination.type()) :: [Activity.t()]
# Fetches the public timeline: activities addressed to as:Public, excluding
# unlisted posts (public only in cc).
def fetch_public_activities(opts \\ %{}, pagination \\ :keyset) do
  # The public timeline is the same for everyone; drop any "user" key.
  opts = Map.drop(opts, ["user"])

  query =
    [Constants.as_public()]
    |> fetch_activities_query(opts)
    |> restrict_unlisted()

  Pagination.fetch_paginated(query, opts, pagination)
end
|
|
|
|
|
2018-05-11 02:17:33 +00:00
|
|
|
@valid_visibilities ~w[direct unlisted public private]
|
|
|
|
|
2019-03-01 06:37:29 +00:00
|
|
|
# Restricts the query to activities whose computed visibility is in the
# requested set. Visibility is evaluated in the DB by the
# activity_visibility/3 SQL function.
defp restrict_visibility(query, %{visibility: visibility})
     when is_list(visibility) do
  if Enum.all?(visibility, &(&1 in @valid_visibilities)) do
    from(
      a in query,
      where:
        fragment(
          "activity_visibility(?, ?, ?) = ANY (?)",
          a.actor,
          a.recipients,
          a.data,
          ^visibility
        )
    )
  else
    Logger.error("Could not restrict visibility to #{visibility}")
    # FIX: this branch previously returned Logger.error/1's result (:ok)
    # instead of an Ecto query, crashing the pipeline downstream. Return the
    # query unchanged instead, mirroring exclude_visibility/2.
    query
  end
end

defp restrict_visibility(query, %{visibility: visibility})
     when visibility in @valid_visibilities do
  from(
    a in query,
    where:
      fragment("activity_visibility(?, ?, ?) = ?", a.actor, a.recipients, a.data, ^visibility)
  )
end

defp restrict_visibility(query, %{visibility: visibility})
     when visibility not in @valid_visibilities do
  Logger.error("Could not restrict visibility to #{visibility}")
  # FIX: same defect as above — previously returned :ok, not a query.
  query
end

defp restrict_visibility(query, _visibility), do: query
|
|
|
|
|
2019-10-08 20:05:57 +00:00
|
|
|
# Removes activities whose computed visibility is in the excluded set.
# Unknown visibility values are logged and ignored (query passes through).
defp exclude_visibility(query, %{"exclude_visibilities" => visibility})
     when is_list(visibility) do
  if Enum.all?(visibility, &(&1 in @valid_visibilities)) do
    from(
      a in query,
      where:
        not fragment(
          "activity_visibility(?, ?, ?) = ANY (?)",
          a.actor,
          a.recipients,
          a.data,
          ^visibility
        )
    )
  else
    Logger.error("Could not exclude visibility to #{visibility}")
    query
  end
end

defp exclude_visibility(query, %{"exclude_visibilities" => visibility})
     when visibility in @valid_visibilities do
  from(
    a in query,
    where:
      not fragment(
        "activity_visibility(?, ?, ?) = ?",
        a.actor,
        a.recipients,
        a.data,
        ^visibility
      )
  )
end

# Invalid single visibility value: log and leave the query untouched.
defp exclude_visibility(query, %{"exclude_visibilities" => visibility})
     when visibility not in @valid_visibilities do
  Logger.error("Could not exclude visibility to #{visibility}")
  query
end

defp exclude_visibility(query, _visibility), do: query
|
|
|
|
|
2019-06-04 12:04:36 +00:00
|
|
|
# Thread containment: hides replies whose parent the reader cannot see.
# Skipped when disabled instance-wide (config) ...
defp restrict_thread_visibility(query, _, %{skip_thread_containment: true} = _),
  do: query

# ... or per-user via the skip_thread_containment profile flag.
defp restrict_thread_visibility(
       query,
       %{"user" => %User{skip_thread_containment: true}},
       _
     ),
     do: query

# Evaluated in the DB by the thread_visibility/2 SQL function.
defp restrict_thread_visibility(query, %{"user" => %User{ap_id: ap_id}}, _) do
  from(
    a in query,
    where: fragment("thread_visibility(?, (?)->>'id') = true", ^ap_id, a.data)
  )
end

# Anonymous readers: no containment applied.
defp restrict_thread_visibility(query, _, _), do: query
|
2019-03-25 00:06:02 +00:00
|
|
|
|
2019-09-28 02:12:12 +00:00
|
|
|
# Fetches activities by `user` of any type, as visible to `reading_user`,
# in chronological order.
def fetch_user_abstract_activities(user, reading_user, params \\ %{}) do
  params =
    Map.merge(params, %{"user" => reading_user, "actor_id" => user.ap_id})

  recipients =
    user_activities_recipients(%{
      "godmode" => params["godmode"],
      "reading_user" => reading_user
    })

  recipients
  |> fetch_activities(params)
  |> Enum.reverse()
end
|
|
|
|
|
2018-05-20 14:15:18 +00:00
|
|
|
# Fetches a user's profile timeline (posts and boosts, including pins) as
# visible to `reading_user`, in chronological order.
def fetch_user_activities(user, reading_user, params \\ %{}) do
  params =
    params
    |> Map.put("type", ["Create", "Announce"])
    |> Map.put("user", reading_user)
    |> Map.put("actor_id", user.ap_id)
    |> Map.put("pinned_activity_ids", user.pinned_activities)

  # If the reader blocks the profile owner, skip the block/mute filters:
  # they explicitly navigated to this profile, and filtering by their own
  # block would hide everything.
  params =
    if User.blocks?(reading_user, user) do
      params
    else
      params
      |> Map.put("blocking_user", reading_user)
      |> Map.put("muting_user", reading_user)
    end

  recipients =
    user_activities_recipients(%{
      "godmode" => params["godmode"],
      "reading_user" => reading_user
    })

  fetch_activities(recipients, params)
  |> Enum.reverse()
end
|
|
|
|
|
2020-02-10 11:32:38 +00:00
|
|
|
# Fetches posts and boosts visible to `reading_user` using offset-based
# pagination, in chronological order (used by the admin API).
def fetch_statuses(reading_user, params) do
  params = Map.put(params, "type", ["Create", "Announce"])

  recipients =
    user_activities_recipients(%{
      "godmode" => params["godmode"],
      "reading_user" => reading_user
    })

  recipients
  |> fetch_activities(params, :offset)
  |> Enum.reverse()
end
|
|
|
|
|
2019-07-23 22:50:09 +00:00
|
|
|
# Computes the recipient ap_ids a reader may see activities addressed to.
# Godmode (admin view) lifts the restriction entirely.
defp user_activities_recipients(%{"godmode" => true}), do: []

defp user_activities_recipients(%{"reading_user" => reading_user}) do
  case reading_user do
    # Anonymous readers only see public activities.
    nil ->
      [Constants.as_public()]

    # Authenticated readers also see posts addressed to them or to
    # anyone they follow.
    %{ap_id: ap_id} ->
      [Constants.as_public(), ap_id | User.following(reading_user)]
  end
end
|
|
|
|
|
2019-01-15 15:39:23 +00:00
|
|
|
# An empty "since_id" param is treated as absent.
defp restrict_since(query, %{"since_id" => ""}), do: query

# Keeps only activities newer than the given id.
defp restrict_since(query, %{"since_id" => since_id}) do
  where(query, [activity], activity.id > ^since_id)
end

defp restrict_since(query, _), do: query
|
2017-03-21 16:53:20 +00:00
|
|
|
|
2019-04-18 05:31:08 +00:00
|
|
|
# Tag filters read the child object, which only exists when preloaded.
defp restrict_tag_reject(_query, %{"tag_reject" => _tag_reject, "skip_preload" => true}) do
  raise "Can't use the child object without preloading!"
end

# Drops activities carrying any of the rejected tags (jsonb ?| operator).
defp restrict_tag_reject(query, %{"tag_reject" => [_ | _] = tag_reject}) do
  where(
    query,
    [_activity, object],
    fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject)
  )
end

defp restrict_tag_reject(query, _), do: query
|
|
|
|
|
2019-04-18 05:31:08 +00:00
|
|
|
# Tag filters read the child object, which only exists when preloaded.
defp restrict_tag_all(_query, %{"tag_all" => _tag_all, "skip_preload" => true}) do
  raise "Can't use the child object without preloading!"
end

# Keeps only activities carrying every one of the tags (jsonb ?& operator).
defp restrict_tag_all(query, %{"tag_all" => [_ | _] = tag_all}) do
  where(
    query,
    [_activity, object],
    fragment("(?)->'tag' \\?& (?)", object.data, ^tag_all)
  )
end

defp restrict_tag_all(query, _), do: query
|
|
|
|
|
2019-04-18 05:31:08 +00:00
|
|
|
# Tag filters read the child object, which only exists when preloaded.
defp restrict_tag(_query, %{"tag" => _tag, "skip_preload" => true}) do
  raise "Can't use the child object without preloading!"
end

# Keeps activities carrying at least one of the tags (jsonb ?| operator).
defp restrict_tag(query, %{"tag" => tag}) when is_list(tag) do
  where(
    query,
    [_activity, object],
    fragment("(?)->'tag' \\?| (?)", object.data, ^tag)
  )
end

# Keeps activities carrying the single tag (jsonb ? operator).
defp restrict_tag(query, %{"tag" => tag}) when is_binary(tag) do
  where(
    query,
    [_activity, object],
    fragment("(?)->'tag' \\? (?)", object.data, ^tag)
  )
end

defp restrict_tag(query, _), do: query
|
|
|
|
|
2018-05-04 21:16:02 +00:00
|
|
|
# An empty recipient list means "no restriction".
defp restrict_recipients(query, [], _user), do: query

# Anonymous reader: keep activities addressed to any of the recipients
# (array-overlap && operator).
defp restrict_recipients(query, recipients, nil) do
  where(query, [activity], fragment("? && ?", ^recipients, activity.recipients))
end

# Authenticated reader: additionally always include their own activities.
defp restrict_recipients(query, recipients, user) do
  query
  |> where([activity], fragment("? && ?", ^recipients, activity.recipients))
  |> or_where([activity], activity.actor == ^user.ap_id)
end
|
2017-03-23 23:09:08 +00:00
|
|
|
|
2017-05-16 13:31:11 +00:00
|
|
|
# Keeps only activities that originated on this instance.
defp restrict_local(query, %{"local_only" => true}) do
  where(query, [activity], activity.local == true)
end

defp restrict_local(query, _), do: query
|
2017-04-15 10:11:20 +00:00
|
|
|
|
2017-05-16 13:31:11 +00:00
|
|
|
# Keeps only activities authored by the given actor ap_id.
defp restrict_actor(query, %{"actor_id" => actor_id}) do
  where(query, [activity], activity.actor == ^actor_id)
end

defp restrict_actor(query, _), do: query
|
2017-05-07 17:28:23 +00:00
|
|
|
|
2017-09-17 12:20:54 +00:00
|
|
|
# Single activity type as a string.
defp restrict_type(query, %{"type" => type}) when is_binary(type) do
  where(query, [activity], fragment("?->>'type' = ?", activity.data, ^type))
end

# A list of accepted activity types.
defp restrict_type(query, %{"type" => type}) do
  where(query, [activity], fragment("?->>'type' = ANY(?)", activity.data, ^type))
end

defp restrict_type(query, _), do: query
|
|
|
|
|
2019-05-16 19:09:18 +00:00
|
|
|
# Filters by the activity's embedded "state" field (e.g. report state).
defp restrict_state(query, %{"state" => state}) do
  where(query, [activity], fragment("?->>'state' = ?", activity.data, ^state))
end

defp restrict_state(query, _), do: query
|
|
|
|
|
2017-09-17 11:09:49 +00:00
|
|
|
# Keeps only objects the given ap_id appears in the "likes" of.
defp restrict_favorited_by(query, %{"favorited_by" => ap_id}) do
  where(
    query,
    [_activity, object],
    fragment("(?)->'likes' \\? (?)", object.data, ^ap_id)
  )
end

defp restrict_favorited_by(query, _), do: query
|
|
|
|
|
2019-04-18 05:31:08 +00:00
|
|
|
# The attachment check reads the child object, which requires preloading.
defp restrict_media(_query, %{"only_media" => _val, "skip_preload" => true}) do
  raise "Can't use the child object without preloading!"
end

# Keeps only posts with at least one attachment.
defp restrict_media(query, %{"only_media" => val}) when val in ["true", "1"] do
  where(
    query,
    [_activity, object],
    fragment("not (?)->'attachment' = (?)", object.data, ^[])
  )
end

defp restrict_media(query, _), do: query
|
|
|
|
|
2018-06-18 03:18:39 +00:00
|
|
|
# Drops replies (objects with an inReplyTo) when requested.
defp restrict_replies(query, %{"exclude_replies" => val}) when val in ["true", "1"] do
  where(
    query,
    [_activity, object],
    fragment("?->>'inReplyTo' is null", object.data)
  )
end

defp restrict_replies(query, _), do: query
|
|
|
|
|
2018-12-27 05:30:01 +00:00
|
|
|
# Drops boosts (Announce activities) when requested.
defp restrict_reblogs(query, %{"exclude_reblogs" => val}) when val in ["true", "1"] do
  where(query, [activity], fragment("?->>'type' != 'Announce'", activity.data))
end

defp restrict_reblogs(query, _), do: query
|
|
|
|
|
2019-02-27 15:37:42 +00:00
|
|
|
# A truthy "with_muted" param bypasses mute filtering entirely.
defp restrict_muted(query, %{"with_muted" => val}) when val in [true, "true", "1"], do: query

# Hides activities authored by or addressed to users muted by `user`.
defp restrict_muted(query, %{"muting_user" => %User{} = user} = opts) do
  # Prefer precomputed ap_ids (see fetch_activities_query_ap_ids_ops/1)
  # to avoid an extra DB query per timeline fetch.
  mutes = opts["muted_users_ap_ids"] || User.muted_users_ap_ids(user)

  query =
    from([activity] in query,
      where: fragment("not (? = ANY(?))", activity.actor, ^mutes),
      where: fragment("not (?->'to' \\?| ?)", activity.data, ^mutes)
    )

  # The :thread_mute binding only exists when preloads ran; with
  # "skip_preload" the join is absent, so the filter must be skipped.
  unless opts["skip_preload"] do
    from([thread_mute: tm] in query, where: is_nil(tm.user_id))
  else
    query
  end
end

defp restrict_muted(query, _), do: query
|
|
|
|
|
2019-11-15 18:38:54 +00:00
|
|
|
# Hides activities involving users or domains blocked by `user`:
#   * authored by or addressed to a blocked ap_id,
#   * boosts (Announce) addressed to a blocked ap_id,
#   * actors/objects from a blocked domain — unless the user follows the
#     actor, which overrides the domain block.
defp restrict_blocked(query, %{"blocking_user" => %User{} = user} = opts) do
  # Precomputed ap_ids may be supplied by fetch_activities_query_ap_ids_ops/1.
  blocked_ap_ids = opts["blocked_users_ap_ids"] || User.blocked_users_ap_ids(user)
  domain_blocks = user.domain_blocks || []

  following_ap_ids = User.get_friends_ap_ids(user)

  # The domain check inspects the object's actor, so ensure the object
  # join exists even when preloads were skipped.
  query =
    if has_named_binding?(query, :object), do: query, else: Activity.with_joined_object(query)

  from(
    [activity, object: o] in query,
    where: fragment("not (? = ANY(?))", activity.actor, ^blocked_ap_ids),
    where: fragment("not (? && ?)", activity.recipients, ^blocked_ap_ids),
    where:
      fragment(
        "not (?->>'type' = 'Announce' and ?->'to' \\?| ?)",
        activity.data,
        activity.data,
        ^blocked_ap_ids
      ),
    # split_part(ap_id, '/', 3) extracts the host from an https URL.
    where:
      fragment(
        "(not (split_part(?, '/', 3) = ANY(?))) or ? = ANY(?)",
        activity.actor,
        ^domain_blocks,
        activity.actor,
        ^following_ap_ids
      ),
    where:
      fragment(
        "(not (split_part(?->>'actor', '/', 3) = ANY(?))) or (?->>'actor') = ANY(?)",
        o.data,
        ^domain_blocks,
        o.data,
        ^following_ap_ids
      )
  )
end

defp restrict_blocked(query, _), do: query
|
|
|
|
|
2018-04-29 02:53:19 +00:00
|
|
|
# Drops unlisted posts: those cc'ing (rather than addressing) as:Public.
defp restrict_unlisted(query) do
  where(
    query,
    [activity],
    fragment(
      "not (coalesce(?->'cc', '{}'::jsonb) \\?| ?)",
      activity.data,
      ^[Constants.as_public()]
    )
  )
end
|
|
|
|
|
2019-01-07 13:45:33 +00:00
|
|
|
# Keeps only the user's pinned activities when "pinned" is requested.
defp restrict_pinned(query, %{"pinned" => "true", "pinned_activity_ids" => ids}) do
  where(query, [activity], activity.id in ^ids)
end

defp restrict_pinned(query, _), do: query
|
|
|
|
|
2019-11-20 09:19:07 +00:00
|
|
|
# Hides boosts (Announce activities) from users whose reblogs `user` muted.
defp restrict_muted_reblogs(query, %{"muting_user" => %User{} = user} = opts) do
  # Precomputed ap_ids may be supplied to skip the per-request DB lookup.
  muted_reblogs = opts["reblog_muted_users_ap_ids"] || User.reblog_muted_users_ap_ids(user)

  where(
    query,
    [activity],
    fragment(
      "not ( ?->>'type' = 'Announce' and ? = ANY(?))",
      activity.data,
      activity.actor,
      ^muted_reblogs
    )
  )
end

defp restrict_muted_reblogs(query, _), do: query
|
|
|
|
|
2019-11-14 14:44:07 +00:00
|
|
|
# Keeps only activities whose actor belongs to the given remote instance
# (matched by the "@instance" suffix of the user nickname).
defp restrict_instance(query, %{"instance" => instance}) do
  # Filter via a subquery instead of materialising every matching ap_id:
  # the original ran Repo.all/1 first, costing an extra roundtrip and
  # potentially embedding a huge parameter list into the main query.
  users_query =
    from(
      u in User,
      select: u.ap_id,
      where: fragment("? LIKE ?", u.nickname, ^"%@#{instance}")
    )

  from(activity in query, where: activity.actor in subquery(users_query))
end

defp restrict_instance(query, _), do: query
|
|
|
|
|
2019-09-25 12:59:04 +00:00
|
|
|
# Poll votes ("Answer" objects) are hidden from timelines unless
# explicitly requested.
defp exclude_poll_votes(query, %{"include_poll_votes" => true}), do: query

defp exclude_poll_votes(query, _) do
  # The check reads the child object; without the join, pass through.
  if has_named_binding?(query, :object) do
    where(query, [activity, object: o], fragment("not(?->>'type' = ?)", o.data, "Answer"))
  else
    query
  end
end
|
|
|
|
|
2019-08-13 23:36:54 +00:00
|
|
|
# Excludes one specific activity by id (e.g. the status being replied to).
defp exclude_id(query, %{"exclude_id" => id}) when is_binary(id) do
  where(query, [activity], activity.id != ^id)
end

defp exclude_id(query, _), do: query
|
|
|
|
|
2019-03-23 01:09:12 +00:00
|
|
|
# Joins and preloads each activity's child object unless preloads are
# explicitly skipped (id-only queries).
defp maybe_preload_objects(query, %{"skip_preload" => true}), do: query

defp maybe_preload_objects(query, _) do
  Activity.with_preloaded_object(query)
end
|
|
|
|
|
2019-05-07 15:00:50 +00:00
|
|
|
# Preloads the reading user's bookmark rows unless preloads are skipped.
defp maybe_preload_bookmarks(query, %{"skip_preload" => true}), do: query

defp maybe_preload_bookmarks(query, opts) do
  Activity.with_preloaded_bookmark(query, opts["user"])
end
|
|
|
|
|
2019-12-03 14:54:07 +00:00
|
|
|
# Preloads moderator notes on reports, opt-in via "preload_report_notes".
defp maybe_preload_report_notes(query, %{"preload_report_notes" => true}) do
  Activity.with_preloaded_report_notes(query)
end

defp maybe_preload_report_notes(query, _), do: query
|
|
|
|
|
2019-05-20 16:35:46 +00:00
|
|
|
# Adds the virtual thread_muted? field for the muting (or reading) user,
# unless preloads are skipped.
defp maybe_set_thread_muted_field(query, %{"skip_preload" => true}), do: query

defp maybe_set_thread_muted_field(query, opts) do
  Activity.with_set_thread_muted_field(query, opts["muting_user"] || opts["user"])
end
|
|
|
|
|
2019-05-07 19:30:27 +00:00
|
|
|
# Applies an explicit id ordering when the :order option is given.
defp maybe_order(query, %{order: :desc}), do: order_by(query, desc: :id)

defp maybe_order(query, %{order: :asc}), do: order_by(query, asc: :id)

defp maybe_order(query, _), do: query
|
|
|
|
|
2019-11-20 09:19:07 +00:00
|
|
|
# Preloads mute/reblog-mute/block relationship ap_ids in a single query and
# threads them into the opts of the corresponding restrict_* helpers, so
# each helper doesn't hit the DB separately.
defp fetch_activities_query_ap_ids_ops(opts) do
  source_user = opts["muting_user"]

  # :block is only batched in when the blocking user is the same user;
  # otherwise restrict_blocked/2 falls back to its own lookup.
  ap_id_relationships =
    cond do
      is_nil(source_user) -> []
      opts["blocking_user"] == source_user -> [:mute, :reblog_mute, :block]
      true -> [:mute, :reblog_mute]
    end

  preloaded_ap_ids = User.outgoing_relationships_ap_ids(source_user, ap_id_relationships)

  restrict_blocked_opts = Map.merge(%{"blocked_users_ap_ids" => preloaded_ap_ids[:block]}, opts)
  restrict_muted_opts = Map.merge(%{"muted_users_ap_ids" => preloaded_ap_ids[:mute]}, opts)

  restrict_muted_reblogs_opts =
    Map.merge(%{"reblog_muted_users_ap_ids" => preloaded_ap_ids[:reblog_mute]}, opts)

  {restrict_blocked_opts, restrict_muted_opts, restrict_muted_reblogs_opts}
end
|
|
|
|
|
2018-02-18 14:32:11 +00:00
|
|
|
# Central query builder: composes every restrict_*/exclude_*/preload helper
# according to opts. Helpers not matching their opt keys pass the query
# through unchanged, so the pipeline order encodes precedence, not logic.
def fetch_activities_query(recipients, opts \\ %{}) do
  # Batch-load relationship ap_ids once for the mute/block filters below.
  {restrict_blocked_opts, restrict_muted_opts, restrict_muted_reblogs_opts} =
    fetch_activities_query_ap_ids_ops(opts)

  config = %{
    skip_thread_containment: Config.get([:instance, :skip_thread_containment])
  }

  Activity
  |> maybe_preload_objects(opts)
  |> maybe_preload_bookmarks(opts)
  |> maybe_preload_report_notes(opts)
  |> maybe_set_thread_muted_field(opts)
  |> maybe_order(opts)
  |> restrict_recipients(recipients, opts["user"])
  |> restrict_tag(opts)
  |> restrict_tag_reject(opts)
  |> restrict_tag_all(opts)
  |> restrict_since(opts)
  |> restrict_local(opts)
  |> restrict_actor(opts)
  |> restrict_type(opts)
  |> restrict_state(opts)
  |> restrict_favorited_by(opts)
  |> restrict_blocked(restrict_blocked_opts)
  |> restrict_muted(restrict_muted_opts)
  |> restrict_media(opts)
  |> restrict_visibility(opts)
  |> restrict_thread_visibility(opts, config)
  |> restrict_replies(opts)
  |> restrict_reblogs(opts)
  |> restrict_pinned(opts)
  |> restrict_muted_reblogs(restrict_muted_reblogs_opts)
  |> restrict_instance(opts)
  |> Activity.restrict_deactivated_users()
  |> exclude_poll_votes(opts)
  |> exclude_visibility(opts)
end
|
|
|
|
|
2019-09-24 22:25:42 +00:00
|
|
|
# Fetches a page of activities addressed to `recipients` (plus any lists the
# reading user belongs to), in chronological order.
def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do
  list_memberships = Pleroma.List.memberships(opts["user"])

  (recipients ++ list_memberships)
  |> fetch_activities_query(opts)
  |> Pagination.fetch_paginated(opts, pagination)
  |> Enum.reverse()
  |> maybe_update_cc(list_memberships, opts["user"])
end
|
|
|
|
|
2019-11-18 06:44:08 +00:00
|
|
|
@doc """
Fetches the user's favourited statuses, ordered by when they were favourited
(most recent favourite first), not by when the statuses were posted.
"""
@spec fetch_favourites(User.t(), map(), Pagination.type()) :: list(Activity.t())
def fetch_favourites(user, params \\ %{}, pagination \\ :keyset) do
  # Query the user's Like activities, then swap in the liked object's
  # Create activity (with the object preloaded) as the result rows.
  user.ap_id
  |> Activity.Queries.by_actor()
  |> Activity.Queries.by_type("Like")
  |> Activity.with_joined_object()
  |> Object.with_joined_activity()
  |> select([_like, object, activity], %{activity | object: object})
  # Order by the Like's id so the timeline reflects favouriting order.
  |> order_by([like, _, _], desc: like.id)
  |> Pagination.fetch_paginated(
    # "skip_order" keeps the Like-based ordering above from being clobbered.
    Map.merge(params, %{"skip_order" => true}),
    pagination,
    :object_activity
  )
end
|
|
|
|
|
2019-05-17 18:17:14 +00:00
|
|
|
# For activities bcc'd to a list the reading user belongs to, adds the
# reader's ap_id to the cc so downstream rendering treats them as an
# addressed recipient (bcc itself is never exposed).
defp maybe_update_cc(activities, list_memberships, %User{ap_id: user_ap_id})
     when is_list(list_memberships) and length(list_memberships) > 0 do
  Enum.map(activities, fn
    %{data: %{"bcc" => [_ | _] = bcc}} = activity ->
      if Enum.any?(bcc, &(&1 in list_memberships)) do
        update_in(activity.data["cc"], &[user_ap_id | &1])
      else
        activity
      end

    activity ->
      activity
  end)
end

defp maybe_update_cc(activities, _, _), do: activities
|
|
|
|
|
2019-05-31 12:25:17 +00:00
|
|
|
# Limits a query to activities addressed directly to `recipients`, or to
# `recipients_with_public` when the activity is also public — used for
# instance/list-bounded timelines.
def fetch_activities_bounded_query(query, recipients, recipients_with_public) do
  from(activity in query,
    where:
      fragment("? && ?", activity.recipients, ^recipients) or
        (fragment("? && ?", activity.recipients, ^recipients_with_public) and
           ^Constants.as_public() in activity.recipients)
  )
end
|
|
|
|
|
2019-09-24 22:25:42 +00:00
|
|
|
# Fetches a page of activities bounded by the recipient sets (see
# fetch_activities_bounded_query/3), in chronological order.
def fetch_activities_bounded(
      recipients,
      recipients_with_public,
      opts \\ %{},
      pagination \\ :keyset
    ) do
  []
  |> fetch_activities_query(opts)
  |> fetch_activities_bounded_query(recipients, recipients_with_public)
  |> Pagination.fetch_paginated(opts, pagination)
  |> Enum.reverse()
end
|
|
|
|
|
2019-12-30 08:30:20 +00:00
|
|
|
@spec upload(Upload.source(), keyword()) :: {:ok, Object.t()} | {:error, any()}
# Stores an uploaded file and persists it as an Object, tagging it with the
# uploading actor's ap_id when one is given.
def upload(file, opts \\ []) do
  with {:ok, data} <- Upload.store(file, opts) do
    obj_data =
      case opts[:actor] do
        nil -> data
        actor -> Map.put(data, "actor", actor)
      end

    Repo.insert(%Object{data: obj_data})
  end
end
|
2017-12-12 17:07:14 +00:00
|
|
|
|
2020-03-30 00:01:09 +00:00
|
|
|
@spec get_actor_url(any()) :: binary() | nil
|
|
|
|
defp get_actor_url(url) when is_binary(url), do: url
|
|
|
|
defp get_actor_url(%{"href" => href}) when is_binary(href), do: href
|
|
|
|
|
|
|
|
defp get_actor_url(url) when is_list(url) do
|
|
|
|
url
|
|
|
|
|> List.first()
|
|
|
|
|> get_actor_url()
|
|
|
|
end
|
|
|
|
|
|
|
|
defp get_actor_url(_url), do: nil
|
|
|
|
|
2019-05-22 04:33:10 +00:00
|
|
|
defp object_to_user_data(data) do
|
2018-03-30 13:01:53 +00:00
|
|
|
avatar =
|
|
|
|
data["icon"]["url"] &&
|
|
|
|
%{
|
|
|
|
"type" => "Image",
|
|
|
|
"url" => [%{"href" => data["icon"]["url"]}]
|
|
|
|
}
|
|
|
|
|
|
|
|
banner =
|
|
|
|
data["image"]["url"] &&
|
|
|
|
%{
|
|
|
|
"type" => "Image",
|
|
|
|
"url" => [%{"href" => data["image"]["url"]}]
|
|
|
|
}
|
2018-02-25 15:14:25 +00:00
|
|
|
|
2019-08-07 11:14:22 +00:00
|
|
|
fields =
|
|
|
|
data
|
|
|
|
|> Map.get("attachment", [])
|
|
|
|
|> Enum.filter(fn %{"type" => t} -> t == "PropertyValue" end)
|
|
|
|
|> Enum.map(fn fields -> Map.take(fields, ["name", "value"]) end)
|
|
|
|
|
2020-04-03 11:03:32 +00:00
|
|
|
emojis =
|
|
|
|
data
|
|
|
|
|> Map.get("tag", [])
|
2020-04-17 21:55:56 +00:00
|
|
|
|> Enum.filter(fn
|
2020-04-17 22:48:37 +00:00
|
|
|
%{"type" => "Emoji"} -> true
|
2020-04-17 21:55:56 +00:00
|
|
|
_ -> false
|
|
|
|
end)
|
2020-04-03 11:03:32 +00:00
|
|
|
|> Enum.reduce(%{}, fn %{"icon" => %{"url" => url}, "name" => name}, acc ->
|
|
|
|
Map.put(acc, String.trim(name, ":"), url)
|
|
|
|
end)
|
|
|
|
|
2018-05-25 04:15:42 +00:00
|
|
|
locked = data["manuallyApprovesFollowers"] || false
|
2018-05-19 07:30:02 +00:00
|
|
|
data = Transmogrifier.maybe_fix_user_object(data)
|
2019-09-26 02:57:41 +00:00
|
|
|
discoverable = data["discoverable"] || false
|
2019-10-19 17:46:24 +00:00
|
|
|
invisible = data["invisible"] || false
|
2019-12-10 13:19:26 +00:00
|
|
|
actor_type = data["type"] || "Person"
|
2020-04-01 05:47:07 +00:00
|
|
|
|
|
|
|
public_key =
|
|
|
|
if is_map(data["publicKey"]) && is_binary(data["publicKey"]["publicKeyPem"]) do
|
|
|
|
data["publicKey"]["publicKeyPem"]
|
|
|
|
else
|
|
|
|
nil
|
|
|
|
end
|
|
|
|
|
|
|
|
shared_inbox =
|
|
|
|
if is_map(data["endpoints"]) && is_binary(data["endpoints"]["sharedInbox"]) do
|
|
|
|
data["endpoints"]["sharedInbox"]
|
|
|
|
else
|
|
|
|
nil
|
|
|
|
end
|
2018-05-19 07:30:02 +00:00
|
|
|
|
2018-02-25 15:14:25 +00:00
|
|
|
user_data = %{
|
|
|
|
ap_id: data["id"],
|
2020-03-30 00:01:09 +00:00
|
|
|
uri: get_actor_url(data["url"]),
|
2019-10-21 08:05:09 +00:00
|
|
|
ap_enabled: true,
|
|
|
|
banner: banner,
|
2019-10-16 18:59:21 +00:00
|
|
|
fields: fields,
|
2020-04-03 11:03:32 +00:00
|
|
|
emoji: emojis,
|
2019-10-16 18:59:21 +00:00
|
|
|
locked: locked,
|
2019-10-21 08:05:09 +00:00
|
|
|
discoverable: discoverable,
|
|
|
|
invisible: invisible,
|
2018-02-25 15:14:25 +00:00
|
|
|
avatar: avatar,
|
|
|
|
name: data["name"],
|
|
|
|
follower_address: data["followers"],
|
2019-07-10 13:01:32 +00:00
|
|
|
following_address: data["following"],
|
2019-10-25 12:14:18 +00:00
|
|
|
bio: data["summary"],
|
2019-12-10 13:19:26 +00:00
|
|
|
actor_type: actor_type,
|
2020-04-01 04:58:48 +00:00
|
|
|
also_known_as: Map.get(data, "alsoKnownAs", []),
|
2020-04-01 05:47:07 +00:00
|
|
|
public_key: public_key,
|
|
|
|
inbox: data["inbox"],
|
|
|
|
shared_inbox: shared_inbox
|
2018-02-25 15:14:25 +00:00
|
|
|
}
|
|
|
|
|
2018-08-06 06:50:18 +00:00
|
|
|
# nickname can be nil because of virtual actors
|
|
|
|
user_data =
|
|
|
|
if data["preferredUsername"] do
|
2018-08-06 08:26:36 +00:00
|
|
|
Map.put(
|
|
|
|
user_data,
|
|
|
|
:nickname,
|
|
|
|
"#{data["preferredUsername"]}@#{URI.parse(data["id"]).host}"
|
|
|
|
)
|
2018-08-06 06:50:18 +00:00
|
|
|
else
|
|
|
|
Map.put(user_data, :nickname, nil)
|
|
|
|
end
|
|
|
|
|
2018-02-25 15:14:25 +00:00
|
|
|
{:ok, user_data}
|
|
|
|
end
|
|
|
|
|
2019-07-13 20:56:10 +00:00
|
|
|
  @doc """
  Fetches a remote user's following and followers collections and derives
  follow-related account info.

  Returns `{:ok, map}` with:

    * `:hide_follows` / `:hide_followers` - whether the respective
      collection appears private (see `collection_private/1`)
    * `:follower_count` / `:following_count` - each collection's
      `"totalItems"`, normalized to an integer via `normalize_counter/1`

  Returns `{:error, reason}` when either collection cannot be fetched.
  """
  def fetch_follow_information_for_user(user) do
    with {:ok, following_data} <-
           Fetcher.fetch_and_contain_remote_object_from_id(user.following_address),
         {:ok, hide_follows} <- collection_private(following_data),
         {:ok, followers_data} <-
           Fetcher.fetch_and_contain_remote_object_from_id(user.follower_address),
         {:ok, hide_followers} <- collection_private(followers_data) do
      {:ok,
       %{
         hide_follows: hide_follows,
         follower_count: normalize_counter(followers_data["totalItems"]),
         following_count: normalize_counter(following_data["totalItems"]),
         hide_followers: hide_followers
       }}
    else
      # Pass already-tagged errors through unchanged; wrap anything else.
      {:error, _} = e -> e
      e -> {:error, e}
    end
  end
|
|
|
|
|
2019-12-19 12:25:23 +00:00
|
|
|
defp normalize_counter(counter) when is_integer(counter), do: counter
|
|
|
|
defp normalize_counter(_), do: 0
|
|
|
|
|
2019-07-13 20:56:10 +00:00
|
|
|
  # Enriches a user-data map with follower/following counts and hide flags,
  # but only when `:external_user_synchronization` is enabled in the
  # instance config. On any failure the error is logged and `data` is
  # returned unchanged, so user creation/refresh still proceeds.
  defp maybe_update_follow_information(data) do
    with {:enabled, true} <- {:enabled, Config.get([:instance, :external_user_synchronization])},
         {:ok, info} <- fetch_follow_information_for_user(data) do
      # Merge into any pre-existing :info so other keys are preserved.
      info = Map.merge(data[:info] || %{}, info)
      Map.put(data, :info, info)
    else
      {:enabled, false} ->
        data

      e ->
        Logger.error(
          "Follower/Following counter update for #{data.ap_id} failed.\n" <> inspect(e)
        )

        data
    end
  end
|
|
|
|
|
2019-12-19 12:25:23 +00:00
|
|
|
  # Determines whether a followers/following collection should be treated
  # as private, returning {:ok, boolean} or an error tuple.
  #
  # Inlined first page of a known page type -> public.
  defp collection_private(%{"first" => %{"type" => type}})
       when type in ["CollectionPage", "OrderedCollectionPage"],
       do: {:ok, false}

  # "first" is only a reference: fetch it. A resolvable page means public;
  # a 401/403 from the remote server means the collection is hidden.
  defp collection_private(%{"first" => first}) do
    with {:ok, %{"type" => type}} when type in ["CollectionPage", "OrderedCollectionPage"] <-
           Fetcher.fetch_and_contain_remote_object_from_id(first) do
      {:ok, false}
    else
      {:error, {:ok, %{status: code}}} when code in [401, 403] -> {:ok, true}
      {:error, _} = e -> e
      e -> {:error, e}
    end
  end

  # No "first" page at all: assume the collection is private.
  defp collection_private(_data), do: {:ok, true}
|
|
|
|
|
2019-05-22 04:33:10 +00:00
|
|
|
def user_data_from_user_object(data) do
|
|
|
|
with {:ok, data} <- MRF.filter(data),
|
|
|
|
{:ok, data} <- object_to_user_data(data) do
|
|
|
|
{:ok, data}
|
|
|
|
else
|
|
|
|
e -> {:error, e}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-02-21 21:21:40 +00:00
|
|
|
  @doc """
  Fetches the actor document at `ap_id` and converts it into a user-data
  map, including follow information when external synchronization is
  enabled (see `maybe_update_follow_information/1`).

  Returns `{:ok, data}` or `{:error, reason}`.
  """
  def fetch_and_prepare_user_from_ap_id(ap_id) do
    with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id),
         {:ok, data} <- user_data_from_user_object(data),
         data <- maybe_update_follow_information(data) do
      {:ok, data}
    else
      # Deleted objects are common/expected, so log at debug level only.
      # NOTE(review): `e` is already the `{:error, "Object has been
      # deleted"}` tuple, so this returns `{:error, {:error, ...}}` —
      # confirm whether any caller relies on the double-wrapped shape
      # before changing it.
      {:error, "Object has been deleted"} = e ->
        Logger.debug("Could not decode user at fetch #{ap_id}, #{inspect(e)}")
        {:error, e}

      e ->
        Logger.error("Could not decode user at fetch #{ap_id}, #{inspect(e)}")
        {:error, e}
    end
  end
|
|
|
|
|
|
|
|
def make_user_from_ap_id(ap_id) do
|
2020-04-11 18:44:52 +00:00
|
|
|
user = User.get_cached_by_ap_id(ap_id)
|
|
|
|
|
|
|
|
if user && !User.ap_enabled?(user) do
|
2018-02-21 21:21:40 +00:00
|
|
|
Transmogrifier.upgrade_user_from_ap_id(ap_id)
|
|
|
|
else
|
|
|
|
with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id) do
|
2020-04-11 18:44:52 +00:00
|
|
|
if user do
|
|
|
|
user
|
|
|
|
|> User.remote_user_changeset(data)
|
|
|
|
|> User.update_and_set_cache()
|
|
|
|
else
|
|
|
|
data
|
|
|
|
|> User.remote_user_changeset()
|
|
|
|
|> Repo.insert()
|
|
|
|
|> User.set_cache()
|
|
|
|
end
|
2018-02-18 22:11:31 +00:00
|
|
|
else
|
2018-02-25 15:52:33 +00:00
|
|
|
e -> {:error, e}
|
2018-02-18 22:11:31 +00:00
|
|
|
end
|
2018-02-11 16:20:02 +00:00
|
|
|
end
|
|
|
|
end
|
2018-02-11 19:43:33 +00:00
|
|
|
|
2018-02-18 11:27:05 +00:00
|
|
|
def make_user_from_nickname(nickname) do
|
|
|
|
with {:ok, %{"ap_id" => ap_id}} when not is_nil(ap_id) <- WebFinger.finger(nickname) do
|
|
|
|
make_user_from_ap_id(ap_id)
|
2018-02-25 15:52:33 +00:00
|
|
|
else
|
2018-03-19 17:56:49 +00:00
|
|
|
_e -> {:error, "No AP id in WebFinger"}
|
2018-02-18 11:27:05 +00:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-10-26 06:16:51 +00:00
|
|
|
# filter out broken threads
|
|
|
|
def contain_broken_threads(%Activity{} = activity, %User{} = user) do
|
|
|
|
entire_thread_visible_for_user?(activity, user)
|
|
|
|
end
|
|
|
|
|
|
|
|
# do post-processing on a specific activity
|
|
|
|
def contain_activity(%Activity{} = activity, %User{} = user) do
|
2019-03-11 15:57:54 +00:00
|
|
|
contain_broken_threads(activity, user)
|
2018-10-26 06:16:51 +00:00
|
|
|
end
|
|
|
|
|
2019-05-08 16:19:20 +00:00
|
|
|
def fetch_direct_messages_query do
|
2019-05-08 15:37:00 +00:00
|
|
|
Activity
|
|
|
|
|> restrict_type(%{"type" => "Create"})
|
|
|
|
|> restrict_visibility(%{visibility: "direct"})
|
2019-05-09 14:39:28 +00:00
|
|
|
|> order_by([activity], asc: activity.id)
|
2019-05-08 15:37:00 +00:00
|
|
|
end
|
2017-03-21 08:21:52 +00:00
|
|
|
end
|