2018-12-23 20:04:54 +00:00
|
|
|
# Pleroma: A lightweight social networking server
|
2018-12-31 15:41:47 +00:00
|
|
|
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
|
2018-12-23 20:04:54 +00:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-only
|
|
|
|
|
2017-03-20 20:28:31 +00:00
|
|
|
defmodule Pleroma.User do
|
|
|
|
use Ecto.Schema
|
2017-05-05 09:46:59 +00:00
|
|
|
|
2019-02-09 15:16:26 +00:00
|
|
|
import Ecto.Changeset
|
|
|
|
import Ecto.Query
|
|
|
|
|
2019-03-05 02:52:23 +00:00
|
|
|
alias Comeonin.Pbkdf2
|
|
|
|
alias Pleroma.Activity
|
|
|
|
alias Pleroma.Formatter
|
|
|
|
alias Pleroma.Notification
|
|
|
|
alias Pleroma.Object
|
2019-02-09 15:16:26 +00:00
|
|
|
alias Pleroma.Repo
|
|
|
|
alias Pleroma.User
|
|
|
|
alias Pleroma.Web
|
2019-03-05 02:52:23 +00:00
|
|
|
alias Pleroma.Web.ActivityPub.ActivityPub
|
|
|
|
alias Pleroma.Web.ActivityPub.Utils
|
2018-12-02 19:03:53 +00:00
|
|
|
alias Pleroma.Web.CommonAPI.Utils, as: CommonUtils
|
2019-02-09 15:16:26 +00:00
|
|
|
alias Pleroma.Web.OAuth
|
2019-03-05 02:52:23 +00:00
|
|
|
alias Pleroma.Web.OStatus
|
2019-02-11 21:27:02 +00:00
|
|
|
alias Pleroma.Web.RelMe
|
2019-03-05 02:52:23 +00:00
|
|
|
alias Pleroma.Web.Websub
|
2017-03-20 20:28:31 +00:00
|
|
|
|
2018-12-29 09:02:37 +00:00
|
|
|
require Logger
|
|
|
|
|
2018-12-09 09:12:48 +00:00
|
|
|
@type t :: %__MODULE__{}
|
|
|
|
|
2019-01-09 15:08:24 +00:00
|
|
|
@primary_key {:id, Pleroma.FlakeId, autogenerate: true}
|
|
|
|
|
2019-03-05 04:37:33 +00:00
|
|
|
# credo:disable-for-next-line Credo.Check.Readability.MaxLineLength
|
2018-12-12 17:17:15 +00:00
|
|
|
@email_regex ~r/^[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/
|
|
|
|
|
|
|
|
@strict_local_nickname_regex ~r/^[a-zA-Z\d]+$/
|
2018-12-12 20:44:08 +00:00
|
|
|
@extended_local_nickname_regex ~r/^[a-zA-Z\d_-]+$/
|
2018-12-12 17:17:15 +00:00
|
|
|
|
2017-03-20 20:28:31 +00:00
|
|
|
schema "users" do
|
2018-03-30 13:01:53 +00:00
|
|
|
field(:bio, :string)
|
|
|
|
field(:email, :string)
|
|
|
|
field(:name, :string)
|
|
|
|
field(:nickname, :string)
|
|
|
|
field(:password_hash, :string)
|
|
|
|
field(:password, :string, virtual: true)
|
|
|
|
field(:password_confirmation, :string, virtual: true)
|
|
|
|
field(:following, {:array, :string}, default: [])
|
|
|
|
field(:ap_id, :string)
|
|
|
|
field(:avatar, :map)
|
|
|
|
field(:local, :boolean, default: true)
|
|
|
|
field(:follower_address, :string)
|
2019-01-14 17:04:45 +00:00
|
|
|
field(:search_rank, :float, virtual: true)
|
2019-03-22 05:39:49 +00:00
|
|
|
field(:search_type, :integer, virtual: true)
|
2018-12-06 17:06:50 +00:00
|
|
|
field(:tags, {:array, :string}, default: [])
|
2018-09-19 00:04:56 +00:00
|
|
|
field(:bookmarks, {:array, :string}, default: [])
|
2019-03-20 12:59:27 +00:00
|
|
|
field(:last_refreshed_at, :naive_datetime_usec)
|
2018-03-30 13:01:53 +00:00
|
|
|
has_many(:notifications, Notification)
|
2018-11-18 17:27:04 +00:00
|
|
|
embeds_one(:info, Pleroma.User.Info)
|
2017-03-20 20:28:31 +00:00
|
|
|
|
|
|
|
timestamps()
|
|
|
|
end
|
2017-03-21 16:53:20 +00:00
|
|
|
|
2019-01-09 06:45:17 +00:00
|
|
|
# Whether the account may authenticate. Remote users are never gated
# on email confirmation.
def auth_active?(%User{local: false}), do: true

# A confirmed local account is always active.
def auth_active?(%User{info: %User.Info{confirmation_pending: false}}), do: true

# Unconfirmed local accounts are active only when the instance does not
# require account activation.
def auth_active?(%User{info: %User.Info{confirmation_pending: true}}),
  do: !Pleroma.Config.get([:instance, :account_activation_required])

# Anything else (nil, malformed info, ...) is treated as inactive.
def auth_active?(_), do: false
|
2018-12-27 12:46:18 +00:00
|
|
|
|
2019-01-09 06:21:21 +00:00
|
|
|
# Whether `user`'s profile should be visible to `for_user`.
def visible_for?(user, for_user \\ nil)

# Users can always see themselves.
def visible_for?(%User{id: user_id}, %User{id: for_id}) when user_id == for_id, do: true

# Otherwise the account must be active, or the viewer must be a superuser.
def visible_for?(%User{} = user, for_user) do
  auth_active?(user) || superuser?(for_user)
end

def visible_for?(_, _), do: false
|
|
|
|
|
2019-01-09 06:41:25 +00:00
|
|
|
# Local admins and moderators are superusers; remote users never are.
def superuser?(%User{local: true, info: %User.Info{is_admin: true}}), do: true
def superuser?(%User{local: true, info: %User.Info{is_moderator: true}}), do: true
def superuser?(_), do: false
|
2018-12-17 14:28:58 +00:00
|
|
|
|
2019-03-26 15:40:09 +00:00
|
|
|
# Returns the user's avatar URL, falling back to the instance default image
# unless `no_default: true` is passed (then a falsy value is returned instead).
def avatar_url(user, options \\ []) do
  with %{"url" => [%{"href" => href} | _]} <- user.avatar do
    href
  else
    _ -> !options[:no_default] && "#{Web.base_url()}/images/avi.png"
  end
end
|
|
|
|
|
2019-03-26 15:40:09 +00:00
|
|
|
# Returns the user's banner URL, falling back to the instance default image
# unless `no_default: true` is passed (then a falsy value is returned instead).
def banner_url(user, options \\ []) do
  with %{"url" => [%{"href" => href} | _]} <- user.info.banner do
    href
  else
    _ -> !options[:no_default] && "#{Web.base_url()}/images/banner.png"
  end
end
|
|
|
|
|
2018-11-18 20:40:52 +00:00
|
|
|
# Prefer the profile URL advertised in the federated source data;
# fall back to the actor's AP id, or nil for non-user values.
def profile_url(%User{info: %{source_data: %{"url" => url}}}), do: url
def profile_url(%User{ap_id: ap_id}), do: ap_id
def profile_url(_), do: nil
|
|
|
|
|
2017-03-21 16:53:20 +00:00
|
|
|
# Canonical ActivityPub id for a local nickname.
def ap_id(%User{nickname: nickname}) do
  "#{Web.base_url()}/users/#{nickname}"
end
|
|
|
|
|
2019-03-19 18:23:06 +00:00
|
|
|
# Followers collection URL: use the stored address when present,
# otherwise derive it from the user's AP id.
def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"
|
2017-03-22 17:36:08 +00:00
|
|
|
|
2017-04-20 22:51:09 +00:00
|
|
|
# Builds the stats map shown on a profile (counts, lock state, scope).
def user_info(%User{} = user) do
  # Local users appear in their own `following` list; don't count that entry.
  self_entry = if user.local, do: 1, else: 0

  %{
    following_count: length(user.following) - self_entry,
    note_count: user.info.note_count,
    follower_count: user.info.follower_count,
    locked: user.info.locked,
    confirmation_pending: user.info.confirmation_pending,
    default_scope: user.info.default_scope
  }
end
|
|
|
|
|
2017-05-09 16:11:51 +00:00
|
|
|
# Builds a changeset for creating a remote (federated) user.
# Note: remote nicknames are `user@domain`, hence the email-shaped regex.
def remote_user_creation(params) do
  params = Map.put(params, :info, params[:info] || %{})

  info_cng = User.Info.remote_user_creation(%User.Info{}, params[:info])

  changes =
    %User{}
    |> cast(params, [:bio, :name, :ap_id, :nickname, :avatar])
    |> validate_required([:name, :ap_id])
    |> unique_constraint(:nickname)
    |> validate_format(:nickname, @email_regex)
    |> validate_length(:bio, max: 5000)
    |> validate_length(:name, max: 100)
    |> put_change(:local, false)
    |> put_embed(:info, info_cng)

  if changes.valid? do
    # Use the followers collection advertised by the remote actor when
    # available; otherwise guess it from the nickname. (Previously both
    # branches duplicated the same `put_change/3` call.)
    followers =
      case info_cng.changes[:source_data] do
        %{"followers" => followers} -> followers
        _ -> User.ap_followers(%User{nickname: changes.changes[:nickname]})
      end

    put_change(changes, :follower_address, followers)
  else
    changes
  end
end
|
|
|
|
|
2017-08-29 13:14:00 +00:00
|
|
|
# Changeset for a user editing their own profile (bio, name, avatar).
def update_changeset(struct, params \\ %{}) do
  struct
  |> cast(params, [:bio, :name, :avatar])
  |> unique_constraint(:nickname)
  |> validate_format(:nickname, local_nickname_regex())
  |> validate_length(:bio, max: 5000)
  |> validate_length(:name, min: 1, max: 100)
end
|
|
|
|
|
2018-02-21 21:21:40 +00:00
|
|
|
# Changeset used when upgrading a user record from an OStatus
# representation to ActivityPub; stamps the refresh time.
def upgrade_changeset(struct, params \\ %{}) do
  params =
    params
    |> Map.put(:last_refreshed_at, NaiveDateTime.utc_now())

  info_cng =
    struct.info
    |> User.Info.user_upgrade(params[:info])

  struct
  |> cast(params, [:bio, :name, :follower_address, :avatar, :last_refreshed_at])
  |> unique_constraint(:nickname)
  |> validate_format(:nickname, local_nickname_regex())
  |> validate_length(:bio, max: 5000)
  |> validate_length(:name, max: 100)
  |> put_embed(:info, info_cng)
end
|
|
|
|
|
2017-10-19 15:37:24 +00:00
|
|
|
# Changeset for changing a user's password. Side effect: revokes all OAuth
# tokens/authorizations so existing sessions are logged out.
def password_update_changeset(struct, params) do
  changeset =
    struct
    |> cast(params, [:password, :password_confirmation])
    |> validate_required([:password, :password_confirmation])
    |> validate_confirmation(:password)

  if changeset.valid? do
    # Only revoke sessions once the new password is known to be valid.
    # Previously tokens were deleted unconditionally, so a *failed* password
    # change still logged the user out everywhere.
    OAuth.Token.delete_user_tokens(struct)
    OAuth.Authorization.delete_user_authorizations(struct)

    hashed = Pbkdf2.hashpwsalt(changeset.changes[:password])

    changeset
    |> put_change(:password_hash, hashed)
  else
    changeset
  end
end
|
|
|
|
|
|
|
|
# Applies a password change and refreshes the user caches.
def reset_password(user, data) do
  update_and_set_cache(password_update_changeset(user, data))
end
|
|
|
|
|
2018-12-18 10:13:57 +00:00
|
|
|
# Changeset for local account registration. `opts[:confirmed]` forces the
# account to skip email confirmation (e.g. admin-created accounts).
def register_changeset(struct, params \\ %{}, opts \\ []) do
  # Confirmation is skipped when explicitly requested or when the instance
  # does not require account activation at all.
  confirmation_status =
    if opts[:confirmed] || !Pleroma.Config.get([:instance, :account_activation_required]) do
      :confirmed
    else
      :unconfirmed
    end

  info_change = User.Info.confirmation_changeset(%User.Info{}, confirmation_status)

  changeset =
    struct
    |> cast(params, [:bio, :email, :name, :nickname, :password, :password_confirmation])
    |> validate_required([:email, :name, :nickname, :password, :password_confirmation])
    |> validate_confirmation(:password)
    |> unique_constraint(:email)
    |> unique_constraint(:nickname)
    |> validate_exclusion(:nickname, Pleroma.Config.get([Pleroma.User, :restricted_nicknames]))
    |> validate_format(:nickname, local_nickname_regex())
    |> validate_format(:email, @email_regex)
    |> validate_length(:bio, max: 1000)
    |> validate_length(:name, min: 1, max: 100)
    |> put_change(:info, info_change)

  if changeset.valid? do
    # Derive AP identifiers from the (validated) nickname; new users start
    # out following only their own followers collection.
    hashed = Pbkdf2.hashpwsalt(changeset.changes[:password])
    ap_id = User.ap_id(%User{nickname: changeset.changes[:nickname]})
    followers = User.ap_followers(%User{nickname: changeset.changes[:nickname]})

    changeset
    |> put_change(:password_hash, hashed)
    |> put_change(:ap_id, ap_id)
    |> unique_constraint(:ap_id)
    |> put_change(:following, [followers])
    |> put_change(:follower_address, followers)
  else
    changeset
  end
end
|
|
|
|
|
2019-01-08 08:55:33 +00:00
|
|
|
# Makes a fresh account follow every local user listed in the instance's
# `:autofollowed_nicknames` setting.
defp autofollow_users(user) do
  nicknames = Pleroma.Config.get([:instance, :autofollowed_nicknames])

  targets =
    User
    |> where([u], u.local == true)
    |> where([u], u.nickname in ^nicknames)
    |> Repo.all()

  follow_all(user, targets)
end
|
|
|
|
|
2018-12-18 10:13:57 +00:00
|
|
|
@doc "Inserts provided changeset, performs post-registration actions (confirmation email sending etc.)"
|
|
|
|
def register(%Ecto.Changeset{} = changeset) do
|
2018-12-19 14:24:55 +00:00
|
|
|
with {:ok, user} <- Repo.insert(changeset),
|
2019-02-09 12:39:57 +00:00
|
|
|
{:ok, user} <- autofollow_users(user),
|
2019-02-16 15:42:34 +00:00
|
|
|
{:ok, _} <- Pleroma.User.WelcomeMessage.post_welcome_message_to_user(user),
|
2019-02-09 12:39:57 +00:00
|
|
|
{:ok, _} <- try_send_confirmation_email(user) do
|
2018-12-18 10:13:57 +00:00
|
|
|
{:ok, user}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-12-18 14:13:52 +00:00
|
|
|
# Sends the account-confirmation email when the account is unconfirmed and
# the instance requires activation; otherwise a no-op.
def try_send_confirmation_email(%User{} = user) do
  activation_required? = Pleroma.Config.get([:instance, :account_activation_required])

  if user.info.confirmation_pending && activation_required? do
    Pleroma.Mailer.deliver_async(Pleroma.UserEmail.account_confirmation_email(user))
  else
    {:ok, :noop}
  end
end
|
|
|
|
|
2018-09-19 06:13:18 +00:00
|
|
|
# Whether a user record should be re-fetched from its origin server.
# Local users are authoritative and never need refreshing.
def needs_update?(%User{local: true}), do: false

def needs_update?(%User{local: false, last_refreshed_at: nil}), do: true

def needs_update?(%User{local: false} = user) do
  # Refresh remote users at most once per day (86_400 seconds).
  NaiveDateTime.diff(NaiveDateTime.utc_now(), user.last_refreshed_at) >= 86_400
end

def needs_update?(_), do: true
|
|
|
|
|
2018-11-18 17:53:50 +00:00
|
|
|
# Follows the target directly unless a follow *request* flow is needed.
# Locked local accounts must approve followers, so no direct follow here.
def maybe_direct_follow(%User{} = follower, %User{local: true, info: %{locked: true}}) do
  {:ok, follower}
end

# Unlocked local targets can be followed immediately.
def maybe_direct_follow(%User{} = follower, %User{local: true} = followed) do
  follow(follower, followed)
end

# Remote targets: only OStatus (non-AP) servers get a direct follow; for
# AP-enabled servers the Follow activity handles state remotely.
def maybe_direct_follow(%User{} = follower, %User{} = followed) do
  if not User.ap_enabled?(followed) do
    follow(follower, followed)
  else
    {:ok, follower}
  end
end
|
|
|
|
|
2018-12-09 09:12:48 +00:00
|
|
|
# Follows the target only when not already following; idempotent.
def maybe_follow(%User{} = follower, %User{info: _info} = followed) do
  if following?(follower, followed) do
    {:ok, follower}
  else
    follow(follower, followed)
  end
end
|
|
|
|
|
2019-02-13 12:52:27 +00:00
|
|
|
@doc "A mass follow for local users. Respects blocks in both directions but does not create activities."
|
2019-01-09 10:35:23 +00:00
|
|
|
@spec follow_all(User.t(), list(User.t())) :: {atom(), User.t()}
|
|
|
|
def follow_all(follower, followeds) do
|
2019-02-09 12:24:23 +00:00
|
|
|
followed_addresses =
|
|
|
|
followeds
|
2019-02-13 12:52:27 +00:00
|
|
|
|> Enum.reject(fn followed -> blocks?(follower, followed) || blocks?(followed, follower) end)
|
2019-02-09 12:24:23 +00:00
|
|
|
|> Enum.map(fn %{follower_address: fa} -> fa end)
|
2019-01-09 10:35:23 +00:00
|
|
|
|
2019-01-30 18:33:25 +00:00
|
|
|
q =
|
|
|
|
from(u in User,
|
|
|
|
where: u.id == ^follower.id,
|
2019-01-31 17:07:46 +00:00
|
|
|
update: [
|
|
|
|
set: [
|
|
|
|
following:
|
|
|
|
fragment(
|
|
|
|
"array(select distinct unnest (array_cat(?, ?)))",
|
|
|
|
u.following,
|
|
|
|
^followed_addresses
|
|
|
|
)
|
|
|
|
]
|
2019-03-20 12:59:27 +00:00
|
|
|
],
|
|
|
|
select: u
|
2019-01-30 18:33:25 +00:00
|
|
|
)
|
|
|
|
|
2019-03-20 12:59:27 +00:00
|
|
|
{1, [follower]} = Repo.update_all(q, [])
|
2019-01-09 10:35:23 +00:00
|
|
|
|
|
|
|
Enum.each(followeds, fn followed ->
|
|
|
|
update_follower_count(followed)
|
|
|
|
end)
|
|
|
|
|
2019-01-30 18:33:25 +00:00
|
|
|
set_cache(follower)
|
2019-01-09 10:35:23 +00:00
|
|
|
end
|
|
|
|
|
2017-12-07 16:51:55 +00:00
|
|
|
# Makes `follower` follow `followed`, updating the `following` array in the
# database and refreshing counts/caches. Returns {:ok, follower} or {:error, msg}.
def follow(%User{} = follower, %User{info: info} = followed) do
  user_config = Application.get_env(:pleroma, :user)
  deny_follow_blocked = Keyword.get(user_config, :deny_follow_blocked)

  ap_followers = followed.follower_address

  cond do
    following?(follower, followed) or info.deactivated ->
      {:error, "Could not follow user: #{followed.nickname} is already on your list."}

    deny_follow_blocked and blocks?(followed, follower) ->
      {:error, "Could not follow user: #{followed.nickname} blocked you."}

    true ->
      # Legacy OStatus remotes need a Websub subscription to deliver posts.
      if !followed.local && follower.local && !ap_enabled?(followed) do
        Websub.subscribe(follower, followed)
      end

      q =
        from(u in User,
          where: u.id == ^follower.id,
          update: [push: [following: ^ap_followers]],
          select: u
        )

      # Exactly one row must match; crash loudly otherwise.
      {1, [follower]} = Repo.update_all(q, [])

      {:ok, _} = update_follower_count(followed)

      set_cache(follower)
  end
end
|
2017-03-23 12:13:09 +00:00
|
|
|
|
|
|
|
# Removes `followed` from `follower`'s following list. Returns
# {:ok, follower, latest_follow_activity} or {:error, msg} when not following.
def unfollow(%User{} = follower, %User{} = followed) do
  ap_followers = followed.follower_address

  # The self-follow entry (own follower address) must never be removed.
  if following?(follower, followed) and follower.ap_id != followed.ap_id do
    q =
      from(u in User,
        where: u.id == ^follower.id,
        update: [pull: [following: ^ap_followers]],
        select: u
      )

    {1, [follower]} = Repo.update_all(q, [])

    {:ok, followed} = update_follower_count(followed)

    set_cache(follower)

    {:ok, follower, Utils.fetch_latest_follow(follower, followed)}
  else
    {:error, "Not subscribed!"}
  end
end
|
2017-03-23 14:51:34 +00:00
|
|
|
|
2018-12-09 09:12:48 +00:00
|
|
|
# True when the target's follower address appears in the follower's list.
@spec following?(User.t(), User.t()) :: boolean
def following?(%User{following: following}, %User{follower_address: follower_address}) do
  follower_address in following
end
|
2017-04-14 15:13:51 +00:00
|
|
|
|
2018-12-29 09:02:37 +00:00
|
|
|
# Follows each identifier in an imported follow list, fetching unknown
# accounts on the fly. Returns a list of followed users / error values;
# individual failures are logged and do not abort the rest of the import.
def follow_import(%User{} = follower, followed_identifiers)
    when is_list(followed_identifiers) do
  Enum.map(
    followed_identifiers,
    fn followed_identifier ->
      with %User{} = followed <- get_or_fetch(followed_identifier),
           {:ok, follower} <- maybe_direct_follow(follower, followed),
           {:ok, _} <- ActivityPub.follow(follower, followed) do
        followed
      else
        err ->
          Logger.debug("follow_import failed for #{followed_identifier} with: #{inspect(err)}")
          err
      end
    end
  )
end
|
|
|
|
|
2018-05-26 14:55:16 +00:00
|
|
|
# Whether the account requires follower approval; nil coerces to false.
def locked?(%User{info: info}) do
  info.locked || false
end
|
|
|
|
|
2018-12-14 18:55:40 +00:00
|
|
|
# Fetches a user by primary key; nil when not found.
# `Repo.get/2` is the idiomatic primary-key lookup (was `Repo.get_by(User, id: id)`).
def get_by_id(id) do
  Repo.get(User, id)
end
|
|
|
|
|
2017-05-11 15:59:11 +00:00
|
|
|
# Fetches a user by ActivityPub id; nil when not found.
def get_by_ap_id(ap_id) do
  Repo.get_by(User, ap_id: ap_id)
end
|
|
|
|
|
2019-03-05 04:37:33 +00:00
|
|
|
# This is mostly an SPC migration fix. This guesses the user nickname by taking the last part
# of the ap_id and the domain and tries to get that user
def get_by_guessed_nickname(ap_id) do
  %URI{host: domain} = URI.parse(ap_id)

  ap_id
  |> String.split("/")
  |> List.last()
  |> Kernel.<>("@#{domain}")
  |> get_by_nickname()
end
|
|
|
|
|
2019-01-30 18:21:04 +00:00
|
|
|
# Writes the user into all three cache keyspaces (ap_id, nickname, info)
# and returns {:ok, user} for easy use at the end of pipelines.
def set_cache(user) do
  Cachex.put(:user_cache, "ap_id:#{user.ap_id}", user)
  Cachex.put(:user_cache, "nickname:#{user.nickname}", user)
  Cachex.put(:user_cache, "user_info:#{user.id}", user_info(user))
  {:ok, user}
end
|
|
|
|
|
2017-12-08 16:50:11 +00:00
|
|
|
# Persists the changeset and refreshes the caches on success.
# Returns {:ok, user} or the Repo error tuple unchanged.
def update_and_set_cache(changeset) do
  # A non-matching result falls through `with` as-is, so the former
  # `else e -> e` clause was redundant and has been removed.
  with {:ok, user} <- Repo.update(changeset) do
    set_cache(user)
  end
end
|
|
|
|
|
2018-02-25 15:14:25 +00:00
|
|
|
# Drops every cache entry for the user (mirrors the keys set in set_cache/1).
def invalidate_cache(user) do
  Cachex.del(:user_cache, "ap_id:#{user.ap_id}")
  Cachex.del(:user_cache, "nickname:#{user.nickname}")
  Cachex.del(:user_cache, "user_info:#{user.id}")
end
|
|
|
|
|
2017-04-14 15:13:51 +00:00
|
|
|
# Cache-through lookup by AP id; misses fall back to the database.
def get_cached_by_ap_id(ap_id) do
  key = "ap_id:#{ap_id}"
  Cachex.fetch!(:user_cache, key, fn _ -> get_by_ap_id(ap_id) end)
end
|
|
|
|
|
2018-12-14 18:55:40 +00:00
|
|
|
# Cache-through lookup by primary key. The id cache stores only the ap_id
# (an indirection), so the actual record lives once, under the ap_id key.
def get_cached_by_id(id) do
  key = "id:#{id}"

  ap_id =
    Cachex.fetch!(:user_cache, key, fn _ ->
      user = get_by_id(id)

      if user do
        # Warm the ap_id cache while we have the record in hand.
        Cachex.put(:user_cache, "ap_id:#{user.ap_id}", user)
        {:commit, user.ap_id}
      else
        # :ignore avoids caching a miss; "" makes the final lookup return nil.
        {:ignore, ""}
      end
    end)

  get_cached_by_ap_id(ap_id)
end
|
|
|
|
|
2017-04-14 15:13:51 +00:00
|
|
|
# Cache-through lookup by nickname; a miss may trigger a remote fetch
# via get_or_fetch_by_nickname/1.
def get_cached_by_nickname(nickname) do
  key = "nickname:#{nickname}"
  Cachex.fetch!(:user_cache, key, fn _ -> get_or_fetch_by_nickname(nickname) end)
end
|
2017-04-30 08:04:54 +00:00
|
|
|
|
2018-12-14 18:55:40 +00:00
|
|
|
# Resolves an ambiguous identifier: tries it as an id first, then a nickname.
def get_cached_by_nickname_or_id(nickname_or_id) do
  get_cached_by_id(nickname_or_id) || get_cached_by_nickname(nickname_or_id)
end
|
|
|
|
|
2017-04-30 13:06:22 +00:00
|
|
|
# Looks the nickname up verbatim; if that fails and the nickname is in
# fully-qualified form for *this* instance (user@ourhost), retries with
# the bare local part.
def get_by_nickname(nickname) do
  Repo.get_by(User, nickname: nickname) ||
    if Regex.match?(~r(@#{Pleroma.Web.Endpoint.host()})i, nickname) do
      Repo.get_by(User, nickname: local_nickname(nickname))
    end
end
|
|
|
|
|
2018-04-18 10:13:57 +00:00
|
|
|
# Login helper: the identifier may be either a nickname or an email;
# nickname takes precedence. Returns nil when neither matches.
def get_by_nickname_or_email(nickname_or_email) do
  Repo.get_by(User, nickname: nickname_or_email) ||
    Repo.get_by(User, email: nickname_or_email)
end
|
|
|
|
|
2017-04-30 08:04:54 +00:00
|
|
|
# Cache-through wrapper around user_info/1 (profile stat counts).
def get_cached_user_info(user) do
  key = "user_info:#{user.id}"
  Cachex.fetch!(:user_cache, key, fn _ -> user_info(user) end)
end
|
2017-04-30 16:48:48 +00:00
|
|
|
|
2018-02-18 11:27:05 +00:00
|
|
|
# Fetches a remote user, preferring ActivityPub and falling back to OStatus.
def fetch_by_nickname(nickname) do
  with {:ok, user} <- ActivityPub.make_user_from_nickname(nickname) do
    {:ok, user}
  else
    _ -> OStatus.make_user(nickname)
  end
end
|
|
|
|
|
2017-04-30 16:48:48 +00:00
|
|
|
# Returns the local record for a nickname, or fetches it from the remote
# server when unknown. Returns the user or nil (note: NOT an ok/error tuple).
def get_or_fetch_by_nickname(nickname) do
  with %User{} = user <- get_by_nickname(nickname) do
    user
  else
    _e ->
      # Only fully-qualified names (user@domain) can be fetched remotely.
      with [_nick, _domain] <- String.split(nickname, "@"),
           {:ok, user} <- fetch_by_nickname(nickname) do
        # Optionally backfill the newcomer's recent posts in the background.
        if Pleroma.Config.get([:fetch_initial_posts, :enabled]) do
          {:ok, _} = Task.start(__MODULE__, :fetch_initial_posts, [user])
        end

        user
      else
        _e -> nil
      end
  end
end
|
2017-07-20 17:37:41 +00:00
|
|
|
|
2019-03-06 21:13:26 +00:00
|
|
|
@doc "Fetch some posts when the user has just been federated with"
|
|
|
|
def fetch_initial_posts(user) do
|
|
|
|
pages = Pleroma.Config.get!([:fetch_initial_posts, :pages])
|
|
|
|
|
|
|
|
Enum.each(
|
|
|
|
# Insert all the posts in reverse order, so they're in the right order on the timeline
|
|
|
|
Enum.reverse(Utils.fetch_ordered_collection(user.info.source_data["outbox"], pages)),
|
|
|
|
&Pleroma.Web.Federator.incoming_ap_doc/1
|
|
|
|
)
|
|
|
|
end
|
|
|
|
|
2019-01-09 17:14:32 +00:00
|
|
|
# Query for users following this one: anyone whose `following` array
# contains this user's follower address (excluding the user itself).
def get_followers_query(%User{id: id, follower_address: follower_address}, nil) do
  from(
    u in User,
    where: fragment("? <@ ?", ^[follower_address], u.following),
    where: u.id != ^id
  )
end

# Paginated variant (20 per page).
def get_followers_query(user, page) do
  from(u in get_followers_query(user, nil))
  |> paginate(page, 20)
end

def get_followers_query(user), do: get_followers_query(user, nil)
|
|
|
|
|
|
|
|
# Materializes the followers query; always returns {:ok, users}.
def get_followers(user, page \\ nil) do
  followers =
    user
    |> get_followers_query(page)
    |> Repo.all()

  {:ok, followers}
end
|
|
|
|
|
2019-01-14 17:04:45 +00:00
|
|
|
# Like get_followers/2 but returns only the follower ids.
def get_followers_ids(user, page \\ nil) do
  user
  |> get_followers_query(page)
  |> select([u], u.id)
  |> Repo.all()
end
|
|
|
|
|
2019-01-09 17:14:32 +00:00
|
|
|
# Query for users this one follows: anyone whose follower address appears
# in this user's `following` array (excluding the user itself).
def get_friends_query(%User{id: id, following: following}, nil) do
  from(
    u in User,
    where: u.follower_address in ^following,
    where: u.id != ^id
  )
end

# Paginated variant (20 per page).
def get_friends_query(user, page) do
  from(u in get_friends_query(user, nil))
  |> paginate(page, 20)
end

def get_friends_query(user), do: get_friends_query(user, nil)
|
|
|
|
|
|
|
|
# Materializes the friends query; always returns {:ok, users}.
def get_friends(user, page \\ nil) do
  friends =
    user
    |> get_friends_query(page)
    |> Repo.all()

  {:ok, friends}
end
|
2017-07-22 15:42:15 +00:00
|
|
|
|
2019-01-14 17:04:45 +00:00
|
|
|
# Like get_friends/2 but returns only the friend ids.
def get_friends_ids(user, page \\ nil) do
  user
  |> get_friends_query(page)
  |> select([u], u.id)
  |> Repo.all()
end
|
|
|
|
|
2018-05-26 16:03:32 +00:00
|
|
|
# Query for pending Follow activities targeting this user. The last
# fragment matches the activity's object against the user's ap_id whether
# the object is embedded (map with "id") or a bare string.
def get_follow_requests_query(%User{} = user) do
  from(
    a in Activity,
    where:
      fragment(
        "? ->> 'type' = 'Follow'",
        a.data
      ),
    where:
      fragment(
        "? ->> 'state' = 'pending'",
        a.data
      ),
    where:
      fragment(
        "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
        a.data,
        a.data,
        ^user.ap_id
      )
  )
end
|
|
|
|
|
|
|
|
# Lists the users with a pending follow request towards `user`, excluding
# requesters who already follow (their `following` contains our address).
def get_follow_requests(%User{} = user) do
  users =
    user
    |> User.get_follow_requests_query()
    |> join(:inner, [a], u in User, on: a.actor == u.ap_id)
    |> where([a, u], not fragment("? @> ?", u.following, ^[user.follower_address]))
    # One row per requester even with multiple pending activities.
    |> group_by([a, u], u.id)
    |> select([a, u], u)
    |> Repo.all()

  {:ok, users}
end
|
|
|
|
|
2017-10-31 15:37:11 +00:00
|
|
|
# Atomically increments info.note_count inside the jsonb column and
# refreshes the cache with the returned row. {:error, user} if no row updated.
def increase_note_count(%User{} = user) do
  User
  |> where(id: ^user.id)
  |> update([u],
    set: [
      info:
        fragment(
          "jsonb_set(?, '{note_count}', ((?->>'note_count')::int + 1)::varchar::jsonb, true)",
          u.info,
          u.info
        )
    ]
  )
  |> select([u], u)
  |> Repo.update_all([])
  |> case do
    {1, [user]} -> set_cache(user)
    _ -> {:error, user}
  end
end
|
|
|
|
|
2018-04-24 09:34:18 +00:00
|
|
|
# Atomically decrements info.note_count (floored at 0 via greatest()) and
# refreshes the cache with the returned row. {:error, user} if no row updated.
def decrease_note_count(%User{} = user) do
  User
  |> where(id: ^user.id)
  |> update([u],
    set: [
      info:
        fragment(
          "jsonb_set(?, '{note_count}', (greatest(0, (?->>'note_count')::int - 1))::varchar::jsonb, true)",
          u.info,
          u.info
        )
    ]
  )
  |> select([u], u)
  |> Repo.update_all([])
  |> case do
    {1, [user]} -> set_cache(user)
    _ -> {:error, user}
  end
end
|
|
|
|
|
2017-07-22 15:42:15 +00:00
|
|
|
# Recomputes note_count from scratch by counting the user's Note objects,
# then persists it through the embedded info changeset.
def update_note_count(%User{} = user) do
  note_count_query =
    from(
      a in Object,
      where: fragment("?->>'actor' = ? and ?->>'type' = 'Note'", a.data, ^user.ap_id, a.data),
      select: count(a.id)
    )

  note_count = Repo.one(note_count_query)

  info_cng = User.Info.set_note_count(user.info, note_count)

  cng =
    change(user)
    |> put_embed(:info, info_cng)

  update_and_set_cache(cng)
end
|
|
|
|
|
|
|
|
  @doc """
  Recounts the user's followers and atomically writes the result into
  `info.follower_count` via `jsonb_set`, refreshing the cache on success.
  """
  def update_follower_count(%User{} = user) do
    # Users whose `following` array contains this user's follower address,
    # excluding the user itself (a self-follow is not counted).
    follower_count_query =
      User
      |> where([u], ^user.follower_address in u.following)
      |> where([u], u.id != ^user.id)
      |> select([u], %{count: count(u.id)})

    User
    |> where(id: ^user.id)
    |> join(:inner, [u], s in subquery(follower_count_query))
    |> update([u, s],
      set: [
        info:
          fragment(
            "jsonb_set(?, '{follower_count}', ?::varchar::jsonb, true)",
            u.info,
            s.count
          )
      ]
    )
    |> select([u], u)
    |> Repo.update_all([])
    |> case do
      # one row updated: cache the refreshed struct
      {1, [user]} -> set_cache(user)
      _ -> {:error, user}
    end
  end
|
2017-09-11 14:15:28 +00:00
|
|
|
|
2018-11-09 08:23:45 +00:00
|
|
|
def get_users_from_set_query(ap_ids, false) do
|
2018-06-18 04:33:41 +00:00
|
|
|
from(
|
|
|
|
u in User,
|
2018-11-09 08:23:45 +00:00
|
|
|
where: u.ap_id in ^ap_ids
|
2018-11-08 19:30:55 +00:00
|
|
|
)
|
|
|
|
end
|
|
|
|
|
2018-11-09 08:23:45 +00:00
|
|
|
def get_users_from_set_query(ap_ids, true) do
|
|
|
|
query = get_users_from_set_query(ap_ids, false)
|
2018-11-08 19:30:55 +00:00
|
|
|
|
|
|
|
from(
|
|
|
|
u in query,
|
2018-06-18 04:33:41 +00:00
|
|
|
where: u.local == true
|
|
|
|
)
|
|
|
|
end
|
|
|
|
|
2018-11-09 08:23:45 +00:00
|
|
|
def get_users_from_set(ap_ids, local_only \\ true) do
|
|
|
|
get_users_from_set_query(ap_ids, local_only)
|
|
|
|
|> Repo.all()
|
|
|
|
end
|
|
|
|
|
2018-02-19 09:05:26 +00:00
|
|
|
def get_recipients_from_activity(%Activity{recipients: to}) do
|
2018-03-30 13:01:53 +00:00
|
|
|
query =
|
|
|
|
from(
|
|
|
|
u in User,
|
|
|
|
where: u.ap_id in ^to,
|
|
|
|
or_where: fragment("? && ?", u.following, ^to)
|
|
|
|
)
|
2017-11-16 15:49:51 +00:00
|
|
|
|
2018-03-30 13:01:53 +00:00
|
|
|
query = from(u in query, where: u.local == true)
|
2017-11-20 05:58:43 +00:00
|
|
|
|
2017-11-16 15:49:51 +00:00
|
|
|
Repo.all(query)
|
|
|
|
end
|
|
|
|
|
2019-03-04 18:26:32 +00:00
|
|
|
  # Admin user listing without a search term: all (optionally local-only)
  # users ordered by nickname, paginated, plus the total count.
  @spec search_for_admin(%{
          local: boolean(),
          page: number(),
          page_size: number()
        }) :: {:ok, [Pleroma.User.t()], number()}
  def search_for_admin(%{query: nil, local: local, page: page, page_size: page_size}) do
    query =
      from(u in User, order_by: u.nickname)
      |> maybe_local_user_query(local)

    paginated_query =
      query
      |> paginate(page, page_size)

    # Total count comes from the unpaginated query.
    count =
      query
      |> Repo.aggregate(:count, :id)

    {:ok, Repo.all(paginated_query), count}
  end

  # Admin user search: case-insensitive substring match on nickname.
  @spec search_for_admin(%{
          query: binary(),
          local: boolean(),
          page: number(),
          page_size: number()
        }) :: {:ok, [Pleroma.User.t()], number()}
  def search_for_admin(%{
        query: term,
        local: local,
        page: page,
        page_size: page_size
      }) do
    maybe_local_query = User |> maybe_local_user_query(local)

    # NOTE(review): `term` is interpolated into the ILIKE pattern, so `%`
    # and `_` inside the term act as wildcards — confirm this is intended.
    search_query = from(u in maybe_local_query, where: ilike(u.nickname, ^"%#{term}%"))
    count = search_query |> Repo.aggregate(:count, :id)

    results =
      search_query
      |> paginate(page, page_size)
      |> Repo.all()

    {:ok, results, count}
  end
|
2018-03-30 13:01:53 +00:00
|
|
|
|
2019-01-14 17:04:45 +00:00
|
|
|
  @doc """
  Searches users by full-text + trigram similarity.

  When `resolve` is true the (``@``-stripped) query is first run through
  `get_or_fetch/1` so an exact remote match exists locally before
  searching. `for_user` is used downstream to boost known contacts.
  """
  def search(query, resolve \\ false, for_user \\ nil) do
    # Strip the beginning @ off if there is a query
    query = String.trim_leading(query, "@")

    if resolve, do: get_or_fetch(query)

    {:ok, results} =
      Repo.transaction(fn ->
        # Lower pg_trgm's similarity threshold; the transaction keeps both
        # statements on the same connection, where set_limit applies.
        Ecto.Adapters.SQL.query(Repo, "select set_limit(0.25)", [])
        Repo.all(search_query(query, for_user))
      end)

    results
  end
|
2018-03-30 13:01:53 +00:00
|
|
|
|
2019-03-21 20:16:32 +00:00
|
|
|
  @doc """
  Builds the combined user-search query: the full-text subquery unioned
  with the trigram subquery, de-duplicated per user id (the `search_type`
  ordering decides which duplicate survives), rank-boosted for `for_user`'s
  contacts and limited to the 20 best matches.
  """
  def search_query(query, for_user) do
    fts_subquery = fts_search_subquery(query)
    trigram_subquery = trigram_search_subquery(query)
    union_query = from(s in trigram_subquery, union_all: ^fts_subquery)
    # distinct on id with order_by search_type keeps one row per user.
    distinct_query = from(s in subquery(union_query), order_by: s.search_type, distinct: s.id)

    from(s in subquery(boost_search_rank_query(distinct_query, for_user)),
      order_by: [desc: s.search_rank],
      limit: 20
    )
  end
|
2019-01-14 17:04:45 +00:00
|
|
|
|
2019-03-21 20:16:32 +00:00
|
|
|
  # Without a viewing user there is nothing to boost.
  defp boost_search_rank_query(query, nil), do: query

  # Multiplies `search_rank` for users the searcher is connected to:
  # x1.3 for mutuals, x1.2 for friends, x1.1 for followers.
  defp boost_search_rank_query(query, for_user) do
    friends_ids = get_friends_ids(for_user)
    followers_ids = get_followers_ids(for_user)

    from(u in subquery(query),
      select_merge: %{
        search_rank:
          fragment(
            """
            CASE WHEN (?) THEN (?) * 1.3
            WHEN (?) THEN (?) * 1.2
            WHEN (?) THEN (?) * 1.1
            ELSE (?) END
            """,
            u.id in ^friends_ids and u.id in ^followers_ids,
            u.search_rank,
            u.id in ^friends_ids,
            u.search_rank,
            u.id in ^followers_ids,
            u.search_rank,
            u.search_rank
          )
      }
    )
  end
|
2019-01-14 17:04:45 +00:00
|
|
|
|
2019-03-02 14:21:18 +00:00
|
|
|
  # Full-text-search subquery: matches a tsvector built from nickname
  # (weight A) and display name (weight B). Each search word gets a ':*'
  # suffix for prefix matching and the words are OR-ed together.
  defp fts_search_subquery(term, query \\ User) do
    processed_query =
      term
      |> String.replace(~r/\W+/, " ")
      |> String.trim()
      |> String.split()
      |> Enum.map(&(&1 <> ":*"))
      |> Enum.join(" | ")

    from(
      u in query,
      select_merge: %{
        # Tags rows from this subquery (0) in the trigram/fts union.
        search_type: ^0,
        search_rank:
          fragment(
            """
            ts_rank_cd(
              setweight(to_tsvector('simple', regexp_replace(?, '\\W', ' ', 'g')), 'A') ||
              setweight(to_tsvector('simple', regexp_replace(coalesce(?, ''), '\\W', ' ', 'g')), 'B'),
              to_tsquery('simple', ?),
              32
            )
            """,
            u.nickname,
            u.name,
            ^processed_query
          )
      },
      where:
        fragment(
          """
          (setweight(to_tsvector('simple', regexp_replace(?, '\\W', ' ', 'g')), 'A') ||
          setweight(to_tsvector('simple', regexp_replace(coalesce(?, ''), '\\W', ' ', 'g')), 'B')) @@ to_tsquery('simple', ?)
          """,
          u.nickname,
          u.name,
          ^processed_query
        )
    )
  end
|
2019-01-14 17:04:45 +00:00
|
|
|
|
2019-03-02 14:21:18 +00:00
|
|
|
  # Trigram-similarity subquery: ranks users by pg_trgm similarity between
  # the search term and "nickname name".
  defp trigram_search_subquery(term) do
    from(
      u in User,
      select_merge: %{
        # ^1 gives 'Postgrex expected a binary, got 1' for some weird reason
        search_type: fragment("?", 1),
        search_rank:
          fragment(
            "similarity(?, trim(? || ' ' || coalesce(?, '')))",
            ^term,
            u.nickname,
            u.name
          )
      },
      # `%` compares against pg_trgm's similarity threshold (lowered to
      # 0.25 in search/3).
      where: fragment("trim(? || ' ' || coalesce(?, '')) % ?", u.nickname, u.name, ^term)
    )
  end
|
|
|
|
|
2018-12-29 09:02:37 +00:00
|
|
|
  @doc """
  Blocks every identifier in `blocked_identifiers` on behalf of `blocker`,
  fetching unknown users as needed. Returns a list with, per identifier,
  the blocked `%User{}` on success or the failing step's error value.
  """
  def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers) do
    Enum.map(
      blocked_identifiers,
      fn blocked_identifier ->
        # `blocker` is rebound inside the `with` so ActivityPub.block sees
        # the struct updated by block/2; each iteration starts from the
        # original outer `blocker` (closure capture).
        with %User{} = blocked <- get_or_fetch(blocked_identifier),
             {:ok, blocker} <- block(blocker, blocked),
             {:ok, _} <- ActivityPub.block(blocker, blocked) do
          blocked
        else
          err ->
            Logger.debug("blocks_import failed for #{blocked_identifier} with: #{inspect(err)}")
            err
        end
      end
    )
  end
|
|
|
|
|
2018-09-03 21:20:22 +00:00
|
|
|
def mute(muter, %User{ap_id: ap_id}) do
|
2019-02-19 20:09:16 +00:00
|
|
|
info_cng =
|
|
|
|
muter.info
|
|
|
|
|> User.Info.add_to_mutes(ap_id)
|
|
|
|
|
|
|
|
cng =
|
|
|
|
change(muter)
|
|
|
|
|> put_embed(:info, info_cng)
|
2018-09-05 20:49:15 +00:00
|
|
|
|
2019-02-19 20:09:16 +00:00
|
|
|
update_and_set_cache(cng)
|
2018-09-05 20:49:15 +00:00
|
|
|
end
|
|
|
|
|
2019-02-19 20:09:16 +00:00
|
|
|
def unmute(muter, %{ap_id: ap_id}) do
|
|
|
|
info_cng =
|
|
|
|
muter.info
|
|
|
|
|> User.Info.remove_from_mutes(ap_id)
|
2018-09-05 20:49:15 +00:00
|
|
|
|
2019-02-19 20:09:16 +00:00
|
|
|
cng =
|
|
|
|
change(muter)
|
|
|
|
|> put_embed(:info, info_cng)
|
|
|
|
|
|
|
|
update_and_set_cache(cng)
|
2018-09-05 20:49:15 +00:00
|
|
|
end
|
|
|
|
|
2018-06-19 00:36:40 +00:00
|
|
|
  @doc """
  Blocks `blocked` on behalf of `blocker`.

  Any follow relationship is severed in both directions first, then the
  block is recorded in `blocker.info`.
  """
  def block(blocker, %User{ap_id: ap_id} = blocked) do
    # sever any follow relationships to prevent leaks per activitypub (Pleroma issue #213)
    blocker =
      if following?(blocker, blocked) do
        {:ok, blocker, _} = unfollow(blocker, blocked)
        blocker
      else
        blocker
      end

    # Also force the blocked user to unfollow the blocker.
    if following?(blocked, blocker) do
      unfollow(blocked, blocker)
    end

    info_cng =
      blocker.info
      |> User.Info.add_to_block(ap_id)

    cng =
      change(blocker)
      |> put_embed(:info, info_cng)

    update_and_set_cache(cng)
  end

  # helper to handle the block given only an actor's AP id
  def block(blocker, %{ap_id: ap_id}) do
    block(blocker, User.get_by_ap_id(ap_id))
  end
|
|
|
|
|
2018-11-18 17:40:31 +00:00
|
|
|
def unblock(blocker, %{ap_id: ap_id}) do
|
|
|
|
info_cng =
|
|
|
|
blocker.info
|
|
|
|
|> User.Info.remove_from_block(ap_id)
|
2017-11-02 20:57:37 +00:00
|
|
|
|
2018-11-18 17:40:31 +00:00
|
|
|
cng =
|
|
|
|
change(blocker)
|
|
|
|
|> put_embed(:info, info_cng)
|
|
|
|
|
|
|
|
update_and_set_cache(cng)
|
2017-11-02 20:57:37 +00:00
|
|
|
end
|
|
|
|
|
2019-02-27 15:46:47 +00:00
|
|
|
def mutes?(nil, _), do: false
|
2019-02-19 20:09:16 +00:00
|
|
|
def mutes?(user, %{ap_id: ap_id}), do: Enum.member?(user.info.mutes, ap_id)
|
2018-09-05 20:49:15 +00:00
|
|
|
|
2017-11-02 20:57:37 +00:00
|
|
|
def blocks?(user, %{ap_id: ap_id}) do
|
2018-11-18 17:40:31 +00:00
|
|
|
blocks = user.info.blocks
|
|
|
|
domain_blocks = user.info.domain_blocks
|
2018-06-03 19:01:37 +00:00
|
|
|
%{host: host} = URI.parse(ap_id)
|
2018-06-03 19:21:23 +00:00
|
|
|
|
|
|
|
Enum.member?(blocks, ap_id) ||
|
|
|
|
Enum.any?(domain_blocks, fn domain ->
|
|
|
|
host == domain
|
|
|
|
end)
|
2018-06-03 19:01:37 +00:00
|
|
|
end
|
|
|
|
|
2019-02-19 20:09:16 +00:00
|
|
|
def muted_users(user),
|
|
|
|
do: Repo.all(from(u in User, where: u.ap_id in ^user.info.mutes))
|
|
|
|
|
2018-12-28 18:08:07 +00:00
|
|
|
def blocked_users(user),
|
|
|
|
do: Repo.all(from(u in User, where: u.ap_id in ^user.info.blocks))
|
|
|
|
|
2018-06-03 19:01:37 +00:00
|
|
|
def block_domain(user, domain) do
|
2018-11-18 20:41:35 +00:00
|
|
|
info_cng =
|
|
|
|
user.info
|
|
|
|
|> User.Info.add_to_domain_block(domain)
|
2018-06-03 19:01:37 +00:00
|
|
|
|
2018-11-18 20:41:35 +00:00
|
|
|
cng =
|
|
|
|
change(user)
|
|
|
|
|> put_embed(:info, info_cng)
|
2018-11-18 20:40:52 +00:00
|
|
|
|
|
|
|
update_and_set_cache(cng)
|
2018-06-03 19:01:37 +00:00
|
|
|
end
|
|
|
|
|
|
|
|
def unblock_domain(user, domain) do
|
2018-11-18 20:41:35 +00:00
|
|
|
info_cng =
|
|
|
|
user.info
|
|
|
|
|> User.Info.remove_from_domain_block(domain)
|
2018-06-03 19:01:37 +00:00
|
|
|
|
2018-11-18 20:41:35 +00:00
|
|
|
cng =
|
|
|
|
change(user)
|
|
|
|
|> put_embed(:info, info_cng)
|
2018-11-18 20:40:52 +00:00
|
|
|
|
|
|
|
update_and_set_cache(cng)
|
2017-11-02 20:57:37 +00:00
|
|
|
end
|
|
|
|
|
2019-03-02 14:21:18 +00:00
|
|
|
def maybe_local_user_query(query, local) do
|
|
|
|
if local, do: local_user_query(query), else: query
|
2019-03-01 17:13:02 +00:00
|
|
|
end
|
|
|
|
|
2019-03-02 14:21:18 +00:00
|
|
|
def local_user_query(query \\ User) do
|
2018-08-06 10:44:25 +00:00
|
|
|
from(
|
2019-03-02 14:21:18 +00:00
|
|
|
u in query,
|
2018-08-06 10:44:25 +00:00
|
|
|
where: u.local == true,
|
|
|
|
where: not is_nil(u.nickname)
|
|
|
|
)
|
2017-11-30 13:59:44 +00:00
|
|
|
end
|
|
|
|
|
2019-01-17 16:16:02 +00:00
|
|
|
def active_local_user_query do
|
|
|
|
from(
|
|
|
|
u in local_user_query(),
|
2019-01-22 14:12:53 +00:00
|
|
|
where: fragment("not (?->'deactivated' @> 'true')", u.info)
|
2019-01-17 16:16:02 +00:00
|
|
|
)
|
|
|
|
end
|
|
|
|
|
|
|
|
def moderator_user_query do
|
2018-09-03 12:03:23 +00:00
|
|
|
from(
|
|
|
|
u in User,
|
|
|
|
where: u.local == true,
|
|
|
|
where: fragment("?->'is_moderator' @> 'true'", u.info)
|
|
|
|
)
|
|
|
|
end
|
|
|
|
|
2018-10-29 23:08:56 +00:00
|
|
|
def deactivate(%User{} = user, status \\ true) do
|
2018-11-18 17:06:02 +00:00
|
|
|
info_cng = User.Info.set_activation_status(user.info, status)
|
2018-11-18 17:27:04 +00:00
|
|
|
|
|
|
|
cng =
|
|
|
|
change(user)
|
|
|
|
|> put_embed(:info, info_cng)
|
2018-11-18 17:06:02 +00:00
|
|
|
|
|
|
|
update_and_set_cache(cng)
|
2017-12-07 16:47:23 +00:00
|
|
|
end
|
2017-12-07 17:13:05 +00:00
|
|
|
|
2018-03-30 13:01:53 +00:00
|
|
|
  @doc """
  Deletes a user: deactivates the account, severs all follow relationships
  in both directions, and deletes the user's "Create" activities. Other
  activity types are currently left untouched (see TODO). Returns
  `{:ok, user}`.
  """
  def delete(%User{} = user) do
    {:ok, user} = User.deactivate(user)

    # Remove all relationships
    {:ok, followers} = User.get_followers(user)

    followers
    |> Enum.each(fn follower -> User.unfollow(follower, user) end)

    {:ok, friends} = User.get_friends(user)

    friends
    |> Enum.each(fn followed -> User.unfollow(user, followed) end)

    # All activities authored by this user, with objects preloaded so
    # ActivityPub.delete can act on them.
    query =
      from(a in Activity, where: a.actor == ^user.ap_id)
      |> Activity.with_preloaded_object()

    Repo.all(query)
    |> Enum.each(fn activity ->
      case activity.data["type"] do
        "Create" ->
          ActivityPub.delete(Object.normalize(activity))

        # TODO: Do something with likes, follows, repeats.
        _ ->
          "Doing nothing"
      end
    end)

    {:ok, user}
  end
|
2017-12-12 09:17:21 +00:00
|
|
|
|
2018-12-01 11:46:08 +00:00
|
|
|
def html_filter_policy(%User{info: %{no_rich_text: true}}) do
|
2018-09-22 01:37:05 +00:00
|
|
|
Pleroma.HTML.Scrubber.TwitterText
|
|
|
|
end
|
|
|
|
|
2018-12-29 16:45:50 +00:00
|
|
|
@default_scrubbers Pleroma.Config.get([:markup, :scrub_policy])
|
|
|
|
|
|
|
|
def html_filter_policy(_), do: @default_scrubbers
|
2018-09-22 01:37:05 +00:00
|
|
|
|
2019-03-06 21:13:26 +00:00
|
|
|
def fetch_by_ap_id(ap_id) do
|
|
|
|
ap_try = ActivityPub.make_user_from_ap_id(ap_id)
|
|
|
|
|
|
|
|
case ap_try do
|
|
|
|
{:ok, user} ->
|
|
|
|
user
|
|
|
|
|
|
|
|
_ ->
|
|
|
|
case OStatus.make_user(ap_id) do
|
|
|
|
{:ok, user} -> user
|
|
|
|
_ -> {:error, "Could not fetch by AP id"}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-02-11 19:43:33 +00:00
|
|
|
def get_or_fetch_by_ap_id(ap_id) do
|
2018-09-19 06:13:18 +00:00
|
|
|
user = get_by_ap_id(ap_id)
|
|
|
|
|
|
|
|
if !is_nil(user) and !User.needs_update?(user) do
|
2018-02-11 19:43:33 +00:00
|
|
|
user
|
|
|
|
else
|
2019-03-18 17:14:49 +00:00
|
|
|
# Whether to fetch initial posts for the user (if it's a new user & the fetching is enabled)
|
|
|
|
should_fetch_initial = is_nil(user) and Pleroma.Config.get([:fetch_initial_posts, :enabled])
|
|
|
|
|
2019-03-06 21:13:26 +00:00
|
|
|
user = fetch_by_ap_id(ap_id)
|
2018-03-30 13:01:53 +00:00
|
|
|
|
2019-03-18 17:14:49 +00:00
|
|
|
if should_fetch_initial do
|
2019-03-06 21:13:26 +00:00
|
|
|
with %User{} = user do
|
|
|
|
{:ok, _} = Task.start(__MODULE__, :fetch_initial_posts, [user])
|
|
|
|
end
|
2018-02-11 19:43:33 +00:00
|
|
|
end
|
2019-03-06 21:13:26 +00:00
|
|
|
|
|
|
|
user
|
2018-02-11 19:43:33 +00:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-08-06 05:46:37 +00:00
|
|
|
  @doc """
  Returns the instance's relay actor (at `<base_url>/relay`), creating the
  local relay user on first use.
  """
  def get_or_create_instance_user do
    relay_uri = "#{Pleroma.Web.Endpoint.url()}/relay"

    if user = get_by_ap_id(relay_uri) do
      user
    else
      # The relay user is internal: nickname is deliberately nil and the
      # changeset bypasses the normal registration validations.
      changes =
        %User{info: %User.Info{}}
        |> cast(%{}, [:ap_id, :nickname, :local])
        |> put_change(:ap_id, relay_uri)
        |> put_change(:nickname, nil)
        |> put_change(:local, true)
        |> put_change(:follower_address, relay_uri <> "/followers")

      {:ok, user} = Repo.insert(changes)
      user
    end
  end
|
|
|
|
|
2018-02-11 19:43:33 +00:00
|
|
|
  # AP style: extract the PEM-encoded public key from the actor's
  # source data and decode it into an Erlang public-key record.
  def public_key_from_info(%{
        source_data: %{"publicKey" => %{"publicKeyPem" => public_key_pem}}
      }) do
    key =
      public_key_pem
      |> :public_key.pem_decode()
      |> hd()
      |> :public_key.pem_entry_decode()

    {:ok, key}
  end

  # OStatus Magic Key
  def public_key_from_info(%{magic_key: magic_key}) do
    {:ok, Pleroma.Web.Salmon.decode_key(magic_key)}
  end
|
|
|
|
|
2017-12-12 09:17:21 +00:00
|
|
|
def get_public_key_for_ap_id(ap_id) do
|
2018-02-11 19:43:33 +00:00
|
|
|
with %User{} = user <- get_or_fetch_by_ap_id(ap_id),
|
|
|
|
{:ok, public_key} <- public_key_from_info(user.info) do
|
2017-12-12 09:17:21 +00:00
|
|
|
{:ok, public_key}
|
|
|
|
else
|
|
|
|
_ -> :error
|
|
|
|
end
|
|
|
|
end
|
2018-02-11 16:20:02 +00:00
|
|
|
|
2018-02-17 17:15:48 +00:00
|
|
|
  # Normalizes an empty string to nil so `||` fallbacks work.
  # NOTE(review): despite the `?` suffix this is not a boolean predicate —
  # it returns nil or the value itself; kept as-is since a rename would
  # touch callers.
  defp blank?(""), do: nil
  defp blank?(n), do: n
|
2018-02-11 16:20:02 +00:00
|
|
|
  @doc """
  Upserts a remote user from `data`, keyed on nickname.

  A blank name falls back to the nickname. NOTE(review):
  `on_conflict: :replace_all` overwrites every column of an existing row
  with the same nickname.
  """
  def insert_or_update_user(data) do
    data =
      data
      |> Map.put(:name, blank?(data[:name]) || data[:nickname])

    cs = User.remote_user_creation(data)

    Repo.insert(cs, on_conflict: :replace_all, conflict_target: :nickname)
  end
|
2018-02-17 15:08:55 +00:00
|
|
|
|
2018-10-11 10:35:11 +00:00
|
|
|
  # Local users always speak ActivityPub; remote users only if their info
  # says so; anything else (e.g. nil) does not.
  def ap_enabled?(%User{local: true}), do: true
  def ap_enabled?(%User{info: info}), do: info.ap_enabled
  def ap_enabled?(_), do: false

  @doc "Gets or fetch a user by uri or nickname."
  @spec get_or_fetch(String.t()) :: User.t()
  def get_or_fetch("http" <> _host = uri), do: get_or_fetch_by_ap_id(uri)
  def get_or_fetch(nickname), do: get_or_fetch_by_nickname(nickname)
|
2018-10-05 23:40:49 +00:00
|
|
|
|
|
|
|
  # wait a period of time and return newest version of the User structs
  # this is because we have synchronous follow APIs and need to simulate them
  # with an async handshake
  def wait_and_refresh(_, %User{local: true} = a, %User{local: true} = b) do
    # Both users are local: no federation delay to wait out, reload immediately.
    with %User{} = a <- Repo.get(User, a.id),
         %User{} = b <- Repo.get(User, b.id) do
      {:ok, a, b}
    else
      _e ->
        :error
    end
  end

  def wait_and_refresh(timeout, %User{} = a, %User{} = b) do
    # At least one remote user involved: sleep `timeout` ms before reloading.
    with :ok <- :timer.sleep(timeout),
         %User{} = a <- Repo.get(User, a.id),
         %User{} = b <- Repo.get(User, b.id) do
      {:ok, a, b}
    else
      _e ->
        :error
    end
  end
|
2018-12-02 19:03:53 +00:00
|
|
|
|
2018-12-09 21:01:43 +00:00
|
|
|
  @doc """
  Renders a user bio to HTML: formats mentions and links, adds rel=me on
  links matching the user's known profile URLs, and emojifies custom emoji
  declared in the user's AP source data. Nil/empty bios pass through as "".
  """
  def parse_bio(bio, user \\ %User{info: %{source_data: %{}}})
  def parse_bio(nil, _user), do: ""
  def parse_bio(bio, _user) when bio == "", do: bio

  def parse_bio(bio, user) do
    # Custom emoji from the AP "tag" list, as {shortname, url} pairs.
    emoji =
      (user.info.source_data["tag"] || [])
      |> Enum.filter(fn %{"type" => t} -> t == "Emoji" end)
      |> Enum.map(fn %{"icon" => %{"url" => url}, "name" => name} ->
        {String.trim(name, ":"), url}
      end)

    # TODO: get profile URLs other than user.ap_id
    profile_urls = [user.ap_id]

    bio
    |> CommonUtils.format_input("text/plain",
      mentions_format: :full,
      rel: &RelMe.maybe_put_rel_me(&1, profile_urls)
    )
    # format_input returns a tuple; the rendered HTML is its first element.
    |> elem(0)
    |> Formatter.emojify(emoji)
  end
|
2018-12-06 17:06:50 +00:00
|
|
|
|
2018-12-07 09:27:32 +00:00
|
|
|
  @doc """
  Adds normalized (lowercased) admin tags to one user or a list of users;
  users may be given as nicknames or `%User{}` structs. The list form runs
  inside a transaction.
  """
  def tag(user_identifiers, tags) when is_list(user_identifiers) do
    Repo.transaction(fn ->
      for user_identifier <- user_identifiers, do: tag(user_identifier, tags)
    end)
  end

  def tag(nickname, tags) when is_binary(nickname),
    do: tag(User.get_by_nickname(nickname), tags)

  def tag(%User{} = user, tags),
    do: update_tags(user, Enum.uniq((user.tags || []) ++ normalize_tags(tags)))

  @doc """
  Removes admin tags from one user or a list of users; mirror of `tag/2`.
  """
  def untag(user_identifiers, tags) when is_list(user_identifiers) do
    Repo.transaction(fn ->
      for user_identifier <- user_identifiers, do: untag(user_identifier, tags)
    end)
  end

  def untag(nickname, tags) when is_binary(nickname),
    do: untag(User.get_by_nickname(nickname), tags)

  def untag(%User{} = user, tags),
    do: update_tags(user, (user.tags || []) -- normalize_tags(tags))
|
2018-12-06 17:06:50 +00:00
|
|
|
|
2018-12-07 09:27:32 +00:00
|
|
|
defp update_tags(%User{} = user, new_tags) do
|
|
|
|
{:ok, updated_user} =
|
|
|
|
user
|
|
|
|
|> change(%{tags: new_tags})
|
2019-02-19 07:43:37 +00:00
|
|
|
|> update_and_set_cache()
|
2018-12-06 17:06:50 +00:00
|
|
|
|
2018-12-07 09:27:32 +00:00
|
|
|
updated_user
|
2018-12-06 17:06:50 +00:00
|
|
|
end
|
2018-12-06 17:23:16 +00:00
|
|
|
|
2018-09-19 00:04:56 +00:00
|
|
|
def bookmark(%User{} = user, status_id) do
|
|
|
|
bookmarks = Enum.uniq(user.bookmarks ++ [status_id])
|
|
|
|
update_bookmarks(user, bookmarks)
|
|
|
|
end
|
|
|
|
|
|
|
|
def unbookmark(%User{} = user, status_id) do
|
|
|
|
bookmarks = Enum.uniq(user.bookmarks -- [status_id])
|
|
|
|
update_bookmarks(user, bookmarks)
|
|
|
|
end
|
|
|
|
|
|
|
|
def update_bookmarks(%User{} = user, bookmarks) do
|
|
|
|
user
|
|
|
|
|> change(%{bookmarks: bookmarks})
|
|
|
|
|> update_and_set_cache
|
|
|
|
end
|
|
|
|
|
2018-12-07 09:27:32 +00:00
|
|
|
defp normalize_tags(tags) do
|
|
|
|
[tags]
|
|
|
|
|> List.flatten()
|
|
|
|
|> Enum.map(&String.downcase(&1))
|
|
|
|
end
|
2018-12-12 17:17:15 +00:00
|
|
|
|
2019-03-05 03:18:43 +00:00
|
|
|
  # Picks the nickname validation regex based on instance config; read at
  # call time so the setting can change at runtime.
  defp local_nickname_regex do
    if Pleroma.Config.get([:instance, :extended_nickname_format]) do
      @extended_local_nickname_regex
    else
      @strict_local_nickname_regex
    end
  end
|
2019-01-16 14:13:09 +00:00
|
|
|
|
2019-01-18 06:30:16 +00:00
|
|
|
def local_nickname(nickname_or_mention) do
|
|
|
|
nickname_or_mention
|
|
|
|
|> full_nickname()
|
|
|
|
|> String.split("@")
|
|
|
|
|> hd()
|
|
|
|
end
|
|
|
|
|
|
|
|
def full_nickname(nickname_or_mention),
|
|
|
|
do: String.trim_leading(nickname_or_mention, "@")
|
|
|
|
|
2019-01-16 14:13:09 +00:00
|
|
|
  @doc """
  Builds an in-memory placeholder `%User{}` for an `ap_id` that could not
  be resolved; safe to render but never persisted.
  """
  def error_user(ap_id) do
    %User{
      name: ap_id,
      ap_id: ap_id,
      info: %User.Info{},
      nickname: "erroruser@example.com",
      inserted_at: NaiveDateTime.utc_now()
    }
  end
|
2019-02-20 16:51:25 +00:00
|
|
|
|
|
|
|
def all_superusers do
|
|
|
|
from(
|
|
|
|
u in User,
|
|
|
|
where: u.local == true,
|
|
|
|
where: fragment("?->'is_admin' @> 'true' OR ?->'is_moderator' @> 'true'", u.info, u.info)
|
|
|
|
)
|
|
|
|
|> Repo.all()
|
|
|
|
end
|
2019-03-02 14:21:18 +00:00
|
|
|
|
|
|
|
defp paginate(query, page, page_size) do
|
|
|
|
from(u in query,
|
|
|
|
limit: ^page_size,
|
|
|
|
offset: ^((page - 1) * page_size)
|
|
|
|
)
|
|
|
|
end
|
2019-03-09 13:08:41 +00:00
|
|
|
|
|
|
|
def showing_reblogs?(%User{} = user, %User{} = target) do
|
2019-03-15 13:06:58 +00:00
|
|
|
target.ap_id not in user.info.muted_reblogs
|
2019-03-09 13:08:41 +00:00
|
|
|
end
|
2017-03-20 20:28:31 +00:00
|
|
|
end
|