defmodule Pleroma.Web.TwitterAPI.TwitterAPI do
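  @moduledoc """
  Backing functions for the TwitterAPI-compatible endpoints: status creation
  and deletion, follows/unfollows, blocks, repeats, favourites, media upload,
  account registration, user lookup, status search and conversation-id
  mapping.
  """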

  alias Pleroma.{User, Activity, Repo, Object}
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.TwitterAPI.Representers.ActivityRepresenter
  alias Pleroma.Web.TwitterAPI.UserView
  alias Pleroma.Web.{OStatus, CommonAPI}

  import Ecto.Query

  @httpoison Application.get_env(:pleroma, :httpoison)

  def create_status(%User{} = user, %{"status" => _} = data) do
    CommonAPI.post(user, data)
  end

  def delete(%User{} = user, id) do
    # TwitterAPI does not have an "unretweet" endpoint; instead this is done
    # via the "destroy" endpoint. Therefore, there is a need to handle
    # when the status to "delete" is actually an Announce (repeat) object.
    with %Activity{data: %{"type" => type}} <- Repo.get(Activity, id) do
      case type do
        "Announce" -> unrepeat(user, id)
        _ -> CommonAPI.delete(id, user)
      end
    end
  end

  def follow(%User{} = follower, params) do
    with {:ok, %User{} = followed} <- get_user(params),
         {:ok, follower} <- User.follow(follower, followed),
         {:ok, activity} <- ActivityPub.follow(follower, followed) do
      {:ok, follower, followed, activity}
    else
      err -> err
    end
  end

  def unfollow(%User{} = follower, params) do
    with {:ok, %User{} = unfollowed} <- get_user(params),
         {:ok, follower, follow_activity} <- User.unfollow(follower, unfollowed),
         {:ok, _activity} <-
           ActivityPub.insert(%{
             "type" => "Undo",
             "actor" => follower.ap_id,
             # get latest Follow for these users
             "object" => follow_activity.data["id"],
             "published" => make_date()
           }) do
      {:ok, follower, unfollowed}
    else
      err -> err
    end
  end

  def block(%User{} = blocker, params) do
    with {:ok, %User{} = blocked} <- get_user(params),
         {:ok, blocker} <- User.block(blocker, blocked) do
      {:ok, blocker, blocked}
    else
      err -> err
    end
  end

  def unblock(%User{} = blocker, params) do
    with {:ok, %User{} = blocked} <- get_user(params),
         {:ok, blocker} <- User.unblock(blocker, blocked) do
      {:ok, blocker, blocked}
    else
      err -> err
    end
  end

  def repeat(%User{} = user, ap_id_or_id) do
    with {:ok, _announce, %{data: %{"id" => id}}} <- CommonAPI.repeat(ap_id_or_id, user),
         %Activity{} = activity <- Activity.get_create_activity_by_object_ap_id(id) do
      {:ok, activity}
    end
  end

  defp unrepeat(%User{} = user, ap_id_or_id) do
    with {:ok, _unannounce, activity, _object} <- CommonAPI.unrepeat(ap_id_or_id, user) do
      {:ok, activity}
    end
  end

  def fav(%User{} = user, ap_id_or_id) do
    with {:ok, _fav, %{data: %{"id" => id}}} <- CommonAPI.favorite(ap_id_or_id, user),
         %Activity{} = activity <- Activity.get_create_activity_by_object_ap_id(id) do
      {:ok, activity}
    end
  end

  def unfav(%User{} = user, ap_id_or_id) do
    with {:ok, %{data: %{"id" => id}}} <- CommonAPI.unfavorite(ap_id_or_id, user),
         %Activity{} = activity <- Activity.get_create_activity_by_object_ap_id(id) do
      {:ok, activity}
    end
  end
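
  # Returns the TwitterAPI media upload response as a string: a fake "rsp"
  # XML document by default, or a JSON object when `format` is "json".
  # (Example call, illustrative only: `upload(plug_upload, "json")`.)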
  def upload(%Plug.Upload{} = file, format \\ "xml") do
    {:ok, object} = ActivityPub.upload(file)

    url = List.first(object.data["url"])
    href = url["href"]
    type = url["mediaType"]

    case format do
      "xml" ->
        # Fake this as good as possible...
        """
        <?xml version="1.0" encoding="UTF-8"?>
        <rsp stat="ok" xmlns:atom="http://www.w3.org/2005/Atom">
        <mediaid>#{object.id}</mediaid>
        <media_id>#{object.id}</media_id>
        <media_id_string>#{object.id}</media_id_string>
        <media_url>#{href}</media_url>
        <mediaurl>#{href}</mediaurl>
        <atom:link rel="enclosure" href="#{href}" type="#{type}"></atom:link>
        </rsp>
        """

      "json" ->
        %{
          media_id: object.id,
          media_id_string: "#{object.id}",
          media_url: href,
          size: 0
        }
        |> Jason.encode!()
    end
  end

  def register_user(params) do
    params = %{
      nickname: params["nickname"],
      name: params["fullname"],
      bio: params["bio"],
      email: params["email"],
      password: params["password"],
      password_confirmation: params["confirm"]
    }

    changeset = User.register_changeset(%User{}, params)

    with {:ok, user} <- Repo.insert(changeset) do
      {:ok, user}
    else
      {:error, changeset} ->
        errors =
          Ecto.Changeset.traverse_errors(changeset, fn {msg, _opts} -> msg end)
          |> Jason.encode!()

        {:error, %{error: errors}}
    end
  end

  def get_by_id_or_nickname(id_or_nickname) do
    if !is_integer(id_or_nickname) && :error == Integer.parse(id_or_nickname) do
      Repo.get_by(User, nickname: id_or_nickname)
    else
      Repo.get(User, id_or_nickname)
    end
  end
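
  # Resolves the target user from TwitterAPI params: "user_id" (an id or
  # nickname) is checked first, then "screen_name"; with neither present
  # the already-authenticated `user` is returned, if any.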
  def get_user(user \\ nil, params) do
    case params do
      %{"user_id" => user_id} ->
        case target = get_by_id_or_nickname(user_id) do
          nil ->
            {:error, "No user with such user_id"}

          _ ->
            {:ok, target}
        end

      %{"screen_name" => nickname} ->
        case target = Repo.get_by(User, nickname: nickname) do
          nil ->
            {:error, "No user with such screen_name"}

          _ ->
            {:ok, target}
        end

      _ ->
        if user do
          {:ok, user}
        else
          {:error, "You need to specify screen_name or user_id"}
        end
    end
  end

  defp parse_int(string, default)

  defp parse_int(string, default) when is_binary(string) do
    with {n, _} <- Integer.parse(string) do
      n
    else
      _e -> default
    end
  end

  defp parse_int(_, default), do: default
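
  # Full-text status search. "rpp" and "page" mirror the old Twitter search
  # API paging parameters ("rpp" presumably meaning results per page);
  # they default to 20 results on page 1.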
  def search(_user, %{"q" => query} = params) do
    limit = parse_int(params["rpp"], 20)
    page = parse_int(params["page"], 1)
    offset = (page - 1) * limit

    q =
      from(
        a in Activity,
        where: fragment("?->>'type' = 'Create'", a.data),
        where: "https://www.w3.org/ns/activitystreams#Public" in a.recipients,
        where:
          fragment(
            "to_tsvector('english', ?->'object'->>'content') @@ plainto_tsquery('english', ?)",
            a.data,
            ^query
          ),
        limit: ^limit,
        offset: ^offset,
        # this one isn't indexed so psql won't take the wrong index.
        order_by: [desc: :inserted_at]
      )

    Repo.all(q)
  end

  defp make_date do
    DateTime.utc_now() |> DateTime.to_iso8601()
  end

  # DEPRECATED mostly, context objects are now created at insertion time.
  def context_to_conversation_id(context) do
    with %Object{id: id} <- Object.get_cached_by_ap_id(context) do
      id
    else
      _e ->
        changeset = Object.context_mapping(context)

        case Repo.insert(changeset) do
          {:ok, %{id: id}} ->
            id

          # This should be solved by an upsert, but it seems ecto
          # has problems accessing the constraint inside the jsonb.
          {:error, _} ->
            Object.get_cached_by_ap_id(context).id
        end
    end
  end

  def conversation_id_to_context(id) do
    with %Object{data: %{"id" => context}} <- Repo.get(Object, id) do
      context
    else
      _e ->
        {:error, "No such conversation"}
    end
  end
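
  # Looks up (or fetches) the remote user for `uri`, kicks off an async pull
  # of their feed (the "topic" URL in user.info) through OStatus, and
  # immediately returns the rendered profile.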
  def get_external_profile(for_user, uri) do
    with %User{} = user <- User.get_or_fetch(uri) do
      spawn(fn ->
        with url <- user.info["topic"],
             {:ok, %{body: body}} <-
               @httpoison.get(url, [], follow_redirect: true, timeout: 10000, recv_timeout: 20000) do
          OStatus.handle_incoming(body)
        end
      end)

      {:ok, UserView.render("show.json", %{user: user, for: for_user})}
    else
      _e ->
        {:error, "Couldn't find user"}
    end
  end
end