Merge branch 'develop' into feature/new-registrations-digest

Egor Kislitsyn 2020-01-22 21:18:27 +04:00
commit 5c842e3b68
No known key found for this signature in database
GPG key ID: 1B49CB15B71E7805
536 changed files with 718 additions and 217 deletions

View file

@@ -10,6 +10,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- **Breaking**: MDII uploader
### Changed
+- **Breaking:** Pleroma won't start if it detects unapplied migrations
- **Breaking:** attachments are removed along with statuses when there are no other references to it
- **Breaking:** Elixir >=1.8 is now required (was >= 1.7)
- **Breaking:** attachment links (`config :pleroma, :instance, no_attachment_links` and `config :pleroma, Pleroma.Upload, link_name`) disabled by default
@@ -43,6 +44,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Mastodon API, streaming: Add `pleroma.direct_conversation_id` to the `conversation` stream event payload.
- Admin API: Render whole status in grouped reports
- Mastodon API: User timelines will now respect blocks, unless you are getting the user timeline of somebody you blocked (which would be empty otherwise).
+- Mastodon API: Favoriting / Repeating a post multiple times will now return the identical response every time. Before, executing that action twice would return an error ("already favorited") on the second try.
</details>
### Added
@@ -92,6 +94,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
- Captcha: Support native provider
- Captcha: Enable by default
- Mastodon API: Add support for `account_id` param to filter notifications by the account
+- Mastodon API: Add `emoji_reactions` property to Statuses
+- Mastodon API: Change emoji reaction reply format
</details>
### Fixed

View file

@@ -9,7 +9,7 @@ def generate_like_activities(user, posts) do
    {time, _} =
      :timer.tc(fn ->
        Task.async_stream(
          Enum.take_random(posts, count_likes),
          fn post -> {:ok, _, _} = CommonAPI.favorite(post.id, user) end,
          max_concurrency: 10,
          timeout: 30_000
@@ -142,6 +142,48 @@ defp do_generate_activity(users) do
    CommonAPI.post(Enum.random(users), post)
  end

+  def generate_power_intervals(opts \\ []) do
+    count = Keyword.get(opts, :count, 20)
+    power = Keyword.get(opts, :power, 2)
+
+    IO.puts("Generating #{count} intervals for a power #{power} series...")
+
+    counts = Enum.map(1..count, fn n -> :math.pow(n, power) end)
+    sum = Enum.sum(counts)
+
+    densities =
+      Enum.map(counts, fn c ->
+        c / sum
+      end)
+
+    densities
+    |> Enum.reduce(0, fn density, acc ->
+      if acc == 0 do
+        [{0, density}]
+      else
+        [{_, lower} | _] = acc
+        [{lower, lower + density} | acc]
+      end
+    end)
+    |> Enum.reverse()
+  end
+
+  def generate_tagged_activities(opts \\ []) do
+    tag_count = Keyword.get(opts, :tag_count, 20)
+    users = Keyword.get(opts, :users, Repo.all(User))
+    activity_count = Keyword.get(opts, :count, 200_000)
+
+    intervals = generate_power_intervals(count: tag_count)
+
+    IO.puts(
+      "Generating #{activity_count} activities using #{tag_count} different tags of format `tag_n`, starting at tag_0"
+    )
+
+    Enum.each(1..activity_count, fn _ ->
+      random = :rand.uniform()
+      i = Enum.find_index(intervals, fn {lower, upper} -> lower <= random && upper > random end)
+      CommonAPI.post(Enum.random(users), %{"status" => "a post with the tag #tag_#{i}"})
+    end)
+  end
+
  defp do_generate_activity_with_mention(user, users) do
    mentions_cnt = Enum.random([2, 3, 4, 5])
    with_user = Enum.random([true, false])
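
A standalone sketch of the interval construction used by `generate_power_intervals/1` above, in plain Elixir with no Pleroma dependencies; the `count` and `power` values are illustrative only:

```elixir
# Build cumulative {lower, upper} intervals whose widths follow a power-law
# density, mirroring generate_power_intervals/1 above.
count = 4
power = 2

counts = Enum.map(1..count, fn n -> :math.pow(n, power) end)
sum = Enum.sum(counts)

intervals =
  counts
  |> Enum.map(&(&1 / sum))
  |> Enum.reduce([], fn density, acc ->
    case acc do
      [] -> [{0, density}]
      [{_, lower} | _] -> [{lower, lower + density} | acc]
    end
  end)
  |> Enum.reverse()

IO.inspect(intervals)
# => [{0, 0.0333...}, {0.0333..., 0.1666...}, {0.1666..., 0.4666...}, {0.4666..., 1.0}]

# generate_tagged_activities/1 then picks a tag index by locating a uniform
# random number inside one of these intervals:
random = :rand.uniform()
Enum.find_index(intervals, fn {lower, upper} -> lower <= random and upper > random end)
```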

View file

@@ -0,0 +1,87 @@
+defmodule Mix.Tasks.Pleroma.Benchmarks.Tags do
+  use Mix.Task
+  alias Pleroma.Repo
+  alias Pleroma.LoadTesting.Generator
+
+  import Ecto.Query
+
+  def run(_args) do
+    Mix.Pleroma.start_pleroma()
+    activities_count = Repo.aggregate(from(a in Pleroma.Activity), :count, :id)
+
+    if activities_count == 0 do
+      IO.puts("Did not find any activities, cleaning and generating")
+      clean_tables()
+      Generator.generate_users(users_max: 10)
+      Generator.generate_tagged_activities()
+    else
+      IO.puts("Found #{activities_count} activities, won't generate new ones")
+    end
+
+    tags = Enum.map(0..20, fn i -> {"For #tag_#{i}", "tag_#{i}"} end)
+
+    Enum.each(tags, fn {_, tag} ->
+      query =
+        from(o in Pleroma.Object,
+          where: fragment("(?)->'tag' \\? (?)", o.data, ^tag)
+        )
+
+      count = Repo.aggregate(query, :count, :id)
+      IO.puts("Database contains #{count} posts tagged with #{tag}")
+    end)
+
+    user = Repo.all(Pleroma.User) |> List.first()
+
+    Benchee.run(
+      %{
+        "Hashtag fetching, any" => fn tags ->
+          Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
+            %{
+              "any" => tags
+            },
+            user,
+            false
+          )
+        end,
+        # Will always return zero results because no overlapping hashtags are generated.
+        "Hashtag fetching, all" => fn tags ->
+          Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
+            %{
+              "all" => tags
+            },
+            user,
+            false
+          )
+        end
+      },
+      inputs:
+        tags
+        |> Enum.map(fn {_, v} -> v end)
+        |> Enum.chunk_every(2)
+        |> Enum.map(fn tags -> {"For #{inspect(tags)}", tags} end),
+      time: 5
+    )
+
+    Benchee.run(
+      %{
+        "Hashtag fetching" => fn tag ->
+          Pleroma.Web.MastodonAPI.TimelineController.hashtag_fetching(
+            %{
+              "tag" => tag
+            },
+            user,
+            false
+          )
+        end
+      },
+      inputs: tags,
+      time: 5
+    )
+  end
+
+  defp clean_tables do
+    IO.puts("Deleting old data...\n")
+    Ecto.Adapters.SQL.query!(Repo, "TRUNCATE users CASCADE;")
+    Ecto.Adapters.SQL.query!(Repo, "TRUNCATE activities CASCADE;")
+    Ecto.Adapters.SQL.query!(Repo, "TRUNCATE objects CASCADE;")
+  end
+end

View file

@@ -503,6 +503,7 @@
    transmogrifier: 20,
    scheduled_activities: 10,
    background: 5,
+    attachments_cleanup: 5,
    new_users_digest: 1
  ],
  crontab: [

View file

@@ -29,6 +29,7 @@ Has these additional fields under the `pleroma` object:
- `spoiler_text`: a map consisting of alternate representations of the `spoiler_text` property with the key being it's mimetype. Currently the only alternate representation supported is `text/plain`
- `expires_at`: a datetime (iso8601) that states when the post will expire (be deleted automatically), or empty if the post won't expire
- `thread_muted`: true if the thread the post belongs to is muted
+- `emoji_reactions`: A list with emoji / reaction count tuples. Contains no information about the reacting users, for that use the `emoji_reactions_by` endpoint.

## Attachments

View file

@@ -451,11 +451,11 @@ Emoji reactions work a lot like favourites do. They make it possible to react to
* Method: `GET`
* Authentication: optional
* Params: None
-* Response: JSON, a map of emoji to account list mappings.
+* Response: JSON, a list of emoji/account list tuples, sorted by emoji insertion date, in ascending order, e.g, the first emoji in the list is the oldest.
* Example Response:
```json
-{
-  "😀" => [{"id" => "xyz.."...}, {"id" => "zyx..."}],
-  "🗡" => [{"id" => "abc..."}]
-}
+[
+  ["😀", [{"id" => "xyz.."...}, {"id" => "zyx..."}]],
+  ["☕", [{"id" => "abc..."}]]
+]
```
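
To illustrate the shape change documented above, a minimal client-side sketch in Elixir (hypothetical variable names, illustrative account data) that folds the new list-of-tuples response back into a map keyed by emoji:

```elixir
# Hypothetical decoded response in the new list format documented above.
reactions = [
  ["😀", [%{"id" => "xyz"}, %{"id" => "zyx"}]],
  ["☕", [%{"id" => "abc"}]]
]

# Fold into the old %{emoji => accounts} shape if a client still wants a map.
# Note that the insertion order carried by the list is lost in the map.
by_emoji = Map.new(reactions, fn [emoji, accounts] -> {emoji, accounts} end)

IO.inspect(by_emoji["☕"])
# => [%{"id" => "abc"}]
```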

View file

@@ -312,9 +312,7 @@ def restrict_deactivated_users(query) do
      from(u in User.Query.build(deactivated: true), select: u.ap_id)
      |> Repo.all()

-    from(activity in query,
-      where: activity.actor not in ^deactivated_users
-    )
+    Activity.Queries.exclude_authors(query, deactivated_users)
  end

  defdelegate search(user, query, options \\ []), to: Pleroma.Activity.Search

View file

@@ -12,6 +12,7 @@ defmodule Pleroma.Activity.Queries do
  @type query :: Ecto.Queryable.t() | Activity.t()

  alias Pleroma.Activity
+  alias Pleroma.User

  @spec by_ap_id(query, String.t()) :: query
  def by_ap_id(query \\ Activity, ap_id) do
@@ -29,6 +30,11 @@ def by_actor(query \\ Activity, actor) do
    )
  end

+  @spec by_author(query, String.t()) :: query
+  def by_author(query \\ Activity, %User{ap_id: ap_id}) do
+    from(a in query, where: a.actor == ^ap_id)
+  end
+
  @spec by_object_id(query, String.t() | [String.t()]) :: query
  def by_object_id(query \\ Activity, object_id)
@@ -72,4 +78,8 @@ def exclude_type(query \\ Activity, activity_type) do
      where: fragment("(?)->>'type' != ?", activity.data, ^activity_type)
    )
  end
+
+  def exclude_authors(query \\ Activity, actors) do
+    from(activity in query, where: activity.actor not in ^actors)
+  end
end
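
A hedged sketch of how the two helpers added above might be composed in an Ecto pipeline elsewhere in the codebase; `author` and `blocked_ap_ids` are assumed inputs, not part of this diff:

```elixir
alias Pleroma.Activity
alias Pleroma.Repo

# `author` is a %Pleroma.User{}; `blocked_ap_ids` is a list of actor AP ids,
# e.g. the User.blocked_users_ap_ids(user) call used by Activity.Search below.
activities =
  Activity
  |> Activity.Queries.by_author(author)
  |> Activity.Queries.exclude_authors(blocked_ap_ids)
  |> Repo.all()
```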

View file

@@ -26,18 +26,23 @@ def search(user, search_query, options \\ []) do
    |> query_with(index_type, search_query)
    |> maybe_restrict_local(user)
    |> maybe_restrict_author(author)
+    |> maybe_restrict_blocked(user)
    |> Pagination.fetch_paginated(%{"offset" => offset, "limit" => limit}, :offset)
    |> maybe_fetch(user, search_query)
  end

  def maybe_restrict_author(query, %User{} = author) do
-    from([a, o] in query,
-      where: a.actor == ^author.ap_id
-    )
+    Activity.Queries.by_author(query, author)
  end

  def maybe_restrict_author(query, _), do: query

+  def maybe_restrict_blocked(query, %User{} = user) do
+    Activity.Queries.exclude_authors(query, User.blocked_users_ap_ids(user))
+  end
+
+  def maybe_restrict_blocked(query, _), do: query
+
  defp restrict_public(q) do
    from([a, o] in q,
      where: fragment("?->>'type' = 'Create'", a.data),

View file

@@ -33,6 +33,7 @@ def user_agent do
  def start(_type, _args) do
    Pleroma.HTML.compile_scrubbers()
    Pleroma.Config.DeprecationWarnings.warn()
+    Pleroma.Repo.check_migrations_applied!()
    setup_instrumenters()
    load_custom_modules()

View file

@@ -19,6 +19,8 @@ defmodule Pleroma.Object do
  @type t() :: %__MODULE__{}

+  @derive {Jason.Encoder, only: [:data]}
+
  schema "objects" do
    field(:data, :map)
@@ -180,85 +182,17 @@ def swap_object_with_tombstone(object) do
  def delete(%Object{data: %{"id" => id}} = object) do
    with {:ok, _obj} = swap_object_with_tombstone(object),
-         :ok <- delete_attachments(object),
         deleted_activity = Activity.delete_all_by_object_ap_id(id),
         {:ok, true} <- Cachex.del(:object_cache, "object:#{id}"),
-         {:ok, _} <- Cachex.del(:web_resp_cache, URI.parse(id).path) do
+         {:ok, _} <- Cachex.del(:web_resp_cache, URI.parse(id).path),
+         {:ok, _} <-
+           Pleroma.Workers.AttachmentsCleanupWorker.enqueue("cleanup_attachments", %{
+             "object" => object
+           }) do
      {:ok, object, deleted_activity}
    end
  end

-  defp delete_attachments(%{data: %{"attachment" => [_ | _] = attachments, "actor" => actor}}) do
-    hrefs =
-      Enum.flat_map(attachments, fn attachment ->
-        Enum.map(attachment["url"], & &1["href"])
-      end)
-
-    names = Enum.map(attachments, & &1["name"])
-
-    uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
-
-    # find all objects for copies of the attachments, name and actor doesn't matter here
-    delete_ids =
-      from(o in Object,
-        where:
-          fragment(
-            "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href'))::jsonb \\?| (?)",
-            o.data,
-            ^hrefs
-          )
-      )
-      |> Repo.all()
-      # we should delete 1 object for any given attachment, but don't delete files if
-      # there are more than 1 object for it
-      |> Enum.reduce(%{}, fn %{
-                               id: id,
-                               data: %{
-                                 "url" => [%{"href" => href}],
-                                 "actor" => obj_actor,
-                                 "name" => name
-                               }
-                             },
-                             acc ->
-        Map.update(acc, href, %{id: id, count: 1}, fn val ->
-          case obj_actor == actor and name in names do
-            true ->
-              # set id of the actor's object that will be deleted
-              %{val | id: id, count: val.count + 1}
-
-            false ->
-              # another actor's object, just increase count to not delete file
-              %{val | count: val.count + 1}
-          end
-        end)
-      end)
-      |> Enum.map(fn {href, %{id: id, count: count}} ->
-        # only delete files that have single instance
-        with 1 <- count do
-          prefix =
-            case Pleroma.Config.get([Pleroma.Upload, :base_url]) do
-              nil -> "media"
-              _ -> ""
-            end
-
-          base_url = Pleroma.Config.get([__MODULE__, :base_url], Pleroma.Web.base_url())
-
-          file_path = String.trim_leading(href, "#{base_url}/#{prefix}")
-
-          uploader.delete_file(file_path)
-        end
-
-        id
-      end)
-
-    from(o in Object, where: o.id in ^delete_ids)
-    |> Repo.delete_all()
-
-    :ok
-  end
-
-  defp delete_attachments(%{data: _data}), do: :ok
-
  def prune(%Object{data: %{"id" => id}} = object) do
    with {:ok, object} <- Repo.delete(object),
         {:ok, true} <- Cachex.del(:object_cache, "object:#{id}"),

View file

@@ -8,6 +8,8 @@ defmodule Pleroma.Repo do
    adapter: Ecto.Adapters.Postgres,
    migration_timestamps: [type: :naive_datetime_usec]

+  require Logger
+
  defmodule Instrumenter do
    use Prometheus.EctoInstrumenter
  end
@@ -47,4 +49,37 @@ def get_assoc(resource, association) do
      _ -> {:error, :not_found}
    end
  end
+
+  def check_migrations_applied!() do
+    unless Pleroma.Config.get(
+             [:i_am_aware_this_may_cause_data_loss, :disable_migration_check],
+             false
+           ) do
+      Ecto.Migrator.with_repo(__MODULE__, fn repo ->
+        down_migrations =
+          Ecto.Migrator.migrations(repo)
+          |> Enum.reject(fn
+            {:up, _, _} -> true
+            {:down, _, _} -> false
+          end)
+
+        if length(down_migrations) > 0 do
+          down_migrations_text =
+            Enum.map(down_migrations, fn {:down, id, name} -> "- #{name} (#{id})\n" end)
+
+          Logger.error(
+            "The following migrations were not applied:\n#{down_migrations_text}If you want to start Pleroma anyway, set\nconfig :pleroma, :i_am_aware_this_may_cause_data_loss, disable_migration_check: true"
+          )
+
+          raise Pleroma.Repo.UnappliedMigrationsError
+        end
+      end)
+    else
+      :ok
+    end
+  end
end
+
+defmodule Pleroma.Repo.UnappliedMigrationsError do
+  defexception message: "Unapplied Migrations detected"
+end
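
The escape hatch named in the error message above is just a config flag; a sketch of what that looks like in an instance's configuration (any config file the instance already loads):

```elixir
# Opt out of the startup migration check. As the key name warns, running with
# unapplied migrations may lose or corrupt data, so this is an emergency switch only.
config :pleroma, :i_am_aware_this_may_cause_data_loss, disable_migration_check: true
```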

View file

@@ -20,7 +20,7 @@ def filter(%{"type" => message_type} = message) do
    with accepted_vocabulary <- Pleroma.Config.get([:mrf_vocabulary, :accept]),
         rejected_vocabulary <- Pleroma.Config.get([:mrf_vocabulary, :reject]),
         true <-
-           length(accepted_vocabulary) == 0 || Enum.member?(accepted_vocabulary, message_type),
+           Enum.empty?(accepted_vocabulary) || Enum.member?(accepted_vocabulary, message_type),
         false <-
           length(rejected_vocabulary) > 0 && Enum.member?(rejected_vocabulary, message_type),
         {:ok, _} <- filter(message["object"]) do

View file

@@ -658,24 +658,8 @@ def handle_incoming(
    with %User{ap_id: ^actor_id} = actor <- User.get_cached_by_ap_id(object["id"]) do
      {:ok, new_user_data} = ActivityPub.user_data_from_user_object(object)

-      locked = new_user_data[:locked] || false
-      attachment = get_in(new_user_data, [:source_data, "attachment"]) || []
-      invisible = new_user_data[:invisible] || false
-
-      fields =
-        attachment
-        |> Enum.filter(fn %{"type" => t} -> t == "PropertyValue" end)
-        |> Enum.map(fn fields -> Map.take(fields, ["name", "value"]) end)
-
-      update_data =
-        new_user_data
-        |> Map.take([:avatar, :banner, :bio, :name, :also_known_as])
-        |> Map.put(:fields, fields)
-        |> Map.put(:locked, locked)
-        |> Map.put(:invisible, invisible)
-
      actor
-      |> User.upgrade_changeset(update_data, true)
+      |> User.upgrade_changeset(new_user_data, true)
      |> User.update_and_set_cache()

      ActivityPub.update(%{

View file

@@ -312,19 +312,12 @@ def make_emoji_reaction_data(user, object, emoji, activity_id) do
    |> Map.put("content", emoji)
  end

-  @spec update_element_in_object(String.t(), list(any), Object.t()) ::
+  @spec update_element_in_object(String.t(), list(any), Object.t(), integer() | nil) ::
          {:ok, Object.t()} | {:error, Ecto.Changeset.t()}
-  def update_element_in_object(property, element, object) do
+  def update_element_in_object(property, element, object, count \\ nil) do
    length =
-      if is_map(element) do
-        element
-        |> Map.values()
-        |> List.flatten()
-        |> length()
-      else
-        element
-        |> length()
-      end
+      count ||
+        length(element)

    data =
      Map.merge(
@@ -344,29 +337,52 @@ def add_emoji_reaction_to_object(
        %Activity{data: %{"content" => emoji, "actor" => actor}},
        object
      ) do
-    reactions = object.data["reactions"] || %{}
-    emoji_actors = reactions[emoji] || []
-    new_emoji_actors = [actor | emoji_actors] |> Enum.uniq()
-    new_reactions = Map.put(reactions, emoji, new_emoji_actors)
-    update_element_in_object("reaction", new_reactions, object)
+    reactions = object.data["reactions"] || []
+
+    new_reactions =
+      case Enum.find_index(reactions, fn [candidate, _] -> emoji == candidate end) do
+        nil ->
+          reactions ++ [[emoji, [actor]]]
+
+        index ->
+          List.update_at(
+            reactions,
+            index,
+            fn [emoji, users] -> [emoji, Enum.uniq([actor | users])] end
+          )
+      end
+
+    count = emoji_count(new_reactions)
+
+    update_element_in_object("reaction", new_reactions, object, count)
+  end
+
+  def emoji_count(reactions_list) do
+    Enum.reduce(reactions_list, 0, fn [_, users], acc -> acc + length(users) end)
  end

  def remove_emoji_reaction_from_object(
        %Activity{data: %{"content" => emoji, "actor" => actor}},
        object
      ) do
-    reactions = object.data["reactions"] || %{}
-    emoji_actors = reactions[emoji] || []
-    new_emoji_actors = List.delete(emoji_actors, actor)
+    reactions = object.data["reactions"] || []

    new_reactions =
-      if new_emoji_actors == [] do
-        Map.delete(reactions, emoji)
-      else
-        Map.put(reactions, emoji, new_emoji_actors)
+      case Enum.find_index(reactions, fn [candidate, _] -> emoji == candidate end) do
+        nil ->
+          reactions
+
+        index ->
+          List.update_at(
+            reactions,
+            index,
+            fn [emoji, users] -> [emoji, List.delete(users, actor)] end
+          )
+          |> Enum.reject(fn [_, users] -> Enum.empty?(users) end)
      end

-    update_element_in_object("reaction", new_reactions, object)
+    count = emoji_count(new_reactions)
+    update_element_in_object("reaction", new_reactions, object, count)
  end

  @spec add_like_to_object(Activity.t(), Object.t()) ::
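
A standalone sketch of the new `reactions` shape that the refactor above stores on objects, and of the counting and update logic it uses (plain Elixir, illustrative data only):

```elixir
# New storage shape: an ordered list of [emoji, actor_ap_ids] pairs instead of a map.
reactions = [
  ["😀", ["https://example.com/users/alice", "https://example.com/users/bob"]],
  ["☕", ["https://example.com/users/carol"]]
]

# Same reduction as emoji_count/1 above: the total number of individual reactions.
total = Enum.reduce(reactions, 0, fn [_, users], acc -> acc + length(users) end)
IO.puts(total)
# => 3

# Adding a reaction keeps insertion order and de-duplicates actors, as in
# add_emoji_reaction_to_object/2 above.
actor = "https://example.com/users/dave"
emoji = "☕"

updated =
  case Enum.find_index(reactions, fn [candidate, _] -> emoji == candidate end) do
    nil -> reactions ++ [[emoji, [actor]]]
    index -> List.update_at(reactions, index, fn [e, users] -> [e, Enum.uniq([actor | users])] end)
  end

IO.inspect(updated)
# => [["😀", [...]], ["☕", ["https://example.com/users/dave", "https://example.com/users/carol"]]]
```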

View file

@@ -75,7 +75,7 @@ defmodule Pleroma.Web.AdminAPI.AdminAPIController do
  plug(
    OAuthScopesPlug,
    %{scopes: ["write:reports"], admin: true}
-    when action in [:report_update_state, :report_respond]
+    when action in [:reports_update]
  )

  plug(
@@ -639,7 +639,7 @@ def get_password_reset(conn, %{"nickname" => nickname}) do
  def force_password_reset(%{assigns: %{user: admin}} = conn, %{"nicknames" => nicknames}) do
    users = nicknames |> Enum.map(&User.get_cached_by_nickname/1)

-    Enum.map(users, &User.force_password_reset_async/1)
+    Enum.each(users, &User.force_password_reset_async/1)

    ModerationLog.insert_log(%{
      actor: admin,

View file

@@ -85,9 +85,13 @@ def delete(activity_id, user) do
  def repeat(id_or_ap_id, user, params \\ %{}) do
    with %Activity{} = activity <- get_by_id_or_ap_id(id_or_ap_id),
         object <- Object.normalize(activity),
-         nil <- Utils.get_existing_announce(user.ap_id, object),
+         announce_activity <- Utils.get_existing_announce(user.ap_id, object),
         public <- public_announce?(object, params) do
-      ActivityPub.announce(user, object, nil, true, public)
+      if announce_activity do
+        {:ok, announce_activity, object}
+      else
+        ActivityPub.announce(user, object, nil, true, public)
+      end
    else
      _ -> {:error, dgettext("errors", "Could not repeat")}
    end
@@ -105,8 +109,12 @@ def unrepeat(id_or_ap_id, user) do
  def favorite(id_or_ap_id, user) do
    with %Activity{} = activity <- get_by_id_or_ap_id(id_or_ap_id),
         object <- Object.normalize(activity),
-         nil <- Utils.get_existing_like(user.ap_id, object) do
-      ActivityPub.like(user, object)
+         like_activity <- Utils.get_existing_like(user.ap_id, object) do
+      if like_activity do
+        {:ok, like_activity, object}
+      else
+        ActivityPub.like(user, object)
+      end
    else
      _ -> {:error, dgettext("errors", "Could not favorite")}
    end

View file

@@ -43,7 +43,7 @@ defp do_search(version, %{assigns: %{user: user}} = conn, %{"q" => query} = para
    result =
      default_values
      |> Enum.map(fn {resource, default_value} ->
-        if params["type"] == nil or params["type"] == resource do
+        if params["type"] in [nil, resource] do
          {resource, fn -> resource_search(version, resource, query, options) end}
        else
          {resource, fn -> default_value end}

View file

@@ -6,9 +6,9 @@ defmodule Pleroma.Web.MastodonAPI.SubscriptionController do
  @moduledoc "The module represents functions to manage user subscriptions."

  use Pleroma.Web, :controller

+  alias Pleroma.Web.MastodonAPI.PushSubscriptionView, as: View
  alias Pleroma.Web.Push
  alias Pleroma.Web.Push.Subscription
-  alias Pleroma.Web.MastodonAPI.PushSubscriptionView, as: View

  action_fallback(:errors)

View file

@@ -77,10 +77,7 @@ def public(%{assigns: %{user: user}} = conn, params) do
    |> render("index.json", activities: activities, for: user, as: :activity)
  end

-  # GET /api/v1/timelines/tag/:tag
-  def hashtag(%{assigns: %{user: user}} = conn, params) do
-    local_only = truthy_param?(params["local"])
-
+  def hashtag_fetching(params, user, local_only) do
    tags =
      [params["tag"], params["any"]]
      |> List.flatten()
@@ -98,7 +95,7 @@ def hashtag(%{assigns: %{user: user}} = conn, params) do
      |> Map.get("none", [])
      |> Enum.map(&String.downcase(&1))

-    activities =
+    _activities =
      params
      |> Map.put("type", "Create")
      |> Map.put("local_only", local_only)
@@ -109,6 +106,13 @@ def hashtag(%{assigns: %{user: user}} = conn, params) do
      |> Map.put("tag_all", tag_all)
      |> Map.put("tag_reject", tag_reject)
      |> ActivityPub.fetch_public_activities()
+  end
+
+  # GET /api/v1/timelines/tag/:tag
+  def hashtag(%{assigns: %{user: user}} = conn, params) do
+    local_only = truthy_param?(params["local"])
+
+    activities = hashtag_fetching(params, user, local_only)

    conn
    |> add_link_headers(activities, %{"local" => local_only})

View file

@@ -253,6 +253,15 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
        nil
      end

+    emoji_reactions =
+      with %{data: %{"reactions" => emoji_reactions}} <- object do
+        Enum.map(emoji_reactions, fn [emoji, users] ->
+          [emoji, length(users)]
+        end)
+      else
+        _ -> []
+      end
+
    %{
      id: to_string(activity.id),
      uri: object.data["id"],
@@ -293,7 +302,8 @@ def render("show.json", %{activity: %{data: %{"object" => _object}} = activity}
        spoiler_text: %{"text/plain" => summary_plaintext},
        expires_at: expires_at,
        direct_conversation_id: direct_conversation_id,
-        thread_muted: thread_muted?
+        thread_muted: thread_muted?,
+        emoji_reactions: emoji_reactions
      }
    }
  end
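
Based on the view change above and the `emoji_reactions` field documented earlier in this diff, the `pleroma` map rendered for a status would carry something like the following (values are illustrative):

```elixir
# Illustrative only: emoji reactions are rendered as [emoji, count] pairs.
%{
  pleroma: %{
    thread_muted: false,
    emoji_reactions: [["😀", 2], ["☕", 1]]
  }
}
```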

View file

@@ -14,10 +14,10 @@ defmodule Pleroma.Web.OAuth.OAuthController do
  alias Pleroma.Web.ControllerHelper
  alias Pleroma.Web.OAuth.App
  alias Pleroma.Web.OAuth.Authorization
+  alias Pleroma.Web.OAuth.Scopes
  alias Pleroma.Web.OAuth.Token
  alias Pleroma.Web.OAuth.Token.Strategy.RefreshToken
  alias Pleroma.Web.OAuth.Token.Strategy.Revoke, as: RevokeToken
-  alias Pleroma.Web.OAuth.Scopes

  require Logger

View file

@@ -23,7 +23,7 @@ defmodule Pleroma.Web.PleromaAPI.PleromaAPIController do
  plug(
    OAuthScopesPlug,
    %{scopes: ["read:statuses"]}
-    when action in [:conversation, :conversation_statuses, :emoji_reactions_by]
+    when action in [:conversation, :conversation_statuses]
  )

  plug(
@@ -43,21 +43,21 @@ defmodule Pleroma.Web.PleromaAPI.PleromaAPIController do
  def emoji_reactions_by(%{assigns: %{user: user}} = conn, %{"id" => activity_id}) do
    with %Activity{} = activity <- Activity.get_by_id_with_object(activity_id),
-         %Object{data: %{"reactions" => emoji_reactions}} <- Object.normalize(activity) do
+         %Object{data: %{"reactions" => emoji_reactions}} when is_list(emoji_reactions) <-
+           Object.normalize(activity) do
      reactions =
        emoji_reactions
-        |> Enum.map(fn {emoji, users} ->
+        |> Enum.map(fn [emoji, users] ->
          users = Enum.map(users, &User.get_cached_by_ap_id/1)
          {emoji, AccountView.render("index.json", %{users: users, for: user, as: :user})}
        end)
-        |> Enum.into(%{})

      conn
      |> json(reactions)
    else
      _e ->
        conn
-        |> json(%{})
+        |> json([])
    end
  end

View file

@@ -0,0 +1,88 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.AttachmentsCleanupWorker do
+  import Ecto.Query
+
+  alias Pleroma.Object
+  alias Pleroma.Repo
+
+  use Pleroma.Workers.WorkerHelper, queue: "attachments_cleanup"
+
+  @impl Oban.Worker
+  def perform(
+        %{"object" => %{"data" => %{"attachment" => [_ | _] = attachments, "actor" => actor}}},
+        _job
+      ) do
+    hrefs =
+      Enum.flat_map(attachments, fn attachment ->
+        Enum.map(attachment["url"], & &1["href"])
+      end)
+
+    names = Enum.map(attachments, & &1["name"])
+
+    uploader = Pleroma.Config.get([Pleroma.Upload, :uploader])
+
+    # find all objects for copies of the attachments, name and actor doesn't matter here
+    delete_ids =
+      from(o in Object,
+        where:
+          fragment(
+            "to_jsonb(array(select jsonb_array_elements((?)#>'{url}') ->> 'href' where jsonb_typeof((?)#>'{url}') = 'array'))::jsonb \\?| (?)",
+            o.data,
+            o.data,
+            ^hrefs
+          )
+      )
+      # The query above can be time consumptive on large instances until we
+      # refactor how uploads are stored
+      |> Repo.all(timout: :infinity)
+      # we should delete 1 object for any given attachment, but don't delete
+      # files if there are more than 1 object for it
+      |> Enum.reduce(%{}, fn %{
+                               id: id,
+                               data: %{
+                                 "url" => [%{"href" => href}],
+                                 "actor" => obj_actor,
+                                 "name" => name
+                               }
+                             },
+                             acc ->
+        Map.update(acc, href, %{id: id, count: 1}, fn val ->
+          case obj_actor == actor and name in names do
+            true ->
+              # set id of the actor's object that will be deleted
+              %{val | id: id, count: val.count + 1}
+
+            false ->
+              # another actor's object, just increase count to not delete file
+              %{val | count: val.count + 1}
+          end
+        end)
+      end)
+      |> Enum.map(fn {href, %{id: id, count: count}} ->
+        # only delete files that have single instance
+        with 1 <- count do
+          prefix =
+            case Pleroma.Config.get([Pleroma.Upload, :base_url]) do
+              nil -> "media"
+              _ -> ""
+            end
+
+          base_url = Pleroma.Config.get([__MODULE__, :base_url], Pleroma.Web.base_url())
+
+          file_path = String.trim_leading(href, "#{base_url}/#{prefix}")
+
+          uploader.delete_file(file_path)
+        end
+
+        id
+      end)
+
+    from(o in Object, where: o.id in ^delete_ids)
+    |> Repo.delete_all()
+  end
+
+  def perform(%{"object" => _object}, _job), do: :ok
+end
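
As the `Pleroma.Object.delete/1` change earlier in this diff shows, this worker is scheduled through `enqueue/2`; a minimal sketch of that call, assuming an `object` struct is already loaded:

```elixir
# Schedule attachment cleanup for an object that is being deleted, mirroring
# the call added to Pleroma.Object.delete/1 above.
{:ok, _job} =
  Pleroma.Workers.AttachmentsCleanupWorker.enqueue("cleanup_attachments", %{
    "object" => object
  })
```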

View file

@@ -124,7 +124,7 @@ defp deps do
      {:earmark, "~> 1.3"},
      {:bbcode, "~> 0.1.1"},
      {:ex_machina, "~> 2.3", only: :test},
-      {:credo, "~> 0.9.3", only: [:dev, :test]},
+      {:credo, "~> 1.1.0", only: [:dev, :test], runtime: false},
      {:mock, "~> 0.3.3", only: :test},
      {:crypt,
       git: "https://github.com/msantos/crypt", ref: "1f2b58927ab57e72910191a7ebaeff984382a1d3"},

View file

@@ -16,7 +16,7 @@
  "cors_plug": {:hex, :cors_plug, "1.5.2", "72df63c87e4f94112f458ce9d25800900cc88608c1078f0e4faddf20933eda6e", [:mix], [{:plug, "~> 1.3 or ~> 1.4 or ~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
  "cowboy": {:hex, :cowboy, "2.7.0", "91ed100138a764355f43316b1d23d7ff6bdb0de4ea618cb5d8677c93a7a2f115", [:rebar3], [{:cowlib, "~> 2.8.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "~> 1.7.1", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm"},
  "cowlib": {:hex, :cowlib, "2.8.0", "fd0ff1787db84ac415b8211573e9a30a3ebe71b5cbff7f720089972b2319c8a4", [:rebar3], [], "hexpm"},
-  "credo": {:hex, :credo, "0.9.3", "76fa3e9e497ab282e0cf64b98a624aa11da702854c52c82db1bf24e54ab7c97a", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:poison, ">= 0.0.0", [hex: :poison, repo: "hexpm", optional: false]}], "hexpm"},
+  "credo": {:hex, :credo, "1.1.5", "caec7a3cadd2e58609d7ee25b3931b129e739e070539ad1a0cd7efeeb47014f4", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"},
  "crontab": {:hex, :crontab, "1.1.8", "2ce0e74777dfcadb28a1debbea707e58b879e6aa0ffbf9c9bb540887bce43617", [:mix], [{:ecto, "~> 1.0 or ~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"},
  "crypt": {:git, "https://github.com/msantos/crypt", "1f2b58927ab57e72910191a7ebaeff984382a1d3", [ref: "1f2b58927ab57e72910191a7ebaeff984382a1d3"]},
  "custom_base": {:hex, :custom_base, "0.2.1", "4a832a42ea0552299d81652aa0b1f775d462175293e99dfbe4d7dbaab785a706", [:mix], [], "hexpm"},
@@ -63,7 +63,7 @@
  "mime": {:hex, :mime, "1.3.1", "30ce04ab3175b6ad0bdce0035cba77bba68b813d523d1aac73d9781b4d193cf8", [:mix], [], "hexpm"},
  "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm"},
  "mochiweb": {:hex, :mochiweb, "2.18.0", "eb55f1db3e6e960fac4e6db4e2db9ec3602cc9f30b86cd1481d56545c3145d2e", [:rebar3], [], "hexpm"},
-  "mock": {:hex, :mock, "0.3.3", "42a433794b1291a9cf1525c6d26b38e039e0d3a360732b5e467bfc77ef26c914", [:mix], [{:meck, "~> 0.8.13", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm"},
+  "mock": {:hex, :mock, "0.3.4", "c5862eb3b8c64237f45f586cf00c9d892ba07bb48305a43319d428ce3c2897dd", [:mix], [{:meck, "~> 0.8.13", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm"},
  "mogrify": {:hex, :mogrify, "0.6.1", "de1b527514f2d95a7bbe9642eb556061afb337e220cf97adbf3a4e6438ed70af", [:mix], [], "hexpm"},
  "mox": {:hex, :mox, "0.5.1", "f86bb36026aac1e6f924a4b6d024b05e9adbed5c63e8daa069bd66fb3292165b", [:mix], [], "hexpm"},
  "myhtmlex": {:git, "https://git.pleroma.social/pleroma/myhtmlex.git", "ad0097e2f61d4953bfef20fb6abddf23b87111e6", [ref: "ad0097e2f61d4953bfef20fb6abddf23b87111e6", submodules: true]},

Binary files not shown

View file

@@ -1 +1 @@
-<!DOCTYPE html><html><head><meta charset=utf-8><meta http-equiv=X-UA-Compatible content="IE=edge,chrome=1"><meta name=renderer content=webkit><meta name=viewport content="width=device-width,initial-scale=1,maximum-scale=1,user-scalable=no"><title>Admin FE</title><link rel="shortcut icon" href=favicon.ico><link href=chunk-elementUI.a842fb0a.css rel=stylesheet><link href=chunk-libs.57fe98a3.css rel=stylesheet><link href=app.fdd73ce4.css rel=stylesheet></head><body><div id=app></div><script type=text/javascript src=static/js/runtime.d6d1aaab.js></script><script type=text/javascript src=static/js/chunk-elementUI.fa319e7b.js></script><script type=text/javascript src=static/js/chunk-libs.35c18287.js></script><script type=text/javascript src=static/js/app.19b7049e.js></script></body></html>
+<!DOCTYPE html><html><head><meta charset=utf-8><meta http-equiv=X-UA-Compatible content="IE=edge,chrome=1"><meta name=renderer content=webkit><meta name=viewport content="width=device-width,initial-scale=1,maximum-scale=1,user-scalable=no"><title>Admin FE</title><link rel="shortcut icon" href=favicon.ico><link href=chunk-elementUI.1abbc9b8.css rel=stylesheet><link href=chunk-libs.57fe98a3.css rel=stylesheet><link href=app.fdd73ce4.css rel=stylesheet></head><body><div id=app></div><script type=text/javascript src=static/js/runtime.cab03b3e.js></script><script type=text/javascript src=static/js/chunk-elementUI.2de79b84.js></script><script type=text/javascript src=static/js/chunk-libs.680db3fc.js></script><script type=text/javascript src=static/js/app.3da0f475.js></script></body></html>

Binary files not shown

File diff suppressed because one or more lines are too long

Some files were not shown because too many files have changed in this diff